Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2021-07-20 12:55:51 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2021-07-20 12:55:51 +0300
commite8d2c2579383897a1dd7f9debd359abe8ae8373d (patch)
treec42be41678c2586d49a75cabce89322082698334 /spec
parentfc845b37ec3a90aaa719975f607740c22ba6a113 (diff)
Add latest changes from gitlab-org/gitlab@14-1-stable-eev14.1.0-rc42
Diffstat (limited to 'spec')
-rw-r--r--spec/config/settings_spec.rb6
-rw-r--r--spec/controllers/abuse_reports_controller_spec.rb2
-rw-r--r--spec/controllers/admin/application_settings_controller_spec.rb7
-rw-r--r--spec/controllers/admin/integrations_controller_spec.rb12
-rw-r--r--spec/controllers/admin/services_controller_spec.rb4
-rw-r--r--spec/controllers/confirmations_controller_spec.rb2
-rw-r--r--spec/controllers/dashboard/projects_controller_spec.rb1
-rw-r--r--spec/controllers/groups/group_links_controller_spec.rb6
-rw-r--r--spec/controllers/groups/settings/integrations_controller_spec.rb14
-rw-r--r--spec/controllers/help_controller_spec.rb8
-rw-r--r--spec/controllers/import/bulk_imports_controller_spec.rb4
-rw-r--r--spec/controllers/invites_controller_spec.rb36
-rw-r--r--spec/controllers/jira_connect/events_controller_spec.rb29
-rw-r--r--spec/controllers/profiles/emails_controller_spec.rb1
-rw-r--r--spec/controllers/profiles/personal_access_tokens_controller_spec.rb12
-rw-r--r--spec/controllers/projects/blob_controller_spec.rb17
-rw-r--r--spec/controllers/projects/commit_controller_spec.rb22
-rw-r--r--spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb4
-rw-r--r--spec/controllers/projects/import/jira_controller_spec.rb12
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb14
-rw-r--r--spec/controllers/projects/merge_requests/diffs_controller_spec.rb16
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb14
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb12
-rw-r--r--spec/controllers/projects/prometheus/metrics_controller_spec.rb4
-rw-r--r--spec/controllers/projects/runners_controller_spec.rb2
-rw-r--r--spec/controllers/projects/service_hook_logs_controller_spec.rb18
-rw-r--r--spec/controllers/projects/service_ping_controller_spec.rb (renamed from spec/controllers/projects/usage_ping_controller_spec.rb)2
-rw-r--r--spec/controllers/projects/services_controller_spec.rb153
-rw-r--r--spec/controllers/projects/settings/access_tokens_controller_spec.rb8
-rw-r--r--spec/controllers/projects/tree_controller_spec.rb19
-rw-r--r--spec/controllers/projects_controller_spec.rb35
-rw-r--r--spec/controllers/registrations/experience_levels_controller_spec.rb2
-rw-r--r--spec/controllers/registrations_controller_spec.rb32
-rw-r--r--spec/controllers/repositories/git_http_controller_spec.rb12
-rw-r--r--spec/controllers/root_controller_spec.rb28
-rw-r--r--spec/controllers/users/terms_controller_spec.rb1
-rw-r--r--spec/controllers/users/unsubscribes_controller_spec.rb36
-rw-r--r--spec/db/schema_spec.rb15
-rw-r--r--spec/deprecation_toolkit_env.rb5
-rw-r--r--spec/experiments/application_experiment_spec.rb164
-rw-r--r--spec/experiments/new_project_readme_content_experiment_spec.rb36
-rw-r--r--spec/factories/audit_events.rb1
-rw-r--r--spec/factories/ci/builds.rb8
-rw-r--r--spec/factories/ci/pending_builds.rb10
-rw-r--r--spec/factories/ci/reports/security/identifiers.rb15
-rw-r--r--spec/factories/ci/reports/security/links.rb14
-rw-r--r--spec/factories/ci/reports/security/scanners.rb29
-rw-r--r--spec/factories/ci/running_builds.rb10
-rw-r--r--spec/factories/events.rb1
-rw-r--r--spec/factories/integration_data.rb2
-rw-r--r--spec/factories/integrations.rb15
-rw-r--r--spec/factories/merge_request_cleanup_schedules.rb15
-rw-r--r--spec/factories/merge_request_diff_commit_users.rb8
-rw-r--r--spec/factories/packages.rb6
-rw-r--r--spec/factories/packages/helm/file_metadatum.rb2
-rw-r--r--spec/factories/packages/package_file.rb9
-rw-r--r--spec/factories/projects.rb8
-rw-r--r--spec/factories/usage_data.rb10
-rw-r--r--spec/fast_spec_helper.rb10
-rw-r--r--spec/features/admin/admin_appearance_spec.rb2
-rw-r--r--spec/features/admin/admin_dev_ops_report_spec.rb2
-rw-r--r--spec/features/admin/admin_groups_spec.rb13
-rw-r--r--spec/features/admin/admin_mode/workers_spec.rb5
-rw-r--r--spec/features/admin/admin_projects_spec.rb11
-rw-r--r--spec/features/admin/admin_runners_spec.rb274
-rw-r--r--spec/features/admin/admin_sees_background_migrations_spec.rb13
-rw-r--r--spec/features/admin/admin_settings_spec.rb113
-rw-r--r--spec/features/admin/admin_users_impersonation_tokens_spec.rb4
-rw-r--r--spec/features/admin/services/admin_visits_service_templates_spec.rb14
-rw-r--r--spec/features/admin/users/user_spec.rb78
-rw-r--r--spec/features/admin/users/users_spec.rb18
-rw-r--r--spec/features/alert_management/alert_management_list_spec.rb2
-rw-r--r--spec/features/boards/boards_spec.rb3
-rw-r--r--spec/features/boards/sidebar_spec.rb8
-rw-r--r--spec/features/calendar_spec.rb4
-rw-r--r--spec/features/cycle_analytics_spec.rb1
-rw-r--r--spec/features/dashboard/datetime_on_tooltips_spec.rb1
-rw-r--r--spec/features/dashboard/issues_filter_spec.rb4
-rw-r--r--spec/features/dashboard/merge_requests_spec.rb2
-rw-r--r--spec/features/dashboard/projects_spec.rb6
-rw-r--r--spec/features/dashboard/root_spec.rb19
-rw-r--r--spec/features/file_uploads/group_import_spec.rb1
-rw-r--r--spec/features/file_uploads/project_import_spec.rb1
-rw-r--r--spec/features/file_uploads/user_avatar_spec.rb1
-rw-r--r--spec/features/groups/import_export/connect_instance_spec.rb11
-rw-r--r--spec/features/groups/import_export/import_file_spec.rb2
-rw-r--r--spec/features/groups/members/manage_members_spec.rb4
-rw-r--r--spec/features/groups/merge_requests_spec.rb25
-rw-r--r--spec/features/groups/navbar_spec.rb74
-rw-r--r--spec/features/groups/packages_spec.rb22
-rw-r--r--spec/features/groups/settings/repository_spec.rb2
-rw-r--r--spec/features/groups/settings/user_searches_in_settings_spec.rb4
-rw-r--r--spec/features/groups/show_spec.rb8
-rw-r--r--spec/features/groups/user_browse_projects_group_page_spec.rb2
-rw-r--r--spec/features/groups_spec.rb33
-rw-r--r--spec/features/help_pages_spec.rb2
-rw-r--r--spec/features/invites_spec.rb51
-rw-r--r--spec/features/issuables/markdown_references/jira_spec.rb8
-rw-r--r--spec/features/issues/filtered_search/dropdown_assignee_spec.rb7
-rw-r--r--spec/features/issues/filtered_search/dropdown_author_spec.rb7
-rw-r--r--spec/features/issues/filtered_search/dropdown_base_spec.rb7
-rw-r--r--spec/features/issues/filtered_search/dropdown_emoji_spec.rb9
-rw-r--r--spec/features/issues/filtered_search/dropdown_hint_spec.rb8
-rw-r--r--spec/features/issues/filtered_search/dropdown_label_spec.rb9
-rw-r--r--spec/features/issues/filtered_search/dropdown_milestone_spec.rb10
-rw-r--r--spec/features/issues/filtered_search/dropdown_release_spec.rb10
-rw-r--r--spec/features/issues/filtered_search/recent_searches_spec.rb9
-rw-r--r--spec/features/issues/filtered_search/search_bar_spec.rb7
-rw-r--r--spec/features/issues/filtered_search/visual_tokens_spec.rb16
-rw-r--r--spec/features/issues/incident_issue_spec.rb49
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb30
-rw-r--r--spec/features/issues/user_bulk_edits_issues_spec.rb8
-rw-r--r--spec/features/issues/user_edits_issue_spec.rb40
-rw-r--r--spec/features/issues/user_interacts_with_awards_spec.rb2
-rw-r--r--spec/features/issues/user_sees_breadcrumb_links_spec.rb1
-rw-r--r--spec/features/issues/user_sorts_issues_spec.rb23
-rw-r--r--spec/features/markdown/metrics_spec.rb3
-rw-r--r--spec/features/merge_request/batch_comments_spec.rb10
-rw-r--r--spec/features/merge_request/user_comments_on_diff_spec.rb8
-rw-r--r--spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb4
-rw-r--r--spec/features/merge_request/user_posts_diff_notes_spec.rb16
-rw-r--r--spec/features/merge_request/user_resolves_conflicts_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_versions_spec.rb6
-rw-r--r--spec/features/merge_request/user_toggles_whitespace_changes_spec.rb2
-rw-r--r--spec/features/merge_request/user_views_diffs_spec.rb6
-rw-r--r--spec/features/merge_requests/user_lists_merge_requests_spec.rb16
-rw-r--r--spec/features/merge_requests/user_mass_updates_spec.rb6
-rw-r--r--spec/features/merge_requests/user_sorts_merge_requests_spec.rb2
-rw-r--r--spec/features/oauth_login_spec.rb2
-rw-r--r--spec/features/participants_autocomplete_spec.rb1
-rw-r--r--spec/features/profiles/personal_access_tokens_spec.rb17
-rw-r--r--spec/features/projects/active_tabs_spec.rb34
-rw-r--r--spec/features/projects/activity/user_sees_design_activity_spec.rb5
-rw-r--r--spec/features/projects/ci/editor_spec.rb2
-rw-r--r--spec/features/projects/ci/lint_spec.rb2
-rw-r--r--spec/features/projects/clusters/gcp_spec.rb2
-rw-r--r--spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb1
-rw-r--r--spec/features/projects/feature_flags/user_deletes_feature_flag_spec.rb1
-rw-r--r--spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb64
-rw-r--r--spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb3
-rw-r--r--spec/features/projects/features_visibility_spec.rb11
-rw-r--r--spec/features/projects/files/dockerfile_dropdown_spec.rb2
-rw-r--r--spec/features/projects/files/gitignore_dropdown_spec.rb2
-rw-r--r--spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb2
-rw-r--r--spec/features/projects/files/undo_template_spec.rb4
-rw-r--r--spec/features/projects/import_export/import_file_spec.rb2
-rw-r--r--spec/features/projects/infrastructure_registry_spec.rb19
-rw-r--r--spec/features/projects/integrations/user_activates_jira_spec.rb12
-rw-r--r--spec/features/projects/integrations/user_activates_pivotaltracker_spec.rb4
-rw-r--r--spec/features/projects/navbar_spec.rb160
-rw-r--r--spec/features/projects/new_project_spec.rb113
-rw-r--r--spec/features/projects/package_files_spec.rb14
-rw-r--r--spec/features/projects/packages_spec.rb14
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb68
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb2
-rw-r--r--spec/features/projects/releases/user_views_edit_release_spec.rb12
-rw-r--r--spec/features/projects/releases/user_views_releases_spec.rb45
-rw-r--r--spec/features/projects/services/prometheus_external_alerts_spec.rb2
-rw-r--r--spec/features/projects/services/user_activates_prometheus_spec.rb2
-rw-r--r--spec/features/projects/services/user_activates_slack_notifications_spec.rb10
-rw-r--r--spec/features/projects/settings/access_tokens_spec.rb5
-rw-r--r--spec/features/projects/settings/monitor_settings_spec.rb13
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb160
-rw-r--r--spec/features/projects/settings/user_searches_in_settings_spec.rb2
-rw-r--r--spec/features/projects/terraform_spec.rb2
-rw-r--r--spec/features/projects/tree/create_directory_spec.rb2
-rw-r--r--spec/features/projects/tree/create_file_spec.rb2
-rw-r--r--spec/features/projects/user_creates_project_spec.rb6
-rw-r--r--spec/features/projects/user_sees_user_popover_spec.rb6
-rw-r--r--spec/features/projects/user_uses_shortcuts_spec.rb44
-rw-r--r--spec/features/projects/user_views_empty_project_spec.rb10
-rw-r--r--spec/features/projects_spec.rb4
-rw-r--r--spec/features/search/user_searches_for_code_spec.rb8
-rw-r--r--spec/features/search/user_searches_for_comments_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_commits_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_issues_spec.rb1
-rw-r--r--spec/features/search/user_searches_for_merge_requests_spec.rb1
-rw-r--r--spec/features/search/user_searches_for_milestones_spec.rb1
-rw-r--r--spec/features/search/user_searches_for_projects_spec.rb1
-rw-r--r--spec/features/search/user_searches_for_users_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_wiki_pages_spec.rb1
-rw-r--r--spec/features/snippets/embedded_snippet_spec.rb5
-rw-r--r--spec/features/unsubscribe_links_spec.rb2
-rw-r--r--spec/features/usage_stats_consent_spec.rb2
-rw-r--r--spec/features/users/login_spec.rb18
-rw-r--r--spec/features/users/user_browses_projects_on_user_page_spec.rb2
-rw-r--r--spec/finders/alert_management/alerts_finder_spec.rb1
-rw-r--r--spec/finders/bulk_imports/entities_finder_spec.rb84
-rw-r--r--spec/finders/bulk_imports/imports_finder_spec.rb34
-rw-r--r--spec/finders/ci/commit_statuses_finder_spec.rb2
-rw-r--r--spec/finders/ci/runners_finder_spec.rb27
-rw-r--r--spec/finders/container_repositories_finder_spec.rb35
-rw-r--r--spec/finders/deployments_finder_spec.rb3
-rw-r--r--spec/finders/environments/environments_finder_spec.rb31
-rw-r--r--spec/finders/events_finder_spec.rb1
-rw-r--r--spec/finders/group_descendants_finder_spec.rb1
-rw-r--r--spec/finders/groups_finder_spec.rb1
-rw-r--r--spec/finders/merge_requests_finder_spec.rb38
-rw-r--r--spec/finders/milestones_finder_spec.rb93
-rw-r--r--spec/finders/packages/conan/package_file_finder_spec.rb1
-rw-r--r--spec/finders/packages/go/module_finder_spec.rb1
-rw-r--r--spec/finders/packages/group_packages_finder_spec.rb1
-rw-r--r--spec/finders/packages/nuget/package_finder_spec.rb2
-rw-r--r--spec/finders/packages/package_file_finder_spec.rb1
-rw-r--r--spec/finders/packages/package_finder_spec.rb1
-rw-r--r--spec/finders/projects_finder_spec.rb34
-rw-r--r--spec/finders/template_finder_spec.rb2
-rw-r--r--spec/finders/todos_finder_spec.rb2
-rw-r--r--spec/finders/user_recent_events_finder_spec.rb2
-rw-r--r--spec/fixtures/api/schemas/cluster_status.json2
-rw-r--r--spec/fixtures/api/schemas/entities/admin_users_data_attributes_paths.json8
-rw-r--r--spec/fixtures/error_tracking/event.txt3
-rw-r--r--spec/fixtures/error_tracking/parsed_event.json1
-rw-r--r--spec/fixtures/error_tracking/transaction.txt3
-rw-r--r--spec/fixtures/error_tracking/unknown.txt3
-rw-r--r--spec/fixtures/gitlab/database/structure_example_cleaned.sql4
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml2
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml2
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/project.json770
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/tree/project/merge_requests.ndjson16
-rw-r--r--spec/fixtures/packages/nuget/package.snupkgbin0 -> 4691 bytes
-rw-r--r--spec/fixtures/packages/nuget/with_package_types.nuspec14
-rw-r--r--spec/frontend/__helpers__/dom_shims/inner_text.js2
-rw-r--r--spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap1
-rw-r--r--spec/frontend/admin/users/components/actions/actions_spec.js29
-rw-r--r--spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap (renamed from spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap)1
-rw-r--r--spec/frontend/admin/users/components/modals/delete_user_modal_spec.js (renamed from spec/frontend/pages/admin/users/components/delete_user_modal_spec.js)2
-rw-r--r--spec/frontend/admin/users/components/modals/stubs/modal_stub.js (renamed from spec/frontend/pages/admin/users/components/stubs/modal_stub.js)0
-rw-r--r--spec/frontend/admin/users/components/modals/user_modal_manager_spec.js (renamed from spec/frontend/pages/admin/users/components/user_modal_manager_spec.js)2
-rw-r--r--spec/frontend/admin/users/components/user_actions_spec.js62
-rw-r--r--spec/frontend/admin/users/constants.js16
-rw-r--r--spec/frontend/admin/users/index_spec.js36
-rw-r--r--spec/frontend/admin/users/mock_data.js4
-rw-r--r--spec/frontend/analytics/devops_report/components/service_ping_disabled_spec.js61
-rw-r--r--spec/frontend/analytics/shared/components/daterange_spec.js120
-rw-r--r--spec/frontend/analytics/shared/components/metric_card_spec.js129
-rw-r--r--spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js264
-rw-r--r--spec/frontend/analytics/shared/utils_spec.js24
-rw-r--r--spec/frontend/analytics/usage_trends/components/usage_counts_spec.js (renamed from spec/frontend/analytics/usage_trends/components/instance_counts_spec.js)22
-rw-r--r--spec/frontend/api_spec.js8
-rw-r--r--spec/frontend/batch_comments/components/draft_note_spec.js12
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap2
-rw-r--r--spec/frontend/blob/components/blob_edit_content_spec.js8
-rw-r--r--spec/frontend/blob/csv/csv_viewer_spec.js75
-rw-r--r--spec/frontend/blob/utils_spec.js12
-rw-r--r--spec/frontend/blob/viewer/index_spec.js5
-rw-r--r--spec/frontend/blob_edit/blob_bundle_spec.js10
-rw-r--r--spec/frontend/blob_edit/edit_blob_spec.js14
-rw-r--r--spec/frontend/boards/board_card_inner_spec.js126
-rw-r--r--spec/frontend/boards/board_list_helper.js95
-rw-r--r--spec/frontend/boards/board_list_spec.js96
-rw-r--r--spec/frontend/boards/boards_util_spec.js33
-rw-r--r--spec/frontend/boards/components/board_column_spec.js37
-rw-r--r--spec/frontend/boards/components/board_content_sidebar_spec.js20
-rw-r--r--spec/frontend/boards/components/board_content_spec.js7
-rw-r--r--spec/frontend/boards/components/board_form_spec.js2
-rw-r--r--spec/frontend/boards/components/board_settings_sidebar_spec.js11
-rw-r--r--spec/frontend/boards/components/issue_board_filtered_search_spec.js44
-rw-r--r--spec/frontend/boards/mock_data.js47
-rw-r--r--spec/frontend/boards/stores/actions_spec.js88
-rw-r--r--spec/frontend/boards/stores/getters_spec.js2
-rw-r--r--spec/frontend/boards/stores/mutations_spec.js3
-rw-r--r--spec/frontend/branches/components/delete_branch_button_spec.js6
-rw-r--r--spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js55
-rw-r--r--spec/frontend/ci_lint/components/ci_lint_spec.js4
-rw-r--r--spec/frontend/clusters/clusters_bundle_spec.js166
-rw-r--r--spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap105
-rw-r--r--spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap1
-rw-r--r--spec/frontend/clusters/components/application_row_spec.js505
-rw-r--r--spec/frontend/clusters/components/applications_spec.js510
-rw-r--r--spec/frontend/clusters/components/knative_domain_editor_spec.js179
-rw-r--r--spec/frontend/clusters/components/uninstall_application_button_spec.js39
-rw-r--r--spec/frontend/clusters/components/uninstall_application_confirmation_modal_spec.js57
-rw-r--r--spec/frontend/clusters/components/update_application_confirmation_modal_spec.js52
-rw-r--r--spec/frontend/clusters/services/application_state_machine_spec.js206
-rw-r--r--spec/frontend/clusters/services/crossplane_provider_stack_spec.js85
-rw-r--r--spec/frontend/clusters/services/mock_data.js155
-rw-r--r--spec/frontend/clusters/stores/clusters_store_spec.js192
-rw-r--r--spec/frontend/clusters_list/store/actions_spec.js6
-rw-r--r--spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap2
-rw-r--r--spec/frontend/code_quality_walkthrough/components/step_spec.js6
-rw-r--r--spec/frontend/collapsed_sidebar_todo_spec.js171
-rw-r--r--spec/frontend/commit/commit_pipeline_status_component_spec.js4
-rw-r--r--spec/frontend/commit/pipelines/pipelines_table_spec.js4
-rw-r--r--spec/frontend/content_editor/components/content_editor_spec.js37
-rw-r--r--spec/frontend/content_editor/components/toolbar_button_spec.js34
-rw-r--r--spec/frontend/content_editor/components/toolbar_image_button_spec.js78
-rw-r--r--spec/frontend/content_editor/components/toolbar_link_button_spec.js47
-rw-r--r--spec/frontend/content_editor/components/toolbar_table_button_spec.js109
-rw-r--r--spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js16
-rw-r--r--spec/frontend/content_editor/components/top_toolbar_spec.js24
-rw-r--r--spec/frontend/content_editor/components/wrappers/image_spec.js66
-rw-r--r--spec/frontend/content_editor/extensions/hard_break_spec.js46
-rw-r--r--spec/frontend/content_editor/extensions/horizontal_rule_spec.js20
-rw-r--r--spec/frontend/content_editor/extensions/image_spec.js193
-rw-r--r--spec/frontend/content_editor/markdown_processing_examples.js3
-rw-r--r--spec/frontend/content_editor/markdown_processing_spec.js16
-rw-r--r--spec/frontend/content_editor/services/create_content_editor_spec.js12
-rw-r--r--spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js17
-rw-r--r--spec/frontend/content_editor/services/upload_file_spec.js46
-rw-r--r--spec/frontend/content_editor/test_utils.js12
-rw-r--r--spec/frontend/contributors/store/actions_spec.js6
-rw-r--r--spec/frontend/cycle_analytics/filter_bar_spec.js224
-rw-r--r--spec/frontend/cycle_analytics/formatted_stage_count_spec.js34
-rw-r--r--spec/frontend/cycle_analytics/mock_data.js41
-rw-r--r--spec/frontend/cycle_analytics/store/actions_spec.js88
-rw-r--r--spec/frontend/cycle_analytics/store/mutations_spec.js69
-rw-r--r--spec/frontend/cycle_analytics/utils_spec.js10
-rw-r--r--spec/frontend/cycle_analytics/value_stream_filters_spec.js91
-rw-r--r--spec/frontend/design_management/components/design_notes/__snapshots__/design_note_spec.js.snap7
-rw-r--r--spec/frontend/design_management/components/design_notes/design_note_spec.js3
-rw-r--r--spec/frontend/design_management/components/design_todo_button_spec.js2
-rw-r--r--spec/frontend/design_management/pages/design/index_spec.js8
-rw-r--r--spec/frontend/diffs/components/app_spec.js46
-rw-r--r--spec/frontend/diffs/components/collapsed_files_warning_spec.js93
-rw-r--r--spec/frontend/diffs/components/diff_content_spec.js17
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js5
-rw-r--r--spec/frontend/diffs/components/diff_row_spec.js127
-rw-r--r--spec/frontend/diffs/components/diff_row_utils_spec.js43
-rw-r--r--spec/frontend/diffs/components/diff_view_spec.js18
-rw-r--r--spec/frontend/diffs/components/inline_diff_table_row_spec.js325
-rw-r--r--spec/frontend/diffs/components/inline_diff_view_spec.js57
-rw-r--r--spec/frontend/diffs/components/parallel_diff_table_row_spec.js445
-rw-r--r--spec/frontend/diffs/components/parallel_diff_view_spec.js37
-rw-r--r--spec/frontend/diffs/store/actions_spec.js31
-rw-r--r--spec/frontend/diffs/store/getters_versions_dropdowns_spec.js2
-rw-r--r--spec/frontend/editor/source_editor_ci_schema_ext_spec.js (renamed from spec/frontend/editor/editor_ci_schema_ext_spec.js)6
-rw-r--r--spec/frontend/editor/source_editor_extension_base_spec.js (renamed from spec/frontend/editor/editor_lite_extension_base_spec.js)62
-rw-r--r--spec/frontend/editor/source_editor_markdown_ext_spec.js (renamed from spec/frontend/editor/editor_markdown_ext_spec.js)8
-rw-r--r--spec/frontend/editor/source_editor_spec.js (renamed from spec/frontend/editor/editor_lite_spec.js)36
-rw-r--r--spec/frontend/emoji/awards_app/store/actions_spec.js1
-rw-r--r--spec/frontend/environment.js21
-rw-r--r--spec/frontend/environments/environment_item_spec.js11
-rw-r--r--spec/frontend/environments/environments_app_spec.js16
-rw-r--r--spec/frontend/feature_flags/components/edit_feature_flag_spec.js73
-rw-r--r--spec/frontend/feature_flags/components/feature_flags_table_spec.js184
-rw-r--r--spec/frontend/feature_flags/components/form_spec.js338
-rw-r--r--spec/frontend/feature_flags/components/new_feature_flag_spec.js15
-rw-r--r--spec/frontend/feature_flags/mock_data.js88
-rw-r--r--spec/frontend/feature_flags/store/edit/actions_spec.js45
-rw-r--r--spec/frontend/feature_flags/store/helpers_spec.js360
-rw-r--r--spec/frontend/feature_flags/store/index/actions_spec.js5
-rw-r--r--spec/frontend/feature_flags/store/index/mutations_spec.js17
-rw-r--r--spec/frontend/feature_flags/store/new/actions_spec.js81
-rw-r--r--spec/frontend/feature_highlight/feature_highlight_helper_spec.js9
-rw-r--r--spec/frontend/filtered_search/filtered_search_manager_spec.js12
-rw-r--r--spec/frontend/filtered_search/visual_token_value_spec.js8
-rw-r--r--spec/frontend/fixtures/api_markdown.rb34
-rw-r--r--spec/frontend/fixtures/api_markdown.yml33
-rw-r--r--spec/frontend/fixtures/application_settings.rb8
-rw-r--r--spec/frontend/fixtures/pipelines.rb1
-rw-r--r--spec/frontend/fixtures/projects.rb3
-rw-r--r--spec/frontend/fixtures/prometheus_service.rb4
-rw-r--r--spec/frontend/fixtures/releases.rb8
-rw-r--r--spec/frontend/fixtures/runner.rb9
-rw-r--r--spec/frontend/flash_spec.js115
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_list_item_spec.js35
-rw-r--r--spec/frontend/gpg_badges_spec.js50
-rw-r--r--spec/frontend/grafana_integration/components/grafana_integration_spec.js1
-rw-r--r--spec/frontend/groups/components/app_spec.js17
-rw-r--r--spec/frontend/groups/components/group_item_spec.js18
-rw-r--r--spec/frontend/ide/components/ide_project_header_spec.js44
-rw-r--r--spec/frontend/ide/components/new_dropdown/modal_spec.js3
-rw-r--r--spec/frontend/ide/components/repo_editor_spec.js15
-rw-r--r--spec/frontend/ide/services/index_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/clientside/actions_spec.js2
-rw-r--r--spec/frontend/ide/stores/utils_spec.js6
-rw-r--r--spec/frontend/import_entities/components/group_dropdown_spec.js44
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_row_spec.js6
-rw-r--r--spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js4
-rw-r--r--spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js19
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap2
-rw-r--r--spec/frontend/incidents_settings/components/incidents_settings_service_spec.js1
-rw-r--r--spec/frontend/integrations/edit/components/jira_issues_fields_spec.js109
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js295
-rw-r--r--spec/frontend/invite_members/components/members_token_select_spec.js15
-rw-r--r--spec/frontend/invite_members/mock_data/api_responses.js74
-rw-r--r--spec/frontend/invite_members/utils/response_message_parser_spec.js36
-rw-r--r--spec/frontend/issuable/components/issuable_by_email_spec.js5
-rw-r--r--spec/frontend/issuable_bulk_update_sidebar/components/status_select_spec.js77
-rw-r--r--spec/frontend/issuable_create/components/issuable_form_spec.js3
-rw-r--r--spec/frontend/issuable_show/components/issuable_show_root_spec.js8
-rw-r--r--spec/frontend/issuable_sidebar/components/issuable_sidebar_root_spec.js214
-rw-r--r--spec/frontend/issuable_spec.js2
-rw-r--r--spec/frontend/issues_list/components/issuables_list_app_spec.js4
-rw-r--r--spec/frontend/issues_list/components/issues_list_app_spec.js74
-rw-r--r--spec/frontend/issues_list/mock_data.js77
-rw-r--r--spec/frontend/issues_list/utils_spec.js29
-rw-r--r--spec/frontend/jira_connect/branches/components/project_dropdown_spec.js180
-rw-r--r--spec/frontend/jira_connect/branches/components/source_branch_dropdown_spec.js192
-rw-r--r--spec/frontend/jira_connect/components/groups_list_spec.js6
-rw-r--r--spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap2
-rw-r--r--spec/frontend/jobs/components/empty_state_spec.js1
-rw-r--r--spec/frontend/jobs/components/job_app_spec.js8
-rw-r--r--spec/frontend/jobs/components/log/collapsible_section_spec.js9
-rw-r--r--spec/frontend/jobs/components/log/line_spec.js30
-rw-r--r--spec/frontend/jobs/components/log/log_spec.js91
-rw-r--r--spec/frontend/jobs/components/log/mock_data.js65
-rw-r--r--spec/frontend/jobs/components/manual_variables_form_spec.js15
-rw-r--r--spec/frontend/jobs/components/sidebar_detail_row_spec.js2
-rw-r--r--spec/frontend/jobs/store/mutations_spec.js94
-rw-r--r--spec/frontend/jobs/store/utils_spec.js95
-rw-r--r--spec/frontend/lib/dompurify_spec.js16
-rw-r--r--spec/frontend/lib/graphql_spec.js54
-rw-r--r--spec/frontend/lib/utils/common_utils_spec.js146
-rw-r--r--spec/frontend/lib/utils/datetime/timeago_utility_spec.js103
-rw-r--r--spec/frontend/lib/utils/datetime_utility_spec.js40
-rw-r--r--spec/frontend/lib/utils/finite_state_machine_spec.js293
-rw-r--r--spec/frontend/lib/utils/text_markdown_spec.js2
-rw-r--r--spec/frontend/lib/utils/url_utility_spec.js115
-rw-r--r--spec/frontend/line_highlighter_spec.js9
-rw-r--r--spec/frontend/locale/index_spec.js86
-rw-r--r--spec/frontend/logs/stores/actions_spec.js6
-rw-r--r--spec/frontend/members/components/app_spec.js21
-rw-r--r--spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js12
-rw-r--r--spec/frontend/members/components/members_tabs_spec.js75
-rw-r--r--spec/frontend/members/components/table/members_table_spec.js19
-rw-r--r--spec/frontend/milestones/milestone_utils_spec.js47
-rw-r--r--spec/frontend/monitoring/components/charts/time_series_spec.js4
-rw-r--r--spec/frontend/monitoring/components/dashboard_actions_menu_spec.js7
-rw-r--r--spec/frontend/nav/components/top_nav_menu_item_spec.js2
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js14
-rw-r--r--spec/frontend/notes/components/discussion_notes_spec.js14
-rw-r--r--spec/frontend/notes/components/noteable_note_spec.js7
-rw-r--r--spec/frontend/notes/stores/actions_spec.js75
-rw-r--r--spec/frontend/notifications/components/custom_notifications_modal_spec.js10
-rw-r--r--spec/frontend/notifications/components/notifications_dropdown_spec.js5
-rw-r--r--spec/frontend/operation_settings/components/metrics_settings_spec.js1
-rw-r--r--spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap1
-rw-r--r--spec/frontend/packages/shared/utils_spec.js2
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/app_spec.js35
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js6
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/__snapshots__/utils_spec.js.snap8
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap4
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js20
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/utils_spec.js1
-rw-r--r--spec/frontend/pager_spec.js1
-rw-r--r--spec/frontend/pages/admin/application_settings/metrics_and_profiling/usage_statistics_spec.js57
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_form_spec.js23
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js4
-rw-r--r--spec/frontend/pages/projects/new/components/app_spec.js33
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js25
-rw-r--r--spec/frontend/persistent_user_callout_spec.js15
-rw-r--r--spec/frontend/pipeline_editor/components/editor/ci_config_merged_preview_spec.js6
-rw-r--r--spec/frontend/pipeline_editor/components/editor/ci_editor_header_spec.js53
-rw-r--r--spec/frontend/pipeline_editor/components/editor/text_editor_spec.js10
-rw-r--r--spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js21
-rw-r--r--spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js6
-rw-r--r--spec/frontend/pipeline_editor/graphql/resolvers_spec.js39
-rw-r--r--spec/frontend/pipeline_editor/mock_data.js46
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_app_spec.js176
-rw-r--r--spec/frontend/pipelines/empty_state_spec.js30
-rw-r--r--spec/frontend/pipelines/graph/mock_data.js8
-rw-r--r--spec/frontend/pipelines/graph/stage_column_component_spec.js64
-rw-r--r--spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap18
-rw-r--r--spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js57
-rw-r--r--spec/frontend/pipelines/pipelines_ci_templates_spec.js34
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js89
-rw-r--r--spec/frontend/profile/preferences/components/profile_preferences_spec.js18
-rw-r--r--spec/frontend/projects/commit/components/branches_dropdown_spec.js3
-rw-r--r--spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap4
-rw-r--r--spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap6
-rw-r--r--spec/frontend/projects/settings/components/shared_runners_toggle_spec.js1
-rw-r--r--spec/frontend/projects/terraform_notification/terraform_notification_spec.js62
-rw-r--r--spec/frontend/registry/explorer/components/list_page/cleanup_status_spec.js87
-rw-r--r--spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js30
-rw-r--r--spec/frontend/registry/explorer/mock_data.js2
-rw-r--r--spec/frontend/releases/__snapshots__/util_spec.js.snap37
-rw-r--r--spec/frontend/releases/components/app_index_apollo_client_spec.js8
-rw-r--r--spec/frontend/releases/components/app_index_spec.js6
-rw-r--r--spec/frontend/reports/components/__snapshots__/grouped_issues_list_spec.js.snap1
-rw-r--r--spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js4
-rw-r--r--spec/frontend/repository/components/blob_button_group_spec.js117
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js171
-rw-r--r--spec/frontend/repository/components/blob_edit_spec.js (renamed from spec/frontend/repository/components/blob_header_edit_spec.js)6
-rw-r--r--spec/frontend/repository/components/blob_replace_spec.js67
-rw-r--r--spec/frontend/repository/components/blob_viewers/__snapshots__/empty_viewer_spec.js.snap9
-rw-r--r--spec/frontend/repository/components/blob_viewers/download_viewer_spec.js70
-rw-r--r--spec/frontend/repository/components/blob_viewers/empty_viewer_spec.js14
-rw-r--r--spec/frontend/repository/components/blob_viewers/text_viewer_spec.js30
-rw-r--r--spec/frontend/repository/components/delete_blob_modal_spec.js130
-rw-r--r--spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap3
-rw-r--r--spec/frontend/repository/components/tree_content_spec.js63
-rw-r--r--spec/frontend/repository/components/upload_blob_modal_spec.js4
-rw-r--r--spec/frontend/repository/log_tree_spec.js51
-rw-r--r--spec/frontend/right_sidebar_spec.js16
-rw-r--r--spec/frontend/runner/components/cells/runner_actions_cell_spec.js234
-rw-r--r--spec/frontend/runner/components/helpers/masked_value_spec.js51
-rw-r--r--spec/frontend/runner/components/runner_filtered_search_bar_spec.js27
-rw-r--r--spec/frontend/runner/components/runner_list_spec.js69
-rw-r--r--spec/frontend/runner/components/runner_manual_setup_help_spec.js9
-rw-r--r--spec/frontend/runner/components/runner_registration_token_reset_spec.js25
-rw-r--r--spec/frontend/runner/components/runner_tag_spec.js45
-rw-r--r--spec/frontend/runner/components/runner_tags_spec.js12
-rw-r--r--spec/frontend/runner/components/runner_update_form_spec.js33
-rw-r--r--spec/frontend/runner/components/search_tokens/tag_token_spec.js188
-rw-r--r--spec/frontend/runner/runner_detail/runner_details_app_spec.js27
-rw-r--r--spec/frontend/runner/runner_detail/runner_update_form_utils_spec.js96
-rw-r--r--spec/frontend/runner/runner_list/runner_list_app_spec.js31
-rw-r--r--spec/frontend/runner/runner_list/runner_search_utils_spec.js39
-rw-r--r--spec/frontend/runner/sentry_utils_spec.js39
-rw-r--r--spec/frontend/search/mock_data.js53
-rw-r--r--spec/frontend/search/store/actions_spec.js106
-rw-r--r--spec/frontend/search/store/getters_spec.js32
-rw-r--r--spec/frontend/search/store/mutations_spec.js9
-rw-r--r--spec/frontend/search/store/utils_spec.js197
-rw-r--r--spec/frontend/search/topbar/components/group_filter_spec.js56
-rw-r--r--spec/frontend/search/topbar/components/project_filter_spec.js38
-rw-r--r--spec/frontend/search/topbar/components/searchable_dropdown_spec.js87
-rw-r--r--spec/frontend/search_autocomplete_spec.js50
-rw-r--r--spec/frontend/search_autocomplete_utils_spec.js114
-rw-r--r--spec/frontend/security_configuration/components/auto_dev_ops_alert_spec.js55
-rw-r--r--spec/frontend/security_configuration/components/feature_card_spec.js25
-rw-r--r--spec/frontend/security_configuration/components/redesigned_app_spec.js82
-rw-r--r--spec/frontend/security_configuration/utils_spec.js22
-rw-r--r--spec/frontend/sentry/index_spec.js4
-rw-r--r--spec/frontend/sentry/sentry_config_spec.js8
-rw-r--r--spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap2
-rw-r--r--spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js8
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js44
-rw-r--r--spec/frontend/sidebar/components/assignees/user_name_with_status_spec.js44
-rw-r--r--spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js6
-rw-r--r--spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js126
-rw-r--r--spec/frontend/sidebar/lock/edit_form_buttons_spec.js10
-rw-r--r--spec/frontend/sidebar/mock_data.js37
-rw-r--r--spec/frontend/sidebar/sidebar_move_issue_spec.js6
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap2
-rw-r--r--spec/frontend/snippets/components/edit_spec.js20
-rw-r--r--spec/frontend/snippets/components/snippet_blob_edit_spec.js4
-rw-r--r--spec/frontend/static_site_editor/services/submit_content_changes_spec.js10
-rw-r--r--spec/frontend/terraform/components/terraform_list_spec.js3
-rw-r--r--spec/frontend/token_access/mock_data.js84
-rw-r--r--spec/frontend/token_access/token_access_spec.js218
-rw-r--r--spec/frontend/token_access/token_projects_table_spec.js51
-rw-r--r--spec/frontend/tracking_spec.js46
-rw-r--r--spec/frontend/vue_alerts_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js70
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js14
-rw-r--r--spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js3
-rw-r--r--spec/frontend/vue_mr_widget/mock_data.js4
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap14
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/source_editor_spec.js.snap (renamed from spec/frontend/vue_shared/components/__snapshots__/editor_lite_spec.js.snap)4
-rw-r--r--spec/frontend/vue_shared/components/awards_list_spec.js18
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js9
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js16
-rw-r--r--spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/dismissible_alert_spec.js41
-rw-r--r--spec/frontend/vue_shared/components/file_finder/index_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js16
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js20
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js34
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap2
-rw-r--r--spec/frontend/vue_shared/components/paginated_list_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/project_avatar/default_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/project_avatar_spec.js67
-rw-r--r--spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/resizable_chart/__snapshots__/resizable_chart_container_spec.js.snap30
-rw-r--r--spec/frontend/vue_shared/components/resizable_chart/resizable_chart_container_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/sidebar/copyable_field_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js19
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js29
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_value_spec.js113
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/actions_spec.js11
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/mutations_spec.js29
-rw-r--r--spec/frontend/vue_shared/components/sidebar/todo_button_spec.js (renamed from spec/frontend/vue_shared/components/todo_button_spec.js)22
-rw-r--r--spec/frontend/vue_shared/components/source_editor_spec.js (renamed from spec/frontend/vue_shared/components/editor_lite_spec.js)16
-rw-r--r--spec/frontend/vue_shared/components/user_popover/user_popover_spec.js20
-rw-r--r--spec/frontend/vue_shared/components/user_select_spec.js44
-rw-r--r--spec/frontend/vue_shared/components/web_ide_link_spec.js8
-rw-r--r--spec/frontend/vue_shared/new_namespace/components/welcome_spec.js26
-rw-r--r--spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js7
-rw-r--r--spec/frontend/vue_shared/oncall_schedules_list_spec.js2
-rw-r--r--spec/frontend/vue_shared/plugins/global_toast_spec.js9
-rw-r--r--spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js24
-rw-r--r--spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js37
-rw-r--r--spec/frontend/vue_shared/security_reports/mock_data.js11
-rw-r--r--spec/frontend/vue_shared/security_reports/security_reports_app_spec.js10
-rw-r--r--spec/frontend/vuex_shared/bindings_spec.js10
-rw-r--r--spec/frontend_integration/diffs/diffs_interopability_spec.js82
-rw-r--r--spec/frontend_integration/ide/helpers/ide_helper.js10
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/diffs.js3
-rw-r--r--spec/frontend_integration/test_helpers/setup/setup_mock_server.js10
-rw-r--r--spec/frontend_integration/test_helpers/setup/setup_testing_library.js14
-rw-r--r--spec/graphql/features/authorization_spec.rb1
-rw-r--r--spec/graphql/features/feature_flag_spec.rb21
-rw-r--r--spec/graphql/gitlab_schema_spec.rb1
-rw-r--r--spec/graphql/mutations/alert_management/prometheus_integration/create_spec.rb4
-rw-r--r--spec/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb2
-rw-r--r--spec/graphql/mutations/alert_management/prometheus_integration/update_spec.rb2
-rw-r--r--spec/graphql/mutations/ci/job_token_scope/add_project_spec.rb65
-rw-r--r--spec/graphql/mutations/ci/job_token_scope/remove_project_spec.rb68
-rw-r--r--spec/graphql/mutations/custom_emoji/create_spec.rb1
-rw-r--r--spec/graphql/mutations/discussions/toggle_resolve_spec.rb2
-rw-r--r--spec/graphql/mutations/environments/canary_ingress/update_spec.rb1
-rw-r--r--spec/graphql/mutations/issues/create_spec.rb1
-rw-r--r--spec/graphql/mutations/issues/set_confidential_spec.rb4
-rw-r--r--spec/graphql/mutations/issues/set_severity_spec.rb3
-rw-r--r--spec/graphql/mutations/issues/update_spec.rb4
-rw-r--r--spec/graphql/mutations/labels/create_spec.rb2
-rw-r--r--spec/graphql/mutations/notes/reposition_image_diff_note_spec.rb1
-rw-r--r--spec/graphql/mutations/release_asset_links/create_spec.rb18
-rw-r--r--spec/graphql/mutations/release_asset_links/delete_spec.rb25
-rw-r--r--spec/graphql/mutations/release_asset_links/update_spec.rb20
-rw-r--r--spec/graphql/mutations/releases/create_spec.rb22
-rw-r--r--spec/graphql/mutations/releases/delete_spec.rb30
-rw-r--r--spec/graphql/mutations/releases/update_spec.rb22
-rw-r--r--spec/graphql/mutations/security/ci_configuration/base_security_analyzer_spec.rb14
-rw-r--r--spec/graphql/resolvers/alert_management/http_integrations_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/alert_management/integrations_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/ci/config_resolver_spec.rb20
-rw-r--r--spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb64
-rw-r--r--spec/graphql/resolvers/group_milestones_resolver_spec.rb19
-rw-r--r--spec/graphql/resolvers/issues_resolver_spec.rb36
-rw-r--r--spec/graphql/resolvers/project_milestones_resolver_spec.rb18
-rw-r--r--spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/projects/services_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/projects_resolver_spec.rb24
-rw-r--r--spec/graphql/types/alert_management/prometheus_integration_type_spec.rb4
-rw-r--r--spec/graphql/types/base_field_spec.rb67
-rw-r--r--spec/graphql/types/ci/detailed_status_type_spec.rb16
-rw-r--r--spec/graphql/types/ci/group_type_spec.rb1
-rw-r--r--spec/graphql/types/ci/job_token_scope_type_spec.rb75
-rw-r--r--spec/graphql/types/ci/pipeline_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/runner_type_spec.rb1
-rw-r--r--spec/graphql/types/ci/stage_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/status_action_type_spec.rb20
-rw-r--r--spec/graphql/types/deployment_tier_enum_spec.rb15
-rw-r--r--spec/graphql/types/global_id_type_spec.rb3
-rw-r--r--spec/graphql/types/issuable_searchable_field_enum_spec.rb13
-rw-r--r--spec/graphql/types/issue_type_spec.rb4
-rw-r--r--spec/graphql/types/merge_request_type_spec.rb2
-rw-r--r--spec/graphql/types/milestone_type_spec.rb2
-rw-r--r--spec/graphql/types/notes/discussion_type_spec.rb1
-rw-r--r--spec/graphql/types/notes/noteable_interface_spec.rb (renamed from spec/graphql/types/notes/noteable_type_spec.rb)2
-rw-r--r--spec/graphql/types/project_type_spec.rb80
-rw-r--r--spec/graphql/types/projects/service_type_spec.rb2
-rw-r--r--spec/graphql/types/projects/services_enum_spec.rb2
-rw-r--r--spec/graphql/types/query_complexity_type_spec.rb35
-rw-r--r--spec/graphql/types/release_asset_link_type_spec.rb2
-rw-r--r--spec/graphql/types/snippets/blob_type_spec.rb3
-rw-r--r--spec/haml_lint/linter/documentation_links_spec.rb14
-rw-r--r--spec/helpers/admin/user_actions_helper_spec.rb41
-rw-r--r--spec/helpers/analytics/unique_visits_helper_spec.rb34
-rw-r--r--spec/helpers/application_settings_helper_spec.rb20
-rw-r--r--spec/helpers/blob_helper_spec.rb16
-rw-r--r--spec/helpers/ci/pipeline_editor_helper_spec.rb36
-rw-r--r--spec/helpers/ci/runners_helper_spec.rb16
-rw-r--r--spec/helpers/clusters_helper_spec.rb7
-rw-r--r--spec/helpers/commits_helper_spec.rb9
-rw-r--r--spec/helpers/diff_helper_spec.rb25
-rw-r--r--spec/helpers/emails_helper_spec.rb8
-rw-r--r--spec/helpers/environments_helper_spec.rb14
-rw-r--r--spec/helpers/gitlab_routing_helper_spec.rb17
-rw-r--r--spec/helpers/integrations_helper_spec.rb (renamed from spec/helpers/services_helper_spec.rb)28
-rw-r--r--spec/helpers/issues_helper_spec.rb3
-rw-r--r--spec/helpers/namespaces_helper_spec.rb20
-rw-r--r--spec/helpers/nav/new_dropdown_helper_spec.rb27
-rw-r--r--spec/helpers/nav/top_nav_helper_spec.rb13
-rw-r--r--spec/helpers/operations_helper_spec.rb24
-rw-r--r--spec/helpers/packages_helper_spec.rb4
-rw-r--r--spec/helpers/projects/alert_management_helper_spec.rb14
-rw-r--r--spec/helpers/projects_helper_spec.rb33
-rw-r--r--spec/helpers/registrations_helper_spec.rb24
-rw-r--r--spec/helpers/releases_helper_spec.rb50
-rw-r--r--spec/helpers/sessions_helper_spec.rb36
-rw-r--r--spec/helpers/user_callouts_helper_spec.rb12
-rw-r--r--spec/helpers/users_helper_spec.rb50
-rw-r--r--spec/initializers/100_patch_omniauth_saml_spec.rb5
-rw-r--r--spec/initializers/attr_encrypted_no_db_connection_spec.rb20
-rw-r--r--spec/initializers/global_id_spec.rb4
-rw-r--r--spec/initializers/lograge_spec.rb2
-rw-r--r--spec/initializers/mailer_retries_spec.rb18
-rw-r--r--spec/javascripts/lib/utils/mock_data.js2
-rw-r--r--spec/lib/api/entities/basic_project_details_spec.rb27
-rw-r--r--spec/lib/api/entities/bulk_import_spec.rb19
-rw-r--r--spec/lib/api/entities/bulk_imports/entity_failure_spec.rb19
-rw-r--r--spec/lib/api/entities/bulk_imports/entity_spec.rb26
-rw-r--r--spec/lib/api/entities/ci/job_request/image_spec.rb (renamed from spec/lib/api/entities/job_request/image_spec.rb)2
-rw-r--r--spec/lib/api/entities/ci/job_request/port_spec.rb (renamed from spec/lib/api/entities/job_request/port_spec.rb)2
-rw-r--r--spec/lib/api/entities/group_detail_spec.rb19
-rw-r--r--spec/lib/api/entities/plan_limit_spec.rb3
-rw-r--r--spec/lib/api/entities/user_spec.rb2
-rw-r--r--spec/lib/api/helpers/caching_spec.rb138
-rw-r--r--spec/lib/backup/database_spec.rb2
-rw-r--r--spec/lib/backup/gitaly_backup_spec.rb41
-rw-r--r--spec/lib/backup/repositories_spec.rb19
-rw-r--r--spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb73
-rw-r--r--spec/lib/banzai/filter/upload_link_filter_spec.rb6
-rw-r--r--spec/lib/banzai/filter/wiki_link_filter_spec.rb18
-rw-r--r--spec/lib/banzai/reference_parser/base_parser_spec.rb33
-rw-r--r--spec/lib/bulk_imports/clients/graphql_spec.rb41
-rw-r--r--spec/lib/bulk_imports/clients/http_spec.rb54
-rw-r--r--spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb1
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb77
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb8
-rw-r--r--spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb28
-rw-r--r--spec/lib/bulk_imports/ndjson_pipeline_spec.rb7
-rw-r--r--spec/lib/bulk_imports/stage_spec.rb1
-rw-r--r--spec/lib/bulk_imports/users_mapper_spec.rb68
-rw-r--r--spec/lib/error_tracking/collector/sentry_request_parser_spec.rb44
-rw-r--r--spec/lib/extracts_path_spec.rb73
-rw-r--r--spec/lib/extracts_ref_spec.rb1
-rw-r--r--spec/lib/gitlab/analytics/unique_visits_spec.rb81
-rw-r--r--spec/lib/gitlab/auth/auth_finders_spec.rb24
-rw-r--r--spec/lib/gitlab/auth/ldap/adapter_spec.rb75
-rw-r--r--spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb47
-rw-r--r--spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb6
-rw-r--r--spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb46
-rw-r--r--spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb63
-rw-r--r--spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb22
-rw-r--r--spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb400
-rw-r--r--spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb2
-rw-r--r--spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb41
-rw-r--r--spec/lib/gitlab/cache/helpers_spec.rb49
-rw-r--r--spec/lib/gitlab/cache/import/caching_spec.rb24
-rw-r--r--spec/lib/gitlab/changelog/config_spec.rb12
-rw-r--r--spec/lib/gitlab/checks/container_moved_spec.rb (renamed from spec/lib/gitlab/checks/project_moved_spec.rb)44
-rw-r--r--spec/lib/gitlab/checks/project_created_spec.rb31
-rw-r--r--spec/lib/gitlab/ci/ansi2json/line_spec.rb33
-rw-r--r--spec/lib/gitlab/ci/config/entry/artifacts_spec.rb51
-rw-r--r--spec/lib/gitlab/ci/config/entry/reports_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/lint_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/matching/runner_matcher_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb54
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb81
-rw-r--r--spec/lib/gitlab/ci/reports/security/identifier_spec.rb125
-rw-r--r--spec/lib/gitlab/ci/reports/security/link_spec.rb31
-rw-r--r--spec/lib/gitlab/ci/reports/security/scan_spec.rb46
-rw-r--r--spec/lib/gitlab/ci/reports/security/scanned_resource_spec.rb30
-rw-r--r--spec/lib/gitlab/ci/reports/security/scanner_spec.rb146
-rw-r--r--spec/lib/gitlab/ci/reports/test_case_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/composite_spec.rb19
-rw-r--r--spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/yaml_processor/dag_spec.rb41
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb100
-rw-r--r--spec/lib/gitlab/closing_issue_extractor_spec.rb2
-rw-r--r--spec/lib/gitlab/composer/cache_spec.rb1
-rw-r--r--spec/lib/gitlab/consul/internal_spec.rb2
-rw-r--r--spec/lib/gitlab/content_security_policy/config_loader_spec.rb30
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_job_spec.rb80
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb148
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb26
-rw-r--r--spec/lib/gitlab/database/custom_structure_spec.rb65
-rw-r--r--spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb22
-rw-r--r--spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb5
-rw-r--r--spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb61
-rw-r--r--spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb113
-rw-r--r--spec/lib/gitlab/database/load_balancing/sticking_spec.rb83
-rw-r--r--spec/lib/gitlab/database/load_balancing_spec.rb16
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb111
-rw-r--r--spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb121
-rw-r--r--spec/lib/gitlab/database/partitioning/partition_creator_spec.rb96
-rw-r--r--spec/lib/gitlab/database/partitioning/partition_manager_spec.rb161
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb250
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/partitioned_foreign_key_spec.rb48
-rw-r--r--spec/lib/gitlab/database/postgres_index_spec.rb48
-rw-r--r--spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb38
-rw-r--r--spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb2
-rw-r--r--spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb2
-rw-r--r--spec/lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin_spec.rb32
-rw-r--r--spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb303
-rw-r--r--spec/lib/gitlab/database/reindexing/coordinator_spec.rb18
-rw-r--r--spec/lib/gitlab/database/reindexing/index_selection_spec.rb57
-rw-r--r--spec/lib/gitlab/database/reindexing/reindex_concurrently_spec.rb134
-rw-r--r--spec/lib/gitlab/database/reindexing_spec.rb2
-rw-r--r--spec/lib/gitlab/database/schema_migrations/context_spec.rb78
-rw-r--r--spec/lib/gitlab/database/schema_migrations/migrations_spec.rb (renamed from spec/lib/gitlab/database/schema_version_files_spec.rb)41
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb4
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_spec.rb4
-rw-r--r--spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb4
-rw-r--r--spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb34
-rw-r--r--spec/lib/gitlab/database_spec.rb90
-rw-r--r--spec/lib/gitlab/deploy_key_access_spec.rb1
-rw-r--r--spec/lib/gitlab/diff/file_collection/base_spec.rb27
-rw-r--r--spec/lib/gitlab/diff/file_collection/commit_spec.rb8
-rw-r--r--spec/lib/gitlab/diff/file_collection/compare_spec.rb35
-rw-r--r--spec/lib/gitlab/diff/file_collection/merge_request_diff_base_spec.rb36
-rw-r--r--spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb34
-rw-r--r--spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb46
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb12
-rw-r--r--spec/lib/gitlab/email/receiver_spec.rb115
-rw-r--r--spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb68
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb122
-rw-r--r--spec/lib/gitlab/git/user_spec.rb43
-rw-r--r--spec/lib/gitlab/git_access_snippet_spec.rb31
-rw-r--r--spec/lib/gitlab/git_access_spec.rb6
-rw-r--r--spec/lib/gitlab/gitaly_client/blob_service_spec.rb100
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb33
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb1
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb13
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb22
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb27
-rw-r--r--spec/lib/gitlab/github_import/markdown_text_spec.rb7
-rw-r--r--spec/lib/gitlab/github_import/object_counter_spec.rb36
-rw-r--r--spec/lib/gitlab/github_import/parallel_scheduling_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb6
-rw-r--r--spec/lib/gitlab/github_import_spec.rb2
-rw-r--r--spec/lib/gitlab/gl_repository/repo_type_spec.rb1
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb420
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb4
-rw-r--r--spec/lib/gitlab/group_search_results_spec.rb1
-rw-r--r--spec/lib/gitlab/highlight_spec.rb7
-rw-r--r--spec/lib/gitlab/hook_data/issue_builder_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml47
-rw-r--r--spec/lib/gitlab/import_export/import_test_coverage_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/project/object_builder_spec.rb26
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb21
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml5
-rw-r--r--spec/lib/gitlab/import_export/shared_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb2
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb53
-rw-r--r--spec/lib/gitlab/integrations/sti_type_spec.rb8
-rw-r--r--spec/lib/gitlab/jira_import/base_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/jira_import/issues_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/jira_import/labels_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/jira_import_spec.rb16
-rw-r--r--spec/lib/gitlab/json_cache_spec.rb1
-rw-r--r--spec/lib/gitlab/kas/client_spec.rb19
-rw-r--r--spec/lib/gitlab/kas_spec.rb44
-rw-r--r--spec/lib/gitlab/kroki_spec.rb2
-rw-r--r--spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb8
-rw-r--r--spec/lib/gitlab/kubernetes/network_policy_spec.rb8
-rw-r--r--spec/lib/gitlab/language_detection_spec.rb1
-rw-r--r--spec/lib/gitlab/lfs_token_spec.rb2
-rw-r--r--spec/lib/gitlab/memory/instrumentation_spec.rb52
-rw-r--r--spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb13
-rw-r--r--spec/lib/gitlab/metrics/subscribers/active_record_spec.rb4
-rw-r--r--spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb115
-rw-r--r--spec/lib/gitlab/object_hierarchy_spec.rb303
-rw-r--r--spec/lib/gitlab/pagination/keyset/iterator_spec.rb141
-rw-r--r--spec/lib/gitlab/pagination/keyset/order_spec.rb6
-rw-r--r--spec/lib/gitlab/pagination/offset_pagination_spec.rb74
-rw-r--r--spec/lib/gitlab/project_search_results_spec.rb7
-rw-r--r--spec/lib/gitlab/prometheus/adapter_spec.rb20
-rw-r--r--spec/lib/gitlab/prometheus/query_variables_spec.rb1
-rw-r--r--spec/lib/gitlab/rate_limit_helpers_spec.rb2
-rw-r--r--spec/lib/gitlab/reactive_cache_set_cache_spec.rb1
-rw-r--r--spec/lib/gitlab/reference_extractor_spec.rb3
-rw-r--r--spec/lib/gitlab/repo_path_spec.rb36
-rw-r--r--spec/lib/gitlab/repository_set_cache_spec.rb10
-rw-r--r--spec/lib/gitlab/search_results_spec.rb9
-rw-r--r--spec/lib/gitlab/shell_spec.rb1
-rw-r--r--spec/lib/gitlab/sidekiq_config_spec.rb39
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb13
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb36
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb209
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb21
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb36
-rw-r--r--spec/lib/gitlab/sidekiq_queue_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_status_spec.rb16
-rw-r--r--spec/lib/gitlab/spamcheck/client_spec.rb9
-rw-r--r--spec/lib/gitlab/template_parser/ast_spec.rb (renamed from spec/lib/gitlab/changelog/ast_spec.rb)60
-rw-r--r--spec/lib/gitlab/template_parser/parser_spec.rb (renamed from spec/lib/gitlab/changelog/parser_spec.rb)6
-rw-r--r--spec/lib/gitlab/tracking/destinations/snowplow_spec.rb56
-rw-r--r--spec/lib/gitlab/usage/docs/helper_spec.rb79
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb7
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/collected_data_categories_metric_spec.rb15
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb75
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb3
-rw-r--r--spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb2
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb16
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb8
-rw-r--r--spec/lib/gitlab/utils_spec.rb16
-rw-r--r--spec/lib/gitlab/wiki_file_finder_spec.rb11
-rw-r--r--spec/lib/marginalia_spec.rb16
-rw-r--r--spec/lib/object_storage/direct_upload_spec.rb10
-rw-r--r--spec/lib/security/ci_configuration/sast_build_action_spec.rb9
-rw-r--r--spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb4
-rw-r--r--spec/lib/serializers/symbolized_json_spec.rb41
-rw-r--r--spec/lib/sidebars/projects/menus/deployments_menu_spec.rb11
-rw-r--r--spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb93
-rw-r--r--spec/lib/sidebars/projects/menus/issues_menu_spec.rb18
-rw-r--r--spec/lib/sidebars/projects/menus/labels_menu_spec.rb61
-rw-r--r--spec/lib/sidebars/projects/menus/members_menu_spec.rb35
-rw-r--r--spec/lib/sidebars/projects/menus/monitor_menu_spec.rb107
-rw-r--r--spec/lib/sidebars/projects/menus/project_information_menu_spec.rb59
-rw-r--r--spec/lib/sidebars/projects/menus/scope_menu_spec.rb8
-rw-r--r--spec/lib/sidebars/projects/menus/settings_menu_spec.rb16
-rw-r--r--spec/mailers/emails/admin_notification_spec.rb11
-rw-r--r--spec/mailers/emails/releases_spec.rb2
-rw-r--r--spec/mailers/emails/service_desk_spec.rb14
-rw-r--r--spec/mailers/notify_spec.rb39
-rw-r--r--spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb12
-rw-r--r--spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb12
-rw-r--r--spec/migrations/20200728080250_replace_unique_index_on_cycle_analytics_stages_spec.rb2
-rw-r--r--spec/migrations/20210610153556_delete_legacy_operations_feature_flags_spec.rb45
-rw-r--r--spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb22
-rw-r--r--spec/migrations/20210708130419_reschedule_merge_request_diff_users_background_migration_spec.rb75
-rw-r--r--spec/migrations/active_record/schema_spec.rb6
-rw-r--r--spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb86
-rw-r--r--spec/migrations/add_upvotes_count_index_to_issues_spec.rb22
-rw-r--r--spec/migrations/backfill_issues_upvotes_count_spec.rb35
-rw-r--r--spec/migrations/delete_template_services_duplicated_by_type_spec.rb6
-rw-r--r--spec/migrations/fix_batched_migrations_old_format_job_arguments_spec.rb63
-rw-r--r--spec/migrations/re_schedule_latest_pipeline_id_population_spec.rb61
-rw-r--r--spec/migrations/rename_services_to_integrations_spec.rb255
-rw-r--r--spec/migrations/reset_job_token_scope_enabled_spec.rb25
-rw-r--r--spec/migrations/schedule_backfill_draft_status_on_merge_requests_spec.rb59
-rw-r--r--spec/migrations/schedule_delete_orphaned_deployments_spec.rb48
-rw-r--r--spec/models/ability_spec.rb63
-rw-r--r--spec/models/abuse_report_spec.rb1
-rw-r--r--spec/models/alert_management/alert_spec.rb1
-rw-r--r--spec/models/application_setting/term_spec.rb4
-rw-r--r--spec/models/application_setting_spec.rb21
-rw-r--r--spec/models/audit_event_spec.rb65
-rw-r--r--spec/models/award_emoji_spec.rb69
-rw-r--r--spec/models/blob_viewer/markup_spec.rb10
-rw-r--r--spec/models/bulk_import_spec.rb6
-rw-r--r--spec/models/bulk_imports/entity_spec.rb20
-rw-r--r--spec/models/bulk_imports/file_transfer/group_config_spec.rb4
-rw-r--r--spec/models/bulk_imports/file_transfer/project_config_spec.rb4
-rw-r--r--spec/models/chat_name_spec.rb1
-rw-r--r--spec/models/chat_team_spec.rb1
-rw-r--r--spec/models/ci/build_dependencies_spec.rb18
-rw-r--r--spec/models/ci/build_spec.rb92
-rw-r--r--spec/models/ci/build_trace_chunk_spec.rb8
-rw-r--r--spec/models/ci/build_trace_chunks/fog_spec.rb51
-rw-r--r--spec/models/ci/job_artifact_spec.rb23
-rw-r--r--spec/models/ci/job_token/project_scope_link_spec.rb18
-rw-r--r--spec/models/ci/job_token/scope_spec.rb4
-rw-r--r--spec/models/ci/pending_build_spec.rb58
-rw-r--r--spec/models/ci/pipeline_schedule_spec.rb9
-rw-r--r--spec/models/ci/pipeline_spec.rb117
-rw-r--r--spec/models/ci/runner_spec.rb14
-rw-r--r--spec/models/ci/running_build_spec.rb5
-rw-r--r--spec/models/clusters/integrations/prometheus_spec.rb8
-rw-r--r--spec/models/clusters/kubernetes_namespace_spec.rb1
-rw-r--r--spec/models/commit_spec.rb1
-rw-r--r--spec/models/compare_spec.rb10
-rw-r--r--spec/models/concerns/approvable_base_spec.rb21
-rw-r--r--spec/models/concerns/atomic_internal_id_spec.rb18
-rw-r--r--spec/models/concerns/awardable_spec.rb84
-rw-r--r--spec/models/concerns/cache_markdown_field_spec.rb4
-rw-r--r--spec/models/concerns/cascading_namespace_setting_attribute_spec.rb22
-rw-r--r--spec/models/concerns/has_integrations_spec.rb12
-rw-r--r--spec/models/concerns/integrations/has_data_fields_spec.rb43
-rw-r--r--spec/models/concerns/issuable_spec.rb20
-rw-r--r--spec/models/concerns/partitioned_table_spec.rb12
-rw-r--r--spec/models/concerns/prometheus_adapter_spec.rb36
-rw-r--r--spec/models/container_repository_spec.rb55
-rw-r--r--spec/models/deploy_token_spec.rb1
-rw-r--r--spec/models/deployment_metrics_spec.rb22
-rw-r--r--spec/models/deployment_spec.rb3
-rw-r--r--spec/models/diff_discussion_spec.rb9
-rw-r--r--spec/models/diff_viewer/server_side_spec.rb1
-rw-r--r--spec/models/discussion_spec.rb18
-rw-r--r--spec/models/environment_spec.rb3
-rw-r--r--spec/models/error_tracking/error_event_spec.rb14
-rw-r--r--spec/models/error_tracking/error_spec.rb16
-rw-r--r--spec/models/event_collection_spec.rb1
-rw-r--r--spec/models/event_spec.rb14
-rw-r--r--spec/models/group_spec.rb6
-rw-r--r--spec/models/integration_spec.rb307
-rw-r--r--spec/models/integrations/asana_spec.rb10
-rw-r--r--spec/models/integrations/assembla_spec.rb6
-rw-r--r--spec/models/integrations/bamboo_spec.rb88
-rw-r--r--spec/models/integrations/base_chat_notification_spec.rb107
-rw-r--r--spec/models/integrations/base_issue_tracker_spec.rb12
-rw-r--r--spec/models/integrations/bugzilla_spec.rb15
-rw-r--r--spec/models/integrations/buildkite_spec.rb40
-rw-r--r--spec/models/integrations/campfire_spec.rb10
-rw-r--r--spec/models/integrations/confluence_spec.rb13
-rw-r--r--spec/models/integrations/custom_issue_tracker_spec.rb15
-rw-r--r--spec/models/integrations/datadog_spec.rb20
-rw-r--r--spec/models/integrations/discord_spec.rb23
-rw-r--r--spec/models/integrations/drone_ci_spec.rb58
-rw-r--r--spec/models/integrations/emails_on_push_spec.rb14
-rw-r--r--spec/models/integrations/ewm_spec.rb15
-rw-r--r--spec/models/integrations/external_wiki_spec.rb11
-rw-r--r--spec/models/integrations/flowdock_spec.rb10
-rw-r--r--spec/models/integrations/irker_spec.rb10
-rw-r--r--spec/models/integrations/jenkins_spec.rb92
-rw-r--r--spec/models/integrations/jira_spec.rb304
-rw-r--r--spec/models/integrations/mattermost_slash_commands_spec.rb30
-rw-r--r--spec/models/integrations/microsoft_teams_spec.rb57
-rw-r--r--spec/models/integrations/open_project_spec.rb13
-rw-r--r--spec/models/integrations/packagist_spec.rb10
-rw-r--r--spec/models/integrations/pipelines_email_spec.rb4
-rw-r--r--spec/models/integrations/pivotaltracker_spec.rb31
-rw-r--r--spec/models/integrations/prometheus_spec.rb (renamed from spec/models/project_services/prometheus_service_spec.rb)178
-rw-r--r--spec/models/integrations/pushover_spec.rb10
-rw-r--r--spec/models/integrations/redmine_spec.rb15
-rw-r--r--spec/models/integrations/slack_slash_commands_spec.rb8
-rw-r--r--spec/models/integrations/slack_spec.rb8
-rw-r--r--spec/models/integrations/teamcity_spec.rb84
-rw-r--r--spec/models/integrations/youtrack_spec.rb13
-rw-r--r--spec/models/internal_id_spec.rb279
-rw-r--r--spec/models/issue_spec.rb72
-rw-r--r--spec/models/label_note_spec.rb1
-rw-r--r--spec/models/lfs_file_lock_spec.rb1
-rw-r--r--spec/models/member_spec.rb297
-rw-r--r--spec/models/members/group_member_spec.rb21
-rw-r--r--spec/models/members/project_member_spec.rb13
-rw-r--r--spec/models/merge_request/cleanup_schedule_spec.rb133
-rw-r--r--spec/models/merge_request/diff_commit_user_spec.rb127
-rw-r--r--spec/models/merge_request_diff_commit_spec.rb50
-rw-r--r--spec/models/merge_request_diff_spec.rb49
-rw-r--r--spec/models/merge_request_spec.rb40
-rw-r--r--spec/models/milestone_spec.rb73
-rw-r--r--spec/models/namespace/root_storage_statistics_spec.rb1
-rw-r--r--spec/models/namespace_spec.rb85
-rw-r--r--spec/models/note_spec.rb4
-rw-r--r--spec/models/notification_setting_spec.rb1
-rw-r--r--spec/models/operations/feature_flag_spec.rb1
-rw-r--r--spec/models/packages/package_file_spec.rb7
-rw-r--r--spec/models/packages/package_spec.rb33
-rw-r--r--spec/models/plan_limits_spec.rb2
-rw-r--r--spec/models/plan_spec.rb23
-rw-r--r--spec/models/project_ci_cd_setting_spec.rb4
-rw-r--r--spec/models/project_spec.rb248
-rw-r--r--spec/models/prometheus_alert_spec.rb1
-rw-r--r--spec/models/protected_branch/push_access_level_spec.rb1
-rw-r--r--spec/models/repository_spec.rb5
-rw-r--r--spec/models/service_desk_setting_spec.rb2
-rw-r--r--spec/models/snippet_repository_spec.rb1
-rw-r--r--spec/models/snippet_spec.rb9
-rw-r--r--spec/models/terraform/state_spec.rb1
-rw-r--r--spec/models/timelog_spec.rb2
-rw-r--r--spec/models/u2f_registration_spec.rb1
-rw-r--r--spec/models/user_spec.rb111
-rw-r--r--spec/models/wiki_page_spec.rb5
-rw-r--r--spec/policies/global_policy_spec.rb6
-rw-r--r--spec/policies/integration_policy_spec.rb4
-rw-r--r--spec/policies/project_policy_spec.rb142
-rw-r--r--spec/policies/release_policy_spec.rb62
-rw-r--r--spec/presenters/blob_presenter_spec.rb22
-rw-r--r--spec/presenters/ci/build_runner_presenter_spec.rb32
-rw-r--r--spec/presenters/merge_request_presenter_spec.rb10
-rw-r--r--spec/presenters/packages/nuget/package_metadata_presenter_spec.rb4
-rw-r--r--spec/presenters/packages/nuget/service_index_presenter_spec.rb13
-rw-r--r--spec/presenters/snippet_blob_presenter_spec.rb21
-rw-r--r--spec/requests/api/admin/plan_limits_spec.rb12
-rw-r--r--spec/requests/api/api_spec.rb2
-rw-r--r--spec/requests/api/award_emoji_spec.rb1
-rw-r--r--spec/requests/api/branches_spec.rb14
-rw-r--r--spec/requests/api/bulk_imports_spec.rb67
-rw-r--r--spec/requests/api/ci/pipelines_spec.rb1
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb23
-rw-r--r--spec/requests/api/ci/runner/jobs_trace_spec.rb16
-rw-r--r--spec/requests/api/ci/runner/runners_post_spec.rb56
-rw-r--r--spec/requests/api/composer_packages_spec.rb1
-rw-r--r--spec/requests/api/debian_group_packages_spec.rb10
-rw-r--r--spec/requests/api/debian_project_packages_spec.rb10
-rw-r--r--spec/requests/api/deploy_tokens_spec.rb1
-rw-r--r--spec/requests/api/deployments_spec.rb1
-rw-r--r--spec/requests/api/error_tracking_collector_spec.rb77
-rw-r--r--spec/requests/api/error_tracking_spec.rb1
-rw-r--r--spec/requests/api/feature_flags_spec.rb1
-rw-r--r--spec/requests/api/freeze_periods_spec.rb1
-rw-r--r--spec/requests/api/geo_spec.rb30
-rw-r--r--spec/requests/api/graphql/boards/board_lists_query_spec.rb1
-rw-r--r--spec/requests/api/graphql/ci/jobs_spec.rb6
-rw-r--r--spec/requests/api/graphql/ci/pipelines_spec.rb1
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb111
-rw-r--r--spec/requests/api/graphql/current_user_todos_spec.rb1
-rw-r--r--spec/requests/api/graphql/issue_status_counts_spec.rb1
-rw-r--r--spec/requests/api/graphql/metrics/dashboard_query_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/alerts/set_assignees_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/alerts/todo/create_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/alerts/update_alert_status_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/prometheus_integration/create_spec.rb3
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/prometheus_integration/update_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/award_emojis/add_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/boards/create_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/branches/create_spec.rb4
-rw-r--r--spec/requests/api/graphql/mutations/ci/ci_cd_settings_update_spec.rb5
-rw-r--r--spec/requests/api/graphql/mutations/ci/job_token_scope/add_project_spec.rb78
-rw-r--r--spec/requests/api/graphql/mutations/ci/job_token_scope/remove_project_spec.rb84
-rw-r--r--spec/requests/api/graphql/mutations/commits/create_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/environments/canary_ingress/update_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/issues/set_locked_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/issues/set_severity_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/issues/update_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/jira_import/start_spec.rb9
-rw-r--r--spec/requests/api/graphql/mutations/labels/create_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/accept_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/create_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb10
-rw-r--r--spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/notes/create/note_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/notes/reposition_image_diff_note_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/packages/destroy_spec.rb93
-rw-r--r--spec/requests/api/graphql/mutations/releases/delete_spec.rb8
-rw-r--r--spec/requests/api/graphql/mutations/snippets/create_spec.rb33
-rw-r--r--spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/snippets/update_spec.rb29
-rw-r--r--spec/requests/api/graphql/mutations/user_callouts/create_spec.rb1
-rw-r--r--spec/requests/api/graphql/namespace/package_settings_spec.rb1
-rw-r--r--spec/requests/api/graphql/project/alert_management/alert/issue_spec.rb1
-rw-r--r--spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb1
-rw-r--r--spec/requests/api/graphql/project/alert_management/integrations_spec.rb24
-rw-r--r--spec/requests/api/graphql/project/base_service_spec.rb4
-rw-r--r--spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb1
-rw-r--r--spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/issue/designs/designs_spec.rb3
-rw-r--r--spec/requests/api/graphql/project/jira_service_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/pipeline_spec.rb81
-rw-r--r--spec/requests/api/graphql/project/project_pipeline_statistics_spec.rb1
-rw-r--r--spec/requests/api/graphql/project_query_spec.rb1
-rw-r--r--spec/requests/api/graphql/query_spec.rb2
-rw-r--r--spec/requests/api/graphql/user/starred_projects_query_spec.rb1
-rw-r--r--spec/requests/api/graphql/user_query_spec.rb2
-rw-r--r--spec/requests/api/graphql_spec.rb2
-rw-r--r--spec/requests/api/group_avatar_spec.rb36
-rw-r--r--spec/requests/api/group_import_spec.rb1
-rw-r--r--spec/requests/api/group_labels_spec.rb26
-rw-r--r--spec/requests/api/group_milestones_spec.rb1
-rw-r--r--spec/requests/api/group_packages_spec.rb2
-rw-r--r--spec/requests/api/groups_spec.rb62
-rw-r--r--spec/requests/api/helm_packages_spec.rb176
-rw-r--r--spec/requests/api/helpers_spec.rb1
-rw-r--r--spec/requests/api/import_bitbucket_server_spec.rb5
-rw-r--r--spec/requests/api/internal/base_spec.rb129
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb60
-rw-r--r--spec/requests/api/labels_spec.rb30
-rw-r--r--spec/requests/api/lint_spec.rb2
-rw-r--r--spec/requests/api/markdown_spec.rb1
-rw-r--r--spec/requests/api/merge_requests_spec.rb18
-rw-r--r--spec/requests/api/metrics/dashboard/annotations_spec.rb1
-rw-r--r--spec/requests/api/metrics/user_starred_dashboards_spec.rb1
-rw-r--r--spec/requests/api/nuget_project_packages_spec.rb127
-rw-r--r--spec/requests/api/project_attributes.yml1
-rw-r--r--spec/requests/api/project_clusters_spec.rb1
-rw-r--r--spec/requests/api/project_container_repositories_spec.rb1
-rw-r--r--spec/requests/api/project_milestones_spec.rb1
-rw-r--r--spec/requests/api/project_snippets_spec.rb2
-rw-r--r--spec/requests/api/projects_spec.rb103
-rw-r--r--spec/requests/api/pypi_packages_spec.rb2
-rw-r--r--spec/requests/api/release/links_spec.rb68
-rw-r--r--spec/requests/api/releases_spec.rb82
-rw-r--r--spec/requests/api/repositories_spec.rb11
-rw-r--r--spec/requests/api/resource_access_tokens_spec.rb20
-rw-r--r--spec/requests/api/rubygem_packages_spec.rb2
-rw-r--r--spec/requests/api/services_spec.rb186
-rw-r--r--spec/requests/api/settings_spec.rb13
-rw-r--r--spec/requests/api/snippets_spec.rb2
-rw-r--r--spec/requests/api/system_hooks_spec.rb5
-rw-r--r--spec/requests/api/unleash_spec.rb1
-rw-r--r--spec/requests/api/users_spec.rb2
-rw-r--r--spec/requests/api/wikis_spec.rb3
-rw-r--r--spec/requests/git_http_spec.rb24
-rw-r--r--spec/requests/import/gitlab_groups_controller_spec.rb1
-rw-r--r--spec/requests/invite_registration_spec.rb68
-rw-r--r--spec/requests/jwt_controller_spec.rb1
-rw-r--r--spec/requests/lfs_http_spec.rb16
-rw-r--r--spec/requests/product_analytics/collector_app_spec.rb1
-rw-r--r--spec/requests/projects/merge_requests/diffs_spec.rb126
-rw-r--r--spec/requests/projects/merge_requests_discussions_spec.rb139
-rw-r--r--spec/requests/rack_attack_global_spec.rb1
-rw-r--r--spec/routing/project_routing_spec.rb10
-rw-r--r--spec/rubocop/code_reuse_helpers_spec.rb75
-rw-r--r--spec/rubocop/cop/database/multiple_databases_spec.rb15
-rw-r--r--spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb233
-rw-r--r--spec/rubocop/cop/migration/prevent_index_creation_spec.rb50
-rw-r--r--spec/rubocop/cop/migration/sidekiq_queue_migrate_spec.rb47
-rw-r--r--spec/rubocop/cop/worker_data_consistency_spec.rb50
-rw-r--r--spec/serializers/analytics/cycle_analytics/stage_entity_spec.rb8
-rw-r--r--spec/serializers/paginated_diff_entity_spec.rb11
-rw-r--r--spec/serializers/service_event_entity_spec.rb6
-rw-r--r--spec/serializers/service_field_entity_spec.rb19
-rw-r--r--spec/services/admin/propagate_integration_service_spec.rb14
-rw-r--r--spec/services/admin/propagate_service_template_spec.rb1
-rw-r--r--spec/services/alert_management/create_alert_issue_service_spec.rb1
-rw-r--r--spec/services/application_settings/update_service_spec.rb4
-rw-r--r--spec/services/audit_event_service_spec.rb10
-rw-r--r--spec/services/auth/container_registry_authentication_service_spec.rb92
-rw-r--r--spec/services/auth/dependency_proxy_authentication_service_spec.rb1
-rw-r--r--spec/services/auto_merge_service_spec.rb1
-rw-r--r--spec/services/branches/create_service_spec.rb15
-rw-r--r--spec/services/bulk_create_integration_service_spec.rb17
-rw-r--r--spec/services/bulk_imports/file_download_service_spec.rb147
-rw-r--r--spec/services/bulk_update_integration_service_spec.rb2
-rw-r--r--spec/services/captcha/captcha_verification_service_spec.rb28
-rw-r--r--spec/services/ci/after_requeue_job_service_spec.rb30
-rw-r--r--spec/services/ci/append_build_trace_service_spec.rb44
-rw-r--r--spec/services/ci/archive_trace_service_spec.rb46
-rw-r--r--spec/services/ci/create_pipeline_service/cache_spec.rb42
-rw-r--r--spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb4
-rw-r--r--spec/services/ci/create_pipeline_service/dry_run_spec.rb4
-rw-r--r--spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb144
-rw-r--r--spec/services/ci/create_pipeline_service/needs_spec.rb4
-rw-r--r--spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb40
-rw-r--r--spec/services/ci/create_pipeline_service/rules_spec.rb16
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb4
-rw-r--r--spec/services/ci/destroy_pipeline_service_spec.rb24
-rw-r--r--spec/services/ci/job_token_scope/add_project_service_spec.rb39
-rw-r--r--spec/services/ci/job_token_scope/remove_project_service_spec.rb45
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service.rb14
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_same_stages.yml47
-rw-r--r--spec/services/ci/pipelines/add_job_service_spec.rb72
-rw-r--r--spec/services/ci/play_bridge_service_spec.rb10
-rw-r--r--spec/services/ci/play_build_service_spec.rb10
-rw-r--r--spec/services/ci/register_job_service_spec.rb63
-rw-r--r--spec/services/ci/retry_build_service_spec.rb15
-rw-r--r--spec/services/ci/update_build_queue_service_spec.rb12
-rw-r--r--spec/services/clusters/applications/prometheus_health_check_service_spec.rb1
-rw-r--r--spec/services/commits/commit_patch_service_spec.rb2
-rw-r--r--spec/services/container_expiration_policy_service_spec.rb1
-rw-r--r--spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb1
-rw-r--r--spec/services/design_management/copy_design_collection/copy_service_spec.rb6
-rw-r--r--spec/services/design_management/copy_design_collection/queue_service_spec.rb2
-rw-r--r--spec/services/design_management/save_designs_service_spec.rb14
-rw-r--r--spec/services/discussions/resolve_service_spec.rb2
-rw-r--r--spec/services/discussions/unresolve_service_spec.rb1
-rw-r--r--spec/services/error_tracking/collect_error_service_spec.rb44
-rw-r--r--spec/services/event_create_service_spec.rb18
-rw-r--r--spec/services/git/base_hooks_service_spec.rb22
-rw-r--r--spec/services/git/branch_push_service_spec.rb23
-rw-r--r--spec/services/git/wiki_push_service_spec.rb34
-rw-r--r--spec/services/groups/create_service_spec.rb6
-rw-r--r--spec/services/groups/group_links/destroy_service_spec.rb6
-rw-r--r--spec/services/groups/group_links/update_service_spec.rb2
-rw-r--r--spec/services/groups/transfer_service_spec.rb8
-rw-r--r--spec/services/import/bitbucket_server_service_spec.rb1
-rw-r--r--spec/services/incident_management/incidents/create_service_spec.rb1
-rw-r--r--spec/services/incident_management/incidents/update_severity_service_spec.rb86
-rw-r--r--spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb1
-rw-r--r--spec/services/incident_management/pager_duty/process_webhook_service_spec.rb1
-rw-r--r--spec/services/integrations/test/project_service_spec.rb11
-rw-r--r--spec/services/issuable/bulk_update_service_spec.rb1
-rw-r--r--spec/services/issues/close_service_spec.rb8
-rw-r--r--spec/services/issues/create_service_spec.rb86
-rw-r--r--spec/services/issues/move_service_spec.rb8
-rw-r--r--spec/services/issues/reopen_service_spec.rb4
-rw-r--r--spec/services/issues/update_service_spec.rb162
-rw-r--r--spec/services/jira/requests/projects/list_service_spec.rb20
-rw-r--r--spec/services/jira_connect/sync_service_spec.rb1
-rw-r--r--spec/services/jira_connect_installations/destroy_service_spec.rb41
-rw-r--r--spec/services/jira_import/start_import_service_spec.rb5
-rw-r--r--spec/services/jira_import/users_importer_spec.rb12
-rw-r--r--spec/services/keys/destroy_service_spec.rb2
-rw-r--r--spec/services/markdown_content_rewriter_service_spec.rb2
-rw-r--r--spec/services/members/create_service_spec.rb3
-rw-r--r--spec/services/members/groups/creator_service_spec.rb16
-rw-r--r--spec/services/members/invite_service_spec.rb1
-rw-r--r--spec/services/members/projects/creator_service_spec.rb16
-rw-r--r--spec/services/merge_requests/build_service_spec.rb12
-rw-r--r--spec/services/merge_requests/handle_assignees_change_service_spec.rb4
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb6
-rw-r--r--spec/services/merge_requests/push_options_handler_service_spec.rb101
-rw-r--r--spec/services/merge_requests/rebase_service_spec.rb31
-rw-r--r--spec/services/metrics/dashboard/annotations/create_service_spec.rb1
-rw-r--r--spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb1
-rw-r--r--spec/services/metrics/users_starred_dashboards/create_service_spec.rb1
-rw-r--r--spec/services/namespace_settings/update_service_spec.rb65
-rw-r--r--spec/services/namespaces/in_product_marketing_emails_service_spec.rb46
-rw-r--r--spec/services/notes/copy_service_spec.rb1
-rw-r--r--spec/services/notes/create_service_spec.rb2
-rw-r--r--spec/services/notes/destroy_service_spec.rb1
-rw-r--r--spec/services/notes/post_process_service_spec.rb10
-rw-r--r--spec/services/notes/quick_actions_service_spec.rb1
-rw-r--r--spec/services/notes/update_service_spec.rb1
-rw-r--r--spec/services/notification_service_spec.rb69
-rw-r--r--spec/services/packages/composer/create_package_service_spec.rb1
-rw-r--r--spec/services/packages/conan/search_service_spec.rb1
-rw-r--r--spec/services/packages/create_package_file_service_spec.rb1
-rw-r--r--spec/services/packages/debian/find_or_create_package_service_spec.rb1
-rw-r--r--spec/services/packages/destroy_package_service_spec.rb61
-rw-r--r--spec/services/packages/maven/find_or_create_package_service_spec.rb2
-rw-r--r--spec/services/packages/nuget/metadata_extraction_service_spec.rb13
-rw-r--r--spec/services/packages/nuget/search_service_spec.rb1
-rw-r--r--spec/services/packages/nuget/update_package_from_metadata_service_spec.rb37
-rw-r--r--spec/services/packages/rubygems/dependency_resolver_service_spec.rb1
-rw-r--r--spec/services/pod_logs/base_service_spec.rb1
-rw-r--r--spec/services/pod_logs/elasticsearch_service_spec.rb1
-rw-r--r--spec/services/pod_logs/kubernetes_service_spec.rb1
-rw-r--r--spec/services/post_receive_service_spec.rb2
-rw-r--r--spec/services/projects/create_service_spec.rb102
-rw-r--r--spec/services/projects/destroy_rollback_service_spec.rb1
-rw-r--r--spec/services/projects/destroy_service_spec.rb1
-rw-r--r--spec/services/projects/gitlab_projects_import_service_spec.rb1
-rw-r--r--spec/services/projects/group_links/create_service_spec.rb5
-rw-r--r--spec/services/projects/group_links/destroy_service_spec.rb1
-rw-r--r--spec/services/projects/group_links/update_service_spec.rb83
-rw-r--r--spec/services/projects/lfs_pointers/lfs_download_service_spec.rb32
-rw-r--r--spec/services/projects/operations/update_service_spec.rb18
-rw-r--r--spec/services/projects/prometheus/alerts/notify_service_spec.rb6
-rw-r--r--spec/services/projects/protect_default_branch_service_spec.rb47
-rw-r--r--spec/services/projects/transfer_service_spec.rb83
-rw-r--r--spec/services/projects/update_pages_service_spec.rb1
-rw-r--r--spec/services/projects/update_repository_storage_service_spec.rb15
-rw-r--r--spec/services/projects/update_service_spec.rb55
-rw-r--r--spec/services/prometheus/create_default_alerts_service_spec.rb1
-rw-r--r--spec/services/prometheus/proxy_service_spec.rb2
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb1
-rw-r--r--spec/services/releases/create_evidence_service_spec.rb1
-rw-r--r--spec/services/releases/create_service_spec.rb15
-rw-r--r--spec/services/releases/destroy_service_spec.rb15
-rw-r--r--spec/services/releases/update_service_spec.rb15
-rw-r--r--spec/services/repositories/changelog_service_spec.rb2
-rw-r--r--spec/services/repositories/destroy_rollback_service_spec.rb1
-rw-r--r--spec/services/repositories/destroy_service_spec.rb1
-rw-r--r--spec/services/repositories/shell_destroy_service_spec.rb1
-rw-r--r--spec/services/resource_access_tokens/create_service_spec.rb27
-rw-r--r--spec/services/resource_access_tokens/revoke_service_spec.rb2
-rw-r--r--spec/services/resource_events/change_labels_service_spec.rb1
-rw-r--r--spec/services/resource_events/merge_into_notes_service_spec.rb1
-rw-r--r--spec/services/security/ci_configuration/sast_parser_service_spec.rb12
-rw-r--r--spec/services/service_ping/build_payload_service_spec.rb47
-rw-r--r--spec/services/service_ping/permit_data_categories_service_spec.rb67
-rw-r--r--spec/services/service_ping/submit_service_ping_service_spec.rb (renamed from spec/services/submit_usage_ping_service_spec.rb)86
-rw-r--r--spec/services/snippets/bulk_destroy_service_spec.rb1
-rw-r--r--spec/services/snippets/create_service_spec.rb8
-rw-r--r--spec/services/snippets/update_repository_storage_service_spec.rb15
-rw-r--r--spec/services/snippets/update_service_spec.rb10
-rw-r--r--spec/services/spam/akismet_service_spec.rb2
-rw-r--r--spec/services/spam/ham_service_spec.rb1
-rw-r--r--spec/services/spam/spam_action_service_spec.rb133
-rw-r--r--spec/services/spam/spam_params_spec.rb40
-rw-r--r--spec/services/spam/spam_verdict_service_spec.rb5
-rw-r--r--spec/services/system_note_service_spec.rb7
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb3
-rw-r--r--spec/services/test_hooks/project_service_spec.rb2
-rw-r--r--spec/services/test_hooks/system_service_spec.rb1
-rw-r--r--spec/services/user_project_access_changed_service_spec.rb11
-rw-r--r--spec/services/users/approve_service_spec.rb1
-rw-r--r--spec/services/users/reject_service_spec.rb1
-rw-r--r--spec/services/users/validate_otp_service_spec.rb1
-rw-r--r--spec/services/web_hook_service_spec.rb13
-rw-r--r--spec/services/wiki_pages/create_service_spec.rb20
-rw-r--r--spec/services/wiki_pages/event_create_service_spec.rb1
-rw-r--r--spec/services/wiki_pages/update_service_spec.rb22
-rw-r--r--spec/spec_helper.rb21
-rw-r--r--spec/support/capybara.rb6
-rw-r--r--spec/support/gitlab_experiment.rb10
-rw-r--r--spec/support/helpers/ci/template_helpers.rb11
-rw-r--r--spec/support/helpers/cycle_analytics_helpers.rb10
-rw-r--r--spec/support/helpers/database/table_schema_helpers.rb20
-rw-r--r--spec/support/helpers/feature_flag_helpers.rb6
-rw-r--r--spec/support/helpers/features/admin_users_helpers.rb25
-rw-r--r--spec/support/helpers/features/invite_members_modal_helper.rb2
-rw-r--r--spec/support/helpers/features/snippet_helpers.rb11
-rw-r--r--spec/support/helpers/features/source_editor_spec_helpers.rb (renamed from spec/support/helpers/features/editor_lite_spec_helpers.rb)4
-rw-r--r--spec/support/helpers/features/top_nav_spec_helpers.rb2
-rw-r--r--spec/support/helpers/grafana_api_helpers.rb2
-rw-r--r--spec/support/helpers/javascript_fixtures_helpers.rb14
-rw-r--r--spec/support/helpers/jira_service_helper.rb4
-rw-r--r--spec/support/helpers/live_debugger.rb12
-rw-r--r--spec/support/helpers/merge_request_diff_helpers.rb4
-rw-r--r--spec/support/helpers/require_migration.rb4
-rw-r--r--spec/support/helpers/services_helper.rb11
-rw-r--r--spec/support/helpers/stub_experiments.rb2
-rw-r--r--spec/support/helpers/stub_spam_services.rb23
-rw-r--r--spec/support/helpers/stubbed_feature.rb22
-rw-r--r--spec/support/helpers/test_env.rb86
-rw-r--r--spec/support/matchers/be_executed.rb11
-rw-r--r--spec/support/matchers/have_issuable_counts.rb2
-rw-r--r--spec/support/matchers/usage_metric_matchers.rb21
-rw-r--r--spec/support/omniauth_strategy.rb12
-rw-r--r--spec/support/redis/redis_helpers.rb8
-rw-r--r--spec/support/services/issuable_import_csv_service_shared_examples.rb20
-rw-r--r--spec/support/shared_contexts/features/integrations/integrations_shared_context.rb62
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb165
-rw-r--r--spec/support/shared_contexts/policies/project_policy_shared_context.rb4
-rw-r--r--spec/support/shared_contexts/requests/api/graphql/jira_import/jira_projects_context.rb4
-rw-r--r--spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb43
-rw-r--r--spec/support/shared_contexts/unique_ip_check_shared_context.rb6
-rw-r--r--spec/support/shared_examples/ci/edit_job_token_scope_shared_examples.rb40
-rw-r--r--spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb44
-rw-r--r--spec/support/shared_examples/features/cascading_settings_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/features/packages_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/features/search/search_timeouts_shared_examples.rb21
-rw-r--r--spec/support/shared_examples/features/sidebar_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb26
-rw-r--r--spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/graphql/design_fields_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb12
-rw-r--r--spec/support/shared_examples/graphql/spam_protection_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/lib/cache_helpers_shared_examples.rb101
-rw-r--r--spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/lib/gitlab/import_export/relation_factory_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/lib/gitlab/kubernetes/network_policy_common_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/lib/gitlab/search_results_sorted_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb41
-rw-r--r--spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/models/chat_integration_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/models/integrations/base_slash_commands_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb97
-rw-r--r--spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/member_shared_examples.rb256
-rw-r--r--spec/support/shared_examples/models/project_ci_cd_settings_shared_examples.rb54
-rw-r--r--spec/support/shared_examples/models/wiki_shared_examples.rb95
-rw-r--r--spec/support/shared_examples/namespaces/traversal_examples.rb96
-rw-r--r--spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/requests/api/graphql/noteable_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/requests/api/helm_packages_shared_examples.rb229
-rw-r--r--spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb136
-rw-r--r--spec/support/shared_examples/requests/api/packages_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb25
-rw-r--r--spec/support/shared_examples/services/jira_import/user_mapper_services_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/packages_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb15
-rw-r--r--spec/support/shared_examples/services/service_ping/complete_service_ping_payload_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/services/service_ping/service_ping_payload_with_all_expected_metrics_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/services/service_ping/service_ping_payload_without_restricted_metrics_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/services/snippets_shared_examples.rb74
-rw-r--r--spec/support/shared_examples/services/wikis/create_attachment_service_shared_examples.rb19
-rw-r--r--spec/support/shared_examples/workers/in_product_marketing_email_shared_example.rb15
-rw-r--r--spec/support/sidekiq.rb21
-rw-r--r--spec/tasks/cache/clear/redis_spec.rb2
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb23
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb55
-rw-r--r--spec/tasks/gitlab/packages/composer_rake_spec.rb1
-rw-r--r--spec/tasks/gitlab/snippets_rake_spec.rb1
-rw-r--r--spec/tooling/danger/feature_flag_spec.rb20
-rw-r--r--spec/tooling/danger/project_helper_spec.rb43
-rw-r--r--spec/tooling/lib/tooling/kubernetes_client_spec.rb91
-rw-r--r--spec/uploaders/content_type_whitelist_spec.rb1
-rw-r--r--spec/uploaders/dependency_proxy/file_uploader_spec.rb2
-rw-r--r--spec/views/admin/application_settings/_eks.html.haml_spec.rb1
-rw-r--r--spec/views/admin/application_settings/_package_registry.html.haml_spec.rb1
-rw-r--r--spec/views/admin/application_settings/ci_cd.html.haml_spec.rb56
-rw-r--r--spec/views/admin/application_settings/repository.html.haml_spec.rb2
-rw-r--r--spec/views/devise/sessions/new.html.haml_spec.rb26
-rw-r--r--spec/views/groups/_home_panel.html.haml_spec.rb26
-rw-r--r--spec/views/groups/runners/_group_runners.html.haml_spec.rb43
-rw-r--r--spec/views/groups/settings/_transfer.html.haml_spec.rb17
-rw-r--r--spec/views/help/show.html.haml_spec.rb2
-rw-r--r--spec/views/layouts/_head.html.haml_spec.rb2
-rw-r--r--spec/views/layouts/_search.html.haml_spec.rb9
-rw-r--r--spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb51
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb347
-rw-r--r--spec/views/projects/_flash_messages.html.haml_spec.rb69
-rw-r--r--spec/views/projects/_home_panel.html.haml_spec.rb32
-rw-r--r--spec/views/projects/empty.html.haml_spec.rb1
-rw-r--r--spec/views/projects/pipelines/show.html.haml_spec.rb1
-rw-r--r--spec/views/projects/runners/_specific_runners.html.haml_spec.rb43
-rw-r--r--spec/views/projects/services/_form.haml_spec.rb2
-rw-r--r--spec/views/projects/settings/operations/show.html.haml_spec.rb6
-rw-r--r--spec/views/search/_results.html.haml_spec.rb3
-rw-r--r--spec/views/shared/_global_alert.html.haml_spec.rb85
-rw-r--r--spec/views/shared/_label_row.html.haml_spec.rb3
-rw-r--r--spec/views/shared/milestones/_top.html.haml_spec.rb1
-rw-r--r--spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb65
-rw-r--r--spec/workers/build_finished_worker_spec.rb15
-rw-r--r--spec/workers/build_queue_worker_spec.rb1
-rw-r--r--spec/workers/bulk_imports/export_request_worker_spec.rb5
-rw-r--r--spec/workers/ci/archive_trace_worker_spec.rb33
-rw-r--r--spec/workers/ci/build_finished_worker_spec.rb76
-rw-r--r--spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb18
-rw-r--r--spec/workers/clusters/applications/activate_service_worker_spec.rb22
-rw-r--r--spec/workers/clusters/applications/deactivate_service_worker_spec.rb32
-rw-r--r--spec/workers/concerns/application_worker_spec.rb71
-rw-r--r--spec/workers/concerns/gitlab/github_import/object_importer_spec.rb33
-rw-r--r--spec/workers/concerns/waitable_worker_spec.rb6
-rw-r--r--spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb45
-rw-r--r--spec/workers/container_expiration_policy_worker_spec.rb35
-rw-r--r--spec/workers/database/partition_management_worker_spec.rb29
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb5
-rw-r--r--spec/workers/expire_pipeline_cache_worker_spec.rb1
-rw-r--r--spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/import_issue_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/import_note_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/import_pull_request_merged_by_worker_spec.rb8
-rw-r--r--spec/workers/gitlab/github_import/import_pull_request_review_worker_spec.rb8
-rw-r--r--spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb4
-rw-r--r--spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb4
-rw-r--r--spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb4
-rw-r--r--spec/workers/gitlab_service_ping_worker_spec.rb (renamed from spec/workers/gitlab_usage_ping_worker_spec.rb)14
-rw-r--r--spec/workers/jira_connect/forward_event_worker_spec.rb56
-rw-r--r--spec/workers/jira_connect/sync_branch_worker_spec.rb78
-rw-r--r--spec/workers/jira_connect/sync_builds_worker_spec.rb5
-rw-r--r--spec/workers/jira_connect/sync_deployments_worker_spec.rb5
-rw-r--r--spec/workers/jira_connect/sync_feature_flags_worker_spec.rb5
-rw-r--r--spec/workers/jira_connect/sync_merge_request_worker_spec.rb35
-rw-r--r--spec/workers/jira_connect/sync_project_worker_spec.rb73
-rw-r--r--spec/workers/merge_request_cleanup_refs_worker_spec.rb89
-rw-r--r--spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb39
-rw-r--r--spec/workers/packages/helm/extraction_worker_spec.rb92
-rw-r--r--spec/workers/partition_creation_worker_spec.rb27
-rw-r--r--spec/workers/pipeline_hooks_worker_spec.rb1
-rw-r--r--spec/workers/post_receive_spec.rb7
-rw-r--r--spec/workers/project_service_worker_spec.rb14
-rw-r--r--spec/workers/projects/post_creation_worker_spec.rb36
-rw-r--r--spec/workers/propagate_integration_group_worker_spec.rb4
-rw-r--r--spec/workers/propagate_integration_inherit_descendant_worker_spec.rb4
-rw-r--r--spec/workers/propagate_integration_inherit_worker_spec.rb6
-rw-r--r--spec/workers/propagate_integration_project_worker_spec.rb4
-rw-r--r--spec/workers/remove_expired_group_links_worker_spec.rb2
-rw-r--r--spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb25
-rw-r--r--spec/workers/users/deactivate_dormant_users_worker_spec.rb12
1520 files changed, 31594 insertions, 16439 deletions
diff --git a/spec/config/settings_spec.rb b/spec/config/settings_spec.rb
index 6525ae653c9..0c2465678f9 100644
--- a/spec/config/settings_spec.rb
+++ b/spec/config/settings_spec.rb
@@ -113,12 +113,12 @@ RSpec.describe Settings do
end
end
- describe '.cron_for_usage_ping' do
+ describe '.cron_for_service_ping' do
it 'returns correct crontab for some manually calculated example' do
allow(Gitlab::CurrentSettings)
.to receive(:uuid) { 'd9e2f4e8-db1f-4e51-b03d-f427e1965c4a'}
- expect(described_class.send(:cron_for_usage_ping)).to eq('21 18 * * 4')
+ expect(described_class.send(:cron_for_service_ping)).to eq('21 18 * * 4')
end
it 'returns min, hour, day in the valid range' do
@@ -126,7 +126,7 @@ RSpec.describe Settings do
.to receive(:uuid) { SecureRandom.uuid }
10.times do
- cron = described_class.send(:cron_for_usage_ping).split(/\s/)
+ cron = described_class.send(:cron_for_service_ping).split(/\s/)
expect(cron[0].to_i).to be_between(0, 59)
expect(cron[1].to_i).to be_between(0, 23)
diff --git a/spec/controllers/abuse_reports_controller_spec.rb b/spec/controllers/abuse_reports_controller_spec.rb
index bab0d033056..3ef78226db0 100644
--- a/spec/controllers/abuse_reports_controller_spec.rb
+++ b/spec/controllers/abuse_reports_controller_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe AbuseReportsController do
post :create, params: { abuse_report: attrs }
end
- it 'redirects back to the reported user' do
+ it 'redirects back to root' do
post :create, params: { abuse_report: attrs }
expect(response).to redirect_to root_path
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb
index 6258dd30438..478bd1b7f0a 100644
--- a/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/spec/controllers/admin/application_settings_controller_spec.rb
@@ -171,6 +171,13 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
expect(ApplicationSetting.current.admin_mode).to be(true)
end
+ it 'updates valid_runner_registrars setting' do
+ put :update, params: { application_setting: { valid_runner_registrars: ['project', ''] } }
+
+ expect(response).to redirect_to(general_admin_application_settings_path)
+ expect(ApplicationSetting.current.valid_runner_registrars).to eq(['project'])
+ end
+
context "personal access token prefix settings" do
let(:application_settings) { ApplicationSetting.current }
diff --git a/spec/controllers/admin/integrations_controller_spec.rb b/spec/controllers/admin/integrations_controller_spec.rb
index 79c39784173..5a68bb2749b 100644
--- a/spec/controllers/admin/integrations_controller_spec.rb
+++ b/spec/controllers/admin/integrations_controller_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Admin::IntegrationsController do
end
describe '#edit' do
- Integration.available_services_names.each do |integration_name|
+ Integration.available_integration_names.each do |integration_name|
context "#{integration_name}" do
it 'successfully displays the template' do
get :edit, params: { id: integration_name }
@@ -27,7 +27,7 @@ RSpec.describe Admin::IntegrationsController do
end
it 'returns 404' do
- get :edit, params: { id: Integration.available_services_names.sample }
+ get :edit, params: { id: Integration.available_integration_names.sample }
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -37,10 +37,10 @@ RSpec.describe Admin::IntegrationsController do
describe '#update' do
include JiraServiceHelper
- let(:integration) { create(:jira_service, :instance) }
+ let(:integration) { create(:jira_integration, :instance) }
before do
- stub_jira_service_test
+ stub_jira_integration_test
allow(PropagateIntegrationWorker).to receive(:perform_async)
put :update, params: { id: integration.class.to_param, service: { url: url } }
@@ -75,8 +75,8 @@ RSpec.describe Admin::IntegrationsController do
end
describe '#reset' do
- let_it_be(:integration) { create(:jira_service, :instance) }
- let_it_be(:inheriting_integration) { create(:jira_service, inherit_from_id: integration.id) }
+ let_it_be(:integration) { create(:jira_integration, :instance) }
+ let_it_be(:inheriting_integration) { create(:jira_integration, inherit_from_id: integration.id) }
subject do
post :reset, params: { id: integration.class.to_param }
diff --git a/spec/controllers/admin/services_controller_spec.rb b/spec/controllers/admin/services_controller_spec.rb
index 995282ca4bb..06ff8f0db94 100644
--- a/spec/controllers/admin/services_controller_spec.rb
+++ b/spec/controllers/admin/services_controller_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Admin::ServicesController do
describe 'GET #edit' do
let(:service) do
- create(:jira_service, :template)
+ create(:jira_integration, :template)
end
it 'successfully displays the template' do
@@ -30,7 +30,7 @@ RSpec.describe Admin::ServicesController do
context 'when instance integration exists' do
before do
- create(:jira_service, :instance)
+ create(:jira_integration, :instance)
end
it 'redirects to the admin application integration page' do
diff --git a/spec/controllers/confirmations_controller_spec.rb b/spec/controllers/confirmations_controller_spec.rb
index c9a0ae981fc..401ee36b387 100644
--- a/spec/controllers/confirmations_controller_spec.rb
+++ b/spec/controllers/confirmations_controller_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe ConfirmationsController do
context 'user is already confirmed' do
let_it_be_with_reload(:user) { create(:user, :unconfirmed) }
+
let(:confirmation_token) { user.confirmation_token }
before do
@@ -57,6 +58,7 @@ RSpec.describe ConfirmationsController do
context 'user accesses the link after the expiry of confirmation token has passed' do
let_it_be_with_reload(:user) { create(:user, :unconfirmed) }
+
let(:confirmation_token) { user.confirmation_token }
before do
diff --git a/spec/controllers/dashboard/projects_controller_spec.rb b/spec/controllers/dashboard/projects_controller_spec.rb
index ed8dc1eb7cb..0d9bd146778 100644
--- a/spec/controllers/dashboard/projects_controller_spec.rb
+++ b/spec/controllers/dashboard/projects_controller_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Dashboard::ProjectsController, :aggregate_failures do
context 'user logged in' do
let_it_be(:project) { create(:project, name: 'Project 1') }
let_it_be(:project2) { create(:project, name: 'Project Two') }
+
let(:projects) { [project, project2] }
before_all do
diff --git a/spec/controllers/groups/group_links_controller_spec.rb b/spec/controllers/groups/group_links_controller_spec.rb
index 94d3c1ffa0f..fafe9715946 100644
--- a/spec/controllers/groups/group_links_controller_spec.rb
+++ b/spec/controllers/groups/group_links_controller_spec.rb
@@ -88,7 +88,7 @@ RSpec.describe Groups::GroupLinksController do
end
end
- it 'updates project permissions' do
+ it 'updates project permissions', :sidekiq_inline do
expect { subject }.to change { group_member.can?(:read_project, project) }.from(false).to(true)
end
@@ -207,7 +207,7 @@ RSpec.describe Groups::GroupLinksController do
end
end
- it 'updates project permissions' do
+ it 'updates project permissions', :sidekiq_inline do
expect { subject }.to change { group_member.can?(:create_release, project) }.from(true).to(false)
end
end
@@ -244,7 +244,7 @@ RSpec.describe Groups::GroupLinksController do
expect { subject }.to change(GroupGroupLink, :count).by(-1)
end
- it 'updates project permissions' do
+ it 'updates project permissions', :sidekiq_inline do
expect { subject }.to change { group_member.can?(:create_release, project) }.from(true).to(false)
end
end
diff --git a/spec/controllers/groups/settings/integrations_controller_spec.rb b/spec/controllers/groups/settings/integrations_controller_spec.rb
index 4f1f6dcaae4..ef8f9f69710 100644
--- a/spec/controllers/groups/settings/integrations_controller_spec.rb
+++ b/spec/controllers/groups/settings/integrations_controller_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Groups::Settings::IntegrationsController do
describe '#edit' do
context 'when user is not owner' do
it 'renders not_found' do
- get :edit, params: { group_id: group, id: Integration.available_services_names(include_project_specific: false).sample }
+ get :edit, params: { group_id: group, id: Integration.available_integration_names(include_project_specific: false).sample }
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -47,8 +47,8 @@ RSpec.describe Groups::Settings::IntegrationsController do
group.add_owner(user)
end
- Integration.available_services_names(include_project_specific: false).each do |integration_name|
- context "#{integration_name}" do
+ Integration.available_integration_names(include_project_specific: false).each do |integration_name|
+ context integration_name do
it 'successfully displays the template' do
get :edit, params: { group_id: group, id: integration_name }
@@ -63,11 +63,11 @@ RSpec.describe Groups::Settings::IntegrationsController do
describe '#update' do
include JiraServiceHelper
- let(:integration) { create(:jira_service, project: nil, group_id: group.id) }
+ let(:integration) { create(:jira_integration, project: nil, group_id: group.id) }
before do
group.add_owner(user)
- stub_jira_service_test
+ stub_jira_integration_test
put :update, params: { group_id: group, id: integration.class.to_param, service: { url: url } }
end
@@ -93,8 +93,8 @@ RSpec.describe Groups::Settings::IntegrationsController do
end
describe '#reset' do
- let_it_be(:integration) { create(:jira_service, group: group, project: nil) }
- let_it_be(:inheriting_integration) { create(:jira_service, inherit_from_id: integration.id) }
+ let_it_be(:integration) { create(:jira_integration, group: group, project: nil) }
+ let_it_be(:inheriting_integration) { create(:jira_integration, inherit_from_id: integration.id) }
subject do
post :reset, params: { group_id: group, id: integration.class.to_param }
diff --git a/spec/controllers/help_controller_spec.rb b/spec/controllers/help_controller_spec.rb
index 71d9cab7280..599e82afe9b 100644
--- a/spec/controllers/help_controller_spec.rb
+++ b/spec/controllers/help_controller_spec.rb
@@ -150,11 +150,11 @@ RSpec.describe HelpController do
context 'for Markdown formats' do
subject { get :show, params: { path: path }, format: :md }
- let(:path) { 'ssh/README' }
+ let(:path) { 'ssh/index' }
context 'when requested file exists' do
before do
- expect_file_read(File.join(Rails.root, 'doc/ssh/README.md'), content: fixture_file('blockquote_fence_after.md'))
+ expect_file_read(File.join(Rails.root, 'doc/ssh/index.md'), content: fixture_file('blockquote_fence_after.md'))
subject
end
@@ -265,7 +265,7 @@ RSpec.describe HelpController do
it 'always renders not found' do
get :show,
params: {
- path: 'ssh/README'
+ path: 'ssh/index'
},
format: :foo
expect(response).to be_not_found
@@ -274,7 +274,7 @@ RSpec.describe HelpController do
end
def stub_readme(content)
- expect_file_read(Rails.root.join('doc', 'README.md'), content: content)
+ expect_file_read(Rails.root.join('doc', 'index.md'), content: content)
end
def stub_two_factor_required
diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb
index 8f74d210667..3b2ed2c63ed 100644
--- a/spec/controllers/import/bulk_imports_controller_spec.rb
+++ b/spec/controllers/import/bulk_imports_controller_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe Import::BulkImportsController do
end
describe 'GET status' do
- let(:client) { BulkImports::Clients::HTTP.new(uri: 'http://gitlab.example', token: 'token') }
+ let(:client) { BulkImports::Clients::HTTP.new(url: 'http://gitlab.example', token: 'token') }
describe 'serialized group data' do
let(:client_response) do
@@ -149,7 +149,7 @@ RSpec.describe Import::BulkImportsController do
context 'when connection error occurs' do
before do
allow(controller).to receive(:client).and_return(client)
- allow(client).to receive(:get).and_raise(BulkImports::Clients::HTTP::ConnectionError)
+ allow(client).to receive(:get).and_raise(BulkImports::Error)
end
it 'returns 422' do
diff --git a/spec/controllers/invites_controller_spec.rb b/spec/controllers/invites_controller_spec.rb
index 6b94d186d5f..0d9cde88eca 100644
--- a/spec/controllers/invites_controller_spec.rb
+++ b/spec/controllers/invites_controller_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe InvitesController do
let_it_be(:user) { create(:user) }
let_it_be(:member, reload: true) { create(:project_member, :invited, invite_email: user.email) }
+
let(:raw_invite_token) { member.raw_invite_token }
let(:project_members) { member.source.users }
let(:md5_member_global_id) { Digest::MD5.hexdigest(member.to_global_id.to_s) }
@@ -127,38 +128,11 @@ RSpec.describe InvitesController do
expect(flash[:notice]).to include('create an account or sign in')
end
- context 'when it is part of our invite email experiment', :experiment, :aggregate_failures do
- let(:experience) { :control }
-
- before do
- stub_experiments(invite_signup_page_interaction: experience)
- end
-
- it 'sets originating_member_id session key' do
- request
-
- expect(session[:originating_member_id]).to eq(member.id)
- end
-
- context 'with control experience' do
- it 'is redirected to a new registration with invite email param and flash message' do
- request
-
- expect(response).to redirect_to(new_user_registration_path(invite_email: member.invite_email))
- expect(flash[:notice]).to eq 'To accept this invitation, create an account or sign in.'
- end
- end
-
- context 'with candidate experience' do
- let(:experience) { :candidate }
-
- it 'is redirected to a new invite registration with invite email param and no flash message' do
- request
+ it 'is redirected to a new registration with invite email param and flash message', :aggregate_failures do
+ request
- expect(response).to redirect_to(new_users_sign_up_invite_path(invite_email: member.invite_email))
- expect(flash[:notice]).to be_nil
- end
- end
+ expect(response).to redirect_to(new_user_registration_path(invite_email: member.invite_email))
+ expect(flash[:notice]).to eq 'To accept this invitation, create an account or sign in.'
end
it 'sets session keys for auto email confirmation on sign up' do
diff --git a/spec/controllers/jira_connect/events_controller_spec.rb b/spec/controllers/jira_connect/events_controller_spec.rb
index 8a07f69e480..e9fecb594a7 100644
--- a/spec/controllers/jira_connect/events_controller_spec.rb
+++ b/spec/controllers/jira_connect/events_controller_spec.rb
@@ -66,19 +66,19 @@ RSpec.describe JiraConnect::EventsController do
request.headers['Authorization'] = "JWT #{auth_token}"
end
- subject { post :uninstalled }
+ subject(:post_uninstalled) { post :uninstalled }
context 'when JWT is invalid' do
let(:auth_token) { 'invalid_token' }
it 'returns 403' do
- subject
+ post_uninstalled
expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not delete the installation' do
- expect { subject }.not_to change { JiraConnectInstallation.count }
+ expect { post_uninstalled }.not_to change { JiraConnectInstallation.count }
end
end
@@ -87,8 +87,27 @@ RSpec.describe JiraConnect::EventsController do
Atlassian::Jwt.encode({ iss: installation.client_key, qsh: qsh }, installation.shared_secret)
end
- it 'deletes the installation' do
- expect { subject }.to change { JiraConnectInstallation.count }.by(-1)
+ let(:jira_base_path) { '/-/jira_connect' }
+ let(:jira_event_path) { '/-/jira_connect/events/uninstalled' }
+
+ it 'calls the DestroyService and returns ok in case of success' do
+ expect_next_instance_of(JiraConnectInstallations::DestroyService, installation, jira_base_path, jira_event_path) do |destroy_service|
+ expect(destroy_service).to receive(:execute).and_return(true)
+ end
+
+ post_uninstalled
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'calls the DestroyService and returns unprocessable_entity in case of failure' do
+ expect_next_instance_of(JiraConnectInstallations::DestroyService, installation, jira_base_path, jira_event_path) do |destroy_service|
+ expect(destroy_service).to receive(:execute).and_return(false)
+ end
+
+ post_uninstalled
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
diff --git a/spec/controllers/profiles/emails_controller_spec.rb b/spec/controllers/profiles/emails_controller_spec.rb
index 950120ae564..ce16632472f 100644
--- a/spec/controllers/profiles/emails_controller_spec.rb
+++ b/spec/controllers/profiles/emails_controller_spec.rb
@@ -63,6 +63,7 @@ RSpec.describe Profiles::EmailsController do
describe '#resend_confirmation_instructions' do
let_it_be(:email) { create(:email, user: user) }
+
let(:params) { { id: email.id } }
subject { put(:resend_confirmation_instructions, params: params) }
diff --git a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
index 1fdd1200028..3859af66292 100644
--- a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
+++ b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
@@ -64,5 +64,17 @@ RSpec.describe Profiles::PersonalAccessTokensController do
it "retrieves newly created personal access token value" do
expect(assigns(:new_personal_access_token)).to eql(token_value)
end
+
+ it "sets PAT name and scopes" do
+ name = 'My PAT'
+ scopes = 'api,read_user'
+
+ get :index, params: { name: name, scopes: scopes }
+
+ expect(assigns(:personal_access_token)).to have_attributes(
+ name: eq(name),
+ scopes: contain_exactly(:api, :read_user)
+ )
+ end
end
end
diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb
index 9493215247a..53efcc65066 100644
--- a/spec/controllers/projects/blob_controller_spec.rb
+++ b/spec/controllers/projects/blob_controller_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe Projects::BlobController do
include ProjectForksHelper
- let(:project) { create(:project, :public, :repository) }
+ let(:project) { create(:project, :public, :repository, previous_default_branch: previous_default_branch) }
+ let(:previous_default_branch) { nil }
describe "GET show" do
def request
@@ -42,6 +43,20 @@ RSpec.describe Projects::BlobController do
it { is_expected.to respond_with(:not_found) }
end
+ context "renamed default branch, valid file" do
+ let(:id) { 'old-default-branch/README.md' }
+ let(:previous_default_branch) { 'old-default-branch' }
+
+ it { is_expected.to redirect_to("/#{project.full_path}/-/blob/#{project.default_branch}/README.md") }
+ end
+
+ context "renamed default branch, invalid file" do
+ let(:id) { 'old-default-branch/invalid-path.rb' }
+ let(:previous_default_branch) { 'old-default-branch' }
+
+ it { is_expected.to redirect_to("/#{project.full_path}/-/blob/#{project.default_branch}/invalid-path.rb") }
+ end
+
context "binary file" do
let(:id) { 'binary-encoding/encoding/binary-1.bin' }
diff --git a/spec/controllers/projects/commit_controller_spec.rb b/spec/controllers/projects/commit_controller_spec.rb
index c650d145bef..16bb33e95c8 100644
--- a/spec/controllers/projects/commit_controller_spec.rb
+++ b/spec/controllers/projects/commit_controller_spec.rb
@@ -483,7 +483,7 @@ RSpec.describe Projects::CommitController do
end
context 'when rendering a JSON format' do
- it 'responds with serialized pipelines' do
+ it 'responds with serialized pipelines', :aggregate_failures do
get_pipelines(id: commit.id, format: :json)
expect(response).to be_ok
@@ -491,6 +491,26 @@ RSpec.describe Projects::CommitController do
expect(json_response['count']['all']).to eq 1
expect(response).to include_pagination_headers
end
+
+ context 'with pagination' do
+ let!(:extra_pipeline) { create(:ci_pipeline, project: project, ref: project.default_branch, sha: commit.sha, status: :running) }
+
+ it 'paginates the result when ref is blank' do
+ allow(Ci::Pipeline).to receive(:default_per_page).and_return(1)
+
+ get_pipelines(id: commit.id, format: :json)
+
+ expect(json_response['pipelines'].count).to eq(1)
+ end
+
+ it 'paginates the result when ref is present' do
+ allow(Ci::Pipeline).to receive(:default_per_page).and_return(1)
+
+ get_pipelines(id: commit.id, ref: project.default_branch, format: :json)
+
+ expect(json_response['pipelines'].count).to eq(1)
+ end
+ end
end
end
end
diff --git a/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb b/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb
index c78b838d0df..55ab0f0eefa 100644
--- a/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb
+++ b/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb
@@ -109,7 +109,7 @@ RSpec.describe Projects::DesignManagement::Designs::RawImagesController do
context 'when sha is nil' do
let(:sha) { nil }
- let(:expected_ref) { 'master' }
+ let(:expected_ref) { project.design_repository.root_ref }
it_behaves_like 'a successful request for sha'
end
@@ -147,7 +147,7 @@ RSpec.describe Projects::DesignManagement::Designs::RawImagesController do
let(:file) { fixture_file_upload('spec/fixtures/dk.png', '`/png') }
let(:lfs_pointer) { Gitlab::Git::LfsPointerFile.new(file.read) }
let(:design) { create(:design, :with_lfs_file, file: lfs_pointer.pointer, issue: issue) }
- let(:lfs_oid) { project.design_repository.blob_at('HEAD', design.full_path).lfs_oid }
+ let(:lfs_oid) { project.design_repository.blob_at(design.repository.root_ref, design.full_path).lfs_oid }
let(:filepath) { design.full_path }
end
end
diff --git a/spec/controllers/projects/import/jira_controller_spec.rb b/spec/controllers/projects/import/jira_controller_spec.rb
index 37a7fce0c23..5288c0fcf21 100644
--- a/spec/controllers/projects/import/jira_controller_spec.rb
+++ b/spec/controllers/projects/import/jira_controller_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Projects::Import::JiraController do
def ensure_correct_config
sign_in(user)
project.add_maintainer(user)
- stub_jira_service_test
+ stub_jira_integration_test
end
shared_examples 'redirect with error' do |error|
@@ -54,8 +54,8 @@ RSpec.describe Projects::Import::JiraController do
context 'when loged user is a developer' do
before do
- create(:jira_service, project: project)
- stub_jira_service_test
+ create(:jira_integration, project: project)
+ stub_jira_integration_test
sign_in(user)
project.add_developer(user)
@@ -72,7 +72,7 @@ RSpec.describe Projects::Import::JiraController do
it_behaves_like 'users without permissions'
- context 'jira service configuration' do
+ context 'jira integration configuration' do
before do
sign_in(user)
project.add_maintainer(user)
@@ -80,14 +80,14 @@ RSpec.describe Projects::Import::JiraController do
context 'when Jira service is not enabled for the project' do
it 'does not query Jira service' do
- expect(project).not_to receive(:jira_service)
+ expect(project).not_to receive(:jira_integration)
end
it_behaves_like 'template with no message'
end
context 'when Jira service is not configured correctly for the project' do
- let_it_be(:jira_service) { create(:jira_service, project: project) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project) }
before do
WebMock.stub_request(:get, 'https://jira.example.com/rest/api/2/serverInfo')
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 7569a18baeb..922ecb6052a 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Projects::IssuesController do
before do
sign_in(user)
project.add_developer(user)
- create(:jira_service, project: project)
+ create(:jira_integration, project: project)
end
context 'when GitLab issues disabled' do
@@ -1016,10 +1016,13 @@ RSpec.describe Projects::IssuesController do
let(:spammy_title) { 'Whatever' }
let!(:spam_logs) { create_list(:spam_log, 2, user: user, title: spammy_title) }
+ before do
+ request.headers['X-GitLab-Captcha-Response'] = 'a-valid-captcha-response'
+ request.headers['X-GitLab-Spam-Log-Id'] = spam_logs.last.id
+ end
+
def update_verified_issue
- update_issue(
- issue_params: { title: spammy_title },
- additional_params: { spam_log_id: spam_logs.last.id, 'g-recaptcha-response': true })
+ update_issue(issue_params: { title: spammy_title })
end
it 'returns 200 status' do
@@ -1036,8 +1039,9 @@ RSpec.describe Projects::IssuesController do
it 'does not mark spam log as recaptcha_verified when it does not belong to current_user' do
spam_log = create(:spam_log)
+ request.headers['X-GitLab-Spam-Log-Id'] = spam_log.id
- expect { update_issue(issue_params: { spam_log_id: spam_log.id, 'g-recaptcha-response': true }) }
+ expect { update_issue }
.not_to change { SpamLog.last.recaptcha_verified }
end
end
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index 4fcb63ac616..707d074b5c1 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -88,8 +88,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
before do
- stub_feature_flags(diffs_gradual_load: false)
-
project.add_maintainer(user)
sign_in(user)
end
@@ -474,8 +472,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
diff_view: :inline,
merge_ref_head_diff: nil,
pagination_data: {
- current_page: nil,
- next_page: nil,
total_pages: nil
}.merge(pagination_data)
}
@@ -486,7 +482,7 @@ RSpec.describe Projects::MergeRequests::DiffsController do
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid,
- page: 1,
+ page: 0,
per_page: 20,
format: 'json'
}
@@ -517,7 +513,7 @@ RSpec.describe Projects::MergeRequests::DiffsController do
it_behaves_like 'serializes diffs with expected arguments' do
let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
- let(:expected_options) { collection_arguments(current_page: 1, total_pages: 1).merge(merge_ref_head_diff: false) }
+ let(:expected_options) { collection_arguments(total_pages: 20).merge(merge_ref_head_diff: false) }
end
it_behaves_like 'successful request'
@@ -557,7 +553,7 @@ RSpec.describe Projects::MergeRequests::DiffsController do
it_behaves_like 'serializes diffs with expected arguments' do
let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
let(:expected_options) do
- collection_arguments(current_page: 1, total_pages: 1)
+ collection_arguments(total_pages: 20)
end
end
@@ -576,18 +572,18 @@ RSpec.describe Projects::MergeRequests::DiffsController do
it_behaves_like 'serializes diffs with expected arguments' do
let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
- let(:expected_options) { collection_arguments(current_page: 1, total_pages: 1) }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
end
it_behaves_like 'successful request'
end
context 'with smaller diff batch params' do
- subject { go(page: 2, per_page: 5) }
+ subject { go(page: 5, per_page: 5) }
it_behaves_like 'serializes diffs with expected arguments' do
let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
- let(:expected_options) { collection_arguments(current_page: 2, next_page: 3, total_pages: 4) }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
end
it_behaves_like 'successful request'
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index d4c52e1c7ca..7b5a58fe2e5 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -860,6 +860,20 @@ RSpec.describe Projects::MergeRequestsController do
end
end
end
+
+ context 'with pagination' do
+ before do
+ create(:ci_pipeline, project: merge_request.source_project, ref: merge_request.source_branch, sha: merge_request.diff_head_sha)
+ end
+
+ it 'paginates the result' do
+ allow(Ci::Pipeline).to receive(:default_per_page).and_return(1)
+
+ get :pipelines, params: { namespace_id: project.namespace.to_param, project_id: project, id: merge_request.iid }, format: :json
+
+ expect(json_response['pipelines'].count).to eq(1)
+ end
+ end
end
describe 'GET context commits' do
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index a80c5fa82f6..2379ff9fd98 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -66,6 +66,14 @@ RSpec.describe Projects::PipelinesController do
expect(json_response['pipelines'][0]).not_to include('coverage')
end
+ it 'paginates the result' do
+ allow(Ci::Pipeline).to receive(:default_per_page).and_return(2)
+
+ get_pipelines_index_json
+
+ check_pipeline_response(returned: 2, all: 6)
+ end
+
context 'when performing gitaly calls', :request_store do
it 'limits the Gitaly requests' do
# Isolate from test preparation (Repository#exists? is also cached in RequestStore)
@@ -284,10 +292,6 @@ RSpec.describe Projects::PipelinesController do
subject { project.namespace }
- context 'pipeline_empty_state_templates experiment' do
- it_behaves_like 'tracks assignment and records the subject', :pipeline_empty_state_templates, :namespace
- end
-
context 'code_quality_walkthrough experiment' do
it_behaves_like 'tracks assignment and records the subject', :code_quality_walkthrough, :namespace
end
diff --git a/spec/controllers/projects/prometheus/metrics_controller_spec.rb b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
index c7c3be20f29..5338b77bd08 100644
--- a/spec/controllers/projects/prometheus/metrics_controller_spec.rb
+++ b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
@@ -141,7 +141,7 @@ RSpec.describe Projects::Prometheus::MetricsController do
expect(flash[:notice]).to include('Metric was successfully added.')
- expect(response).to redirect_to(edit_project_service_path(project, PrometheusService))
+ expect(response).to redirect_to(edit_project_service_path(project, ::Integrations::Prometheus))
end
end
@@ -164,7 +164,7 @@ RSpec.describe Projects::Prometheus::MetricsController do
it 'destroys the metric' do
delete :destroy, params: project_params(id: metric.id)
- expect(response).to redirect_to(edit_project_service_path(project, PrometheusService))
+ expect(response).to redirect_to(edit_project_service_path(project, ::Integrations::Prometheus))
expect(PrometheusMetric.find_by(id: metric.id)).to be_nil
end
end
diff --git a/spec/controllers/projects/runners_controller_spec.rb b/spec/controllers/projects/runners_controller_spec.rb
index 39b45a7133c..70ff77d7ff0 100644
--- a/spec/controllers/projects/runners_controller_spec.rb
+++ b/spec/controllers/projects/runners_controller_spec.rb
@@ -111,7 +111,7 @@ RSpec.describe Projects::RunnersController do
expect(response).to have_gitlab_http_status(:unauthorized)
expect(project.shared_runners_enabled).to eq(false)
- expect(json_response['error']).to eq('Cannot enable shared runners because parent group does not allow it')
+ expect(json_response['error']).to eq('Shared runners enabled cannot be enabled because parent group does not allow it')
end
end
end
diff --git a/spec/controllers/projects/service_hook_logs_controller_spec.rb b/spec/controllers/projects/service_hook_logs_controller_spec.rb
index 040e59fc822..9caa4a06b44 100644
--- a/spec/controllers/projects/service_hook_logs_controller_spec.rb
+++ b/spec/controllers/projects/service_hook_logs_controller_spec.rb
@@ -27,6 +27,15 @@ RSpec.describe Projects::ServiceHookLogsController do
specify do
expect(response).to be_successful
end
+
+ it 'renders a 404 if the hook does not exist' do
+ log_params
+ integration.service_hook.destroy!
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
describe 'POST #retry' do
@@ -37,5 +46,14 @@ RSpec.describe Projects::ServiceHookLogsController do
expect_any_instance_of(described_class).to receive(:set_hook_execution_notice)
expect(subject).to redirect_to(edit_project_service_path(project, integration))
end
+
+ it 'renders a 404 if the hook does not exist' do
+ log_params
+ integration.service_hook.destroy!
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
end
diff --git a/spec/controllers/projects/usage_ping_controller_spec.rb b/spec/controllers/projects/service_ping_controller_spec.rb
index 9ace072d561..e6afaadc75f 100644
--- a/spec/controllers/projects/usage_ping_controller_spec.rb
+++ b/spec/controllers/projects/service_ping_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::UsagePingController do
+RSpec.describe Projects::ServicePingController do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb
index f8474ab1082..baf3bde83bd 100644
--- a/spec/controllers/projects/services_controller_spec.rb
+++ b/spec/controllers/projects/services_controller_spec.rb
@@ -6,10 +6,12 @@ RSpec.describe Projects::ServicesController do
include JiraServiceHelper
include AfterNextHelpers
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
- let(:service) { create(:jira_service, project: project) }
- let(:service_params) { { username: 'username', password: 'password', url: 'http://example.com' } }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project) }
+
+ let(:integration) { jira_integration }
+ let(:integration_params) { { username: 'username', password: 'password', url: 'http://example.com' } }
before do
sign_in(user)
@@ -17,9 +19,9 @@ RSpec.describe Projects::ServicesController do
end
describe '#test' do
- context 'when can_test? returns false' do
+ context 'when the integration is not testable' do
it 'renders 404' do
- allow_any_instance_of(Integration).to receive(:can_test?).and_return(false)
+ allow_any_instance_of(Integration).to receive(:testable?).and_return(false)
put :test, params: project_params
@@ -28,10 +30,10 @@ RSpec.describe Projects::ServicesController do
end
context 'when validations fail' do
- let(:service_params) { { active: 'true', url: '' } }
+ let(:integration_params) { { active: 'true', url: '' } }
it 'returns error messages in JSON response' do
- put :test, params: project_params(service: service_params)
+ put :test, params: project_params(service: integration_params)
expect(json_response['message']).to eq 'Validations failed.'
expect(json_response['service_response']).to include "Url can't be blank"
@@ -39,15 +41,17 @@ RSpec.describe Projects::ServicesController do
end
end
- context 'success' do
+ context 'when successful' do
context 'with empty project' do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
+
+ context 'with chat notification integration' do
+ let_it_be(:teams_integration) { project.create_microsoft_teams_integration(webhook: 'http://webhook.com') }
- context 'with chat notification service' do
- let(:service) { project.create_microsoft_teams_service(webhook: 'http://webhook.com') }
+ let(:integration) { teams_integration }
it 'returns success' do
- allow_any_instance_of(::MicrosoftTeams::Notifier).to receive(:ping).and_return(true)
+ allow_next(::MicrosoftTeams::Notifier).to receive(:ping).and_return(true)
put :test, params: project_params
@@ -56,28 +60,28 @@ RSpec.describe Projects::ServicesController do
end
it 'returns success' do
- stub_jira_service_test
+ stub_jira_integration_test
expect(Gitlab::HTTP).to receive(:get).with('/rest/api/2/serverInfo', any_args).and_call_original
- put :test, params: project_params(service: service_params)
+ put :test, params: project_params(service: integration_params)
expect(response).to be_successful
end
end
it 'returns success' do
- stub_jira_service_test
+ stub_jira_integration_test
expect(Gitlab::HTTP).to receive(:get).with('/rest/api/2/serverInfo', any_args).and_call_original
- put :test, params: project_params(service: service_params)
+ put :test, params: project_params(service: integration_params)
expect(response).to be_successful
end
context 'when service is configured for the first time' do
- let(:service_params) do
+ let(:integration_params) do
{
'active' => '1',
'push_events' => '1',
@@ -108,17 +112,17 @@ RSpec.describe Projects::ServicesController do
def do_put
put :test, params: project_params(id: 'buildkite',
- service: service_params)
+ service: integration_params)
end
end
end
- context 'failure' do
+ context 'when unsuccessful' do
it 'returns an error response when the integration test fails' do
stub_request(:get, 'http://example.com/rest/api/2/serverInfo')
.to_return(status: 404)
- put :test, params: project_params(service: service_params)
+ put :test, params: project_params(service: integration_params)
expect(response).to be_successful
expect(json_response).to eq(
@@ -130,7 +134,7 @@ RSpec.describe Projects::ServicesController do
end
context 'with the Slack integration' do
- let_it_be(:service) { build(:slack_service) }
+ let_it_be(:integration) { build(:integrations_slack) }
it 'returns an error response when the URL is blocked' do
put :test, params: project_params(service: { webhook: 'http://127.0.0.1' })
@@ -163,17 +167,17 @@ RSpec.describe Projects::ServicesController do
describe 'PUT #update' do
describe 'as HTML' do
- let(:service_params) { { active: true } }
- let(:params) { project_params(service: service_params) }
+ let(:integration_params) { { active: true } }
+ let(:params) { project_params(service: integration_params) }
let(:message) { 'Jira settings saved and active.' }
- let(:redirect_url) { edit_project_service_path(project, service) }
+ let(:redirect_url) { edit_project_service_path(project, integration) }
before do
put :update, params: params
end
- shared_examples 'service update' do
+ shared_examples 'integration update' do
it 'redirects to the correct url with a flash message' do
expect(response).to redirect_to(redirect_url)
expect(flash[:notice]).to eq(message)
@@ -181,61 +185,61 @@ RSpec.describe Projects::ServicesController do
end
context 'when param `active` is set to true' do
- let(:params) { project_params(service: service_params, redirect_to: redirect) }
+ let(:params) { project_params(service: integration_params, redirect_to: redirect) }
context 'when redirect_to param is present' do
let(:redirect) { '/redirect_here' }
let(:redirect_url) { redirect }
- it_behaves_like 'service update'
+ it_behaves_like 'integration update'
end
context 'when redirect_to is an external domain' do
let(:redirect) { 'http://examle.com' }
- it_behaves_like 'service update'
+ it_behaves_like 'integration update'
end
context 'when redirect_to param is an empty string' do
let(:redirect) { '' }
- it_behaves_like 'service update'
+ it_behaves_like 'integration update'
end
end
context 'when param `active` is set to false' do
- let(:service_params) { { active: false } }
- let(:message) { 'Jira settings saved, but not active.' }
+ let(:integration_params) { { active: false } }
+ let(:message) { 'Jira settings saved, but not active.' }
- it_behaves_like 'service update'
+ it_behaves_like 'integration update'
end
- context 'wehn param `inherit_from_id` is set to empty string' do
- let(:service_params) { { inherit_from_id: '' } }
+ context 'when param `inherit_from_id` is set to empty string' do
+ let(:integration_params) { { inherit_from_id: '' } }
it 'sets inherit_from_id to nil' do
- expect(service.reload.inherit_from_id).to eq(nil)
+ expect(integration.reload.inherit_from_id).to eq(nil)
end
end
- context 'wehn param `inherit_from_id` is set to some value' do
- let(:instance_service) { create(:jira_service, :instance) }
- let(:service_params) { { inherit_from_id: instance_service.id } }
+ context 'when param `inherit_from_id` is set to some value' do
+ let(:instance_service) { create(:jira_integration, :instance) }
+ let(:integration_params) { { inherit_from_id: instance_service.id } }
it 'sets inherit_from_id to value' do
- expect(service.reload.inherit_from_id).to eq(instance_service.id)
+ expect(integration.reload.inherit_from_id).to eq(instance_service.id)
end
end
end
describe 'as JSON' do
before do
- stub_jira_service_test
- put :update, params: project_params(service: service_params, format: :json)
+ stub_jira_integration_test
+ put :update, params: project_params(service: integration_params, format: :json)
end
context 'when update succeeds' do
- let(:service_params) { { url: 'http://example.com' } }
+ let(:integration_params) { { url: 'http://example.com' } }
it 'returns JSON response with no errors' do
expect(response).to be_successful
@@ -244,59 +248,67 @@ RSpec.describe Projects::ServicesController do
end
context 'when update fails' do
- let(:service_params) { { url: '' } }
+ let(:integration_params) { { url: '' } }
it 'returns JSON response with errors' do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response).to include(
'active' => true,
- 'errors' => { 'url' => ['must be a valid URL', %{can't be blank}] }
+ 'errors' => { 'url' => ['must be a valid URL', %(can't be blank)] }
)
end
end
end
- context 'Prometheus service' do
- let!(:service) { create(:prometheus_service, project: project) }
- let(:service_params) { { manual_configuration: '1', api_url: 'http://example.com' } }
+ context 'with Prometheus integration' do
+ let_it_be(:prometheus_integration) { create(:prometheus_integration, project: project) }
- context 'feature flag :settings_operations_prometheus_service is enabled' do
+ let(:integration) { prometheus_integration }
+ let(:integration_params) { { manual_configuration: '1', api_url: 'http://example.com' } }
+
+ context 'when feature flag :settings_operations_prometheus_service is enabled' do
before do
stub_feature_flags(settings_operations_prometheus_service: true)
end
it 'redirects user back to edit page with alert' do
- put :update, params: project_params.merge(service: service_params)
+ put :update, params: project_params.merge(service: integration_params)
- expect(response).to redirect_to(edit_project_service_path(project, service))
- expected_alert = "You can now manage your Prometheus settings on the <a href=\"#{project_settings_operations_path(project)}\">Operations</a> page. Fields on this page has been deprecated."
+ expect(response).to redirect_to(edit_project_service_path(project, integration))
+ expected_alert = [
+ "You can now manage your Prometheus settings on the",
+ %(<a href="#{project_settings_operations_path(project)}">Operations</a> page.),
+ "Fields on this page have been deprecated."
+ ].join(' ')
expect(controller).to set_flash.now[:alert].to(expected_alert)
end
- it 'does not modify service' do
- expect { put :update, params: project_params.merge(service: service_params) }.not_to change { project.prometheus_service.reload.attributes }
+ it 'does not modify integration' do
+ expect { put :update, params: project_params.merge(service: integration_params) }
+ .not_to change { project.prometheus_integration.reload.attributes }
end
end
- context 'feature flag :settings_operations_prometheus_service is disabled' do
+ context 'when feature flag :settings_operations_prometheus_service is disabled' do
before do
stub_feature_flags(settings_operations_prometheus_service: false)
end
- it 'modifies service' do
- expect { put :update, params: project_params.merge(service: service_params) }.to change { project.prometheus_service.reload.attributes }
+ it 'modifies integration' do
+ expect { put :update, params: project_params.merge(service: integration_params) }
+ .to change { project.prometheus_integration.reload.attributes }
end
end
end
end
describe 'GET #edit' do
- context 'Jira service' do
- let(:service_param) { 'jira' }
+ context 'with Jira service' do
+ let(:integration_param) { 'jira' }
before do
- get :edit, params: project_params(id: service_param)
+ get :edit, params: project_params(id: integration_param)
end
context 'with approved services' do
@@ -306,25 +318,30 @@ RSpec.describe Projects::ServicesController do
end
end
- context 'Prometheus service' do
- let(:service_param) { 'prometheus' }
+ context 'with Prometheus service' do
+ let(:integration_param) { 'prometheus' }
- context 'feature flag :settings_operations_prometheus_service is enabled' do
+ context 'when feature flag :settings_operations_prometheus_service is enabled' do
before do
stub_feature_flags(settings_operations_prometheus_service: true)
- get :edit, params: project_params(id: service_param)
+ get :edit, params: project_params(id: integration_param)
end
it 'renders deprecation warning notice' do
- expected_alert = "You can now manage your Prometheus settings on the <a href=\"#{project_settings_operations_path(project)}\">Operations</a> page. Fields on this page has been deprecated."
+ expected_alert = [
+ "You can now manage your Prometheus settings on the",
+ %(<a href="#{project_settings_operations_path(project)}">Operations</a> page.),
+ "Fields on this page have been deprecated."
+ ].join(' ')
+
expect(controller).to set_flash.now[:alert].to(expected_alert)
end
end
- context 'feature flag :settings_operations_prometheus_service is disabled' do
+ context 'when feature flag :settings_operations_prometheus_service is disabled' do
before do
stub_feature_flags(settings_operations_prometheus_service: false)
- get :edit, params: project_params(id: service_param)
+ get :edit, params: project_params(id: integration_param)
end
it 'does not render deprecation warning notice' do
@@ -340,7 +357,7 @@ RSpec.describe Projects::ServicesController do
opts.reverse_merge(
namespace_id: project.namespace,
project_id: project,
- id: service.to_param
+ id: integration.to_param
)
end
end
diff --git a/spec/controllers/projects/settings/access_tokens_controller_spec.rb b/spec/controllers/projects/settings/access_tokens_controller_spec.rb
index 2a7e3d0b322..834a9e276f9 100644
--- a/spec/controllers/projects/settings/access_tokens_controller_spec.rb
+++ b/spec/controllers/projects/settings/access_tokens_controller_spec.rb
@@ -61,6 +61,14 @@ RSpec.describe Projects::Settings::AccessTokensController do
expect { subject }.not_to change { User.count }
end
end
+
+ context 'with custom access level' do
+ let(:access_token_params) { { name: 'Nerd bot', scopes: ["api"], expires_at: Date.today + 1.month, access_level: 20 } }
+
+ subject { post :create, params: { namespace_id: project.namespace, project_id: project }.merge(project_access_token: access_token_params) }
+
+ it_behaves_like 'project access tokens available #create'
+ end
end
describe '#revoke', :sidekiq_inline do
diff --git a/spec/controllers/projects/tree_controller_spec.rb b/spec/controllers/projects/tree_controller_spec.rb
index 8e4e275bdbe..143516e4712 100644
--- a/spec/controllers/projects/tree_controller_spec.rb
+++ b/spec/controllers/projects/tree_controller_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
RSpec.describe Projects::TreeController do
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
+ let(:project) { create(:project, :repository, previous_default_branch: previous_default_branch) }
+ let(:previous_default_branch) { nil }
+ let(:user) { create(:user) }
before do
sign_in(user)
@@ -55,6 +56,20 @@ RSpec.describe Projects::TreeController do
it { is_expected.to respond_with(:not_found) }
end
+ context "renamed default branch, valid file" do
+ let(:id) { 'old-default-branch/encoding/' }
+ let(:previous_default_branch) { 'old-default-branch' }
+
+ it { is_expected.to redirect_to("/#{project.full_path}/-/tree/#{project.default_branch}/encoding/") }
+ end
+
+ context "renamed default branch, invalid file" do
+ let(:id) { 'old-default-branch/invalid-path/' }
+ let(:previous_default_branch) { 'old-default-branch' }
+
+ it { is_expected.to redirect_to("/#{project.full_path}/-/tree/#{project.default_branch}/invalid-path/") }
+ end
+
context "valid empty branch, invalid path" do
let(:id) { 'empty-branch/invalid-path/' }
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index ce229fb861a..46c17d6a6fe 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -119,11 +119,6 @@ RSpec.describe ProjectsController do
get :activity, params: { namespace_id: project.namespace, id: project, format: :json }
expect(json_response['html']).to eq("\n")
- end
-
- it 'filters out invisible event when calculating the count' do
- get :activity, params: { namespace_id: project.namespace, id: project, format: :json }
-
expect(json_response['count']).to eq(0)
end
end
@@ -464,12 +459,6 @@ RSpec.describe ProjectsController do
post :create, params: { project: project_params }
end
-
- it 'tracks a created event for the new_repo experiment', :experiment do
- expect(experiment(:new_repo, :candidate)).to track(:project_created).on_next_instance
-
- post :create, params: { project: project_params }
- end
end
describe 'POST #archive' do
@@ -1484,6 +1473,30 @@ RSpec.describe ProjectsController do
end
end
+ context 'GET show.atom' do
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:event) { create(:event, :commented, project: public_project, target: create(:note, project: public_project)) }
+ let_it_be(:invisible_event) { create(:event, :commented, project: public_project, target: create(:note, :confidential, project: public_project)) }
+
+ it 'filters by calling event.visible_to_user?' do
+ expect(EventCollection).to receive_message_chain(:new, :to_a).and_return([event, invisible_event])
+ expect(event).to receive(:visible_to_user?).and_return(true)
+ expect(invisible_event).to receive(:visible_to_user?).and_return(false)
+
+ get :show, format: :atom, params: { id: public_project, namespace_id: public_project.namespace }
+
+ expect(response).to render_template('xml.atom')
+ expect(assigns(:events)).to eq([event])
+ end
+
+ it 'filters by calling event.visible_to_user?' do
+ get :show, format: :atom, params: { id: public_project, namespace_id: public_project.namespace }
+
+ expect(response).to render_template('xml.atom')
+ expect(assigns(:events)).to eq([event])
+ end
+ end
+
describe 'GET resolve' do
shared_examples 'resolvable endpoint' do
it 'redirects to the project page' do
diff --git a/spec/controllers/registrations/experience_levels_controller_spec.rb b/spec/controllers/registrations/experience_levels_controller_spec.rb
index 6b8ab3ec715..ad145264bb8 100644
--- a/spec/controllers/registrations/experience_levels_controller_spec.rb
+++ b/spec/controllers/registrations/experience_levels_controller_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Registrations::ExperienceLevelsController do
end
it { is_expected.to have_gitlab_http_status(:ok) }
- it { is_expected.to render_template('layouts/signup_onboarding') }
+ it { is_expected.to render_template('layouts/minimal') }
it { is_expected.to render_template(:show) }
end
end
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 81486c310d4..72aa9038c3e 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -187,38 +187,6 @@ RSpec.describe RegistrationsController do
end
end
- context 'when it is part of our invite_signup_page_interaction experiment', :experiment do
- let_it_be(:member) { create(:project_member, :invited, invite_email: user_params.dig(:user, :email)) }
-
- let(:originating_member_id) { member.id }
- let(:session_params) do
- {
- invite_email: user_params.dig(:user, :email),
- originating_member_id: originating_member_id
- }
- end
-
- context 'when member exists from the session key value' do
- it 'tracks the experiment' do
- expect(experiment(:invite_signup_page_interaction)).to track(:form_submission)
- .with_context(actor: member)
- .on_next_instance
-
- subject
- end
- end
-
- context 'when member does not exist from the session key value' do
- let(:originating_member_id) { -1 }
-
- it 'tracks the experiment' do
- expect(experiment(:invite_signup_page_interaction)).not_to track(:form_submission)
-
- subject
- end
- end
- end
-
context 'when invite email matches email used on registration' do
let(:session_params) { { invite_email: user_params.dig(:user, :email) } }
diff --git a/spec/controllers/repositories/git_http_controller_spec.rb b/spec/controllers/repositories/git_http_controller_spec.rb
index 4eede594bb9..04d5008cb34 100644
--- a/spec/controllers/repositories/git_http_controller_spec.rb
+++ b/spec/controllers/repositories/git_http_controller_spec.rb
@@ -34,18 +34,6 @@ RSpec.describe Repositories::GitHttpController do
end
end
- context 'when project_statistics_sync feature flag is disabled' do
- before do
- stub_feature_flags(project_statistics_sync: false, disable_git_http_fetch_writes: false)
- end
-
- it 'updates project statistics async for projects' do
- expect(ProjectDailyStatisticsWorker).to receive(:perform_async)
-
- send_request
- end
- end
-
it 'updates project statistics sync for projects' do
stub_feature_flags(disable_git_http_fetch_writes: false)
diff --git a/spec/controllers/root_controller_spec.rb b/spec/controllers/root_controller_spec.rb
index 01ff646274a..dbf1b3baf25 100644
--- a/spec/controllers/root_controller_spec.rb
+++ b/spec/controllers/root_controller_spec.rb
@@ -128,11 +128,31 @@ RSpec.describe RootController do
end
end
- context 'who uses the default dashboard setting' do
- it 'renders the default dashboard' do
- get :index
+ context 'who uses the default dashboard setting', :aggregate_failures do
+ render_views
+
+ context 'with customize homepage banner' do
+ it 'renders the default dashboard' do
+ get :index
+
+ expect(response).to render_template 'root/index'
+ expect(response.body).to have_css('.js-customize-homepage-banner')
+ end
+ end
+
+ context 'without customize homepage banner' do
+ before do
+ Users::DismissUserCalloutService.new(
+ container: nil, current_user: user, params: { feature_name: UserCalloutsHelper::CUSTOMIZE_HOMEPAGE }
+ ).execute
+ end
+
+ it 'renders the default dashboard' do
+ get :index
- expect(response).to render_template 'dashboard/projects/index'
+ expect(response).to render_template 'root/index'
+ expect(response.body).not_to have_css('.js-customize-homepage-banner')
+ end
end
end
end
diff --git a/spec/controllers/users/terms_controller_spec.rb b/spec/controllers/users/terms_controller_spec.rb
index 0acc3008187..30b8eb5a958 100644
--- a/spec/controllers/users/terms_controller_spec.rb
+++ b/spec/controllers/users/terms_controller_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Users::TermsController do
include TermsHelper
let_it_be(:user) { create(:user) }
+
let(:term) { create(:term) }
before do
diff --git a/spec/controllers/users/unsubscribes_controller_spec.rb b/spec/controllers/users/unsubscribes_controller_spec.rb
new file mode 100644
index 00000000000..5670c951e59
--- /dev/null
+++ b/spec/controllers/users/unsubscribes_controller_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::UnsubscribesController do
+ let!(:user) { create :user, email: 'me@example.com' }
+
+ describe "show" do
+ it "responds with success" do
+ get :show, params: { email: Base64.urlsafe_encode64('me@example.com') }
+
+ assert_response :success
+ end
+
+ it "behaves the same if email address isn't known in the system" do
+ get :show, params: { email: Base64.urlsafe_encode64('i@dont_exists.com') }
+
+ assert_response :success
+ end
+ end
+
+ describe "create" do
+ it "unsubscribes the connected user" do
+ post :create, params: { email: Base64.urlsafe_encode64('me@example.com') }
+
+ assert user.reload.admin_email_unsubscribed_at
+ end
+
+  # Don't reveal whether the email address exists in the system
+ it "behaves the same if email address isn't known in the system" do
+ post :create, params: { email: Base64.urlsafe_encode64('i@dont_exists.com') }
+
+ assert_response :redirect
+ end
+ end
+end
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 8b02cfa30ab..18f2f7b54c4 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe 'Database schema' do
compliance_management_frameworks: %w[group_id],
commit_user_mentions: %w[commit_id],
deploy_keys_projects: %w[deploy_key_id],
- deployments: %w[deployable_id environment_id user_id],
+ deployments: %w[deployable_id user_id],
draft_notes: %w[discussion_id commit_id],
epics: %w[updated_by_id last_edited_by_id state_id],
events: %w[target_id],
@@ -56,6 +56,7 @@ RSpec.describe 'Database schema' do
ldap_group_links: %w[group_id],
members: %w[source_id created_by_id],
merge_requests: %w[last_edited_by_id state_id],
+ merge_request_diff_commits: %w[commit_author_id committer_id],
namespaces: %w[owner_id parent_id],
notes: %w[author_id commit_id noteable_id updated_by_id resolved_by_id confirmed_by_id discussion_id],
notification_settings: %w[source_id],
@@ -264,6 +265,18 @@ RSpec.describe 'Database schema' do
end
end
+ context 'index names' do
+ it 'disallows index names with a _ccnew[0-9]* suffix' do
+ # During REINDEX operations, Postgres generates a temporary index with a _ccnew[0-9]* suffix
+ # Such indexes are considered temporary and are subject to removal if they stick around for too long. See Gitlab::Database::Reindexing.
+ #
+ # Hence we disallow adding permanent indexes with this suffix.
+ problematic_indexes = Gitlab::Database::PostgresIndex.match("#{Gitlab::Database::Reindexing::ReindexConcurrently::TEMPORARY_INDEX_PATTERN}$").all
+
+ expect(problematic_indexes).to be_empty
+ end
+ end
+
private
def retrieve_columns_name_with_jsonb
diff --git a/spec/deprecation_toolkit_env.rb b/spec/deprecation_toolkit_env.rb
index 00d66ff3cdc..8e06dcb8c75 100644
--- a/spec/deprecation_toolkit_env.rb
+++ b/spec/deprecation_toolkit_env.rb
@@ -55,11 +55,8 @@ module DeprecationToolkitEnv
# one by one
def self.allowed_kwarg_warning_paths
%w[
- activerecord-6.0.3.7/lib/active_record/migration.rb
- activesupport-6.0.3.7/lib/active_support/cache.rb
- activerecord-6.0.3.7/lib/active_record/relation.rb
asciidoctor-2.0.12/lib/asciidoctor/extensions.rb
- attr_encrypted-3.1.0/lib/attr_encrypted/adapters/active_record.rb
+ gitlab-labkit-0.20.0/lib/labkit/correlation/grpc/client_interceptor.rb
]
end
diff --git a/spec/experiments/application_experiment_spec.rb b/spec/experiments/application_experiment_spec.rb
index 22c436e4159..9c03910cf66 100644
--- a/spec/experiments/application_experiment_spec.rb
+++ b/spec/experiments/application_experiment_spec.rb
@@ -3,11 +3,10 @@
require 'spec_helper'
RSpec.describe ApplicationExperiment, :experiment do
- subject { described_class.new('namespaced/stub') }
+ subject { described_class.new('namespaced/stub', **context) }
- let(:feature_definition) do
- { name: 'namespaced_stub', type: 'experiment', group: 'group::adoption', default_enabled: false }
- end
+ let(:context) { {} }
+ let(:feature_definition) { { name: 'namespaced_stub', type: 'experiment', default_enabled: false } }
around do |example|
Feature::Definition.definitions[:namespaced_stub] = Feature::Definition.new('namespaced_stub.yml', feature_definition)
@@ -19,19 +18,13 @@ RSpec.describe ApplicationExperiment, :experiment do
allow(subject).to receive(:enabled?).and_return(true)
end
- it "naively assumes a 1x1 relationship to feature flags for tests" do
- expect(Feature).to receive(:persist_used!).with('namespaced_stub')
-
- described_class.new('namespaced/stub')
- end
-
it "doesn't raise an exception without a defined control" do
# because we have a default behavior defined
expect { experiment('namespaced/stub') { } }.not_to raise_error
end
- describe "enabled" do
+ describe "#enabled?" do
before do
allow(subject).to receive(:enabled?).and_call_original
@@ -63,103 +56,104 @@ RSpec.describe ApplicationExperiment, :experiment do
end
end
- describe "publishing results" do
- it "doesn't record, track or push data to the client if we shouldn't track", :snowplow do
+ describe "#publish" do
+ it "doesn't track or publish to the client or database if we can't track", :snowplow do
allow(subject).to receive(:should_track?).and_return(false)
- subject.record!
- expect(subject).not_to receive(:record_experiment)
- expect(subject).not_to receive(:track)
- expect(Gon).not_to receive(:push)
+ expect(subject).not_to receive(:publish_to_client)
+ expect(subject).not_to receive(:publish_to_database)
- subject.publish(:action)
+ subject.publish
expect_no_snowplow_event
end
- describe 'recording the experiment' do
- it 'does not record the experiment if we do not tell it to' do
- expect(subject).not_to receive(:record_experiment)
-
- subject.publish
- end
-
- it 'records the experiment if we tell it to' do
- subject.record!
-
- expect(subject).to receive(:record_experiment)
-
- subject.publish
- end
- end
-
it "tracks the assignment" do
expect(subject).to receive(:track).with(:assignment)
subject.publish
end
- it "pushes the experiment knowledge into the client using Gon" do
- expect(Gon).to receive(:push).with({ experiment: { 'namespaced/stub' => subject.signature } }, true)
+ it "publishes the to the client" do
+ expect(subject).to receive(:publish_to_client)
subject.publish
end
- it "handles when Gon raises exceptions (like when it can't be pushed into)" do
- expect(Gon).to receive(:push).and_raise(NoMethodError)
+ it "publishes to the database if we've opted for that" do
+ subject.record!
+
+ expect(subject).to receive(:publish_to_database)
- expect { subject.publish }.not_to raise_error
+ subject.publish
end
- end
- it "can exclude from within the block" do
- expect(described_class.new('namespaced/stub') { |e| e.exclude! }).to be_excluded
- end
+ describe "#publish_to_client" do
+ it "adds the data into Gon" do
+ signature = { key: '86208ac54ca798e11f127e8b23ec396a', variant: 'control' }
+ expect(Gon).to receive(:push).with({ experiment: { 'namespaced/stub' => hash_including(signature) } }, true)
- describe 'recording the experiment subject' do
- using RSpec::Parameterized::TableSyntax
+ subject.publish_to_client
+ end
- subject { described_class.new('namespaced/stub', nil, **context) }
+ it "handles when Gon raises exceptions (like when it can't be pushed into)" do
+ expect(Gon).to receive(:push).and_raise(NoMethodError)
- before do
- subject.record!
+ expect { subject.publish_to_client }.not_to raise_error
+ end
end
- context 'when providing a compatible context' do
- where(:context_key, :object_type) do
- :namespace | :namespace
- :group | :namespace
- :project | :project
- :user | :user
- :actor | :user
+ describe "#publish_to_database" do
+ using RSpec::Parameterized::TableSyntax
+ let(:context) { { context_key => context_value }}
+
+ before do
+ subject.record!
end
- with_them do
- let(:context) { { context_key => build(object_type) }}
+ context "when there's a usable subject" do
+ where(:context_key, :context_value, :object_type) do
+ :namespace | build(:namespace) | :namespace
+ :group | build(:namespace) | :namespace
+ :project | build(:project) | :project
+ :user | build(:user) | :user
+ :actor | build(:user) | :user
+ end
- it 'records the experiment and the experiment subject from the context' do
- expect { subject.publish }.to change(Experiment, :count).by(1)
+ with_them do
+ it "creates an experiment and experiment subject record" do
+ expect { subject.publish_to_database }.to change(Experiment, :count).by(1)
- expect(Experiment.last.name).to eq('namespaced/stub')
- expect(ExperimentSubject.last.send(object_type)).to eq(context[context_key])
+ expect(Experiment.last.name).to eq('namespaced/stub')
+ expect(ExperimentSubject.last.send(object_type)).to eq(context[context_key])
+ end
end
end
- end
- context 'when providing an incompatible or no context' do
- where(context_hash: [{ foo: :bar }, {}])
+ context "when there's not a usable subject" do
+ where(:context_key, :context_value) do
+ :namespace | nil
+ :foo | :bar
+ end
- with_them do
- let(:context) { context_hash }
+ with_them do
+ it "doesn't create an experiment record" do
+ expect { subject.publish_to_database }.not_to change(Experiment, :count)
+ end
- it 'does not record the experiment' do
- expect { subject.publish }.not_to change(Experiment, :count)
+ it "doesn't create an experiment subject record" do
+ expect { subject.publish_to_database }.not_to change(ExperimentSubject, :count)
+ end
end
end
end
end
- describe "tracking events", :snowplow do
+ describe "#track", :snowplow do
+ let(:fake_context) do
+ SnowplowTracker::SelfDescribingJson.new('iglu:com.gitlab/fake/jsonschema/0-0-0', { data: '_data_' })
+ end
+
it "doesn't track if we shouldn't track" do
allow(subject).to receive(:should_track?).and_return(false)
@@ -169,9 +163,7 @@ RSpec.describe ApplicationExperiment, :experiment do
end
it "tracks the event with the expected arguments and merged contexts" do
- subject.track(:action, property: '_property_', context: [
- SnowplowTracker::SelfDescribingJson.new('iglu:com.gitlab/fake/jsonschema/0-0-0', { data: '_data_' })
- ])
+ subject.track(:action, property: '_property_', context: [fake_context])
expect_snowplow_event(
category: 'namespaced/stub',
@@ -189,9 +181,35 @@ RSpec.describe ApplicationExperiment, :experiment do
]
)
end
+
+ it "tracks the event correctly even when using the base class" do
+ subject = Gitlab::Experiment.new(:unnamed)
+ subject.track(:action, context: [fake_context])
+
+ expect_snowplow_event(
+ category: 'unnamed',
+ action: 'action',
+ context: [
+ {
+ schema: 'iglu:com.gitlab/fake/jsonschema/0-0-0',
+ data: { data: '_data_' }
+ },
+ {
+ schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0',
+ data: { experiment: 'unnamed', key: subject.context.key, variant: 'control' }
+ }
+ ]
+ )
+ end
+ end
+
+ describe "#key_for" do
+ it "generates MD5 hashes" do
+ expect(subject.key_for(foo: :bar)).to eq('6f9ac12afdb9b58c2f19a136d09f9153')
+ end
end
- describe "variant resolution" do
+ context "when resolving variants" do
it "uses the default value as specified in the yaml" do
expect(Feature).to receive(:enabled?).with('namespaced_stub', subject, type: :experiment, default_enabled: :yaml)
diff --git a/spec/experiments/new_project_readme_content_experiment_spec.rb b/spec/experiments/new_project_readme_content_experiment_spec.rb
new file mode 100644
index 00000000000..92a883078df
--- /dev/null
+++ b/spec/experiments/new_project_readme_content_experiment_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe NewProjectReadmeContentExperiment, :experiment do
+ subject { described_class.new(namespace: project.namespace) }
+
+ let(:project) { create(:project, name: 'Experimental', description: 'An experiment project') }
+
+ it "renders the basic README" do
+ expect(subject.run_with(project)).to eq(<<~MARKDOWN.strip)
+ # Experimental
+
+ An experiment project
+ MARKDOWN
+ end
+
+ describe "the advanced variant" do
+ let(:markdown) { subject.run_with(project, variant: :advanced) }
+ let(:initial_url) { 'https://docs.gitlab.com/ee/user/project/repository/web_editor.html#create-a-file' }
+
+ it "renders the project details" do
+ expect(markdown).to include(<<~MARKDOWN.strip)
+ # Experimental
+
+ An experiment project
+
+ ## Getting started
+ MARKDOWN
+ end
+
+ it "renders redirect URLs" do
+ expect(markdown).to include(Rails.application.routes.url_helpers.experiment_redirect_url(subject, initial_url))
+ end
+ end
+end
diff --git a/spec/factories/audit_events.rb b/spec/factories/audit_events.rb
index 05b86d2f13b..10f60591922 100644
--- a/spec/factories/audit_events.rb
+++ b/spec/factories/audit_events.rb
@@ -51,6 +51,7 @@ FactoryBot.define do
trait :unauthenticated do
author_id { -1 }
+ author_name { 'An unauthenticated user' }
details do
{
custom_message: 'Custom action',
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 395d3ea598c..0e535aeaa8d 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -508,6 +508,14 @@ FactoryBot.define do
end
end
+ trait :cluster_image_scanning do
+ options do
+ {
+ artifacts: { reports: { cluster_image_scanning: 'gl-cluster-image-scanning-report.json' } }
+ }
+ end
+ end
+
trait :license_scanning do
options do
{
diff --git a/spec/factories/ci/pending_builds.rb b/spec/factories/ci/pending_builds.rb
new file mode 100644
index 00000000000..90779ae8ab9
--- /dev/null
+++ b/spec/factories/ci/pending_builds.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_pending_build, class: 'Ci::PendingBuild' do
+ build factory: :ci_build
+ project
+ protected { build.protected }
+ instance_runners_enabled { true }
+ end
+end
diff --git a/spec/factories/ci/reports/security/identifiers.rb b/spec/factories/ci/reports/security/identifiers.rb
new file mode 100644
index 00000000000..5211cb5c54a
--- /dev/null
+++ b/spec/factories/ci/reports/security/identifiers.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_reports_security_identifier, class: '::Gitlab::Ci::Reports::Security::Identifier' do
+ external_id { 'PREDICTABLE_RANDOM' }
+ external_type { 'find_sec_bugs_type' }
+ name { "#{external_type}-#{external_id}" }
+
+ skip_create
+
+ initialize_with do
+ ::Gitlab::Ci::Reports::Security::Identifier.new(**attributes)
+ end
+ end
+end
diff --git a/spec/factories/ci/reports/security/links.rb b/spec/factories/ci/reports/security/links.rb
new file mode 100644
index 00000000000..77af827e7be
--- /dev/null
+++ b/spec/factories/ci/reports/security/links.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_reports_security_link, class: '::Gitlab::Ci::Reports::Security::Link' do
+ name { 'CVE-2020-0202' }
+ url { 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-0202' }
+
+ skip_create
+
+ initialize_with do
+ ::Gitlab::Ci::Reports::Security::Link.new(**attributes)
+ end
+ end
+end
diff --git a/spec/factories/ci/reports/security/scanners.rb b/spec/factories/ci/reports/security/scanners.rb
new file mode 100644
index 00000000000..8b68ebdb47a
--- /dev/null
+++ b/spec/factories/ci/reports/security/scanners.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_reports_security_scanner, class: '::Gitlab::Ci::Reports::Security::Scanner' do
+ external_id { 'find_sec_bugs' }
+ name { 'Find Security Bugs' }
+ vendor { 'Security Scanner Vendor' }
+ version { '1.0.0' }
+
+ skip_create
+
+ initialize_with do
+ ::Gitlab::Ci::Reports::Security::Scanner.new(**attributes)
+ end
+ end
+
+ factory :ci_reports_security_scan, class: '::Gitlab::Ci::Reports::Security::Scan' do
+ status { 'success' }
+ type { 'sast' }
+ start_time { 'placeholder' }
+ end_time { 'placeholder' }
+
+ skip_create
+
+ initialize_with do
+ ::Gitlab::Ci::Reports::Security::Scan.new(attributes)
+ end
+ end
+end
diff --git a/spec/factories/ci/running_builds.rb b/spec/factories/ci/running_builds.rb
new file mode 100644
index 00000000000..96d2e6913f4
--- /dev/null
+++ b/spec/factories/ci/running_builds.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_running_build, class: 'Ci::RunningBuild' do
+ build factory: :ci_build
+ project
+ runner factory: :ci_runner
+ runner_type { runner.runner_type }
+ end
+end
diff --git a/spec/factories/events.rb b/spec/factories/events.rb
index c9e4ada3ffa..d182dc9f95f 100644
--- a/spec/factories/events.rb
+++ b/spec/factories/events.rb
@@ -17,7 +17,6 @@ FactoryBot.define do
trait(:left) { action { :left } }
trait(:destroyed) { action { :destroyed } }
trait(:expired) { action { :expired } }
- trait(:archived) { action { :archived } }
trait(:approved) { action { :approved } }
factory :closed_issue_event do
diff --git a/spec/factories/integration_data.rb b/spec/factories/integration_data.rb
index a6b2693b8df..a7406794437 100644
--- a/spec/factories/integration_data.rb
+++ b/spec/factories/integration_data.rb
@@ -4,7 +4,7 @@
# The factories are used when creating integrations.
FactoryBot.define do
factory :jira_tracker_data, class: 'Integrations::JiraTrackerData' do
- integration factory: :jira_service
+ integration factory: :jira_integration
end
factory :issue_tracker_data, class: 'Integrations::IssueTrackerData' do
diff --git a/spec/factories/integrations.rb b/spec/factories/integrations.rb
index 1dd2839aa46..ed8a562b331 100644
--- a/spec/factories/integrations.rb
+++ b/spec/factories/integrations.rb
@@ -27,7 +27,7 @@ FactoryBot.define do
end
end
- factory :prometheus_service do
+ factory :prometheus_integration, class: 'Integrations::Prometheus' do
project
active { true }
properties do
@@ -45,7 +45,7 @@ FactoryBot.define do
token { 'test' }
end
- factory :jira_service, class: 'Integrations::Jira' do
+ factory :jira_integration, class: 'Integrations::Jira' do
project
active { true }
type { 'JiraService' }
@@ -91,13 +91,13 @@ FactoryBot.define do
issue_tracker
end
- factory :redmine_service, class: 'Integrations::Redmine' do
+ factory :redmine_integration, class: 'Integrations::Redmine' do
project
active { true }
issue_tracker
end
- factory :youtrack_service, class: 'Integrations::Youtrack' do
+ factory :youtrack_integration, class: 'Integrations::Youtrack' do
project
active { true }
issue_tracker
@@ -160,20 +160,21 @@ FactoryBot.define do
password { 'my-secret-password' }
end
- factory :slack_service, class: 'Integrations::Slack' do
+ # avoids conflict with slack_integration factory
+ factory :integrations_slack, class: 'Integrations::Slack' do
project
active { true }
webhook { 'https://slack.service.url' }
type { 'SlackService' }
end
- factory :slack_slash_commands_service, class: 'Integrations::SlackSlashCommands' do
+ factory :slack_slash_commands_integration, class: 'Integrations::SlackSlashCommands' do
project
active { true }
type { 'SlackSlashCommandsService' }
end
- factory :pipelines_email_service, class: 'Integrations::PipelinesEmail' do
+ factory :pipelines_email_integration, class: 'Integrations::PipelinesEmail' do
project
active { true }
type { 'PipelinesEmailService' }
diff --git a/spec/factories/merge_request_cleanup_schedules.rb b/spec/factories/merge_request_cleanup_schedules.rb
index a89d0c88731..ecf0d5818e4 100644
--- a/spec/factories/merge_request_cleanup_schedules.rb
+++ b/spec/factories/merge_request_cleanup_schedules.rb
@@ -3,6 +3,19 @@
FactoryBot.define do
factory :merge_request_cleanup_schedule, class: 'MergeRequest::CleanupSchedule' do
merge_request
- scheduled_at { Time.current }
+ scheduled_at { 1.day.ago }
+
+ trait :running do
+ status { MergeRequest::CleanupSchedule::STATUSES[:running] }
+ end
+
+ trait :completed do
+ status { MergeRequest::CleanupSchedule::STATUSES[:completed] }
+ completed_at { Time.current }
+ end
+
+ trait :failed do
+ status { MergeRequest::CleanupSchedule::STATUSES[:failed] }
+ end
end
end
diff --git a/spec/factories/merge_request_diff_commit_users.rb b/spec/factories/merge_request_diff_commit_users.rb
new file mode 100644
index 00000000000..94bd358454c
--- /dev/null
+++ b/spec/factories/merge_request_diff_commit_users.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :merge_request_diff_commit_user, class: 'MergeRequest::DiffCommitUser' do
+ name { generate(:name) }
+ email { generate(:email) }
+ end
+end
diff --git a/spec/factories/packages.rb b/spec/factories/packages.rb
index cedda8d0854..cd9c8a8bfbb 100644
--- a/spec/factories/packages.rb
+++ b/spec/factories/packages.rb
@@ -162,6 +162,12 @@ FactoryBot.define do
pkg.nuget_metadatum = build(:nuget_metadatum)
end
end
+
+ trait(:with_symbol_package) do
+ after :create do |package|
+ create :package_file, :snupkg, package: package, file_name: "#{package.name}.#{package.version}.snupkg"
+ end
+ end
end
factory :pypi_package do
diff --git a/spec/factories/packages/helm/file_metadatum.rb b/spec/factories/packages/helm/file_metadatum.rb
index e809f592546..cbc7e114ef6 100644
--- a/spec/factories/packages/helm/file_metadatum.rb
+++ b/spec/factories/packages/helm/file_metadatum.rb
@@ -3,7 +3,7 @@
FactoryBot.define do
factory :helm_file_metadatum, class: 'Packages::Helm::FileMetadatum' do
package_file { association(:helm_package_file, without_loaded_metadatum: true) }
- channel { 'stable' }
+ sequence(:channel) { |n| "#{FFaker::Lorem.word}-#{n}" }
metadata { { 'name': package_file.package.name, 'version': package_file.package.version, 'apiVersion': 'v2' } }
end
end
diff --git a/spec/factories/packages/package_file.rb b/spec/factories/packages/package_file.rb
index d82fbe02311..ac121da432c 100644
--- a/spec/factories/packages/package_file.rb
+++ b/spec/factories/packages/package_file.rb
@@ -205,6 +205,7 @@ FactoryBot.define do
package { association(:helm_package, without_package_files: true) }
file_name { "#{package.name}-#{package.version}.tgz" }
file_fixture { "spec/fixtures/packages/helm/rook-ceph-v1.5.8.tgz" }
+ file_sha256 { 'fd2b2fa0329e80a2a602c2bb3b40608bcd6ee5cf96cf46fd0d2800a4c129c9db' }
transient do
without_loaded_metadatum { false }
@@ -271,6 +272,14 @@ FactoryBot.define do
size { 300.kilobytes }
end
+ trait(:snupkg) do
+ package
+ file_fixture { 'spec/fixtures/packages/nuget/package.snupkg' }
+ file_name { 'package.snupkg' }
+ file_sha1 { '5fe852b2a6abd96c22c11fa1ff2fb19d9ce58b57' }
+ size { 300.kilobytes }
+ end
+
trait(:gem) do
package
file_fixture { 'spec/fixtures/packages/rubygems/package-0.0.1.gem' }
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 6641d8749f9..84686c58a8e 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -396,24 +396,24 @@ FactoryBot.define do
factory :redmine_project, parent: :project do
has_external_issue_tracker { true }
- redmine_service
+ redmine_integration
end
factory :youtrack_project, parent: :project do
has_external_issue_tracker { true }
- youtrack_service
+ youtrack_integration
end
factory :jira_project, parent: :project do
has_external_issue_tracker { true }
- jira_service
+ jira_integration
end
factory :prometheus_project, parent: :project do
after :create do |project|
- project.create_prometheus_service(
+ project.create_prometheus_integration(
active: true,
properties: {
api_url: 'https://prometheus.example.com/',
diff --git a/spec/factories/usage_data.rb b/spec/factories/usage_data.rb
index 2aa926e4dd8..4593294fd14 100644
--- a/spec/factories/usage_data.rb
+++ b/spec/factories/usage_data.rb
@@ -9,16 +9,16 @@ FactoryBot.define do
projects << create(:project, :repository)
group = create(:group)
create(:board, project: projects[0])
- create(:jira_service, project: projects[0])
- create(:jira_service, :without_properties_callback, project: projects[1])
- create(:jira_service, :jira_cloud_service, project: projects[2])
- create(:jira_service, :without_properties_callback, project: projects[3], properties: { url: 'https://mysite.atlassian.net' })
+ create(:jira_integration, project: projects[0])
+ create(:jira_integration, :without_properties_callback, project: projects[1])
+ create(:jira_integration, :jira_cloud_service, project: projects[2])
+ create(:jira_integration, :without_properties_callback, project: projects[3], properties: { url: 'https://mysite.atlassian.net' })
jira_label = create(:label, project: projects[0])
create(:jira_import_state, :finished, project: projects[0], label: jira_label, failed_to_import_count: 2, imported_issues_count: 7, total_issue_count: 9)
create(:jira_import_state, :finished, project: projects[1], label: jira_label, imported_issues_count: 3, total_issue_count: 3)
create(:jira_import_state, :finished, project: projects[1], label: jira_label, imported_issues_count: 3)
create(:jira_import_state, :scheduled, project: projects[1], label: jira_label)
- create(:prometheus_service, project: projects[1])
+ create(:prometheus_integration, project: projects[1])
create(:service, project: projects[1], type: 'JenkinsService', active: true)
create(:service, project: projects[0], type: 'SlackSlashCommandsService', active: true)
create(:service, project: projects[1], type: 'SlackService', active: true)
diff --git a/spec/fast_spec_helper.rb b/spec/fast_spec_helper.rb
index 2f0bcd318d9..cd20019115d 100644
--- a/spec/fast_spec_helper.rb
+++ b/spec/fast_spec_helper.rb
@@ -20,5 +20,15 @@ require 'active_support/all'
ActiveSupport::Dependencies.autoload_paths << 'lib'
ActiveSupport::Dependencies.autoload_paths << 'ee/lib'
+ActiveSupport::Dependencies.autoload_paths << 'jh/lib'
ActiveSupport::XmlMini.backend = 'Nokogiri'
+
+RSpec.configure do |config|
+ unless ENV['CI']
+ # Allow running `:focus` examples locally,
+ # falling back to all tests when there is no `:focus` example.
+ config.filter_run focus: true
+ config.run_all_when_everything_filtered = true
+ end
+end
diff --git a/spec/features/admin/admin_appearance_spec.rb b/spec/features/admin/admin_appearance_spec.rb
index 5596ad7bf21..b96762ec6ad 100644
--- a/spec/features/admin/admin_appearance_spec.rb
+++ b/spec/features/admin/admin_appearance_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe 'Admin Appearance' do
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
expect_custom_new_project_appearance(appearance)
end
diff --git a/spec/features/admin/admin_dev_ops_report_spec.rb b/spec/features/admin/admin_dev_ops_report_spec.rb
index 33f984af807..8f1960b681c 100644
--- a/spec/features/admin/admin_dev_ops_report_spec.rb
+++ b/spec/features/admin/admin_dev_ops_report_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe 'DevOps Report page', :js do
it 'shows empty state' do
visit admin_dev_ops_report_path
- expect(page).to have_selector(".js-empty-state")
+ expect(page).to have_text('Service ping is off')
end
it 'hides the intro callout' do
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index f9673a8aa2f..7d7b2baf941 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -132,6 +132,19 @@ RSpec.describe 'Admin Groups' do
expect(page).to have_text(note_text)
end
+
+ context 'when group has open access requests' do
+ let!(:access_request) { create(:group_member, :access_request, group: group) }
+
+ it 'shows access requests with link to manage access' do
+ visit admin_group_path(group)
+
+ page.within '[data-testid="access-requests"]' do
+ expect(page).to have_content access_request.user.name
+ expect(page).to have_link 'Manage access', href: group_group_members_path(group, tab: 'access_requests')
+ end
+ end
+ end
end
describe 'group edit' do
diff --git a/spec/features/admin/admin_mode/workers_spec.rb b/spec/features/admin/admin_mode/workers_spec.rb
index fbbcf19063b..0caa883fb5b 100644
--- a/spec/features/admin/admin_mode/workers_spec.rb
+++ b/spec/features/admin/admin_mode/workers_spec.rb
@@ -4,6 +4,8 @@ require 'spec_helper'
# Test an operation that triggers background jobs requiring administrative rights
RSpec.describe 'Admin mode for workers', :request_store do
+ include Spec::Support::Helpers::Features::AdminUsersHelpers
+
let(:user) { create(:user) }
let(:user_to_delete) { create(:user) }
@@ -37,7 +39,8 @@ RSpec.describe 'Admin mode for workers', :request_store do
it 'can delete user', :js do
visit admin_user_path(user_to_delete)
- click_button 'Delete user'
+
+ click_action_in_user_dropdown(user_to_delete.id, 'Delete user')
page.within '.modal-dialog' do
find("input[name='username']").send_keys(user_to_delete.name)
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index cbbe9aa3b8b..15def00f354 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -52,6 +52,8 @@ RSpec.describe "Admin::Projects" do
end
describe "GET /admin/projects/:namespace_id/:id" do
+ let!(:access_request) { create(:project_member, :access_request, project: project) }
+
before do
expect(project).to be_persisted
@@ -67,6 +69,15 @@ RSpec.describe "Admin::Projects" do
expect(page).to have_content(project.creator.name)
expect(page).to have_content(project.id)
end
+
+ context 'when project has open access requests' do
+ it 'shows access requests with link to manage access' do
+ page.within '[data-testid="access-requests"]' do
+ expect(page).to have_content access_request.user.name
+ expect(page).to have_link 'Manage access', href: project_project_members_path(project, tab: 'access_requests')
+ end
+ end
+ end
end
describe 'transfer project' do
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index d7a267fec69..54c07985a21 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -4,8 +4,6 @@ require 'spec_helper'
RSpec.describe "Admin Runners" do
include StubENV
- include FilteredSearchHelpers
- include SortingHelper
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
@@ -14,31 +12,68 @@ RSpec.describe "Admin Runners" do
gitlab_enable_admin_mode_sign_in(admin)
end
- describe "Runners page" do
- let(:pipeline) { create(:ci_pipeline) }
-
- before do
- stub_feature_flags(runner_list_view_vue_ui: false)
- end
+ describe "Runners page", :js do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:project) { create(:project, namespace: namespace, creator: user) }
context "when there are runners" do
it 'has all necessary texts' do
- runner = create(:ci_runner, contacted_at: Time.now)
- create(:ci_build, pipeline: pipeline, runner_id: runner.id)
+ create(:ci_runner, :instance, contacted_at: Time.now)
+
visit admin_runners_path
expect(page).to have_text "Set up a shared runner manually"
expect(page).to have_text "Runners currently online: 1"
end
- describe 'search', :js do
+ it 'with an instance runner shows an instance badge and no project count' do
+ runner = create(:ci_runner, :instance)
+
+ visit admin_runners_path
+
+ within "[data-testid='runner-row-#{runner.id}']" do
+ expect(page).to have_selector '.badge', text: 'shared'
+ expect(page).to have_text 'n/a'
+ end
+ end
+
+ it 'with a group runner shows a group badge and no project count' do
+ runner = create(:ci_runner, :group, groups: [group])
+
+ visit admin_runners_path
+
+ within "[data-testid='runner-row-#{runner.id}']" do
+ expect(page).to have_selector '.badge', text: 'group'
+ expect(page).to have_text 'n/a'
+ end
+ end
+
+ it 'with a project runner shows a project badge and project count' do
+ runner = create(:ci_runner, :project, projects: [project])
+
+ visit admin_runners_path
+
+ within "[data-testid='runner-row-#{runner.id}']" do
+ expect(page).to have_selector '.badge', text: 'specific'
+ expect(page).to have_text '1'
+ end
+ end
+
+ describe 'search' do
before do
- create(:ci_runner, description: 'runner-foo')
- create(:ci_runner, description: 'runner-bar')
+ create(:ci_runner, :instance, description: 'runner-foo')
+ create(:ci_runner, :instance, description: 'runner-bar')
visit admin_runners_path
end
+ it 'shows runners' do
+ expect(page).to have_content("runner-foo")
+ expect(page).to have_content("runner-bar")
+ end
+
it 'shows correct runner when description matches' do
input_filtered_search_keys('runner-foo')
@@ -53,28 +88,29 @@ RSpec.describe "Admin Runners" do
end
end
- describe 'filter by status', :js do
+ describe 'filter by status' do
it 'shows correct runner when status matches' do
- create(:ci_runner, description: 'runner-active', active: true)
- create(:ci_runner, description: 'runner-paused', active: false)
+ create(:ci_runner, :instance, description: 'runner-active', active: true)
+ create(:ci_runner, :instance, description: 'runner-paused', active: false)
visit admin_runners_path
expect(page).to have_content 'runner-active'
expect(page).to have_content 'runner-paused'
- input_filtered_search_keys('status:=active')
+ input_filtered_search_filter_is_only('Status', 'Active')
+
expect(page).to have_content 'runner-active'
expect(page).not_to have_content 'runner-paused'
end
it 'shows no runner when status does not match' do
- create(:ci_runner, :online, description: 'runner-active', active: true)
- create(:ci_runner, :online, description: 'runner-paused', active: false)
+ create(:ci_runner, :instance, description: 'runner-active', active: true)
+ create(:ci_runner, :instance, description: 'runner-paused', active: false)
visit admin_runners_path
- input_filtered_search_keys('status:=offline')
+ input_filtered_search_filter_is_only('Status', 'Online')
expect(page).not_to have_content 'runner-active'
expect(page).not_to have_content 'runner-paused'
@@ -83,46 +119,48 @@ RSpec.describe "Admin Runners" do
end
it 'shows correct runner when status is selected and search term is entered' do
- create(:ci_runner, description: 'runner-a-1', active: true)
- create(:ci_runner, description: 'runner-a-2', active: false)
- create(:ci_runner, description: 'runner-b-1', active: true)
+ create(:ci_runner, :instance, description: 'runner-a-1', active: true)
+ create(:ci_runner, :instance, description: 'runner-a-2', active: false)
+ create(:ci_runner, :instance, description: 'runner-b-1', active: true)
visit admin_runners_path
- input_filtered_search_keys('status:=active')
+ input_filtered_search_filter_is_only('Status', 'Active')
+
expect(page).to have_content 'runner-a-1'
expect(page).to have_content 'runner-b-1'
expect(page).not_to have_content 'runner-a-2'
- input_filtered_search_keys('status:=active runner-a')
+ input_filtered_search_keys('runner-a')
+
expect(page).to have_content 'runner-a-1'
expect(page).not_to have_content 'runner-b-1'
expect(page).not_to have_content 'runner-a-2'
end
end
- describe 'filter by type', :js do
- it 'shows correct runner when type matches' do
- create :ci_runner, :project, description: 'runner-project'
- create :ci_runner, :group, description: 'runner-group'
+ describe 'filter by type' do
+ before do
+ create(:ci_runner, :project, description: 'runner-project', projects: [project])
+ create(:ci_runner, :group, description: 'runner-group', groups: [group])
+ end
+ it 'shows correct runner when type matches' do
visit admin_runners_path
expect(page).to have_content 'runner-project'
expect(page).to have_content 'runner-group'
- input_filtered_search_keys('type:=project_type')
+ input_filtered_search_filter_is_only('Type', 'project')
+
expect(page).to have_content 'runner-project'
expect(page).not_to have_content 'runner-group'
end
it 'shows no runner when type does not match' do
- create :ci_runner, :project, description: 'runner-project'
- create :ci_runner, :group, description: 'runner-group'
-
visit admin_runners_path
- input_filtered_search_keys('type:=instance_type')
+ input_filtered_search_filter_is_only('Type', 'instance')
expect(page).not_to have_content 'runner-project'
expect(page).not_to have_content 'runner-group'
@@ -131,95 +169,93 @@ RSpec.describe "Admin Runners" do
end
it 'shows correct runner when type is selected and search term is entered' do
- create :ci_runner, :project, description: 'runner-a-1'
- create :ci_runner, :instance, description: 'runner-a-2'
- create :ci_runner, :project, description: 'runner-b-1'
+ create(:ci_runner, :project, description: 'runner-2-project', projects: [project])
visit admin_runners_path
- input_filtered_search_keys('type:=project_type')
- expect(page).to have_content 'runner-a-1'
- expect(page).to have_content 'runner-b-1'
- expect(page).not_to have_content 'runner-a-2'
+ input_filtered_search_filter_is_only('Type', 'project')
- input_filtered_search_keys('type:=project_type runner-a')
- expect(page).to have_content 'runner-a-1'
- expect(page).not_to have_content 'runner-b-1'
- expect(page).not_to have_content 'runner-a-2'
+ expect(page).to have_content 'runner-project'
+ expect(page).to have_content 'runner-2-project'
+ expect(page).not_to have_content 'runner-group'
+
+ input_filtered_search_keys('runner-project')
+
+ expect(page).to have_content 'runner-project'
+ expect(page).not_to have_content 'runner-2-project'
+ expect(page).not_to have_content 'runner-group'
end
end
- describe 'filter by tag', :js do
- it 'shows correct runner when tag matches' do
- create :ci_runner, description: 'runner-blue', tag_list: ['blue']
- create :ci_runner, description: 'runner-red', tag_list: ['red']
+ describe 'filter by tag' do
+ before do
+ create(:ci_runner, :instance, description: 'runner-blue', tag_list: ['blue'])
+ create(:ci_runner, :instance, description: 'runner-red', tag_list: ['red'])
+ end
+ it 'shows correct runner when tag matches' do
visit admin_runners_path
expect(page).to have_content 'runner-blue'
expect(page).to have_content 'runner-red'
- input_filtered_search_keys('tag:=blue')
+ input_filtered_search_filter_is_only('Tags', 'blue')
expect(page).to have_content 'runner-blue'
expect(page).not_to have_content 'runner-red'
end
it 'shows no runner when tag does not match' do
- create :ci_runner, description: 'runner-blue', tag_list: ['blue']
- create :ci_runner, description: 'runner-red', tag_list: ['blue']
-
visit admin_runners_path
- input_filtered_search_keys('tag:=red')
+ input_filtered_search_filter_is_only('Tags', 'green')
expect(page).not_to have_content 'runner-blue'
- expect(page).not_to have_content 'runner-blue'
expect(page).to have_text 'No runners found'
end
it 'shows correct runner when tag is selected and search term is entered' do
- create :ci_runner, description: 'runner-a-1', tag_list: ['blue']
- create :ci_runner, description: 'runner-a-2', tag_list: ['red']
- create :ci_runner, description: 'runner-b-1', tag_list: ['blue']
+ create(:ci_runner, :instance, description: 'runner-2-blue', tag_list: ['blue'])
visit admin_runners_path
- input_filtered_search_keys('tag:=blue')
+ input_filtered_search_filter_is_only('Tags', 'blue')
- expect(page).to have_content 'runner-a-1'
- expect(page).to have_content 'runner-b-1'
- expect(page).not_to have_content 'runner-a-2'
+ expect(page).to have_content 'runner-blue'
+ expect(page).to have_content 'runner-2-blue'
+ expect(page).not_to have_content 'runner-red'
- input_filtered_search_keys('tag:=blue runner-a')
+ input_filtered_search_keys('runner-2-blue')
- expect(page).to have_content 'runner-a-1'
- expect(page).not_to have_content 'runner-b-1'
- expect(page).not_to have_content 'runner-a-2'
+ expect(page).to have_content 'runner-2-blue'
+ expect(page).not_to have_content 'runner-blue'
+ expect(page).not_to have_content 'runner-red'
end
end
- it 'sorts by last contact date', :js do
- create(:ci_runner, description: 'runner-1', created_at: '2018-07-12 15:37', contacted_at: '2018-07-12 15:37')
- create(:ci_runner, description: 'runner-2', created_at: '2018-07-12 16:37', contacted_at: '2018-07-12 16:37')
+ it 'sorts by last contact date' do
+ create(:ci_runner, :instance, description: 'runner-1', created_at: '2018-07-12 15:37', contacted_at: '2018-07-12 15:37')
+ create(:ci_runner, :instance, description: 'runner-2', created_at: '2018-07-12 16:37', contacted_at: '2018-07-12 16:37')
visit admin_runners_path
- within '[data-testid="runners-table"] .gl-responsive-table-row:nth-child(2)' do
+ within '[data-testid="runner-list"] tbody tr:nth-child(1)' do
expect(page).to have_content 'runner-2'
end
- within '[data-testid="runners-table"] .gl-responsive-table-row:nth-child(3)' do
+ within '[data-testid="runner-list"] tbody tr:nth-child(2)' do
expect(page).to have_content 'runner-1'
end
- sorting_by 'Last Contact'
+ click_on 'Created date' # Open "sort by" dropdown
+ click_on 'Last contact'
+ click_on 'Sort direction: Descending'
- within '[data-testid="runners-table"] .gl-responsive-table-row:nth-child(2)' do
+ within '[data-testid="runner-list"] tbody tr:nth-child(1)' do
expect(page).to have_content 'runner-1'
end
- within '[data-testid="runners-table"] .gl-responsive-table-row:nth-child(3)' do
+ within '[data-testid="runner-list"] tbody tr:nth-child(2)' do
expect(page).to have_content 'runner-2'
end
end
@@ -237,47 +273,6 @@ RSpec.describe "Admin Runners" do
end
end
- context 'group runner' do
- let(:group) { create(:group) }
- let!(:runner) { create(:ci_runner, :group, groups: [group]) }
-
- it 'shows the label and does not show the project count' do
- visit admin_runners_path
-
- within "[data-testid='runner-row-#{runner.id}']" do
- expect(page).to have_selector '.badge', text: 'group'
- expect(page).to have_text 'n/a'
- end
- end
- end
-
- context 'shared runner' do
- it 'shows the label and does not show the project count' do
- runner = create(:ci_runner, :instance)
-
- visit admin_runners_path
-
- within "[data-testid='runner-row-#{runner.id}']" do
- expect(page).to have_selector '.badge', text: 'shared'
- expect(page).to have_text 'n/a'
- end
- end
- end
-
- context 'specific runner' do
- it 'shows the label and the project count' do
- project = create(:project)
- runner = create(:ci_runner, :project, projects: [project])
-
- visit admin_runners_path
-
- within "[data-testid='runner-row-#{runner.id}']" do
- expect(page).to have_selector '.badge', text: 'specific'
- expect(page).to have_text '1'
- end
- end
- end
-
describe 'runners registration token' do
let!(:token) { Gitlab::CurrentSettings.runners_registration_token }
@@ -286,17 +281,23 @@ RSpec.describe "Admin Runners" do
end
it 'has a registration token' do
- expect(page.find('[data-testid="registration_token"]')).to have_content(token)
+ click_on 'Click to reveal'
+ expect(page.find('[data-testid="registration-token"]')).to have_content(token)
end
describe 'reset registration token' do
- let(:page_token) { find('[data-testid="registration_token"]').text }
+ let(:page_token) { find('[data-testid="registration-token"]').text }
before do
click_button 'Reset registration token'
+
+ page.accept_alert
+
+ wait_for_requests
end
it 'changes registration token' do
+ click_on 'Click to reveal'
expect(page_token).not_to eq token
end
end
@@ -409,4 +410,43 @@ RSpec.describe "Admin Runners" do
end
end
end
+
+ private
+
+ def search_bar_selector
+ '[data-testid="runners-filtered-search"]'
+ end
+
+ # The filters must be clicked first to be able to receive events
+ # See: https://gitlab.com/gitlab-org/gitlab-ui/-/issues/1493
+ def focus_filtered_search
+ page.within(search_bar_selector) do
+ page.find('.gl-filtered-search-term-token').click
+ end
+ end
+
+ def input_filtered_search_keys(search_term)
+ focus_filtered_search
+
+ page.within(search_bar_selector) do
+ page.find('input').send_keys(search_term)
+ click_on 'Search'
+ end
+ end
+
+ def input_filtered_search_filter_is_only(filter, value)
+ focus_filtered_search
+
+ page.within(search_bar_selector) do
+ click_on filter
+
+ # For OPERATOR_IS_ONLY, clicking the filter
+ # immediately preselects "=" operator
+
+ page.find('input').send_keys(value)
+ page.find('input').send_keys(:enter)
+
+ click_on 'Search'
+ end
+ end
end
diff --git a/spec/features/admin/admin_sees_background_migrations_spec.rb b/spec/features/admin/admin_sees_background_migrations_spec.rb
index d848a8352bc..11823195310 100644
--- a/spec/features/admin/admin_sees_background_migrations_spec.rb
+++ b/spec/features/admin/admin_sees_background_migrations_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe "Admin > Admin sees background migrations" do
end
end
- it 'can view queued migrations' do
+ it 'can view queued migrations and pause and resume them' do
visit admin_background_migrations_path
within '#content-body' do
@@ -40,7 +40,16 @@ RSpec.describe "Admin > Admin sees background migrations" do
expect(page).to have_content(active_migration.job_class_name)
expect(page).to have_content(active_migration.table_name)
expect(page).to have_content('0.00%')
- expect(page).to have_content(active_migration.status.humanize)
+ expect(page).not_to have_content('Paused')
+ expect(page).to have_content('Active')
+
+ click_button('Pause')
+ expect(page).not_to have_content('Active')
+ expect(page).to have_content('Paused')
+
+ click_button('Resume')
+ expect(page).not_to have_content('Paused')
+ expect(page).to have_content('Active')
end
end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index c289c18126d..9efb31ef4c1 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -269,77 +269,43 @@ RSpec.describe 'Admin updates settings' do
end
context 'Integrations page' do
+ let(:mailgun_events_receiver_enabled) { true }
+
before do
+ stub_feature_flags(mailgun_events_receiver: mailgun_events_receiver_enabled)
visit general_admin_application_settings_path
end
it 'enable hiding third party offers' do
page.within('.as-third-party-offers') do
- check 'Do not display offers from third parties within GitLab'
+ check 'Do not display offers from third parties'
click_button 'Save changes'
end
expect(page).to have_content "Application settings saved successfully"
expect(current_settings.hide_third_party_offers).to be true
end
- end
-
- context 'when Service Templates are enabled' do
- before do
- stub_feature_flags(disable_service_templates: false)
- visit general_admin_application_settings_path
- end
-
- it 'shows Service Templates link' do
- expect(page).to have_link('Service Templates')
- end
-
- context 'when the Slack Notifications Service template is active' do
- before do
- create(:service, :template, type: 'SlackService', active: true)
-
- visit general_admin_application_settings_path
- end
- it 'change Slack Notifications Service template settings', :js do
- first(:link, 'Service Templates').click
- click_link 'Slack notifications'
- fill_in 'Webhook', with: 'http://localhost'
- fill_in 'Username', with: 'test_user'
- fill_in 'service[push_channel]', with: '#test_channel'
- page.check('Notify only broken pipelines')
- page.select 'All branches', from: 'Branches to be notified'
- page.select 'Match any of the labels', from: 'Labels to be notified behavior'
-
- check_all_events
- click_button 'Save changes'
+ context 'when mailgun_events_receiver feature flag is enabled' do
+ it 'enabling Mailgun events', :aggregate_failures do
+ page.within('.as-mailgun') do
+ check 'Enable Mailgun event receiver'
+ fill_in 'Mailgun HTTP webhook signing key', with: 'MAILGUN_SIGNING_KEY'
+ click_button 'Save changes'
+ end
expect(page).to have_content 'Application settings saved successfully'
-
- click_link 'Slack notifications'
-
- expect(page.all('input[type=checkbox]')).to all(be_checked)
- expect(find_field('Webhook').value).to eq 'http://localhost'
- expect(find_field('Username').value).to eq 'test_user'
- expect(find('[name="service[push_channel]"]').value).to eq '#test_channel'
- end
-
- it 'defaults Deployment events to false for chat notification template settings', :js do
- first(:link, 'Service Templates').click
- click_link 'Slack notifications'
-
- expect(find_field('Deployment')).not_to be_checked
+ expect(current_settings.mailgun_events_enabled).to be true
+ expect(current_settings.mailgun_signing_key).to eq 'MAILGUN_SIGNING_KEY'
end
end
- end
- context 'When Service templates are disabled' do
- before do
- stub_feature_flags(disable_service_templates: true)
- end
+ context 'when mailgun_events_receiver feature flag is disabled' do
+ let(:mailgun_events_receiver_enabled) { false }
- it 'does not show Service Templates link' do
- expect(page).not_to have_link('Service Templates')
+ it 'does not have mailgun' do
+ expect(page).not_to have_selector('.as-mailgun')
+ end
end
end
@@ -370,6 +336,43 @@ RSpec.describe 'Admin updates settings' do
expect(page).to have_content "Application settings saved successfully"
end
+ context 'Runner Registration' do
+ context 'when feature is enabled' do
+ before do
+ stub_feature_flags(runner_registration_control: true)
+ end
+
+ it 'allows admins to control who has access to register runners' do
+ visit ci_cd_admin_application_settings_path
+
+ expect(current_settings.valid_runner_registrars).to eq(ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES)
+
+ page.within('.as-runner') do
+ find_all('.form-check-input').each(&:click)
+
+ click_button 'Save changes'
+ end
+
+ expect(current_settings.valid_runner_registrars).to eq([])
+ expect(page).to have_content "Application settings saved successfully"
+ end
+ end
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(runner_registration_control: false)
+ end
+
+ it 'does not allow admins to control who has access to register runners' do
+ visit ci_cd_admin_application_settings_path
+
+ expect(current_settings.valid_runner_registrars).to eq(ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES)
+
+ expect(page).not_to have_css('.as-runner')
+ end
+ end
+ end
+
context 'Container Registry' do
let(:feature_flag_enabled) { true }
let(:client_support) { true }
@@ -530,7 +533,7 @@ RSpec.describe 'Admin updates settings' do
wait_for_requests
- expect(page).to have_selector '.js-usage-ping-payload'
+ expect(page).to have_selector '.js-service-ping-payload'
expect(page).to have_button 'Hide payload'
expect(page).to have_content expected_payload_content
end
@@ -581,8 +584,8 @@ RSpec.describe 'Admin updates settings' do
new_documentation_url = 'https://docs.gitlab.com'
page.within('.as-help-page') do
- fill_in 'Help page text', with: 'Example text'
- check 'Hide marketing-related entries from help'
+ fill_in 'Additional text to show on the Help page', with: 'Example text'
+ check 'Hide marketing-related entries from the Help page.'
fill_in 'Support page URL', with: new_support_url
fill_in 'Documentation pages URL', with: new_documentation_url
click_button 'Save changes'
diff --git a/spec/features/admin/admin_users_impersonation_tokens_spec.rb b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
index dc528dd92d4..ee64e71f176 100644
--- a/spec/features/admin/admin_users_impersonation_tokens_spec.rb
+++ b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
@@ -28,10 +28,10 @@ RSpec.describe 'Admin > Users > Impersonation Tokens', :js do
name = 'Hello World'
visit admin_user_impersonation_tokens_path(user_id: user.username)
- fill_in "Name", with: name
+ fill_in "Token name", with: name
# Set date to 1st of next month
- find_field("Expires at").click
+ find_field("Expiration date").click
find(".pika-next").click
click_on "1"
diff --git a/spec/features/admin/services/admin_visits_service_templates_spec.rb b/spec/features/admin/services/admin_visits_service_templates_spec.rb
index 9d011b97f63..d367867ebb5 100644
--- a/spec/features/admin/services/admin_visits_service_templates_spec.rb
+++ b/spec/features/admin/services/admin_visits_service_templates_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Admin visits service templates' do
let(:admin) { create(:user, :admin) }
- let(:slack_service) { Integration.for_template.find { |s| s.type == 'SlackService' } }
+ let(:slack_integration) { Integration.for_template.find { |s| s.type == 'SlackService' } }
before do
sign_in(admin)
@@ -23,7 +23,7 @@ RSpec.describe 'Admin visits service templates' do
context 'with an active service template' do
before do
- create(:slack_service, :template, active: true)
+ create(:integrations_slack, :template, active: true)
visit(admin_application_settings_services_path)
end
@@ -33,20 +33,20 @@ RSpec.describe 'Admin visits service templates' do
context 'without instance-level integration' do
it 'shows a link to service template' do
- expect(page).to have_link('Slack', href: edit_admin_application_settings_service_path(slack_service.id))
- expect(page).not_to have_link('Slack', href: edit_admin_application_settings_integration_path(slack_service))
+ expect(page).to have_link('Slack', href: edit_admin_application_settings_service_path(slack_integration.id))
+ expect(page).not_to have_link('Slack', href: edit_admin_application_settings_integration_path(slack_integration))
end
end
context 'with instance-level integration' do
before do
- create(:slack_service, instance: true, project: nil)
+ create(:integrations_slack, instance: true, project: nil)
visit(admin_application_settings_services_path)
end
it 'shows a link to instance-level integration' do
- expect(page).not_to have_link('Slack', href: edit_admin_application_settings_service_path(slack_service.id))
- expect(page).to have_link('Slack', href: edit_admin_application_settings_integration_path(slack_service))
+ expect(page).not_to have_link('Slack', href: edit_admin_application_settings_service_path(slack_integration.id))
+ expect(page).to have_link('Slack', href: edit_admin_application_settings_integration_path(slack_integration))
end
end
end
diff --git a/spec/features/admin/users/user_spec.rb b/spec/features/admin/users/user_spec.rb
index 3599658ee56..e6eb76b13eb 100644
--- a/spec/features/admin/users/user_spec.rb
+++ b/spec/features/admin/users/user_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Admin::Users::User' do
+ include Spec::Support::Helpers::Features::AdminUsersHelpers
+
let_it_be(:user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
let_it_be(:current_user) { create(:admin) }
@@ -12,15 +14,18 @@ RSpec.describe 'Admin::Users::User' do
end
describe 'GET /admin/users/:id' do
- it 'has user info', :aggregate_failures do
+ it 'has user info', :js, :aggregate_failures do
visit admin_user_path(user)
expect(page).to have_content(user.email)
expect(page).to have_content(user.name)
expect(page).to have_content("ID: #{user.id}")
expect(page).to have_content("Namespace ID: #{user.namespace_id}")
- expect(page).to have_button('Deactivate user')
- expect(page).to have_button('Block user')
+
+ click_user_dropdown_toggle(user.id)
+
+ expect(page).to have_button('Block')
+ expect(page).to have_button('Deactivate')
expect(page).to have_button('Delete user')
expect(page).to have_button('Delete user and contributions')
end
@@ -29,9 +34,7 @@ RSpec.describe 'Admin::Users::User' do
it 'shows confirmation and allows blocking and unblocking', :js do
visit admin_user_path(user)
- find('button', text: 'Block user').click
-
- wait_for_requests
+ click_action_in_user_dropdown(user.id, 'Block')
expect(page).to have_content('Block user')
expect(page).to have_content('You can always unblock their account, their data will remain intact.')
@@ -41,21 +44,18 @@ RSpec.describe 'Admin::Users::User' do
wait_for_requests
expect(page).to have_content('Successfully blocked')
- expect(page).to have_content('This user is blocked')
-
- find('button', text: 'Unblock user').click
- wait_for_requests
+ click_action_in_user_dropdown(user.id, 'Unblock')
expect(page).to have_content('Unblock user')
expect(page).to have_content('You can always block their account again if needed.')
find('.modal-footer button', text: 'Unblock').click
- wait_for_requests
-
expect(page).to have_content('Successfully unblocked')
- expect(page).to have_content('Block this user')
+
+ click_user_dropdown_toggle(user.id)
+ expect(page).to have_content('Block')
end
end
@@ -63,9 +63,7 @@ RSpec.describe 'Admin::Users::User' do
it 'shows confirmation and allows deactivating/re-activating', :js do
visit admin_user_path(user)
- find('button', text: 'Deactivate user').click
-
- wait_for_requests
+ click_action_in_user_dropdown(user.id, 'Deactivate')
expect(page).to have_content('Deactivate user')
expect(page).to have_content('You can always re-activate their account, their data will remain intact.')
@@ -75,11 +73,8 @@ RSpec.describe 'Admin::Users::User' do
wait_for_requests
expect(page).to have_content('Successfully deactivated')
- expect(page).to have_content('Reactivate this user')
-
- find('button', text: 'Activate user').click
- wait_for_requests
+ click_action_in_user_dropdown(user.id, 'Activate')
expect(page).to have_content('Activate user')
expect(page).to have_content('You can always deactivate their account again if needed.')
@@ -89,7 +84,9 @@ RSpec.describe 'Admin::Users::User' do
wait_for_requests
expect(page).to have_content('Successfully activated')
- expect(page).to have_content('Deactivate this user')
+
+ click_user_dropdown_toggle(user.id)
+ expect(page).to have_content('Deactivate')
end
end
@@ -367,8 +364,43 @@ RSpec.describe 'Admin::Users::User' do
expect(page).to have_content(user.name)
expect(page).to have_content('Pending approval')
- expect(page).to have_link('Approve user')
- expect(page).to have_link('Reject request')
+
+ click_user_dropdown_toggle(user.id)
+
+ expect(page).to have_button('Approve')
+ expect(page).to have_button('Reject')
+ end
+ end
+ end
+
+ context 'when user has an unconfirmed email', :js do
+ let(:unconfirmed_user) { create(:user, :unconfirmed) }
+
+ where(:path_helper) do
+ [
+ [-> (user) { admin_user_path(user) }],
+ [-> (user) { projects_admin_user_path(user) }],
+ [-> (user) { keys_admin_user_path(user) }],
+ [-> (user) { admin_user_identities_path(user) }],
+ [-> (user) { admin_user_impersonation_tokens_path(user) }]
+ ]
+ end
+
+ with_them do
+ it "allows an admin to force confirmation of the user's email", :aggregate_failures do
+ visit path_helper.call(unconfirmed_user)
+
+ click_button 'Confirm user'
+
+ page.within('[role="dialog"]') do
+ expect(page).to have_content("Confirm user #{unconfirmed_user.name}?")
+ expect(page).to have_content('This user has an unconfirmed email address. You may force a confirmation.')
+
+ click_button 'Confirm user'
+ end
+
+ expect(page).to have_content('Successfully confirmed')
+ expect(page).not_to have_button('Confirm user')
end
end
end
diff --git a/spec/features/admin/users/users_spec.rb b/spec/features/admin/users/users_spec.rb
index 187fa6fc2a4..119b01ff552 100644
--- a/spec/features/admin/users/users_spec.rb
+++ b/spec/features/admin/users/users_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Admin::Users' do
+ include Spec::Support::Helpers::Features::AdminUsersHelpers
+
let_it_be(:user, reload: true) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
let_it_be(:current_user) { create(:admin) }
@@ -572,12 +574,6 @@ RSpec.describe 'Admin::Users' do
end
end
- def click_user_dropdown_toggle(user_id)
- page.within("[data-testid='user-actions-#{user_id}']") do
- find("[data-testid='dropdown-toggle']").click
- end
- end
-
def first_row
page.all('[role="row"]')[1]
end
@@ -592,14 +588,4 @@ RSpec.describe 'Admin::Users' do
click_link option
end
end
-
- def click_action_in_user_dropdown(user_id, action)
- click_user_dropdown_toggle(user_id)
-
- within find("[data-testid='user-actions-#{user_id}']") do
- find('li button', text: action).click
- end
-
- wait_for_requests
- end
end
diff --git a/spec/features/alert_management/alert_management_list_spec.rb b/spec/features/alert_management/alert_management_list_spec.rb
index aeaadacb38d..1e710169c9c 100644
--- a/spec/features/alert_management/alert_management_list_spec.rb
+++ b/spec/features/alert_management/alert_management_list_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe 'Alert Management index', :js do
end
context 'when the prometheus integration is enabled' do
- let_it_be(:integration) { create(:prometheus_service, project: project) }
+ let_it_be(:integration) { create(:prometheus_integration, project: project) }
it_behaves_like 'alert page with title, filtered search, and table'
end
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index 02bb7574fb0..4b52bb953ed 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -244,8 +244,7 @@ RSpec.describe 'Project issue boards', :js do
expect(page).to have_selector(selector, text: development.title, count: 1)
end
- # TODO https://gitlab.com/gitlab-org/gitlab/-/issues/323551
- xit 'issue moves between lists and does not show the "Development" label since the card is in the "Development" list label' do
+ it 'issue moves between lists and does not show the "Development" label since the card is in the "Development" list label' do
drag(list_from_index: 1, from_index: 1, list_to_index: 2)
wait_for_board_cards(2, 7)
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index 977147c3c6b..0bb8e0bcdc0 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -31,4 +31,12 @@ RSpec.describe 'Project issue boards sidebar', :js do
def click_first_issue_card
click_card(first_card)
end
+
+ def refresh_and_click_first_card
+ page.refresh
+
+ wait_for_requests
+
+ first_card.click
+ end
end
diff --git a/spec/features/calendar_spec.rb b/spec/features/calendar_spec.rb
index 21da92c9f43..a8aa3f0b36a 100644
--- a/spec/features/calendar_spec.rb
+++ b/spec/features/calendar_spec.rb
@@ -145,7 +145,7 @@ RSpec.describe 'Contributions Calendar', :js do
describe '1 issue creation calendar activity' do
before do
- Issues::CreateService.new(project: contributed_project, current_user: user, params: issue_params).execute
+ Issues::CreateService.new(project: contributed_project, current_user: user, params: issue_params, spam_params: nil).execute
end
it_behaves_like 'a day with activity', contribution_count: 1
@@ -180,7 +180,7 @@ RSpec.describe 'Contributions Calendar', :js do
push_code_contribution
travel_to(Date.yesterday) do
- Issues::CreateService.new(project: contributed_project, current_user: user, params: issue_params).execute
+ Issues::CreateService.new(project: contributed_project, current_user: user, params: issue_params, spam_params: nil).execute
end
end
include_context 'visit user page'
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index 8c7564535b5..d0f8767884e 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Value Stream Analytics', :js do
let_it_be(:user) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
+
let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
let(:milestone) { create(:milestone, project: project) }
let(:mr) { create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}") }
diff --git a/spec/features/dashboard/datetime_on_tooltips_spec.rb b/spec/features/dashboard/datetime_on_tooltips_spec.rb
index 442b8904974..bf9f6895d24 100644
--- a/spec/features/dashboard/datetime_on_tooltips_spec.rb
+++ b/spec/features/dashboard/datetime_on_tooltips_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'Tooltips on .timeago dates', :js do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, name: 'test', namespace: user.namespace) }
+
let(:created_date) { 1.day.ago.beginning_of_minute - 1.hour }
before_all do
diff --git a/spec/features/dashboard/issues_filter_spec.rb b/spec/features/dashboard/issues_filter_spec.rb
index 4bd00bd0a80..4d59e1ded3d 100644
--- a/spec/features/dashboard/issues_filter_spec.rb
+++ b/spec/features/dashboard/issues_filter_spec.rb
@@ -81,14 +81,14 @@ RSpec.describe 'Dashboard Issues filtering', :js do
sort_by('Created date')
visit_issues(assignee_username: user.username)
- expect(find('.issues-filters')).to have_content('Created date')
+ expect(page).to have_button('Created date')
end
it 'keeps sorting issues after visiting Projects Issues page' do
sort_by('Created date')
visit project_issues_path(project)
- expect(find('.issues-filters')).to have_content('Created date')
+ expect(page).to have_button('Created date')
end
end
diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb
index 26b376be660..aa2485d4236 100644
--- a/spec/features/dashboard/merge_requests_spec.rb
+++ b/spec/features/dashboard/merge_requests_spec.rb
@@ -53,6 +53,7 @@ RSpec.describe 'Dashboard Merge Requests' do
context 'merge requests exist' do
let_it_be(:author_user) { create(:user) }
+
let(:label) { create(:label) }
let!(:assigned_merge_request) do
@@ -181,6 +182,7 @@ RSpec.describe 'Dashboard Merge Requests' do
context 'merge request review', :js do
let_it_be(:author_user) { create(:user) }
+
let!(:review_requested_merge_request) do
create(:merge_request,
reviewers: [current_user],
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 20c753b1cdb..1f0981de7e1 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -18,12 +18,6 @@ RSpec.describe 'Dashboard Projects' do
end
end
- it 'shows the customize banner', :js do
- visit dashboard_projects_path
-
- expect(page).to have_content('Do you want to customize this page?')
- end
-
context 'when user has access to the project' do
it 'shows role badge' do
visit dashboard_projects_path
diff --git a/spec/features/dashboard/root_spec.rb b/spec/features/dashboard/root_spec.rb
new file mode 100644
index 00000000000..55bb43c6fcf
--- /dev/null
+++ b/spec/features/dashboard/root_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Root path' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+ it 'shows the customize banner', :js do
+ visit root_path
+
+ expect(page).to have_content('Do you want to customize this page?')
+ end
+end
diff --git a/spec/features/file_uploads/group_import_spec.rb b/spec/features/file_uploads/group_import_spec.rb
index 0f9d05c3975..a8592f99bd6 100644
--- a/spec/features/file_uploads/group_import_spec.rb
+++ b/spec/features/file_uploads/group_import_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Upload a group export archive', :api, :js do
let_it_be(:user) { create(:user, :admin) }
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
+
let(:api_path) { '/groups/import' }
let(:url) { capybara_url(api(api_path, personal_access_token: personal_access_token)) }
let(:file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
diff --git a/spec/features/file_uploads/project_import_spec.rb b/spec/features/file_uploads/project_import_spec.rb
index 1bf16f46c63..82b6f490d2a 100644
--- a/spec/features/file_uploads/project_import_spec.rb
+++ b/spec/features/file_uploads/project_import_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Upload a project export archive', :api, :js do
let_it_be(:user) { create(:user, :admin) }
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
+
let(:api_path) { '/projects/import' }
let(:url) { capybara_url(api(api_path, personal_access_token: personal_access_token)) }
let(:file) { fixture_file_upload('spec/features/projects/import_export/test_project_export.tar.gz') }
diff --git a/spec/features/file_uploads/user_avatar_spec.rb b/spec/features/file_uploads/user_avatar_spec.rb
index 043115be61a..c30e3452201 100644
--- a/spec/features/file_uploads/user_avatar_spec.rb
+++ b/spec/features/file_uploads/user_avatar_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Upload a user avatar', :js do
let_it_be(:user, reload: true) { create(:user) }
+
let(:file) { fixture_file_upload('spec/fixtures/banana_sample.gif') }
before do
diff --git a/spec/features/groups/import_export/connect_instance_spec.rb b/spec/features/groups/import_export/connect_instance_spec.rb
index 563c8f429f8..cf893e444c4 100644
--- a/spec/features/groups/import_export/connect_instance_spec.rb
+++ b/spec/features/groups/import_export/connect_instance_spec.rb
@@ -24,6 +24,7 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
pat = 'demo-pat'
stub_path = 'stub-group'
total = 37
+
stub_request(:get, "%{url}/api/v4/groups?page=1&per_page=20&top_level_only=true&min_access_level=50&search=" % { url: source_url }).to_return(
body: [{
id: 2595438,
@@ -32,7 +33,7 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
path: stub_path,
full_name: 'Stub',
full_path: stub_path
- }].to_json,
+ }].to_json,
headers: {
'Content-Type' => 'application/json',
'X-Next-Page' => 2,
@@ -43,6 +44,10 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
}
)
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:validate_instance_version!).and_return(true)
+ end
+
expect(page).to have_content 'Import groups from another instance of GitLab'
expect(page).to have_content 'Not all related objects are migrated'
@@ -53,6 +58,10 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
expect(page).to have_content 'Showing 1-1 of %{total} groups from %{url}' % { url: source_url, total: total }
expect(page).to have_content stub_path
+
+ visit '/'
+
+ wait_for_all_requests
end
end
diff --git a/spec/features/groups/import_export/import_file_spec.rb b/spec/features/groups/import_export/import_file_spec.rb
index 08295a3392a..76d17c4409d 100644
--- a/spec/features/groups/import_export/import_file_spec.rb
+++ b/spec/features/groups/import_export/import_file_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe 'Import/Export - Group Import', :js do
click_link 'Import group'
fill_in :import_group_path, with: 'test-group-import'
- expect(page).to have_content 'Group path is already taken. Suggestions: test-group-import1'
+ expect(page).to have_content "Group path is already taken. We've suggested one that is available."
end
end
end
diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb
index ee18298e894..1d57d0a9103 100644
--- a/spec/features/groups/members/manage_members_spec.rb
+++ b/spec/features/groups/members/manage_members_spec.rb
@@ -93,13 +93,13 @@ RSpec.describe 'Groups > Members > Manage members' do
visit group_group_members_path(group)
click_on 'Invite members'
- fill_in 'Select members or type email addresses', with: '@gitlab.com'
+ find('[data-testid="members-token-select-input"]').set('@gitlab.com')
wait_for_requests
expect(page).to have_content('No matches found')
- fill_in 'Select members or type email addresses', with: 'undisclosed_email@gitlab.com'
+ find('[data-testid="members-token-select-input"]').set('undisclosed_email@gitlab.com')
wait_for_requests
expect(page).to have_content("Jane 'invisible' Doe")
diff --git a/spec/features/groups/merge_requests_spec.rb b/spec/features/groups/merge_requests_spec.rb
index f79c93157dc..077f680629f 100644
--- a/spec/features/groups/merge_requests_spec.rb
+++ b/spec/features/groups/merge_requests_spec.rb
@@ -75,4 +75,29 @@ RSpec.describe 'Group merge requests page' do
end
end
end
+
+ context 'empty state with no merge requests' do
+ before do
+ MergeRequest.delete_all
+ end
+
+ it 'shows an empty state, button to create merge request and no filters bar', :aggregate_failures, :js do
+ visit path
+
+ expect(page).to have_selector('.empty-state')
+ expect(page).to have_link('Select project to create merge request')
+ expect(page).not_to have_selector('.issues-filters')
+ end
+
+ context 'with no open merge requests' do
+ it 'shows an empty state, button to create merge request and filters bar', :aggregate_failures, :js do
+ create(:merge_request, :closed, source_project: project, target_project: project)
+ visit path
+
+ expect(page).to have_selector('.empty-state')
+ expect(page).to have_link('Select project to create merge request')
+ expect(page).to have_selector('.issues-filters')
+ end
+ end
+ end
end
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
index 70a19445c89..0a159056569 100644
--- a/spec/features/groups/navbar_spec.rb
+++ b/spec/features/groups/navbar_spec.rb
@@ -11,40 +11,6 @@ RSpec.describe 'Group navbar' do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
- let(:structure) do
- [
- group_context_nav_item,
- group_information_nav_item,
- {
- nav_item: _('Issues'),
- nav_sub_items: issues_nav_items
- },
- {
- nav_item: _('Merge requests'),
- nav_sub_items: []
- },
- (security_and_compliance_nav_item if Gitlab.ee?),
- (push_rules_nav_item if Gitlab.ee?),
- {
- nav_item: _('Kubernetes'),
- nav_sub_items: []
- },
- (analytics_nav_item if Gitlab.ee?),
- members_nav_item
- ].compact
- end
-
- let(:members_nav_item) do
- nil
- end
-
- let(:group_context_nav_item) do
- {
- nav_item: "#{group.name[0, 1].upcase} #{group.name}",
- nav_sub_items: []
- }
- end
-
before do
insert_package_nav(_('Kubernetes'))
@@ -85,44 +51,4 @@ RSpec.describe 'Group navbar' do
it_behaves_like 'verified navigation bar'
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- let(:group_context_nav_item) do
- nil
- end
-
- let(:group_information_nav_item) do
- {
- nav_item: _('Group overview'),
- nav_sub_items: [
- _('Details'),
- _('Activity')
- ]
- }
- end
-
- let(:members_nav_item) do
- {
- nav_item: _('Members'),
- nav_sub_items: []
- }
- end
-
- let(:issues_nav_items) do
- [
- _('List'),
- _('Board'),
- _('Labels'),
- _('Milestones')
- ]
- end
-
- before do
- stub_feature_flags(sidebar_refactor: false)
-
- visit group_path(group)
- end
-
- it_behaves_like 'verified navigation bar'
- end
end
diff --git a/spec/features/groups/packages_spec.rb b/spec/features/groups/packages_spec.rb
index 60e0c08b3d4..752303fdd78 100644
--- a/spec/features/groups/packages_spec.rb
+++ b/spec/features/groups/packages_spec.rb
@@ -38,18 +38,22 @@ RSpec.describe 'Group Packages' do
context 'when there are packages' do
let_it_be(:second_project) { create(:project, name: 'second-project', group: group) }
- let_it_be(:conan_package) { create(:conan_package, project: project, name: 'zzz', created_at: 1.day.ago, version: '1.0.0') }
+ let_it_be(:npm_package) { create(:npm_package, project: project, name: 'zzz', created_at: 1.day.ago, version: '1.0.0') }
let_it_be(:maven_package) { create(:maven_package, project: second_project, name: 'aaa', created_at: 2.days.ago, version: '2.0.0') }
- let_it_be(:packages) { [conan_package, maven_package] }
+ let_it_be(:packages) { [npm_package, maven_package] }
it_behaves_like 'packages list', check_project_name: true
- it_behaves_like 'package details link'
+ context 'when package_details_apollo feature flag is off' do
+ before do
+ stub_feature_flags(package_details_apollo: false)
+ end
+
+ it_behaves_like 'package details link'
+ end
it 'allows you to navigate to the project page' do
- page.within('[data-qa-selector="packages-table"]') do
- find('[data-qa-selector="package-path"]', text: project.name).click
- end
+ find('[data-testid="root-link"]', text: project.name).click
expect(page).to have_current_path(project_path(project))
expect(page).to have_content(project.name)
@@ -58,15 +62,15 @@ RSpec.describe 'Group Packages' do
context 'sorting' do
it_behaves_like 'shared package sorting' do
let_it_be(:package_one) { maven_package }
- let_it_be(:package_two) { conan_package }
+ let_it_be(:package_two) { npm_package }
end
it_behaves_like 'correctly sorted packages list', 'Project' do
- let(:packages) { [maven_package, conan_package] }
+ let(:packages) { [maven_package, npm_package] }
end
it_behaves_like 'correctly sorted packages list', 'Project', ascending: true do
- let(:packages) { [conan_package, maven_package] }
+ let(:packages) { [npm_package, maven_package] }
end
end
end
diff --git a/spec/features/groups/settings/repository_spec.rb b/spec/features/groups/settings/repository_spec.rb
index 3c1609a2605..7082b2b20bd 100644
--- a/spec/features/groups/settings/repository_spec.rb
+++ b/spec/features/groups/settings/repository_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe 'Group Repository settings' do
it 'renders the correct setting section content' do
within("#js-default-branch-name") do
expect(page).to have_content("Default initial branch name")
- expect(page).to have_content("Set the default name of the initial branch when creating new repositories through the user interface.")
+ expect(page).to have_content("The default name for the initial branch of new repositories created in the group.")
end
end
end
diff --git a/spec/features/groups/settings/user_searches_in_settings_spec.rb b/spec/features/groups/settings/user_searches_in_settings_spec.rb
index a01514714dd..c258dd41b03 100644
--- a/spec/features/groups/settings/user_searches_in_settings_spec.rb
+++ b/spec/features/groups/settings/user_searches_in_settings_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'User searches group settings', :js do
visit group_settings_integrations_path(group)
end
- it_behaves_like 'can highlight results', 'Project integration management'
+ it_behaves_like 'can highlight results', 'Group-level integration management'
end
context 'in Repository page' do
@@ -48,6 +48,6 @@ RSpec.describe 'User searches group settings', :js do
visit group_settings_packages_and_registries_path(group)
end
- it_behaves_like 'can highlight results', 'GitLab Packages'
+ it_behaves_like 'can highlight results', 'Use GitLab as a private registry'
end
end
diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb
index 4bcba4c21ed..79226facad4 100644
--- a/spec/features/groups/show_spec.rb
+++ b/spec/features/groups/show_spec.rb
@@ -208,13 +208,13 @@ RSpec.describe 'Group show page' do
expect(page).to have_selector('.content[itemscope][itemtype="https://schema.org/Organization"]')
page.within('.group-home-panel') do
- expect(page).to have_selector('img.avatar[itemprop="logo"]')
+ expect(page).to have_selector('[itemprop="logo"]')
expect(page).to have_selector('[itemprop="name"]', text: group.name)
expect(page).to have_selector('[itemprop="description"]', text: group.description)
end
page.within('[itemprop="owns"][itemtype="https://schema.org/SoftwareSourceCode"]') do
- expect(page).to have_selector('img.avatar[itemprop="image"]')
+ expect(page).to have_selector('[itemprop="image"]')
expect(page).to have_selector('[itemprop="name"]', text: project.name)
expect(page).to have_selector('[itemprop="description"]', text: project.description)
end
@@ -224,12 +224,12 @@ RSpec.describe 'Group show page' do
el.click
wait_for_all_requests
page.within(el) do
- expect(page).to have_selector('img.avatar[itemprop="logo"]')
+ expect(page).to have_selector('[itemprop="logo"]')
expect(page).to have_selector('[itemprop="name"]', text: subgroup.name)
expect(page).to have_selector('[itemprop="description"]', text: subgroup.description)
page.within('[itemprop="owns"][itemtype="https://schema.org/SoftwareSourceCode"]') do
- expect(page).to have_selector('img.avatar[itemprop="image"]')
+ expect(page).to have_selector('[itemprop="image"]')
expect(page).to have_selector('[itemprop="name"]', text: subproject.name)
expect(page).to have_selector('[itemprop="description"]', text: subproject.description)
end
diff --git a/spec/features/groups/user_browse_projects_group_page_spec.rb b/spec/features/groups/user_browse_projects_group_page_spec.rb
index 999449a94b0..73fde7cafe5 100644
--- a/spec/features/groups/user_browse_projects_group_page_spec.rb
+++ b/spec/features/groups/user_browse_projects_group_page_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe 'User browse group projects page' do
visit projects_group_path(group)
expect(page).to have_link project.name
- expect(page).to have_xpath("//span[@class='badge badge-warning']", text: 'archived')
+ expect(page).to have_css('span.badge.badge-warning', text: 'archived')
end
end
end
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index 5f8079f0436..efde570512f 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe 'Group' do
fill_in 'group_path', with: user.username
wait_for_requests
- expect(page).to have_content('Group path is already taken')
+ expect(page).to have_content("Group path is already taken. We've suggested one that is available.")
end
it 'does not break after an invalid form submit' do
@@ -257,7 +257,7 @@ RSpec.describe 'Group' do
fill_in 'Group URL', with: subgroup.path
wait_for_requests
- expect(page).to have_content('Group path is already taken')
+ expect(page).to have_content("Group path is already taken. We've suggested one that is available.")
end
end
end
@@ -447,35 +447,6 @@ RSpec.describe 'Group' do
end
end
- describe 'new_repo experiment' do
- let_it_be(:group) { create_default(:group) }
-
- it 'when in candidate renders "project/repository"' do
- stub_experiments(new_repo: :candidate)
-
- visit group_path(group)
-
- find('li.header-new.dropdown').click
-
- page.within('li.header-new.dropdown') do
- expect(page).to have_selector('a', text: 'New project/repository')
- end
- end
-
- it 'when in control renders "project/repository"' do
- stub_experiments(new_repo: :control)
-
- visit group_path(group)
-
- find('li.header-new.dropdown').click
-
- page.within('li.header-new.dropdown') do
- expect(page).to have_selector('a', text: 'New project')
- expect(page).to have_no_selector('a', text: 'New project/repository')
- end
- end
- end
-
def remove_with_confirm(button_text, confirm_with)
click_button button_text
fill_in 'confirm_name_input', with: confirm_with
diff --git a/spec/features/help_pages_spec.rb b/spec/features/help_pages_spec.rb
index 90647305281..66ba4dc987c 100644
--- a/spec/features/help_pages_spec.rb
+++ b/spec/features/help_pages_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe 'Help Pages' do
end
it 'uses a custom support url' do
- expect(page).to have_link "See our website for getting help", href: "http://example.com/help"
+ expect(page).to have_link "See our website for help", href: "http://example.com/help"
end
end
end
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index a72cf033d61..93602033d73 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -272,54 +272,15 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
end
- context 'with invite_signup_page_interaction experiment on', :experiment do
- context 'with control experience' do
- before do
- stub_experiments(invite_signup_page_interaction: :control)
- end
-
- it 'lands on invite sign up page and tracks the accepted invite' do
- expect(experiment(:invite_signup_page_interaction)).to track(:view)
- .with_context(actor: group_invite)
- .on_next_instance
-
- visit invite_path(group_invite.raw_invite_token)
-
- expect(current_path).to eq(new_user_registration_path)
-
- expect(experiment(:invite_signup_page_interaction)).to track(:form_submission)
- .with_context(actor: group_invite)
- .on_next_instance
-
- fill_in_sign_up_form(new_user, 'Register')
-
- expect(current_path).to eq(users_sign_up_welcome_path)
- end
- end
-
- context 'with candidate experience on .com' do
- before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
- stub_experiments(invite_signup_page_interaction: :candidate)
- end
+ context 'when accepting an invite without an account' do
+ it 'lands on sign up page and then registers' do
+ visit invite_path(group_invite.raw_invite_token)
- it 'lands on invite sign up page and tracks the accepted invite' do
- expect(experiment(:invite_signup_page_interaction)).to track(:view)
- .with_context(actor: group_invite)
- .on_next_instance
+ expect(current_path).to eq(new_user_registration_path)
- visit invite_path(group_invite.raw_invite_token)
-
- expect(current_path).to eq(new_users_sign_up_invite_path)
-
- expect(experiment(:invite_signup_page_interaction)).to track(:form_submission)
- .with_context(actor: group_invite)
- .on_next_instance
+ fill_in_sign_up_form(new_user, 'Register')
- fill_in_sign_up_form(new_user, 'Continue')
-
- expect(current_path).to eq(users_sign_up_welcome_path)
- end
+ expect(current_path).to eq(users_sign_up_welcome_path)
end
end
diff --git a/spec/features/issuables/markdown_references/jira_spec.rb b/spec/features/issuables/markdown_references/jira_spec.rb
index a3a259e21a1..ae9c8d31c02 100644
--- a/spec/features/issuables/markdown_references/jira_spec.rb
+++ b/spec/features/issuables/markdown_references/jira_spec.rb
@@ -81,7 +81,7 @@ RSpec.describe "Jira", :js do
context "when both external and internal issues trackers are enabled for the actual project" do
before do
- create(:jira_service, project: actual_project)
+ create(:jira_integration, project: actual_project)
end
include_examples "correct references" do
@@ -94,7 +94,7 @@ RSpec.describe "Jira", :js do
let(:actual_project) { create(:project, :public, :repository, :issues_disabled) }
before do
- create(:jira_service, project: actual_project)
+ create(:jira_integration, project: actual_project)
end
include_examples "correct references" do
@@ -125,7 +125,7 @@ RSpec.describe "Jira", :js do
context "when both external and internal issues trackers are enabled for the actual project" do
before do
- create(:jira_service, project: actual_project)
+ create(:jira_integration, project: actual_project)
end
include_examples "correct references" do
@@ -138,7 +138,7 @@ RSpec.describe "Jira", :js do
let(:actual_project) { create(:project, :public, :repository, :issues_disabled) }
before do
- create(:jira_service, project: actual_project)
+ create(:jira_integration, project: actual_project)
end
include_examples "correct references" do
diff --git a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
index 381633b0fc9..e873ebb21c4 100644
--- a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
@@ -5,15 +5,16 @@ require 'spec_helper'
RSpec.describe 'Dropdown assignee', :js do
include FilteredSearchHelpers
- let!(:project) { create(:project) }
- let!(:user) { create(:user, name: 'administrator', username: 'root') }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user, name: 'administrator', username: 'root') }
+ let_it_be(:issue) { create(:issue, project: project) }
+
let(:js_dropdown_assignee) { '#js-dropdown-assignee' }
let(:filter_dropdown) { find("#{js_dropdown_assignee} .filter-dropdown") }
before do
project.add_maintainer(user)
sign_in(user)
- create(:issue, project: project)
visit project_issues_path(project)
end
diff --git a/spec/features/issues/filtered_search/dropdown_author_spec.rb b/spec/features/issues/filtered_search/dropdown_author_spec.rb
index 91c85825a17..893ffc6575b 100644
--- a/spec/features/issues/filtered_search/dropdown_author_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_author_spec.rb
@@ -5,15 +5,16 @@ require 'spec_helper'
RSpec.describe 'Dropdown author', :js do
include FilteredSearchHelpers
- let!(:project) { create(:project) }
- let!(:user) { create(:user, name: 'administrator', username: 'root') }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user, name: 'administrator', username: 'root') }
+ let_it_be(:issue) { create(:issue, project: project) }
+
let(:js_dropdown_author) { '#js-dropdown-author' }
let(:filter_dropdown) { find("#{js_dropdown_author} .filter-dropdown") }
before do
project.add_maintainer(user)
sign_in(user)
- create(:issue, project: project)
visit project_issues_path(project)
end
diff --git a/spec/features/issues/filtered_search/dropdown_base_spec.rb b/spec/features/issues/filtered_search/dropdown_base_spec.rb
index d730525cb8b..3a304515cab 100644
--- a/spec/features/issues/filtered_search/dropdown_base_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_base_spec.rb
@@ -5,8 +5,10 @@ require 'spec_helper'
RSpec.describe 'Dropdown base', :js do
include FilteredSearchHelpers
- let!(:project) { create(:project) }
- let!(:user) { create(:user, name: 'administrator', username: 'root') }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user, name: 'administrator', username: 'root') }
+ let_it_be(:issue) { create(:issue, project: project) }
+
let(:filtered_search) { find('.filtered-search') }
let(:js_dropdown_assignee) { '#js-dropdown-assignee' }
let(:filter_dropdown) { find("#{js_dropdown_assignee} .filter-dropdown") }
@@ -18,7 +20,6 @@ RSpec.describe 'Dropdown base', :js do
before do
project.add_maintainer(user)
sign_in(user)
- create(:issue, project: project)
visit project_issues_path(project)
end
diff --git a/spec/features/issues/filtered_search/dropdown_emoji_spec.rb b/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
index c2c933f8a86..f5ab53d5052 100644
--- a/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
@@ -5,10 +5,11 @@ require 'spec_helper'
RSpec.describe 'Dropdown emoji', :js do
include FilteredSearchHelpers
- let!(:project) { create(:project, :public) }
- let!(:user) { create(:user, name: 'administrator', username: 'root') }
- let!(:issue) { create(:issue, project: project) }
- let!(:award_emoji_star) { create(:award_emoji, name: 'star', user: user, awardable: issue) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user, name: 'administrator', username: 'root') }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:award_emoji_star) { create(:award_emoji, name: 'star', user: user, awardable: issue) }
+
let(:filtered_search) { find('.filtered-search') }
let(:js_dropdown_emoji) { '#js-dropdown-my-reaction' }
let(:filter_dropdown) { find("#{js_dropdown_emoji} .filter-dropdown") }
diff --git a/spec/features/issues/filtered_search/dropdown_hint_spec.rb b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
index 9edc6e0b593..9cc58a33bb7 100644
--- a/spec/features/issues/filtered_search/dropdown_hint_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
@@ -5,8 +5,10 @@ require 'spec_helper'
RSpec.describe 'Dropdown hint', :js do
include FilteredSearchHelpers
- let!(:project) { create(:project, :public) }
- let!(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
let(:filtered_search) { find('.filtered-search') }
let(:js_dropdown_hint) { '#js-dropdown-hint' }
let(:js_dropdown_operator) { '#js-dropdown-operator' }
@@ -21,8 +23,6 @@ RSpec.describe 'Dropdown hint', :js do
before do
project.add_maintainer(user)
- create(:issue, project: project)
- create(:merge_request, source_project: project, target_project: project)
end
context 'when user not logged in' do
diff --git a/spec/features/issues/filtered_search/dropdown_label_spec.rb b/spec/features/issues/filtered_search/dropdown_label_spec.rb
index c0d5fe0d860..1b48810f716 100644
--- a/spec/features/issues/filtered_search/dropdown_label_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_label_spec.rb
@@ -5,22 +5,23 @@ require 'spec_helper'
RSpec.describe 'Dropdown label', :js do
include FilteredSearchHelpers
- let(:project) { create(:project) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:label) { create(:label, project: project, title: 'bug-label') }
+
let(:filtered_search) { find('.filtered-search') }
let(:filter_dropdown) { find('#js-dropdown-label .filter-dropdown') }
before do
project.add_maintainer(user)
sign_in(user)
- create(:issue, project: project)
visit project_issues_path(project)
end
describe 'behavior' do
it 'loads all the labels when opened' do
- create(:label, project: project, title: 'bug-label')
filtered_search.set('label:=')
expect_filtered_search_dropdown_results(filter_dropdown, 1)
diff --git a/spec/features/issues/filtered_search/dropdown_milestone_spec.rb b/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
index 68afd973f1d..859d1e4a5e5 100644
--- a/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
@@ -5,10 +5,11 @@ require 'spec_helper'
RSpec.describe 'Dropdown milestone', :js do
include FilteredSearchHelpers
- let!(:project) { create(:project) }
- let!(:user) { create(:user) }
- let!(:milestone) { create(:milestone, title: 'v1.0', project: project) }
- let!(:uppercase_milestone) { create(:milestone, title: 'CAP_MILESTONE', project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:milestone) { create(:milestone, title: 'v1.0', project: project) }
+ let_it_be(:uppercase_milestone) { create(:milestone, title: 'CAP_MILESTONE', project: project) }
+ let_it_be(:issue) { create(:issue, project: project) }
let(:filtered_search) { find('.filtered-search') }
let(:filter_dropdown) { find('#js-dropdown-milestone .filter-dropdown') }
@@ -16,7 +17,6 @@ RSpec.describe 'Dropdown milestone', :js do
before do
project.add_maintainer(user)
sign_in(user)
- create(:issue, project: project)
visit project_issues_path(project)
end
diff --git a/spec/features/issues/filtered_search/dropdown_release_spec.rb b/spec/features/issues/filtered_search/dropdown_release_spec.rb
index daf686c2850..2210a26c251 100644
--- a/spec/features/issues/filtered_search/dropdown_release_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_release_spec.rb
@@ -5,10 +5,11 @@ require 'spec_helper'
RSpec.describe 'Dropdown release', :js do
include FilteredSearchHelpers
- let!(:project) { create(:project, :repository) }
- let!(:user) { create(:user) }
- let!(:release) { create(:release, tag: 'v1.0', project: project) }
- let!(:crazy_release) { create(:release, tag: '☺!/"#%&\'{}+,-.<>;=@]_`{|}🚀', project: project) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:release) { create(:release, tag: 'v1.0', project: project) }
+ let_it_be(:crazy_release) { create(:release, tag: '☺!/"#%&\'{}+,-.<>;=@]_`{|}🚀', project: project) }
+ let_it_be(:issue) { create(:issue, project: project) }
let(:filtered_search) { find('.filtered-search') }
let(:filter_dropdown) { find('#js-dropdown-release .filter-dropdown') }
@@ -16,7 +17,6 @@ RSpec.describe 'Dropdown release', :js do
before do
project.add_maintainer(user)
sign_in(user)
- create(:issue, project: project)
visit project_issues_path(project)
end
diff --git a/spec/features/issues/filtered_search/recent_searches_spec.rb b/spec/features/issues/filtered_search/recent_searches_spec.rb
index 61c1e35f3c8..3ddcbf1bd01 100644
--- a/spec/features/issues/filtered_search/recent_searches_spec.rb
+++ b/spec/features/issues/filtered_search/recent_searches_spec.rb
@@ -6,14 +6,15 @@ RSpec.describe 'Recent searches', :js do
include FilteredSearchHelpers
include MobileHelpers
- let(:project_1) { create(:project, :public) }
- let(:project_2) { create(:project, :public) }
+ let_it_be(:project_1) { create(:project, :public) }
+ let_it_be(:project_2) { create(:project, :public) }
+ let_it_be(:issue_1) { create(:issue, project: project_1) }
+ let_it_be(:issue_2) { create(:issue, project: project_2) }
+
let(:project_1_local_storage_key) { "#{project_1.full_path}-issue-recent-searches" }
before do
Capybara.ignore_hidden_elements = false
- create(:issue, project: project_1)
- create(:issue, project: project_2)
# Visit any fast-loading page so we can clear local storage without a DOM exception
visit '/404'
diff --git a/spec/features/issues/filtered_search/search_bar_spec.rb b/spec/features/issues/filtered_search/search_bar_spec.rb
index 2a094281133..1efcc329e32 100644
--- a/spec/features/issues/filtered_search/search_bar_spec.rb
+++ b/spec/features/issues/filtered_search/search_bar_spec.rb
@@ -5,14 +5,15 @@ require 'spec_helper'
RSpec.describe 'Search bar', :js do
include FilteredSearchHelpers
- let!(:project) { create(:project) }
- let!(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
let(:filtered_search) { find('.filtered-search') }
before do
project.add_maintainer(user)
sign_in(user)
- create(:issue, project: project)
visit project_issues_path(project)
end
diff --git a/spec/features/issues/filtered_search/visual_tokens_spec.rb b/spec/features/issues/filtered_search/visual_tokens_spec.rb
index c585d7f6194..644d7cc4611 100644
--- a/spec/features/issues/filtered_search/visual_tokens_spec.rb
+++ b/spec/features/issues/filtered_search/visual_tokens_spec.rb
@@ -5,13 +5,14 @@ require 'spec_helper'
RSpec.describe 'Visual tokens', :js do
include FilteredSearchHelpers
- let!(:project) { create(:project) }
- let!(:user) { create(:user, name: 'administrator', username: 'root') }
- let!(:user_rock) { create(:user, name: 'The Rock', username: 'rock') }
- let!(:milestone_nine) { create(:milestone, title: '9.0', project: project) }
- let!(:milestone_ten) { create(:milestone, title: '10.0', project: project) }
- let!(:label) { create(:label, project: project, title: 'abc') }
- let!(:cc_label) { create(:label, project: project, title: 'Community Contribution') }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user, name: 'administrator', username: 'root') }
+ let_it_be(:user_rock) { create(:user, name: 'The Rock', username: 'rock') }
+ let_it_be(:milestone_nine) { create(:milestone, title: '9.0', project: project) }
+ let_it_be(:milestone_ten) { create(:milestone, title: '10.0', project: project) }
+ let_it_be(:label) { create(:label, project: project, title: 'abc') }
+ let_it_be(:cc_label) { create(:label, project: project, title: 'Community Contribution') }
+ let_it_be(:issue) { create(:issue, project: project) }
let(:filtered_search) { find('.filtered-search') }
let(:filter_author_dropdown) { find("#js-dropdown-author .filter-dropdown") }
@@ -27,7 +28,6 @@ RSpec.describe 'Visual tokens', :js do
project.add_user(user, :maintainer)
project.add_user(user_rock, :maintainer)
sign_in(user)
- create(:issue, project: project)
set_cookie('sidebar_collapsed', 'true')
diff --git a/spec/features/issues/incident_issue_spec.rb b/spec/features/issues/incident_issue_spec.rb
index d004ee85dd8..3033a138551 100644
--- a/spec/features/issues/incident_issue_spec.rb
+++ b/spec/features/issues/incident_issue_spec.rb
@@ -3,20 +3,57 @@
require 'spec_helper'
RSpec.describe 'Incident Detail', :js do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:payload) do
+ {
+ 'title' => 'Alert title',
+ 'start_time' => '2020-04-27T10:10:22.265949279Z',
+ 'custom' => {
+ 'alert' => {
+ 'fields' => %w[one two]
+ }
+ },
+ 'yet' => {
+ 'another' => 73
+ }
+ }
+ end
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:started_at) { Time.now.rfc3339 }
+ let_it_be(:alert) { create(:alert_management_alert, project: project, payload: payload, started_at: started_at) }
+ let_it_be(:incident) { create(:incident, project: project, description: 'hello', alert_management_alert: alert) }
+
context 'when user displays the incident' do
- it 'shows the incident tabs' do
- project = create(:project, :public)
- incident = create(:incident, project: project, description: 'hello')
+ before do
+ project.add_developer(user)
+ sign_in(user)
visit project_issue_path(project, incident)
wait_for_requests
+ end
+ it 'shows incident and alert data' do
page.within('.issuable-details') do
incident_tabs = find('[data-testid="incident-tabs"]')
- expect(find('h2')).to have_content(incident.title)
- expect(incident_tabs).to have_content('Summary')
- expect(incident_tabs).to have_content(incident.description)
+ aggregate_failures 'shows title and Summary tab' do
+ expect(find('h2')).to have_content(incident.title)
+ expect(incident_tabs).to have_content('Summary')
+ expect(incident_tabs).to have_content(incident.description)
+ end
+
+ aggregate_failures 'shows the incident highlight bar' do
+ expect(incident_tabs).to have_content('Alert events: 1')
+ expect(incident_tabs).to have_content('Original alert: #1')
+ end
+
+ aggregate_failures 'shows the Alert details tab' do
+ click_link 'Alert details'
+
+ expect(incident_tabs).to have_content('"title": "Alert title"')
+ expect(incident_tabs).to have_content('"yet.another": 73')
+ end
end
end
end
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index d828b1c1f0c..0e2ef5cc6eb 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -259,37 +259,35 @@ RSpec.describe 'Issue Sidebar' do
end
context 'editing issue milestone', :js do
- let_it_be(:milestone_expired) { create(:milestone, project: project, due_date: 5.days.ago) }
+ let_it_be(:milestone_expired) { create(:milestone, project: project, title: 'Foo - expired', due_date: 5.days.ago) }
let_it_be(:milestone_no_duedate) { create(:milestone, project: project, title: 'Foo - No due date') }
let_it_be(:milestone1) { create(:milestone, project: project, title: 'Milestone-1', due_date: 20.days.from_now) }
let_it_be(:milestone2) { create(:milestone, project: project, title: 'Milestone-2', due_date: 15.days.from_now) }
let_it_be(:milestone3) { create(:milestone, project: project, title: 'Milestone-3', due_date: 10.days.from_now) }
before do
- page.within('[data-testid="milestone_title"]') do
- click_on 'Edit'
+ page.within('.block.milestone') do
+ click_button 'Edit'
end
+
+ wait_for_all_requests
end
- it 'shows milestons list in the dropdown' do
- page.within('.block.milestone .dropdown-content') do
+ it 'shows milestones list in the dropdown' do
+ page.within('.block.milestone') do
# 5 milestones + "No milestone" = 6 items
- expect(page.find('ul')).to have_selector('li[data-milestone-id]', count: 6)
+ expect(page.find('.gl-new-dropdown-contents')).to have_selector('li.gl-new-dropdown-item', count: 6)
end
end
- it 'shows expired milestone at the bottom of the list' do
- page.within('.block.milestone .dropdown-content ul') do
+ it 'shows expired milestone at the bottom of the list and milestone due earliest at the top of the list', :aggregate_failures do
+ page.within('.block.milestone .gl-new-dropdown-contents') do
expect(page.find('li:last-child')).to have_content milestone_expired.title
- end
- end
- it 'shows milestone due earliest at the top of the list' do
- page.within('.block.milestone .dropdown-content ul') do
- expect(page.all('li[data-milestone-id]')[1]).to have_content milestone3.title
- expect(page.all('li[data-milestone-id]')[2]).to have_content milestone2.title
- expect(page.all('li[data-milestone-id]')[3]).to have_content milestone1.title
- expect(page.all('li[data-milestone-id]')[4]).to have_content milestone_no_duedate.title
+ expect(page.all('li.gl-new-dropdown-item')[1]).to have_content milestone3.title
+ expect(page.all('li.gl-new-dropdown-item')[2]).to have_content milestone2.title
+ expect(page.all('li.gl-new-dropdown-item')[3]).to have_content milestone1.title
+ expect(page.all('li.gl-new-dropdown-item')[4]).to have_content milestone_no_duedate.title
end
end
end
diff --git a/spec/features/issues/user_bulk_edits_issues_spec.rb b/spec/features/issues/user_bulk_edits_issues_spec.rb
index e34c16e27ba..44c23813e3c 100644
--- a/spec/features/issues/user_bulk_edits_issues_spec.rb
+++ b/spec/features/issues/user_bulk_edits_issues_spec.rb
@@ -13,26 +13,26 @@ RSpec.describe 'Multiple issue updating from issues#index', :js do
end
context 'status' do
- it 'sets to closed' do
+ it 'sets to closed', :js do
visit project_issues_path(project)
click_button 'Edit issues'
check 'Select all'
click_button 'Select status'
- click_link 'Closed'
+ click_button 'Closed'
click_update_issues_button
expect(page).to have_selector('.issue', count: 0)
end
- it 'sets to open' do
+ it 'sets to open', :js do
create_closed
visit project_issues_path(project, state: 'closed')
click_button 'Edit issues'
check 'Select all'
click_button 'Select status'
- click_link 'Open'
+ click_button 'Open'
click_update_issues_button
expect(page).to have_selector('.issue', count: 0)
diff --git a/spec/features/issues/user_edits_issue_spec.rb b/spec/features/issues/user_edits_issue_spec.rb
index c59cc99467c..e4bba706453 100644
--- a/spec/features/issues/user_edits_issue_spec.rb
+++ b/spec/features/issues/user_edits_issue_spec.rb
@@ -333,37 +333,40 @@ RSpec.describe "Issues > User edits issue", :js do
describe 'update milestone' do
context 'by authorized user' do
- it 'allows user to select unassigned' do
+ it 'allows user to select no milestone' do
visit project_issue_path(project, issue)
+ wait_for_requests
- page.within('.milestone') do
- expect(page).to have_content "None"
- end
+ page.within('.block.milestone') do
+ expect(page).to have_content 'None'
+
+ click_button 'Edit'
+ wait_for_requests
+ click_button 'No milestone'
+ wait_for_requests
- find('.block.milestone .edit-link').click
- sleep 2 # wait for ajax stuff to complete
- first('.dropdown-content li').click
- sleep 2
- page.within('.milestone') do
expect(page).to have_content 'None'
end
end
it 'allows user to de-select milestone' do
visit project_issue_path(project, issue)
+ wait_for_requests
page.within('.milestone') do
- click_link 'Edit'
- click_link milestone.title
+ click_button 'Edit'
+ wait_for_requests
+ click_button milestone.title
- page.within '.value' do
+ page.within '[data-testid="select-milestone"]' do
expect(page).to have_content milestone.title
end
- click_link 'Edit'
- click_link milestone.title
+ click_button 'Edit'
+ wait_for_requests
+ click_button 'No milestone'
- page.within '.value' do
+ page.within '[data-testid="select-milestone"]' do
expect(page).to have_content 'None'
end
end
@@ -371,16 +374,17 @@ RSpec.describe "Issues > User edits issue", :js do
it 'allows user to search milestone' do
visit project_issue_path(project_with_milestones, issue_with_milestones)
+ wait_for_requests
page.within('.milestone') do
- click_link 'Edit'
+ click_button 'Edit'
wait_for_requests
# We need to enclose search string in quotes for exact match as all the milestone titles
# within tests are prefixed with `My title`.
- find('.dropdown-input-field', visible: true).send_keys "\"#{milestones[0].title}\""
+ find('.gl-form-input', visible: true).send_keys "\"#{milestones[0].title}\""
wait_for_requests
- page.within '.dropdown-content' do
+ page.within '.gl-new-dropdown-contents' do
expect(page).to have_content milestones[0].title
end
end
diff --git a/spec/features/issues/user_interacts_with_awards_spec.rb b/spec/features/issues/user_interacts_with_awards_spec.rb
index 2921eea7641..2e52a8d862e 100644
--- a/spec/features/issues/user_interacts_with_awards_spec.rb
+++ b/spec/features/issues/user_interacts_with_awards_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe 'User interacts with awards' do
page.within('.awards') do
expect(page).to have_selector('[data-testid="award-button"]')
expect(page.find('[data-testid="award-button"].selected .js-counter')).to have_content('1')
- expect(page).to have_css('[data-testid="award-button"].selected[title="You"]')
+ expect(page).to have_css('[data-testid="award-button"].selected[title="You reacted with :8ball:"]')
expect do
page.find('[data-testid="award-button"].selected').click
diff --git a/spec/features/issues/user_sees_breadcrumb_links_spec.rb b/spec/features/issues/user_sees_breadcrumb_links_spec.rb
index f5793758a9b..9f8cd2a769d 100644
--- a/spec/features/issues/user_sees_breadcrumb_links_spec.rb
+++ b/spec/features/issues/user_sees_breadcrumb_links_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'New issue breadcrumb' do
let_it_be(:project, reload: true) { create(:project) }
+
let(:user) { project.creator }
before do
diff --git a/spec/features/issues/user_sorts_issues_spec.rb b/spec/features/issues/user_sorts_issues_spec.rb
index c161e1deb83..48297e9049e 100644
--- a/spec/features/issues/user_sorts_issues_spec.rb
+++ b/spec/features/issues/user_sorts_issues_spec.rb
@@ -22,11 +22,11 @@ RSpec.describe "User sorts issues" do
create(:award_emoji, :upvote, awardable: issue2)
sign_in(user)
-
- visit(project_issues_path(project))
end
it 'keeps the sort option' do
+ visit(project_issues_path(project))
+
find('.filter-dropdown-container .dropdown').click
page.within('ul.dropdown-menu.dropdown-menu-right li') do
@@ -47,11 +47,10 @@ RSpec.describe "User sorts issues" do
end
it 'sorts by popularity', :js do
- find('.filter-dropdown-container .dropdown').click
+ visit(project_issues_path(project))
- page.within('ul.dropdown-menu.dropdown-menu-right li') do
- click_link("Popularity")
- end
+ click_button 'Created date'
+ click_on 'Popularity'
page.within(".issues-list") do
page.within("li.issue:nth-child(1)") do
@@ -129,7 +128,7 @@ RSpec.describe "User sorts issues" do
it 'filters by none' do
visit project_issues_path(project, due_date: Issue::NoDueDate.name)
- page.within '.issues-holder' do
+ page.within '.issues-list' do
expect(page).not_to have_content('foo')
expect(page).not_to have_content('bar')
expect(page).to have_content('baz')
@@ -139,7 +138,7 @@ RSpec.describe "User sorts issues" do
it 'filters by any' do
visit project_issues_path(project, due_date: Issue::AnyDueDate.name)
- page.within '.issues-holder' do
+ page.within '.issues-list' do
expect(page).to have_content('foo')
expect(page).to have_content('bar')
expect(page).to have_content('baz')
@@ -153,7 +152,7 @@ RSpec.describe "User sorts issues" do
visit project_issues_path(project, due_date: Issue::DueThisWeek.name)
- page.within '.issues-holder' do
+ page.within '.issues-list' do
expect(page).to have_content('foo')
expect(page).to have_content('bar')
expect(page).not_to have_content('baz')
@@ -167,7 +166,7 @@ RSpec.describe "User sorts issues" do
visit project_issues_path(project, due_date: Issue::DueThisMonth.name)
- page.within '.issues-holder' do
+ page.within '.issues-list' do
expect(page).to have_content('foo')
expect(page).to have_content('bar')
expect(page).not_to have_content('baz')
@@ -181,7 +180,7 @@ RSpec.describe "User sorts issues" do
visit project_issues_path(project, due_date: Issue::Overdue.name)
- page.within '.issues-holder' do
+ page.within '.issues-list' do
expect(page).not_to have_content('foo')
expect(page).not_to have_content('bar')
expect(page).to have_content('baz')
@@ -195,7 +194,7 @@ RSpec.describe "User sorts issues" do
visit project_issues_path(project, due_date: Issue::DueNextMonthAndPreviousTwoWeeks.name)
- page.within '.issues-holder' do
+ page.within '.issues-list' do
expect(page).not_to have_content('foo')
expect(page).not_to have_content('bar')
expect(page).to have_content('baz')
diff --git a/spec/features/markdown/metrics_spec.rb b/spec/features/markdown/metrics_spec.rb
index f9781f6c702..44354c9df47 100644
--- a/spec/features/markdown/metrics_spec.rb
+++ b/spec/features/markdown/metrics_spec.rb
@@ -176,10 +176,11 @@ RSpec.describe 'Metrics rendering', :js, :kubeclient, :use_clean_rails_memory_st
create(:clusters_integrations_prometheus, cluster: cluster)
stub_kubeclient_discover(cluster.platform.api_url)
stub_prometheus_request(/prometheus-prometheus-server/, body: prometheus_values_body)
- stub_prometheus_request(/prometheus\/api\/v1/, body: prometheus_values_body)
+ stub_prometheus_request(%r{prometheus/api/v1}, body: prometheus_values_body)
end
let_it_be(:cluster) { create(:cluster, :provided_by_gcp, :project, projects: [project], user: user) }
+
let(:params) { [project.namespace.path, project.path, cluster.id] }
let(:query_params) { { group: 'Cluster Health', title: 'CPU Usage', y_label: 'CPU (cores)' } }
let(:metrics_url) { urls.namespace_project_cluster_url(*params, **query_params) }
diff --git a/spec/features/merge_request/batch_comments_spec.rb b/spec/features/merge_request/batch_comments_spec.rb
index 5b11d9cb919..c646698219b 100644
--- a/spec/features/merge_request/batch_comments_spec.rb
+++ b/spec/features/merge_request/batch_comments_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe 'Merge request > Batch comments', :js do
end
before do
+ stub_feature_flags(paginated_notes: false)
+
project.add_maintainer(user)
sign_in(user)
@@ -24,7 +26,7 @@ RSpec.describe 'Merge request > Batch comments', :js do
end
it 'has review bar' do
- expect(page).to have_css('.review-bar-component', visible: false)
+ expect(page).to have_selector('[data-testid="review_bar_component"]', visible: false)
end
it 'adds draft note' do
@@ -32,7 +34,7 @@ RSpec.describe 'Merge request > Batch comments', :js do
expect(find('.draft-note-component')).to have_content('Line is wrong')
- expect(page).to have_css('.review-bar-component')
+ expect(page).to have_selector('[data-testid="review_bar_component"]')
expect(find('.review-bar-content .btn-confirm')).to have_content('1')
end
@@ -259,8 +261,8 @@ RSpec.describe 'Merge request > Batch comments', :js do
end
def write_parallel_comment(line, **params)
- find("td[id='#{line}']").hover
- find(".is-over button").click
+ find("div[id='#{line}']").hover
+ find(".js-add-diff-note-button").click
write_comment(selector: "form[data-line-code='#{line}']", **params)
end
diff --git a/spec/features/merge_request/user_comments_on_diff_spec.rb b/spec/features/merge_request/user_comments_on_diff_spec.rb
index 0fd140a00bd..54c3fe738d2 100644
--- a/spec/features/merge_request/user_comments_on_diff_spec.rb
+++ b/spec/features/merge_request/user_comments_on_diff_spec.rb
@@ -132,7 +132,7 @@ RSpec.describe 'User comments on a diff', :js do
# In `files/ruby/popen.rb`
it 'allows comments for changes involving both sides' do
# click +15, select -13 add and verify comment
- click_diff_line(find('div[data-path="files/ruby/popen.rb"] .new_line a[data-linenumber="15"]').find(:xpath, '../..'), 'right')
+ click_diff_line(find('div[data-path="files/ruby/popen.rb"] .right-side a[data-linenumber="15"]').find(:xpath, '../../..'), 'right')
add_comment('-13', '+15')
end
@@ -141,7 +141,7 @@ RSpec.describe 'User comments on a diff', :js do
page.within('[data-path="files/ruby/popen.rb"]') do
all('.js-unfold-all')[0].click
end
- click_diff_line(find('div[data-path="files/ruby/popen.rb"] .old_line a[data-linenumber="9"]').find(:xpath, '../..'), 'left')
+ click_diff_line(find('div[data-path="files/ruby/popen.rb"] .left-side a[data-linenumber="9"]').find(:xpath, '../..'), 'left')
add_comment('1', '-9')
end
@@ -150,7 +150,7 @@ RSpec.describe 'User comments on a diff', :js do
page.within('[data-path="files/ruby/popen.rb"]') do
all('.js-unfold-all')[1].click
end
- click_diff_line(find('div[data-path="files/ruby/popen.rb"] .old_line a[data-linenumber="21"]').find(:xpath, '../..'), 'left')
+ click_diff_line(find('div[data-path="files/ruby/popen.rb"] .left-side a[data-linenumber="21"]').find(:xpath, '../..'), 'left')
add_comment('18', '21')
end
@@ -159,7 +159,7 @@ RSpec.describe 'User comments on a diff', :js do
page.within('[data-path="files/ruby/popen.rb"]') do
all('.js-unfold-down')[1].click
end
- click_diff_line(find('div[data-path="files/ruby/popen.rb"] .old_line a[data-linenumber="30"]').find(:xpath, '../..'), 'left')
+ click_diff_line(find('div[data-path="files/ruby/popen.rb"] .left-side a[data-linenumber="30"]').find(:xpath, '../..'), 'left')
add_comment('+28', '37')
end
end
diff --git a/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
index ac0c66524f0..3665ad91dd6 100644
--- a/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
+++ b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
@@ -16,14 +16,14 @@ RSpec.describe 'Batch diffs', :js do
wait_for_requests
# Add discussion to first line of first file
- click_diff_line(find('.diff-file.file-holder:first-of-type tr.line_holder.new:first-of-type'))
+ click_diff_line(find('.diff-file.file-holder:first-of-type .line_holder .left-side:first-of-type'))
page.within('.js-discussion-note-form') do
fill_in('note_note', with: 'First Line Comment')
click_button('Add comment now')
end
# Add discussion to first line of last file
- click_diff_line(find('.diff-file.file-holder:last-of-type tr.line_holder.new:first-of-type'))
+ click_diff_line(find('.diff-file.file-holder:last-of-type .line_holder .left-side:first-of-type'))
page.within('.js-discussion-note-form') do
fill_in('note_note', with: 'Last Line Comment')
click_button('Add comment now')
diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb
index 163ce10132e..c339a7d9976 100644
--- a/spec/features/merge_request/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'Merge request > User posts diff notes', :js do
let(:user) { project.creator }
let(:comment_button_class) { '.add-diff-note' }
let(:notes_holder_input_class) { 'js-temp-notes-holder' }
- let(:notes_holder_input_xpath) { './following-sibling::*[contains(concat(" ", @class, " "), " notes_holder ")]' }
+ let(:notes_holder_input_xpath) { '..//following-sibling::*[contains(concat(" ", @class, " "), " notes_holder ")]' }
let(:test_note_comment) { 'this is a test note!' }
before do
@@ -27,7 +27,7 @@ RSpec.describe 'Merge request > User posts diff notes', :js do
context 'with an old line on the left and no line on the right' do
it 'allows commenting on the left side' do
- should_allow_commenting(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_23_22"]').find(:xpath, '..'), 'left')
+ should_allow_commenting(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_23_22"]'), 'left')
end
it 'does not allow commenting on the right side' do
@@ -67,7 +67,7 @@ RSpec.describe 'Merge request > User posts diff notes', :js do
context 'with a match line' do
it 'does not allow commenting' do
- line_holder = find('.match', match: :first).find(:xpath, '..')
+ line_holder = find('.match', match: :first)
match_should_not_allow_commenting(line_holder)
end
end
@@ -81,17 +81,13 @@ RSpec.describe 'Merge request > User posts diff notes', :js do
wait_for_requests
end
- # The first `.js-unfold` unfolds upwards, therefore the first
- # `.line_holder` will be an unfolded line.
- let(:line_holder) { first('#a5cc2925ca8258af241be7e5b0381edf30266302 .line_holder') }
-
it 'allows commenting on the left side' do
- should_allow_commenting(line_holder, 'left')
+ should_allow_commenting(first('#a5cc2925ca8258af241be7e5b0381edf30266302 .line_holder [data-testid="left-side"]'))
end
it 'allows commenting on the right side' do
# Automatically shifts comment box to left side.
- should_allow_commenting(line_holder, 'right')
+ should_allow_commenting(first('#a5cc2925ca8258af241be7e5b0381edf30266302 .line_holder [data-testid="right-side"]'))
end
end
end
@@ -149,7 +145,7 @@ RSpec.describe 'Merge request > User posts diff notes', :js do
# The first `.js-unfold` unfolds upwards, therefore the first
# `.line_holder` will be an unfolded line.
- let(:line_holder) { first('.line_holder[id="a5cc2925ca8258af241be7e5b0381edf30266302_1_1"]') }
+ let(:line_holder) { first('[id="a5cc2925ca8258af241be7e5b0381edf30266302_1_1"]') }
it 'allows commenting' do
should_allow_commenting line_holder
diff --git a/spec/features/merge_request/user_resolves_conflicts_spec.rb b/spec/features/merge_request/user_resolves_conflicts_spec.rb
index d9e3bfd6a9c..03ab42aaccd 100644
--- a/spec/features/merge_request/user_resolves_conflicts_spec.rb
+++ b/spec/features/merge_request/user_resolves_conflicts_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Merge request > User resolves conflicts', :js do
- include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
+ include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
let(:project) { create(:project, :repository) }
let(:user) { project.creator }
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index a85700fc721..2f7758143a1 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -373,7 +373,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
wait_for_requests
page.within('.mr-widget-body') do
- expect(page).to have_content('Fast-forward merge is not possible')
+ expect(page).to have_content('Merge Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally.')
end
end
end
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index 2d8fe10b987..a6c8b10f5ca 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -137,7 +137,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
check_head_pipeline(expected_project: parent_project)
end
- it 'does not create a pipeline in the parent project when user cancels the action' do
+ it 'does not create a pipeline in the parent project when user cancels the action', :clean_gitlab_redis_cache, :clean_gitlab_redis_shared_state do
visit project_merge_request_path(parent_project, merge_request)
create_merge_request_pipeline
diff --git a/spec/features/merge_request/user_sees_versions_spec.rb b/spec/features/merge_request/user_sees_versions_spec.rb
index 34ae082750b..5abf4e2f5ad 100644
--- a/spec/features/merge_request/user_sees_versions_spec.rb
+++ b/spec/features/merge_request/user_sees_versions_spec.rb
@@ -17,8 +17,6 @@ RSpec.describe 'Merge request > User sees versions', :js do
let!(:params) { {} }
before do
- stub_feature_flags(diffs_gradual_load: false)
-
project.add_maintainer(user)
sign_in(user)
visit diffs_project_merge_request_path(project, merge_request, params)
@@ -30,8 +28,8 @@ RSpec.describe 'Merge request > User sees versions', :js do
line_code = "#{file_id}_#{line_code}"
page.within(diff_file_selector) do
- find(".line_holder[id='#{line_code}'] td:nth-of-type(1)").hover
- find(".line_holder[id='#{line_code}'] button").click
+ first("[id='#{line_code}']").hover
+ first("[id='#{line_code}'] [role='button']").click
page.within("form[data-line-code='#{line_code}']") do
fill_in "note[note]", with: comment
diff --git a/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb b/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb
index b72ac071ecb..19774accaaf 100644
--- a/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb
+++ b/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe 'Merge request > User toggles whitespace changes', :js do
end
describe 'clicking "Hide whitespace changes" button' do
- it 'toggles the "Hide whitespace changes" button' do
+ it 'toggles the "Hide whitespace changes" button', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/333793' do
find('[data-testid="show-whitespace"]').click
visit diffs_project_merge_request_path(project, merge_request)
diff --git a/spec/features/merge_request/user_views_diffs_spec.rb b/spec/features/merge_request/user_views_diffs_spec.rb
index d5061657c59..09dfe41a718 100644
--- a/spec/features/merge_request/user_views_diffs_spec.rb
+++ b/spec/features/merge_request/user_views_diffs_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'User views diffs', :js do
page.within('.file-holder[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd"]') do
expect(find('.text-file')).to have_content('fileutils')
- expect(page).to have_selector('.new_line [data-linenumber="1"]', count: 1)
+ expect(page).to have_selector('[data-interop-type="new"] [data-linenumber="1"]')
end
end
@@ -32,8 +32,8 @@ RSpec.describe 'User views diffs', :js do
page.within('.file-holder[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd"]') do
all('.js-unfold-all')[1].click
- expect(page).to have_selector('.new_line [data-linenumber="24"]', count: 1)
- expect(page).not_to have_selector('.new_line [data-linenumber="1"]')
+ expect(page).to have_selector('[data-interop-type="new"] [data-linenumber="24"]', count: 1)
+ expect(page).not_to have_selector('[data-interop-type="new"] [data-linenumber="1"]')
end
end
diff --git a/spec/features/merge_requests/user_lists_merge_requests_spec.rb b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
index 6b8dcd7dbb6..ab6242784fe 100644
--- a/spec/features/merge_requests/user_lists_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
@@ -23,7 +23,9 @@ RSpec.describe 'Merge requests > User lists merge requests' do
milestone: create(:milestone, project: project, due_date: '2013-12-11'),
created_at: 1.minute.ago,
updated_at: 1.minute.ago)
- create(:merge_request,
+ @fix.metrics.update_column(:merged_at, 10.seconds.ago)
+
+ @markdown = create(:merge_request,
title: 'markdown',
source_project: project,
source_branch: 'markdown',
@@ -32,12 +34,15 @@ RSpec.describe 'Merge requests > User lists merge requests' do
milestone: create(:milestone, project: project, due_date: '2013-12-12'),
created_at: 2.minutes.ago,
updated_at: 2.minutes.ago)
- create(:merge_request,
+ @markdown.metrics.update_column(:merged_at, 50.seconds.ago)
+
+ @merge_test = create(:merge_request,
title: 'merge-test',
source_project: project,
source_branch: 'merge-test',
created_at: 3.minutes.ago,
updated_at: 10.seconds.ago)
+ @merge_test.metrics.update_column(:merged_at, 10.seconds.ago)
end
context 'merge request reviewers' do
@@ -102,6 +107,13 @@ RSpec.describe 'Merge requests > User lists merge requests' do
expect(count_merge_requests).to eq(3)
end
+ it 'sorts by merged at' do
+ visit_merge_requests(project, sort: sort_value_merged_date)
+
+ expect(first_merge_request).to include('markdown')
+ expect(count_merge_requests).to eq(3)
+ end
+
it 'filters on one label and sorts by due date' do
label = create(:label, project: project)
create(:label_link, label: label, target: @fix)
diff --git a/spec/features/merge_requests/user_mass_updates_spec.rb b/spec/features/merge_requests/user_mass_updates_spec.rb
index 0fe69c5ca5b..46c12784ea8 100644
--- a/spec/features/merge_requests/user_mass_updates_spec.rb
+++ b/spec/features/merge_requests/user_mass_updates_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'Merge requests > User mass updates', :js do
visit project_merge_requests_path(project)
end
- it 'closes merge request' do
+ it 'closes merge request', :js do
change_status('Closed')
expect(page).to have_selector('.merge-request', count: 0)
@@ -31,7 +31,7 @@ RSpec.describe 'Merge requests > User mass updates', :js do
visit project_merge_requests_path(project, state: 'closed')
end
- it 'reopens merge request' do
+ it 'reopens merge request', :js do
change_status('Open')
expect(page).to have_selector('.merge-request', count: 0)
@@ -109,7 +109,7 @@ RSpec.describe 'Merge requests > User mass updates', :js do
click_button 'Edit merge requests'
check 'Select all'
click_button 'Select status'
- click_link text
+ click_button text
click_update_merge_requests_button
end
diff --git a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
index 54c9fbef218..99473f3b1ea 100644
--- a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe 'User sorts merge requests' do
visit(project_issues_path(project))
- expect(find('.issues-filters a.is-active')).not_to have_content('Milestone')
+ expect(page).not_to have_button('Milestone')
end
context 'when merge requests have awards' do
diff --git a/spec/features/oauth_login_spec.rb b/spec/features/oauth_login_spec.rb
index dc27bfbef50..3402bda5a41 100644
--- a/spec/features/oauth_login_spec.rb
+++ b/spec/features/oauth_login_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'OAuth Login', :js, :allow_forgery_protection do
providers = [:github, :twitter, :bitbucket, :gitlab, :google_oauth2,
:facebook, :cas3, :auth0, :authentiq, :salesforce]
- around(:all) do |example|
+ around do |example|
with_omniauth_full_host { example.run }
end
diff --git a/spec/features/participants_autocomplete_spec.rb b/spec/features/participants_autocomplete_spec.rb
index 2781cfffbaf..cc805e7d369 100644
--- a/spec/features/participants_autocomplete_spec.rb
+++ b/spec/features/participants_autocomplete_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Member autocomplete', :js do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:author) { create(:user) }
+
let(:note) { create(:note, noteable: noteable, project: noteable.project) }
before do
diff --git a/spec/features/profiles/personal_access_tokens_spec.rb b/spec/features/profiles/personal_access_tokens_spec.rb
index 379c25d6002..de511e99182 100644
--- a/spec/features/profiles/personal_access_tokens_spec.rb
+++ b/spec/features/profiles/personal_access_tokens_spec.rb
@@ -42,10 +42,10 @@ RSpec.describe 'Profile > Personal Access Tokens', :js do
name = 'My PAT'
visit profile_personal_access_tokens_path
- fill_in "Name", with: name
+ fill_in "Token name", with: name
# Set date to 1st of next month
- find_field("Expires at").click
+ find_field("Expiration date").click
find(".pika-next").click
click_on "1"
@@ -66,7 +66,7 @@ RSpec.describe 'Profile > Personal Access Tokens', :js do
it "displays an error message" do
disallow_personal_access_token_saves!
visit profile_personal_access_tokens_path
- fill_in "Name", with: 'My PAT'
+ fill_in "Token name", with: 'My PAT'
expect { click_on "Create personal access token" }.not_to change { PersonalAccessToken.count }
expect(page).to have_content("Name cannot be nil")
@@ -149,4 +149,15 @@ RSpec.describe 'Profile > Personal Access Tokens', :js do
expect(page).to have_pushed_frontend_feature_flags(personalAccessTokensScopedToProjects: true)
end
+
+ it "prefills token details" do
+ name = 'My PAT'
+ scopes = 'api,read_user'
+
+ visit profile_personal_access_tokens_path({ name: name, scopes: scopes })
+
+ expect(page).to have_field("Token name", with: name)
+ expect(find("#personal_access_token_scopes_api")).to be_checked
+ expect(find("#personal_access_token_scopes_read_user")).to be_checked
+ end
end
diff --git a/spec/features/projects/active_tabs_spec.rb b/spec/features/projects/active_tabs_spec.rb
index 39950adc83f..b8c928004ed 100644
--- a/spec/features/projects/active_tabs_spec.rb
+++ b/spec/features/projects/active_tabs_spec.rb
@@ -24,17 +24,6 @@ RSpec.describe 'Project active tab' do
expect(page).to have_selector('.sidebar-top-level-items > li.active', count: 1)
expect(find('.sidebar-top-level-items > li.active')).to have_content(project.name)
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
-
- visit project_path(project)
- end
-
- it_behaves_like 'page has active tab', 'Project'
- it_behaves_like 'page has active sub tab', 'Details'
- end
end
context 'on Project information' do
@@ -80,11 +69,7 @@ RSpec.describe 'Project active tab' do
end
context 'on project Issues' do
- let(:feature_flag_value) { true }
-
before do
- stub_feature_flags(sidebar_refactor: feature_flag_value)
-
visit project_issues_path(project)
end
@@ -98,21 +83,6 @@ RSpec.describe 'Project active tab' do
it_behaves_like 'page has active tab', 'Issues'
it_behaves_like 'page has active sub tab', 'Milestones'
end
-
- context 'when feature flag is disabled' do
- let(:feature_flag_value) { false }
-
- %w(Milestones Labels).each do |sub_menu|
- context "on project Issues/#{sub_menu}" do
- before do
- click_tab(sub_menu)
- end
-
- it_behaves_like 'page has active tab', 'Issues'
- it_behaves_like 'page has active sub tab', sub_menu
- end
- end
- end
end
context 'on project Merge Requests' do
@@ -168,9 +138,9 @@ RSpec.describe 'Project active tab' do
visit project_cycle_analytics_path(project)
end
- context 'on project Analytics/Value Stream Analytics' do
+ context 'on project Analytics/Value stream Analytics' do
it_behaves_like 'page has active tab', _('Analytics')
- it_behaves_like 'page has active sub tab', _('Value Stream')
+ it_behaves_like 'page has active sub tab', _('Value stream')
end
context 'on project Analytics/"CI/CD"' do
diff --git a/spec/features/projects/activity/user_sees_design_activity_spec.rb b/spec/features/projects/activity/user_sees_design_activity_spec.rb
index 27a52b87178..389e86299e5 100644
--- a/spec/features/projects/activity/user_sees_design_activity_spec.rb
+++ b/spec/features/projects/activity/user_sees_design_activity_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe 'Projects > Activity > User sees design Activity', :js do
let_it_be(:uploader) { create(:user) }
let_it_be(:editor) { create(:user) }
let_it_be(:deleter) { create(:user) }
- let_it_be(:archiver) { create(:user) }
def design_activity(user, action)
[user.name, user.to_reference, action, 'design'].join(' ')
@@ -24,7 +23,6 @@ RSpec.describe 'Projects > Activity > User sees design Activity', :js do
create(:design_event, :created, author: uploader, **common_attrs)
create(:design_event, :updated, author: editor, **common_attrs)
create(:design_event, :destroyed, author: deleter, **common_attrs)
- create(:design_event, :archived, author: archiver, **common_attrs)
end
before do
@@ -39,7 +37,6 @@ RSpec.describe 'Projects > Activity > User sees design Activity', :js do
expect(page).to have_content(design_activity(uploader, 'uploaded'))
expect(page).to have_content(design_activity(editor, 'revised'))
expect(page).to have_content(design_activity(deleter, 'deleted'))
- expect(page).to have_content(design_activity(archiver, 'archived'))
end
it 'allows filtering out the design events', :aggregate_failures do
@@ -48,7 +45,6 @@ RSpec.describe 'Projects > Activity > User sees design Activity', :js do
expect(page).not_to have_content(design_activity(uploader, 'uploaded'))
expect(page).not_to have_content(design_activity(editor, 'revised'))
expect(page).not_to have_content(design_activity(deleter, 'deleted'))
- expect(page).not_to have_content(design_activity(archiver, 'archived'))
end
it 'allows filtering in the design events', :aggregate_failures do
@@ -58,7 +54,6 @@ RSpec.describe 'Projects > Activity > User sees design Activity', :js do
expect(page).to have_content(design_activity(uploader, 'uploaded'))
expect(page).to have_content(design_activity(editor, 'revised'))
expect(page).to have_content(design_activity(deleter, 'deleted'))
- expect(page).to have_content(design_activity(archiver, 'archived'))
end
end
diff --git a/spec/features/projects/ci/editor_spec.rb b/spec/features/projects/ci/editor_spec.rb
index 7012cc6edaa..c0cc12eac66 100644
--- a/spec/features/projects/ci/editor_spec.rb
+++ b/spec/features/projects/ci/editor_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Pipeline Editor', :js do
- include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
+ include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/ci/lint_spec.rb b/spec/features/projects/ci/lint_spec.rb
index 353c8558185..0d9ea6331a7 100644
--- a/spec/features/projects/ci/lint_spec.rb
+++ b/spec/features/projects/ci/lint_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'CI Lint', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297782' do
- include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
+ include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index 8c497cded8e..21e587288f5 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -214,7 +214,7 @@ RSpec.describe 'Gcp Cluster', :js do
it 'user does not see the offer' do
page.within('.as-third-party-offers') do
click_button 'Expand'
- check 'Do not display offers from third parties within GitLab'
+ check 'Do not display offers from third parties'
click_button 'Save changes'
end
diff --git a/spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb b/spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb
index eaafc7e607b..4af5c91479a 100644
--- a/spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb
+++ b/spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb
@@ -10,7 +10,6 @@ RSpec.describe 'User creates feature flag', :js do
before do
project.add_developer(user)
- stub_feature_flags(feature_flag_permissions: false)
sign_in(user)
end
diff --git a/spec/features/projects/feature_flags/user_deletes_feature_flag_spec.rb b/spec/features/projects/feature_flags/user_deletes_feature_flag_spec.rb
index 581709aacee..43540dc4522 100644
--- a/spec/features/projects/feature_flags/user_deletes_feature_flag_spec.rb
+++ b/spec/features/projects/feature_flags/user_deletes_feature_flag_spec.rb
@@ -15,7 +15,6 @@ RSpec.describe 'User deletes feature flag', :js do
before do
project.add_developer(user)
- stub_feature_flags(feature_flag_permissions: false)
sign_in(user)
visit(project_feature_flags_path(project))
diff --git a/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb b/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb
index d922bc1f4a0..30bfcb645f4 100644
--- a/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb
+++ b/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb
@@ -16,33 +16,63 @@ RSpec.describe 'User sees feature flag list', :js do
sign_in(user)
end
- context 'with legacy feature flags' do
+ context 'with feature flags' do
before do
- create_flag(project, 'ci_live_trace', false, version: :legacy_flag).tap do |feature_flag|
- create_scope(feature_flag, 'review/*', true)
+ create_flag(project, 'ci_live_trace', false).tap do |feature_flag|
+ create_strategy(feature_flag).tap do |strat|
+ create(:operations_scope, strategy: strat, environment_scope: '*')
+ create(:operations_scope, strategy: strat, environment_scope: 'review/*')
+ end
end
- create_flag(project, 'drop_legacy_artifacts', false, version: :legacy_flag)
- create_flag(project, 'mr_train', true, version: :legacy_flag).tap do |feature_flag|
- create_scope(feature_flag, 'production', false)
+ create_flag(project, 'drop_legacy_artifacts', false)
+ create_flag(project, 'mr_train', true).tap do |feature_flag|
+ create_strategy(feature_flag).tap do |strat|
+ create(:operations_scope, strategy: strat, environment_scope: 'production')
+ end
end
+ create(:operations_feature_flag, :new_version_flag, project: project,
+ name: 'my_flag', active: false)
end
- it 'shows empty page' do
+ it 'shows the user the first flag' do
visit(project_feature_flags_path(project))
- expect(page).to have_text 'Get started with feature flags'
- expect(page).to have_selector('.btn-confirm', text: 'New feature flag')
- expect(page).to have_selector('[data-qa-selector="configure_feature_flags_button"]', text: 'Configure')
+ within_feature_flag_row(1) do
+ expect(page.find('.js-feature-flag-id')).to have_content('^1')
+ expect(page.find('.feature-flag-name')).to have_content('ci_live_trace')
+ expect_status_toggle_button_not_to_be_checked
+
+ within_feature_flag_scopes do
+ expect(page.find('[data-testid="strategy-badge"]')).to have_content('All Users: All Environments, review/*')
+ end
+ end
end
- end
- context 'with new version flags' do
- before do
- create(:operations_feature_flag, :new_version_flag, project: project,
- name: 'my_flag', active: false)
+ it 'shows the user the second flag' do
+ visit(project_feature_flags_path(project))
+
+ within_feature_flag_row(2) do
+ expect(page.find('.js-feature-flag-id')).to have_content('^2')
+ expect(page.find('.feature-flag-name')).to have_content('drop_legacy_artifacts')
+ expect_status_toggle_button_not_to_be_checked
+ end
+ end
+
+ it 'shows the user the third flag' do
+ visit(project_feature_flags_path(project))
+
+ within_feature_flag_row(3) do
+ expect(page.find('.js-feature-flag-id')).to have_content('^3')
+ expect(page.find('.feature-flag-name')).to have_content('mr_train')
+ expect_status_toggle_button_to_be_checked
+
+ within_feature_flag_scopes do
+ expect(page.find('[data-testid="strategy-badge"]')).to have_content('All Users: production')
+ end
+ end
end
- it 'user updates the status toggle' do
+ it 'allows the user to update the status toggle' do
visit(project_feature_flags_path(project))
within_feature_flag_row(1) do
@@ -58,7 +88,7 @@ RSpec.describe 'User sees feature flag list', :js do
visit(project_feature_flags_path(project))
end
- it 'shows empty page' do
+ it 'shows the empty page' do
expect(page).to have_text 'Get started with feature flags'
expect(page).to have_selector('.btn-confirm', text: 'New feature flag')
expect(page).to have_selector('[data-qa-selector="configure_feature_flags_button"]', text: 'Configure')
diff --git a/spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb b/spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb
index 9c03a26abc8..f6330491886 100644
--- a/spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb
+++ b/spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb
@@ -13,9 +13,6 @@ RSpec.describe 'User updates feature flag', :js do
end
before do
- stub_feature_flags(
- feature_flag_permissions: false
- )
sign_in(user)
end
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index 2e5a5cef0fd..a7e773dda2d 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -68,17 +68,6 @@ RSpec.describe 'Edit Project Settings' do
expect(page).not_to have_selector('.shortcuts-issues')
expect(page).not_to have_selector('.shortcuts-labels')
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'hides issues tab and show labels tab' do
- stub_feature_flags(sidebar_refactor: false)
-
- visit project_path(project)
-
- expect(page).not_to have_selector('.shortcuts-issues')
- expect(page).to have_selector('.shortcuts-labels')
- end
- end
end
context "pipelines subtabs" do
diff --git a/spec/features/projects/files/dockerfile_dropdown_spec.rb b/spec/features/projects/files/dockerfile_dropdown_spec.rb
index 40d19a94b42..11663158b33 100644
--- a/spec/features/projects/files/dockerfile_dropdown_spec.rb
+++ b/spec/features/projects/files/dockerfile_dropdown_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Projects > Files > User wants to add a Dockerfile file', :js do
- include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
+ include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
before do
project = create(:project, :repository)
diff --git a/spec/features/projects/files/gitignore_dropdown_spec.rb b/spec/features/projects/files/gitignore_dropdown_spec.rb
index a9f2463ecf6..d47eaee2e79 100644
--- a/spec/features/projects/files/gitignore_dropdown_spec.rb
+++ b/spec/features/projects/files/gitignore_dropdown_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Projects > Files > User wants to add a .gitignore file', :js do
- include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
+ include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
before do
project = create(:project, :repository)
diff --git a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
index b0ccb5fca94..fc199f66490 100644
--- a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
+++ b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file', :js do
- include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
+ include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
let(:params) { {} }
let(:filename) { '.gitlab-ci.yml' }
diff --git a/spec/features/projects/files/undo_template_spec.rb b/spec/features/projects/files/undo_template_spec.rb
index 09ae595490a..560cb53ead2 100644
--- a/spec/features/projects/files/undo_template_spec.rb
+++ b/spec/features/projects/files/undo_template_spec.rb
@@ -47,11 +47,11 @@ end
def check_undo_button_display
expect(page).to have_content('template applied')
- expect(page).to have_css('.toasted-container')
+ expect(page).to have_css('.b-toaster')
end
def check_content_reverted(template_content)
- find('.toasted-container a', text: 'Undo').click
+ find('.b-toaster a', text: 'Undo').click
expect(page).not_to have_content(template_content)
expect(page).to have_css('.template-type-selector .dropdown-toggle-text')
end
diff --git a/spec/features/projects/import_export/import_file_spec.rb b/spec/features/projects/import_export/import_file_spec.rb
index 25836514981..a4c57e83bdd 100644
--- a/spec/features/projects/import_export/import_file_spec.rb
+++ b/spec/features/projects/import_export/import_file_spec.rb
@@ -62,6 +62,6 @@ RSpec.describe 'Import/Export - project import integration test', :js do
end
def click_import_project
- find('[data-qa-selector="import_project_link"]').click
+ find('[data-qa-panel-name="import_project"]').click
end
end
diff --git a/spec/features/projects/infrastructure_registry_spec.rb b/spec/features/projects/infrastructure_registry_spec.rb
index 9cab4ebeb3a..c3cb3955092 100644
--- a/spec/features/projects/infrastructure_registry_spec.rb
+++ b/spec/features/projects/infrastructure_registry_spec.rb
@@ -28,13 +28,30 @@ RSpec.describe 'Infrastructure Registry' do
visit_project_infrastructure_registry
end
- context 'when there are packages' do
+ context 'when there are modules' do
let_it_be(:terraform_module) { create(:terraform_module_package, project: project, created_at: 1.day.ago, version: '1.0.0') }
let_it_be(:terraform_module2) { create(:terraform_module_package, project: project, created_at: 2.days.ago, version: '2.0.0') }
let_it_be(:packages) { [terraform_module, terraform_module2] }
it_behaves_like 'packages list'
+ context 'details link' do
+ it 'navigates to the correct url' do
+ page.within(packages_table_selector) do
+ click_link terraform_module.name
+ end
+
+ expect(page).to have_current_path(project_infrastructure_registry_path(terraform_module.project, terraform_module))
+
+ expect(page).to have_css('.packages-app h1[data-testid="title"]', text: terraform_module.name)
+
+ page.within(%Q([name="#{terraform_module.name}"])) do
+ expect(page).to have_content('Provision instructions')
+ expect(page).to have_content('Registry setup')
+ end
+ end
+ end
+
context 'deleting a package' do
let_it_be(:project) { create(:project) }
let_it_be(:terraform_module) { create(:terraform_module_package, project: project) }
diff --git a/spec/features/projects/integrations/user_activates_jira_spec.rb b/spec/features/projects/integrations/user_activates_jira_spec.rb
index 10f84aae93f..d7679d38cae 100644
--- a/spec/features/projects/integrations/user_activates_jira_spec.rb
+++ b/spec/features/projects/integrations/user_activates_jira_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe 'User activates Jira', :js do
include JiraServiceHelper
before do
- stub_jira_service_test
+ stub_jira_integration_test
visit_project_integration('Jira')
fill_form(disable: true)
click_save_integration
@@ -105,14 +105,14 @@ RSpec.describe 'User activates Jira', :js do
click_save_integration
expect(page).to have_content('Jira settings saved and active.')
- expect(project.reload.jira_service.data_fields).to have_attributes(
+ expect(project.reload.jira_integration.data_fields).to have_attributes(
jira_issue_transition_automatic: false,
jira_issue_transition_id: '1, 2, 3'
)
end
it 'using automatic transitions' do
- create(:jira_service, project: project, jira_issue_transition_automatic: false, jira_issue_transition_id: '1, 2, 3')
+ create(:jira_integration, project: project, jira_issue_transition_automatic: false, jira_issue_transition_id: '1, 2, 3')
visit_project_integration('Jira')
expect(page).to have_field('Enable Jira transitions', checked: true)
@@ -123,14 +123,14 @@ RSpec.describe 'User activates Jira', :js do
click_save_integration
expect(page).to have_content('Jira settings saved and active.')
- expect(project.reload.jira_service.data_fields).to have_attributes(
+ expect(project.reload.jira_integration.data_fields).to have_attributes(
jira_issue_transition_automatic: true,
jira_issue_transition_id: ''
)
end
it 'disabling issue transitions' do
- create(:jira_service, project: project, jira_issue_transition_automatic: true, jira_issue_transition_id: '1, 2, 3')
+ create(:jira_integration, project: project, jira_issue_transition_automatic: true, jira_issue_transition_id: '1, 2, 3')
visit_project_integration('Jira')
expect(page).to have_field('Enable Jira transitions', checked: true)
@@ -140,7 +140,7 @@ RSpec.describe 'User activates Jira', :js do
click_save_integration
expect(page).to have_content('Jira settings saved and active.')
- expect(project.reload.jira_service.data_fields).to have_attributes(
+ expect(project.reload.jira_integration.data_fields).to have_attributes(
jira_issue_transition_automatic: false,
jira_issue_transition_id: ''
)
diff --git a/spec/features/projects/integrations/user_activates_pivotaltracker_spec.rb b/spec/features/projects/integrations/user_activates_pivotaltracker_spec.rb
index 83f66d4fa7b..ea34a766719 100644
--- a/spec/features/projects/integrations/user_activates_pivotaltracker_spec.rb
+++ b/spec/features/projects/integrations/user_activates_pivotaltracker_spec.rb
@@ -10,11 +10,11 @@ RSpec.describe 'User activates PivotalTracker' do
end
it 'activates service', :js do
- visit_project_integration('PivotalTracker')
+ visit_project_integration('Pivotal Tracker')
fill_in('Token', with: 'verySecret')
click_test_then_save_integration(expect_test_to_fail: false)
- expect(page).to have_content('PivotalTracker settings saved and active.')
+ expect(page).to have_content('Pivotal Tracker settings saved and active.')
end
end
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index bce11e6bc8a..876bc82d16c 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -14,134 +14,54 @@ RSpec.describe 'Project navbar' do
before do
sign_in(user)
- end
- context 'when sidebar refactor feature flag is disabled' do
- let(:project_context_nav_item) do
- nil
- end
+ stub_config(registry: { enabled: false })
+ insert_package_nav(_('Infrastructure'))
+ insert_infrastructure_registry_nav
+ end
+ it_behaves_like 'verified navigation bar' do
before do
- stub_feature_flags(sidebar_refactor: false)
- insert_package_nav(_('Operations'))
- insert_infrastructure_registry_nav
-
- insert_after_sub_nav_item(
- _('Boards'),
- within: _('Issues'),
- new_sub_nav_item_name: _('Labels')
- )
-
- insert_after_nav_item(
- _('Snippets'),
- new_nav_item: {
- nav_item: _('Members'),
- nav_sub_items: []
- }
- )
-
- stub_config(registry: { enabled: false })
+ visit project_path(project)
end
+ end
- it_behaves_like 'verified navigation bar' do
- before do
- visit project_path(project)
- end
+ context 'when value stream is available' do
+ before do
+ visit project_path(project)
end
- context 'when value stream is available' do
- before do
- visit project_path(project)
+ it 'redirects to value stream when Analytics item is clicked' do
+ page.within('.sidebar-top-level-items') do
+ find('.shortcuts-analytics').click
end
- it 'redirects to value stream when Analytics item is clicked' do
- page.within('.sidebar-top-level-items') do
- find('.shortcuts-analytics').click
- end
-
- wait_for_requests
+ wait_for_requests
- expect(page).to have_current_path(project_cycle_analytics_path(project))
- end
- end
-
- context 'when pages are available' do
- before do
- stub_config(pages: { enabled: true })
-
- insert_after_sub_nav_item(
- _('Operations'),
- within: _('Settings'),
- new_sub_nav_item_name: _('Pages')
- )
-
- visit project_path(project)
- end
-
- it_behaves_like 'verified navigation bar'
- end
-
- context 'when container registry is available' do
- before do
- stub_config(registry: { enabled: true })
-
- insert_container_nav
-
- visit project_path(project)
- end
-
- it_behaves_like 'verified navigation bar'
+ expect(page).to have_current_path(project_cycle_analytics_path(project))
end
end
- context 'when sidebar refactor feature flag is enabled' do
- let(:monitor_nav_item) do
- {
- nav_item: _('Monitor'),
- nav_sub_items: monitor_menu_items
- }
- end
+ context 'when pages are available' do
+ before do
+ stub_config(pages: { enabled: true })
- let(:monitor_menu_items) do
- [
- _('Metrics'),
- _('Logs'),
- _('Tracing'),
- _('Error Tracking'),
- _('Alerts'),
- _('Incidents'),
- _('Product Analytics')
- ]
- end
+ insert_after_sub_nav_item(
+ _('Monitor'),
+ within: _('Settings'),
+ new_sub_nav_item_name: _('Pages')
+ )
- let(:project_information_nav_item) do
- {
- nav_item: _('Project information'),
- nav_sub_items: [
- _('Activity'),
- _('Labels'),
- _('Members')
- ]
- }
+ visit project_path(project)
end
- let(:settings_menu_items) do
- [
- _('General'),
- _('Integrations'),
- _('Webhooks'),
- _('Access Tokens'),
- _('Repository'),
- _('CI/CD'),
- _('Monitor')
- ]
- end
+ it_behaves_like 'verified navigation bar'
+ end
+ context 'when container registry is available' do
before do
- stub_feature_flags(sidebar_refactor: true)
stub_config(registry: { enabled: true })
- insert_package_nav(_('Monitor'))
- insert_infrastructure_registry_nav
+
insert_container_nav
insert_after_sub_nav_item(
@@ -150,30 +70,6 @@ RSpec.describe 'Project navbar' do
new_sub_nav_item_name: _('Packages & Registries')
)
- insert_after_nav_item(
- _('Monitor'),
- new_nav_item: {
- nav_item: _('Infrastructure'),
- nav_sub_items: [
- _('Kubernetes clusters'),
- _('Serverless platform'),
- _('Terraform')
- ]
- }
- )
-
- insert_after_nav_item(
- _('Security & Compliance'),
- new_nav_item: {
- nav_item: _('Deployments'),
- nav_sub_items: [
- _('Feature Flags'),
- _('Environments'),
- _('Releases')
- ]
- }
- )
-
visit project_path(project)
end
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index c57432ae94e..ef28979798f 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -14,82 +14,13 @@ RSpec.describe 'New project', :js do
sign_in(user)
end
- context 'new repo experiment', :experiment do
- it 'when in control renders "project"' do
- stub_experiments(new_repo: :control)
-
- visit new_project_path
-
- find('li.header-new.dropdown').click
-
- page.within('li.header-new.dropdown') do
- expect(page).to have_selector('a', text: 'New project')
- expect(page).to have_no_selector('a', text: 'New project/repository')
- end
-
- expect(page).to have_selector('h3', text: 'Create blank project')
- expect(page).to have_no_selector('h3', text: 'Create blank project/repository')
- end
-
- it 'when in candidate renders "project/repository"' do
- stub_experiments(new_repo: :candidate)
-
- visit new_project_path
-
- find('li.header-new.dropdown').click
-
- page.within('li.header-new.dropdown') do
- expect(page).to have_selector('a', text: 'New project/repository')
- end
-
- expect(page).to have_selector('h3', text: 'Create blank project/repository')
- end
-
- it 'when in control it renders "project" in the new projects dropdown' do
- stub_experiments(new_repo: :control)
-
- visit new_project_path
-
- open_top_nav_projects
-
- within_top_nav do
- if Feature.enabled?(:combined_menu, default_enabled: :yaml)
- expect(page).to have_selector('a', text: 'Create new project')
- expect(page).to have_no_selector('a', text: 'Create blank project/repository')
- else
- expect(page).to have_selector('a', text: 'Create blank project')
- expect(page).to have_selector('a', text: 'Import project')
- expect(page).to have_no_selector('a', text: 'Create blank project/repository')
- expect(page).to have_no_selector('a', text: 'Import project/repository')
- end
- end
- end
-
- it 'when in candidate it renders "project/repository" in the new projects dropdown' do
- stub_experiments(new_repo: :candidate)
-
- visit new_project_path
-
- open_top_nav_projects
-
- within_top_nav do
- if Feature.enabled?(:combined_menu, default_enabled: :yaml)
- expect(page).to have_selector('a', text: 'Create new project')
- else
- expect(page).to have_selector('a', text: 'Create blank project/repository')
- expect(page).to have_selector('a', text: 'Import project/repository')
- end
- end
- end
- end
-
it 'shows a message if multiple levels are restricted' do
Gitlab::CurrentSettings.update!(
restricted_visibility_levels: [Gitlab::VisibilityLevel::PRIVATE, Gitlab::VisibilityLevel::INTERNAL]
)
visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
expect(page).to have_content 'Other visibility settings have been disabled by the administrator.'
end
@@ -100,7 +31,7 @@ RSpec.describe 'New project', :js do
)
visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
expect(page).to have_content 'Visibility settings have been disabled by the administrator.'
end
@@ -115,14 +46,14 @@ RSpec.describe 'New project', :js do
it 'shows "New project" page', :js do
visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
expect(page).to have_content('Project name')
expect(page).to have_content('Project URL')
expect(page).to have_content('Project slug')
click_link('New project')
- find('[data-qa-selector="import_project_link"]').click
+ find('[data-qa-panel-name="import_project"]').click
expect(page).to have_link('GitHub')
expect(page).to have_link('Bitbucket')
@@ -135,7 +66,7 @@ RSpec.describe 'New project', :js do
before do
visit new_project_path
- find('[data-qa-selector="import_project_link"]').click
+ find('[data-qa-panel-name="import_project"]').click
end
it 'has Manifest file' do
@@ -149,7 +80,7 @@ RSpec.describe 'New project', :js do
stub_application_setting(default_project_visibility: level)
visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
page.within('#blank-project-pane') do
expect(find_field("project_visibility_level_#{level}")).to be_checked
end
@@ -157,7 +88,7 @@ RSpec.describe 'New project', :js do
it "saves visibility level #{level} on validation error" do
visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
choose(key)
click_button('Create project')
@@ -177,7 +108,7 @@ RSpec.describe 'New project', :js do
context 'when admin mode is enabled', :enable_admin_mode do
it 'has private selected' do
visit new_project_path(namespace_id: group.id)
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
page.within('#blank-project-pane') do
expect(find_field("project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).to be_checked
@@ -204,7 +135,7 @@ RSpec.describe 'New project', :js do
context 'when admin mode is enabled', :enable_admin_mode do
it 'has private selected' do
visit new_project_path(namespace_id: group.id, project: { visibility_level: Gitlab::VisibilityLevel::PRIVATE })
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
page.within('#blank-project-pane') do
expect(find_field("project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).to be_checked
@@ -225,7 +156,7 @@ RSpec.describe 'New project', :js do
context 'Readme selector' do
it 'shows the initialize with Readme checkbox on "Blank project" tab' do
visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
expect(page).to have_css('input#project_initialize_with_readme')
expect(page).to have_content('Initialize repository with a README')
@@ -233,7 +164,7 @@ RSpec.describe 'New project', :js do
it 'does not show the initialize with Readme checkbox on "Create from template" tab' do
visit new_project_path
- find('[data-qa-selector="create_from_template_link"]').click
+ find('[data-qa-panel-name="create_from_template"]').click
first('.choose-template').click
page.within '.project-fields-form' do
@@ -244,7 +175,7 @@ RSpec.describe 'New project', :js do
it 'does not show the initialize with Readme checkbox on "Import project" tab' do
visit new_project_path
- find('[data-qa-selector="import_project_link"]').click
+ find('[data-qa-panel-name="import_project"]').click
first('.js-import-git-toggle-button').click
page.within '#import-project-pane' do
@@ -258,7 +189,7 @@ RSpec.describe 'New project', :js do
context 'with user namespace' do
before do
visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
end
it 'selects the user namespace' do
@@ -274,7 +205,7 @@ RSpec.describe 'New project', :js do
before do
group.add_owner(user)
visit new_project_path(namespace_id: group.id)
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
end
it 'selects the group namespace' do
@@ -291,7 +222,7 @@ RSpec.describe 'New project', :js do
before do
group.add_maintainer(user)
visit new_project_path(namespace_id: subgroup.id)
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
end
it 'selects the group namespace' do
@@ -311,7 +242,7 @@ RSpec.describe 'New project', :js do
internal_group.add_owner(user)
private_group.add_owner(user)
visit new_project_path(namespace_id: public_group.id)
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
end
it 'enables the correct visibility options' do
@@ -341,7 +272,7 @@ RSpec.describe 'New project', :js do
context 'Import project options', :js do
before do
visit new_project_path
- find('[data-qa-selector="import_project_link"]').click
+ find('[data-qa-panel-name="import_project"]').click
end
context 'from git repository url, "Repo by URL"' do
@@ -362,6 +293,14 @@ RSpec.describe 'New project', :js do
expect(git_import_instructions).to have_content 'Git repository URL'
end
+ it 'reports error if repo URL does not end with .git' do
+ fill_in 'project_import_url', with: 'http://foo/bar'
+ # simulate blur event
+ find('body').click
+
+ expect(page).to have_text('A repository URL usually ends in a .git suffix')
+ end
+
it 'keeps "Import project" tab open after form validation error' do
collision_project = create(:project, name: 'test-name-collision', namespace: user.namespace)
@@ -405,7 +344,7 @@ RSpec.describe 'New project', :js do
before do
group.add_developer(user)
visit new_project_path(namespace_id: group.id)
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
end
it 'selects the group namespace' do
diff --git a/spec/features/projects/package_files_spec.rb b/spec/features/projects/package_files_spec.rb
index bea9a9929b9..c5c03396d71 100644
--- a/spec/features/projects/package_files_spec.rb
+++ b/spec/features/projects/package_files_spec.rb
@@ -23,12 +23,18 @@ RSpec.describe 'PackageFiles' do
expect(status_code).to eq(200)
end
- it 'renders the download link with the correct url', :js do
- visit project_package_path(project, package)
+ context 'when package_details_apollo feature flag is off' do
+ before do
+ stub_feature_flags(package_details_apollo: false)
+ end
- download_url = download_project_package_file_path(project, package_file)
+ it 'renders the download link with the correct url', :js do
+ visit project_package_path(project, package)
- expect(page).to have_link(package_file.file_name, href: download_url)
+ download_url = download_project_package_file_path(project, package_file)
+
+ expect(page).to have_link(package_file.file_name, href: download_url)
+ end
end
it 'does not allow download of package belonging to different project' do
diff --git a/spec/features/projects/packages_spec.rb b/spec/features/projects/packages_spec.rb
index e5c684bdff5..fa4c57c305d 100644
--- a/spec/features/projects/packages_spec.rb
+++ b/spec/features/projects/packages_spec.rb
@@ -31,13 +31,19 @@ RSpec.describe 'Packages' do
end
context 'when there are packages' do
- let_it_be(:conan_package) { create(:conan_package, project: project, name: 'zzz', created_at: 1.day.ago, version: '1.0.0') }
+ let_it_be(:npm_package) { create(:npm_package, project: project, name: 'zzz', created_at: 1.day.ago, version: '1.0.0') }
let_it_be(:maven_package) { create(:maven_package, project: project, name: 'aaa', created_at: 2.days.ago, version: '2.0.0') }
- let_it_be(:packages) { [conan_package, maven_package] }
+ let_it_be(:packages) { [npm_package, maven_package] }
it_behaves_like 'packages list'
- it_behaves_like 'package details link'
+ context 'when package_details_apollo feature flag is off' do
+ before do
+ stub_feature_flags(package_details_apollo: false)
+ end
+
+ it_behaves_like 'package details link'
+ end
context 'deleting a package' do
let_it_be(:project) { create(:project) }
@@ -54,7 +60,7 @@ RSpec.describe 'Packages' do
it_behaves_like 'shared package sorting' do
let_it_be(:package_one) { maven_package }
- let_it_be(:package_two) { conan_package }
+ let_it_be(:package_two) { npm_package }
end
end
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 70dc0bd04e8..0958e1d1891 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -7,7 +7,8 @@ RSpec.describe 'Pipeline', :js do
include ProjectForksHelper
include ::ExclusiveLeaseHelpers
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
+
let(:user) { create(:user) }
let(:role) { :developer }
@@ -59,8 +60,9 @@ RSpec.describe 'Pipeline', :js do
describe 'GET /:project/-/pipelines/:id' do
include_context 'pipeline builds'
- let(:group) { create(:group) }
- let(:project) { create(:project, :repository, group: group) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project, reload: true) { create(:project, :repository, group: group) }
+
let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id, user: user) }
subject(:visit_pipeline) { visit project_pipeline_path(project, pipeline) }
@@ -246,6 +248,8 @@ RSpec.describe 'Pipeline', :js do
end
context 'when pipeline has a delayed job' do
+ let(:project) { create(:project, :repository, group: group) }
+
it 'shows the scheduled icon and an unschedule action for the delayed job' do
page.within('#ci-badge-delayed-job') do
expect(page).to have_selector('.js-ci-status-icon-scheduled')
@@ -434,30 +438,44 @@ RSpec.describe 'Pipeline', :js do
end
end
- context 'deleting pipeline' do
- context 'when user can not delete' do
- before do
- visit_pipeline
+ shared_context 'delete pipeline' do
+ context 'deleting pipeline' do
+ context 'when user can not delete' do
+ before do
+ visit_pipeline
+ end
+
+ it { expect(page).not_to have_button('Delete') }
end
- it { expect(page).not_to have_button('Delete') }
- end
+ context 'when deleting' do
+ before do
+ group.add_owner(user)
- context 'when deleting' do
- before do
- group.add_owner(user)
+ visit_pipeline
- visit_pipeline
+ click_button 'Delete'
+ click_button 'Delete pipeline'
+ end
- click_button 'Delete'
- click_button 'Delete pipeline'
+ it 'redirects to pipeline overview page', :sidekiq_inline do
+ expect(page).to have_content('The pipeline has been deleted')
+ expect(current_path).to eq(project_pipelines_path(project))
+ end
end
+ end
+ end
- it 'redirects to pipeline overview page', :sidekiq_might_not_need_inline do
- expect(page).to have_content('The pipeline has been deleted')
- expect(current_path).to eq(project_pipelines_path(project))
- end
+ context 'when cancel_pipelines_prior_to_destroy is enabled' do
+ include_context 'delete pipeline'
+ end
+
+ context 'when cancel_pipelines_prior_to_destroy is disabled' do
+ before do
+ stub_feature_flags(cancel_pipelines_prior_to_destroy: false)
end
+
+ include_context 'delete pipeline'
end
context 'when pipeline ref does not exist in repository anymore' do
@@ -550,6 +568,7 @@ RSpec.describe 'Pipeline', :js do
end
context 'when pipeline is merge request pipeline' do
+ let(:project) { create(:project, :repository, group: group) }
let(:source_project) { project }
let(:target_project) { project }
@@ -634,7 +653,8 @@ RSpec.describe 'Pipeline', :js do
describe 'GET /:project/-/pipelines/:id' do
include_context 'pipeline builds'
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id, user: user) }
before do
@@ -997,7 +1017,8 @@ RSpec.describe 'Pipeline', :js do
describe 'GET /:project/-/pipelines/:id/builds' do
include_context 'pipeline builds'
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id) }
before do
@@ -1234,7 +1255,8 @@ RSpec.describe 'Pipeline', :js do
describe 'GET /:project/-/pipelines/:id/dag' do
include_context 'pipeline builds'
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id) }
before do
@@ -1263,7 +1285,7 @@ RSpec.describe 'Pipeline', :js do
end
context 'when user sees pipeline flags in a pipeline detail page' do
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
context 'when pipeline is latest' do
include_context 'pipeline builds'
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index f1672af1019..1de0eea4657 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -783,7 +783,7 @@ RSpec.describe 'Pipelines', :js do
end
it 'renders empty state' do
- expect(page).to have_content 'Build with confidence'
+ expect(page).to have_content 'Use a sample CI/CD template'
end
end
end
diff --git a/spec/features/projects/releases/user_views_edit_release_spec.rb b/spec/features/projects/releases/user_views_edit_release_spec.rb
index 024c0a227c5..561b283ee15 100644
--- a/spec/features/projects/releases/user_views_edit_release_spec.rb
+++ b/spec/features/projects/releases/user_views_edit_release_spec.rb
@@ -4,9 +4,11 @@ require 'spec_helper'
RSpec.describe 'User edits Release', :js do
let_it_be(:project) { create(:project, :repository) }
- let_it_be(:release) { create(:release, :with_milestones, milestones_count: 1, project: project, name: 'The first release' ) }
let_it_be(:user) { create(:user) }
+ let(:release) { create(:release, :with_milestones, milestones_count: 1, project: project, name: 'The first release' ) }
+ let(:release_link) { create(:release_link, release: release) }
+
before do
project.add_developer(user)
@@ -68,6 +70,14 @@ RSpec.describe 'User edits Release', :js do
expect(release.description).to eq('Updated Release notes')
end
+ it 'does not affect the asset link' do
+ fill_out_form_and_click 'Save changes'
+
+ expected_filepath = release_link.filepath
+ release_link.reload
+ expect(release_link.filepath).to eq(expected_filepath)
+ end
+
it 'redirects to the previous page when "Cancel" is clicked when the url includes a back_url query parameter' do
back_path = project_releases_path(project, params: { page: 2 })
visit edit_project_release_path(project, release, params: { back_url: back_path })
diff --git a/spec/features/projects/releases/user_views_releases_spec.rb b/spec/features/projects/releases/user_views_releases_spec.rb
index fcb1b6a0015..6bc4c66b8ca 100644
--- a/spec/features/projects/releases/user_views_releases_spec.rb
+++ b/spec/features/projects/releases/user_views_releases_spec.rb
@@ -14,9 +14,14 @@ RSpec.describe 'User views releases', :js do
let_it_be(:maintainer) { create(:user) }
let_it_be(:guest) { create(:user) }
+ let_it_be(:internal_link) { create(:release_link, release: release_v1, name: 'An internal link', url: "#{project.web_url}/-/jobs/1/artifacts/download", filepath: nil) }
+ let_it_be(:internal_link_with_redirect) { create(:release_link, release: release_v1, name: 'An internal link with a redirect', url: "#{project.web_url}/-/jobs/2/artifacts/download", filepath: '/binaries/linux-amd64' ) }
+ let_it_be(:external_link) { create(:release_link, release: release_v1, name: 'An external link', url: "https://example.com/an/external/link", filepath: nil) }
+
before do
project.add_maintainer(maintainer)
project.add_guest(guest)
+ stub_default_url_options(host: 'localhost')
end
shared_examples 'releases index page' do
@@ -25,6 +30,8 @@ RSpec.describe 'User views releases', :js do
sign_in(maintainer)
visit project_releases_path(project)
+
+ wait_for_requests
end
it 'sees the release' do
@@ -35,38 +42,18 @@ RSpec.describe 'User views releases', :js do
end
end
- context 'when there is a link as an asset' do
- let!(:release_link) { create(:release_link, release: release_v1, url: url ) }
- let(:url) { "#{project.web_url}/-/jobs/1/artifacts/download" }
- let(:direct_asset_link) { Gitlab::Routing.url_helpers.project_release_url(project, release_v1) << "/downloads#{release_link.filepath}" }
+ it 'renders the correct links', :aggregate_failures do
+ page.within("##{release_v1.tag} .js-assets-list") do
+ external_link_indicator_selector = '[data-testid="external-link-indicator"]'
- it 'sees the link' do
- page.within("##{release_v1.tag} .js-assets-list") do
- expect(page).to have_link release_link.name, href: direct_asset_link
- expect(page).not_to have_css('[data-testid="external-link-indicator"]')
- end
- end
+ expect(page).to have_link internal_link.name, href: internal_link.url
+ expect(find_link(internal_link.name)).not_to have_css(external_link_indicator_selector)
- context 'when there is a link redirect' do
- let!(:release_link) { create(:release_link, release: release_v1, name: 'linux-amd64 binaries', filepath: '/binaries/linux-amd64', url: url) }
- let(:url) { "#{project.web_url}/-/jobs/1/artifacts/download" }
+ expect(page).to have_link internal_link_with_redirect.name, href: Gitlab::Routing.url_helpers.project_release_url(project, release_v1) << "/downloads#{internal_link_with_redirect.filepath}"
+ expect(find_link(internal_link_with_redirect.name)).not_to have_css(external_link_indicator_selector)
- it 'sees the link', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/329301' do
- page.within("##{release_v1.tag} .js-assets-list") do
- expect(page).to have_link release_link.name, href: direct_asset_link
- expect(page).not_to have_css('[data-testid="external-link-indicator"]')
- end
- end
- end
-
- context 'when url points to external resource' do
- let(:url) { 'http://google.com/download' }
-
- it 'sees that the link is external resource', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/329302' do
- page.within("##{release_v1.tag} .js-assets-list") do
- expect(page).to have_css('[data-testid="external-link-indicator"]')
- end
- end
+ expect(page).to have_link external_link.name, href: external_link.url
+ expect(find_link(external_link.name)).to have_css(external_link_indicator_selector)
end
end
diff --git a/spec/features/projects/services/prometheus_external_alerts_spec.rb b/spec/features/projects/services/prometheus_external_alerts_spec.rb
index 4c32905a8c5..c2ae72ddb5e 100644
--- a/spec/features/projects/services/prometheus_external_alerts_spec.rb
+++ b/spec/features/projects/services/prometheus_external_alerts_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'Prometheus external alerts', :js do
context 'with manual configuration' do
before do
- create(:prometheus_service, project: project, api_url: 'http://prometheus.example.com', manual_configuration: '1', active: true)
+ create(:prometheus_integration, project: project, api_url: 'http://prometheus.example.com', manual_configuration: '1', active: true)
end
it 'shows the Alerts section' do
diff --git a/spec/features/projects/services/user_activates_prometheus_spec.rb b/spec/features/projects/services/user_activates_prometheus_spec.rb
index b89e89d250f..73ad8088be2 100644
--- a/spec/features/projects/services/user_activates_prometheus_spec.rb
+++ b/spec/features/projects/services/user_activates_prometheus_spec.rb
@@ -17,6 +17,6 @@ RSpec.describe 'User activates Prometheus' do
click_button('Save changes')
expect(page).not_to have_content('Prometheus settings saved and active.')
- expect(page).to have_content('Fields on this page has been deprecated.')
+ expect(page).to have_content('Fields on this page have been deprecated.')
end
end
diff --git a/spec/features/projects/services/user_activates_slack_notifications_spec.rb b/spec/features/projects/services/user_activates_slack_notifications_spec.rb
index dec83ff1489..d5fe8b083ba 100644
--- a/spec/features/projects/services/user_activates_slack_notifications_spec.rb
+++ b/spec/features/projects/services/user_activates_slack_notifications_spec.rb
@@ -20,12 +20,12 @@ RSpec.describe 'User activates Slack notifications', :js do
end
context 'when service is already configured' do
- let(:service) { Integrations::Slack.new }
- let(:project) { create(:project, slack_service: service) }
+ let(:integration) { Integrations::Slack.new }
+ let(:project) { create(:project, slack_integration: integration) }
before do
- service.fields
- service.update!(
+ integration.fields
+ integration.update!(
push_channel: 1,
issue_channel: 2,
merge_request_channel: 3,
@@ -34,7 +34,7 @@ RSpec.describe 'User activates Slack notifications', :js do
pipeline_channel: 6,
wiki_page_channel: 7)
- visit(edit_project_service_path(project, service))
+ visit(edit_project_service_path(project, integration))
end
it 'filters events by channel' do
diff --git a/spec/features/projects/settings/access_tokens_spec.rb b/spec/features/projects/settings/access_tokens_spec.rb
index 76d5d7308d1..33e2623522e 100644
--- a/spec/features/projects/settings/access_tokens_spec.rb
+++ b/spec/features/projects/settings/access_tokens_spec.rb
@@ -51,10 +51,10 @@ RSpec.describe 'Project > Settings > Access Tokens', :js do
name = 'My project access token'
visit project_settings_access_tokens_path(project)
- fill_in 'Name', with: name
+ fill_in 'Token name', with: name
# Set date to 1st of next month
- find_field('Expires at').click
+ find_field('Expiration date').click
find('.pika-next').click
click_on '1'
@@ -68,6 +68,7 @@ RSpec.describe 'Project > Settings > Access Tokens', :js do
expect(active_project_access_tokens).to have_text('In')
expect(active_project_access_tokens).to have_text('api')
expect(active_project_access_tokens).to have_text('read_api')
+ expect(active_project_access_tokens).to have_text('Maintainer')
expect(created_project_access_token).not_to be_empty
end
diff --git a/spec/features/projects/settings/monitor_settings_spec.rb b/spec/features/projects/settings/monitor_settings_spec.rb
index 971a747e64f..2d8c418b7d0 100644
--- a/spec/features/projects/settings/monitor_settings_spec.rb
+++ b/spec/features/projects/settings/monitor_settings_spec.rb
@@ -18,17 +18,6 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
expect(page).to have_selector('.sidebar-sub-level-items a[aria-label="Monitor"]', text: 'Monitor', visible: false)
end
-
- context 'when feature flag sidebar_refactor is disabled' do
- it 'renders the menu "Operations" in the sidebar' do
- stub_feature_flags(sidebar_refactor: false)
-
- visit project_path(project)
- wait_for_requests
-
- expect(page).to have_selector('.sidebar-sub-level-items a[aria-label="Operations"]', text: 'Operations', visible: false)
- end
- end
end
describe 'Settings > Monitor' do
@@ -53,7 +42,7 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
expect(find_field(send_email)).to be_checked
end
- it 'updates form values' do
+ it 'updates form values', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/333665' do
check(create_issue)
uncheck(send_email)
click_on('No template selected')
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
index 1cc54b71d4a..3f9f2dae453 100644
--- a/spec/features/projects/settings/registry_settings_spec.rb
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -11,125 +11,105 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
let(:container_registry_enabled) { true }
let(:container_registry_enabled_on_project) { true }
- shared_examples 'an expiration policy form' do
- before do
- project.update!(container_registry_enabled: container_registry_enabled_on_project)
- project.container_expiration_policy.update!(enabled: true)
+ subject { visit project_settings_packages_and_registries_path(project) }
- sign_in(user)
- stub_container_registry_config(enabled: container_registry_enabled)
- end
-
- context 'as owner' do
- it 'shows available section' do
- subject
-
- settings_block = find('[data-testid="registry-settings-app"]')
- expect(settings_block).to have_text 'Clean up image tags'
- end
+ before do
+ project.update!(container_registry_enabled: container_registry_enabled_on_project)
+ project.container_expiration_policy.update!(enabled: true)
- it 'saves cleanup policy submit the form' do
- subject
-
- within '[data-testid="registry-settings-app"]' do
- select('Every day', from: 'Run cleanup')
- select('50 tags per image name', from: 'Keep the most recent:')
- fill_in('Keep tags matching:', with: 'stable')
- select('7 days', from: 'Remove tags older than:')
- fill_in('Remove tags matching:', with: '.*-production')
-
- submit_button = find('[data-testid="save-button"')
- expect(submit_button).not_to be_disabled
- submit_button.click
- end
+ sign_in(user)
+ stub_container_registry_config(enabled: container_registry_enabled)
+ end
- expect(find('.gl-toast')).to have_content('Cleanup policy successfully saved.')
- end
+ context 'as owner' do
+ it 'shows available section' do
+ subject
- it 'does not save cleanup policy submit form with invalid regex' do
- subject
+ settings_block = find('[data-testid="registry-settings-app"]')
+ expect(settings_block).to have_text 'Clean up image tags'
+ end
- within '[data-testid="registry-settings-app"]' do
- fill_in('Remove tags matching:', with: '*-production')
+ it 'saves cleanup policy submit the form' do
+ subject
- submit_button = find('[data-testid="save-button"')
- expect(submit_button).not_to be_disabled
- submit_button.click
- end
+ within '[data-testid="registry-settings-app"]' do
+ select('Every day', from: 'Run cleanup')
+ select('50 tags per image name', from: 'Keep the most recent:')
+ fill_in('Keep tags matching:', with: 'stable')
+ select('7 days', from: 'Remove tags older than:')
+ fill_in('Remove tags matching:', with: '.*-production')
- expect(find('.gl-toast')).to have_content('Something went wrong while updating the cleanup policy.')
+ submit_button = find('[data-testid="save-button"')
+ expect(submit_button).not_to be_disabled
+ submit_button.click
end
+
+ expect(find('.gl-toast')).to have_content('Cleanup policy successfully saved.')
end
- context 'with a project without expiration policy' do
- where(:application_setting, :feature_flag, :result) do
- true | true | :available_section
- true | false | :available_section
- false | true | :available_section
- false | false | :disabled_message
- end
+ it 'does not save cleanup policy submit form with invalid regex' do
+ subject
- with_them do
- before do
- project.container_expiration_policy.destroy!
- stub_feature_flags(container_expiration_policies_historic_entry: false)
- stub_application_setting(container_expiration_policies_enable_historic_entries: application_setting)
- stub_feature_flags(container_expiration_policies_historic_entry: project) if feature_flag
- end
+ within '[data-testid="registry-settings-app"]' do
+ fill_in('Remove tags matching:', with: '*-production')
- it 'displays the expected result' do
- subject
-
- within '[data-testid="registry-settings-app"]' do
- case result
- when :available_section
- expect(find('[data-testid="enable-toggle"]')).to have_content('Disabled - Tags will not be automatically deleted.')
- when :disabled_message
- expect(find('.gl-alert-title')).to have_content('Cleanup policy for tags is disabled')
- end
- end
- end
+ submit_button = find('[data-testid="save-button"')
+ expect(submit_button).not_to be_disabled
+ submit_button.click
end
- end
-
- context 'when registry is disabled' do
- let(:container_registry_enabled) { false }
- it 'does not exists' do
- subject
+ expect(find('.gl-toast')).to have_content('Something went wrong while updating the cleanup policy.')
+ end
+ end
- expect(page).not_to have_selector('[data-testid="registry-settings-app"]')
- end
+ context 'with a project without expiration policy' do
+ where(:application_setting, :feature_flag, :result) do
+ true | true | :available_section
+ true | false | :available_section
+ false | true | :available_section
+ false | false | :disabled_message
end
- context 'when container registry is disabled on project' do
- let(:container_registry_enabled_on_project) { false }
+ with_them do
+ before do
+ project.container_expiration_policy.destroy!
+ stub_feature_flags(container_expiration_policies_historic_entry: false)
+ stub_application_setting(container_expiration_policies_enable_historic_entries: application_setting)
+ stub_feature_flags(container_expiration_policies_historic_entry: project) if feature_flag
+ end
- it 'does not exists' do
+ it 'displays the expected result' do
subject
- expect(page).not_to have_selector('[data-testid="registry-settings-app"]')
+ within '[data-testid="registry-settings-app"]' do
+ case result
+ when :available_section
+ expect(find('[data-testid="enable-toggle"]')).to have_content('Disabled - Tags will not be automatically deleted.')
+ when :disabled_message
+ expect(find('.gl-alert-title')).to have_content('Cleanup policy for tags is disabled')
+ end
+ end
end
end
end
- context 'with sidebar feature flag off' do
- subject { visit project_settings_ci_cd_path(project) }
+ context 'when registry is disabled' do
+ let(:container_registry_enabled) { false }
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
+ it 'does not exists' do
+ subject
- it_behaves_like 'an expiration policy form'
+ expect(page).not_to have_selector('[data-testid="registry-settings-app"]')
+ end
end
- context 'with sidebar feature flag on' do
- subject { visit project_settings_packages_and_registries_path(project) }
+ context 'when container registry is disabled on project' do
+ let(:container_registry_enabled_on_project) { false }
- before do
- stub_feature_flags(sidebar_refactor: true)
- end
+ it 'does not exists' do
+ subject
- it_behaves_like 'an expiration policy form'
+ expect(page).not_to have_selector('[data-testid="registry-settings-app"]')
+ end
end
end
diff --git a/spec/features/projects/settings/user_searches_in_settings_spec.rb b/spec/features/projects/settings/user_searches_in_settings_spec.rb
index a60743f0e47..7ed96d01189 100644
--- a/spec/features/projects/settings/user_searches_in_settings_spec.rb
+++ b/spec/features/projects/settings/user_searches_in_settings_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe 'User searches project settings', :js do
visit project_settings_access_tokens_path(project)
end
- it_behaves_like 'can highlight results', 'Expires at'
+ it_behaves_like 'can highlight results', 'Expiration date'
end
context 'in Repository page' do
diff --git a/spec/features/projects/terraform_spec.rb b/spec/features/projects/terraform_spec.rb
index 55b906c2bc5..d080d101285 100644
--- a/spec/features/projects/terraform_spec.rb
+++ b/spec/features/projects/terraform_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe 'Terraform', :js do
context 'when clicking on the delete button' do
let(:additional_state) { create(:terraform_state, project: project) }
- it 'removes the state', :aggregate_failures do
+ it 'removes the state', :aggregate_failures, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/333640' do
visit project_terraform_index_path(project)
expect(page).to have_content(additional_state.name)
diff --git a/spec/features/projects/tree/create_directory_spec.rb b/spec/features/projects/tree/create_directory_spec.rb
index 54b081161e5..e2ae858cb9b 100644
--- a/spec/features/projects/tree/create_directory_spec.rb
+++ b/spec/features/projects/tree/create_directory_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe 'Multi-file editor new directory', :js do
find('.js-ide-commit-mode').click
# Compact mode depends on the size of window. If it is shorter than MAX_WINDOW_HEIGHT_COMPACT,
- # (as it is with CHROME_HEADLESS=0), this initial commit button will exist. Otherwise, if it is
+ # (as it is with WEBDRIVER_HEADLESS=0), this initial commit button will exist. Otherwise, if it is
# taller (as it is by default with chrome headless) then the button will not exist.
if page.has_css?('.qa-begin-commit-button')
find('.qa-begin-commit-button').click
diff --git a/spec/features/projects/tree/create_file_spec.rb b/spec/features/projects/tree/create_file_spec.rb
index cefb84e6f5e..956b8898854 100644
--- a/spec/features/projects/tree/create_file_spec.rb
+++ b/spec/features/projects/tree/create_file_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe 'Multi-file editor new file', :js do
find('.js-ide-commit-mode').click
# Compact mode depends on the size of window. If it is shorter than MAX_WINDOW_HEIGHT_COMPACT,
- # (as it is with CHROME_HEADLESS=0), this initial commit button will exist. Otherwise, if it is
+ # (as it is with WEBDRIVER_HEADLESS=0), this initial commit button will exist. Otherwise, if it is
# taller (as it is by default with chrome headless) then the button will not exist.
if page.has_css?('.qa-begin-commit-button')
find('.qa-begin-commit-button').click
diff --git a/spec/features/projects/user_creates_project_spec.rb b/spec/features/projects/user_creates_project_spec.rb
index aff3022bd4e..a5b51bac747 100644
--- a/spec/features/projects/user_creates_project_spec.rb
+++ b/spec/features/projects/user_creates_project_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'User creates a project', :js do
it 'creates a new project' do
visit(new_project_path)
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
fill_in(:project_name, with: 'Empty')
# part of the new_project_readme experiment
@@ -46,7 +46,7 @@ RSpec.describe 'User creates a project', :js do
it 'creates a new project' do
visit(new_project_path)
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
fill_in :project_name, with: 'A Subgroup Project'
fill_in :project_path, with: 'a-subgroup-project'
@@ -75,7 +75,7 @@ RSpec.describe 'User creates a project', :js do
it 'creates a new project' do
visit(new_project_path)
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-panel-name="blank_project"]').click
fill_in :project_name, with: 'a-new-project'
fill_in :project_path, with: 'a-new-project'
diff --git a/spec/features/projects/user_sees_user_popover_spec.rb b/spec/features/projects/user_sees_user_popover_spec.rb
index db451578ff8..0bbe7f26cd4 100644
--- a/spec/features/projects/user_sees_user_popover_spec.rb
+++ b/spec/features/projects/user_sees_user_popover_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe 'User sees user popover', :js do
include Spec::Support::Helpers::Features::NotesHelpers
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user, pronouns: 'they/them') }
+ let_it_be(:project) { create(:project, :repository, creator: user) }
- let(:user) { project.creator }
let(:merge_request) do
create(:merge_request, source_project: project, target_project: project)
end
@@ -32,7 +32,7 @@ RSpec.describe 'User sees user popover', :js do
expect(page).to have_css(popover_selector, visible: true)
page.within(popover_selector) do
- expect(page).to have_content(user.name)
+ expect(page).to have_content("#{user.name} (they/them)")
end
end
diff --git a/spec/features/projects/user_uses_shortcuts_spec.rb b/spec/features/projects/user_uses_shortcuts_spec.rb
index 2f7844ff615..7bb15451538 100644
--- a/spec/features/projects/user_uses_shortcuts_spec.rb
+++ b/spec/features/projects/user_uses_shortcuts_spec.rb
@@ -77,20 +77,6 @@ RSpec.describe 'User uses shortcuts', :js do
expect(page).to have_active_navigation(project.name)
end
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'redirects to the details page' do
- stub_feature_flags(sidebar_refactor: false)
-
- visit project_issues_path(project)
-
- find('body').native.send_key('g')
- find('body').native.send_key('p')
-
- expect(page).to have_active_navigation('Project')
- expect(page).to have_active_sub_navigation('Details')
- end
- end
-
it 'redirects to the activity page' do
find('body').native.send_key('g')
find('body').native.send_key('v')
@@ -196,36 +182,6 @@ RSpec.describe 'User uses shortcuts', :js do
expect(page).to have_active_navigation('Monitor')
expect(page).to have_active_sub_navigation('Metrics')
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it 'redirects to the Operations page' do
- find('body').native.send_key('g')
- find('body').native.send_key('l')
-
- expect(page).to have_active_navigation('Operations')
- expect(page).to have_active_sub_navigation('Metrics')
- end
-
- it 'redirects to the Kubernetes page with active Operations' do
- find('body').native.send_key('g')
- find('body').native.send_key('k')
-
- expect(page).to have_active_navigation('Operations')
- expect(page).to have_active_sub_navigation('Kubernetes')
- end
-
- it 'redirects to the Environments page' do
- find('body').native.send_key('g')
- find('body').native.send_key('e')
-
- expect(page).to have_active_navigation('Operations')
- expect(page).to have_active_sub_navigation('Environments')
- end
- end
end
context 'when navigating to the Infrastructure pages' do
diff --git a/spec/features/projects/user_views_empty_project_spec.rb b/spec/features/projects/user_views_empty_project_spec.rb
index cce38456df9..696a7f4ee8a 100644
--- a/spec/features/projects/user_views_empty_project_spec.rb
+++ b/spec/features/projects/user_views_empty_project_spec.rb
@@ -7,10 +7,12 @@ RSpec.describe 'User views an empty project' do
let_it_be(:user) { create(:user) }
shared_examples 'allowing push to default branch' do
- it 'shows push-to-master instructions' do
+ let(:default_branch) { project.default_branch_or_main }
+
+ it 'shows push-to-default-branch instructions' do
visit project_path(project)
- expect(page).to have_content('git push -u origin master')
+ expect(page).to have_content("git push -u origin #{default_branch}")
end
end
@@ -47,7 +49,7 @@ RSpec.describe 'User views an empty project' do
it 'does not show push-to-master instructions' do
visit project_path(project)
- expect(page).not_to have_content('git push -u origin master')
+ expect(page).not_to have_content('git push -u origin')
end
end
end
@@ -61,7 +63,7 @@ RSpec.describe 'User views an empty project' do
it 'does not show push-to-master instructions nor invite members link', :aggregate_failures, :js do
visit project_path(project)
- expect(page).not_to have_content('git push -u origin master')
+ expect(page).not_to have_content('git push -u origin')
expect(page).not_to have_button(text: 'Invite members')
end
end
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index 2ac829d406c..a3d134d49eb 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'Project' do
shared_examples 'creates from template' do |template, sub_template_tab = nil|
it "is created from template", :js do
- find('[data-qa-selector="create_from_template_link"]').click
+ find('[data-qa-panel-name="create_from_template"]').click
find(".project-template #{sub_template_tab}").click if sub_template_tab
find("label[for=#{template.name}]").click
fill_in("project_name", with: template.name)
@@ -256,7 +256,7 @@ RSpec.describe 'Project' do
expect(page).to have_selector '#confirm_name_input:focus'
end
- it 'deletes a project', :sidekiq_might_not_need_inline do
+ it 'deletes a project', :sidekiq_inline do
expect { remove_with_confirm('Delete project', project.path, 'Yes, delete project') }.to change { Project.count }.by(-1)
expect(page).to have_content "Project '#{project.full_name}' is in the process of being deleted."
expect(Project.all.count).to be_zero
diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb
index 094b31ba784..ef7af0ba138 100644
--- a/spec/features/search/user_searches_for_code_spec.rb
+++ b/spec/features/search/user_searches_for_code_spec.rb
@@ -21,7 +21,8 @@ RSpec.describe 'User searches for code' do
expect(page).to have_selector('.results', text: 'application.js')
expect(page).to have_selector('.file-content .code')
expect(page).to have_selector("span.line[lang='javascript']")
- expect(page).to have_link('application.js', href: /master\/files\/js\/application.js/)
+ expect(page).to have_link('application.js', href: %r{master/files/js/application.js})
+ expect(page).to have_button('Copy file path')
end
context 'when on a project page', :js do
@@ -37,6 +38,7 @@ RSpec.describe 'User searches for code' do
end
include_examples 'top right search form'
+ include_examples 'search timeouts', 'blobs'
it 'finds code' do
fill_in('dashboard_search', with: 'rspec')
@@ -45,7 +47,7 @@ RSpec.describe 'User searches for code' do
expect(page).to have_selector('.results', text: 'Update capybara, rspec-rails, poltergeist to recent versions')
find("#L3").click
- expect(current_url).to match(/master\/.gitignore#L3/)
+ expect(current_url).to match(%r{master/.gitignore#L3})
end
it 'search mutiple words with refs switching' do
@@ -63,7 +65,7 @@ RSpec.describe 'User searches for code' do
expect(page).to have_selector('.results', text: expected_result)
expect(find_field('dashboard_search').value).to eq(search)
- expect(find("#L1502")[:href]).to match(/v1.0.0\/files\/markdown\/ruby-style-guide.md#L1502/)
+ expect(find("#L1502")[:href]).to match(%r{v1.0.0/files/markdown/ruby-style-guide.md#L1502})
end
end
diff --git a/spec/features/search/user_searches_for_comments_spec.rb b/spec/features/search/user_searches_for_comments_spec.rb
index 2a12b22b457..5185a2460dc 100644
--- a/spec/features/search/user_searches_for_comments_spec.rb
+++ b/spec/features/search/user_searches_for_comments_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe 'User searches for comments' do
visit(project_path(project))
end
+ include_examples 'search timeouts', 'notes'
+
context 'when a comment is in commits' do
context 'when comment belongs to an invalid commit' do
let(:comment) { create(:note_on_commit, author: user, project: project, commit_id: 12345678, note: 'Bug here') }
diff --git a/spec/features/search/user_searches_for_commits_spec.rb b/spec/features/search/user_searches_for_commits_spec.rb
index 1a882050126..279db686aa9 100644
--- a/spec/features/search/user_searches_for_commits_spec.rb
+++ b/spec/features/search/user_searches_for_commits_spec.rb
@@ -14,6 +14,8 @@ RSpec.describe 'User searches for commits', :js do
visit(search_path(project_id: project.id))
end
+ include_examples 'search timeouts', 'commits'
+
context 'when searching by SHA' do
it 'finds a commit and redirects to its page' do
submit_search(sha)
diff --git a/spec/features/search/user_searches_for_issues_spec.rb b/spec/features/search/user_searches_for_issues_spec.rb
index 184f8ba0d36..b0902096770 100644
--- a/spec/features/search/user_searches_for_issues_spec.rb
+++ b/spec/features/search/user_searches_for_issues_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe 'User searches for issues', :js do
end
include_examples 'top right search form'
+ include_examples 'search timeouts', 'issues'
it 'finds an issue' do
search_for_issue(issue1.title)
diff --git a/spec/features/search/user_searches_for_merge_requests_spec.rb b/spec/features/search/user_searches_for_merge_requests_spec.rb
index 32952a127d3..d7f490ba9bc 100644
--- a/spec/features/search/user_searches_for_merge_requests_spec.rb
+++ b/spec/features/search/user_searches_for_merge_requests_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe 'User searches for merge requests', :js do
end
include_examples 'top right search form'
+ include_examples 'search timeouts', 'merge_requests'
it 'finds a merge request' do
search_for_mr(merge_request1.title)
diff --git a/spec/features/search/user_searches_for_milestones_spec.rb b/spec/features/search/user_searches_for_milestones_spec.rb
index e81abb44ba5..7a1ec16385c 100644
--- a/spec/features/search/user_searches_for_milestones_spec.rb
+++ b/spec/features/search/user_searches_for_milestones_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe 'User searches for milestones', :js do
end
include_examples 'top right search form'
+ include_examples 'search timeouts', 'milestones'
it 'finds a milestone' do
fill_in('dashboard_search', with: milestone1.title)
diff --git a/spec/features/search/user_searches_for_projects_spec.rb b/spec/features/search/user_searches_for_projects_spec.rb
index e34ae031679..c38ad077cd0 100644
--- a/spec/features/search/user_searches_for_projects_spec.rb
+++ b/spec/features/search/user_searches_for_projects_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe 'User searches for projects', :js do
end
include_examples 'top right search form'
+ include_examples 'search timeouts', 'projects'
it 'finds a project' do
visit(search_path)
diff --git a/spec/features/search/user_searches_for_users_spec.rb b/spec/features/search/user_searches_for_users_spec.rb
index 826ed73c9bf..a5cf12fa068 100644
--- a/spec/features/search/user_searches_for_users_spec.rb
+++ b/spec/features/search/user_searches_for_users_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe 'User searches for users' do
sign_in(user1)
end
+ include_examples 'search timeouts', 'users'
+
context 'when on the dashboard' do
it 'finds the user', :js do
visit dashboard_projects_path
diff --git a/spec/features/search/user_searches_for_wiki_pages_spec.rb b/spec/features/search/user_searches_for_wiki_pages_spec.rb
index 8913f1fe9ee..06545d8640f 100644
--- a/spec/features/search/user_searches_for_wiki_pages_spec.rb
+++ b/spec/features/search/user_searches_for_wiki_pages_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe 'User searches for wiki pages', :js do
end
include_examples 'top right search form'
+ include_examples 'search timeouts', 'wiki_blobs'
shared_examples 'search wiki blobs' do
it 'finds a page' do
diff --git a/spec/features/snippets/embedded_snippet_spec.rb b/spec/features/snippets/embedded_snippet_spec.rb
index b799fb2fc00..90d877d29b7 100644
--- a/spec/features/snippets/embedded_snippet_spec.rb
+++ b/spec/features/snippets/embedded_snippet_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Embedded Snippets' do
let_it_be(:snippet) { create(:personal_snippet, :public, :repository) }
+
let(:blobs) { snippet.blobs.first(3) }
it 'loads snippet', :js do
@@ -27,8 +28,8 @@ RSpec.describe 'Embedded Snippets' do
blobs.each do |blob|
expect(page).to have_content(blob.path)
expect(page.find(".snippet-file-content .blob-content[data-blob-id='#{blob.id}'] code")).to have_content(blob.data.squish)
- expect(page).to have_link('Open raw', href: /-\/snippets\/#{snippet.id}\/raw\/master\/#{blob.path}/)
- expect(page).to have_link('Download', href: /-\/snippets\/#{snippet.id}\/raw\/master\/#{blob.path}\?inline=false/)
+ expect(page).to have_link('Open raw', href: %r{-/snippets/#{snippet.id}/raw/master/#{blob.path}})
+ expect(page).to have_link('Download', href: %r{-/snippets/#{snippet.id}/raw/master/#{blob.path}\?inline=false})
end
end
end
diff --git a/spec/features/unsubscribe_links_spec.rb b/spec/features/unsubscribe_links_spec.rb
index b2d0f29808c..b7471720008 100644
--- a/spec/features/unsubscribe_links_spec.rb
+++ b/spec/features/unsubscribe_links_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'Unsubscribe links', :sidekiq_might_not_need_inline do
let(:author) { create(:user) }
let(:project) { create(:project, :public) }
let(:params) { { title: 'A bug!', description: 'Fix it!', assignees: [recipient] } }
- let(:issue) { Issues::CreateService.new(project: project, current_user: author, params: params).execute }
+ let(:issue) { Issues::CreateService.new(project: project, current_user: author, params: params, spam_params: nil).execute }
let(:mail) { ActionMailer::Base.deliveries.last }
let(:body) { Capybara::Node::Simple.new(mail.default_part_body.to_s) }
diff --git a/spec/features/usage_stats_consent_spec.rb b/spec/features/usage_stats_consent_spec.rb
index 6fa1d7d76b5..69bd6f35558 100644
--- a/spec/features/usage_stats_consent_spec.rb
+++ b/spec/features/usage_stats_consent_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'Usage stats consent' do
expect(page).to have_content(message)
- click_link 'Send usage data'
+ click_link 'Send service data'
expect(page).not_to have_content(message)
expect(page).to have_content('Application settings saved successfully')
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index 7010059a7ff..6c38d5d8b24 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -97,6 +97,8 @@ RSpec.describe 'Login' do
describe 'with an unconfirmed email address' do
let!(:user) { create(:user, confirmed_at: nil) }
let(:grace_period) { 2.days }
+ let(:alert_title) { 'Please confirm your email address' }
+ let(:alert_message) { "To continue, you need to select the link in the confirmation email we sent to verify your email address. If you didn't get our email, select Resend confirmation email" }
before do
stub_application_setting(send_user_confirmation_email: true)
@@ -109,13 +111,14 @@ RSpec.describe 'Login' do
gitlab_sign_in(user)
- expect(page).not_to have_content(I18n.t('devise.failure.unconfirmed'))
+ expect(page).not_to have_content(alert_title)
+ expect(page).not_to have_content(alert_message)
expect(page).not_to have_link('Resend confirmation email', href: new_user_confirmation_path)
end
end
context 'when the confirmation grace period is expired' do
- it 'prevents the user from logging in and renders a resend confirmation email link' do
+ it 'prevents the user from logging in and renders a resend confirmation email link', :js do
travel_to((grace_period + 1.day).from_now) do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
@@ -123,7 +126,8 @@ RSpec.describe 'Login' do
gitlab_sign_in(user)
- expect(page).to have_content(I18n.t('devise.failure.unconfirmed'))
+ expect(page).to have_content(alert_title)
+ expect(page).to have_content(alert_message)
expect(page).to have_link('Resend confirmation email', href: new_user_confirmation_path)
end
end
@@ -889,6 +893,8 @@ RSpec.describe 'Login' do
context 'when sending confirmation email and not yet confirmed' do
let!(:user) { create(:user, confirmed_at: nil) }
let(:grace_period) { 2.days }
+ let(:alert_title) { 'Please confirm your email address' }
+ let(:alert_message) { "To continue, you need to select the link in the confirmation email we sent to verify your email address. If you didn't get our email, select Resend confirmation email" }
before do
stub_application_setting(send_user_confirmation_email: true)
@@ -906,7 +912,7 @@ RSpec.describe 'Login' do
end
context "when not having confirmed within Devise's allow_unconfirmed_access_for time" do
- it 'does not allow login and shows a flash alert to confirm the email address' do
+ it 'does not allow login and shows a flash alert to confirm the email address', :js do
travel_to((grace_period + 1.day).from_now) do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
@@ -915,7 +921,9 @@ RSpec.describe 'Login' do
gitlab_sign_in(user)
expect(current_path).to eq new_user_session_path
- expect(page).to have_content(I18n.t('devise.failure.unconfirmed'))
+ expect(page).to have_content(alert_title)
+ expect(page).to have_content(alert_message)
+ expect(page).to have_link('Resend confirmation email', href: new_user_confirmation_path)
end
end
end
diff --git a/spec/features/users/user_browses_projects_on_user_page_spec.rb b/spec/features/users/user_browses_projects_on_user_page_spec.rb
index ded90be3924..5e7d7b76843 100644
--- a/spec/features/users/user_browses_projects_on_user_page_spec.rb
+++ b/spec/features/users/user_browses_projects_on_user_page_spec.rb
@@ -125,7 +125,7 @@ RSpec.describe 'Users > User browses projects on user page', :js do
end
before do
- Issues::CreateService.new(project: contributed_project, current_user: user, params: { title: 'Bug in old browser' }).execute
+ Issues::CreateService.new(project: contributed_project, current_user: user, params: { title: 'Bug in old browser' }, spam_params: nil).execute
event = create(:push_event, project: contributed_project, author: user)
create(:push_event_payload, event: event, commit_count: 3)
end
diff --git a/spec/finders/alert_management/alerts_finder_spec.rb b/spec/finders/alert_management/alerts_finder_spec.rb
index 3a88db5d854..7fcbc7b20a1 100644
--- a/spec/finders/alert_management/alerts_finder_spec.rb
+++ b/spec/finders/alert_management/alerts_finder_spec.rb
@@ -266,6 +266,7 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
context 'assignee username given' do
let_it_be(:assignee) { create(:user) }
let_it_be(:alert) { create(:alert_management_alert, project: project, assignees: [assignee]) }
+
let(:params) { { assignee_username: username } }
context 'with valid assignee_username' do
diff --git a/spec/finders/bulk_imports/entities_finder_spec.rb b/spec/finders/bulk_imports/entities_finder_spec.rb
new file mode 100644
index 00000000000..e053011b60d
--- /dev/null
+++ b/spec/finders/bulk_imports/entities_finder_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::EntitiesFinder do
+ let_it_be(:user) { create(:user) }
+
+ let_it_be(:user_import_1) { create(:bulk_import, user: user) }
+ let_it_be(:started_entity_1) { create(:bulk_import_entity, :started, bulk_import: user_import_1) }
+ let_it_be(:finished_entity_1) { create(:bulk_import_entity, :finished, bulk_import: user_import_1) }
+ let_it_be(:failed_entity_1) { create(:bulk_import_entity, :failed, bulk_import: user_import_1) }
+
+ let_it_be(:user_import_2) { create(:bulk_import, user: user) }
+ let_it_be(:started_entity_2) { create(:bulk_import_entity, :started, bulk_import: user_import_2) }
+ let_it_be(:finished_entity_2) { create(:bulk_import_entity, :finished, bulk_import: user_import_2) }
+ let_it_be(:failed_entity_2) { create(:bulk_import_entity, :failed, bulk_import: user_import_2) }
+
+ let_it_be(:not_user_import) { create(:bulk_import) }
+ let_it_be(:started_entity_3) { create(:bulk_import_entity, :started, bulk_import: not_user_import) }
+ let_it_be(:finished_entity_3) { create(:bulk_import_entity, :finished, bulk_import: not_user_import) }
+ let_it_be(:failed_entity_3) { create(:bulk_import_entity, :failed, bulk_import: not_user_import) }
+
+ subject { described_class.new(user: user) }
+
+ describe '#execute' do
+ it 'returns a list of import entities associated with user' do
+ expect(subject.execute)
+ .to contain_exactly(
+ started_entity_1, finished_entity_1, failed_entity_1,
+ started_entity_2, finished_entity_2, failed_entity_2
+ )
+ end
+
+ context 'when bulk import is specified' do
+ subject { described_class.new(user: user, bulk_import: user_import_1) }
+
+ it 'returns a list of import entities filtered by bulk import' do
+ expect(subject.execute)
+ .to contain_exactly(
+ started_entity_1, finished_entity_1, failed_entity_1
+ )
+ end
+
+ context 'when specified import is not associated with user' do
+ subject { described_class.new(user: user, bulk_import: not_user_import) }
+
+ it 'does not return entities' do
+ expect(subject.execute).to be_empty
+ end
+ end
+ end
+
+ context 'when status is specified' do
+ subject { described_class.new(user: user, status: 'failed') }
+
+ it 'returns a list of import entities filtered by status' do
+ expect(subject.execute)
+ .to contain_exactly(
+ failed_entity_1, failed_entity_2
+ )
+ end
+
+ context 'when invalid status is specified' do
+ subject { described_class.new(user: user, status: 'invalid') }
+
+ it 'does not filter entities by status' do
+ expect(subject.execute)
+ .to contain_exactly(
+ started_entity_1, finished_entity_1, failed_entity_1,
+ started_entity_2, finished_entity_2, failed_entity_2
+ )
+ end
+ end
+ end
+
+ context 'when bulk import and status are specified' do
+ subject { described_class.new(user: user, bulk_import: user_import_2, status: 'finished') }
+
+ it 'returns matched import entities' do
+ expect(subject.execute).to contain_exactly(finished_entity_2)
+ end
+ end
+ end
+end
diff --git a/spec/finders/bulk_imports/imports_finder_spec.rb b/spec/finders/bulk_imports/imports_finder_spec.rb
new file mode 100644
index 00000000000..aac83c86c84
--- /dev/null
+++ b/spec/finders/bulk_imports/imports_finder_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::ImportsFinder do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:started_import) { create(:bulk_import, :started, user: user) }
+ let_it_be(:finished_import) { create(:bulk_import, :finished, user: user) }
+ let_it_be(:not_user_import) { create(:bulk_import) }
+
+ subject { described_class.new(user: user) }
+
+ describe '#execute' do
+ it 'returns a list of imports associated with user' do
+ expect(subject.execute).to contain_exactly(started_import, finished_import)
+ end
+
+ context 'when status is specified' do
+ subject { described_class.new(user: user, status: 'started') }
+
+ it 'returns a list of import entities filtered by status' do
+ expect(subject.execute).to contain_exactly(started_import)
+ end
+
+ context 'when invalid status is specified' do
+ subject { described_class.new(user: user, status: 'invalid') }
+
+ it 'does not filter entities by status' do
+ expect(subject.execute).to contain_exactly(started_import, finished_import)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/ci/commit_statuses_finder_spec.rb b/spec/finders/ci/commit_statuses_finder_spec.rb
index 05a1a98cfe0..2e26e38f4b4 100644
--- a/spec/finders/ci/commit_statuses_finder_spec.rb
+++ b/spec/finders/ci/commit_statuses_finder_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Ci::CommitStatusesFinder, '#execute' do
context 'tag refs' do
let_it_be(:tags) { TagsFinder.new(project.repository, {}).execute }
+
let(:subject) { described_class.new(project, project.repository, user, tags).execute }
context 'no pipelines' do
@@ -131,6 +132,7 @@ RSpec.describe Ci::CommitStatusesFinder, '#execute' do
context 'CI pipelines visible to' do
let_it_be(:tags) { TagsFinder.new(project.repository, {}).execute }
+
let(:subject) { described_class.new(project, project.repository, user, tags).execute }
before do
diff --git a/spec/finders/ci/runners_finder_spec.rb b/spec/finders/ci/runners_finder_spec.rb
index 250a85dde30..7f05947ac48 100644
--- a/spec/finders/ci/runners_finder_spec.rb
+++ b/spec/finders/ci/runners_finder_spec.rb
@@ -7,12 +7,29 @@ RSpec.describe Ci::RunnersFinder do
let_it_be(:admin) { create(:user, :admin) }
describe '#execute' do
- context 'with empty params' do
- it 'returns all runners' do
- runner1 = create :ci_runner, active: true
- runner2 = create :ci_runner, active: false
+ context 'with 2 runners' do
+ let_it_be(:runner1) { create(:ci_runner, active: true) }
+ let_it_be(:runner2) { create(:ci_runner, active: false) }
+
+ context 'with empty params' do
+ it 'returns all runners' do
+ expect(Ci::Runner).to receive(:with_tags).and_call_original
+ expect(described_class.new(current_user: admin, params: {}).execute).to match_array [runner1, runner2]
+ end
+ end
- expect(described_class.new(current_user: admin, params: {}).execute).to match_array [runner1, runner2]
+ context 'with preload param set to :tag_name true' do
+ it 'requests tags' do
+ expect(Ci::Runner).to receive(:with_tags).and_call_original
+ expect(described_class.new(current_user: admin, params: { preload: { tag_name: true } }).execute).to match_array [runner1, runner2]
+ end
+ end
+
+ context 'with preload param set to :tag_name false' do
+ it 'does not request tags' do
+ expect(Ci::Runner).not_to receive(:with_tags)
+ expect(described_class.new(current_user: admin, params: { preload: { tag_name: false } }).execute).to match_array [runner1, runner2]
+ end
end
end
diff --git a/spec/finders/container_repositories_finder_spec.rb b/spec/finders/container_repositories_finder_spec.rb
index 983f6dba28b..5d449d1b811 100644
--- a/spec/finders/container_repositories_finder_spec.rb
+++ b/spec/finders/container_repositories_finder_spec.rb
@@ -7,11 +7,14 @@ RSpec.describe ContainerRepositoriesFinder do
let_it_be(:guest) { create(:user) }
let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:project) { create(:project, :public, group: group) }
let_it_be(:project_repository) { create(:container_repository, name: 'my_image', project: project) }
+
let(:params) { {} }
before do
+ project.project_feature.update!(container_registry_access_level: ProjectFeature::PRIVATE)
+
group.add_reporter(reporter)
project.add_reporter(reporter)
end
@@ -76,6 +79,14 @@ RSpec.describe ContainerRepositoriesFinder do
it_behaves_like 'with name search'
it_behaves_like 'with sorting'
+
+ context 'when project has container registry disabled' do
+ before do
+ project.project_feature.update!(container_registry_access_level: ProjectFeature::DISABLED)
+ end
+
+ it { is_expected.to match_array([other_repository]) }
+ end
end
context 'when subject_type is project' do
@@ -85,6 +96,14 @@ RSpec.describe ContainerRepositoriesFinder do
it_behaves_like 'with name search'
it_behaves_like 'with sorting'
+
+ context 'when project has container registry disabled' do
+ before do
+ project.project_feature.update!(container_registry_access_level: ProjectFeature::DISABLED)
+ end
+
+ it { is_expected.to be nil }
+ end
end
context 'with invalid subject_type' do
@@ -95,9 +114,19 @@ RSpec.describe ContainerRepositoriesFinder do
end
context 'with unauthorized user' do
- subject { described_class.new(user: guest, subject: group).execute }
+ subject { described_class.new(user: guest, subject: subject_type).execute }
- it { is_expected.to be nil }
+ context 'when subject_type is group' do
+ let(:subject_type) { group }
+
+ it { is_expected.to be nil }
+ end
+
+ context 'when subject_type is project' do
+ let(:subject_type) { project }
+
+ it { is_expected.to be nil }
+ end
end
end
end
diff --git a/spec/finders/deployments_finder_spec.rb b/spec/finders/deployments_finder_spec.rb
index bd03b254f40..6d9d0c33de3 100644
--- a/spec/finders/deployments_finder_spec.rb
+++ b/spec/finders/deployments_finder_spec.rb
@@ -68,6 +68,7 @@ RSpec.describe DeploymentsFinder do
context 'at project scope' do
let_it_be(:project) { create(:project, :public, :test_repo) }
+
let(:base_params) { { project: project } }
describe 'filtering' do
@@ -76,6 +77,7 @@ RSpec.describe DeploymentsFinder do
let_it_be(:deployment_2) { create(:deployment, :success, project: project, updated_at: 47.hours.ago) }
let_it_be(:deployment_3) { create(:deployment, :success, project: project, updated_at: 4.days.ago) }
let_it_be(:deployment_4) { create(:deployment, :success, project: project, updated_at: 1.hour.ago) }
+
let(:params) { { **base_params, updated_before: 1.day.ago, updated_after: 3.days.ago, order_by: :updated_at } }
it 'returns deployments with matched updated_at' do
@@ -268,6 +270,7 @@ RSpec.describe DeploymentsFinder do
let_it_be(:group_project_1) { create(:project, :public, :test_repo, group: group) }
let_it_be(:group_project_2) { create(:project, :public, :test_repo, group: group) }
let_it_be(:subgroup_project_1) { create(:project, :public, :test_repo, group: subgroup) }
+
let(:base_params) { { group: group } }
describe 'ordering' do
diff --git a/spec/finders/environments/environments_finder_spec.rb b/spec/finders/environments/environments_finder_spec.rb
index 68c0c524478..71d10ceb5d3 100644
--- a/spec/finders/environments/environments_finder_spec.rb
+++ b/spec/finders/environments/environments_finder_spec.rb
@@ -3,9 +3,11 @@
require 'spec_helper'
RSpec.describe Environments::EnvironmentsFinder do
- let(:project) { create(:project, :repository) }
- let(:user) { project.creator }
- let(:environment) { create(:environment, :available, project: project) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.creator }
+ let_it_be(:environment) { create(:environment, :available, project: project) }
+ let_it_be(:environment_stopped) { create(:environment, :stopped, name: 'test2', project: project) }
+ let_it_be(:environment_available) { create(:environment, :available, name: 'test3', project: project) }
before do
project.add_maintainer(user)
@@ -13,18 +15,18 @@ RSpec.describe Environments::EnvironmentsFinder do
describe '#execute' do
context 'with states parameter' do
- let(:stopped_environment) { create(:environment, :stopped, project: project) }
+ let_it_be(:stopped_environment) { create(:environment, :stopped, project: project) }
it 'returns environments with the requested state' do
result = described_class.new(project, user, states: 'available').execute
- expect(result).to contain_exactly(environment)
+ expect(result).to contain_exactly(environment, environment_available)
end
it 'returns environments with any of the requested states' do
result = described_class.new(project, user, states: %w(available stopped)).execute
- expect(result).to contain_exactly(environment, stopped_environment)
+ expect(result).to contain_exactly(environment, environment_stopped, environment_available, stopped_environment)
end
it 'raises exception when requested state is invalid' do
@@ -37,25 +39,30 @@ RSpec.describe Environments::EnvironmentsFinder do
it 'returns environments with the requested state' do
result = described_class.new(project, user, states: :available).execute
- expect(result).to contain_exactly(environment)
+ expect(result).to contain_exactly(environment, environment_available)
end
it 'returns environments with any of the requested states' do
result = described_class.new(project, user, states: [:available, :stopped]).execute
- expect(result).to contain_exactly(environment, stopped_environment)
+ expect(result).to contain_exactly(environment, environment_stopped, environment_available, stopped_environment)
end
end
end
context 'with search and states' do
- let(:environment2) { create(:environment, :stopped, name: 'test2', project: project) }
- let(:environment3) { create(:environment, :available, name: 'test3', project: project) }
-
it 'searches environments by name and state' do
result = described_class.new(project, user, search: 'test', states: :available).execute
- expect(result).to contain_exactly(environment3)
+ expect(result).to contain_exactly(environment_available)
+ end
+ end
+
+ context 'with id' do
+ it 'searches environments by name and state' do
+ result = described_class.new(project, user, search: 'test', environment_ids: [environment_available.id]).execute
+
+ expect(result).to contain_exactly(environment_available)
end
end
end
diff --git a/spec/finders/events_finder_spec.rb b/spec/finders/events_finder_spec.rb
index fe2e449f03d..5ecd38cd9cc 100644
--- a/spec/finders/events_finder_spec.rb
+++ b/spec/finders/events_finder_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe EventsFinder do
let!(:other_developer_event) { create(:event, :created, project: project1, author: other_user, target: opened_merge_request3 ) }
let_it_be(:public_project) { create(:project, :public, creator_id: user.id, namespace: user.namespace) }
+
let(:confidential_issue) { create(:closed_issue, confidential: true, project: public_project, author: user) }
let!(:confidential_event) { create(:event, :closed, project: public_project, author: user, target: confidential_issue) }
diff --git a/spec/finders/group_descendants_finder_spec.rb b/spec/finders/group_descendants_finder_spec.rb
index 3c3bb13a629..01c6eb05907 100644
--- a/spec/finders/group_descendants_finder_spec.rb
+++ b/spec/finders/group_descendants_finder_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe GroupDescendantsFinder do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
+
let(:params) { {} }
subject(:finder) do
diff --git a/spec/finders/groups_finder_spec.rb b/spec/finders/groups_finder_spec.rb
index d69720ae98e..481e2983dd7 100644
--- a/spec/finders/groups_finder_spec.rb
+++ b/spec/finders/groups_finder_spec.rb
@@ -180,6 +180,7 @@ RSpec.describe GroupsFinder do
let_it_be(:internal_sub_subgroup) { create(:group, :internal, parent: public_subgroup) }
let_it_be(:private_sub_subgroup) { create(:group, :private, parent: public_subgroup) }
let_it_be(:public_sub_subgroup) { create(:group, :public, parent: public_subgroup) }
+
let(:params) { { include_parent_descendants: true, parent: parent_group } }
context 'with nil parent' do
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 3b835d366db..c2ea918449c 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -520,6 +520,44 @@ RSpec.describe MergeRequestsFinder do
end
end
+ context 'filtering by approved by' do
+ let(:params) { { approved_by_usernames: user2.username } }
+
+ before do
+ create(:approval, merge_request: merge_request3, user: user2)
+ end
+
+ it 'returns merge requests approved by that user' do
+ merge_requests = described_class.new(user, params).execute
+
+ expect(merge_requests).to contain_exactly(merge_request3)
+ end
+
+ context 'not filter' do
+ let(:params) { { not: { approved_by_usernames: user2.username } } }
+
+ it 'returns merge requests not approved by that user' do
+ merge_requests = described_class.new(user, params).execute
+
+ expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request4, merge_request5)
+ end
+ end
+
+ context 'when filtering by author and not approved by' do
+ let(:params) { { not: { approved_by_usernames: user2.username }, author_username: user.username } }
+
+ before do
+ merge_request4.update!(author: user2)
+ end
+
+ it 'returns merge requests authored by user and not approved by user2' do
+ merge_requests = described_class.new(user, params).execute
+
+ expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request5)
+ end
+ end
+ end
+
context 'filtering by created_at/updated_at' do
let(:new_project) { create(:project, forked_from_project: project1) }
diff --git a/spec/finders/milestones_finder_spec.rb b/spec/finders/milestones_finder_spec.rb
index 6e486671132..8b26599cbfa 100644
--- a/spec/finders/milestones_finder_spec.rb
+++ b/spec/finders/milestones_finder_spec.rb
@@ -3,46 +3,68 @@
require 'spec_helper'
RSpec.describe MilestonesFinder do
- let(:now) { Time.now }
- let(:group) { create(:group) }
- let(:project_1) { create(:project, namespace: group) }
- let(:project_2) { create(:project, namespace: group) }
- let!(:milestone_1) { create(:milestone, group: group, title: 'one test', start_date: now - 1.day, due_date: now) }
- let!(:milestone_2) { create(:milestone, group: group, start_date: now + 1.day, due_date: now + 2.days) }
- let!(:milestone_3) { create(:milestone, project: project_1, state: 'active', start_date: now + 2.days, due_date: now + 3.days) }
- let!(:milestone_4) { create(:milestone, project: project_2, state: 'active', start_date: now + 4.days, due_date: now + 5.days) }
-
- it 'returns milestones for projects' do
- result = described_class.new(project_ids: [project_1.id, project_2.id], state: 'all').execute
-
- expect(result).to contain_exactly(milestone_3, milestone_4)
- end
+ let_it_be(:now) { Date.current }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project_1) { create(:project, namespace: group) }
+ let_it_be(:project_2) { create(:project, namespace: group) }
+ let_it_be(:milestone_2) { create(:milestone, group: group, start_date: now + 1.day, due_date: now + 2.days) }
+ let_it_be(:milestone_4) { create(:milestone, project: project_2, state: 'active', start_date: now + 4.days, due_date: now + 5.days) }
+
+ context 'without filters' do
+ let_it_be(:milestone_1) { create(:milestone, group: group, start_date: now - 1.day, due_date: now) }
+ let_it_be(:milestone_3) { create(:milestone, project: project_1, state: 'active', start_date: now + 2.days) }
+ let_it_be(:milestone_5) { create(:milestone, group: group, due_date: now - 2.days) }
+
+ it 'returns milestones for projects' do
+ result = described_class.new(project_ids: [project_1.id, project_2.id], state: 'all').execute
+
+ expect(result).to contain_exactly(milestone_3, milestone_4)
+ end
- it 'returns milestones for groups' do
- result = described_class.new(group_ids: group.id, state: 'all').execute
+ it 'returns milestones for groups' do
+ result = described_class.new(group_ids: group.id, state: 'all').execute
- expect(result).to contain_exactly(milestone_1, milestone_2)
- end
-
- context 'milestones for groups and project' do
- let(:result) do
- described_class.new(project_ids: [project_1.id, project_2.id], group_ids: group.id, state: 'all').execute
+ expect(result).to contain_exactly(milestone_5, milestone_1, milestone_2)
end
- it 'returns milestones for groups and projects' do
- expect(result).to contain_exactly(milestone_1, milestone_2, milestone_3, milestone_4)
+ context 'milestones for groups and project' do
+ let(:extra_params) {{}}
+ let(:result) do
+ described_class.new({ project_ids: [project_1.id, project_2.id], group_ids: group.id, state: 'all' }.merge(extra_params)).execute
+ end
+
+ it 'returns milestones for groups and projects' do
+ expect(result).to contain_exactly(milestone_5, milestone_1, milestone_2, milestone_3, milestone_4)
+ end
+
+ it 'orders milestones by due date', :aggregate_failures do
+ expect(result.first).to eq(milestone_5)
+ expect(result.second).to eq(milestone_1)
+ expect(result.third).to eq(milestone_2)
+ end
+
+ context 'when grouping and sorting by expired_last' do
+ let(:extra_params) { { sort: :expired_last_due_date_asc } }
+
+ it 'current milestones are returned first, then milestones without due date followed by expired milestones, sorted by due date in ascending order' do
+ expect(result).to eq([milestone_1, milestone_2, milestone_4, milestone_3, milestone_5])
+ end
+ end
end
- it 'orders milestones by due date' do
- milestone = create(:milestone, group: group, due_date: now - 2.days)
+ describe '#find_by' do
+ it 'finds a single milestone' do
+ finder = described_class.new(project_ids: [project_1.id], state: 'all')
- expect(result.first).to eq(milestone)
- expect(result.second).to eq(milestone_1)
- expect(result.third).to eq(milestone_2)
+ expect(finder.find_by(iid: milestone_3.iid)).to eq(milestone_3)
+ end
end
end
context 'with filters' do
+ let_it_be(:milestone_1) { create(:milestone, group: group, state: 'closed', title: 'one test', start_date: now - 1.day, due_date: now) }
+ let_it_be(:milestone_3) { create(:milestone, project: project_1, state: 'closed', start_date: now + 2.days, due_date: now + 3.days) }
+
let(:params) do
{
project_ids: [project_1.id, project_2.id],
@@ -51,11 +73,6 @@ RSpec.describe MilestonesFinder do
}
end
- before do
- milestone_1.close
- milestone_3.close
- end
-
it 'filters by id' do
params[:ids] = [milestone_1.id, milestone_2.id]
@@ -118,12 +135,4 @@ RSpec.describe MilestonesFinder do
end
end
end
-
- describe '#find_by' do
- it 'finds a single milestone' do
- finder = described_class.new(project_ids: [project_1.id], state: 'all')
-
- expect(finder.find_by(iid: milestone_3.iid)).to eq(milestone_3)
- end
- end
end
diff --git a/spec/finders/packages/conan/package_file_finder_spec.rb b/spec/finders/packages/conan/package_file_finder_spec.rb
index d0c9efa1418..c2f445c58f7 100644
--- a/spec/finders/packages/conan/package_file_finder_spec.rb
+++ b/spec/finders/packages/conan/package_file_finder_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ::Packages::Conan::PackageFileFinder do
let_it_be(:package) { create(:conan_package) }
let_it_be(:package_file) { package.package_files.first }
+
let(:package_file_name) { package_file.file_name }
let(:params) { {} }
diff --git a/spec/finders/packages/go/module_finder_spec.rb b/spec/finders/packages/go/module_finder_spec.rb
index e5c8827fc8d..a93fd855529 100644
--- a/spec/finders/packages/go/module_finder_spec.rb
+++ b/spec/finders/packages/go/module_finder_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Packages::Go::ModuleFinder do
let_it_be(:project) { create :project }
let_it_be(:other_project) { create :project }
+
let(:finder) { described_class.new project, module_name }
shared_examples 'an invalid path' do
diff --git a/spec/finders/packages/group_packages_finder_spec.rb b/spec/finders/packages/group_packages_finder_spec.rb
index 29b2f0fffd7..d7f62bdfbb4 100644
--- a/spec/finders/packages/group_packages_finder_spec.rb
+++ b/spec/finders/packages/group_packages_finder_spec.rb
@@ -129,6 +129,7 @@ RSpec.describe Packages::GroupPackagesFinder do
context 'with package_name' do
let_it_be(:named_package) { create(:maven_package, project: project, name: 'maven') }
+
let(:params) { { package_name: package_name } }
context 'as complete name' do
diff --git a/spec/finders/packages/nuget/package_finder_spec.rb b/spec/finders/packages/nuget/package_finder_spec.rb
index 59cca2d06dc..4ad02ce7da8 100644
--- a/spec/finders/packages/nuget/package_finder_spec.rb
+++ b/spec/finders/packages/nuget/package_finder_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Packages::Nuget::PackageFinder do
let_it_be(:package3) { create(:nuget_package, name: 'Another.Dummy.Package', project: project) }
let_it_be(:other_package_1) { create(:nuget_package, name: package1.name, version: package1.version) }
let_it_be(:other_package_2) { create(:nuget_package, name: package1.name, version: package2.version) }
+
let(:package_name) { package1.name }
let(:package_version) { nil }
let(:limit) { 50 }
@@ -57,6 +58,7 @@ RSpec.describe Packages::Nuget::PackageFinder do
let_it_be(:package4) { create(:nuget_package, name: package1.name, project: project) }
let_it_be(:package5) { create(:nuget_package, name: package1.name, project: project) }
let_it_be(:package6) { create(:nuget_package, name: package1.name, project: project) }
+
let(:limit) { 2 }
it { is_expected.to match_array([package5, package6]) }
diff --git a/spec/finders/packages/package_file_finder_spec.rb b/spec/finders/packages/package_file_finder_spec.rb
index ab58f75fcae..8014f04d917 100644
--- a/spec/finders/packages/package_file_finder_spec.rb
+++ b/spec/finders/packages/package_file_finder_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Packages::PackageFileFinder do
let_it_be(:package) { create(:maven_package) }
let_it_be(:package_file) { package.package_files.first }
+
let(:package_file_name) { package_file.file_name }
let(:params) { {} }
diff --git a/spec/finders/packages/package_finder_spec.rb b/spec/finders/packages/package_finder_spec.rb
index 2bb4f05a41d..1b0c88a4771 100644
--- a/spec/finders/packages/package_finder_spec.rb
+++ b/spec/finders/packages/package_finder_spec.rb
@@ -25,6 +25,7 @@ RSpec.describe ::Packages::PackageFinder do
context 'processing packages' do
let_it_be(:nuget_package) { create(:nuget_package, :processing, project: project) }
+
let(:package_id) { nuget_package.id }
it 'are not returned' do
diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb
index b8b5e2c3bb7..21b5b2f6130 100644
--- a/spec/finders/projects_finder_spec.rb
+++ b/spec/finders/projects_finder_spec.rb
@@ -31,10 +31,6 @@ RSpec.describe ProjectsFinder do
let(:use_cte) { true }
let(:finder) { described_class.new(params: params.merge(use_cte: use_cte), current_user: current_user, project_ids_relation: project_ids_relation) }
- before do
- stub_feature_flags(project_finder_similarity_sort: false)
- end
-
subject { finder.execute }
shared_examples 'ProjectFinder#execute examples' do
@@ -368,32 +364,28 @@ RSpec.describe ProjectsFinder do
end
describe 'sorting' do
+ let_it_be(:more_projects) do
+ [
+ create(:project, :internal, group: group, name: 'projA', path: 'projA'),
+ create(:project, :internal, group: group, name: 'projABC', path: 'projABC'),
+ create(:project, :internal, group: group, name: 'projAB', path: 'projAB')
+ ]
+ end
+
context 'when sorting by a field' do
let(:params) { { sort: 'name_asc' } }
- it { is_expected.to eq([internal_project, public_project]) }
+ it { is_expected.to eq(([internal_project, public_project] + more_projects).sort_by { |p| p[:name] }) }
end
context 'when sorting by similarity' do
let(:params) { { sort: 'similarity', search: 'pro' } }
- let_it_be(:internal_project2) do
- create(:project, :internal, group: group, name: 'projA', path: 'projA')
- end
-
- let_it_be(:internal_project3) do
- create(:project, :internal, group: group, name: 'projABC', path: 'projABC')
- end
-
- let_it_be(:internal_project4) do
- create(:project, :internal, group: group, name: 'projAB', path: 'projAB')
- end
-
- before do
- stub_feature_flags(project_finder_similarity_sort: current_user)
- end
+ it { is_expected.to eq([more_projects[0], more_projects[2], more_projects[1]]) }
+ end
- it { is_expected.to eq([internal_project2, internal_project4, internal_project3]) }
+ context 'when no sort is provided' do
+ it { is_expected.to eq(([internal_project, public_project] + more_projects).sort_by { |p| p[:id] }.reverse) }
end
end
diff --git a/spec/finders/template_finder_spec.rb b/spec/finders/template_finder_spec.rb
index b7339288c51..97eecf8a89d 100644
--- a/spec/finders/template_finder_spec.rb
+++ b/spec/finders/template_finder_spec.rb
@@ -123,6 +123,7 @@ RSpec.describe TemplateFinder do
describe '#execute' do
let_it_be(:project) { nil }
+
let(:params) { {} }
subject(:result) { described_class.new(type, project, params).execute }
@@ -149,6 +150,7 @@ RSpec.describe TemplateFinder do
describe '#template_names' do
let_it_be(:project) { nil }
+
let(:params) { {} }
subject(:result) { described_class.new(type, project, params).template_names.values.flatten.map { |el| OpenStruct.new(el) } }
diff --git a/spec/finders/todos_finder_spec.rb b/spec/finders/todos_finder_spec.rb
index 577ad80ede1..5611a67e977 100644
--- a/spec/finders/todos_finder_spec.rb
+++ b/spec/finders/todos_finder_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe TodosFinder do
let_it_be(:project) { create(:project, :repository, namespace: group) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
let(:finder) { described_class }
before_all do
@@ -153,6 +154,7 @@ RSpec.describe TodosFinder do
context 'by groups' do
context 'with subgroups' do
let_it_be(:subgroup) { create(:group, parent: group) }
+
let!(:todo3) { create(:todo, user: user, group: subgroup, target: issue) }
it 'returns todos from subgroups when filtered by a group' do
diff --git a/spec/finders/user_recent_events_finder_spec.rb b/spec/finders/user_recent_events_finder_spec.rb
index 5a9243d150d..74c563b9bf6 100644
--- a/spec/finders/user_recent_events_finder_spec.rb
+++ b/spec/finders/user_recent_events_finder_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe UserRecentEventsFinder do
let!(:internal_event) { create(:event, project: internal_project, author: project_owner) }
let!(:public_event) { create(:event, project: public_project, author: project_owner) }
let_it_be(:issue) { create(:issue, project: public_project) }
+
let(:limit) { nil }
let(:params) { { limit: limit } }
@@ -43,6 +44,7 @@ RSpec.describe UserRecentEventsFinder do
context 'events from multiple users' do
let_it_be(:second_user, reload: true) { create(:user) }
let_it_be(:private_project_second_user) { create(:project, :private, creator: second_user) }
+
let(:internal_project_second_user) { create(:project, :internal, creator: second_user) }
let(:public_project_second_user) { create(:project, :public, creator: second_user) }
let!(:private_event_second_user) { create(:event, project: private_project_second_user, author: second_user) }
diff --git a/spec/fixtures/api/schemas/cluster_status.json b/spec/fixtures/api/schemas/cluster_status.json
index ce62655648b..6f9535286ed 100644
--- a/spec/fixtures/api/schemas/cluster_status.json
+++ b/spec/fixtures/api/schemas/cluster_status.json
@@ -37,8 +37,6 @@
"hostname": { "type": ["string", "null"] },
"email": { "type": ["string", "null"] },
"stack": { "type": ["string", "null"] },
- "modsecurity_enabled": { "type": ["boolean", "null"] },
- "modsecurity_mode": {"type": ["integer", "0"]},
"host": {"type": ["string", "null"]},
"port": {"type": ["integer", "514"]},
"protocol": {"type": ["integer", "0"]},
diff --git a/spec/fixtures/api/schemas/entities/admin_users_data_attributes_paths.json b/spec/fixtures/api/schemas/entities/admin_users_data_attributes_paths.json
index eab8b626876..44d8e48a972 100644
--- a/spec/fixtures/api/schemas/entities/admin_users_data_attributes_paths.json
+++ b/spec/fixtures/api/schemas/entities/admin_users_data_attributes_paths.json
@@ -11,7 +11,9 @@
"unlock": { "type": "string" },
"delete": { "type": "string" },
"delete_with_contributions": { "type": "string" },
- "admin_user": { "type": "string" }
+ "admin_user": { "type": "string" },
+ "ban": { "type": "string" },
+ "unban": { "type": "string" }
},
"required": [
"edit",
@@ -24,7 +26,9 @@
"unlock",
"delete",
"delete_with_contributions",
- "admin_user"
+ "admin_user",
+ "ban",
+ "unban"
],
"additionalProperties": false
}
diff --git a/spec/fixtures/error_tracking/event.txt b/spec/fixtures/error_tracking/event.txt
new file mode 100644
index 00000000000..e87eb885e10
--- /dev/null
+++ b/spec/fixtures/error_tracking/event.txt
@@ -0,0 +1,3 @@
+{"event_id":"7c9ae6e58f03442b9203bbdcf6ae904c","dsn":"http://1fedb514e17f4b958435093deb03048c@localhost:3000/api/v4/projects/7/error_tracking/collector/7","sdk":{"name":"sentry.ruby","version":"4.5.1"},"sent_at":"2021-07-08T12:59:16Z"}
+{"type":"event","content_type":"application/json"}
+{"event_id":"7c9ae6e58f03442b9203bbdcf6ae904c","level":"error","timestamp":"2021-07-08T12:59:16Z","release":"db853d7","environment":"development","server_name":"MacBook.local","modules":{"rake":"13.0.3","concurrent-ruby":"1.1.9","i18n":"1.8.10","minitest":"5.14.4","thread_safe":"0.3.6","tzinfo":"1.2.9","uglifier":"4.2.0","web-console":"3.7.0"},"message":"","user":{},"tags":{"request_id":"4253dcd9-5e48-474a-89b4-0e945ab825af"},"contexts":{"os":{"name":"Darwin","version":"Darwin Kernel Version 20.5.0: Sat May 8 05:10:33 PDT 2021; root:xnu-7195.121.3~9/RELEASE_X86_64","build":"20.5.0","kernel_version":"Darwin Kernel Version 20.5.0: Sat May 8 05:10:33 PDT 2021; root:xnu-7195.121.3~9/RELEASE_X86_64"},"runtime":{"name":"ruby","version":"ruby 2.5.1p57 (2018-03-29 revision 63029) [x86_64-darwin19]"},"trace":{"trace_id":"d82b93fbc39e4d13b85762afa2e3ff36","span_id":"4a3ed8701e7f4ea4","parent_span_id":null,"description":null,"op":"rails.request","status":null}},"extra":{},"fingerprint":[],"breadcrumbs":{"values":[{"category":"start_processing.action_controller","data":{"controller":"PostsController","action":"error2","params":{"controller":"posts","action":"error2"},"format":"html","method":"GET","path":"/posts/error2","start_timestamp":1625749156.5553},"level":null,"message":"","timestamp":1625749156,"type":null},{"category":"process_action.action_controller","data":{"controller":"PostsController","action":"error2","params":{"controller":"posts","action":"error2"},"format":"html","method":"GET","path":"/posts/error2","start_timestamp":1625749156.55539,"view_runtime":null,"db_runtime":0},"level":null,"message":"","timestamp":1625749156,"type":null}]},"transaction":"PostsController#error2","platform":"ruby","sdk":{"name":"sentry.ruby.rails","version":"4.5.1"},"request":{"url":"http://localhost/posts/error2","method":"GET","headers":{},"env":{"SERVER_NAME":"localhost","SERVER_PORT":"4444"}},"exception":{"values":[{"type":"ActionView::MissingTemplate","value":"Missing template 
posts/error2, application/error2 with {:locale=>[:en], :formats=>[:html], :variants=>[], :handlers=>[:raw, :erb, :html, :builder, :ruby, :coffee, :jbuilder]}. Searched in:\n * \"/Users/developer/rails-project/app/views\"\n","module":"ActionView","thread_id":70254489510160,"stacktrace":{"frames":[{"project_root":"/Users/developer/rails-project","abs_path":"/Users/developer/.asdf/installs/ruby/2.5.1/lib/ruby/gems/2.5.0/gems/puma-3.12.6/lib/puma/thread_pool.rb","function":"block in spawn_thread","lineno":135,"in_app":false,"filename":"puma/thread_pool.rb","pre_context":[" end\n","\n"," begin\n"],"context_line":" block.call(work, *extra)\n","post_context":[" rescue Exception => e\n"," STDERR.puts \"Error reached top of thread-pool: #{e.message} (#{e.class})\"\n"," end\n"]},{"project_root":"/Users/developer/rails-project","abs_path":"/Users/developer/.asdf/installs/ruby/2.5.1/lib/ruby/gems/2.5.0/gems/puma-3.12.6/lib/puma/server.rb","function":"block in run","lineno":334,"in_app":false,"filename":"puma/server.rb","pre_context":[" client.close\n"," else\n"," if process_now\n"],"context_line":" process_client client, buffer\n","post_context":[" else\n"," client.set_timeout @first_data_timeout\n"," @reactor.add client\n"]},{"project_root":"/Users/developer/rails-project","abs_path":"/Users/developer/.asdf/installs/ruby/2.5.1/lib/ruby/gems/2.5.0/gems/actionview-5.2.6/lib/action_view/path_set.rb","function":"find","lineno":48,"in_app":false,"filename":"action_view/path_set.rb","pre_context":[" end\n","\n"," def find(*args)\n"],"context_line":" find_all(*args).first || raise(MissingTemplate.new(self, *args))\n","post_context":[" end\n","\n"," def find_file(path, prefixes = [], *args)\n"]}]}}]}} \ No newline at end of file
diff --git a/spec/fixtures/error_tracking/parsed_event.json b/spec/fixtures/error_tracking/parsed_event.json
new file mode 100644
index 00000000000..1b144bd43dd
--- /dev/null
+++ b/spec/fixtures/error_tracking/parsed_event.json
@@ -0,0 +1 @@
+{"event_id":"7c9ae6e58f03442b9203bbdcf6ae904c","level":"error","timestamp":"2021-07-08T12:59:16Z","release":"db853d7","environment":"development","server_name":"MacBook.local","modules":{"rake":"13.0.3","concurrent-ruby":"1.1.9","i18n":"1.8.10","minitest":"5.14.4","thread_safe":"0.3.6","tzinfo":"1.2.9","uglifier":"4.2.0","web-console":"3.7.0"},"message":"","user":{},"tags":{"request_id":"4253dcd9-5e48-474a-89b4-0e945ab825af"},"contexts":{"os":{"name":"Darwin","version":"Darwin Kernel Version 20.5.0: Sat May 8 05:10:33 PDT 2021; root:xnu-7195.121.3~9/RELEASE_X86_64","build":"20.5.0","kernel_version":"Darwin Kernel Version 20.5.0: Sat May 8 05:10:33 PDT 2021; root:xnu-7195.121.3~9/RELEASE_X86_64"},"runtime":{"name":"ruby","version":"ruby 2.5.1p57 (2018-03-29 revision 63029) [x86_64-darwin19]"},"trace":{"trace_id":"d82b93fbc39e4d13b85762afa2e3ff36","span_id":"4a3ed8701e7f4ea4","parent_span_id":null,"description":null,"op":"rails.request","status":null}},"extra":{},"fingerprint":[],"breadcrumbs":{"values":[{"category":"start_processing.action_controller","data":{"controller":"PostsController","action":"error2","params":{"controller":"posts","action":"error2"},"format":"html","method":"GET","path":"/posts/error2","start_timestamp":1625749156.5553},"level":null,"message":"","timestamp":1625749156,"type":null},{"category":"process_action.action_controller","data":{"controller":"PostsController","action":"error2","params":{"controller":"posts","action":"error2"},"format":"html","method":"GET","path":"/posts/error2","start_timestamp":1625749156.55539,"view_runtime":null,"db_runtime":0},"level":null,"message":"","timestamp":1625749156,"type":null}]},"transaction":"PostsController#error2","platform":"ruby","sdk":{"name":"sentry.ruby.rails","version":"4.5.1"},"request":{"url":"http://localhost/posts/error2","method":"GET","headers":{},"env":{"SERVER_NAME":"localhost","SERVER_PORT":"4444"}},"exception":{"values":[{"type":"ActionView::MissingTemplate","value":"Missing template 
posts/error2, application/error2 with {:locale=>[:en], :formats=>[:html], :variants=>[], :handlers=>[:raw, :erb, :html, :builder, :ruby, :coffee, :jbuilder]}. Searched in:\n * \"/Users/developer/rails-project/app/views\"\n","module":"ActionView","thread_id":70254489510160,"stacktrace":{"frames":[{"project_root":"/Users/developer/rails-project","abs_path":"/Users/developer/.asdf/installs/ruby/2.5.1/lib/ruby/gems/2.5.0/gems/puma-3.12.6/lib/puma/thread_pool.rb","function":"block in spawn_thread","lineno":135,"in_app":false,"filename":"puma/thread_pool.rb","pre_context":[" end\n","\n"," begin\n"],"context_line":" block.call(work, *extra)\n","post_context":[" rescue Exception => e\n"," STDERR.puts \"Error reached top of thread-pool: #{e.message} (#{e.class})\"\n"," end\n"]},{"project_root":"/Users/developer/rails-project","abs_path":"/Users/developer/.asdf/installs/ruby/2.5.1/lib/ruby/gems/2.5.0/gems/puma-3.12.6/lib/puma/server.rb","function":"block in run","lineno":334,"in_app":false,"filename":"puma/server.rb","pre_context":[" client.close\n"," else\n"," if process_now\n"],"context_line":" process_client client, buffer\n","post_context":[" else\n"," client.set_timeout @first_data_timeout\n"," @reactor.add client\n"]},{"project_root":"/Users/developer/rails-project","abs_path":"/Users/developer/.asdf/installs/ruby/2.5.1/lib/ruby/gems/2.5.0/gems/actionview-5.2.6/lib/action_view/path_set.rb","function":"find","lineno":48,"in_app":false,"filename":"action_view/path_set.rb","pre_context":[" end\n","\n"," def find(*args)\n"],"context_line":" find_all(*args).first || raise(MissingTemplate.new(self, *args))\n","post_context":[" end\n","\n"," def find_file(path, prefixes = [], *args)\n"]}]}}]}} \ No newline at end of file
diff --git a/spec/fixtures/error_tracking/transaction.txt b/spec/fixtures/error_tracking/transaction.txt
new file mode 100644
index 00000000000..3d3f2aa90f0
--- /dev/null
+++ b/spec/fixtures/error_tracking/transaction.txt
@@ -0,0 +1,3 @@
+{"event_id":"4a304dbdf3404e87962e99bced2f6c8b","dsn":"","sdk":{"name":"sentry.ruby","version":"4.5.1"},"sent_at":"2021-07-08T12:58:29Z"}
+{"type":"transaction","content_type":"application/json"}
+{} \ No newline at end of file
diff --git a/spec/fixtures/error_tracking/unknown.txt b/spec/fixtures/error_tracking/unknown.txt
new file mode 100644
index 00000000000..2a5c51f2596
--- /dev/null
+++ b/spec/fixtures/error_tracking/unknown.txt
@@ -0,0 +1,3 @@
+{"event_id":"7c9ae6e58f03442b9203bbdcf6ae904c","dsn":"","sdk":{"name":"sentry.ruby","version":"4.5.1"},"sent_at":"2021-07-08T12:59:16Z"}
+{"type":"unknown","content_type":"application/json"}
+{} \ No newline at end of file
diff --git a/spec/fixtures/gitlab/database/structure_example_cleaned.sql b/spec/fixtures/gitlab/database/structure_example_cleaned.sql
index dc112da7037..ab6af34dda7 100644
--- a/spec/fixtures/gitlab/database/structure_example_cleaned.sql
+++ b/spec/fixtures/gitlab/database/structure_example_cleaned.sql
@@ -23,6 +23,4 @@ ALTER TABLE ONLY abuse_reports ALTER COLUMN id SET DEFAULT nextval('abuse_report
ALTER TABLE ONLY abuse_reports
ADD CONSTRAINT abuse_reports_pkey PRIMARY KEY (id);
-CREATE INDEX index_abuse_reports_on_user_id ON abuse_reports USING btree (user_id);-- schema_migrations.version information is no longer stored in this file,
--- but instead tracked in the db/schema_migrations directory
--- see https://gitlab.com/gitlab-org/gitlab/-/issues/218590 for details
+CREATE INDEX index_abuse_reports_on_user_id ON abuse_reports USING btree (user_id);
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml
index 5cebfbcbad9..9de4d2a5644 100644
--- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml
@@ -12,7 +12,7 @@ milestone: "13.9"
introduced_by_url:
time_frame: 7d
data_source:
-data_category: Operational
+data_category: Optional
distribution:
- ee
tier:
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml
index d448e7bf3f6..0e7de369c82 100644
--- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml
@@ -13,7 +13,7 @@ milestone: "13.9"
introduced_by_url:
time_frame: 7d
data_source:
-data_category: Operational
+data_category: Optional
distribution:
- ce
- ee
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/project.json b/spec/fixtures/lib/gitlab/import_export/complex/project.json
index 9b8bc9d304e..fe885861d00 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/complex/project.json
@@ -2799,7 +2799,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-08-06T08:35:52.000+02:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
},
{
"merge_request_diff_id": 27,
@@ -2811,7 +2819,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T10:01:38.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
},
{
"merge_request_diff_id": 27,
@@ -2823,7 +2839,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T09:57:31.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
},
{
"merge_request_diff_id": 27,
@@ -2835,7 +2859,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T09:54:21.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
},
{
"merge_request_diff_id": 27,
@@ -2847,7 +2879,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T09:49:50.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
},
{
"merge_request_diff_id": 27,
@@ -2859,7 +2899,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T09:48:32.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
}
],
"merge_request_diff_files": [
@@ -3247,7 +3295,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T09:26:01.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
}
],
"merge_request_diff_files": [
@@ -3510,7 +3566,15 @@
"author_email": "james@jameslopez.es",
"committed_date": "2016-01-19T13:22:56.000+01:00",
"committer_name": "James Lopez",
- "committer_email": "james@jameslopez.es"
+ "committer_email": "james@jameslopez.es",
+ "commit_author": {
+ "name": "James Lopez",
+ "email": "james@jameslopez.es"
+ },
+ "committer": {
+ "name": "James Lopez",
+ "email": "james@jameslopez.es"
+ }
}
],
"merge_request_diff_files": [
@@ -3773,7 +3837,15 @@
"author_email": "james@jameslopez.es",
"committed_date": "2016-01-19T14:14:43.000+01:00",
"committer_name": "James Lopez",
- "committer_email": "james@jameslopez.es"
+ "committer_email": "james@jameslopez.es",
+ "commit_author": {
+ "name": "James Lopez",
+ "email": "james@jameslopez.es"
+ },
+ "committer": {
+ "name": "James Lopez",
+ "email": "james@jameslopez.es"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3785,7 +3857,15 @@
"author_email": "marin@gitlab.com",
"committed_date": "2015-12-07T12:52:12.000+01:00",
"committer_name": "Marin Jankovski",
- "committer_email": "marin@gitlab.com"
+ "committer_email": "marin@gitlab.com",
+ "commit_author": {
+ "name": "Marin Jankovski",
+ "email": "marin@gitlab.com"
+ },
+ "committer": {
+ "name": "Marin Jankovski",
+ "email": "marin@gitlab.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3797,7 +3877,15 @@
"author_email": "maxlazio@gmail.com",
"committed_date": "2015-12-07T11:54:28.000+01:00",
"committer_name": "Marin Jankovski",
- "committer_email": "maxlazio@gmail.com"
+ "committer_email": "maxlazio@gmail.com",
+ "commit_author": {
+ "name": "Marin Jankovski",
+ "email": "maxlazio@gmail.com"
+ },
+ "committer": {
+ "name": "Marin Jankovski",
+ "email": "maxlazio@gmail.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3809,7 +3897,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T16:27:12.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3821,7 +3917,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T08:50:17.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3833,7 +3937,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T08:39:43.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3845,7 +3957,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T07:21:40.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3857,7 +3977,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T06:01:27.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3869,7 +3997,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T06:00:16.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3881,7 +4017,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T05:23:14.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3893,7 +4037,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T05:08:45.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3905,7 +4057,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T05:08:04.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3917,7 +4077,15 @@
"author_email": "stanhu@packetzoom.com",
"committed_date": "2015-08-25T17:53:12.000+02:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@packetzoom.com"
+ "committer_email": "stanhu@packetzoom.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@packetzoom.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@packetzoom.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3929,7 +4097,15 @@
"author_email": "sytse@gitlab.com",
"committed_date": "2015-01-10T22:23:29.000+01:00",
"committer_name": "Sytse Sijbrandij",
- "committer_email": "sytse@gitlab.com"
+ "committer_email": "sytse@gitlab.com",
+ "commit_author": {
+ "name": "Sytse Sijbrandij",
+ "email": "sytse@gitlab.com"
+ },
+ "committer": {
+ "name": "Sytse Sijbrandij",
+ "email": "sytse@gitlab.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3941,7 +4117,15 @@
"author_email": "marmis85@gmail.com",
"committed_date": "2015-01-10T21:28:18.000+01:00",
"committer_name": "marmis85",
- "committer_email": "marmis85@gmail.com"
+ "committer_email": "marmis85@gmail.com",
+ "commit_author": {
+ "name": "marmis85",
+ "email": "marmis85@gmail.com"
+ },
+ "committer": {
+ "name": "marmis85",
+ "email": "marmis85@gmail.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3953,7 +4137,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T10:01:38.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3965,7 +4157,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T09:57:31.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3977,7 +4177,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T09:54:21.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -3989,7 +4197,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T09:49:50.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
},
{
"merge_request_diff_id": 14,
@@ -4001,7 +4217,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T09:48:32.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
}
],
"merge_request_diff_files": [
@@ -4458,7 +4682,15 @@
"author_email": "james@jameslopez.es",
"committed_date": "2016-01-19T15:25:23.000+01:00",
"committer_name": "James Lopez",
- "committer_email": "james@jameslopez.es"
+ "committer_email": "james@jameslopez.es",
+ "commit_author": {
+ "name": "James Lopez",
+ "email": "james@jameslopez.es"
+ },
+ "committer": {
+ "name": "James Lopez",
+ "email": "james@jameslopez.es"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4470,7 +4702,15 @@
"author_email": "marin@gitlab.com",
"committed_date": "2015-12-07T12:52:12.000+01:00",
"committer_name": "Marin Jankovski",
- "committer_email": "marin@gitlab.com"
+ "committer_email": "marin@gitlab.com",
+ "commit_author": {
+ "name": "Marin Jankovski",
+ "email": "marin@gitlab.com"
+ },
+ "committer": {
+ "name": "Marin Jankovski",
+ "email": "marin@gitlab.com"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4482,7 +4722,15 @@
"author_email": "maxlazio@gmail.com",
"committed_date": "2015-12-07T11:54:28.000+01:00",
"committer_name": "Marin Jankovski",
- "committer_email": "maxlazio@gmail.com"
+ "committer_email": "maxlazio@gmail.com",
+ "commit_author": {
+ "name": "Marin Jankovski",
+ "email": "maxlazio@gmail.com"
+ },
+ "committer": {
+ "name": "Marin Jankovski",
+ "email": "maxlazio@gmail.com"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4494,7 +4742,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T16:27:12.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4506,7 +4762,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T08:50:17.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4518,7 +4782,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T08:39:43.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4530,7 +4802,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T07:21:40.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4542,7 +4822,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T06:01:27.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4554,7 +4842,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T06:00:16.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4566,7 +4862,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T05:23:14.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4578,7 +4882,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T05:08:45.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4590,7 +4902,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T05:08:04.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4602,7 +4922,15 @@
"author_email": "stanhu@packetzoom.com",
"committed_date": "2015-08-25T17:53:12.000+02:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@packetzoom.com"
+ "committer_email": "stanhu@packetzoom.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@packetzoom.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@packetzoom.com"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4614,7 +4942,15 @@
"author_email": "sytse@gitlab.com",
"committed_date": "2015-01-10T22:23:29.000+01:00",
"committer_name": "Sytse Sijbrandij",
- "committer_email": "sytse@gitlab.com"
+ "committer_email": "sytse@gitlab.com",
+ "commit_author": {
+ "name": "Sytse Sijbrandij",
+ "email": "sytse@gitlab.com"
+ },
+ "committer": {
+ "name": "Sytse Sijbrandij",
+ "email": "sytse@gitlab.com"
+ }
},
{
"merge_request_diff_id": 13,
@@ -4626,7 +4962,15 @@
"author_email": "marmis85@gmail.com",
"committed_date": "2015-01-10T21:28:18.000+01:00",
"committer_name": "marmis85",
- "committer_email": "marmis85@gmail.com"
+ "committer_email": "marmis85@gmail.com",
+ "commit_author": {
+ "name": "marmis85",
+ "email": "marmis85@gmail.com"
+ },
+ "committer": {
+ "name": "marmis85",
+ "email": "marmis85@gmail.com"
+ }
}
],
"merge_request_diff_files": [
@@ -4967,7 +5311,15 @@
"author_email": "james@jameslopez.es",
"committed_date": "2016-01-19T14:08:21.000+01:00",
"committer_name": "James Lopez",
- "committer_email": "james@jameslopez.es"
+ "committer_email": "james@jameslopez.es",
+ "commit_author": {
+ "name": "James Lopez",
+ "email": "james@jameslopez.es"
+ },
+ "committer": {
+ "name": "James Lopez",
+ "email": "james@jameslopez.es"
+ }
},
{
"merge_request_diff_id": 12,
@@ -4979,7 +5331,15 @@
"author_email": "marin@gitlab.com",
"committed_date": "2015-12-07T12:52:12.000+01:00",
"committer_name": "Marin Jankovski",
- "committer_email": "marin@gitlab.com"
+ "committer_email": "marin@gitlab.com",
+ "commit_author": {
+ "name": "Marin Jankovski",
+ "email": "marin@gitlab.com"
+ },
+ "committer": {
+ "name": "Marin Jankovski",
+ "email": "marin@gitlab.com"
+ }
},
{
"merge_request_diff_id": 12,
@@ -4991,7 +5351,15 @@
"author_email": "maxlazio@gmail.com",
"committed_date": "2015-12-07T11:54:28.000+01:00",
"committer_name": "Marin Jankovski",
- "committer_email": "maxlazio@gmail.com"
+ "committer_email": "maxlazio@gmail.com",
+ "commit_author": {
+ "name": "Marin Jankovski",
+ "email": "maxlazio@gmail.com"
+ },
+ "committer": {
+ "name": "Marin Jankovski",
+ "email": "maxlazio@gmail.com"
+ }
},
{
"merge_request_diff_id": 12,
@@ -5003,7 +5371,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T16:27:12.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 12,
@@ -5015,7 +5391,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T08:50:17.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 12,
@@ -5027,7 +5411,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T08:39:43.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 12,
@@ -5039,7 +5431,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T07:21:40.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 12,
@@ -5051,7 +5451,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T06:01:27.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 12,
@@ -5063,7 +5471,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T06:00:16.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 12,
@@ -5075,7 +5491,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T05:23:14.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 12,
@@ -5087,7 +5511,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T05:08:45.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 12,
@@ -5099,7 +5531,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T05:08:04.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 12,
@@ -5111,7 +5551,15 @@
"author_email": "stanhu@packetzoom.com",
"committed_date": "2015-08-25T17:53:12.000+02:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@packetzoom.com"
+ "committer_email": "stanhu@packetzoom.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@packetzoom.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@packetzoom.com"
+ }
}
],
"merge_request_diff_files": [
@@ -5675,7 +6123,15 @@
"author_email": "james@jameslopez.es",
"committed_date": "2016-01-19T14:43:23.000+01:00",
"committer_name": "James Lopez",
- "committer_email": "james@jameslopez.es"
+ "committer_email": "james@jameslopez.es",
+ "commit_author": {
+ "name": "James Lopez",
+ "email": "james@jameslopez.es"
+ },
+ "committer": {
+ "name": "James Lopez",
+ "email": "james@jameslopez.es"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5687,7 +6143,15 @@
"author_email": "marin@gitlab.com",
"committed_date": "2015-12-07T12:52:12.000+01:00",
"committer_name": "Marin Jankovski",
- "committer_email": "marin@gitlab.com"
+ "committer_email": "marin@gitlab.com",
+ "commit_author": {
+ "name": "Marin Jankovski",
+ "email": "marin@gitlab.com"
+ },
+ "committer": {
+ "name": "Marin Jankovski",
+ "email": "marin@gitlab.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5699,7 +6163,15 @@
"author_email": "maxlazio@gmail.com",
"committed_date": "2015-12-07T11:54:28.000+01:00",
"committer_name": "Marin Jankovski",
- "committer_email": "maxlazio@gmail.com"
+ "committer_email": "maxlazio@gmail.com",
+ "commit_author": {
+ "name": "Marin Jankovski",
+ "email": "maxlazio@gmail.com"
+ },
+ "committer": {
+ "name": "Marin Jankovski",
+ "email": "maxlazio@gmail.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5711,7 +6183,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T16:27:12.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5723,7 +6203,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T08:50:17.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5735,7 +6223,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T08:39:43.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5747,7 +6243,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T07:21:40.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5759,7 +6263,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T06:01:27.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5771,7 +6283,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T06:00:16.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5783,7 +6303,15 @@
"author_email": "stanhu@gmail.com",
"committed_date": "2015-11-13T05:23:14.000+01:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@gmail.com"
+ "committer_email": "stanhu@gmail.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@gmail.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5795,7 +6323,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T05:08:45.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5807,7 +6343,15 @@
"author_email": "minsik.yoon@samsung.com",
"committed_date": "2015-11-13T05:08:04.000+01:00",
"committer_name": "윤민식",
- "committer_email": "minsik.yoon@samsung.com"
+ "committer_email": "minsik.yoon@samsung.com",
+ "commit_author": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ },
+ "committer": {
+ "name": "윤민식",
+ "email": "minsik.yoon@samsung.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5819,7 +6363,15 @@
"author_email": "stanhu@packetzoom.com",
"committed_date": "2015-08-25T17:53:12.000+02:00",
"committer_name": "Stan Hu",
- "committer_email": "stanhu@packetzoom.com"
+ "committer_email": "stanhu@packetzoom.com",
+ "commit_author": {
+ "name": "Stan Hu",
+ "email": "stanhu@packetzoom.com"
+ },
+ "committer": {
+ "name": "Stan Hu",
+ "email": "stanhu@packetzoom.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5831,7 +6383,15 @@
"author_email": "sytse@gitlab.com",
"committed_date": "2015-01-10T22:23:29.000+01:00",
"committer_name": "Sytse Sijbrandij",
- "committer_email": "sytse@gitlab.com"
+ "committer_email": "sytse@gitlab.com",
+ "commit_author": {
+ "name": "Sytse Sijbrandij",
+ "email": "sytse@gitlab.com"
+ },
+ "committer": {
+ "name": "Sytse Sijbrandij",
+ "email": "sytse@gitlab.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5843,7 +6403,15 @@
"author_email": "marmis85@gmail.com",
"committed_date": "2015-01-10T21:28:18.000+01:00",
"committer_name": "marmis85",
- "committer_email": "marmis85@gmail.com"
+ "committer_email": "marmis85@gmail.com",
+ "commit_author": {
+ "name": "marmis85",
+ "email": "marmis85@gmail.com"
+ },
+ "committer": {
+ "name": "marmis85",
+ "email": "marmis85@gmail.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5855,7 +6423,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T10:01:38.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5867,7 +6443,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T09:57:31.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5879,7 +6463,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T09:54:21.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5891,7 +6483,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T09:49:50.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
},
{
"merge_request_diff_id": 10,
@@ -5903,7 +6503,15 @@
"author_email": "dmitriy.zaporozhets@gmail.com",
"committed_date": "2014-02-27T09:48:32.000+01:00",
"committer_name": "Dmitriy Zaporozhets",
- "committer_email": "dmitriy.zaporozhets@gmail.com"
+ "committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ },
+ "committer": {
+ "name": "Dmitriy Zaporozhets",
+ "email": "dmitriy.zaporozhets@gmail.com"
+ }
}
],
"merge_request_diff_files": [
@@ -6348,7 +6956,15 @@
"author_email": "james@jameslopez.es",
"committed_date": "2016-01-19T15:44:02.000+01:00",
"committer_name": "James Lopez",
- "committer_email": "james@jameslopez.es"
+ "committer_email": "james@jameslopez.es",
+ "commit_author": {
+ "name": "James Lopez",
+ "email": "james@jameslopez.es"
+ },
+ "committer": {
+ "name": "James Lopez",
+ "email": "james@jameslopez.es"
+ }
}
],
"merge_request_diff_files": [
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/tree/project/merge_requests.ndjson b/spec/fixtures/lib/gitlab/import_export/complex/tree/project/merge_requests.ndjson
index 3687c005b96..741360c0b8e 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/tree/project/merge_requests.ndjson
+++ b/spec/fixtures/lib/gitlab/import_export/complex/tree/project/merge_requests.ndjson
@@ -1,9 +1,9 @@
-{"id":27,"target_branch":"feature","source_branch":"feature_conflict","source_project_id":2147483547,"author_id":1,"assignee_id":null,"title":"MR1","created_at":"2016-06-14T15:02:36.568Z","updated_at":"2016-06-14T15:02:56.815Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":9,"description":null,"position":0,"updated_by_id":null,"merge_error":null,"diff_head_sha":"HEAD","source_branch_sha":"ABCD","target_branch_sha":"DCBA","merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":true,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":669,"note":"added 3 commits\n\n<ul><li>16ea4e20...074a2a32 - 2 commits from branch <code>master</code></li><li>ca223a02 - readme: fix typos</li></ul>\n\n[Compare with previous version](/group/project/merge_requests/1/diffs?diff_id=1189&start_sha=16ea4e207fb258fe4e9c73185a725207c9a4f3e1)","noteable_type":"MergeRequest","author_id":26,"created_at":"2020-03-28T12:47:33.461Z","updated_at":"2020-03-28T12:47:33.461Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"system":true,"st_diff":null,"updated_by_id":null,"position":null,"original_position":null,"resolved_at":null,"resolved_by_id":null,"discussion_id":null,"change_position":null,"resolved_by_push":null,"confidential":null,"type":null,"author":{"name":"User 4"},"award_emoji":[],"system_note_metadata":{"id":4789,"commit_count":3,"action":"commit","created_at":"2020-03-28T12:47:33.461Z","updated_at":"2020-03-28T12:47:33.461Z"},"events":[],"suggestions":[]},{"id":670,"note":"unmarked as a **Work In 
Progress**","noteable_type":"MergeRequest","author_id":26,"created_at":"2020-03-28T12:48:36.951Z","updated_at":"2020-03-28T12:48:36.951Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"system":true,"st_diff":null,"updated_by_id":null,"position":null,"original_position":null,"resolved_at":null,"resolved_by_id":null,"discussion_id":null,"change_position":null,"resolved_by_push":null,"confidential":null,"type":null,"author":{"name":"User 4"},"award_emoji":[],"system_note_metadata":{"id":4790,"commit_count":null,"action":"title","created_at":"2020-03-28T12:48:36.951Z","updated_at":"2020-03-28T12:48:36.951Z"},"events":[],"suggestions":[]},{"id":671,"note":"Sit voluptatibus eveniet architecto quidem.","note_html":"<p>something else entirely</p>","cached_markdown_version":917504,"noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:56.632Z","updated_at":"2016-06-14T15:02:56.632Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[],"award_emoji":[{"id":1,"name":"tada","user_id":1,"awardable_type":"Note","awardable_id":1,"created_at":"2019-11-05T15:37:21.287Z","updated_at":"2019-11-05T15:37:21.287Z"}]},{"id":672,"note":"Odio maxime ratione voluptatibus sed.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:56.656Z","updated_at":"2016-06-14T15:02:56.656Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":673,"note":"Et deserunt et omnis nihil excepturi 
accusantium.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:56.679Z","updated_at":"2016-06-14T15:02:56.679Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":674,"note":"Saepe asperiores exercitationem non dignissimos laborum reiciendis et ipsum.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:56.700Z","updated_at":"2016-06-14T15:02:56.700Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[],"suggestions":[{"id":1,"note_id":674,"relative_order":0,"applied":false,"commit_id":null,"from_content":"Original line\n","to_content":"New line\n","lines_above":0,"lines_below":0,"outdated":false}]},{"id":675,"note":"Numquam est at dolor quo et sed eligendi similique.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:56.720Z","updated_at":"2016-06-14T15:02:56.720Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":676,"note":"Et perferendis aliquam sunt nisi labore delectus.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:56.742Z","updated_at":"2016-06-14T15:02:56.742Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":677,"note":"Aut ex rerum et 
in.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:56.791Z","updated_at":"2016-06-14T15:02:56.791Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":678,"note":"Dolor laborum earum ut exercitationem.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:56.814Z","updated_at":"2016-06-14T15:02:56.814Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"resource_label_events":[{"id":243,"action":"add","issue_id":null,"merge_request_id":27,"label_id":null,"user_id":1,"created_at":"2018-08-28T08:24:00.494Z"}],"merge_request_diff":{"id":27,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":27,"relative_order":0,"sha":"bb5206fee213d983da88c47f9cf4cc6caf9c66dc","message":"Feature conflict added\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-08-06T08:35:52.000+02:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-08-06T08:35:52.000+02:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"},{"merge_request_diff_id":27,"relative_order":1,"sha":"5937ac0a7beb003549fc5fd26fc247adbce4a52e","message":"Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T10:01:38.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T10:01:38.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"},{"merge_request_diff_id":27,"relative_order":2,"sha":"570e7b2abdd848b95f2f578043fc23bd6f6fd24d","message":"Change 
some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:57:31.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:57:31.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"},{"merge_request_diff_id":27,"relative_order":3,"sha":"6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9","message":"More submodules\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:54:21.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:54:21.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"},{"merge_request_diff_id":27,"relative_order":4,"sha":"d14d6c0abdd253381df51a723d58691b2ee1ab08","message":"Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:49:50.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:49:50.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"},{"merge_request_diff_id":27,"relative_order":5,"sha":"c1acaa58bbcbc3eafe538cb8274ba387047b69f8","message":"Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:48:32.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:48:32.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"}],"merge_request_diff_files":[{"merge_request_diff_id":27,"relative_order":0,"utf8_diff":"Binary files a/.DS_Store and /dev/null 
differ\n","new_path":".DS_Store","old_path":".DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":27,"relative_order":1,"utf8_diff":"--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n","new_path":".gitignore","old_path":".gitignore","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":2,"utf8_diff":"--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n","new_path":".gitmodules","old_path":".gitmodules","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":3,"utf8_diff":"Binary files a/files/.DS_Store and /dev/null differ\n","new_path":"files/.DS_Store","old_path":"files/.DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":27,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,4 @@\n+# This file was changed in feature branch\n+# We put different code here to make merge conflict\n+class Conflict\n+end\n","new_path":"files/ruby/feature.rb","old_path":"files/ruby/feature.rb","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":5,"utf8_diff":"--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n 
unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" => path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" => path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output << stdout.read\n @cmd_output << stderr.read\n","new_path":"files/ruby/popen.rb","old_path":"files/ruby/popen.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":6,"utf8_diff":"--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n","new_path":"files/ruby/regex.rb","old_path":"files/ruby/regex.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":7,"utf8_diff":"--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n","new_path":"gitlab-grack","old_path":"gitlab-grack","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":8,"utf8_diff":"--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 
79bceae69cb5750d6567b223597999bfa91cb3b9\n","new_path":"gitlab-shell","old_path":"gitlab-shell","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":27,"created_at":"2016-06-14T15:02:36.572Z","updated_at":"2016-06-14T15:02:36.658Z","base_commit_sha":"ae73cb07c9eeaf35924a10f713b364d32b2dd34f","real_size":"9"},"events":[{"id":221,"target_type":"MergeRequest","target_id":27,"project_id":36,"created_at":"2016-06-14T15:02:36.703Z","updated_at":"2016-06-14T15:02:36.703Z","action":1,"author_id":1},{"id":187,"target_type":"MergeRequest","target_id":27,"project_id":5,"created_at":"2016-06-14T15:02:36.703Z","updated_at":"2016-06-14T15:02:36.703Z","action":1,"author_id":1}],"approvals_before_merge":1,"award_emoji":[{"id":1,"name":"thumbsup","user_id":1,"awardable_type":"MergeRequest","awardable_id":27,"created_at":"2020-01-07T11:21:21.235Z","updated_at":"2020-01-07T11:21:21.235Z"},{"id":2,"name":"drum","user_id":1,"awardable_type":"MergeRequest","awardable_id":27,"created_at":"2020-01-07T11:21:21.235Z","updated_at":"2020-01-07T11:21:21.235Z"}]}
-{"id":26,"target_branch":"master","source_branch":"feature","source_project_id":4,"author_id":1,"assignee_id":null,"title":"MR2","created_at":"2016-06-14T15:02:36.418Z","updated_at":"2016-06-14T15:02:57.013Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":8,"description":null,"position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":679,"note":"Qui rerum totam nisi est.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:56.848Z","updated_at":"2016-06-14T15:02:56.848Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":680,"note":"Pariatur magni corrupti consequatur debitis minima error beatae voluptatem.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:56.871Z","updated_at":"2016-06-14T15:02:56.871Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":681,"note":"Qui quis ut modi eos rerum ratione.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:56.895Z","updated_at":"2016-06-14T15:02:56.895Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":682,"note":"Illum quidem expedita mollitia fugit.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:56.918Z","updated_at":"2016-06-14T15:02:56.918Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster 
II"},"events":[]},{"id":683,"note":"Consectetur voluptate sit sint possimus veritatis quod.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:56.942Z","updated_at":"2016-06-14T15:02:56.942Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":684,"note":"Natus libero quibusdam rem assumenda deleniti accusamus sed earum.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:56.966Z","updated_at":"2016-06-14T15:02:56.966Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":685,"note":"Tenetur autem nihil rerum odit.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:56.989Z","updated_at":"2016-06-14T15:02:56.989Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":686,"note":"Quia maiores et odio sed.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:57.012Z","updated_at":"2016-06-14T15:02:57.012Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":26,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":26,"sha":"0b4bc9a49b562e85de7cc9e834518ea6828729b9","relative_order":0,"message":"Feature added\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:26:01.000+01:00","author_name":"Dmitriy 
Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:26:01.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"}],"merge_request_diff_files":[{"merge_request_diff_id":26,"relative_order":0,"utf8_diff":"--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,5 @@\n+class Feature\n+ def foo\n+ puts 'bar'\n+ end\n+end\n","new_path":"files/ruby/feature.rb","old_path":"files/ruby/feature.rb","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":26,"created_at":"2016-06-14T15:02:36.421Z","updated_at":"2016-06-14T15:02:36.474Z","base_commit_sha":"ae73cb07c9eeaf35924a10f713b364d32b2dd34f","real_size":"1"},"events":[{"id":222,"target_type":"MergeRequest","target_id":26,"project_id":36,"created_at":"2016-06-14T15:02:36.496Z","updated_at":"2016-06-14T15:02:36.496Z","action":1,"author_id":1},{"id":186,"target_type":"MergeRequest","target_id":26,"project_id":5,"created_at":"2016-06-14T15:02:36.496Z","updated_at":"2016-06-14T15:02:36.496Z","action":1,"author_id":1}]}
-{"id":15,"target_branch":"test-7","source_branch":"test-1","source_project_id":5,"author_id":22,"assignee_id":16,"title":"Qui accusantium et inventore facilis doloribus occaecati officiis.","created_at":"2016-06-14T15:02:25.168Z","updated_at":"2016-06-14T15:02:59.521Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":7,"description":"Et commodi deserunt aspernatur vero rerum. Ut non dolorum alias in odit est libero. Voluptatibus eos in et vitae repudiandae facilis ex mollitia.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":777,"note":"Pariatur voluptas placeat aspernatur culpa suscipit soluta.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:59.348Z","updated_at":"2016-06-14T15:02:59.348Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":778,"note":"Alias et iure mollitia suscipit molestiae voluptatum nostrum asperiores.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:59.372Z","updated_at":"2016-06-14T15:02:59.372Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":779,"note":"Laudantium qui eum qui sunt.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:59.395Z","updated_at":"2016-06-14T15:02:59.395Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":780,"note":"Quas rem est iusto ut delectus fugiat recusandae 
mollitia.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:59.418Z","updated_at":"2016-06-14T15:02:59.418Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":781,"note":"Repellendus ab et qui nesciunt.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:59.444Z","updated_at":"2016-06-14T15:02:59.444Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":782,"note":"Non possimus voluptatum odio qui ut.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:59.469Z","updated_at":"2016-06-14T15:02:59.469Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":783,"note":"Dolores repellendus eum ducimus quam ab dolorem quia.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:59.494Z","updated_at":"2016-06-14T15:02:59.494Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":784,"note":"Facilis dolorem aut corrupti id ratione 
occaecati.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:59.520Z","updated_at":"2016-06-14T15:02:59.520Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":15,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":15,"relative_order":0,"sha":"94b8d581c48d894b86661718582fecbc5e3ed2eb","message":"fixes #10\n","authored_date":"2016-01-19T13:22:56.000+01:00","author_name":"James Lopez","author_email":"james@jameslopez.es","committed_date":"2016-01-19T13:22:56.000+01:00","committer_name":"James Lopez","committer_email":"james@jameslopez.es"}],"merge_request_diff_files":[{"merge_request_diff_id":15,"relative_order":0,"utf8_diff":"--- /dev/null\n+++ b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":15,"created_at":"2016-06-14T15:02:25.171Z","updated_at":"2016-06-14T15:02:25.230Z","base_commit_sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","real_size":"1"},"events":[{"id":223,"target_type":"MergeRequest","target_id":15,"project_id":36,"created_at":"2016-06-14T15:02:25.262Z","updated_at":"2016-06-14T15:02:25.262Z","action":1,"author_id":1},{"id":175,"target_type":"MergeRequest","target_id":15,"project_id":5,"created_at":"2016-06-14T15:02:25.262Z","updated_at":"2016-06-14T15:02:25.262Z","action":1,"author_id":22}]}
-{"id":14,"target_branch":"fix","source_branch":"test-3","source_project_id":5,"author_id":20,"assignee_id":20,"title":"In voluptas aut sequi voluptatem ullam vel corporis illum consequatur.","created_at":"2016-06-14T15:02:24.760Z","updated_at":"2016-06-14T15:02:59.749Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":6,"description":"Dicta magnam non voluptates nam dignissimos nostrum deserunt. Dolorum et suscipit iure quae doloremque. Necessitatibus saepe aut labore sed.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":785,"note":"Atque cupiditate necessitatibus deserunt minus natus odit.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:59.559Z","updated_at":"2016-06-14T15:02:59.559Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":786,"note":"Non dolorem provident mollitia nesciunt optio ex eveniet.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:59.587Z","updated_at":"2016-06-14T15:02:59.587Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":787,"note":"Similique officia nemo quasi commodi accusantium quae qui.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:59.621Z","updated_at":"2016-06-14T15:02:59.621Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":788,"note":"Et est et alias ad dolor 
qui.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:59.650Z","updated_at":"2016-06-14T15:02:59.650Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":789,"note":"Numquam temporibus ratione voluptatibus aliquid.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:59.675Z","updated_at":"2016-06-14T15:02:59.675Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":790,"note":"Ut ex aliquam consectetur perferendis est hic aut quia.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:59.703Z","updated_at":"2016-06-14T15:02:59.703Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":791,"note":"Esse eos quam quaerat aut ut asperiores officiis.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:59.726Z","updated_at":"2016-06-14T15:02:59.726Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":792,"note":"Sint facilis accusantium iure 
blanditiis.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:59.748Z","updated_at":"2016-06-14T15:02:59.748Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":14,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":14,"relative_order":0,"sha":"ddd4ff416a931589c695eb4f5b23f844426f6928","message":"fixes #10\n","authored_date":"2016-01-19T14:14:43.000+01:00","author_name":"James Lopez","author_email":"james@jameslopez.es","committed_date":"2016-01-19T14:14:43.000+01:00","committer_name":"James Lopez","committer_email":"james@jameslopez.es"},{"merge_request_diff_id":14,"relative_order":1,"sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","message":"Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6","authored_date":"2015-12-07T12:52:12.000+01:00","author_name":"Marin Jankovski","author_email":"marin@gitlab.com","committed_date":"2015-12-07T12:52:12.000+01:00","committer_name":"Marin Jankovski","committer_email":"marin@gitlab.com"},{"merge_request_diff_id":14,"relative_order":2,"sha":"048721d90c449b244b7b4c53a9186b04330174ec","message":"LFS object pointer.\n","authored_date":"2015-12-07T11:54:28.000+01:00","author_name":"Marin Jankovski","author_email":"maxlazio@gmail.com","committed_date":"2015-12-07T11:54:28.000+01:00","committer_name":"Marin Jankovski","committer_email":"maxlazio@gmail.com"},{"merge_request_diff_id":14,"relative_order":3,"sha":"5f923865dde3436854e9ceb9cdb7815618d4e849","message":"GitLab currently doesn't support patches that involve a merge commit: add a commit here\n","authored_date":"2015-11-13T16:27:12.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T16:27:12.000+01:00","committer_name":"Stan 
Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":14,"relative_order":4,"sha":"d2d430676773caa88cdaf7c55944073b2fd5561a","message":"Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5","authored_date":"2015-11-13T08:50:17.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:50:17.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":14,"relative_order":5,"sha":"2ea1f3dec713d940208fb5ce4a38765ecb5d3f73","message":"Add GitLab SVG\n","authored_date":"2015-11-13T08:39:43.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:39:43.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":14,"relative_order":6,"sha":"59e29889be61e6e0e5e223bfa9ac2721d31605b8","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4","authored_date":"2015-11-13T07:21:40.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T07:21:40.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":14,"relative_order":7,"sha":"66eceea0db202bb39c4e445e8ca28689645366c5","message":"add spaces in whitespace file\n","authored_date":"2015-11-13T06:01:27.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:01:27.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":14,"relative_order":8,"sha":"08f22f255f082689c0d7d39d19205085311542bc","message":"remove empty file.(beacase git ignore empty file)\nadd whitespace test 
file.\n","authored_date":"2015-11-13T06:00:16.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:00:16.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":14,"relative_order":9,"sha":"19e2e9b4ef76b422ce1154af39a91323ccc57434","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3","authored_date":"2015-11-13T05:23:14.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T05:23:14.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":14,"relative_order":10,"sha":"c642fe9b8b9f28f9225d7ea953fe14e74748d53b","message":"add whitespace in empty\n","authored_date":"2015-11-13T05:08:45.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:45.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":14,"relative_order":11,"sha":"9a944d90955aaf45f6d0c88f30e27f8d2c41cec0","message":"add empty file\n","authored_date":"2015-11-13T05:08:04.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:04.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":14,"relative_order":12,"sha":"c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd","message":"Add ISO-8859 test file\n","authored_date":"2015-08-25T17:53:12.000+02:00","author_name":"Stan Hu","author_email":"stanhu@packetzoom.com","committed_date":"2015-08-25T17:53:12.000+02:00","committer_name":"Stan Hu","committer_email":"stanhu@packetzoom.com"},{"merge_request_diff_id":14,"relative_order":13,"sha":"e56497bb5f03a90a51293fc6d516788730953899","message":"Merge branch 'tree_helper_spec' into 'master'\n\nAdd 
directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/275#note_732774)\n\nSee merge request !2\n","authored_date":"2015-01-10T22:23:29.000+01:00","author_name":"Sytse Sijbrandij","author_email":"sytse@gitlab.com","committed_date":"2015-01-10T22:23:29.000+01:00","committer_name":"Sytse Sijbrandij","committer_email":"sytse@gitlab.com"},{"merge_request_diff_id":14,"relative_order":14,"sha":"4cd80ccab63c82b4bad16faa5193fbd2aa06df40","message":"add directory structure for tree_helper spec\n","authored_date":"2015-01-10T21:28:18.000+01:00","author_name":"marmis85","author_email":"marmis85@gmail.com","committed_date":"2015-01-10T21:28:18.000+01:00","committer_name":"marmis85","committer_email":"marmis85@gmail.com"},{"merge_request_diff_id":14,"relative_order":15,"sha":"5937ac0a7beb003549fc5fd26fc247adbce4a52e","message":"Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T10:01:38.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T10:01:38.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"},{"merge_request_diff_id":14,"relative_order":16,"sha":"570e7b2abdd848b95f2f578043fc23bd6f6fd24d","message":"Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:57:31.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:57:31.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"},{"merge_request_diff_id":14,"relative_order":17,"sha":"6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9","message":"More submodules\n\nSigned-off-by: Dmitriy 
Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:54:21.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:54:21.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"},{"merge_request_diff_id":14,"relative_order":18,"sha":"d14d6c0abdd253381df51a723d58691b2ee1ab08","message":"Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:49:50.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:49:50.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"},{"merge_request_diff_id":14,"relative_order":19,"sha":"c1acaa58bbcbc3eafe538cb8274ba387047b69f8","message":"Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:48:32.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:48:32.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"}],"merge_request_diff_files":[{"merge_request_diff_id":14,"relative_order":0,"utf8_diff":"Binary files a/.DS_Store and /dev/null differ\n","new_path":".DS_Store","old_path":".DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":14,"relative_order":1,"utf8_diff":"--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n","new_path":".gitignore","old_path":".gitignore","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":2,"utf8_diff":"--- 
a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n","new_path":".gitmodules","old_path":".gitmodules","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":3,"utf8_diff":"--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n","new_path":"CHANGELOG","old_path":"CHANGELOG","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n","new_path":"encoding/iso8859.txt","old_path":"encoding/iso8859.txt","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":5,"utf8_diff":"Binary files a/files/.DS_Store and /dev/null differ\n","new_path":"files/.DS_Store","old_path":"files/.DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":14,"relative_order":6,"utf8_diff":"--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 
(9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 
C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 
0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path 
d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" 
sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+</svg>\n\\ No newline at end of file\n","new_path":"files/images/wm.svg","old_path":"files/images/wm.svg","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":7,"utf8_diff":"--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid 
sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n","new_path":"files/lfs/lfs_object.iso","old_path":"files/lfs/lfs_object.iso","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":8,"utf8_diff":"--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" => path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" => path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output << stdout.read\n @cmd_output << stderr.read\n","new_path":"files/ruby/popen.rb","old_path":"files/ruby/popen.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":9,"utf8_diff":"--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins 
with)\n","new_path":"files/ruby/regex.rb","old_path":"files/ruby/regex.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":10,"utf8_diff":"--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n","new_path":"files/whitespace","old_path":"files/whitespace","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":11,"utf8_diff":"--- /dev/null\n+++ b/foo/bar/.gitkeep\n","new_path":"foo/bar/.gitkeep","old_path":"foo/bar/.gitkeep","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":12,"utf8_diff":"--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n","new_path":"gitlab-grack","old_path":"gitlab-grack","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":13,"utf8_diff":"--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n","new_path":"gitlab-shell","old_path":"gitlab-shell","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":14,"utf8_diff":"--- /dev/null\n+++ 
b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":14,"created_at":"2016-06-14T15:02:24.770Z","updated_at":"2016-06-14T15:02:25.007Z","base_commit_sha":"ae73cb07c9eeaf35924a10f713b364d32b2dd34f","real_size":"15"},"events":[{"id":224,"target_type":"MergeRequest","target_id":14,"project_id":36,"created_at":"2016-06-14T15:02:25.113Z","updated_at":"2016-06-14T15:02:25.113Z","action":1,"author_id":1},{"id":174,"target_type":"MergeRequest","target_id":14,"project_id":5,"created_at":"2016-06-14T15:02:25.113Z","updated_at":"2016-06-14T15:02:25.113Z","action":1,"author_id":20}]}
-{"id":13,"target_branch":"improve/awesome","source_branch":"test-8","source_project_id":5,"author_id":16,"assignee_id":25,"title":"Voluptates consequatur eius nemo amet libero animi illum delectus tempore.","created_at":"2016-06-14T15:02:24.415Z","updated_at":"2016-06-14T15:02:59.958Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":5,"description":"Est eaque quasi qui qui. Similique voluptatem impedit iusto ratione reprehenderit. Itaque est illum ut nulla aut.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":793,"note":"In illum maxime aperiam nulla est aspernatur.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:59.782Z","updated_at":"2016-06-14T15:02:59.782Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[{"merge_request_diff_id":14,"id":529,"target_type":"Note","target_id":793,"project_id":4,"created_at":"2016-07-07T14:35:12.128Z","updated_at":"2016-07-07T14:35:12.128Z","action":6,"author_id":1}]},{"id":794,"note":"Enim quia perferendis cum distinctio tenetur optio voluptas veniam.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:59.807Z","updated_at":"2016-06-14T15:02:59.807Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":795,"note":"Dolor ad quia quis pariatur ducimus.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:59.831Z","updated_at":"2016-06-14T15:02:59.831Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 
0"},"events":[]},{"id":796,"note":"Et a odio voluptate aut.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:59.854Z","updated_at":"2016-06-14T15:02:59.854Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":797,"note":"Quis nihil temporibus voluptatum modi minima a ut.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:59.879Z","updated_at":"2016-06-14T15:02:59.879Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":798,"note":"Ut alias consequatur in nostrum.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:59.904Z","updated_at":"2016-06-14T15:02:59.904Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":799,"note":"Voluptatibus aperiam assumenda et neque sint libero.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:59.926Z","updated_at":"2016-06-14T15:02:59.926Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":800,"note":"Veritatis voluptatem dolor dolores magni quo ut ipsa 
fuga.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:59.956Z","updated_at":"2016-06-14T15:02:59.956Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":13,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":13,"relative_order":0,"sha":"0bfedc29d30280c7e8564e19f654584b459e5868","message":"fixes #10\n","authored_date":"2016-01-19T15:25:23.000+01:00","author_name":"James Lopez","author_email":"james@jameslopez.es","committed_date":"2016-01-19T15:25:23.000+01:00","committer_name":"James Lopez","committer_email":"james@jameslopez.es"},{"merge_request_diff_id":13,"relative_order":1,"sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","message":"Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6","authored_date":"2015-12-07T12:52:12.000+01:00","author_name":"Marin Jankovski","author_email":"marin@gitlab.com","committed_date":"2015-12-07T12:52:12.000+01:00","committer_name":"Marin Jankovski","committer_email":"marin@gitlab.com"},{"merge_request_diff_id":13,"relative_order":2,"sha":"048721d90c449b244b7b4c53a9186b04330174ec","message":"LFS object pointer.\n","authored_date":"2015-12-07T11:54:28.000+01:00","author_name":"Marin Jankovski","author_email":"maxlazio@gmail.com","committed_date":"2015-12-07T11:54:28.000+01:00","committer_name":"Marin Jankovski","committer_email":"maxlazio@gmail.com"},{"merge_request_diff_id":13,"relative_order":3,"sha":"5f923865dde3436854e9ceb9cdb7815618d4e849","message":"GitLab currently doesn't support patches that involve a merge commit: add a commit here\n","authored_date":"2015-11-13T16:27:12.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T16:27:12.000+01:00","committer_name":"Stan 
Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":13,"relative_order":4,"sha":"d2d430676773caa88cdaf7c55944073b2fd5561a","message":"Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5","authored_date":"2015-11-13T08:50:17.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:50:17.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":13,"relative_order":5,"sha":"2ea1f3dec713d940208fb5ce4a38765ecb5d3f73","message":"Add GitLab SVG\n","authored_date":"2015-11-13T08:39:43.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:39:43.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":13,"relative_order":6,"sha":"59e29889be61e6e0e5e223bfa9ac2721d31605b8","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4","authored_date":"2015-11-13T07:21:40.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T07:21:40.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":13,"relative_order":7,"sha":"66eceea0db202bb39c4e445e8ca28689645366c5","message":"add spaces in whitespace file\n","authored_date":"2015-11-13T06:01:27.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:01:27.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":13,"relative_order":8,"sha":"08f22f255f082689c0d7d39d19205085311542bc","message":"remove empty file.(beacase git ignore empty file)\nadd whitespace test 
file.\n","authored_date":"2015-11-13T06:00:16.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:00:16.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":13,"relative_order":9,"sha":"19e2e9b4ef76b422ce1154af39a91323ccc57434","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3","authored_date":"2015-11-13T05:23:14.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T05:23:14.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":13,"relative_order":10,"sha":"c642fe9b8b9f28f9225d7ea953fe14e74748d53b","message":"add whitespace in empty\n","authored_date":"2015-11-13T05:08:45.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:45.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":13,"relative_order":11,"sha":"9a944d90955aaf45f6d0c88f30e27f8d2c41cec0","message":"add empty file\n","authored_date":"2015-11-13T05:08:04.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:04.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":13,"relative_order":12,"sha":"c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd","message":"Add ISO-8859 test file\n","authored_date":"2015-08-25T17:53:12.000+02:00","author_name":"Stan Hu","author_email":"stanhu@packetzoom.com","committed_date":"2015-08-25T17:53:12.000+02:00","committer_name":"Stan Hu","committer_email":"stanhu@packetzoom.com"},{"merge_request_diff_id":13,"relative_order":13,"sha":"e56497bb5f03a90a51293fc6d516788730953899","message":"Merge branch 'tree_helper_spec' into 'master'\n\nAdd 
directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/275#note_732774)\n\nSee merge request !2\n","authored_date":"2015-01-10T22:23:29.000+01:00","author_name":"Sytse Sijbrandij","author_email":"sytse@gitlab.com","committed_date":"2015-01-10T22:23:29.000+01:00","committer_name":"Sytse Sijbrandij","committer_email":"sytse@gitlab.com"},{"merge_request_diff_id":13,"relative_order":14,"sha":"4cd80ccab63c82b4bad16faa5193fbd2aa06df40","message":"add directory structure for tree_helper spec\n","authored_date":"2015-01-10T21:28:18.000+01:00","author_name":"marmis85","author_email":"marmis85@gmail.com","committed_date":"2015-01-10T21:28:18.000+01:00","committer_name":"marmis85","committer_email":"marmis85@gmail.com"}],"merge_request_diff_files":[{"merge_request_diff_id":13,"relative_order":0,"utf8_diff":"--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n","new_path":"CHANGELOG","old_path":"CHANGELOG","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":1,"utf8_diff":"--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n","new_path":"encoding/iso8859.txt","old_path":"encoding/iso8859.txt","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":2,"utf8_diff":"--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" 
xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 
L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 
L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ 
<g id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 
Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+</svg>\n\\ No newline at end of file\n","new_path":"files/images/wm.svg","old_path":"files/images/wm.svg","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":3,"utf8_diff":"--- 
/dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n","new_path":"files/lfs/lfs_object.iso","old_path":"files/lfs/lfs_object.iso","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n","new_path":"files/whitespace","old_path":"files/whitespace","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":5,"utf8_diff":"--- /dev/null\n+++ b/foo/bar/.gitkeep\n","new_path":"foo/bar/.gitkeep","old_path":"foo/bar/.gitkeep","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":6,"utf8_diff":"--- /dev/null\n+++ b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":13,"created_at":"2016-06-14T15:02:24.420Z","updated_at":"2016-06-14T15:02:24.561Z","base_commit_sha":"5937ac0a7beb003549fc5fd26fc247adbce4a52e","real_size":"7"},"events":[{"id":225,"target_type":"MergeRequest","target_id":13,"project_id":36,"created_at":"2016-06-14T15:02:24.636Z","updated_at":"2016-06-14T15:02:24.636Z","action":1,"author_id":16},{"id":173,"target_type":"MergeRequest","target_id":13,"project_id":5,"created_at":"2016-06-14T15:02:24.636Z","updated_at":"2016-06-14T15:02:24.636Z","action":1,"author_id":16}]}
-{"id":12,"target_branch":"flatten-dirs","source_branch":"test-2","source_project_id":5,"author_id":1,"assignee_id":22,"title":"In a rerum harum nihil accusamus aut quia nobis non.","created_at":"2016-06-14T15:02:24.000Z","updated_at":"2016-06-14T15:03:00.225Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":4,"description":"Nam magnam odit velit rerum. Sapiente dolore sunt saepe debitis. Culpa maiores ut ad dolores dolorem et.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":801,"note":"Nihil dicta molestias expedita atque.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:03:00.001Z","updated_at":"2016-06-14T15:03:00.001Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":802,"note":"Illum culpa voluptas enim accusantium deserunt.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:03:00.034Z","updated_at":"2016-06-14T15:03:00.034Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":803,"note":"Dicta esse aliquam laboriosam unde alias.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:03:00.065Z","updated_at":"2016-06-14T15:03:00.065Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":804,"note":"Dicta autem et sed molestiae ut 
quae.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:03:00.097Z","updated_at":"2016-06-14T15:03:00.097Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":805,"note":"Ut ut temporibus voluptas dolore quia velit.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:03:00.129Z","updated_at":"2016-06-14T15:03:00.129Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":806,"note":"Dolores similique sint pariatur error id quia fugit aut.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:03:00.162Z","updated_at":"2016-06-14T15:03:00.162Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":807,"note":"Quisquam provident nihil aperiam voluptatem.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:03:00.193Z","updated_at":"2016-06-14T15:03:00.193Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":808,"note":"Similique quo vero expedita deserunt ipsam 
earum.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:03:00.224Z","updated_at":"2016-06-14T15:03:00.224Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":12,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":12,"relative_order":0,"sha":"97a0df9696e2aebf10c31b3016f40214e0e8f243","message":"fixes #10\n","authored_date":"2016-01-19T14:08:21.000+01:00","author_name":"James Lopez","author_email":"james@jameslopez.es","committed_date":"2016-01-19T14:08:21.000+01:00","committer_name":"James Lopez","committer_email":"james@jameslopez.es"},{"merge_request_diff_id":12,"relative_order":1,"sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","message":"Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6","authored_date":"2015-12-07T12:52:12.000+01:00","author_name":"Marin Jankovski","author_email":"marin@gitlab.com","committed_date":"2015-12-07T12:52:12.000+01:00","committer_name":"Marin Jankovski","committer_email":"marin@gitlab.com"},{"merge_request_diff_id":12,"relative_order":2,"sha":"048721d90c449b244b7b4c53a9186b04330174ec","message":"LFS object pointer.\n","authored_date":"2015-12-07T11:54:28.000+01:00","author_name":"Marin Jankovski","author_email":"maxlazio@gmail.com","committed_date":"2015-12-07T11:54:28.000+01:00","committer_name":"Marin Jankovski","committer_email":"maxlazio@gmail.com"},{"merge_request_diff_id":12,"relative_order":3,"sha":"5f923865dde3436854e9ceb9cdb7815618d4e849","message":"GitLab currently doesn't support patches that involve a merge commit: add a commit here\n","authored_date":"2015-11-13T16:27:12.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T16:27:12.000+01:00","committer_name":"Stan 
Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":12,"relative_order":4,"sha":"d2d430676773caa88cdaf7c55944073b2fd5561a","message":"Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5","authored_date":"2015-11-13T08:50:17.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:50:17.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":12,"relative_order":5,"sha":"2ea1f3dec713d940208fb5ce4a38765ecb5d3f73","message":"Add GitLab SVG\n","authored_date":"2015-11-13T08:39:43.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:39:43.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":12,"relative_order":6,"sha":"59e29889be61e6e0e5e223bfa9ac2721d31605b8","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4","authored_date":"2015-11-13T07:21:40.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T07:21:40.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":12,"relative_order":7,"sha":"66eceea0db202bb39c4e445e8ca28689645366c5","message":"add spaces in whitespace file\n","authored_date":"2015-11-13T06:01:27.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:01:27.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":12,"relative_order":8,"sha":"08f22f255f082689c0d7d39d19205085311542bc","message":"remove empty file.(beacase git ignore empty file)\nadd whitespace test 
file.\n","authored_date":"2015-11-13T06:00:16.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:00:16.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":12,"relative_order":9,"sha":"19e2e9b4ef76b422ce1154af39a91323ccc57434","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3","authored_date":"2015-11-13T05:23:14.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T05:23:14.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":12,"relative_order":10,"sha":"c642fe9b8b9f28f9225d7ea953fe14e74748d53b","message":"add whitespace in empty\n","authored_date":"2015-11-13T05:08:45.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:45.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":12,"relative_order":11,"sha":"9a944d90955aaf45f6d0c88f30e27f8d2c41cec0","message":"add empty file\n","authored_date":"2015-11-13T05:08:04.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:04.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":12,"relative_order":12,"sha":"c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd","message":"Add ISO-8859 test file\n","authored_date":"2015-08-25T17:53:12.000+02:00","author_name":"Stan Hu","author_email":"stanhu@packetzoom.com","committed_date":"2015-08-25T17:53:12.000+02:00","committer_name":"Stan Hu","committer_email":"stanhu@packetzoom.com"}],"merge_request_diff_files":[{"merge_request_diff_id":12,"relative_order":0,"utf8_diff":"--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 
6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n","new_path":"CHANGELOG","old_path":"CHANGELOG","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":12,"relative_order":1,"utf8_diff":"--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n","new_path":"encoding/iso8859.txt","old_path":"encoding/iso8859.txt","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":12,"relative_order":2,"utf8_diff":"--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 
L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 
L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 
C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g 
id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 
L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+</svg>\n\\ No newline at end of file\n","new_path":"files/images/wm.svg","old_path":"files/images/wm.svg","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":12,"relative_order":3,"utf8_diff":"--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n","new_path":"files/lfs/lfs_object.iso","old_path":"files/lfs/lfs_object.iso","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":12,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n","new_path":"files/whitespace","old_path":"files/whitespace","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":12,"relative_order":5,"utf8_diff":"--- /dev/null\n+++ 
b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":12,"created_at":"2016-06-14T15:02:24.006Z","updated_at":"2016-06-14T15:02:24.169Z","base_commit_sha":"e56497bb5f03a90a51293fc6d516788730953899","real_size":"6"},"events":[{"id":226,"target_type":"MergeRequest","target_id":12,"project_id":36,"created_at":"2016-06-14T15:02:24.253Z","updated_at":"2016-06-14T15:02:24.253Z","action":1,"author_id":1},{"id":172,"target_type":"MergeRequest","target_id":12,"project_id":5,"created_at":"2016-06-14T15:02:24.253Z","updated_at":"2016-06-14T15:02:24.253Z","action":1,"author_id":1}]}
+{"id":27,"target_branch":"feature","source_branch":"feature_conflict","source_project_id":2147483547,"author_id":1,"assignee_id":null,"title":"MR1","created_at":"2016-06-14T15:02:36.568Z","updated_at":"2016-06-14T15:02:56.815Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":9,"description":null,"position":0,"updated_by_id":null,"merge_error":null,"diff_head_sha":"HEAD","source_branch_sha":"ABCD","target_branch_sha":"DCBA","merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":true,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":669,"note":"added 3 commits\n\n<ul><li>16ea4e20...074a2a32 - 2 commits from branch <code>master</code></li><li>ca223a02 - readme: fix typos</li></ul>\n\n[Compare with previous version](/group/project/merge_requests/1/diffs?diff_id=1189&start_sha=16ea4e207fb258fe4e9c73185a725207c9a4f3e1)","noteable_type":"MergeRequest","author_id":26,"created_at":"2020-03-28T12:47:33.461Z","updated_at":"2020-03-28T12:47:33.461Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"system":true,"st_diff":null,"updated_by_id":null,"position":null,"original_position":null,"resolved_at":null,"resolved_by_id":null,"discussion_id":null,"change_position":null,"resolved_by_push":null,"confidential":null,"type":null,"author":{"name":"User 4"},"award_emoji":[],"system_note_metadata":{"id":4789,"commit_count":3,"action":"commit","created_at":"2020-03-28T12:47:33.461Z","updated_at":"2020-03-28T12:47:33.461Z"},"events":[],"suggestions":[]},{"id":670,"note":"unmarked as a **Work In 
Progress**","noteable_type":"MergeRequest","author_id":26,"created_at":"2020-03-28T12:48:36.951Z","updated_at":"2020-03-28T12:48:36.951Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"system":true,"st_diff":null,"updated_by_id":null,"position":null,"original_position":null,"resolved_at":null,"resolved_by_id":null,"discussion_id":null,"change_position":null,"resolved_by_push":null,"confidential":null,"type":null,"author":{"name":"User 4"},"award_emoji":[],"system_note_metadata":{"id":4790,"commit_count":null,"action":"title","created_at":"2020-03-28T12:48:36.951Z","updated_at":"2020-03-28T12:48:36.951Z"},"events":[],"suggestions":[]},{"id":671,"note":"Sit voluptatibus eveniet architecto quidem.","note_html":"<p>something else entirely</p>","cached_markdown_version":917504,"noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:56.632Z","updated_at":"2016-06-14T15:02:56.632Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[],"award_emoji":[{"id":1,"name":"tada","user_id":1,"awardable_type":"Note","awardable_id":1,"created_at":"2019-11-05T15:37:21.287Z","updated_at":"2019-11-05T15:37:21.287Z"}]},{"id":672,"note":"Odio maxime ratione voluptatibus sed.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:56.656Z","updated_at":"2016-06-14T15:02:56.656Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":673,"note":"Et deserunt et omnis nihil excepturi 
accusantium.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:56.679Z","updated_at":"2016-06-14T15:02:56.679Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":674,"note":"Saepe asperiores exercitationem non dignissimos laborum reiciendis et ipsum.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:56.700Z","updated_at":"2016-06-14T15:02:56.700Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[],"suggestions":[{"id":1,"note_id":674,"relative_order":0,"applied":false,"commit_id":null,"from_content":"Original line\n","to_content":"New line\n","lines_above":0,"lines_below":0,"outdated":false}]},{"id":675,"note":"Numquam est at dolor quo et sed eligendi similique.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:56.720Z","updated_at":"2016-06-14T15:02:56.720Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":676,"note":"Et perferendis aliquam sunt nisi labore delectus.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:56.742Z","updated_at":"2016-06-14T15:02:56.742Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":677,"note":"Aut ex rerum et 
in.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:56.791Z","updated_at":"2016-06-14T15:02:56.791Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":678,"note":"Dolor laborum earum ut exercitationem.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:56.814Z","updated_at":"2016-06-14T15:02:56.814Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"resource_label_events":[{"id":243,"action":"add","issue_id":null,"merge_request_id":27,"label_id":null,"user_id":1,"created_at":"2018-08-28T08:24:00.494Z"}],"merge_request_diff":{"id":27,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":27,"relative_order":0,"sha":"bb5206fee213d983da88c47f9cf4cc6caf9c66dc","message":"Feature conflict added\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-08-06T08:35:52.000+02:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-08-06T08:35:52.000+02:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":1,"sha":"5937ac0a7beb003549fc5fd26fc247adbce4a52e","message":"Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T10:01:38.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T10:01:38.000+01:00","committer_name":"Dmitriy 
Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":2,"sha":"570e7b2abdd848b95f2f578043fc23bd6f6fd24d","message":"Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:57:31.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:57:31.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":3,"sha":"6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9","message":"More submodules\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:54:21.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:54:21.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":4,"sha":"d14d6c0abdd253381df51a723d58691b2ee1ab08","message":"Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:49:50.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:49:50.000+01:00","committer_name":"Dmitriy 
Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":5,"sha":"c1acaa58bbcbc3eafe538cb8274ba387047b69f8","message":"Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:48:32.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:48:32.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}}],"merge_request_diff_files":[{"merge_request_diff_id":27,"relative_order":0,"utf8_diff":"Binary files a/.DS_Store and /dev/null differ\n","new_path":".DS_Store","old_path":".DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":27,"relative_order":1,"utf8_diff":"--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n","new_path":".gitignore","old_path":".gitignore","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":2,"utf8_diff":"--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = 
https://gitlab.com/gitlab-org/gitlab-grack.git\n","new_path":".gitmodules","old_path":".gitmodules","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":3,"utf8_diff":"Binary files a/files/.DS_Store and /dev/null differ\n","new_path":"files/.DS_Store","old_path":"files/.DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":27,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,4 @@\n+# This file was changed in feature branch\n+# We put different code here to make merge conflict\n+class Conflict\n+end\n","new_path":"files/ruby/feature.rb","old_path":"files/ruby/feature.rb","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":5,"utf8_diff":"--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" => path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" => path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output << stdout.read\n @cmd_output << stderr.read\n","new_path":"files/ruby/popen.rb","old_path":"files/ruby/popen.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":6,"utf8_diff":"--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ 
-19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n","new_path":"files/ruby/regex.rb","old_path":"files/ruby/regex.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":7,"utf8_diff":"--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n","new_path":"gitlab-grack","old_path":"gitlab-grack","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":8,"utf8_diff":"--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 
79bceae69cb5750d6567b223597999bfa91cb3b9\n","new_path":"gitlab-shell","old_path":"gitlab-shell","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":27,"created_at":"2016-06-14T15:02:36.572Z","updated_at":"2016-06-14T15:02:36.658Z","base_commit_sha":"ae73cb07c9eeaf35924a10f713b364d32b2dd34f","real_size":"9"},"events":[{"id":221,"target_type":"MergeRequest","target_id":27,"project_id":36,"created_at":"2016-06-14T15:02:36.703Z","updated_at":"2016-06-14T15:02:36.703Z","action":1,"author_id":1},{"id":187,"target_type":"MergeRequest","target_id":27,"project_id":5,"created_at":"2016-06-14T15:02:36.703Z","updated_at":"2016-06-14T15:02:36.703Z","action":1,"author_id":1}],"approvals_before_merge":1,"award_emoji":[{"id":1,"name":"thumbsup","user_id":1,"awardable_type":"MergeRequest","awardable_id":27,"created_at":"2020-01-07T11:21:21.235Z","updated_at":"2020-01-07T11:21:21.235Z"},{"id":2,"name":"drum","user_id":1,"awardable_type":"MergeRequest","awardable_id":27,"created_at":"2020-01-07T11:21:21.235Z","updated_at":"2020-01-07T11:21:21.235Z"}]}
+{"id":26,"target_branch":"master","source_branch":"feature","source_project_id":4,"author_id":1,"assignee_id":null,"title":"MR2","created_at":"2016-06-14T15:02:36.418Z","updated_at":"2016-06-14T15:02:57.013Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":8,"description":null,"position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":679,"note":"Qui rerum totam nisi est.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:56.848Z","updated_at":"2016-06-14T15:02:56.848Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":680,"note":"Pariatur magni corrupti consequatur debitis minima error beatae voluptatem.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:56.871Z","updated_at":"2016-06-14T15:02:56.871Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":681,"note":"Qui quis ut modi eos rerum ratione.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:56.895Z","updated_at":"2016-06-14T15:02:56.895Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":682,"note":"Illum quidem expedita mollitia fugit.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:56.918Z","updated_at":"2016-06-14T15:02:56.918Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster 
II"},"events":[]},{"id":683,"note":"Consectetur voluptate sit sint possimus veritatis quod.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:56.942Z","updated_at":"2016-06-14T15:02:56.942Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":684,"note":"Natus libero quibusdam rem assumenda deleniti accusamus sed earum.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:56.966Z","updated_at":"2016-06-14T15:02:56.966Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":685,"note":"Tenetur autem nihil rerum odit.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:56.989Z","updated_at":"2016-06-14T15:02:56.989Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":686,"note":"Quia maiores et odio sed.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:57.012Z","updated_at":"2016-06-14T15:02:57.012Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":26,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":26,"sha":"0b4bc9a49b562e85de7cc9e834518ea6828729b9","relative_order":0,"message":"Feature added\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:26:01.000+01:00","author_name":"Dmitriy 
Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:26:01.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}}],"merge_request_diff_files":[{"merge_request_diff_id":26,"relative_order":0,"utf8_diff":"--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,5 @@\n+class Feature\n+ def foo\n+ puts 'bar'\n+ end\n+end\n","new_path":"files/ruby/feature.rb","old_path":"files/ruby/feature.rb","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":26,"created_at":"2016-06-14T15:02:36.421Z","updated_at":"2016-06-14T15:02:36.474Z","base_commit_sha":"ae73cb07c9eeaf35924a10f713b364d32b2dd34f","real_size":"1"},"events":[{"id":222,"target_type":"MergeRequest","target_id":26,"project_id":36,"created_at":"2016-06-14T15:02:36.496Z","updated_at":"2016-06-14T15:02:36.496Z","action":1,"author_id":1},{"id":186,"target_type":"MergeRequest","target_id":26,"project_id":5,"created_at":"2016-06-14T15:02:36.496Z","updated_at":"2016-06-14T15:02:36.496Z","action":1,"author_id":1}]}
+{"id":15,"target_branch":"test-7","source_branch":"test-1","source_project_id":5,"author_id":22,"assignee_id":16,"title":"Qui accusantium et inventore facilis doloribus occaecati officiis.","created_at":"2016-06-14T15:02:25.168Z","updated_at":"2016-06-14T15:02:59.521Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":7,"description":"Et commodi deserunt aspernatur vero rerum. Ut non dolorum alias in odit est libero. Voluptatibus eos in et vitae repudiandae facilis ex mollitia.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":777,"note":"Pariatur voluptas placeat aspernatur culpa suscipit soluta.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:59.348Z","updated_at":"2016-06-14T15:02:59.348Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":778,"note":"Alias et iure mollitia suscipit molestiae voluptatum nostrum asperiores.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:59.372Z","updated_at":"2016-06-14T15:02:59.372Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":779,"note":"Laudantium qui eum qui sunt.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:59.395Z","updated_at":"2016-06-14T15:02:59.395Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":780,"note":"Quas rem est iusto ut delectus fugiat recusandae 
mollitia.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:59.418Z","updated_at":"2016-06-14T15:02:59.418Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":781,"note":"Repellendus ab et qui nesciunt.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:59.444Z","updated_at":"2016-06-14T15:02:59.444Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":782,"note":"Non possimus voluptatum odio qui ut.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:59.469Z","updated_at":"2016-06-14T15:02:59.469Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":783,"note":"Dolores repellendus eum ducimus quam ab dolorem quia.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:59.494Z","updated_at":"2016-06-14T15:02:59.494Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":784,"note":"Facilis dolorem aut corrupti id ratione 
occaecati.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:59.520Z","updated_at":"2016-06-14T15:02:59.520Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":15,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":15,"relative_order":0,"sha":"94b8d581c48d894b86661718582fecbc5e3ed2eb","message":"fixes #10\n","authored_date":"2016-01-19T13:22:56.000+01:00","author_name":"James Lopez","author_email":"james@jameslopez.es","committed_date":"2016-01-19T13:22:56.000+01:00","committer_name":"James Lopez","committer_email":"james@jameslopez.es","commit_author":{"name":"James Lopez","email":"james@jameslopez.es"},"committer":{"name":"James Lopez","email":"james@jameslopez.es"}}],"merge_request_diff_files":[{"merge_request_diff_id":15,"relative_order":0,"utf8_diff":"--- /dev/null\n+++ b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":15,"created_at":"2016-06-14T15:02:25.171Z","updated_at":"2016-06-14T15:02:25.230Z","base_commit_sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","real_size":"1"},"events":[{"id":223,"target_type":"MergeRequest","target_id":15,"project_id":36,"created_at":"2016-06-14T15:02:25.262Z","updated_at":"2016-06-14T15:02:25.262Z","action":1,"author_id":1},{"id":175,"target_type":"MergeRequest","target_id":15,"project_id":5,"created_at":"2016-06-14T15:02:25.262Z","updated_at":"2016-06-14T15:02:25.262Z","action":1,"author_id":22}]}
+{"id":14,"target_branch":"fix","source_branch":"test-3","source_project_id":5,"author_id":20,"assignee_id":20,"title":"In voluptas aut sequi voluptatem ullam vel corporis illum consequatur.","created_at":"2016-06-14T15:02:24.760Z","updated_at":"2016-06-14T15:02:59.749Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":6,"description":"Dicta magnam non voluptates nam dignissimos nostrum deserunt. Dolorum et suscipit iure quae doloremque. Necessitatibus saepe aut labore sed.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":785,"note":"Atque cupiditate necessitatibus deserunt minus natus odit.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:59.559Z","updated_at":"2016-06-14T15:02:59.559Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":786,"note":"Non dolorem provident mollitia nesciunt optio ex eveniet.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:59.587Z","updated_at":"2016-06-14T15:02:59.587Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":787,"note":"Similique officia nemo quasi commodi accusantium quae qui.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:59.621Z","updated_at":"2016-06-14T15:02:59.621Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":788,"note":"Et est et alias ad dolor 
qui.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:59.650Z","updated_at":"2016-06-14T15:02:59.650Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":789,"note":"Numquam temporibus ratione voluptatibus aliquid.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:59.675Z","updated_at":"2016-06-14T15:02:59.675Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":790,"note":"Ut ex aliquam consectetur perferendis est hic aut quia.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:59.703Z","updated_at":"2016-06-14T15:02:59.703Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":791,"note":"Esse eos quam quaerat aut ut asperiores officiis.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:59.726Z","updated_at":"2016-06-14T15:02:59.726Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":792,"note":"Sint facilis accusantium iure 
blanditiis.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:59.748Z","updated_at":"2016-06-14T15:02:59.748Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":14,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":14,"relative_order":0,"sha":"ddd4ff416a931589c695eb4f5b23f844426f6928","message":"fixes #10\n","authored_date":"2016-01-19T14:14:43.000+01:00","author_name":"James Lopez","author_email":"james@jameslopez.es","committed_date":"2016-01-19T14:14:43.000+01:00","committer_name":"James Lopez","committer_email":"james@jameslopez.es","commit_author":{"name":"James Lopez","email":"james@jameslopez.es"},"committer":{"name":"James Lopez","email":"james@jameslopez.es"}},{"merge_request_diff_id":14,"relative_order":1,"sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","message":"Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6","authored_date":"2015-12-07T12:52:12.000+01:00","author_name":"Marin Jankovski","author_email":"marin@gitlab.com","committed_date":"2015-12-07T12:52:12.000+01:00","committer_name":"Marin Jankovski","committer_email":"marin@gitlab.com","commit_author":{"name":"Marin Jankovski","email":"marin@gitlab.com"},"committer":{"name":"Marin Jankovski","email":"marin@gitlab.com"}},{"merge_request_diff_id":14,"relative_order":2,"sha":"048721d90c449b244b7b4c53a9186b04330174ec","message":"LFS object pointer.\n","authored_date":"2015-12-07T11:54:28.000+01:00","author_name":"Marin Jankovski","author_email":"maxlazio@gmail.com","committed_date":"2015-12-07T11:54:28.000+01:00","committer_name":"Marin Jankovski","committer_email":"maxlazio@gmail.com","commit_author":{"name":"Marin Jankovski","email":"maxlazio@gmail.com"},"committer":{"name":"Marin 
Jankovski","email":"maxlazio@gmail.com"}},{"merge_request_diff_id":14,"relative_order":3,"sha":"5f923865dde3436854e9ceb9cdb7815618d4e849","message":"GitLab currently doesn't support patches that involve a merge commit: add a commit here\n","authored_date":"2015-11-13T16:27:12.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T16:27:12.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":14,"relative_order":4,"sha":"d2d430676773caa88cdaf7c55944073b2fd5561a","message":"Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5","authored_date":"2015-11-13T08:50:17.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:50:17.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":14,"relative_order":5,"sha":"2ea1f3dec713d940208fb5ce4a38765ecb5d3f73","message":"Add GitLab SVG\n","authored_date":"2015-11-13T08:39:43.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:39:43.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":14,"relative_order":6,"sha":"59e29889be61e6e0e5e223bfa9ac2721d31605b8","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request 
!4","authored_date":"2015-11-13T07:21:40.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T07:21:40.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":14,"relative_order":7,"sha":"66eceea0db202bb39c4e445e8ca28689645366c5","message":"add spaces in whitespace file\n","authored_date":"2015-11-13T06:01:27.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:01:27.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":14,"relative_order":8,"sha":"08f22f255f082689c0d7d39d19205085311542bc","message":"remove empty file.(beacase git ignore empty file)\nadd whitespace test file.\n","authored_date":"2015-11-13T06:00:16.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:00:16.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":14,"relative_order":9,"sha":"19e2e9b4ef76b422ce1154af39a91323ccc57434","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3","authored_date":"2015-11-13T05:23:14.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T05:23:14.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan 
Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":14,"relative_order":10,"sha":"c642fe9b8b9f28f9225d7ea953fe14e74748d53b","message":"add whitespace in empty\n","authored_date":"2015-11-13T05:08:45.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:45.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":14,"relative_order":11,"sha":"9a944d90955aaf45f6d0c88f30e27f8d2c41cec0","message":"add empty file\n","authored_date":"2015-11-13T05:08:04.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:04.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":14,"relative_order":12,"sha":"c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd","message":"Add ISO-8859 test file\n","authored_date":"2015-08-25T17:53:12.000+02:00","author_name":"Stan Hu","author_email":"stanhu@packetzoom.com","committed_date":"2015-08-25T17:53:12.000+02:00","committer_name":"Stan Hu","committer_email":"stanhu@packetzoom.com","commit_author":{"name":"Stan Hu","email":"stanhu@packetzoom.com"},"committer":{"name":"Stan Hu","email":"stanhu@packetzoom.com"}},{"merge_request_diff_id":14,"relative_order":13,"sha":"e56497bb5f03a90a51293fc6d516788730953899","message":"Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/275#note_732774)\n\nSee merge request !2\n","authored_date":"2015-01-10T22:23:29.000+01:00","author_name":"Sytse 
Sijbrandij","author_email":"sytse@gitlab.com","committed_date":"2015-01-10T22:23:29.000+01:00","committer_name":"Sytse Sijbrandij","committer_email":"sytse@gitlab.com","commit_author":{"name":"Sytse Sijbrandij","email":"sytse@gitlab.com"},"committer":{"name":"Sytse Sijbrandij","email":"sytse@gitlab.com"}},{"merge_request_diff_id":14,"relative_order":14,"sha":"4cd80ccab63c82b4bad16faa5193fbd2aa06df40","message":"add directory structure for tree_helper spec\n","authored_date":"2015-01-10T21:28:18.000+01:00","author_name":"marmis85","author_email":"marmis85@gmail.com","committed_date":"2015-01-10T21:28:18.000+01:00","committer_name":"marmis85","committer_email":"marmis85@gmail.com","commit_author":{"name":"marmis85","email":"marmis85@gmail.com"},"committer":{"name":"marmis85","email":"marmis85@gmail.com"}},{"merge_request_diff_id":14,"relative_order":15,"sha":"5937ac0a7beb003549fc5fd26fc247adbce4a52e","message":"Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T10:01:38.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T10:01:38.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":14,"relative_order":16,"sha":"570e7b2abdd848b95f2f578043fc23bd6f6fd24d","message":"Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:57:31.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:57:31.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy 
Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":14,"relative_order":17,"sha":"6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9","message":"More submodules\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:54:21.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:54:21.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":14,"relative_order":18,"sha":"d14d6c0abdd253381df51a723d58691b2ee1ab08","message":"Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:49:50.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:49:50.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":14,"relative_order":19,"sha":"c1acaa58bbcbc3eafe538cb8274ba387047b69f8","message":"Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:48:32.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:48:32.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy 
Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}}],"merge_request_diff_files":[{"merge_request_diff_id":14,"relative_order":0,"utf8_diff":"Binary files a/.DS_Store and /dev/null differ\n","new_path":".DS_Store","old_path":".DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":14,"relative_order":1,"utf8_diff":"--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n","new_path":".gitignore","old_path":".gitignore","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":2,"utf8_diff":"--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n","new_path":".gitmodules","old_path":".gitmodules","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":3,"utf8_diff":"--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n","new_path":"CHANGELOG","old_path":"CHANGELOG","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 
@@\n+Äü\n","new_path":"encoding/iso8859.txt","old_path":"encoding/iso8859.txt","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":5,"utf8_diff":"Binary files a/files/.DS_Store and /dev/null differ\n","new_path":"files/.DS_Store","old_path":"files/.DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":14,"relative_order":6,"utf8_diff":"--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" 
id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 
57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 
45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" 
stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 
Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+</svg>\n\\ No newline at end of file\n","new_path":"files/images/wm.svg","old_path":"files/images/wm.svg","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":7,"utf8_diff":"--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n","new_path":"files/lfs/lfs_object.iso","old_path":"files/lfs/lfs_object.iso","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":8,"utf8_diff":"--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" => path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" => path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output << stdout.read\n @cmd_output << 
stderr.read\n","new_path":"files/ruby/popen.rb","old_path":"files/ruby/popen.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":9,"utf8_diff":"--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n","new_path":"files/ruby/regex.rb","old_path":"files/ruby/regex.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":10,"utf8_diff":"--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n","new_path":"files/whitespace","old_path":"files/whitespace","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":11,"utf8_diff":"--- /dev/null\n+++ b/foo/bar/.gitkeep\n","new_path":"foo/bar/.gitkeep","old_path":"foo/bar/.gitkeep","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":12,"utf8_diff":"--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n","new_path":"gitlab-grack","old_path":"gitlab-grack","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":13,"utf8_diff":"--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 
79bceae69cb5750d6567b223597999bfa91cb3b9\n","new_path":"gitlab-shell","old_path":"gitlab-shell","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":14,"utf8_diff":"--- /dev/null\n+++ b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":14,"created_at":"2016-06-14T15:02:24.770Z","updated_at":"2016-06-14T15:02:25.007Z","base_commit_sha":"ae73cb07c9eeaf35924a10f713b364d32b2dd34f","real_size":"15"},"events":[{"id":224,"target_type":"MergeRequest","target_id":14,"project_id":36,"created_at":"2016-06-14T15:02:25.113Z","updated_at":"2016-06-14T15:02:25.113Z","action":1,"author_id":1},{"id":174,"target_type":"MergeRequest","target_id":14,"project_id":5,"created_at":"2016-06-14T15:02:25.113Z","updated_at":"2016-06-14T15:02:25.113Z","action":1,"author_id":20}]}
+{"id":13,"target_branch":"improve/awesome","source_branch":"test-8","source_project_id":5,"author_id":16,"assignee_id":25,"title":"Voluptates consequatur eius nemo amet libero animi illum delectus tempore.","created_at":"2016-06-14T15:02:24.415Z","updated_at":"2016-06-14T15:02:59.958Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":5,"description":"Est eaque quasi qui qui. Similique voluptatem impedit iusto ratione reprehenderit. Itaque est illum ut nulla aut.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":793,"note":"In illum maxime aperiam nulla est aspernatur.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:59.782Z","updated_at":"2016-06-14T15:02:59.782Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[{"merge_request_diff_id":14,"id":529,"target_type":"Note","target_id":793,"project_id":4,"created_at":"2016-07-07T14:35:12.128Z","updated_at":"2016-07-07T14:35:12.128Z","action":6,"author_id":1}]},{"id":794,"note":"Enim quia perferendis cum distinctio tenetur optio voluptas veniam.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:59.807Z","updated_at":"2016-06-14T15:02:59.807Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":795,"note":"Dolor ad quia quis pariatur ducimus.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:59.831Z","updated_at":"2016-06-14T15:02:59.831Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 
0"},"events":[]},{"id":796,"note":"Et a odio voluptate aut.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:59.854Z","updated_at":"2016-06-14T15:02:59.854Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":797,"note":"Quis nihil temporibus voluptatum modi minima a ut.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:59.879Z","updated_at":"2016-06-14T15:02:59.879Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":798,"note":"Ut alias consequatur in nostrum.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:59.904Z","updated_at":"2016-06-14T15:02:59.904Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":799,"note":"Voluptatibus aperiam assumenda et neque sint libero.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:59.926Z","updated_at":"2016-06-14T15:02:59.926Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":800,"note":"Veritatis voluptatem dolor dolores magni quo ut ipsa 
fuga.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:59.956Z","updated_at":"2016-06-14T15:02:59.956Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":13,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":13,"relative_order":0,"sha":"0bfedc29d30280c7e8564e19f654584b459e5868","message":"fixes #10\n","authored_date":"2016-01-19T15:25:23.000+01:00","author_name":"James Lopez","author_email":"james@jameslopez.es","committed_date":"2016-01-19T15:25:23.000+01:00","committer_name":"James Lopez","committer_email":"james@jameslopez.es","commit_author":{"name":"James Lopez","email":"james@jameslopez.es"},"committer":{"name":"James Lopez","email":"james@jameslopez.es"}},{"merge_request_diff_id":13,"relative_order":1,"sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","message":"Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6","authored_date":"2015-12-07T12:52:12.000+01:00","author_name":"Marin Jankovski","author_email":"marin@gitlab.com","committed_date":"2015-12-07T12:52:12.000+01:00","committer_name":"Marin Jankovski","committer_email":"marin@gitlab.com","commit_author":{"name":"Marin Jankovski","email":"marin@gitlab.com"},"committer":{"name":"Marin Jankovski","email":"marin@gitlab.com"}},{"merge_request_diff_id":13,"relative_order":2,"sha":"048721d90c449b244b7b4c53a9186b04330174ec","message":"LFS object pointer.\n","authored_date":"2015-12-07T11:54:28.000+01:00","author_name":"Marin Jankovski","author_email":"maxlazio@gmail.com","committed_date":"2015-12-07T11:54:28.000+01:00","committer_name":"Marin Jankovski","committer_email":"maxlazio@gmail.com","commit_author":{"name":"Marin Jankovski","email":"maxlazio@gmail.com"},"committer":{"name":"Marin 
Jankovski","email":"maxlazio@gmail.com"}},{"merge_request_diff_id":13,"relative_order":3,"sha":"5f923865dde3436854e9ceb9cdb7815618d4e849","message":"GitLab currently doesn't support patches that involve a merge commit: add a commit here\n","authored_date":"2015-11-13T16:27:12.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T16:27:12.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":13,"relative_order":4,"sha":"d2d430676773caa88cdaf7c55944073b2fd5561a","message":"Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5","authored_date":"2015-11-13T08:50:17.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:50:17.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":13,"relative_order":5,"sha":"2ea1f3dec713d940208fb5ce4a38765ecb5d3f73","message":"Add GitLab SVG\n","authored_date":"2015-11-13T08:39:43.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:39:43.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":13,"relative_order":6,"sha":"59e29889be61e6e0e5e223bfa9ac2721d31605b8","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request 
!4","authored_date":"2015-11-13T07:21:40.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T07:21:40.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":13,"relative_order":7,"sha":"66eceea0db202bb39c4e445e8ca28689645366c5","message":"add spaces in whitespace file\n","authored_date":"2015-11-13T06:01:27.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:01:27.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":13,"relative_order":8,"sha":"08f22f255f082689c0d7d39d19205085311542bc","message":"remove empty file.(beacase git ignore empty file)\nadd whitespace test file.\n","authored_date":"2015-11-13T06:00:16.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:00:16.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":13,"relative_order":9,"sha":"19e2e9b4ef76b422ce1154af39a91323ccc57434","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3","authored_date":"2015-11-13T05:23:14.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T05:23:14.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan 
Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":13,"relative_order":10,"sha":"c642fe9b8b9f28f9225d7ea953fe14e74748d53b","message":"add whitespace in empty\n","authored_date":"2015-11-13T05:08:45.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:45.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":13,"relative_order":11,"sha":"9a944d90955aaf45f6d0c88f30e27f8d2c41cec0","message":"add empty file\n","authored_date":"2015-11-13T05:08:04.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:04.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":13,"relative_order":12,"sha":"c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd","message":"Add ISO-8859 test file\n","authored_date":"2015-08-25T17:53:12.000+02:00","author_name":"Stan Hu","author_email":"stanhu@packetzoom.com","committed_date":"2015-08-25T17:53:12.000+02:00","committer_name":"Stan Hu","committer_email":"stanhu@packetzoom.com","commit_author":{"name":"Stan Hu","email":"stanhu@packetzoom.com"},"committer":{"name":"Stan Hu","email":"stanhu@packetzoom.com"}},{"merge_request_diff_id":13,"relative_order":13,"sha":"e56497bb5f03a90a51293fc6d516788730953899","message":"Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/275#note_732774)\n\nSee merge request !2\n","authored_date":"2015-01-10T22:23:29.000+01:00","author_name":"Sytse 
Sijbrandij","author_email":"sytse@gitlab.com","committed_date":"2015-01-10T22:23:29.000+01:00","committer_name":"Sytse Sijbrandij","committer_email":"sytse@gitlab.com","commit_author":{"name":"Sytse Sijbrandij","email":"sytse@gitlab.com"},"committer":{"name":"Sytse Sijbrandij","email":"sytse@gitlab.com"}},{"merge_request_diff_id":13,"relative_order":14,"sha":"4cd80ccab63c82b4bad16faa5193fbd2aa06df40","message":"add directory structure for tree_helper spec\n","authored_date":"2015-01-10T21:28:18.000+01:00","author_name":"marmis85","author_email":"marmis85@gmail.com","committed_date":"2015-01-10T21:28:18.000+01:00","committer_name":"marmis85","committer_email":"marmis85@gmail.com","commit_author":{"name":"marmis85","email":"marmis85@gmail.com"},"committer":{"name":"marmis85","email":"marmis85@gmail.com"}}],"merge_request_diff_files":[{"merge_request_diff_id":13,"relative_order":0,"utf8_diff":"--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n","new_path":"CHANGELOG","old_path":"CHANGELOG","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":1,"utf8_diff":"--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n","new_path":"encoding/iso8859.txt","old_path":"encoding/iso8859.txt","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":2,"utf8_diff":"--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" 
xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 
C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 
85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"g44\" stroke-width=\"1\" 
fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" 
fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+</svg>\n\\ No newline at end of file\n","new_path":"files/images/wm.svg","old_path":"files/images/wm.svg","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":3,"utf8_diff":"--- /dev/null\n+++ 
b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n","new_path":"files/lfs/lfs_object.iso","old_path":"files/lfs/lfs_object.iso","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n","new_path":"files/whitespace","old_path":"files/whitespace","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":5,"utf8_diff":"--- /dev/null\n+++ b/foo/bar/.gitkeep\n","new_path":"foo/bar/.gitkeep","old_path":"foo/bar/.gitkeep","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":6,"utf8_diff":"--- /dev/null\n+++ b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":13,"created_at":"2016-06-14T15:02:24.420Z","updated_at":"2016-06-14T15:02:24.561Z","base_commit_sha":"5937ac0a7beb003549fc5fd26fc247adbce4a52e","real_size":"7"},"events":[{"id":225,"target_type":"MergeRequest","target_id":13,"project_id":36,"created_at":"2016-06-14T15:02:24.636Z","updated_at":"2016-06-14T15:02:24.636Z","action":1,"author_id":16},{"id":173,"target_type":"MergeRequest","target_id":13,"project_id":5,"created_at":"2016-06-14T15:02:24.636Z","updated_at":"2016-06-14T15:02:24.636Z","action":1,"author_id":16}]}
+{"id":12,"target_branch":"flatten-dirs","source_branch":"test-2","source_project_id":5,"author_id":1,"assignee_id":22,"title":"In a rerum harum nihil accusamus aut quia nobis non.","created_at":"2016-06-14T15:02:24.000Z","updated_at":"2016-06-14T15:03:00.225Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":4,"description":"Nam magnam odit velit rerum. Sapiente dolore sunt saepe debitis. Culpa maiores ut ad dolores dolorem et.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":801,"note":"Nihil dicta molestias expedita atque.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:03:00.001Z","updated_at":"2016-06-14T15:03:00.001Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":802,"note":"Illum culpa voluptas enim accusantium deserunt.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:03:00.034Z","updated_at":"2016-06-14T15:03:00.034Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":803,"note":"Dicta esse aliquam laboriosam unde alias.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:03:00.065Z","updated_at":"2016-06-14T15:03:00.065Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":804,"note":"Dicta autem et sed molestiae ut 
quae.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:03:00.097Z","updated_at":"2016-06-14T15:03:00.097Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":805,"note":"Ut ut temporibus voluptas dolore quia velit.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:03:00.129Z","updated_at":"2016-06-14T15:03:00.129Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":806,"note":"Dolores similique sint pariatur error id quia fugit aut.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:03:00.162Z","updated_at":"2016-06-14T15:03:00.162Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":807,"note":"Quisquam provident nihil aperiam voluptatem.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:03:00.193Z","updated_at":"2016-06-14T15:03:00.193Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":808,"note":"Similique quo vero expedita deserunt ipsam 
earum.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:03:00.224Z","updated_at":"2016-06-14T15:03:00.224Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":12,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":12,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":12,"relative_order":0,"sha":"97a0df9696e2aebf10c31b3016f40214e0e8f243","message":"fixes #10\n","authored_date":"2016-01-19T14:08:21.000+01:00","author_name":"James Lopez","author_email":"james@jameslopez.es","committed_date":"2016-01-19T14:08:21.000+01:00","committer_name":"James Lopez","committer_email":"james@jameslopez.es","commit_author":{"name":"James Lopez","email":"james@jameslopez.es"},"committer":{"name":"James Lopez","email":"james@jameslopez.es"}},{"merge_request_diff_id":12,"relative_order":1,"sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","message":"Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6","authored_date":"2015-12-07T12:52:12.000+01:00","author_name":"Marin Jankovski","author_email":"marin@gitlab.com","committed_date":"2015-12-07T12:52:12.000+01:00","committer_name":"Marin Jankovski","committer_email":"marin@gitlab.com","commit_author":{"name":"Marin Jankovski","email":"marin@gitlab.com"},"committer":{"name":"Marin Jankovski","email":"marin@gitlab.com"}},{"merge_request_diff_id":12,"relative_order":2,"sha":"048721d90c449b244b7b4c53a9186b04330174ec","message":"LFS object pointer.\n","authored_date":"2015-12-07T11:54:28.000+01:00","author_name":"Marin Jankovski","author_email":"maxlazio@gmail.com","committed_date":"2015-12-07T11:54:28.000+01:00","committer_name":"Marin Jankovski","committer_email":"maxlazio@gmail.com","commit_author":{"name":"Marin Jankovski","email":"maxlazio@gmail.com"},"committer":{"name":"Marin 
Jankovski","email":"maxlazio@gmail.com"}},{"merge_request_diff_id":12,"relative_order":3,"sha":"5f923865dde3436854e9ceb9cdb7815618d4e849","message":"GitLab currently doesn't support patches that involve a merge commit: add a commit here\n","authored_date":"2015-11-13T16:27:12.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T16:27:12.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":12,"relative_order":4,"sha":"d2d430676773caa88cdaf7c55944073b2fd5561a","message":"Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5","authored_date":"2015-11-13T08:50:17.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:50:17.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":12,"relative_order":5,"sha":"2ea1f3dec713d940208fb5ce4a38765ecb5d3f73","message":"Add GitLab SVG\n","authored_date":"2015-11-13T08:39:43.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:39:43.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":12,"relative_order":6,"sha":"59e29889be61e6e0e5e223bfa9ac2721d31605b8","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request 
!4","authored_date":"2015-11-13T07:21:40.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T07:21:40.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":12,"relative_order":7,"sha":"66eceea0db202bb39c4e445e8ca28689645366c5","message":"add spaces in whitespace file\n","authored_date":"2015-11-13T06:01:27.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:01:27.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":12,"relative_order":8,"sha":"08f22f255f082689c0d7d39d19205085311542bc","message":"remove empty file.(beacase git ignore empty file)\nadd whitespace test file.\n","authored_date":"2015-11-13T06:00:16.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:00:16.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":12,"relative_order":9,"sha":"19e2e9b4ef76b422ce1154af39a91323ccc57434","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3","authored_date":"2015-11-13T05:23:14.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T05:23:14.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan 
Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":12,"relative_order":10,"sha":"c642fe9b8b9f28f9225d7ea953fe14e74748d53b","message":"add whitespace in empty\n","authored_date":"2015-11-13T05:08:45.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:45.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":12,"relative_order":11,"sha":"9a944d90955aaf45f6d0c88f30e27f8d2c41cec0","message":"add empty file\n","authored_date":"2015-11-13T05:08:04.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:04.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":12,"relative_order":12,"sha":"c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd","message":"Add ISO-8859 test file\n","authored_date":"2015-08-25T17:53:12.000+02:00","author_name":"Stan Hu","author_email":"stanhu@packetzoom.com","committed_date":"2015-08-25T17:53:12.000+02:00","committer_name":"Stan Hu","committer_email":"stanhu@packetzoom.com","commit_author":{"name":"Stan Hu","email":"stanhu@packetzoom.com"},"committer":{"name":"Stan Hu","email":"stanhu@packetzoom.com"}}],"merge_request_diff_files":[{"merge_request_diff_id":12,"relative_order":0,"utf8_diff":"--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason 
Hollingsworth)\n","new_path":"CHANGELOG","old_path":"CHANGELOG","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":12,"relative_order":1,"utf8_diff":"--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n","new_path":"encoding/iso8859.txt","old_path":"encoding/iso8859.txt","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":12,"relative_order":2,"utf8_diff":"--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" 
id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 
57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 
45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" 
stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 
Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+</svg>\n\\ No newline at end of file\n","new_path":"files/images/wm.svg","old_path":"files/images/wm.svg","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":12,"relative_order":3,"utf8_diff":"--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n","new_path":"files/lfs/lfs_object.iso","old_path":"files/lfs/lfs_object.iso","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":12,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n","new_path":"files/whitespace","old_path":"files/whitespace","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":12,"relative_order":5,"utf8_diff":"--- /dev/null\n+++ 
b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":12,"created_at":"2016-06-14T15:02:24.006Z","updated_at":"2016-06-14T15:02:24.169Z","base_commit_sha":"e56497bb5f03a90a51293fc6d516788730953899","real_size":"6"},"events":[{"id":226,"target_type":"MergeRequest","target_id":12,"project_id":36,"created_at":"2016-06-14T15:02:24.253Z","updated_at":"2016-06-14T15:02:24.253Z","action":1,"author_id":1},{"id":172,"target_type":"MergeRequest","target_id":12,"project_id":5,"created_at":"2016-06-14T15:02:24.253Z","updated_at":"2016-06-14T15:02:24.253Z","action":1,"author_id":1}]}
{"id":11,"target_branch":"test-15","source_branch":"'test'","source_project_id":5,"author_id":16,"assignee_id":16,"title":"Corporis provident similique perspiciatis dolores eos animi.","created_at":"2016-06-14T15:02:23.767Z","updated_at":"2016-06-14T15:03:00.475Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":3,"description":"Libero nesciunt mollitia quis odit eos vero quasi. Iure voluptatem ut sint pariatur voluptates ut aut. Laborum possimus unde illum ipsum eum.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":809,"note":"Omnis ratione laboriosam dolores qui.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:03:00.260Z","updated_at":"2016-06-14T15:03:00.260Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":11,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":810,"note":"Voluptas voluptates pariatur dolores maxime est voluptas.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:03:00.290Z","updated_at":"2016-06-14T15:03:00.290Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":11,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":811,"note":"Sit perspiciatis facilis ipsum consequatur.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:03:00.323Z","updated_at":"2016-06-14T15:03:00.323Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":11,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":812,"note":"Ut neque aliquam nam et 
est.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:03:00.349Z","updated_at":"2016-06-14T15:03:00.349Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":11,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":813,"note":"Et debitis rerum minima sit aut dolorem.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:03:00.374Z","updated_at":"2016-06-14T15:03:00.374Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":11,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":814,"note":"Ea nisi earum fugit iste aperiam consequatur.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:03:00.397Z","updated_at":"2016-06-14T15:03:00.397Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":11,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":815,"note":"Amet ratione consequatur laudantium rerum voluptas est nobis.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:03:00.450Z","updated_at":"2016-06-14T15:03:00.450Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":11,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":816,"note":"Ab ducimus cumque quia dolorem vitae sint beatae 
rerum.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:03:00.474Z","updated_at":"2016-06-14T15:03:00.474Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":11,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":11,"state":"empty","merge_request_diff_commits":[],"merge_request_diff_files":[],"merge_request_id":11,"created_at":"2016-06-14T15:02:23.772Z","updated_at":"2016-06-14T15:02:23.833Z","base_commit_sha":"e56497bb5f03a90a51293fc6d516788730953899","real_size":null},"events":[{"id":227,"target_type":"MergeRequest","target_id":11,"project_id":36,"created_at":"2016-06-14T15:02:23.865Z","updated_at":"2016-06-14T15:02:23.865Z","action":1,"author_id":16},{"id":171,"target_type":"MergeRequest","target_id":11,"project_id":5,"created_at":"2016-06-14T15:02:23.865Z","updated_at":"2016-06-14T15:02:23.865Z","action":1,"author_id":16}]}
-{"id":10,"target_branch":"feature","source_branch":"test-5","source_project_id":5,"author_id":20,"assignee_id":25,"title":"Eligendi reprehenderit doloribus quia et sit id.","created_at":"2016-06-14T15:02:23.014Z","updated_at":"2016-06-14T15:03:00.685Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":2,"description":"Ut dolor quia aliquid dolore et nisi. Est minus suscipit enim quaerat sapiente consequatur rerum. Eveniet provident consequatur dolor accusantium reiciendis.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":817,"note":"Recusandae et voluptas enim qui et.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:03:00.510Z","updated_at":"2016-06-14T15:03:00.510Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":818,"note":"Asperiores dolorem rerum ipsum totam.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:03:00.538Z","updated_at":"2016-06-14T15:03:00.538Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":819,"note":"Qui quam et iure quasi provident cumque itaque sequi.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:03:00.562Z","updated_at":"2016-06-14T15:03:00.562Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":820,"note":"Sint accusantium aliquid iste qui iusto minus 
vel.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:03:00.585Z","updated_at":"2016-06-14T15:03:00.585Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":821,"note":"Dolor corrupti dolorem blanditiis voluptas.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:03:00.610Z","updated_at":"2016-06-14T15:03:00.610Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":822,"note":"Est perferendis assumenda aliquam aliquid sit ipsum ullam aut.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:03:00.635Z","updated_at":"2016-06-14T15:03:00.635Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":823,"note":"Hic neque reiciendis quaerat maiores.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:03:00.659Z","updated_at":"2016-06-14T15:03:00.659Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":824,"note":"Sequi architecto doloribus ut vel 
autem.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:03:00.683Z","updated_at":"2016-06-14T15:03:00.683Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":10,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":10,"relative_order":0,"sha":"f998ac87ac9244f15e9c15109a6f4e62a54b779d","message":"fixes #10\n","authored_date":"2016-01-19T14:43:23.000+01:00","author_name":"James Lopez","author_email":"james@jameslopez.es","committed_date":"2016-01-19T14:43:23.000+01:00","committer_name":"James Lopez","committer_email":"james@jameslopez.es"},{"merge_request_diff_id":10,"relative_order":1,"sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","message":"Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6","authored_date":"2015-12-07T12:52:12.000+01:00","author_name":"Marin Jankovski","author_email":"marin@gitlab.com","committed_date":"2015-12-07T12:52:12.000+01:00","committer_name":"Marin Jankovski","committer_email":"marin@gitlab.com"},{"merge_request_diff_id":10,"relative_order":2,"sha":"048721d90c449b244b7b4c53a9186b04330174ec","message":"LFS object pointer.\n","authored_date":"2015-12-07T11:54:28.000+01:00","author_name":"Marin Jankovski","author_email":"maxlazio@gmail.com","committed_date":"2015-12-07T11:54:28.000+01:00","committer_name":"Marin Jankovski","committer_email":"maxlazio@gmail.com"},{"merge_request_diff_id":10,"relative_order":3,"sha":"5f923865dde3436854e9ceb9cdb7815618d4e849","message":"GitLab currently doesn't support patches that involve a merge commit: add a commit here\n","authored_date":"2015-11-13T16:27:12.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T16:27:12.000+01:00","committer_name":"Stan 
Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":10,"relative_order":4,"sha":"d2d430676773caa88cdaf7c55944073b2fd5561a","message":"Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5","authored_date":"2015-11-13T08:50:17.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:50:17.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":10,"relative_order":5,"sha":"2ea1f3dec713d940208fb5ce4a38765ecb5d3f73","message":"Add GitLab SVG\n","authored_date":"2015-11-13T08:39:43.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:39:43.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":10,"relative_order":6,"sha":"59e29889be61e6e0e5e223bfa9ac2721d31605b8","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4","authored_date":"2015-11-13T07:21:40.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T07:21:40.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":10,"relative_order":7,"sha":"66eceea0db202bb39c4e445e8ca28689645366c5","message":"add spaces in whitespace file\n","authored_date":"2015-11-13T06:01:27.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:01:27.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":10,"relative_order":8,"sha":"08f22f255f082689c0d7d39d19205085311542bc","message":"remove empty file.(beacase git ignore empty file)\nadd whitespace test 
file.\n","authored_date":"2015-11-13T06:00:16.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:00:16.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":10,"relative_order":9,"sha":"19e2e9b4ef76b422ce1154af39a91323ccc57434","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3","authored_date":"2015-11-13T05:23:14.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T05:23:14.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com"},{"merge_request_diff_id":10,"relative_order":10,"sha":"c642fe9b8b9f28f9225d7ea953fe14e74748d53b","message":"add whitespace in empty\n","authored_date":"2015-11-13T05:08:45.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:45.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":10,"relative_order":11,"sha":"9a944d90955aaf45f6d0c88f30e27f8d2c41cec0","message":"add empty file\n","authored_date":"2015-11-13T05:08:04.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:04.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com"},{"merge_request_diff_id":10,"relative_order":12,"sha":"c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd","message":"Add ISO-8859 test file\n","authored_date":"2015-08-25T17:53:12.000+02:00","author_name":"Stan Hu","author_email":"stanhu@packetzoom.com","committed_date":"2015-08-25T17:53:12.000+02:00","committer_name":"Stan Hu","committer_email":"stanhu@packetzoom.com"},{"merge_request_diff_id":10,"relative_order":13,"sha":"e56497bb5f03a90a51293fc6d516788730953899","message":"Merge branch 'tree_helper_spec' into 'master'\n\nAdd 
directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/275#note_732774)\n\nSee merge request !2\n","authored_date":"2015-01-10T22:23:29.000+01:00","author_name":"Sytse Sijbrandij","author_email":"sytse@gitlab.com","committed_date":"2015-01-10T22:23:29.000+01:00","committer_name":"Sytse Sijbrandij","committer_email":"sytse@gitlab.com"},{"merge_request_diff_id":10,"relative_order":14,"sha":"4cd80ccab63c82b4bad16faa5193fbd2aa06df40","message":"add directory structure for tree_helper spec\n","authored_date":"2015-01-10T21:28:18.000+01:00","author_name":"marmis85","author_email":"marmis85@gmail.com","committed_date":"2015-01-10T21:28:18.000+01:00","committer_name":"marmis85","committer_email":"marmis85@gmail.com"},{"merge_request_diff_id":10,"relative_order":16,"sha":"5937ac0a7beb003549fc5fd26fc247adbce4a52e","message":"Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T10:01:38.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T10:01:38.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"},{"merge_request_diff_id":10,"relative_order":17,"sha":"570e7b2abdd848b95f2f578043fc23bd6f6fd24d","message":"Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:57:31.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:57:31.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"},{"merge_request_diff_id":10,"relative_order":18,"sha":"6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9","message":"More submodules\n\nSigned-off-by: Dmitriy 
Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:54:21.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:54:21.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"},{"merge_request_diff_id":10,"relative_order":19,"sha":"d14d6c0abdd253381df51a723d58691b2ee1ab08","message":"Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:49:50.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:49:50.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"},{"merge_request_diff_id":10,"relative_order":20,"sha":"c1acaa58bbcbc3eafe538cb8274ba387047b69f8","message":"Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:48:32.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:48:32.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com"}],"merge_request_diff_files":[{"merge_request_diff_id":10,"relative_order":0,"utf8_diff":"Binary files a/.DS_Store and /dev/null differ\n","new_path":".DS_Store","old_path":".DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":10,"relative_order":1,"utf8_diff":"--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n","new_path":".gitignore","old_path":".gitignore","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":2,"utf8_diff":"--- 
a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n","new_path":".gitmodules","old_path":".gitmodules","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":3,"utf8_diff":"--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n","new_path":"CHANGELOG","old_path":"CHANGELOG","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n","new_path":"encoding/iso8859.txt","old_path":"encoding/iso8859.txt","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":5,"utf8_diff":"Binary files a/files/.DS_Store and /dev/null differ\n","new_path":"files/.DS_Store","old_path":"files/.DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":10,"relative_order":6,"utf8_diff":"--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 
(9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 
C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 
0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path 
d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" 
sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+</svg>\n\\ No newline at end of file\n","new_path":"files/images/wm.svg","old_path":"files/images/wm.svg","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":7,"utf8_diff":"--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid 
sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n","new_path":"files/lfs/lfs_object.iso","old_path":"files/lfs/lfs_object.iso","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":8,"utf8_diff":"--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" => path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" => path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output << stdout.read\n @cmd_output << stderr.read\n","new_path":"files/ruby/popen.rb","old_path":"files/ruby/popen.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":9,"utf8_diff":"--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins 
with)\n","new_path":"files/ruby/regex.rb","old_path":"files/ruby/regex.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":10,"utf8_diff":"--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n","new_path":"files/whitespace","old_path":"files/whitespace","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":11,"utf8_diff":"--- /dev/null\n+++ b/foo/bar/.gitkeep\n","new_path":"foo/bar/.gitkeep","old_path":"foo/bar/.gitkeep","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":12,"utf8_diff":"--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n","new_path":"gitlab-grack","old_path":"gitlab-grack","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":13,"utf8_diff":"--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n","new_path":"gitlab-shell","old_path":"gitlab-shell","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":14,"utf8_diff":"--- /dev/null\n+++ 
b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":10,"created_at":"2016-06-14T15:02:23.019Z","updated_at":"2016-06-14T15:02:23.493Z","base_commit_sha":"ae73cb07c9eeaf35924a10f713b364d32b2dd34f","real_size":"15"},"events":[{"id":228,"target_type":"MergeRequest","target_id":10,"project_id":36,"created_at":"2016-06-14T15:02:23.660Z","updated_at":"2016-06-14T15:02:23.660Z","action":1,"author_id":1},{"id":170,"target_type":"MergeRequest","target_id":10,"project_id":5,"created_at":"2016-06-14T15:02:23.660Z","updated_at":"2016-06-14T15:02:23.660Z","action":1,"author_id":20}]}
-{"id":9,"target_branch":"test-6","source_branch":"test-12","source_project_id":5,"author_id":16,"assignee_id":6,"title":"Et ipsam voluptas velit sequi illum ut.","created_at":"2016-06-14T15:02:22.825Z","updated_at":"2016-06-14T15:03:00.904Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":1,"description":"Eveniet nihil ratione veniam similique qui aut sapiente tempora. Sed praesentium iusto dignissimos possimus id repudiandae quo nihil. Qui doloremque autem et iure fugit.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":825,"note":"Aliquid voluptatem consequatur voluptas ex perspiciatis.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:03:00.722Z","updated_at":"2016-06-14T15:03:00.722Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":826,"note":"Itaque optio voluptatem praesentium voluptas.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:03:00.745Z","updated_at":"2016-06-14T15:03:00.745Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":827,"note":"Ut est corporis fuga asperiores delectus excepturi aperiam.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:03:00.771Z","updated_at":"2016-06-14T15:03:00.771Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":828,"note":"Similique ea dolore officiis 
temporibus.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:03:00.798Z","updated_at":"2016-06-14T15:03:00.798Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":829,"note":"Qui laudantium qui quae quis.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:03:00.828Z","updated_at":"2016-06-14T15:03:00.828Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":830,"note":"Et vel voluptas amet laborum qui soluta.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:03:00.850Z","updated_at":"2016-06-14T15:03:00.850Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":831,"note":"Enim ad consequuntur assumenda provident voluptatem similique deleniti.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:03:00.876Z","updated_at":"2016-06-14T15:03:00.876Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":832,"note":"Officiis sequi commodi pariatur totam fugiat voluptas corporis 
dignissimos.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:03:00.902Z","updated_at":"2016-06-14T15:03:00.902Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":9,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":9,"relative_order":0,"sha":"a4e5dfebf42e34596526acb8611bc7ed80e4eb3f","message":"fixes #10\n","authored_date":"2016-01-19T15:44:02.000+01:00","author_name":"James Lopez","author_email":"james@jameslopez.es","committed_date":"2016-01-19T15:44:02.000+01:00","committer_name":"James Lopez","committer_email":"james@jameslopez.es"}],"merge_request_diff_files":[{"merge_request_diff_id":9,"relative_order":0,"utf8_diff":"--- /dev/null\n+++ b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":9,"created_at":"2016-06-14T15:02:22.829Z","updated_at":"2016-06-14T15:02:22.900Z","base_commit_sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","real_size":"1"},"events":[{"id":229,"target_type":"MergeRequest","target_id":9,"project_id":36,"created_at":"2016-06-14T15:02:22.927Z","updated_at":"2016-06-14T15:02:22.927Z","action":1,"author_id":16},{"id":169,"target_type":"MergeRequest","target_id":9,"project_id":5,"created_at":"2016-06-14T15:02:22.927Z","updated_at":"2016-06-14T15:02:22.927Z","action":1,"author_id":16}]}
+{"id":10,"target_branch":"feature","source_branch":"test-5","source_project_id":5,"author_id":20,"assignee_id":25,"title":"Eligendi reprehenderit doloribus quia et sit id.","created_at":"2016-06-14T15:02:23.014Z","updated_at":"2016-06-14T15:03:00.685Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":2,"description":"Ut dolor quia aliquid dolore et nisi. Est minus suscipit enim quaerat sapiente consequatur rerum. Eveniet provident consequatur dolor accusantium reiciendis.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":817,"note":"Recusandae et voluptas enim qui et.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:03:00.510Z","updated_at":"2016-06-14T15:03:00.510Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":818,"note":"Asperiores dolorem rerum ipsum totam.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:03:00.538Z","updated_at":"2016-06-14T15:03:00.538Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":819,"note":"Qui quam et iure quasi provident cumque itaque sequi.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:03:00.562Z","updated_at":"2016-06-14T15:03:00.562Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":820,"note":"Sint accusantium aliquid iste qui iusto minus 
vel.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:03:00.585Z","updated_at":"2016-06-14T15:03:00.585Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":821,"note":"Dolor corrupti dolorem blanditiis voluptas.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:03:00.610Z","updated_at":"2016-06-14T15:03:00.610Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":822,"note":"Est perferendis assumenda aliquam aliquid sit ipsum ullam aut.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:03:00.635Z","updated_at":"2016-06-14T15:03:00.635Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":823,"note":"Hic neque reiciendis quaerat maiores.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:03:00.659Z","updated_at":"2016-06-14T15:03:00.659Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":824,"note":"Sequi architecto doloribus ut vel 
autem.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:03:00.683Z","updated_at":"2016-06-14T15:03:00.683Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":10,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":10,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":10,"relative_order":0,"sha":"f998ac87ac9244f15e9c15109a6f4e62a54b779d","message":"fixes #10\n","authored_date":"2016-01-19T14:43:23.000+01:00","author_name":"James Lopez","author_email":"james@jameslopez.es","committed_date":"2016-01-19T14:43:23.000+01:00","committer_name":"James Lopez","committer_email":"james@jameslopez.es","commit_author":{"name":"James Lopez","email":"james@jameslopez.es"},"committer":{"name":"James Lopez","email":"james@jameslopez.es"}},{"merge_request_diff_id":10,"relative_order":1,"sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","message":"Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6","authored_date":"2015-12-07T12:52:12.000+01:00","author_name":"Marin Jankovski","author_email":"marin@gitlab.com","committed_date":"2015-12-07T12:52:12.000+01:00","committer_name":"Marin Jankovski","committer_email":"marin@gitlab.com","commit_author":{"name":"Marin Jankovski","email":"marin@gitlab.com"},"committer":{"name":"Marin Jankovski","email":"marin@gitlab.com"}},{"merge_request_diff_id":10,"relative_order":2,"sha":"048721d90c449b244b7b4c53a9186b04330174ec","message":"LFS object pointer.\n","authored_date":"2015-12-07T11:54:28.000+01:00","author_name":"Marin Jankovski","author_email":"maxlazio@gmail.com","committed_date":"2015-12-07T11:54:28.000+01:00","committer_name":"Marin Jankovski","committer_email":"maxlazio@gmail.com","commit_author":{"name":"Marin Jankovski","email":"maxlazio@gmail.com"},"committer":{"name":"Marin 
Jankovski","email":"maxlazio@gmail.com"}},{"merge_request_diff_id":10,"relative_order":3,"sha":"5f923865dde3436854e9ceb9cdb7815618d4e849","message":"GitLab currently doesn't support patches that involve a merge commit: add a commit here\n","authored_date":"2015-11-13T16:27:12.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T16:27:12.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":10,"relative_order":4,"sha":"d2d430676773caa88cdaf7c55944073b2fd5561a","message":"Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5","authored_date":"2015-11-13T08:50:17.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:50:17.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":10,"relative_order":5,"sha":"2ea1f3dec713d940208fb5ce4a38765ecb5d3f73","message":"Add GitLab SVG\n","authored_date":"2015-11-13T08:39:43.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T08:39:43.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":10,"relative_order":6,"sha":"59e29889be61e6e0e5e223bfa9ac2721d31605b8","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request 
!4","authored_date":"2015-11-13T07:21:40.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T07:21:40.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":10,"relative_order":7,"sha":"66eceea0db202bb39c4e445e8ca28689645366c5","message":"add spaces in whitespace file\n","authored_date":"2015-11-13T06:01:27.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:01:27.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":10,"relative_order":8,"sha":"08f22f255f082689c0d7d39d19205085311542bc","message":"remove empty file.(beacase git ignore empty file)\nadd whitespace test file.\n","authored_date":"2015-11-13T06:00:16.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T06:00:16.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":10,"relative_order":9,"sha":"19e2e9b4ef76b422ce1154af39a91323ccc57434","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3","authored_date":"2015-11-13T05:23:14.000+01:00","author_name":"Stan Hu","author_email":"stanhu@gmail.com","committed_date":"2015-11-13T05:23:14.000+01:00","committer_name":"Stan Hu","committer_email":"stanhu@gmail.com","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan 
Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":10,"relative_order":10,"sha":"c642fe9b8b9f28f9225d7ea953fe14e74748d53b","message":"add whitespace in empty\n","authored_date":"2015-11-13T05:08:45.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:45.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":10,"relative_order":11,"sha":"9a944d90955aaf45f6d0c88f30e27f8d2c41cec0","message":"add empty file\n","authored_date":"2015-11-13T05:08:04.000+01:00","author_name":"윤민식","author_email":"minsik.yoon@samsung.com","committed_date":"2015-11-13T05:08:04.000+01:00","committer_name":"윤민식","committer_email":"minsik.yoon@samsung.com","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":10,"relative_order":12,"sha":"c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd","message":"Add ISO-8859 test file\n","authored_date":"2015-08-25T17:53:12.000+02:00","author_name":"Stan Hu","author_email":"stanhu@packetzoom.com","committed_date":"2015-08-25T17:53:12.000+02:00","committer_name":"Stan Hu","committer_email":"stanhu@packetzoom.com","commit_author":{"name":"Stan Hu","email":"stanhu@packetzoom.com"},"committer":{"name":"Stan Hu","email":"stanhu@packetzoom.com"}},{"merge_request_diff_id":10,"relative_order":13,"sha":"e56497bb5f03a90a51293fc6d516788730953899","message":"Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/275#note_732774)\n\nSee merge request !2\n","authored_date":"2015-01-10T22:23:29.000+01:00","author_name":"Sytse 
Sijbrandij","author_email":"sytse@gitlab.com","committed_date":"2015-01-10T22:23:29.000+01:00","committer_name":"Sytse Sijbrandij","committer_email":"sytse@gitlab.com","commit_author":{"name":"Sytse Sijbrandij","email":"sytse@gitlab.com"},"committer":{"name":"Sytse Sijbrandij","email":"sytse@gitlab.com"}},{"merge_request_diff_id":10,"relative_order":14,"sha":"4cd80ccab63c82b4bad16faa5193fbd2aa06df40","message":"add directory structure for tree_helper spec\n","authored_date":"2015-01-10T21:28:18.000+01:00","author_name":"marmis85","author_email":"marmis85@gmail.com","committed_date":"2015-01-10T21:28:18.000+01:00","committer_name":"marmis85","committer_email":"marmis85@gmail.com","commit_author":{"name":"marmis85","email":"marmis85@gmail.com"},"committer":{"name":"marmis85","email":"marmis85@gmail.com"}},{"merge_request_diff_id":10,"relative_order":16,"sha":"5937ac0a7beb003549fc5fd26fc247adbce4a52e","message":"Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T10:01:38.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T10:01:38.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":10,"relative_order":17,"sha":"570e7b2abdd848b95f2f578043fc23bd6f6fd24d","message":"Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:57:31.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:57:31.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy 
Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":10,"relative_order":18,"sha":"6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9","message":"More submodules\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:54:21.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:54:21.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":10,"relative_order":19,"sha":"d14d6c0abdd253381df51a723d58691b2ee1ab08","message":"Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:49:50.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:49:50.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":10,"relative_order":20,"sha":"c1acaa58bbcbc3eafe538cb8274ba387047b69f8","message":"Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:48:32.000+01:00","author_name":"Dmitriy Zaporozhets","author_email":"dmitriy.zaporozhets@gmail.com","committed_date":"2014-02-27T09:48:32.000+01:00","committer_name":"Dmitriy Zaporozhets","committer_email":"dmitriy.zaporozhets@gmail.com","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy 
Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}}],"merge_request_diff_files":[{"merge_request_diff_id":10,"relative_order":0,"utf8_diff":"Binary files a/.DS_Store and /dev/null differ\n","new_path":".DS_Store","old_path":".DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":10,"relative_order":1,"utf8_diff":"--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n","new_path":".gitignore","old_path":".gitignore","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":2,"utf8_diff":"--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n","new_path":".gitmodules","old_path":".gitmodules","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":3,"utf8_diff":"--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n","new_path":"CHANGELOG","old_path":"CHANGELOG","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 
@@\n+Äü\n","new_path":"encoding/iso8859.txt","old_path":"encoding/iso8859.txt","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":5,"utf8_diff":"Binary files a/files/.DS_Store and /dev/null differ\n","new_path":"files/.DS_Store","old_path":"files/.DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":10,"relative_order":6,"utf8_diff":"--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" 
id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 
57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 
45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" 
stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 
Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+</svg>\n\\ No newline at end of file\n","new_path":"files/images/wm.svg","old_path":"files/images/wm.svg","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":7,"utf8_diff":"--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n","new_path":"files/lfs/lfs_object.iso","old_path":"files/lfs/lfs_object.iso","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":8,"utf8_diff":"--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" => path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" => path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output << stdout.read\n @cmd_output << 
stderr.read\n","new_path":"files/ruby/popen.rb","old_path":"files/ruby/popen.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":9,"utf8_diff":"--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n","new_path":"files/ruby/regex.rb","old_path":"files/ruby/regex.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":10,"utf8_diff":"--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n","new_path":"files/whitespace","old_path":"files/whitespace","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":11,"utf8_diff":"--- /dev/null\n+++ b/foo/bar/.gitkeep\n","new_path":"foo/bar/.gitkeep","old_path":"foo/bar/.gitkeep","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":12,"utf8_diff":"--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n","new_path":"gitlab-grack","old_path":"gitlab-grack","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":13,"utf8_diff":"--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 
79bceae69cb5750d6567b223597999bfa91cb3b9\n","new_path":"gitlab-shell","old_path":"gitlab-shell","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":10,"relative_order":14,"utf8_diff":"--- /dev/null\n+++ b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":10,"created_at":"2016-06-14T15:02:23.019Z","updated_at":"2016-06-14T15:02:23.493Z","base_commit_sha":"ae73cb07c9eeaf35924a10f713b364d32b2dd34f","real_size":"15"},"events":[{"id":228,"target_type":"MergeRequest","target_id":10,"project_id":36,"created_at":"2016-06-14T15:02:23.660Z","updated_at":"2016-06-14T15:02:23.660Z","action":1,"author_id":1},{"id":170,"target_type":"MergeRequest","target_id":10,"project_id":5,"created_at":"2016-06-14T15:02:23.660Z","updated_at":"2016-06-14T15:02:23.660Z","action":1,"author_id":20}]}
+{"id":9,"target_branch":"test-6","source_branch":"test-12","source_project_id":5,"author_id":16,"assignee_id":6,"title":"Et ipsam voluptas velit sequi illum ut.","created_at":"2016-06-14T15:02:22.825Z","updated_at":"2016-06-14T15:03:00.904Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":1,"description":"Eveniet nihil ratione veniam similique qui aut sapiente tempora. Sed praesentium iusto dignissimos possimus id repudiandae quo nihil. Qui doloremque autem et iure fugit.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":825,"note":"Aliquid voluptatem consequatur voluptas ex perspiciatis.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:03:00.722Z","updated_at":"2016-06-14T15:03:00.722Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":826,"note":"Itaque optio voluptatem praesentium voluptas.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:03:00.745Z","updated_at":"2016-06-14T15:03:00.745Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":827,"note":"Ut est corporis fuga asperiores delectus excepturi aperiam.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:03:00.771Z","updated_at":"2016-06-14T15:03:00.771Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":828,"note":"Similique ea dolore officiis 
temporibus.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:03:00.798Z","updated_at":"2016-06-14T15:03:00.798Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":829,"note":"Qui laudantium qui quae quis.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:03:00.828Z","updated_at":"2016-06-14T15:03:00.828Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":830,"note":"Et vel voluptas amet laborum qui soluta.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:03:00.850Z","updated_at":"2016-06-14T15:03:00.850Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":831,"note":"Enim ad consequuntur assumenda provident voluptatem similique deleniti.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:03:00.876Z","updated_at":"2016-06-14T15:03:00.876Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":832,"note":"Officiis sequi commodi pariatur totam fugiat voluptas corporis 
dignissimos.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:03:00.902Z","updated_at":"2016-06-14T15:03:00.902Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":9,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":9,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":9,"relative_order":0,"sha":"a4e5dfebf42e34596526acb8611bc7ed80e4eb3f","message":"fixes #10\n","authored_date":"2016-01-19T15:44:02.000+01:00","author_name":"James Lopez","author_email":"james@jameslopez.es","committed_date":"2016-01-19T15:44:02.000+01:00","committer_name":"James Lopez","committer_email":"james@jameslopez.es","commit_author":{"name":"James Lopez","email":"james@jameslopez.es"},"committer":{"name":"James Lopez","email":"james@jameslopez.es"}}],"merge_request_diff_files":[{"merge_request_diff_id":9,"relative_order":0,"utf8_diff":"--- /dev/null\n+++ b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":9,"created_at":"2016-06-14T15:02:22.829Z","updated_at":"2016-06-14T15:02:22.900Z","base_commit_sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","real_size":"1"},"events":[{"id":229,"target_type":"MergeRequest","target_id":9,"project_id":36,"created_at":"2016-06-14T15:02:22.927Z","updated_at":"2016-06-14T15:02:22.927Z","action":1,"author_id":16},{"id":169,"target_type":"MergeRequest","target_id":9,"project_id":5,"created_at":"2016-06-14T15:02:22.927Z","updated_at":"2016-06-14T15:02:22.927Z","action":1,"author_id":16}]}
diff --git a/spec/fixtures/packages/nuget/package.snupkg b/spec/fixtures/packages/nuget/package.snupkg
new file mode 100644
index 00000000000..9d97b36e792
--- /dev/null
+++ b/spec/fixtures/packages/nuget/package.snupkg
Binary files differ
diff --git a/spec/fixtures/packages/nuget/with_package_types.nuspec b/spec/fixtures/packages/nuget/with_package_types.nuspec
new file mode 100644
index 00000000000..b0e61e9d47e
--- /dev/null
+++ b/spec/fixtures/packages/nuget/with_package_types.nuspec
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
+ <metadata>
+ <id>Test.Package</id>
+ <version>3.5.2</version>
+ <authors>Test Author</authors>
+ <owners>Test Owner</owners>
+ <requireLicenseAcceptance>false</requireLicenseAcceptance>
+ <description>Package Description</description>
+ <packageTypes>
+ <packageType name="SymbolsPackage" />
+ </packageTypes>
+ </metadata>
+</package>
diff --git a/spec/frontend/__helpers__/dom_shims/inner_text.js b/spec/frontend/__helpers__/dom_shims/inner_text.js
index 2b8201eed31..a48f0fee689 100644
--- a/spec/frontend/__helpers__/dom_shims/inner_text.js
+++ b/spec/frontend/__helpers__/dom_shims/inner_text.js
@@ -5,7 +5,7 @@ Object.defineProperty(global.Element.prototype, 'innerText', {
return this.textContent;
},
set(value) {
- this.textContext = value;
+ this.textContent = value;
},
configurable: true, // make it so that it doesn't blow chunks on re-running tests with things like --watch
});
diff --git a/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap b/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
index 1eb9ccc9c6c..10437c48f88 100644
--- a/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
+++ b/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
@@ -16,6 +16,7 @@ exports[`AddContextCommitsModal renders modal with 2 tabs 1`] = `
>
<gl-tabs-stub
contentclass="pt-0"
+ queryparamname="tab"
theme="indigo"
value="0"
>
diff --git a/spec/frontend/admin/users/components/actions/actions_spec.js b/spec/frontend/admin/users/components/actions/actions_spec.js
index 5db5b8a90a9..67d9bac8580 100644
--- a/spec/frontend/admin/users/components/actions/actions_spec.js
+++ b/spec/frontend/admin/users/components/actions/actions_spec.js
@@ -39,37 +39,12 @@ describe('Action components', () => {
await nextTick();
- const div = wrapper.find('div');
- expect(div.attributes('data-path')).toBe('/test');
- expect(div.attributes('data-modal-attributes')).toContain('John Doe');
+ expect(wrapper.attributes('data-path')).toBe('/test');
+ expect(wrapper.attributes('data-modal-attributes')).toContain('John Doe');
expect(findDropdownItem().exists()).toBe(true);
});
});
- describe('LINK_ACTIONS', () => {
- it.each`
- action | method
- ${'Approve'} | ${'put'}
- ${'Reject'} | ${'delete'}
- `(
- 'renders a dropdown item link with method "$method" for "$action"',
- async ({ action, method }) => {
- initComponent({
- component: Actions[action],
- props: {
- path: '/test',
- },
- });
-
- await nextTick();
-
- const item = wrapper.find(GlDropdownItem);
- expect(item.attributes('href')).toBe('/test');
- expect(item.attributes('data-method')).toContain(method);
- },
- );
- });
-
describe('DELETE_ACTION_COMPONENTS', () => {
const oncallSchedules = [{ name: 'schedule1' }, { name: 'schedule2' }];
it.each(DELETE_ACTIONS)('renders a dropdown item for "%s"', async (action) => {
diff --git a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap b/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap
index 4c644a0d05f..5e367891337 100644
--- a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
+++ b/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap
@@ -10,6 +10,7 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
<oncall-schedules-list-stub
schedules="schedule1,schedule2"
+ username="username"
/>
<p>
diff --git a/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js b/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js
index 93d9ee43179..fee74764645 100644
--- a/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
+++ b/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js
@@ -1,6 +1,6 @@
import { GlButton, GlFormInput } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import DeleteUserModal from '~/pages/admin/users/components/delete_user_modal.vue';
+import DeleteUserModal from '~/admin/users/components/modals/delete_user_modal.vue';
import OncallSchedulesList from '~/vue_shared/components/oncall_schedules_list.vue';
import ModalStub from './stubs/modal_stub';
diff --git a/spec/frontend/pages/admin/users/components/stubs/modal_stub.js b/spec/frontend/admin/users/components/modals/stubs/modal_stub.js
index 4dc55e909a0..4dc55e909a0 100644
--- a/spec/frontend/pages/admin/users/components/stubs/modal_stub.js
+++ b/spec/frontend/admin/users/components/modals/stubs/modal_stub.js
diff --git a/spec/frontend/pages/admin/users/components/user_modal_manager_spec.js b/spec/frontend/admin/users/components/modals/user_modal_manager_spec.js
index 3669bc40d7e..65ce242662b 100644
--- a/spec/frontend/pages/admin/users/components/user_modal_manager_spec.js
+++ b/spec/frontend/admin/users/components/modals/user_modal_manager_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import UserModalManager from '~/pages/admin/users/components/user_modal_manager.vue';
+import UserModalManager from '~/admin/users/components/modals/user_modal_manager.vue';
import ModalStub from './stubs/modal_stub';
describe('Users admin page Modal Manager', () => {
diff --git a/spec/frontend/admin/users/components/user_actions_spec.js b/spec/frontend/admin/users/components/user_actions_spec.js
index debe964e7aa..43313424553 100644
--- a/spec/frontend/admin/users/components/user_actions_spec.js
+++ b/spec/frontend/admin/users/components/user_actions_spec.js
@@ -1,4 +1,5 @@
import { GlDropdownDivider } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import Actions from '~/admin/users/components/actions';
import AdminUserActions from '~/admin/users/components/user_actions.vue';
@@ -6,7 +7,7 @@ import { I18N_USER_ACTIONS } from '~/admin/users/constants';
import { generateUserPaths } from '~/admin/users/utils';
import { capitalizeFirstCharacter } from '~/lib/utils/text_utility';
-import { CONFIRMATION_ACTIONS, DELETE_ACTIONS, LINK_ACTIONS, LDAP, EDIT } from '../constants';
+import { CONFIRMATION_ACTIONS, DELETE_ACTIONS, LDAP, EDIT } from '../constants';
import { users, paths } from '../mock_data';
describe('AdminUserActions component', () => {
@@ -20,7 +21,7 @@ describe('AdminUserActions component', () => {
findUserActions(id).find('[data-testid="dropdown-toggle"]');
const findDropdownDivider = () => wrapper.findComponent(GlDropdownDivider);
- const initComponent = ({ actions = [] } = {}) => {
+ const initComponent = ({ actions = [], showButtonLabels } = {}) => {
wrapper = shallowMountExtended(AdminUserActions, {
propsData: {
user: {
@@ -28,6 +29,10 @@ describe('AdminUserActions component', () => {
actions,
},
paths,
+ showButtonLabels,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
},
});
};
@@ -62,7 +67,7 @@ describe('AdminUserActions component', () => {
describe('actions dropdown', () => {
describe('when there are actions', () => {
- const actions = [EDIT, ...LINK_ACTIONS];
+ const actions = [EDIT, ...CONFIRMATION_ACTIONS];
beforeEach(() => {
initComponent({ actions });
@@ -72,19 +77,6 @@ describe('AdminUserActions component', () => {
expect(findActionsDropdown().exists()).toBe(true);
});
- describe('when there are actions that should render as links', () => {
- beforeEach(() => {
- initComponent({ actions: LINK_ACTIONS });
- });
-
- it.each(LINK_ACTIONS)('renders an action component item for "%s"', (action) => {
- const component = wrapper.find(Actions[capitalizeFirstCharacter(action)]);
-
- expect(component.props('path')).toBe(userPaths[action]);
- expect(component.text()).toBe(I18N_USER_ACTIONS[action]);
- });
- });
-
describe('when there are actions that require confirmation', () => {
beforeEach(() => {
initComponent({ actions: CONFIRMATION_ACTIONS });
@@ -157,4 +149,42 @@ describe('AdminUserActions component', () => {
});
});
});
+
+ describe('when `showButtonLabels` prop is `false`', () => {
+ beforeEach(() => {
+ initComponent({ actions: [EDIT, ...CONFIRMATION_ACTIONS] });
+ });
+
+ it('does not render "Edit" button label', () => {
+ const tooltip = getBinding(findEditButton().element, 'gl-tooltip');
+
+ expect(findEditButton().text()).toBe('');
+ expect(findEditButton().attributes('aria-label')).toBe(I18N_USER_ACTIONS.edit);
+ expect(tooltip).toBeDefined();
+ expect(tooltip.value).toBe(I18N_USER_ACTIONS.edit);
+ });
+
+ it('does not render "User administration" dropdown button label', () => {
+ expect(findActionsDropdown().props('text')).toBe(I18N_USER_ACTIONS.userAdministration);
+ expect(findActionsDropdown().props('textSrOnly')).toBe(true);
+ });
+ });
+
+ describe('when `showButtonLabels` prop is `true`', () => {
+ beforeEach(() => {
+ initComponent({ actions: [EDIT, ...CONFIRMATION_ACTIONS], showButtonLabels: true });
+ });
+
+ it('renders "Edit" button label', () => {
+ const tooltip = getBinding(findEditButton().element, 'gl-tooltip');
+
+ expect(findEditButton().text()).toBe(I18N_USER_ACTIONS.edit);
+ expect(tooltip).not.toBeDefined();
+ });
+
+ it('renders "User administration" dropdown button label', () => {
+ expect(findActionsDropdown().props('text')).toBe(I18N_USER_ACTIONS.userAdministration);
+ expect(findActionsDropdown().props('textSrOnly')).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/admin/users/constants.js b/spec/frontend/admin/users/constants.js
index 60abdc6c248..d341eb03b1b 100644
--- a/spec/frontend/admin/users/constants.js
+++ b/spec/frontend/admin/users/constants.js
@@ -7,13 +7,23 @@ const ACTIVATE = 'activate';
const DEACTIVATE = 'deactivate';
const REJECT = 'reject';
const APPROVE = 'approve';
+const BAN = 'ban';
+const UNBAN = 'unban';
export const EDIT = 'edit';
export const LDAP = 'ldapBlocked';
-export const LINK_ACTIONS = [APPROVE, REJECT];
-
-export const CONFIRMATION_ACTIONS = [ACTIVATE, BLOCK, DEACTIVATE, UNLOCK, UNBLOCK];
+export const CONFIRMATION_ACTIONS = [
+ ACTIVATE,
+ BLOCK,
+ DEACTIVATE,
+ UNLOCK,
+ UNBLOCK,
+ BAN,
+ UNBAN,
+ APPROVE,
+ REJECT,
+];
export const DELETE_ACTIONS = [DELETE, DELETE_WITH_CONTRIBUTIONS];
diff --git a/spec/frontend/admin/users/index_spec.js b/spec/frontend/admin/users/index_spec.js
index 20b60bd8640..06dbadd6d3d 100644
--- a/spec/frontend/admin/users/index_spec.js
+++ b/spec/frontend/admin/users/index_spec.js
@@ -1,7 +1,8 @@
import { createWrapper } from '@vue/test-utils';
-import { initAdminUsersApp } from '~/admin/users';
+import { initAdminUsersApp, initAdminUserActions } from '~/admin/users';
import AdminUsersApp from '~/admin/users/components/app.vue';
-import { users, paths } from './mock_data';
+import UserActions from '~/admin/users/components/user_actions.vue';
+import { users, user, paths } from './mock_data';
describe('initAdminUsersApp', () => {
let wrapper;
@@ -14,15 +15,12 @@ describe('initAdminUsersApp', () => {
el.setAttribute('data-users', JSON.stringify(users));
el.setAttribute('data-paths', JSON.stringify(paths));
- document.body.appendChild(el);
-
wrapper = createWrapper(initAdminUsersApp(el));
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
- el.remove();
el = null;
});
@@ -33,3 +31,31 @@ describe('initAdminUsersApp', () => {
});
});
});
+
+describe('initAdminUserActions', () => {
+ let wrapper;
+ let el;
+
+ const findUserActions = () => wrapper.find(UserActions);
+
+ beforeEach(() => {
+ el = document.createElement('div');
+ el.setAttribute('data-user', JSON.stringify(user));
+ el.setAttribute('data-paths', JSON.stringify(paths));
+
+ wrapper = createWrapper(initAdminUserActions(el));
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ el = null;
+ });
+
+ it('parses and passes props', () => {
+ expect(findUserActions().props()).toMatchObject({
+ user,
+ paths,
+ });
+ });
+});
diff --git a/spec/frontend/admin/users/mock_data.js b/spec/frontend/admin/users/mock_data.js
index 4689ab36773..ded3e6f7edf 100644
--- a/spec/frontend/admin/users/mock_data.js
+++ b/spec/frontend/admin/users/mock_data.js
@@ -18,6 +18,8 @@ export const users = [
},
];
+export const user = users[0];
+
export const paths = {
edit: '/admin/users/id/edit',
approve: '/admin/users/id/approve',
@@ -30,6 +32,8 @@ export const paths = {
delete: '/admin/users/id',
deleteWithContributions: '/admin/users/id',
adminUser: '/admin/users/id',
+ ban: '/admin/users/id/ban',
+ unban: '/admin/users/id/unban',
};
export const createGroupCountResponse = (groupCounts) => ({
diff --git a/spec/frontend/analytics/devops_report/components/service_ping_disabled_spec.js b/spec/frontend/analytics/devops_report/components/service_ping_disabled_spec.js
new file mode 100644
index 00000000000..75ef9d9db94
--- /dev/null
+++ b/spec/frontend/analytics/devops_report/components/service_ping_disabled_spec.js
@@ -0,0 +1,61 @@
+import { GlEmptyState, GlSprintf } from '@gitlab/ui';
+import { TEST_HOST } from 'helpers/test_constants';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ServicePingDisabled from '~/analytics/devops_report/components/service_ping_disabled.vue';
+
+describe('~/analytics/devops_report/components/service_ping_disabled.vue', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const createWrapper = ({ isAdmin = false } = {}) => {
+ wrapper = shallowMountExtended(ServicePingDisabled, {
+ provide: {
+ isAdmin,
+ svgPath: TEST_HOST,
+ docsLink: TEST_HOST,
+ primaryButtonPath: TEST_HOST,
+ },
+ stubs: { GlEmptyState, GlSprintf },
+ });
+ };
+
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findMessageForRegularUsers = () => wrapper.findComponent(GlSprintf);
+ const findDocsLink = () => wrapper.findByTestId('docs-link');
+ const findPowerOnButton = () => wrapper.findByTestId('power-on-button');
+
+ it('renders empty state with provided SVG path', () => {
+ createWrapper();
+
+ expect(findEmptyState().props('svgPath')).toBe(TEST_HOST);
+ });
+
+ describe('for regular users', () => {
+ beforeEach(() => {
+ createWrapper({ isAdmin: false });
+ });
+
+ it('renders message without power-on button', () => {
+ expect(findMessageForRegularUsers().exists()).toBe(true);
+ expect(findPowerOnButton().exists()).toBe(false);
+ });
+
+ it('renders docs link', () => {
+ expect(findDocsLink().exists()).toBe(true);
+ expect(findDocsLink().attributes('href')).toBe(TEST_HOST);
+ });
+ });
+
+ describe('for admins', () => {
+ beforeEach(() => {
+ createWrapper({ isAdmin: true });
+ });
+
+ it('renders power-on button', () => {
+ expect(findPowerOnButton().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/analytics/shared/components/daterange_spec.js b/spec/frontend/analytics/shared/components/daterange_spec.js
new file mode 100644
index 00000000000..854582abb82
--- /dev/null
+++ b/spec/frontend/analytics/shared/components/daterange_spec.js
@@ -0,0 +1,120 @@
+import { GlDaterangePicker } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { useFakeDate } from 'helpers/fake_date';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import Daterange from '~/analytics/shared/components/daterange.vue';
+
+const defaultProps = {
+ startDate: new Date(2019, 8, 1),
+ endDate: new Date(2019, 8, 11),
+};
+
+describe('Daterange component', () => {
+ useFakeDate(2019, 8, 25);
+
+ let wrapper;
+
+ const factory = (props = defaultProps) => {
+ wrapper = mount(Daterange, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ directives: { GlTooltip: createMockDirective() },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findDaterangePicker = () => wrapper.find(GlDaterangePicker);
+
+ const findDateRangeIndicator = () => wrapper.find('.daterange-indicator');
+
+ describe('template', () => {
+ describe('when show is false', () => {
+ it('does not render the daterange picker', () => {
+ factory({ show: false });
+ expect(findDaterangePicker().exists()).toBe(false);
+ });
+ });
+
+ describe('when show is true', () => {
+ it('renders the daterange picker', () => {
+ factory({ show: true });
+ expect(findDaterangePicker().exists()).toBe(true);
+ });
+ });
+
+ describe('with a minDate being set', () => {
+ it('emits the change event with the minDate when the user enters a start date before the minDate', () => {
+ const startDate = new Date('2019-09-01');
+ const endDate = new Date('2019-09-30');
+ const minDate = new Date('2019-06-01');
+
+ factory({ show: true, startDate, endDate, minDate });
+
+ const input = findDaterangePicker().find('input');
+
+ input.setValue('2019-01-01');
+ input.trigger('change');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().change).toEqual([[{ startDate: minDate, endDate }]]);
+ });
+ });
+ });
+
+ describe('with a maxDateRange being set', () => {
+ beforeEach(() => {
+ factory({ maxDateRange: 30 });
+ });
+
+ it('displays the max date range indicator', () => {
+ expect(findDateRangeIndicator().exists()).toBe(true);
+ });
+
+ it('displays the correct number of selected days in the indicator', () => {
+ expect(findDateRangeIndicator().find('span').text()).toBe('10 days selected');
+ });
+
+ it('displays a tooltip', () => {
+ const icon = wrapper.find('[data-testid="helper-icon"]');
+ const tooltip = getBinding(icon.element, 'gl-tooltip');
+
+ expect(tooltip).toBeDefined();
+ expect(icon.attributes('title')).toBe(
+ 'Showing data for workflow items created in this date range. Date range cannot exceed 30 days.',
+ );
+ });
+ });
+ });
+
+ describe('computed', () => {
+ describe('dateRange', () => {
+ beforeEach(() => {
+ factory({ show: true });
+ });
+
+ describe('set', () => {
+ it('emits the change event with an object containing startDate and endDate', () => {
+ const startDate = new Date('2019-10-01');
+ const endDate = new Date('2019-10-05');
+ wrapper.vm.dateRange = { startDate, endDate };
+
+ expect(wrapper.emitted().change).toEqual([[{ startDate, endDate }]]);
+ });
+ });
+
+ describe('get', () => {
+ it("returns value of dateRange from state's startDate and endDate", () => {
+ expect(wrapper.vm.dateRange).toEqual({
+ startDate: defaultProps.startDate,
+ endDate: defaultProps.endDate,
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/analytics/shared/components/metric_card_spec.js b/spec/frontend/analytics/shared/components/metric_card_spec.js
deleted file mode 100644
index 7f587d227ab..00000000000
--- a/spec/frontend/analytics/shared/components/metric_card_spec.js
+++ /dev/null
@@ -1,129 +0,0 @@
-import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import MetricCard from '~/analytics/shared/components/metric_card.vue';
-
-const metrics = [
- { key: 'first_metric', value: 10, label: 'First metric', unit: 'days', link: 'some_link' },
- { key: 'second_metric', value: 20, label: 'Yet another metric' },
- { key: 'third_metric', value: null, label: 'Null metric without value', unit: 'parsecs' },
- { key: 'fourth_metric', value: '-', label: 'Metric without value', unit: 'parsecs' },
-];
-
-const defaultProps = {
- title: 'My fancy title',
- isLoading: false,
- metrics,
-};
-
-describe('MetricCard', () => {
- let wrapper;
-
- const factory = (props = defaultProps) => {
- wrapper = mount(MetricCard, {
- propsData: {
- ...defaultProps,
- ...props,
- },
- directives: {
- GlTooltip: createMockDirective(),
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- const findTitle = () => wrapper.find({ ref: 'title' });
- const findLoadingIndicator = () => wrapper.find(GlSkeletonLoading);
- const findMetricsWrapper = () => wrapper.find({ ref: 'metricsWrapper' });
- const findMetricItem = () => wrapper.findAll({ ref: 'metricItem' });
- const findTooltip = () => wrapper.find('[data-testid="tooltip"]');
-
- describe('template', () => {
- it('renders the title', () => {
- factory();
-
- expect(findTitle().text()).toContain('My fancy title');
- });
-
- describe('when isLoading is true', () => {
- beforeEach(() => {
- factory({ isLoading: true });
- });
-
- it('displays a loading indicator', () => {
- expect(findLoadingIndicator().exists()).toBe(true);
- });
-
- it('does not display the metrics container', () => {
- expect(findMetricsWrapper().exists()).toBe(false);
- });
- });
-
- describe('when isLoading is false', () => {
- beforeEach(() => {
- factory({ isLoading: false });
- });
-
- it('does not display a loading indicator', () => {
- expect(findLoadingIndicator().exists()).toBe(false);
- });
-
- it('displays the metrics container', () => {
- expect(findMetricsWrapper().exists()).toBe(true);
- });
-
- it('renders two metrics', () => {
- expect(findMetricItem()).toHaveLength(metrics.length);
- });
-
- describe('with tooltip text', () => {
- const tooltipText = 'This is a tooltip';
- const tooltipMetric = {
- key: 'fifth_metric',
- value: '-',
- label: 'Metric with tooltip',
- unit: 'parsecs',
- tooltipText,
- };
-
- beforeEach(() => {
- factory({
- isLoading: false,
- metrics: [tooltipMetric],
- });
- });
-
- it('will render a tooltip', () => {
- const tt = getBinding(findTooltip().element, 'gl-tooltip');
- expect(tt.value.title).toEqual(tooltipText);
- });
- });
-
- describe.each`
- columnIndex | label | value | unit | link
- ${0} | ${'First metric'} | ${10} | ${' days'} | ${'some_link'}
- ${1} | ${'Yet another metric'} | ${20} | ${''} | ${null}
- ${2} | ${'Null metric without value'} | ${'-'} | ${''} | ${null}
- ${3} | ${'Metric without value'} | ${'-'} | ${''} | ${null}
- `('metric columns', ({ columnIndex, label, value, unit, link }) => {
- it(`renders ${value}${unit} ${label} with URL ${link}`, () => {
- const allMetricItems = findMetricItem();
- const metricItem = allMetricItems.at(columnIndex);
- const text = metricItem.text();
-
- expect(text).toContain(`${value}${unit}`);
- expect(text).toContain(label);
-
- if (link) {
- expect(metricItem.find('a').attributes('href')).toBe(link);
- } else {
- expect(metricItem.find('a').exists()).toBe(false);
- }
- });
- });
- });
- });
-});
diff --git a/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js b/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js
new file mode 100644
index 00000000000..2537b8fb816
--- /dev/null
+++ b/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js
@@ -0,0 +1,264 @@
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { TEST_HOST } from 'helpers/test_constants';
+import ProjectsDropdownFilter from '~/analytics/shared/components/projects_dropdown_filter.vue';
+import getProjects from '~/analytics/shared/graphql/projects.query.graphql';
+
+const projects = [
+ {
+ id: 'gid://gitlab/Project/1',
+ name: 'Gitlab Test',
+ fullPath: 'gitlab-org/gitlab-test',
+ avatarUrl: `${TEST_HOST}/images/home/nasa.svg`,
+ },
+ {
+ id: 'gid://gitlab/Project/2',
+ name: 'Gitlab Shell',
+ fullPath: 'gitlab-org/gitlab-shell',
+ avatarUrl: null,
+ },
+ {
+ id: 'gid://gitlab/Project/3',
+ name: 'Foo',
+ fullPath: 'gitlab-org/foo',
+ avatarUrl: null,
+ },
+];
+
+const defaultMocks = {
+ $apollo: {
+ query: jest.fn().mockResolvedValue({
+ data: { group: { projects: { nodes: projects } } },
+ }),
+ },
+};
+
+let spyQuery;
+
+describe('ProjectsDropdownFilter component', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ spyQuery = defaultMocks.$apollo.query;
+ wrapper = mount(ProjectsDropdownFilter, {
+ mocks: { ...defaultMocks },
+ propsData: {
+ groupId: 1,
+ groupNamespace: 'gitlab-org',
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findDropdown = () => wrapper.find(GlDropdown);
+
+ const findDropdownItems = () =>
+ findDropdown()
+ .findAll(GlDropdownItem)
+ .filter((w) => w.text() !== 'No matching results');
+
+ const findDropdownAtIndex = (index) => findDropdownItems().at(index);
+
+ const findDropdownButton = () => findDropdown().find('.dropdown-toggle');
+ const findDropdownButtonAvatar = () => findDropdown().find('.gl-avatar');
+ const findDropdownButtonAvatarAtIndex = (index) =>
+ findDropdownAtIndex(index).find('img.gl-avatar');
+ const findDropdownButtonIdentIconAtIndex = (index) =>
+ findDropdownAtIndex(index).find('div.gl-avatar-identicon');
+
+ const findDropdownNameAtIndex = (index) =>
+ findDropdownAtIndex(index).find('[data-testid="project-name"');
+ const findDropdownFullPathAtIndex = (index) =>
+ findDropdownAtIndex(index).find('[data-testid="project-full-path"]');
+
+ const selectDropdownItemAtIndex = (index) =>
+ findDropdownAtIndex(index).find('button').trigger('click');
+
+ const selectedIds = () => wrapper.vm.selectedProjects.map(({ id }) => id);
+
+ describe('queryParams are applied when fetching data', () => {
+ beforeEach(() => {
+ createComponent({
+ queryParams: {
+ first: 50,
+ includeSubgroups: true,
+ },
+ });
+ });
+
+ it('applies the correct queryParams when making an api call', async () => {
+ wrapper.setData({ searchTerm: 'gitlab' });
+
+ expect(spyQuery).toHaveBeenCalledTimes(1);
+
+ await wrapper.vm.$nextTick(() => {
+ expect(spyQuery).toHaveBeenCalledWith({
+ query: getProjects,
+ variables: {
+ search: 'gitlab',
+ groupFullPath: wrapper.vm.groupNamespace,
+ first: 50,
+ includeSubgroups: true,
+ },
+ });
+ });
+ });
+ });
+
+ describe('when passed a an array of defaultProject as prop', () => {
+ beforeEach(() => {
+ createComponent({
+ defaultProjects: [projects[0]],
+ });
+ });
+
+ it("displays the defaultProject's name", () => {
+ expect(findDropdownButton().text()).toContain(projects[0].name);
+ });
+
+ it("renders the defaultProject's avatar", () => {
+ expect(findDropdownButtonAvatar().exists()).toBe(true);
+ });
+
+ it('marks the defaultProject as selected', () => {
+ expect(findDropdownAtIndex(0).props('isChecked')).toBe(true);
+ });
+ });
+
+ describe('when multiSelect is false', () => {
+ beforeEach(() => {
+ createComponent({ multiSelect: false });
+ });
+
+ describe('displays the correct information', () => {
+ it('contains 3 items', () => {
+ expect(findDropdownItems()).toHaveLength(3);
+ });
+
+ it('renders an avatar when the project has an avatarUrl', () => {
+ expect(findDropdownButtonAvatarAtIndex(0).exists()).toBe(true);
+ expect(findDropdownButtonIdentIconAtIndex(0).exists()).toBe(false);
+ });
+
+ it("renders an identicon when the project doesn't have an avatarUrl", () => {
+ expect(findDropdownButtonAvatarAtIndex(1).exists()).toBe(false);
+ expect(findDropdownButtonIdentIconAtIndex(1).exists()).toBe(true);
+ });
+
+ it('renders the project name', () => {
+ projects.forEach((project, index) => {
+ expect(findDropdownNameAtIndex(index).text()).toBe(project.name);
+ });
+ });
+
+ it('renders the project fullPath', () => {
+ projects.forEach((project, index) => {
+ expect(findDropdownFullPathAtIndex(index).text()).toBe(project.fullPath);
+ });
+ });
+ });
+
+ describe('on project click', () => {
+ it('should emit the "selected" event with the selected project', () => {
+ selectDropdownItemAtIndex(0);
+
+ expect(wrapper.emitted().selected).toEqual([[[projects[0]]]]);
+ });
+
+ it('should change selection when new project is clicked', () => {
+ selectDropdownItemAtIndex(1);
+
+ expect(wrapper.emitted().selected).toEqual([[[projects[1]]]]);
+ });
+
+ it('selection should be emptied when a project is deselected', () => {
+ selectDropdownItemAtIndex(0); // Select the item
+ selectDropdownItemAtIndex(0); // deselect it
+
+ expect(wrapper.emitted().selected).toEqual([[[projects[0]]], [[]]]);
+ });
+
+ it('renders an avatar in the dropdown button when the project has an avatarUrl', async () => {
+ selectDropdownItemAtIndex(0);
+
+ await wrapper.vm.$nextTick().then(() => {
+ expect(findDropdownButtonAvatarAtIndex(0).exists()).toBe(true);
+ expect(findDropdownButtonIdentIconAtIndex(0).exists()).toBe(false);
+ });
+ });
+
+ it("renders an identicon in the dropdown button when the project doesn't have an avatarUrl", async () => {
+ selectDropdownItemAtIndex(1);
+
+ await wrapper.vm.$nextTick().then(() => {
+ expect(findDropdownButtonAvatarAtIndex(1).exists()).toBe(false);
+ expect(findDropdownButtonIdentIconAtIndex(1).exists()).toBe(true);
+ });
+ });
+ });
+ });
+
+ describe('when multiSelect is true', () => {
+ beforeEach(() => {
+ createComponent({ multiSelect: true });
+ });
+
+ describe('displays the correct information', () => {
+ it('contains 3 items', () => {
+ expect(findDropdownItems()).toHaveLength(3);
+ });
+
+ it('renders an avatar when the project has an avatarUrl', () => {
+ expect(findDropdownButtonAvatarAtIndex(0).exists()).toBe(true);
+ expect(findDropdownButtonIdentIconAtIndex(0).exists()).toBe(false);
+ });
+
+ it("renders an identicon when the project doesn't have an avatarUrl", () => {
+ expect(findDropdownButtonAvatarAtIndex(1).exists()).toBe(false);
+ expect(findDropdownButtonIdentIconAtIndex(1).exists()).toBe(true);
+ });
+
+ it('renders the project name', () => {
+ projects.forEach((project, index) => {
+ expect(findDropdownNameAtIndex(index).text()).toBe(project.name);
+ });
+ });
+
+ it('renders the project fullPath', () => {
+ projects.forEach((project, index) => {
+ expect(findDropdownFullPathAtIndex(index).text()).toBe(project.fullPath);
+ });
+ });
+ });
+
+ describe('on project click', () => {
+ it('should add to selection when new project is clicked', () => {
+ selectDropdownItemAtIndex(0);
+ selectDropdownItemAtIndex(1);
+
+ expect(selectedIds()).toEqual([projects[0].id, projects[1].id]);
+ });
+
+ it('should remove from selection when clicked again', () => {
+ selectDropdownItemAtIndex(0);
+ expect(selectedIds()).toEqual([projects[0].id]);
+
+ selectDropdownItemAtIndex(0);
+ expect(selectedIds()).toEqual([]);
+ });
+
+ it('renders the correct placeholder text when multiple projects are selected', async () => {
+ selectDropdownItemAtIndex(0);
+ selectDropdownItemAtIndex(1);
+
+ await wrapper.vm.$nextTick().then(() => {
+ expect(findDropdownButton().text()).toBe('2 projects selected');
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/analytics/shared/utils_spec.js b/spec/frontend/analytics/shared/utils_spec.js
new file mode 100644
index 00000000000..e3293f2d8bd
--- /dev/null
+++ b/spec/frontend/analytics/shared/utils_spec.js
@@ -0,0 +1,24 @@
+import { filterBySearchTerm } from '~/analytics/shared/utils';
+
+describe('filterBySearchTerm', () => {
+ const data = [
+ { name: 'eins', title: 'one' },
+ { name: 'zwei', title: 'two' },
+ { name: 'drei', title: 'three' },
+ ];
+ const searchTerm = 'rei';
+
+ it('filters data by `name` for the provided search term', () => {
+ expect(filterBySearchTerm(data, searchTerm)).toEqual([data[2]]);
+ });
+
+ it('with no search term returns the data', () => {
+ ['', null].forEach((search) => {
+ expect(filterBySearchTerm(data, search)).toEqual(data);
+ });
+ });
+
+ it('with a key, filters by the provided key', () => {
+ expect(filterBySearchTerm(data, 'ne', 'title')).toEqual([data[0]]);
+ });
+});
diff --git a/spec/frontend/analytics/usage_trends/components/instance_counts_spec.js b/spec/frontend/analytics/usage_trends/components/usage_counts_spec.js
index 707d2cc310f..703767dab47 100644
--- a/spec/frontend/analytics/usage_trends/components/instance_counts_spec.js
+++ b/spec/frontend/analytics/usage_trends/components/usage_counts_spec.js
@@ -1,5 +1,6 @@
+import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
+import { GlSingleStat } from '@gitlab/ui/dist/charts';
import { shallowMount } from '@vue/test-utils';
-import MetricCard from '~/analytics/shared/components/metric_card.vue';
import UsageCounts from '~/analytics/usage_trends/components/usage_counts.vue';
import { mockUsageCounts } from '../mock_data';
@@ -27,18 +28,18 @@ describe('UsageCounts', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
- const findMetricCard = () => wrapper.find(MetricCard);
+ const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoading);
+ const findAllSingleStats = () => wrapper.findAllComponents(GlSingleStat);
describe('while loading', () => {
beforeEach(() => {
createComponent({ loading: true });
});
- it('displays the metric card with isLoading=true', () => {
- expect(findMetricCard().props('isLoading')).toBe(true);
+ it('displays a loading indicator', () => {
+ expect(findSkeletonLoader().exists()).toBe(true);
});
});
@@ -47,8 +48,15 @@ describe('UsageCounts', () => {
createComponent({ data: { counts: mockUsageCounts } });
});
- it('passes the counts data to the metric card', () => {
- expect(findMetricCard().props('metrics')).toEqual(mockUsageCounts);
+ it.each`
+ index | value | title
+ ${0} | ${mockUsageCounts[0].value} | ${mockUsageCounts[0].label}
+ ${1} | ${mockUsageCounts[1].value} | ${mockUsageCounts[1].label}
+ `('renders a GlSingleStat for "$title"', ({ index, value, title }) => {
+ const singleStat = findAllSingleStats().at(index);
+
+ expect(singleStat.props('value')).toBe(`${value}`);
+ expect(singleStat.props('title')).toBe(title);
});
});
});
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index f708d8c7728..c3e5a2973d7 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -1481,7 +1481,7 @@ describe('Api', () => {
'Content-Type': 'application/json',
};
- describe('when usage data increment counter is called with feature flag disabled', () => {
+ describe('when service data increment counter is called with feature flag disabled', () => {
beforeEach(() => {
gon.features = { ...gon.features, usageDataApi: false };
});
@@ -1495,7 +1495,7 @@ describe('Api', () => {
});
});
- describe('when usage data increment counter is called', () => {
+ describe('when service data increment counter is called', () => {
beforeEach(() => {
gon.features = { ...gon.features, usageDataApi: true };
});
@@ -1526,7 +1526,7 @@ describe('Api', () => {
window.gon.current_user_id = 1;
});
- describe('when usage data increment unique users is called with feature flag disabled', () => {
+ describe('when service data increment unique users is called with feature flag disabled', () => {
beforeEach(() => {
gon.features = { ...gon.features, usageDataApi: false };
});
@@ -1541,7 +1541,7 @@ describe('Api', () => {
});
});
- describe('when usage data increment unique users is called', () => {
+ describe('when service data increment unique users is called', () => {
beforeEach(() => {
gon.features = { ...gon.features, usageDataApi: true };
});
diff --git a/spec/frontend/batch_comments/components/draft_note_spec.js b/spec/frontend/batch_comments/components/draft_note_spec.js
index c2d488a465e..5d22823e974 100644
--- a/spec/frontend/batch_comments/components/draft_note_spec.js
+++ b/spec/frontend/batch_comments/components/draft_note_spec.js
@@ -1,5 +1,6 @@
import { getByRole } from '@testing-library/dom';
import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { stubComponent } from 'helpers/stub_component';
import DraftNote from '~/batch_comments/components/draft_note.vue';
import { createStore } from '~/batch_comments/stores';
import NoteableNote from '~/notes/components/noteable_note.vue';
@@ -8,6 +9,14 @@ import { createDraft } from '../mock_data';
const localVue = createLocalVue();
+const NoteableNoteStub = stubComponent(NoteableNote, {
+ template: `
+ <div>
+ <slot name="note-header-info">Test</slot>
+ </div>
+ `,
+});
+
describe('Batch comments draft note component', () => {
let store;
let wrapper;
@@ -26,6 +35,9 @@ describe('Batch comments draft note component', () => {
store,
propsData,
localVue,
+ stubs: {
+ NoteableNote: NoteableNoteStub,
+ },
});
jest.spyOn(wrapper.vm.$store, 'dispatch').mockImplementation();
diff --git a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
index 53815820bbe..dfa6b99080b 100644
--- a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
+++ b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
@@ -10,7 +10,7 @@ exports[`Blob Header Filepath rendering matches the snapshot 1`] = `
cssclasses="mr-2"
filemode=""
filename="foo/bar/dummy.md"
- size="18"
+ size="16"
/>
<strong
diff --git a/spec/frontend/blob/components/blob_edit_content_spec.js b/spec/frontend/blob/components/blob_edit_content_spec.js
index 7de8d9236ed..9fc2356c018 100644
--- a/spec/frontend/blob/components/blob_edit_content_spec.js
+++ b/spec/frontend/blob/components/blob_edit_content_spec.js
@@ -3,7 +3,7 @@ import { nextTick } from 'vue';
import BlobEditContent from '~/blob/components/blob_edit_content.vue';
import * as utils from '~/blob/utils';
-jest.mock('~/editor/editor_lite');
+jest.mock('~/editor/source_editor');
describe('Blob Header Editing', () => {
let wrapper;
@@ -26,7 +26,7 @@ describe('Blob Header Editing', () => {
}
beforeEach(() => {
- jest.spyOn(utils, 'initEditorLite').mockImplementation(() => ({
+ jest.spyOn(utils, 'initSourceEditor').mockImplementation(() => ({
onDidChangeModelContent,
updateModelLanguage,
getValue,
@@ -68,9 +68,9 @@ describe('Blob Header Editing', () => {
expect(wrapper.find('#editor').exists()).toBe(true);
});
- it('initialises Editor Lite', () => {
+ it('initialises Source Editor', () => {
const el = wrapper.find({ ref: 'editor' }).element;
- expect(utils.initEditorLite).toHaveBeenCalledWith({
+ expect(utils.initSourceEditor).toHaveBeenCalledWith({
el,
blobPath: fileName,
blobGlobalId: fileGlobalId,
diff --git a/spec/frontend/blob/csv/csv_viewer_spec.js b/spec/frontend/blob/csv/csv_viewer_spec.js
new file mode 100644
index 00000000000..abb914b8f57
--- /dev/null
+++ b/spec/frontend/blob/csv/csv_viewer_spec.js
@@ -0,0 +1,75 @@
+import { GlAlert, GlLoadingIcon, GlTable } from '@gitlab/ui';
+import { getAllByRole } from '@testing-library/dom';
+import { shallowMount, mount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import CSVViewer from '~/blob/csv/csv_viewer.vue';
+
+const validCsv = 'one,two,three';
+const brokenCsv = '{\n "json": 1,\n "key": [1, 2, 3]\n}';
+
+describe('app/assets/javascripts/blob/csv/csv_viewer.vue', () => {
+ let wrapper;
+
+ const createComponent = ({ csv = validCsv, mountFunction = shallowMount } = {}) => {
+ wrapper = mountFunction(CSVViewer, {
+ propsData: {
+ csv,
+ },
+ });
+ };
+
+ const findCsvTable = () => wrapper.findComponent(GlTable);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should render loading spinner', () => {
+ createComponent();
+
+ expect(findLoadingIcon().props()).toMatchObject({
+ size: 'lg',
+ });
+ });
+
+ describe('when the CSV contains errors', () => {
+ it('should render alert', async () => {
+ createComponent({ csv: brokenCsv });
+ await nextTick;
+
+ expect(findAlert().props()).toMatchObject({
+ variant: 'danger',
+ });
+ });
+ });
+
+ describe('when the CSV contains no errors', () => {
+ it('should not render alert', async () => {
+ createComponent();
+ await nextTick;
+
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('renders the CSV table with the correct attributes', async () => {
+ createComponent();
+ await nextTick;
+
+ expect(findCsvTable().attributes()).toMatchObject({
+ 'empty-text': 'No CSV data to display.',
+ items: validCsv,
+ });
+ });
+
+ it('renders the CSV table with the correct content', async () => {
+ createComponent({ mountFunction: mount });
+ await nextTick;
+
+ expect(getAllByRole(wrapper.element, 'row', { name: /One/i })).toHaveLength(1);
+ expect(getAllByRole(wrapper.element, 'row', { name: /Two/i })).toHaveLength(1);
+ expect(getAllByRole(wrapper.element, 'row', { name: /Three/i })).toHaveLength(1);
+ });
+ });
+});
diff --git a/spec/frontend/blob/utils_spec.js b/spec/frontend/blob/utils_spec.js
index 3ff2e47e0b6..a543c0060cb 100644
--- a/spec/frontend/blob/utils_spec.js
+++ b/spec/frontend/blob/utils_spec.js
@@ -1,10 +1,10 @@
import * as utils from '~/blob/utils';
-import Editor from '~/editor/editor_lite';
+import Editor from '~/editor/source_editor';
-jest.mock('~/editor/editor_lite');
+jest.mock('~/editor/source_editor');
describe('Blob utilities', () => {
- describe('initEditorLite', () => {
+ describe('initSourceEditor', () => {
let editorEl;
const blobPath = 'foo.txt';
const blobContent = 'Foo bar';
@@ -15,8 +15,8 @@ describe('Blob utilities', () => {
});
describe('Monaco editor', () => {
- it('initializes the Editor Lite', () => {
- utils.initEditorLite({ el: editorEl });
+ it('initializes the Source Editor', () => {
+ utils.initSourceEditor({ el: editorEl });
expect(Editor).toHaveBeenCalledWith({
scrollbar: {
alwaysConsumeMouseWheel: false,
@@ -34,7 +34,7 @@ describe('Blob utilities', () => {
expect(Editor.prototype.createInstance).not.toHaveBeenCalled();
- utils.initEditorLite(params);
+ utils.initSourceEditor(params);
expect(Editor.prototype.createInstance).toHaveBeenCalledWith(params);
},
diff --git a/spec/frontend/blob/viewer/index_spec.js b/spec/frontend/blob/viewer/index_spec.js
index e4f145ae81b..6a24b76abc8 100644
--- a/spec/frontend/blob/viewer/index_spec.js
+++ b/spec/frontend/blob/viewer/index_spec.js
@@ -6,6 +6,10 @@ import { setTestTimeout } from 'helpers/timeout';
import BlobViewer from '~/blob/viewer/index';
import axios from '~/lib/utils/axios_utils';
+const execImmediately = (callback) => {
+ callback();
+};
+
describe('Blob viewer', () => {
let blob;
let mock;
@@ -17,6 +21,7 @@ describe('Blob viewer', () => {
setTestTimeout(2000);
beforeEach(() => {
+ jest.spyOn(window, 'requestIdleCallback').mockImplementation(execImmediately);
$.fn.extend(jQueryMock);
mock = new MockAdapter(axios);
diff --git a/spec/frontend/blob_edit/blob_bundle_spec.js b/spec/frontend/blob_edit/blob_bundle_spec.js
index eecc54be35b..8986dfbfa9c 100644
--- a/spec/frontend/blob_edit/blob_bundle_spec.js
+++ b/spec/frontend/blob_edit/blob_bundle_spec.js
@@ -3,21 +3,21 @@ import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import waitForPromises from 'helpers/wait_for_promises';
import blobBundle from '~/blob_edit/blob_bundle';
-import EditorLite from '~/blob_edit/edit_blob';
+import SourceEditor from '~/blob_edit/edit_blob';
jest.mock('~/blob_edit/edit_blob');
describe('BlobBundle', () => {
- it('does not load EditorLite by default', () => {
+ it('does not load SourceEditor by default', () => {
blobBundle();
- expect(EditorLite).not.toHaveBeenCalled();
+ expect(SourceEditor).not.toHaveBeenCalled();
});
- it('loads EditorLite for the edit screen', async () => {
+ it('loads SourceEditor for the edit screen', async () => {
setFixtures(`<div class="js-edit-blob-form"></div>`);
blobBundle();
await waitForPromises();
- expect(EditorLite).toHaveBeenCalled();
+ expect(SourceEditor).toHaveBeenCalled();
});
describe('No Suggest Popover', () => {
diff --git a/spec/frontend/blob_edit/edit_blob_spec.js b/spec/frontend/blob_edit/edit_blob_spec.js
index 3134feedcf3..2be72ded0a2 100644
--- a/spec/frontend/blob_edit/edit_blob_spec.js
+++ b/spec/frontend/blob_edit/edit_blob_spec.js
@@ -1,12 +1,12 @@
import waitForPromises from 'helpers/wait_for_promises';
import EditBlob from '~/blob_edit/edit_blob';
-import EditorLite from '~/editor/editor_lite';
-import { FileTemplateExtension } from '~/editor/extensions/editor_file_template_ext';
-import { EditorMarkdownExtension } from '~/editor/extensions/editor_markdown_ext';
+import { FileTemplateExtension } from '~/editor/extensions/source_editor_file_template_ext';
+import { EditorMarkdownExtension } from '~/editor/extensions/source_editor_markdown_ext';
+import SourceEditor from '~/editor/source_editor';
-jest.mock('~/editor/editor_lite');
-jest.mock('~/editor/extensions/editor_markdown_ext');
-jest.mock('~/editor/extensions/editor_file_template_ext');
+jest.mock('~/editor/source_editor');
+jest.mock('~/editor/extensions/source_editor_markdown_ext');
+jest.mock('~/editor/extensions/source_editor_file_template_ext');
describe('Blob Editing', () => {
const useMock = jest.fn();
@@ -24,7 +24,7 @@ describe('Blob Editing', () => {
<textarea id="file-content"></textarea>
</form>
`);
- jest.spyOn(EditorLite.prototype, 'createInstance').mockReturnValue(mockInstance);
+ jest.spyOn(SourceEditor.prototype, 'createInstance').mockReturnValue(mockInstance);
});
afterEach(() => {
EditorMarkdownExtension.mockClear();
diff --git a/spec/frontend/boards/board_card_inner_spec.js b/spec/frontend/boards/board_card_inner_spec.js
index 15ea5d4eec4..87f9a68f5dd 100644
--- a/spec/frontend/boards/board_card_inner_spec.js
+++ b/spec/frontend/boards/board_card_inner_spec.js
@@ -1,7 +1,7 @@
-import { GlLabel, GlLoadingIcon } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
+import { GlLabel, GlLoadingIcon, GlTooltip } from '@gitlab/ui';
import { range } from 'lodash';
import Vuex from 'vuex';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import BoardBlockedIcon from '~/boards/components/board_blocked_icon.vue';
import BoardCardInner from '~/boards/components/board_card_inner.vue';
import { issuableTypes } from '~/boards/constants';
@@ -35,8 +35,16 @@ describe('Board card component', () => {
let store;
const findBoardBlockedIcon = () => wrapper.find(BoardBlockedIcon);
-
- const createStore = () => {
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findEpicCountablesTotalTooltip = () => wrapper.findComponent(GlTooltip);
+ const findEpicCountables = () => wrapper.findByTestId('epic-countables');
+ const findEpicCountablesBadgeIssues = () => wrapper.findByTestId('epic-countables-counts-issues');
+ const findEpicCountablesBadgeWeight = () => wrapper.findByTestId('epic-countables-weight-issues');
+ const findEpicBadgeProgress = () => wrapper.findByTestId('epic-progress');
+ const findEpicCountablesTotalWeight = () => wrapper.findByTestId('epic-countables-total-weight');
+ const findEpicProgressTooltip = () => wrapper.findByTestId('epic-progress-tooltip-content');
+
+ const createStore = ({ isEpicBoard = false } = {}) => {
store = new Vuex.Store({
...defaultStore,
state: {
@@ -45,16 +53,14 @@ describe('Board card component', () => {
},
getters: {
isGroupBoard: () => true,
- isEpicBoard: () => false,
+ isEpicBoard: () => isEpicBoard,
isProjectBoard: () => false,
},
});
};
const createWrapper = (props = {}) => {
- createStore();
-
- wrapper = mount(BoardCardInner, {
+ wrapper = mountExtended(BoardCardInner, {
store,
propsData: {
list,
@@ -88,6 +94,7 @@ describe('Board card component', () => {
weight: 1,
};
+ createStore();
createWrapper({ item: issue, list });
});
@@ -414,7 +421,108 @@ describe('Board card component', () => {
},
});
- expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+ });
+
+ describe('is an epic board', () => {
+ const descendantCounts = {
+ closedEpics: 0,
+ closedIssues: 0,
+ openedEpics: 0,
+ openedIssues: 0,
+ };
+
+ const descendantWeightSum = {
+ closedIssues: 0,
+ openedIssues: 0,
+ };
+
+ beforeEach(() => {
+ createStore({ isEpicBoard: true });
+ });
+
+ it('should render if the item has issues', () => {
+ createWrapper({
+ item: {
+ ...issue,
+ descendantCounts,
+ descendantWeightSum,
+ hasIssues: true,
+ },
+ });
+
+ expect(findEpicCountables().exists()).toBe(true);
+ });
+
+ it('should not render if the item does not have issues', () => {
+ createWrapper({
+ item: {
+ ...issue,
+ descendantCounts,
+ descendantWeightSum,
+ hasIssues: false,
+ },
+ });
+
+ expect(findEpicCountablesBadgeIssues().exists()).toBe(false);
+ });
+
+ it('shows render item countBadge, weights, and progress correctly', () => {
+ createWrapper({
+ item: {
+ ...issue,
+ descendantCounts: {
+ ...descendantCounts,
+ openedIssues: 1,
+ },
+ descendantWeightSum: {
+ closedIssues: 10,
+ openedIssues: 5,
+ },
+ hasIssues: true,
+ },
+ });
+
+ expect(findEpicCountablesBadgeIssues().text()).toBe('1');
+ expect(findEpicCountablesBadgeWeight().text()).toBe('15');
+ expect(findEpicBadgeProgress().text()).toBe('67%');
+ });
+
+ it('does not render progress when weight is zero', () => {
+ createWrapper({
+ item: {
+ ...issue,
+ descendantCounts: {
+ ...descendantCounts,
+ openedIssues: 1,
+ },
+ descendantWeightSum,
+ hasIssues: true,
+ },
+ });
+
+ expect(findEpicBadgeProgress().exists()).toBe(false);
+ });
+
+ it('renders the tooltip with the correct data', () => {
+ createWrapper({
+ item: {
+ ...issue,
+ descendantCounts,
+ descendantWeightSum: {
+ closedIssues: 10,
+ openedIssues: 5,
+ },
+ hasIssues: true,
+ },
+ });
+
+ const tooltip = findEpicCountablesTotalTooltip();
+ expect(tooltip).toBeDefined();
+
+ expect(findEpicCountablesTotalWeight().text()).toBe('15');
+ expect(findEpicProgressTooltip().text()).toBe('10 of 15 weight completed');
});
});
});
diff --git a/spec/frontend/boards/board_list_helper.js b/spec/frontend/boards/board_list_helper.js
index 915b470df8d..c440c110094 100644
--- a/spec/frontend/boards/board_list_helper.js
+++ b/spec/frontend/boards/board_list_helper.js
@@ -1,34 +1,57 @@
-/* global List */
-/* global ListIssue */
-import MockAdapter from 'axios-mock-adapter';
-import Sortable from 'sortablejs';
-import Vue from 'vue';
-import BoardList from '~/boards/components/board_list_deprecated.vue';
-import '~/boards/models/issue';
-import '~/boards/models/list';
-import store from '~/boards/stores';
-import boardsStore from '~/boards/stores/boards_store';
-import axios from '~/lib/utils/axios_utils';
-import { listObj, boardsMockInterceptor } from './mock_data';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import Vuex from 'vuex';
-window.Sortable = Sortable;
+import BoardCard from '~/boards/components/board_card.vue';
+import BoardList from '~/boards/components/board_list.vue';
+import BoardNewIssue from '~/boards/components/board_new_issue.vue';
+import defaultState from '~/boards/stores/state';
+import { mockList, mockIssuesByListId, issues } from './mock_data';
export default function createComponent({
- done,
listIssueProps = {},
componentProps = {},
listProps = {},
-}) {
- const el = document.createElement('div');
+ actions = {},
+ getters = {},
+ provide = {},
+ state = defaultState,
+ stubs = {
+ BoardNewIssue,
+ BoardCard,
+ },
+} = {}) {
+ const localVue = createLocalVue();
+ localVue.use(Vuex);
- document.body.appendChild(el);
- const mock = new MockAdapter(axios);
- mock.onAny().reply(boardsMockInterceptor);
- boardsStore.create();
+ const store = new Vuex.Store({
+ state: {
+ boardItemsByListId: mockIssuesByListId,
+ boardItems: issues,
+ pageInfoByListId: {
+ 'gid://gitlab/List/1': { hasNextPage: true },
+ 'gid://gitlab/List/2': {},
+ },
+ listsFlags: {
+ 'gid://gitlab/List/1': {},
+ 'gid://gitlab/List/2': {},
+ },
+ selectedBoardItems: [],
+ ...state,
+ },
+ getters: {
+ isGroupBoard: () => false,
+ isProjectBoard: () => true,
+ isEpicBoard: () => false,
+ ...getters,
+ },
+ actions,
+ });
- const BoardListComp = Vue.extend(BoardList);
- const list = new List({ ...listObj, ...listProps });
- const issue = new ListIssue({
+ const list = {
+ ...mockList,
+ ...listProps,
+ };
+ const issue = {
title: 'Testing',
id: 1,
iid: 1,
@@ -36,31 +59,31 @@ export default function createComponent({
labels: [],
assignees: [],
...listIssueProps,
- });
- if (!Object.prototype.hasOwnProperty.call(listProps, 'issuesSize')) {
- list.issuesSize = 1;
+ };
+ if (!Object.prototype.hasOwnProperty.call(listProps, 'issuesCount')) {
+ list.issuesCount = 1;
}
- list.issues.push(issue);
- const component = new BoardListComp({
- el,
+ const component = shallowMount(BoardList, {
+ localVue,
store,
propsData: {
disabled: false,
list,
- issues: list.issues,
- loading: false,
+ boardItems: [issue],
+ canAdminList: true,
...componentProps,
},
provide: {
groupId: null,
rootPath: '/',
+ weightFeatureAvailable: false,
+ boardWeight: null,
+ canAdminList: true,
+ ...provide,
},
- }).$mount();
-
- Vue.nextTick(() => {
- done();
+ stubs,
});
- return { component, mock };
+ return component;
}
diff --git a/spec/frontend/boards/board_list_spec.js b/spec/frontend/boards/board_list_spec.js
index 76629c96f22..a3b1810ab80 100644
--- a/spec/frontend/boards/board_list_spec.js
+++ b/spec/frontend/boards/board_list_spec.js
@@ -1,95 +1,9 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
-import Vuex from 'vuex';
import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame';
+import createComponent from 'jest/boards/board_list_helper';
import BoardCard from '~/boards/components/board_card.vue';
-import BoardList from '~/boards/components/board_list.vue';
-import BoardNewIssue from '~/boards/components/board_new_issue.vue';
import eventHub from '~/boards/eventhub';
-import defaultState from '~/boards/stores/state';
-import { mockList, mockIssuesByListId, issues, mockIssues } from './mock_data';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-const actions = {
- fetchItemsForList: jest.fn(),
-};
-
-const createStore = (state = defaultState) => {
- return new Vuex.Store({
- state,
- actions,
- getters: {
- isGroupBoard: () => false,
- isProjectBoard: () => true,
- isEpicBoard: () => false,
- },
- });
-};
-
-const createComponent = ({
- listIssueProps = {},
- componentProps = {},
- listProps = {},
- state = {},
-} = {}) => {
- const store = createStore({
- boardItemsByListId: mockIssuesByListId,
- boardItems: issues,
- pageInfoByListId: {
- 'gid://gitlab/List/1': { hasNextPage: true },
- 'gid://gitlab/List/2': {},
- },
- listsFlags: {
- 'gid://gitlab/List/1': {},
- 'gid://gitlab/List/2': {},
- },
- selectedBoardItems: [],
- ...state,
- });
- const list = {
- ...mockList,
- ...listProps,
- };
- const issue = {
- title: 'Testing',
- id: 1,
- iid: 1,
- confidential: false,
- labels: [],
- assignees: [],
- ...listIssueProps,
- };
- if (!Object.prototype.hasOwnProperty.call(listProps, 'issuesCount')) {
- list.issuesCount = 1;
- }
-
- const component = shallowMount(BoardList, {
- localVue,
- propsData: {
- disabled: false,
- list,
- boardItems: [issue],
- canAdminList: true,
- ...componentProps,
- },
- store,
- provide: {
- groupId: null,
- rootPath: '/',
- weightFeatureAvailable: false,
- boardWeight: null,
- canAdminList: true,
- },
- stubs: {
- BoardCard,
- BoardNewIssue,
- },
- });
-
- return component;
-};
+import { mockIssues } from './mock_data';
describe('Board list component', () => {
let wrapper;
@@ -101,7 +15,6 @@ describe('Board list component', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
describe('When Expanded', () => {
@@ -176,6 +89,10 @@ describe('Board list component', () => {
});
describe('load more issues', () => {
+ const actions = {
+ fetchItemsForList: jest.fn(),
+ };
+
beforeEach(() => {
wrapper = createComponent({
listProps: { issuesCount: 25 },
@@ -184,6 +101,7 @@ describe('Board list component', () => {
it('does not load issues if already loading', () => {
wrapper = createComponent({
+ actions,
state: { listsFlags: { 'gid://gitlab/List/1': { isLoadingMore: true } } },
});
wrapper.vm.listRef.dispatchEvent(new Event('scroll'));
diff --git a/spec/frontend/boards/boards_util_spec.js b/spec/frontend/boards/boards_util_spec.js
index 289905a1948..d45b6e35a45 100644
--- a/spec/frontend/boards/boards_util_spec.js
+++ b/spec/frontend/boards/boards_util_spec.js
@@ -1,4 +1,35 @@
-import { filterVariables } from '~/boards/boards_util';
+import { formatIssueInput, filterVariables } from '~/boards/boards_util';
+
+describe('formatIssueInput', () => {
+ it('correctly merges boardConfig into the issue', () => {
+ const boardConfig = {
+ labels: [
+ {
+ type: 'GroupLabel',
+ id: 44,
+ },
+ ],
+ assigneeId: '55',
+ milestoneId: 66,
+ weight: 1,
+ };
+
+ const issueInput = {
+ labelIds: ['gid://gitlab/GroupLabel/5'],
+ projectPath: 'gitlab-org/gitlab-test',
+ id: 'gid://gitlab/Issue/11',
+ };
+
+ const result = formatIssueInput(issueInput, boardConfig);
+ expect(result).toEqual({
+ projectPath: 'gitlab-org/gitlab-test',
+ id: 'gid://gitlab/Issue/11',
+ labelIds: ['gid://gitlab/GroupLabel/5', 'gid://gitlab/GroupLabel/44'],
+ assigneeIds: ['gid://gitlab/User/55'],
+ milestoneId: 'gid://gitlab/Milestone/66',
+ });
+ });
+});
describe('filterVariables', () => {
it.each([
diff --git a/spec/frontend/boards/components/board_column_spec.js b/spec/frontend/boards/components/board_column_spec.js
index 4e523d636cd..f1964daa8b2 100644
--- a/spec/frontend/boards/components/board_column_spec.js
+++ b/spec/frontend/boards/components/board_column_spec.js
@@ -15,6 +15,10 @@ describe('Board Column Component', () => {
wrapper = null;
});
+ const initStore = () => {
+ store = createStore();
+ };
+
const createComponent = ({ listType = ListType.backlog, collapsed = false } = {}) => {
const boardId = '1';
@@ -29,8 +33,6 @@ describe('Board Column Component', () => {
listMock.assignee = {};
}
- store = createStore();
-
wrapper = shallowMount(BoardColumn, {
store,
propsData: {
@@ -47,6 +49,10 @@ describe('Board Column Component', () => {
const isCollapsed = () => wrapper.classes('is-collapsed');
describe('Given different list types', () => {
+ beforeEach(() => {
+ initStore();
+ });
+
it('is expandable when List Type is `backlog`', () => {
createComponent({ listType: ListType.backlog });
@@ -79,4 +85,31 @@ describe('Board Column Component', () => {
expect(wrapper.element.scrollIntoView).toHaveBeenCalled();
});
});
+
+ describe('on mount', () => {
+ beforeEach(async () => {
+ initStore();
+ jest.spyOn(store, 'dispatch').mockImplementation();
+ });
+
+ describe('when list is collapsed', () => {
+ it('does not call fetchItemsForList when', async () => {
+ createComponent({ collapsed: true });
+
+ await nextTick();
+
+ expect(store.dispatch).toHaveBeenCalledTimes(0);
+ });
+ });
+
+ describe('when the list is not collapsed', () => {
+ it('calls fetchItemsForList when', async () => {
+ createComponent({ collapsed: false });
+
+ await nextTick();
+
+ expect(store.dispatch).toHaveBeenCalledWith('fetchItemsForList', { listId: 300 });
+ });
+ });
+ });
});
diff --git a/spec/frontend/boards/components/board_content_sidebar_spec.js b/spec/frontend/boards/components/board_content_sidebar_spec.js
index 10d739c65f5..8a8250205d0 100644
--- a/spec/frontend/boards/components/board_content_sidebar_spec.js
+++ b/spec/frontend/boards/components/board_content_sidebar_spec.js
@@ -1,5 +1,6 @@
import { GlDrawer } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { MountingPortal } from 'portal-vue';
import Vuex from 'vuex';
import SidebarDropdownWidget from 'ee_else_ce/sidebar/components/sidebar_dropdown_widget.vue';
import { stubComponent } from 'helpers/stub_component';
@@ -9,7 +10,8 @@ import BoardSidebarTitle from '~/boards/components/sidebar/board_sidebar_title.v
import { ISSUABLE } from '~/boards/constants';
import SidebarDateWidget from '~/sidebar/components/date/sidebar_date_widget.vue';
import SidebarSubscriptionsWidget from '~/sidebar/components/subscriptions/sidebar_subscriptions_widget.vue';
-import { mockIssue, mockIssueGroupPath, mockIssueProjectPath } from '../mock_data';
+import SidebarTodoWidget from '~/sidebar/components/todo_toggle/sidebar_todo_widget.vue';
+import { mockActiveIssue, mockIssue, mockIssueGroupPath, mockIssueProjectPath } from '../mock_data';
describe('BoardContentSidebar', () => {
let wrapper;
@@ -25,7 +27,7 @@ describe('BoardContentSidebar', () => {
},
getters: {
activeBoardItem: () => {
- return { ...mockIssue, epic: null };
+ return { ...mockActiveIssue, epic: null };
},
groupPathForActiveIssue: () => mockIssueGroupPath,
projectPathForActiveIssue: () => mockIssueProjectPath,
@@ -90,6 +92,14 @@ describe('BoardContentSidebar', () => {
expect(wrapper.findComponent(GlDrawer).exists()).toBe(true);
});
+ it('confirms we render MountingPortal', () => {
+ expect(wrapper.find(MountingPortal).props()).toMatchObject({
+ mountTo: '#js-right-sidebar-portal',
+ append: true,
+ name: 'board-content-sidebar',
+ });
+ });
+
it('does not render GlDrawer when isSidebarOpen is false', () => {
createStore({ mockGetters: { isSidebarOpen: () => false } });
createComponent();
@@ -101,6 +111,10 @@ describe('BoardContentSidebar', () => {
expect(wrapper.findComponent(GlDrawer).props('open')).toBe(true);
});
+ it('renders SidebarTodoWidget', () => {
+ expect(wrapper.findComponent(SidebarTodoWidget).exists()).toBe(true);
+ });
+
it('renders BoardSidebarLabelsSelect', () => {
expect(wrapper.findComponent(BoardSidebarLabelsSelect).exists()).toBe(true);
});
@@ -138,7 +152,7 @@ describe('BoardContentSidebar', () => {
expect(toggleBoardItem).toHaveBeenCalledTimes(1);
expect(toggleBoardItem).toHaveBeenCalledWith(expect.any(Object), {
- boardItem: { ...mockIssue, epic: null },
+ boardItem: { ...mockActiveIssue, epic: null },
sidebarType: ISSUABLE,
});
});
diff --git a/spec/frontend/boards/components/board_content_spec.js b/spec/frontend/boards/components/board_content_spec.js
index 8c1a7bd3947..5a799b6388e 100644
--- a/spec/frontend/boards/components/board_content_spec.js
+++ b/spec/frontend/boards/components/board_content_spec.js
@@ -1,5 +1,6 @@
import { GlAlert } from '@gitlab/ui';
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
import Draggable from 'vuedraggable';
import Vuex from 'vuex';
import EpicsSwimlanes from 'ee_component/boards/components/epics_swimlanes.vue';
@@ -8,8 +9,7 @@ import BoardColumnDeprecated from '~/boards/components/board_column_deprecated.v
import BoardContent from '~/boards/components/board_content.vue';
import { mockLists, mockListsWithModel } from '../mock_data';
-const localVue = createLocalVue();
-localVue.use(Vuex);
+Vue.use(Vuex);
const actions = {
moveList: jest.fn(),
@@ -44,7 +44,6 @@ describe('BoardContent', () => {
...state,
});
wrapper = shallowMount(BoardContent, {
- localVue,
propsData: {
lists: mockListsWithModel,
disabled: false,
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index 80d740458dc..3966c3e6b87 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -12,8 +12,8 @@ import { createStore } from '~/boards/stores';
import { visitUrl } from '~/lib/utils/url_utility';
jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
visitUrl: jest.fn().mockName('visitUrlMock'),
- stripFinalUrlSegment: jest.requireActual('~/lib/utils/url_utility').stripFinalUrlSegment,
}));
const currentBoard = {
diff --git a/spec/frontend/boards/components/board_settings_sidebar_spec.js b/spec/frontend/boards/components/board_settings_sidebar_spec.js
index 464331b6e30..20a08be6c19 100644
--- a/spec/frontend/boards/components/board_settings_sidebar_spec.js
+++ b/spec/frontend/boards/components/board_settings_sidebar_spec.js
@@ -3,6 +3,7 @@ import { GlDrawer, GlLabel } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+import { MountingPortal } from 'portal-vue';
import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import BoardSettingsSidebar from '~/boards/components/board_settings_sidebar.vue';
@@ -51,6 +52,16 @@ describe('BoardSettingsSidebar', () => {
wrapper.destroy();
});
+ it('finds a MountingPortal component', () => {
+ createComponent();
+
+ expect(wrapper.find(MountingPortal).props()).toMatchObject({
+ mountTo: '#js-right-sidebar-portal',
+ append: true,
+ name: 'board-settings-sidebar',
+ });
+ });
+
describe('when sidebarType is "list"', () => {
it('finds a GlDrawer component', () => {
createComponent();
diff --git a/spec/frontend/boards/components/issue_board_filtered_search_spec.js b/spec/frontend/boards/components/issue_board_filtered_search_spec.js
new file mode 100644
index 00000000000..0e3cf59901e
--- /dev/null
+++ b/spec/frontend/boards/components/issue_board_filtered_search_spec.js
@@ -0,0 +1,44 @@
+import { shallowMount } from '@vue/test-utils';
+import BoardFilteredSearch from '~/boards/components/board_filtered_search.vue';
+import IssueBoardFilteredSpec from '~/boards/components/issue_board_filtered_search.vue';
+import { BoardType } from '~/boards/constants';
+import issueBoardFilters from '~/boards/issue_board_filters';
+import { mockTokens } from '../mock_data';
+
+describe('IssueBoardFilter', () => {
+ let wrapper;
+
+ const createComponent = ({ initialFilterParams = {} } = {}) => {
+ wrapper = shallowMount(IssueBoardFilteredSpec, {
+ provide: { initialFilterParams },
+ props: { fullPath: '', boardType: '' },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('finds BoardFilteredSearch', () => {
+ expect(wrapper.find(BoardFilteredSearch).exists()).toBe(true);
+ });
+
+ it.each([[BoardType.group], [BoardType.project]])(
+ 'when boardType is %s we pass the correct tokens to BoardFilteredSearch',
+ (boardType) => {
+ const { fetchAuthors, fetchLabels } = issueBoardFilters({}, '', boardType);
+
+ const tokens = mockTokens(fetchLabels, fetchAuthors);
+
+ expect(wrapper.find(BoardFilteredSearch).props('tokens').toString()).toBe(
+ tokens.toString(),
+ );
+ },
+ );
+ });
+});
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index bcaca9522e4..6ac4db8cdaa 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -5,6 +5,9 @@ import Vue from 'vue';
import '~/boards/models/list';
import { ListType } from '~/boards/constants';
import boardsStore from '~/boards/stores/boards_store';
+import { __ } from '~/locale';
+import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
+import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
export const boardObj = {
id: 1,
@@ -179,6 +182,7 @@ export const mockIssue = {
export const mockActiveIssue = {
...mockIssue,
+ fullId: 'gid://gitlab/Issue/436',
id: 436,
iid: '27',
subscribed: false,
@@ -287,7 +291,7 @@ export const setMockEndpoints = (opts = {}) => {
export const mockList = {
id: 'gid://gitlab/List/1',
- title: 'Backlog',
+ title: 'Open',
position: -Infinity,
listType: 'backlog',
collapsed: false,
@@ -526,3 +530,44 @@ export const mockMoveData = {
originalIssue: { foo: 'bar' },
...mockMoveIssueParams,
};
+
+export const mockTokens = (fetchLabels, fetchAuthors) => [
+ {
+ icon: 'labels',
+ title: __('Label'),
+ type: 'label_name',
+ operators: [
+ { value: '=', description: 'is' },
+ { value: '!=', description: 'is not' },
+ ],
+ token: LabelToken,
+ unique: false,
+ symbol: '~',
+ fetchLabels,
+ },
+ {
+ icon: 'pencil',
+ title: __('Author'),
+ type: 'author_username',
+ operators: [
+ { value: '=', description: 'is' },
+ { value: '!=', description: 'is not' },
+ ],
+ symbol: '@',
+ token: AuthorToken,
+ unique: true,
+ fetchAuthors,
+ },
+ {
+ icon: 'user',
+ title: __('Assignee'),
+ type: 'assignee_username',
+ operators: [
+ { value: '=', description: 'is' },
+ { value: '!=', description: 'is not' },
+ ],
+ token: AuthorToken,
+ unique: true,
+ fetchAuthors,
+ },
+];
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index b28412f2127..5e16e389ddc 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -492,6 +492,63 @@ describe('moveList', () => {
});
describe('updateList', () => {
+ const listId = 'gid://gitlab/List/1';
+ const createState = (boardItemsByListId = {}) => ({
+ fullPath: 'gitlab-org',
+ fullBoardId: 'gid://gitlab/Board/1',
+ boardType: 'group',
+ disabled: false,
+ boardLists: [{ type: 'closed' }],
+ issuableType: issuableTypes.issue,
+ boardItemsByListId,
+ });
+
+ describe('when state doesnt have list items', () => {
+ it('calls fetchItemsByList', async () => {
+ const dispatch = jest.fn();
+
+ jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
+ data: {
+ updateBoardList: {
+ errors: [],
+ list: {
+ id: listId,
+ },
+ },
+ },
+ });
+
+ await actions.updateList({ commit: () => {}, state: createState(), dispatch }, { listId });
+
+ expect(dispatch.mock.calls).toEqual([['fetchItemsForList', { listId }]]);
+ });
+ });
+
+ describe('when state has list items', () => {
+ it('doesnt call fetchItemsByList', async () => {
+ const commit = jest.fn();
+ const dispatch = jest.fn();
+
+ jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
+ data: {
+ updateBoardList: {
+ errors: [],
+ list: {
+ id: listId,
+ },
+ },
+ },
+ });
+
+ await actions.updateList(
+ { commit, state: createState({ [listId]: [] }), dispatch },
+ { listId },
+ );
+
+ expect(dispatch.mock.calls).toEqual([]);
+ });
+ });
+
it('should commit UPDATE_LIST_FAILURE mutation when API returns an error', (done) => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
@@ -502,19 +559,10 @@ describe('updateList', () => {
},
});
- const state = {
- fullPath: 'gitlab-org',
- fullBoardId: 'gid://gitlab/Board/1',
- boardType: 'group',
- disabled: false,
- boardLists: [{ type: 'closed' }],
- issuableType: issuableTypes.issue,
- };
-
testAction(
actions.updateList,
{ listId: 'gid://gitlab/List/1', position: 1 },
- state,
+ createState(),
[{ type: types.UPDATE_LIST_FAILURE }],
[],
done,
@@ -667,6 +715,19 @@ describe('fetchItemsForList', () => {
[listId]: pageInfo,
};
+ describe('when list id is undefined', () => {
+ it('does not call the query', async () => {
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
+
+ await actions.fetchItemsForList(
+ { state, getters: () => {}, commit: () => {} },
+ { listId: undefined },
+ );
+
+ expect(gqlClient.query).toHaveBeenCalledTimes(0);
+ });
+ });
+
it('should commit mutations REQUEST_ITEMS_FOR_LIST and RECEIVE_ITEMS_FOR_LIST_SUCCESS on success', (done) => {
jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
@@ -1111,16 +1172,13 @@ describe('updateIssueOrder', () => {
describe('setAssignees', () => {
const node = { username: 'name' };
- const projectPath = 'h/h';
- const refPath = `${projectPath}#3`;
- const iid = '1';
describe('when succeeds', () => {
it('calls the correct mutation with the correct values', (done) => {
testAction(
actions.setAssignees,
- [node],
- { activeBoardItem: { iid, referencePath: refPath }, commit: () => {} },
+ { assignees: [node], iid: '1' },
+ { commit: () => {} },
[
{
type: 'UPDATE_BOARD_ITEM_BY_ID',
diff --git a/spec/frontend/boards/stores/getters_spec.js b/spec/frontend/boards/stores/getters_spec.js
index e7efb21bee5..c0774dd3ae1 100644
--- a/spec/frontend/boards/stores/getters_spec.js
+++ b/spec/frontend/boards/stores/getters_spec.js
@@ -92,7 +92,7 @@ describe('Boards - Getters', () => {
it.each`
id | expected
${'1'} | ${'issue'}
- ${''} | ${{}}
+ ${''} | ${{ id: '', iid: '', fullId: '' }}
`('returns $expected when $id is passed to state', ({ id, expected }) => {
const state = { boardItems: { 1: 'issue' }, activeId: id };
diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js
index 5b38f04e77b..37f0969a39a 100644
--- a/spec/frontend/boards/stores/mutations_spec.js
+++ b/spec/frontend/boards/stores/mutations_spec.js
@@ -35,6 +35,7 @@ describe('Board Store Mutations', () => {
describe('SET_INITIAL_BOARD_DATA', () => {
it('Should set initial Boards data to state', () => {
+ const allowSubEpics = true;
const boardId = 1;
const fullPath = 'gitlab-org';
const boardType = 'group';
@@ -45,6 +46,7 @@ describe('Board Store Mutations', () => {
const issuableType = issuableTypes.issue;
mutations[types.SET_INITIAL_BOARD_DATA](state, {
+ allowSubEpics,
boardId,
fullPath,
boardType,
@@ -53,6 +55,7 @@ describe('Board Store Mutations', () => {
issuableType,
});
+ expect(state.allowSubEpics).toBe(allowSubEpics);
expect(state.boardId).toEqual(boardId);
expect(state.fullPath).toEqual(fullPath);
expect(state.boardType).toEqual(boardType);
diff --git a/spec/frontend/branches/components/delete_branch_button_spec.js b/spec/frontend/branches/components/delete_branch_button_spec.js
index acbc83a9bdc..b029f34c3d7 100644
--- a/spec/frontend/branches/components/delete_branch_button_spec.js
+++ b/spec/frontend/branches/components/delete_branch_button_spec.js
@@ -34,7 +34,7 @@ describe('Delete branch button', () => {
expect(findDeleteButton().attributes()).toMatchObject({
title: 'Delete branch',
- variant: 'danger',
+ variant: 'default',
icon: 'remove',
});
});
@@ -44,7 +44,7 @@ describe('Delete branch button', () => {
expect(findDeleteButton().attributes()).toMatchObject({
title: 'Delete protected branch',
- variant: 'danger',
+ variant: 'default',
icon: 'remove',
});
});
@@ -78,7 +78,7 @@ describe('Delete branch button', () => {
expect(findDeleteButton().attributes()).toMatchObject({
title: 'Delete branch',
- variant: 'danger',
+ variant: 'default',
});
});
diff --git a/spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js b/spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js
index df81b78d010..553ca52f9ce 100644
--- a/spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js
+++ b/spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js
@@ -1,6 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import { registerCaptchaModalInterceptor } from '~/captcha/captcha_modal_axios_interceptor';
+import UnsolvedCaptchaError from '~/captcha/unsolved_captcha_error';
import { waitForCaptchaToBeSolved } from '~/captcha/wait_for_captcha_to_be_solved';
import axios from '~/lib/utils/axios_utils';
import httpStatusCodes from '~/lib/utils/http_status';
@@ -25,22 +26,24 @@ describe('registerCaptchaModalInterceptor', () => {
let mock;
beforeEach(() => {
+ waitForCaptchaToBeSolved.mockRejectedValue(new UnsolvedCaptchaError());
+
mock = new MockAdapter(axios);
- mock.onAny('/no-captcha').reply(200, AXIOS_RESPONSE);
- mock.onAny('/error').reply(404, AXIOS_RESPONSE);
- mock.onAny('/captcha').reply((config) => {
+ mock.onAny('/endpoint-without-captcha').reply(200, AXIOS_RESPONSE);
+ mock.onAny('/endpoint-with-unrelated-error').reply(404, AXIOS_RESPONSE);
+ mock.onAny('/endpoint-with-captcha').reply((config) => {
if (!supportedMethods.includes(config.method)) {
return [httpStatusCodes.METHOD_NOT_ALLOWED, { method: config.method }];
}
- try {
- const { captcha_response, spam_log_id, ...rest } = JSON.parse(config.data);
- // eslint-disable-next-line babel/camelcase
- if (captcha_response === CAPTCHA_RESPONSE && spam_log_id === SPAM_LOG_ID) {
- return [httpStatusCodes.OK, { ...rest, method: config.method, CAPTCHA_SUCCESS }];
- }
- } catch (e) {
- return [httpStatusCodes.BAD_REQUEST, { method: config.method }];
+ const data = JSON.parse(config.data);
+ const {
+ 'X-GitLab-Captcha-Response': captchaResponse,
+ 'X-GitLab-Spam-Log-Id': spamLogId,
+ } = config.headers;
+
+ if (captchaResponse === CAPTCHA_RESPONSE && spamLogId === SPAM_LOG_ID) {
+ return [httpStatusCodes.OK, { ...data, method: config.method, CAPTCHA_SUCCESS }];
}
return [httpStatusCodes.CONFLICT, NEEDS_CAPTCHA_RESPONSE];
@@ -56,7 +59,7 @@ describe('registerCaptchaModalInterceptor', () => {
describe.each([...supportedMethods, ...unsupportedMethods])('For HTTP method %s', (method) => {
it('successful requests are passed through', async () => {
- const { data, status } = await axios[method]('/no-captcha');
+ const { data, status } = await axios[method]('/endpoint-without-captcha');
expect(status).toEqual(httpStatusCodes.OK);
expect(data).toEqual(AXIOS_RESPONSE);
@@ -64,7 +67,7 @@ describe('registerCaptchaModalInterceptor', () => {
});
it('error requests without needs_captcha_response_errors are passed through', async () => {
- await expect(() => axios[method]('/error')).rejects.toThrow(
+ await expect(() => axios[method]('/endpoint-with-unrelated-error')).rejects.toThrow(
expect.objectContaining({
response: expect.objectContaining({
status: httpStatusCodes.NOT_FOUND,
@@ -79,21 +82,35 @@ describe('registerCaptchaModalInterceptor', () => {
describe.each(supportedMethods)('For HTTP method %s', (method) => {
describe('error requests with needs_captcha_response_errors', () => {
const submittedData = { ID: 12345 };
+ const submittedHeaders = { 'Submitted-Header': 67890 };
it('re-submits request if captcha was solved correctly', async () => {
- waitForCaptchaToBeSolved.mockResolvedValue(CAPTCHA_RESPONSE);
- const { data: returnedData } = await axios[method]('/captcha', submittedData);
+ waitForCaptchaToBeSolved.mockResolvedValueOnce(CAPTCHA_RESPONSE);
+ const axiosResponse = await axios[method]('/endpoint-with-captcha', submittedData, {
+ headers: submittedHeaders,
+ });
+ const {
+ data: returnedData,
+ config: { headers: returnedHeaders },
+ } = axiosResponse;
expect(waitForCaptchaToBeSolved).toHaveBeenCalledWith(CAPTCHA_SITE_KEY);
expect(returnedData).toEqual({ ...submittedData, CAPTCHA_SUCCESS, method });
+ expect(returnedHeaders).toEqual(
+ expect.objectContaining({
+ ...submittedHeaders,
+ 'X-GitLab-Captcha-Response': CAPTCHA_RESPONSE,
+ 'X-GitLab-Spam-Log-Id': SPAM_LOG_ID,
+ }),
+ );
expect(mock.history[method]).toHaveLength(2);
});
it('does not re-submit request if captcha was not solved', async () => {
- const error = new Error('Captcha not solved');
- waitForCaptchaToBeSolved.mockRejectedValue(error);
- await expect(() => axios[method]('/captcha', submittedData)).rejects.toThrow(error);
+ await expect(() => axios[method]('/endpoint-with-captcha', submittedData)).rejects.toThrow(
+ new UnsolvedCaptchaError(),
+ );
expect(waitForCaptchaToBeSolved).toHaveBeenCalledWith(CAPTCHA_SITE_KEY);
expect(mock.history[method]).toHaveLength(1);
@@ -103,7 +120,7 @@ describe('registerCaptchaModalInterceptor', () => {
describe.each(unsupportedMethods)('For HTTP method %s', (method) => {
it('ignores captcha response', async () => {
- await expect(() => axios[method]('/captcha')).rejects.toThrow(
+ await expect(() => axios[method]('/endpoint-with-captcha')).rejects.toThrow(
expect.objectContaining({
response: expect.objectContaining({
status: httpStatusCodes.METHOD_NOT_ALLOWED,
diff --git a/spec/frontend/ci_lint/components/ci_lint_spec.js b/spec/frontend/ci_lint/components/ci_lint_spec.js
index 8a065436da0..36d860b1ccd 100644
--- a/spec/frontend/ci_lint/components/ci_lint_spec.js
+++ b/spec/frontend/ci_lint/components/ci_lint_spec.js
@@ -4,7 +4,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import CiLint from '~/ci_lint/components/ci_lint.vue';
import CiLintResults from '~/pipeline_editor/components/lint/ci_lint_results.vue';
import lintCIMutation from '~/pipeline_editor/graphql/mutations/lint_ci.mutation.graphql';
-import EditorLite from '~/vue_shared/components/editor_lite.vue';
+import SourceEditor from '~/vue_shared/components/source_editor.vue';
import { mockLintDataValid } from '../mock_data';
describe('CI Lint', () => {
@@ -35,7 +35,7 @@ describe('CI Lint', () => {
});
};
- const findEditor = () => wrapper.find(EditorLite);
+ const findEditor = () => wrapper.find(SourceEditor);
const findAlert = () => wrapper.find(GlAlert);
const findCiLintResults = () => wrapper.find(CiLintResults);
const findValidateBtn = () => wrapper.find('[data-testid="ci-lint-validate"]');
diff --git a/spec/frontend/clusters/clusters_bundle_spec.js b/spec/frontend/clusters/clusters_bundle_spec.js
index cd0eda2ab49..42990334f0a 100644
--- a/spec/frontend/clusters/clusters_bundle_spec.js
+++ b/spec/frontend/clusters/clusters_bundle_spec.js
@@ -2,15 +2,12 @@ import MockAdapter from 'axios-mock-adapter';
import { loadHTMLFixture } from 'helpers/fixtures';
import { setTestTimeout } from 'helpers/timeout';
import Clusters from '~/clusters/clusters_bundle';
-import { APPLICATION_STATUS, APPLICATIONS, RUNNER } from '~/clusters/constants';
import axios from '~/lib/utils/axios_utils';
import initProjectSelectDropdown from '~/project_select';
jest.mock('~/lib/utils/poll');
jest.mock('~/project_select');
-const { INSTALLING, INSTALLABLE, INSTALLED, UNINSTALLING } = APPLICATION_STATUS;
-
describe('Clusters', () => {
setTestTimeout(1000);
@@ -57,67 +54,6 @@ describe('Clusters', () => {
});
});
- describe('checkForNewInstalls', () => {
- const INITIAL_APP_MAP = {
- helm: { status: null, title: 'Helm Tiller' },
- ingress: { status: null, title: 'Ingress' },
- runner: { status: null, title: 'GitLab Runner' },
- };
-
- it('does not show alert when things transition from initial null state to something', () => {
- cluster.checkForNewInstalls(INITIAL_APP_MAP, {
- ...INITIAL_APP_MAP,
- helm: { status: INSTALLABLE, title: 'Helm Tiller' },
- });
-
- const flashMessage = document.querySelector('.js-cluster-application-notice .flash-text');
-
- expect(flashMessage).toBeNull();
- });
-
- it('shows an alert when something gets newly installed', () => {
- cluster.checkForNewInstalls(
- {
- ...INITIAL_APP_MAP,
- helm: { status: INSTALLING, title: 'Helm Tiller' },
- },
- {
- ...INITIAL_APP_MAP,
- helm: { status: INSTALLED, title: 'Helm Tiller' },
- },
- );
-
- const flashMessage = document.querySelector('.js-cluster-application-notice .flash-text');
-
- expect(flashMessage).not.toBeNull();
- expect(flashMessage.textContent.trim()).toEqual(
- 'Helm Tiller was successfully installed on your Kubernetes cluster',
- );
- });
-
- it('shows an alert when multiple things gets newly installed', () => {
- cluster.checkForNewInstalls(
- {
- ...INITIAL_APP_MAP,
- helm: { status: INSTALLING, title: 'Helm Tiller' },
- ingress: { status: INSTALLABLE, title: 'Ingress' },
- },
- {
- ...INITIAL_APP_MAP,
- helm: { status: INSTALLED, title: 'Helm Tiller' },
- ingress: { status: INSTALLED, title: 'Ingress' },
- },
- );
-
- const flashMessage = document.querySelector('.js-cluster-application-notice .flash-text');
-
- expect(flashMessage).not.toBeNull();
- expect(flashMessage.textContent.trim()).toEqual(
- 'Helm Tiller, Ingress was successfully installed on your Kubernetes cluster',
- );
- });
- });
-
describe('updateContainer', () => {
const { location } = window;
@@ -237,77 +173,6 @@ describe('Clusters', () => {
});
});
- describe('installApplication', () => {
- it.each(APPLICATIONS)('tries to install %s', (applicationId, done) => {
- jest.spyOn(cluster.service, 'installApplication').mockResolvedValue();
-
- cluster.store.state.applications[applicationId].status = INSTALLABLE;
-
- const params = {};
- if (applicationId === 'knative') {
- params.hostname = 'test-example.com';
- }
-
- // eslint-disable-next-line promise/valid-params
- cluster
- .installApplication({ id: applicationId, params })
- .then(() => {
- expect(cluster.store.state.applications[applicationId].status).toEqual(INSTALLING);
- expect(cluster.store.state.applications[applicationId].requestReason).toEqual(null);
- expect(cluster.service.installApplication).toHaveBeenCalledWith(applicationId, params);
- done();
- })
- .catch();
- });
-
- it('sets error request status when the request fails', () => {
- jest
- .spyOn(cluster.service, 'installApplication')
- .mockRejectedValueOnce(new Error('STUBBED ERROR'));
-
- cluster.store.state.applications.helm.status = INSTALLABLE;
-
- const promise = cluster.installApplication({ id: 'helm' });
-
- return promise.then(() => {
- expect(cluster.store.state.applications.helm.status).toEqual(INSTALLABLE);
- expect(cluster.store.state.applications.helm.installFailed).toBe(true);
-
- expect(cluster.store.state.applications.helm.requestReason).toBeDefined();
- });
- });
- });
-
- describe('uninstallApplication', () => {
- it.each(APPLICATIONS)('tries to uninstall %s', (applicationId) => {
- jest.spyOn(cluster.service, 'uninstallApplication').mockResolvedValueOnce();
-
- cluster.store.state.applications[applicationId].status = INSTALLED;
-
- cluster.uninstallApplication({ id: applicationId });
-
- expect(cluster.store.state.applications[applicationId].status).toEqual(UNINSTALLING);
- expect(cluster.store.state.applications[applicationId].requestReason).toEqual(null);
- expect(cluster.service.uninstallApplication).toHaveBeenCalledWith(applicationId);
- });
-
- it('sets error request status when the uninstall request fails', () => {
- jest
- .spyOn(cluster.service, 'uninstallApplication')
- .mockRejectedValueOnce(new Error('STUBBED ERROR'));
-
- cluster.store.state.applications.helm.status = INSTALLED;
-
- const promise = cluster.uninstallApplication({ id: 'helm' });
-
- return promise.then(() => {
- expect(cluster.store.state.applications.helm.status).toEqual(INSTALLED);
- expect(cluster.store.state.applications.helm.uninstallFailed).toBe(true);
- expect(cluster.store.state.applications.helm.requestReason).toBeDefined();
- });
- });
- });
-
describe('fetch cluster environments success', () => {
beforeEach(() => {
jest.spyOn(cluster.store, 'toggleFetchEnvironments').mockReturnThis();
@@ -328,7 +193,6 @@ describe('Clusters', () => {
describe('handleClusterStatusSuccess', () => {
beforeEach(() => {
jest.spyOn(cluster.store, 'updateStateFromServer').mockReturnThis();
- jest.spyOn(cluster, 'checkForNewInstalls').mockReturnThis();
jest.spyOn(cluster, 'updateContainer').mockReturnThis();
cluster.handleClusterStatusSuccess({ data: {} });
});
@@ -337,38 +201,8 @@ describe('Clusters', () => {
expect(cluster.store.updateStateFromServer).toHaveBeenCalled();
});
- it('checks for new installable apps', () => {
- expect(cluster.checkForNewInstalls).toHaveBeenCalled();
- });
-
it('updates message containers', () => {
expect(cluster.updateContainer).toHaveBeenCalled();
});
});
-
- describe('updateApplication', () => {
- const params = { version: '1.0.0' };
- let storeUpdateApplication;
- let installApplication;
-
- beforeEach(() => {
- storeUpdateApplication = jest.spyOn(cluster.store, 'updateApplication');
- installApplication = jest.spyOn(cluster.service, 'installApplication');
-
- cluster.updateApplication({ id: RUNNER, params });
- });
-
- afterEach(() => {
- storeUpdateApplication.mockRestore();
- installApplication.mockRestore();
- });
-
- it('calls store updateApplication method', () => {
- expect(storeUpdateApplication).toHaveBeenCalledWith(RUNNER);
- });
-
- it('sends installApplication request', () => {
- expect(installApplication).toHaveBeenCalledWith(RUNNER, params);
- });
- });
});
diff --git a/spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap
deleted file mode 100644
index c2ace1b4e30..00000000000
--- a/spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap
+++ /dev/null
@@ -1,105 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Applications Cert-Manager application shows the correct description 1`] = `
-<p
- data-testid="certManagerDescription"
->
- Cert-Manager is a native Kubernetes certificate management controller that helps with issuing certificates. Installing Cert-Manager on your cluster will issue a certificate by
- <a
- class="gl-link"
- href="https://letsencrypt.org/"
- rel="noopener noreferrer"
- target="_blank"
- >
- Let's Encrypt
- </a>
- and ensure that certificates are valid and up-to-date.
-</p>
-`;
-
-exports[`Applications Cilium application shows the correct description 1`] = `
-<p
- data-testid="ciliumDescription"
->
- Protect your clusters with GitLab Container Network Policies by enforcing how pods communicate with each other and other network endpoints.
- <a
- class="gl-link"
- href="cilium-help-path"
- rel="noopener"
- target="_blank"
- >
- Learn more about configuring Network Policies here.
- </a>
-</p>
-`;
-
-exports[`Applications Crossplane application shows the correct description 1`] = `
-<p
- data-testid="crossplaneDescription"
->
- Crossplane enables declarative provisioning of managed services from your cloud of choice using
- <code>
- kubectl
- </code>
- or
- <a
- class="gl-link"
- href="https://docs.gitlab.com/ee/user/clusters/applications.html#crossplane"
- rel="noopener noreferrer"
- target="_blank"
- >
- GitLab Integration
- </a>
- . Crossplane runs inside your Kubernetes cluster and supports secure connectivity and secrets management between app containers and the cloud services they depend on.
-</p>
-`;
-
-exports[`Applications Ingress application shows the correct warning message 1`] = `
-<span
- data-testid="ingressCostWarning"
->
- Installing Ingress may incur additional costs. Learn more about
- <a
- class="gl-link"
- href="https://cloud.google.com/compute/pricing#lb"
- rel="noopener noreferrer"
- target="_blank"
- >
- pricing
- </a>
- .
-</span>
-`;
-
-exports[`Applications Knative application shows the correct description 1`] = `
-<span
- data-testid="installed-via"
->
- installed via
- <a
- class="gl-link"
- href=""
- rel="noopener"
- target="_blank"
- >
- Cloud Run
- </a>
-</span>
-`;
-
-exports[`Applications Prometheus application shows the correct description 1`] = `
-<span
- data-testid="prometheusDescription"
->
- Prometheus is an open-source monitoring system with
- <a
- class="gl-link"
- href="https://docs.gitlab.com/ee/user/project/integrations/prometheus.html"
- rel="noopener noreferrer"
- target="_blank"
- >
- GitLab Integration
- </a>
- to monitor deployed applications.
-</span>
-`;
diff --git a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
index e5e336eb3d5..0e1fe790771 100644
--- a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
@@ -156,7 +156,6 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
<!---->
</div>
-
</ul>
</div>
diff --git a/spec/frontend/clusters/components/application_row_spec.js b/spec/frontend/clusters/components/application_row_spec.js
deleted file mode 100644
index 6bad1db542b..00000000000
--- a/spec/frontend/clusters/components/application_row_spec.js
+++ /dev/null
@@ -1,505 +0,0 @@
-import { GlSprintf } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import ApplicationRow from '~/clusters/components/application_row.vue';
-import UninstallApplicationConfirmationModal from '~/clusters/components/uninstall_application_confirmation_modal.vue';
-import UpdateApplicationConfirmationModal from '~/clusters/components/update_application_confirmation_modal.vue';
-import { APPLICATION_STATUS, ELASTIC_STACK } from '~/clusters/constants';
-import eventHub from '~/clusters/event_hub';
-
-import { DEFAULT_APPLICATION_STATE } from '../services/mock_data';
-
-describe('Application Row', () => {
- let wrapper;
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- const mountComponent = (data) => {
- wrapper = shallowMount(ApplicationRow, {
- stubs: { GlSprintf },
- propsData: {
- ...DEFAULT_APPLICATION_STATE,
- ...data,
- },
- });
- };
-
- describe('Title', () => {
- it('shows title', () => {
- mountComponent({ titleLink: null });
-
- const title = wrapper.find('.js-cluster-application-title');
-
- expect(title.element).toBeInstanceOf(HTMLSpanElement);
- expect(title.text()).toEqual(DEFAULT_APPLICATION_STATE.title);
- });
-
- it('shows title link', () => {
- expect(DEFAULT_APPLICATION_STATE.titleLink).toBeDefined();
- mountComponent();
- const title = wrapper.find('.js-cluster-application-title');
-
- expect(title.element).toBeInstanceOf(HTMLAnchorElement);
- expect(title.text()).toEqual(DEFAULT_APPLICATION_STATE.title);
- });
- });
-
- describe('Install button', () => {
- const button = () => wrapper.find('.js-cluster-application-install-button');
- const checkButtonState = (label, loading, disabled) => {
- expect(button().text()).toEqual(label);
- expect(button().props('loading')).toEqual(loading);
- expect(button().props('disabled')).toEqual(disabled);
- };
-
- it('has indeterminate state on page load', () => {
- mountComponent({ status: null });
-
- expect(button().text()).toBe('');
- });
-
- it('has install button', () => {
- mountComponent();
-
- expect(button().exists()).toBe(true);
- });
-
- it('has disabled "Install" when APPLICATION_STATUS.NOT_INSTALLABLE', () => {
- mountComponent({ status: APPLICATION_STATUS.NOT_INSTALLABLE });
-
- checkButtonState('Install', false, true);
- });
-
- it('has enabled "Install" when APPLICATION_STATUS.INSTALLABLE', () => {
- mountComponent({ status: APPLICATION_STATUS.INSTALLABLE });
-
- checkButtonState('Install', false, false);
- });
-
- it('has loading "Installing" when APPLICATION_STATUS.INSTALLING', () => {
- mountComponent({ status: APPLICATION_STATUS.INSTALLING });
-
- checkButtonState('Installing', true, true);
- });
-
- it('has disabled "Install" when APPLICATION_STATUS.UNINSTALLED', () => {
- mountComponent({ status: APPLICATION_STATUS.UNINSTALLED });
-
- checkButtonState('Install', false, true);
- });
-
- it('has disabled "Externally installed" when APPLICATION_STATUS.EXTERNALLY_INSTALLED', () => {
- mountComponent({ status: APPLICATION_STATUS.EXTERNALLY_INSTALLED });
-
- checkButtonState('Externally installed', false, true);
- });
-
- it('has disabled "Installed" when application is installed and not uninstallable', () => {
- mountComponent({
- status: APPLICATION_STATUS.INSTALLED,
- installed: true,
- uninstallable: false,
- });
-
- checkButtonState('Installed', false, true);
- });
-
- it('hides when application is installed and uninstallable', () => {
- mountComponent({
- status: APPLICATION_STATUS.INSTALLED,
- installed: true,
- uninstallable: true,
- });
-
- expect(button().exists()).toBe(false);
- });
-
- it('has enabled "Install" when install fails', () => {
- mountComponent({
- status: APPLICATION_STATUS.INSTALLABLE,
- installFailed: true,
- });
-
- checkButtonState('Install', false, false);
- });
-
- it('has disabled "Install" when installation disabled', () => {
- mountComponent({
- status: APPLICATION_STATUS.INSTALLABLE,
- installable: false,
- });
-
- checkButtonState('Install', false, true);
- });
-
- it('has enabled "Install" when REQUEST_FAILURE (so you can try installing again)', () => {
- mountComponent({ status: APPLICATION_STATUS.INSTALLABLE });
-
- checkButtonState('Install', false, false);
- });
-
- it('clicking install button emits event', () => {
- const spy = jest.spyOn(eventHub, '$emit');
- mountComponent({ status: APPLICATION_STATUS.INSTALLABLE });
-
- button().vm.$emit('click');
-
- expect(spy).toHaveBeenCalledWith('installApplication', {
- id: DEFAULT_APPLICATION_STATE.id,
- params: {},
- });
- });
-
- it('clicking install button when installApplicationRequestParams are provided emits event', () => {
- const spy = jest.spyOn(eventHub, '$emit');
- mountComponent({
- status: APPLICATION_STATUS.INSTALLABLE,
- installApplicationRequestParams: { hostname: 'jupyter' },
- });
-
- button().vm.$emit('click');
-
- expect(spy).toHaveBeenCalledWith('installApplication', {
- id: DEFAULT_APPLICATION_STATE.id,
- params: { hostname: 'jupyter' },
- });
- });
-
- it('clicking disabled install button emits nothing', () => {
- const spy = jest.spyOn(eventHub, '$emit');
- mountComponent({ status: APPLICATION_STATUS.INSTALLING });
-
- expect(button().props('disabled')).toEqual(true);
-
- button().vm.$emit('click');
-
- expect(spy).not.toHaveBeenCalled();
- });
- });
-
- describe('Uninstall button', () => {
- it('displays button when app is installed and uninstallable', () => {
- mountComponent({
- installed: true,
- uninstallable: true,
- status: APPLICATION_STATUS.NOT_INSTALLABLE,
- });
- const uninstallButton = wrapper.find('.js-cluster-application-uninstall-button');
-
- expect(uninstallButton.exists()).toBe(true);
- });
-
- it('displays a success toast message if application uninstall was successful', async () => {
- mountComponent({
- title: 'GitLab Runner',
- uninstallSuccessful: false,
- });
-
- wrapper.vm.$toast = { show: jest.fn() };
- wrapper.setProps({ uninstallSuccessful: true });
-
- await wrapper.vm.$nextTick();
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(
- 'GitLab Runner uninstalled successfully.',
- );
- });
- });
-
- describe('when confirmation modal triggers confirm event', () => {
- it('triggers uninstallApplication event', () => {
- jest.spyOn(eventHub, '$emit');
- mountComponent();
- wrapper.find(UninstallApplicationConfirmationModal).vm.$emit('confirm');
-
- expect(eventHub.$emit).toHaveBeenCalledWith('uninstallApplication', {
- id: DEFAULT_APPLICATION_STATE.id,
- });
- });
- });
-
- describe('Update button', () => {
- const button = () => wrapper.find('.js-cluster-application-update-button');
-
- it('has indeterminate state on page load', () => {
- mountComponent();
-
- expect(button().exists()).toBe(false);
- });
-
- it('has enabled "Update" when "updateAvailable" is true', () => {
- mountComponent({ updateAvailable: true });
-
- expect(button().exists()).toBe(true);
- expect(button().text()).toContain('Update');
- });
-
- it('has enabled "Retry update" when update process fails', () => {
- mountComponent({
- status: APPLICATION_STATUS.INSTALLED,
- updateFailed: true,
- });
-
- expect(button().exists()).toBe(true);
- expect(button().text()).toContain('Retry update');
- });
-
- it('has disabled "Updating" when APPLICATION_STATUS.UPDATING', () => {
- mountComponent({ status: APPLICATION_STATUS.UPDATING });
-
- expect(button().exists()).toBe(true);
- expect(button().text()).toContain('Updating');
- });
-
- it('clicking update button emits event', () => {
- const spy = jest.spyOn(eventHub, '$emit');
- mountComponent({
- status: APPLICATION_STATUS.INSTALLED,
- updateAvailable: true,
- });
-
- button().vm.$emit('click');
-
- expect(spy).toHaveBeenCalledWith('updateApplication', {
- id: DEFAULT_APPLICATION_STATE.id,
- params: {},
- });
- });
-
- it('clicking disabled update button emits nothing', () => {
- const spy = jest.spyOn(eventHub, '$emit');
- mountComponent({ status: APPLICATION_STATUS.UPDATING });
-
- button().vm.$emit('click');
-
- expect(spy).not.toHaveBeenCalled();
- });
-
- it('displays an error message if application update failed', () => {
- mountComponent({
- title: 'GitLab Runner',
- status: APPLICATION_STATUS.INSTALLED,
- updateFailed: true,
- });
- const failureMessage = wrapper.find('.js-cluster-application-update-details');
-
- expect(failureMessage.exists()).toBe(true);
- expect(failureMessage.text()).toContain(
- 'Update failed. Please check the logs and try again.',
- );
- });
-
- it('displays a success toast message if application update was successful', async () => {
- mountComponent({
- title: 'GitLab Runner',
- updateSuccessful: false,
- });
-
- wrapper.vm.$toast = { show: jest.fn() };
- wrapper.setProps({ updateSuccessful: true });
-
- await wrapper.vm.$nextTick();
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('GitLab Runner updated successfully.');
- });
-
- describe('when updating does not require confirmation', () => {
- beforeEach(() => mountComponent({ updateAvailable: true }));
-
- it('the modal is not rendered', () => {
- expect(wrapper.find(UpdateApplicationConfirmationModal).exists()).toBe(false);
- });
-
- it('the correct button is rendered', () => {
- expect(wrapper.find("[data-qa-selector='update_button']").exists()).toBe(true);
- });
- });
-
- describe('when updating requires confirmation', () => {
- beforeEach(() => {
- mountComponent({
- updateAvailable: true,
- id: ELASTIC_STACK,
- version: '1.1.2',
- });
- });
-
- it('displays a modal', () => {
- expect(wrapper.find(UpdateApplicationConfirmationModal).exists()).toBe(true);
- });
-
- it('the correct button is rendered', () => {
- expect(wrapper.find("[data-qa-selector='update_button_with_confirmation']").exists()).toBe(
- true,
- );
- });
-
- it('triggers updateApplication event', () => {
- jest.spyOn(eventHub, '$emit');
- wrapper.find(UpdateApplicationConfirmationModal).vm.$emit('confirm');
-
- expect(eventHub.$emit).toHaveBeenCalledWith('updateApplication', {
- id: ELASTIC_STACK,
- params: {},
- });
- });
- });
-
- describe('updating Elastic Stack special case', () => {
- it('needs confirmation if version is lower than 3.0.0', () => {
- mountComponent({
- updateAvailable: true,
- id: ELASTIC_STACK,
- version: '1.1.2',
- });
-
- expect(wrapper.find("[data-qa-selector='update_button_with_confirmation']").exists()).toBe(
- true,
- );
- expect(wrapper.find(UpdateApplicationConfirmationModal).exists()).toBe(true);
- });
-
- it('does not need confirmation is version is 3.0.0', () => {
- mountComponent({
- updateAvailable: true,
- id: ELASTIC_STACK,
- version: '3.0.0',
- });
-
- expect(wrapper.find("[data-qa-selector='update_button']").exists()).toBe(true);
- expect(wrapper.find(UpdateApplicationConfirmationModal).exists()).toBe(false);
- });
-
- it('does not need confirmation if version is higher than 3.0.0', () => {
- mountComponent({
- updateAvailable: true,
- id: ELASTIC_STACK,
- version: '5.2.1',
- });
-
- expect(wrapper.find("[data-qa-selector='update_button']").exists()).toBe(true);
- expect(wrapper.find(UpdateApplicationConfirmationModal).exists()).toBe(false);
- });
- });
- });
-
- describe('Version', () => {
- const updateDetails = () => wrapper.find('.js-cluster-application-update-details');
- const versionEl = () => wrapper.find('.js-cluster-application-update-version');
-
- it('displays a version number if application has been updated', () => {
- const version = '0.1.45';
- mountComponent({
- status: APPLICATION_STATUS.INSTALLED,
- updateSuccessful: true,
- version,
- });
-
- expect(updateDetails().text()).toBe(`Updated to chart v${version}`);
- });
-
- it('contains a link to the chart repo if application has been updated', () => {
- const version = '0.1.45';
- const chartRepo = 'https://gitlab.com/gitlab-org/charts/gitlab-runner';
- mountComponent({
- status: APPLICATION_STATUS.INSTALLED,
- updateSuccessful: true,
- chartRepo,
- version,
- });
-
- expect(versionEl().attributes('href')).toEqual(chartRepo);
- expect(versionEl().props('target')).toEqual('_blank');
- });
-
- it('does not display a version number if application update failed', () => {
- const version = '0.1.45';
- mountComponent({
- status: APPLICATION_STATUS.INSTALLED,
- updateFailed: true,
- version,
- });
-
- expect(updateDetails().text()).toBe('Update failed');
- expect(versionEl().exists()).toBe(false);
- });
-
- it('displays updating when the application update is currently updating', () => {
- mountComponent({
- status: APPLICATION_STATUS.UPDATING,
- updateSuccessful: true,
- version: '1.2.3',
- });
-
- expect(updateDetails().text()).toBe('Updating');
- expect(versionEl().exists()).toBe(false);
- });
- });
-
- describe('Error block', () => {
- const generalErrorMessage = () => wrapper.find('.js-cluster-application-general-error-message');
-
- describe('when nothing fails', () => {
- it('does not show error block', () => {
- mountComponent();
-
- expect(generalErrorMessage().exists()).toBe(false);
- });
- });
-
- describe('when install or uninstall fails', () => {
- const statusReason = 'We broke it 0.0';
- const requestReason = 'We broke the request 0.0';
-
- beforeEach(() => {
- mountComponent({
- status: APPLICATION_STATUS.ERROR,
- statusReason,
- requestReason,
- installFailed: true,
- });
- });
-
- it('shows status reason if it is available', () => {
- const statusErrorMessage = wrapper.find('.js-cluster-application-status-error-message');
-
- expect(statusErrorMessage.text()).toEqual(statusReason);
- });
-
- it('shows request reason if it is available', () => {
- const requestErrorMessage = wrapper.find('.js-cluster-application-request-error-message');
-
- expect(requestErrorMessage.text()).toEqual(requestReason);
- });
- });
-
- describe('when install fails', () => {
- beforeEach(() => {
- mountComponent({
- status: APPLICATION_STATUS.ERROR,
- installFailed: true,
- });
- });
-
- it('shows a general message indicating the installation failed', () => {
- expect(generalErrorMessage().text()).toEqual(
- `Something went wrong while installing ${DEFAULT_APPLICATION_STATE.title}`,
- );
- });
- });
-
- describe('when uninstall fails', () => {
- beforeEach(() => {
- mountComponent({
- status: APPLICATION_STATUS.ERROR,
- uninstallFailed: true,
- });
- });
-
- it('shows a general message indicating the uninstalling failed', () => {
- expect(generalErrorMessage().text()).toEqual(
- `Something went wrong while uninstalling ${DEFAULT_APPLICATION_STATE.title}`,
- );
- });
- });
- });
-});
diff --git a/spec/frontend/clusters/components/applications_spec.js b/spec/frontend/clusters/components/applications_spec.js
deleted file mode 100644
index 511f5fc1d89..00000000000
--- a/spec/frontend/clusters/components/applications_spec.js
+++ /dev/null
@@ -1,510 +0,0 @@
-import { shallowMount, mount } from '@vue/test-utils';
-import ApplicationRow from '~/clusters/components/application_row.vue';
-import Applications from '~/clusters/components/applications.vue';
-import CrossplaneProviderStack from '~/clusters/components/crossplane_provider_stack.vue';
-import KnativeDomainEditor from '~/clusters/components/knative_domain_editor.vue';
-import { CLUSTER_TYPE, PROVIDER_TYPE } from '~/clusters/constants';
-import eventHub from '~/clusters/event_hub';
-import { APPLICATIONS_MOCK_STATE } from '../services/mock_data';
-
-describe('Applications', () => {
- let wrapper;
-
- beforeEach(() => {
- gon.features = gon.features || {};
- });
-
- const createComponent = ({ applications, type, propsData } = {}, isShallow) => {
- const mountMethod = isShallow ? shallowMount : mount;
-
- wrapper = mountMethod(Applications, {
- stubs: { ApplicationRow },
- propsData: {
- type,
- applications: { ...APPLICATIONS_MOCK_STATE, ...applications },
- ...propsData,
- },
- });
- };
-
- const createShallowComponent = (options) => createComponent(options, true);
- const findByTestId = (id) => wrapper.find(`[data-testid="${id}"]`);
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('Project cluster applications', () => {
- beforeEach(() => {
- createComponent({ type: CLUSTER_TYPE.PROJECT });
- });
-
- it('renders a row for Ingress', () => {
- expect(wrapper.find('.js-cluster-application-row-ingress').exists()).toBe(true);
- });
-
- it('renders a row for Cert-Manager', () => {
- expect(wrapper.find('.js-cluster-application-row-cert_manager').exists()).toBe(true);
- });
-
- it('renders a row for Crossplane', () => {
- expect(wrapper.find('.js-cluster-application-row-crossplane').exists()).toBe(true);
- });
-
- it('renders a row for Prometheus', () => {
- expect(wrapper.find('.js-cluster-application-row-prometheus').exists()).toBe(true);
- });
-
- it('renders a row for GitLab Runner', () => {
- expect(wrapper.find('.js-cluster-application-row-runner').exists()).toBe(true);
- });
-
- it('renders a row for Jupyter', () => {
- expect(wrapper.find('.js-cluster-application-row-jupyter').exists()).toBe(true);
- });
-
- it('renders a row for Knative', () => {
- expect(wrapper.find('.js-cluster-application-row-knative').exists()).toBe(true);
- });
-
- it('renders a row for Elastic Stack', () => {
- expect(wrapper.find('.js-cluster-application-row-elastic_stack').exists()).toBe(true);
- });
-
- it('renders a row for Cilium', () => {
- expect(wrapper.find('.js-cluster-application-row-cilium').exists()).toBe(true);
- });
- });
-
- describe('Group cluster applications', () => {
- beforeEach(() => {
- createComponent({ type: CLUSTER_TYPE.GROUP });
- });
-
- it('renders a row for Ingress', () => {
- expect(wrapper.find('.js-cluster-application-row-ingress').exists()).toBe(true);
- });
-
- it('renders a row for Cert-Manager', () => {
- expect(wrapper.find('.js-cluster-application-row-cert_manager').exists()).toBe(true);
- });
-
- it('renders a row for Crossplane', () => {
- expect(wrapper.find('.js-cluster-application-row-crossplane').exists()).toBe(true);
- });
-
- it('renders a row for Prometheus', () => {
- expect(wrapper.find('.js-cluster-application-row-prometheus').exists()).toBe(true);
- });
-
- it('renders a row for GitLab Runner', () => {
- expect(wrapper.find('.js-cluster-application-row-runner').exists()).toBe(true);
- });
-
- it('renders a row for Jupyter', () => {
- expect(wrapper.find('.js-cluster-application-row-jupyter').exists()).toBe(true);
- });
-
- it('renders a row for Knative', () => {
- expect(wrapper.find('.js-cluster-application-row-knative').exists()).toBe(true);
- });
-
- it('renders a row for Elastic Stack', () => {
- expect(wrapper.find('.js-cluster-application-row-elastic_stack').exists()).toBe(true);
- });
-
- it('renders a row for Cilium', () => {
- expect(wrapper.find('.js-cluster-application-row-cilium').exists()).toBe(true);
- });
- });
-
- describe('Instance cluster applications', () => {
- beforeEach(() => {
- createComponent({ type: CLUSTER_TYPE.INSTANCE });
- });
-
- it('renders a row for Ingress', () => {
- expect(wrapper.find('.js-cluster-application-row-ingress').exists()).toBe(true);
- });
-
- it('renders a row for Cert-Manager', () => {
- expect(wrapper.find('.js-cluster-application-row-cert_manager').exists()).toBe(true);
- });
-
- it('renders a row for Crossplane', () => {
- expect(wrapper.find('.js-cluster-application-row-crossplane').exists()).toBe(true);
- });
-
- it('renders a row for Prometheus', () => {
- expect(wrapper.find('.js-cluster-application-row-prometheus').exists()).toBe(true);
- });
-
- it('renders a row for GitLab Runner', () => {
- expect(wrapper.find('.js-cluster-application-row-runner').exists()).toBe(true);
- });
-
- it('renders a row for Jupyter', () => {
- expect(wrapper.find('.js-cluster-application-row-jupyter').exists()).toBe(true);
- });
-
- it('renders a row for Knative', () => {
- expect(wrapper.find('.js-cluster-application-row-knative').exists()).toBe(true);
- });
-
- it('renders a row for Elastic Stack', () => {
- expect(wrapper.find('.js-cluster-application-row-elastic_stack').exists()).toBe(true);
- });
-
- it('renders a row for Cilium', () => {
- expect(wrapper.find('.js-cluster-application-row-cilium').exists()).toBe(true);
- });
- });
-
- describe('Helm application', () => {
- it('does not render a row for Helm Tiller', () => {
- createComponent();
- expect(wrapper.find('.js-cluster-application-row-helm').exists()).toBe(false);
- });
- });
-
- describe('Ingress application', () => {
- it('shows the correct warning message', () => {
- createComponent();
- expect(findByTestId('ingressCostWarning').element).toMatchSnapshot();
- });
-
- describe('when installed', () => {
- describe('with ip address', () => {
- it('renders ip address with a clipboard button', () => {
- createComponent({
- applications: {
- ingress: {
- title: 'Ingress',
- status: 'installed',
- externalIp: '0.0.0.0',
- },
- },
- });
-
- expect(wrapper.find('.js-endpoint').element.value).toEqual('0.0.0.0');
- expect(wrapper.find('.js-clipboard-btn').attributes('data-clipboard-text')).toEqual(
- '0.0.0.0',
- );
- });
- });
-
- describe('with hostname', () => {
- it('renders hostname with a clipboard button', () => {
- createComponent({
- applications: {
- ingress: {
- title: 'Ingress',
- status: 'installed',
- externalHostname: 'localhost.localdomain',
- },
- cert_manager: { title: 'Cert-Manager' },
- crossplane: { title: 'Crossplane', stack: '' },
- runner: { title: 'GitLab Runner' },
- prometheus: { title: 'Prometheus' },
- jupyter: { title: 'JupyterHub', hostname: '' },
- knative: { title: 'Knative', hostname: '' },
- elastic_stack: { title: 'Elastic Stack' },
- cilium: { title: 'GitLab Container Network Policies' },
- },
- });
-
- expect(wrapper.find('.js-endpoint').element.value).toEqual('localhost.localdomain');
-
- expect(wrapper.find('.js-clipboard-btn').attributes('data-clipboard-text')).toEqual(
- 'localhost.localdomain',
- );
- });
- });
-
- describe('without ip address', () => {
- it('renders an input text with a loading icon and an alert text', () => {
- createComponent({
- applications: {
- ingress: {
- title: 'Ingress',
- status: 'installed',
- },
- },
- });
-
- expect(wrapper.find('.js-ingress-ip-loading-icon').exists()).toBe(true);
- expect(wrapper.find('.js-no-endpoint-message').exists()).toBe(true);
- });
- });
- });
-
- describe('before installing', () => {
- it('does not render the IP address', () => {
- createComponent();
-
- expect(wrapper.text()).not.toContain('Ingress IP Address');
- expect(wrapper.find('.js-endpoint').exists()).toBe(false);
- });
- });
- });
-
- describe('Cert-Manager application', () => {
- it('shows the correct description', () => {
- createComponent();
- expect(findByTestId('certManagerDescription').element).toMatchSnapshot();
- });
-
- describe('when not installed', () => {
- it('renders email & allows editing', () => {
- createComponent({
- applications: {
- cert_manager: {
- title: 'Cert-Manager',
- email: 'before@example.com',
- status: 'installable',
- },
- },
- });
-
- expect(wrapper.find('.js-email').element.value).toEqual('before@example.com');
- expect(wrapper.find('.js-email').attributes('readonly')).toBe(undefined);
- });
- });
-
- describe('when installed', () => {
- it('renders email in readonly', () => {
- createComponent({
- applications: {
- cert_manager: {
- title: 'Cert-Manager',
- email: 'after@example.com',
- status: 'installed',
- },
- },
- });
-
- expect(wrapper.find('.js-email').element.value).toEqual('after@example.com');
- expect(wrapper.find('.js-email').attributes('readonly')).toEqual('readonly');
- });
- });
- });
-
- describe('Jupyter application', () => {
- describe('with ingress installed with ip & jupyter installable', () => {
- it('renders hostname active input', () => {
- createComponent({
- applications: {
- ingress: {
- title: 'Ingress',
- status: 'installed',
- externalIp: '1.1.1.1',
- },
- },
- });
-
- expect(
- wrapper.find('.js-cluster-application-row-jupyter .js-hostname').attributes('readonly'),
- ).toEqual(undefined);
- });
- });
-
- describe('with ingress installed without external ip', () => {
- it('does not render hostname input', () => {
- createComponent({
- applications: {
- ingress: { title: 'Ingress', status: 'installed' },
- },
- });
-
- expect(wrapper.find('.js-cluster-application-row-jupyter .js-hostname').exists()).toBe(
- false,
- );
- });
- });
-
- describe('with ingress & jupyter installed', () => {
- it('renders readonly input', () => {
- createComponent({
- applications: {
- ingress: {
- title: 'Ingress',
- status: 'installed',
- externalIp: '1.1.1.1',
- },
- jupyter: { title: 'JupyterHub', status: 'installed', hostname: '' },
- },
- });
-
- expect(
- wrapper.find('.js-cluster-application-row-jupyter .js-hostname').attributes('readonly'),
- ).toEqual('readonly');
- });
- });
-
- describe('without ingress installed', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('does not render input', () => {
- expect(wrapper.find('.js-cluster-application-row-jupyter .js-hostname').exists()).toBe(
- false,
- );
- });
- });
- });
-
- describe('Prometheus application', () => {
- it('shows the correct description', () => {
- createComponent();
- expect(findByTestId('prometheusDescription').element).toMatchSnapshot();
- });
- });
-
- describe('Knative application', () => {
- const availableDomain = {
- id: 4,
- domain: 'newhostname.com',
- };
- const propsData = {
- applications: {
- knative: {
- title: 'Knative',
- hostname: 'example.com',
- status: 'installed',
- externalIp: '1.1.1.1',
- installed: true,
- availableDomains: [availableDomain],
- pagesDomain: null,
- },
- },
- };
- let knativeDomainEditor;
-
- beforeEach(() => {
- createShallowComponent(propsData);
- jest.spyOn(eventHub, '$emit');
-
- knativeDomainEditor = wrapper.find(KnativeDomainEditor);
- });
-
- it('shows the correct description', async () => {
- createComponent();
- wrapper.setProps({
- providerType: PROVIDER_TYPE.GCP,
- preInstalledKnative: true,
- });
-
- await wrapper.vm.$nextTick();
-
- expect(findByTestId('installed-via').element).toMatchSnapshot();
- });
-
- it('emits saveKnativeDomain event when knative domain editor emits save event', () => {
- propsData.applications.knative.hostname = availableDomain.domain;
- propsData.applications.knative.pagesDomain = availableDomain;
- knativeDomainEditor.vm.$emit('save');
-
- expect(eventHub.$emit).toHaveBeenCalledWith('saveKnativeDomain', {
- id: 'knative',
- params: {
- hostname: availableDomain.domain,
- pages_domain_id: availableDomain.id,
- },
- });
- });
-
- it('emits saveKnativeDomain event when knative domain editor emits save event with custom domain', () => {
- const newHostName = 'someothernewhostname.com';
- propsData.applications.knative.hostname = newHostName;
- propsData.applications.knative.pagesDomain = null;
- knativeDomainEditor.vm.$emit('save');
-
- expect(eventHub.$emit).toHaveBeenCalledWith('saveKnativeDomain', {
- id: 'knative',
- params: {
- hostname: newHostName,
- pages_domain_id: undefined,
- },
- });
- });
-
- it('emits setKnativeHostname event when knative domain editor emits change event', () => {
- wrapper.find(KnativeDomainEditor).vm.$emit('set', {
- domain: availableDomain.domain,
- domainId: availableDomain.id,
- });
-
- expect(eventHub.$emit).toHaveBeenCalledWith('setKnativeDomain', {
- id: 'knative',
- domain: availableDomain.domain,
- domainId: availableDomain.id,
- });
- });
- });
-
- describe('Crossplane application', () => {
- const propsData = {
- applications: {
- crossplane: {
- title: 'Crossplane',
- stack: {
- code: '',
- },
- },
- },
- };
-
- beforeEach(() => createShallowComponent(propsData));
-
- it('renders the correct Component', () => {
- const crossplane = wrapper.find(CrossplaneProviderStack);
- expect(crossplane.exists()).toBe(true);
- });
-
- it('shows the correct description', () => {
- createComponent();
- expect(findByTestId('crossplaneDescription').element).toMatchSnapshot();
- });
- });
-
- describe('Elastic Stack application', () => {
- describe('with elastic stack installable', () => {
- it('renders the install button enabled', () => {
- createComponent();
-
- expect(
- wrapper
- .find(
- '.js-cluster-application-row-elastic_stack .js-cluster-application-install-button',
- )
- .attributes('disabled'),
- ).toBeUndefined();
- });
- });
-
- describe('elastic stack installed', () => {
- it('renders uninstall button', () => {
- createComponent({
- applications: {
- elastic_stack: { title: 'Elastic Stack', status: 'installed' },
- },
- });
-
- expect(
- wrapper
- .find(
- '.js-cluster-application-row-elastic_stack .js-cluster-application-install-button',
- )
- .attributes('disabled'),
- ).toEqual('disabled');
- });
- });
- });
-
- describe('Cilium application', () => {
- it('shows the correct description', () => {
- createComponent({ propsData: { ciliumHelpPath: 'cilium-help-path' } });
- expect(findByTestId('ciliumDescription').element).toMatchSnapshot();
- });
- });
-});
diff --git a/spec/frontend/clusters/components/knative_domain_editor_spec.js b/spec/frontend/clusters/components/knative_domain_editor_spec.js
deleted file mode 100644
index 207eb071171..00000000000
--- a/spec/frontend/clusters/components/knative_domain_editor_spec.js
+++ /dev/null
@@ -1,179 +0,0 @@
-import { GlDropdownItem, GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import KnativeDomainEditor from '~/clusters/components/knative_domain_editor.vue';
-import { APPLICATION_STATUS } from '~/clusters/constants';
-
-const { UPDATING } = APPLICATION_STATUS;
-
-describe('KnativeDomainEditor', () => {
- let wrapper;
- let knative;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(KnativeDomainEditor, {
- propsData: { ...props },
- });
- };
-
- beforeEach(() => {
- knative = {
- title: 'Knative',
- hostname: 'example.com',
- installed: true,
- };
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- describe('knative has an assigned IP address', () => {
- beforeEach(() => {
- knative.externalIp = '1.1.1.1';
- createComponent({ knative });
- });
-
- it('renders ip address with a clipboard button', () => {
- expect(wrapper.find('.js-knative-endpoint').exists()).toBe(true);
- expect(wrapper.find('.js-knative-endpoint').element.value).toEqual(knative.externalIp);
- });
-
- it('displays ip address clipboard button', () => {
- expect(wrapper.find('.js-knative-endpoint-clipboard-btn').attributes('text')).toEqual(
- knative.externalIp,
- );
- });
-
- it('renders domain & allows editing', () => {
- const domainNameInput = wrapper.find('.js-knative-domainname');
-
- expect(domainNameInput.element.value).toEqual(knative.hostname);
- expect(domainNameInput.attributes('readonly')).toBeFalsy();
- });
-
- it('renders an update/save Knative domain button', () => {
- expect(wrapper.find('.js-knative-save-domain-button').exists()).toBe(true);
- });
- });
-
- describe('knative without ip address', () => {
- beforeEach(() => {
- knative.externalIp = null;
- createComponent({ knative });
- });
-
- it('renders an input text with a loading icon', () => {
- expect(wrapper.find('.js-knative-ip-loading-icon').exists()).toBe(true);
- });
-
- it('renders message indicating there is not IP address assigned', () => {
- expect(wrapper.find('.js-no-knative-endpoint-message').exists()).toBe(true);
- });
- });
-
- describe('clicking save changes button', () => {
- beforeEach(() => {
- createComponent({ knative });
- });
-
- it('triggers save event and pass current knative hostname', () => {
- wrapper.find(GlButton).vm.$emit('click');
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.emitted('save').length).toEqual(1);
- });
- });
- });
-
- describe('when knative domain name was saved successfully', () => {
- beforeEach(() => {
- createComponent({ knative });
- });
-
- it('displays toast indicating a successful update', () => {
- wrapper.vm.$toast = { show: jest.fn() };
- wrapper.setProps({ knative: { updateSuccessful: true, ...knative } });
-
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(
- 'Knative domain name was updated successfully.',
- );
- });
- });
- });
-
- describe('when knative domain name input changes', () => {
- it('emits "set" event with updated domain name', () => {
- const newDomain = {
- id: 4,
- domain: 'newhostname.com',
- };
-
- createComponent({ knative: { ...knative, availableDomains: [newDomain] } });
- jest.spyOn(wrapper.vm, 'selectDomain');
-
- wrapper.find(GlDropdownItem).vm.$emit('click');
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.selectDomain).toHaveBeenCalledWith(newDomain);
- expect(wrapper.emitted('set')[0]).toEqual([
- {
- domain: newDomain.domain,
- domainId: newDomain.id,
- },
- ]);
- });
- });
-
- it('emits "set" event with updated custom domain name', () => {
- const newHostname = 'newhostname.com';
-
- createComponent({ knative });
- jest.spyOn(wrapper.vm, 'selectCustomDomain');
-
- wrapper.setData({ knativeHostname: newHostname });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.selectCustomDomain).toHaveBeenCalledWith(newHostname);
- expect(wrapper.emitted('set')[0]).toEqual([
- {
- domain: newHostname,
- domainId: null,
- },
- ]);
- });
- });
- });
-
- describe('when updating knative domain name failed', () => {
- beforeEach(() => {
- createComponent({ knative });
- });
-
- it('displays an error banner indicating the operation failure', () => {
- wrapper.setProps({ knative: { updateFailed: true, ...knative } });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find('.js-cluster-knative-domain-name-failure-message').exists()).toBe(true);
- });
- });
- });
-
- describe(`when knative status is ${UPDATING}`, () => {
- beforeEach(() => {
- createComponent({ knative: { status: UPDATING, ...knative } });
- });
-
- it('renders loading spinner in save button', () => {
- expect(wrapper.find(GlButton).props('loading')).toBe(true);
- });
-
- it('renders disabled save button', () => {
- expect(wrapper.find(GlButton).props('disabled')).toBe(true);
- });
-
- it('renders save button with "Saving" label', () => {
- expect(wrapper.find(GlButton).text()).toBe('Saving');
- });
- });
-});
diff --git a/spec/frontend/clusters/components/uninstall_application_button_spec.js b/spec/frontend/clusters/components/uninstall_application_button_spec.js
deleted file mode 100644
index 2596820e5ac..00000000000
--- a/spec/frontend/clusters/components/uninstall_application_button_spec.js
+++ /dev/null
@@ -1,39 +0,0 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import UninstallApplicationButton from '~/clusters/components/uninstall_application_button.vue';
-import { APPLICATION_STATUS } from '~/clusters/constants';
-
-const { INSTALLED, UPDATING, UNINSTALLING } = APPLICATION_STATUS;
-
-describe('UninstallApplicationButton', () => {
- let wrapper;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(UninstallApplicationButton, {
- propsData: { ...props },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe.each`
- status | loading | disabled | text
- ${INSTALLED} | ${false} | ${false} | ${'Uninstall'}
- ${UPDATING} | ${false} | ${true} | ${'Uninstall'}
- ${UNINSTALLING} | ${true} | ${true} | ${'Uninstalling'}
- `('when app status is $status', ({ loading, disabled, status, text }) => {
- beforeEach(() => {
- createComponent({ status });
- });
-
- it(`renders a button with loading=${loading} and disabled=${disabled}`, () => {
- expect(wrapper.find(GlButton).props()).toMatchObject({ loading, disabled });
- });
-
- it(`renders a button with text="${text}"`, () => {
- expect(wrapper.find(GlButton).text()).toBe(text);
- });
- });
-});
diff --git a/spec/frontend/clusters/components/uninstall_application_confirmation_modal_spec.js b/spec/frontend/clusters/components/uninstall_application_confirmation_modal_spec.js
deleted file mode 100644
index 74ae4ecc486..00000000000
--- a/spec/frontend/clusters/components/uninstall_application_confirmation_modal_spec.js
+++ /dev/null
@@ -1,57 +0,0 @@
-import { GlModal } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import UninstallApplicationConfirmationModal from '~/clusters/components/uninstall_application_confirmation_modal.vue';
-import { INGRESS } from '~/clusters/constants';
-
-describe('UninstallApplicationConfirmationModal', () => {
- let wrapper;
- const appTitle = 'Ingress';
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(UninstallApplicationConfirmationModal, {
- propsData: { ...props },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- beforeEach(() => {
- createComponent({ application: INGRESS, applicationTitle: appTitle });
- });
-
- it(`renders a modal with a title "Uninstall ${appTitle}"`, () => {
- expect(wrapper.find(GlModal).attributes('title')).toEqual(`Uninstall ${appTitle}`);
- });
-
- it(`renders a modal with an ok button labeled "Uninstall ${appTitle}"`, () => {
- expect(wrapper.find(GlModal).attributes('ok-title')).toEqual(`Uninstall ${appTitle}`);
- });
-
- describe('when ok button is clicked', () => {
- beforeEach(() => {
- jest.spyOn(wrapper.vm, 'trackUninstallButtonClick');
- wrapper.find(GlModal).vm.$emit('ok');
- });
-
- it('emits confirm event', () =>
- wrapper.vm.$nextTick().then(() => {
- expect(wrapper.emitted('confirm')).toBeTruthy();
- }));
-
- it('calls track uninstall button click mixin', () => {
- expect(wrapper.vm.trackUninstallButtonClick).toHaveBeenCalledWith(INGRESS);
- });
- });
-
- it('displays a warning text indicating the app will be uninstalled', () => {
- expect(wrapper.text()).toContain(`You are about to uninstall ${appTitle} from your cluster.`);
- });
-
- it('displays a custom warning text depending on the application', () => {
- expect(wrapper.text()).toContain(
- `The associated load balancer and IP will be deleted and cannot be restored.`,
- );
- });
-});
diff --git a/spec/frontend/clusters/components/update_application_confirmation_modal_spec.js b/spec/frontend/clusters/components/update_application_confirmation_modal_spec.js
deleted file mode 100644
index e933f17a980..00000000000
--- a/spec/frontend/clusters/components/update_application_confirmation_modal_spec.js
+++ /dev/null
@@ -1,52 +0,0 @@
-import { GlModal } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import UpdateApplicationConfirmationModal from '~/clusters/components/update_application_confirmation_modal.vue';
-import { ELASTIC_STACK } from '~/clusters/constants';
-
-describe('UpdateApplicationConfirmationModal', () => {
- let wrapper;
- const appTitle = 'Elastic stack';
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(UpdateApplicationConfirmationModal, {
- propsData: { ...props },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- beforeEach(() => {
- createComponent({ application: ELASTIC_STACK, applicationTitle: appTitle });
- });
-
- it(`renders a modal with a title "Update ${appTitle}"`, () => {
- expect(wrapper.find(GlModal).attributes('title')).toEqual(`Update ${appTitle}`);
- });
-
- it(`renders a modal with an ok button labeled "Update ${appTitle}"`, () => {
- expect(wrapper.find(GlModal).attributes('ok-title')).toEqual(`Update ${appTitle}`);
- });
-
- describe('when ok button is clicked', () => {
- beforeEach(() => {
- wrapper.find(GlModal).vm.$emit('ok');
- });
-
- it('emits confirm event', () =>
- wrapper.vm.$nextTick().then(() => {
- expect(wrapper.emitted('confirm')).toBeTruthy();
- }));
-
- it('displays a warning text indicating the app will be updated', () => {
- expect(wrapper.text()).toContain(`You are about to update ${appTitle} on your cluster.`);
- });
-
- it('displays a custom warning text depending on the application', () => {
- expect(wrapper.text()).toContain(
- `Your Elasticsearch cluster will be re-created during this upgrade. Your logs will be re-indexed, and you will lose historical logs from hosts terminated in the last 30 days.`,
- );
- });
- });
-});
diff --git a/spec/frontend/clusters/services/application_state_machine_spec.js b/spec/frontend/clusters/services/application_state_machine_spec.js
deleted file mode 100644
index 4e731e331c2..00000000000
--- a/spec/frontend/clusters/services/application_state_machine_spec.js
+++ /dev/null
@@ -1,206 +0,0 @@
-import {
- APPLICATION_STATUS,
- UNINSTALL_EVENT,
- UPDATE_EVENT,
- INSTALL_EVENT,
-} from '~/clusters/constants';
-import transitionApplicationState from '~/clusters/services/application_state_machine';
-
-const {
- NO_STATUS,
- SCHEDULED,
- NOT_INSTALLABLE,
- INSTALLABLE,
- INSTALLING,
- INSTALLED,
- ERROR,
- UPDATING,
- UPDATED,
- UPDATE_ERRORED,
- UNINSTALLING,
- UNINSTALL_ERRORED,
- UNINSTALLED,
- PRE_INSTALLED,
- EXTERNALLY_INSTALLED,
-} = APPLICATION_STATUS;
-
-const NO_EFFECTS = 'no effects';
-
-describe('applicationStateMachine', () => {
- const noEffectsToEmptyObject = (effects) => (typeof effects === 'string' ? {} : effects);
-
- describe(`current state is ${NO_STATUS}`, () => {
- it.each`
- expectedState | event | effects
- ${INSTALLING} | ${SCHEDULED} | ${NO_EFFECTS}
- ${NOT_INSTALLABLE} | ${NOT_INSTALLABLE} | ${NO_EFFECTS}
- ${INSTALLABLE} | ${INSTALLABLE} | ${NO_EFFECTS}
- ${INSTALLING} | ${INSTALLING} | ${NO_EFFECTS}
- ${INSTALLED} | ${INSTALLED} | ${NO_EFFECTS}
- ${INSTALLABLE} | ${ERROR} | ${{ installFailed: true }}
- ${UPDATING} | ${UPDATING} | ${NO_EFFECTS}
- ${INSTALLED} | ${UPDATED} | ${NO_EFFECTS}
- ${INSTALLED} | ${UPDATE_ERRORED} | ${{ updateFailed: true }}
- ${UNINSTALLING} | ${UNINSTALLING} | ${NO_EFFECTS}
- ${INSTALLED} | ${UNINSTALL_ERRORED} | ${{ uninstallFailed: true }}
- ${UNINSTALLED} | ${UNINSTALLED} | ${NO_EFFECTS}
- ${PRE_INSTALLED} | ${PRE_INSTALLED} | ${NO_EFFECTS}
- ${EXTERNALLY_INSTALLED} | ${EXTERNALLY_INSTALLED} | ${NO_EFFECTS}
- `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
- const { expectedState, event, effects } = data;
- const currentAppState = {
- status: NO_STATUS,
- };
-
- expect(transitionApplicationState(currentAppState, event)).toEqual({
- status: expectedState,
- ...noEffectsToEmptyObject(effects),
- });
- });
- });
-
- describe(`current state is ${NOT_INSTALLABLE}`, () => {
- it.each`
- expectedState | event | effects
- ${INSTALLABLE} | ${INSTALLABLE} | ${NO_EFFECTS}
- `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
- const { expectedState, event, effects } = data;
- const currentAppState = {
- status: NOT_INSTALLABLE,
- };
-
- expect(transitionApplicationState(currentAppState, event)).toEqual({
- status: expectedState,
- ...noEffectsToEmptyObject(effects),
- });
- });
- });
-
- describe(`current state is ${INSTALLABLE}`, () => {
- it.each`
- expectedState | event | effects
- ${INSTALLING} | ${INSTALL_EVENT} | ${{ installFailed: false }}
- ${INSTALLED} | ${INSTALLED} | ${{ installFailed: false }}
- ${NOT_INSTALLABLE} | ${NOT_INSTALLABLE} | ${NO_EFFECTS}
- ${UNINSTALLED} | ${UNINSTALLED} | ${{ installFailed: false }}
- `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
- const { expectedState, event, effects } = data;
- const currentAppState = {
- status: INSTALLABLE,
- };
-
- expect(transitionApplicationState(currentAppState, event)).toEqual({
- status: expectedState,
- ...noEffectsToEmptyObject(effects),
- });
- });
- });
-
- describe(`current state is ${INSTALLING}`, () => {
- it.each`
- expectedState | event | effects
- ${INSTALLED} | ${INSTALLED} | ${NO_EFFECTS}
- ${INSTALLABLE} | ${ERROR} | ${{ installFailed: true }}
- `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
- const { expectedState, event, effects } = data;
- const currentAppState = {
- status: INSTALLING,
- };
-
- expect(transitionApplicationState(currentAppState, event)).toEqual({
- status: expectedState,
- ...noEffectsToEmptyObject(effects),
- });
- });
- });
-
- describe(`current state is ${INSTALLED}`, () => {
- it.each`
- expectedState | event | effects
- ${UPDATING} | ${UPDATE_EVENT} | ${{ updateFailed: false, updateSuccessful: false }}
- ${UNINSTALLING} | ${UNINSTALL_EVENT} | ${{ uninstallFailed: false, uninstallSuccessful: false }}
- ${NOT_INSTALLABLE} | ${NOT_INSTALLABLE} | ${NO_EFFECTS}
- ${UNINSTALLED} | ${UNINSTALLED} | ${NO_EFFECTS}
- ${INSTALLABLE} | ${ERROR} | ${{ installFailed: true }}
- `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
- const { expectedState, event, effects } = data;
- const currentAppState = {
- status: INSTALLED,
- };
-
- expect(transitionApplicationState(currentAppState, event)).toEqual({
- status: expectedState,
- ...noEffectsToEmptyObject(effects),
- });
- });
- });
-
- describe(`current state is ${UPDATING}`, () => {
- it.each`
- expectedState | event | effects
- ${INSTALLED} | ${UPDATED} | ${{ updateSuccessful: true }}
- ${INSTALLED} | ${UPDATE_ERRORED} | ${{ updateFailed: true }}
- `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
- const { expectedState, event, effects } = data;
- const currentAppState = {
- status: UPDATING,
- };
-
- expect(transitionApplicationState(currentAppState, event)).toEqual({
- status: expectedState,
- ...effects,
- });
- });
- });
-
- describe(`current state is ${UNINSTALLING}`, () => {
- it.each`
- expectedState | event | effects
- ${INSTALLABLE} | ${INSTALLABLE} | ${{ uninstallSuccessful: true }}
- ${INSTALLED} | ${UNINSTALL_ERRORED} | ${{ uninstallFailed: true }}
- `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
- const { expectedState, event, effects } = data;
- const currentAppState = {
- status: UNINSTALLING,
- };
-
- expect(transitionApplicationState(currentAppState, event)).toEqual({
- status: expectedState,
- ...effects,
- });
- });
- });
-
- describe(`current state is ${UNINSTALLED}`, () => {
- it.each`
- expectedState | event | effects
- ${INSTALLED} | ${INSTALLED} | ${NO_EFFECTS}
- ${INSTALLABLE} | ${ERROR} | ${{ installFailed: true }}
- `(`transitions to $expectedState on $event event and applies $effects`, (data) => {
- const { expectedState, event, effects } = data;
- const currentAppState = {
- status: UNINSTALLED,
- };
-
- expect(transitionApplicationState(currentAppState, event)).toEqual({
- status: expectedState,
- ...noEffectsToEmptyObject(effects),
- });
- });
- });
- describe('current state is undefined', () => {
- it('returns the current state without having any effects', () => {
- const currentAppState = {};
- expect(transitionApplicationState(currentAppState, INSTALLABLE)).toEqual(currentAppState);
- });
- });
-
- describe('with event is undefined', () => {
- it('returns the current state without having any effects', () => {
- const currentAppState = {
- status: NO_STATUS,
- };
- expect(transitionApplicationState(currentAppState, undefined)).toEqual(currentAppState);
- });
- });
-});
diff --git a/spec/frontend/clusters/services/crossplane_provider_stack_spec.js b/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
deleted file mode 100644
index f95b175ca64..00000000000
--- a/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
+++ /dev/null
@@ -1,85 +0,0 @@
-import { GlDropdownItem, GlIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import CrossplaneProviderStack from '~/clusters/components/crossplane_provider_stack.vue';
-
-describe('CrossplaneProviderStack component', () => {
- let wrapper;
-
- const defaultProps = {
- stacks: [
- {
- name: 'Google Cloud Platform',
- code: 'gcp',
- },
- {
- name: 'Amazon Web Services',
- code: 'aws',
- },
- ],
- };
-
- function createComponent(props = {}) {
- const propsData = {
- ...defaultProps,
- ...props,
- };
-
- wrapper = shallowMount(CrossplaneProviderStack, {
- propsData,
- });
- }
-
- beforeEach(() => {
- const crossplane = {
- title: 'crossplane',
- stack: '',
- };
- createComponent({ crossplane });
- });
-
- const findDropdownElements = () => wrapper.findAll(GlDropdownItem);
- const findFirstDropdownElement = () => findDropdownElements().at(0);
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders all of the available stacks in the dropdown', () => {
- const dropdownElements = findDropdownElements();
-
- expect(dropdownElements.length).toBe(defaultProps.stacks.length);
-
- defaultProps.stacks.forEach((stack, index) =>
- expect(dropdownElements.at(index).text()).toEqual(stack.name),
- );
- });
-
- it('displays the correct label for the first dropdown item if a stack is selected', () => {
- const crossplane = {
- title: 'crossplane',
- stack: 'gcp',
- };
- createComponent({ crossplane });
- expect(wrapper.vm.dropdownText).toBe('Google Cloud Platform');
- });
-
- it('emits the "set" event with the selected stack value', () => {
- const crossplane = {
- title: 'crossplane',
- stack: 'gcp',
- };
- createComponent({ crossplane });
- findFirstDropdownElement().vm.$emit('click');
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.emitted().set[0][0].code).toEqual('gcp');
- });
- });
-
- it('renders the correct dropdown text when no stack is selected', () => {
- expect(wrapper.vm.dropdownText).toBe('Select Stack');
- });
-
- it('renders an external link', () => {
- expect(wrapper.find(GlIcon).props('name')).toBe('external-link');
- });
-});
diff --git a/spec/frontend/clusters/services/mock_data.js b/spec/frontend/clusters/services/mock_data.js
index a75fcb0cb06..cf63d5452ac 100644
--- a/spec/frontend/clusters/services/mock_data.js
+++ b/spec/frontend/clusters/services/mock_data.js
@@ -1,170 +1,19 @@
-import { APPLICATION_STATUS } from '~/clusters/constants';
-
const CLUSTERS_MOCK_DATA = {
GET: {
'/gitlab-org/gitlab-shell/clusters/1/status.json': {
data: {
status: 'errored',
status_reason: 'Failed to request to CloudPlatform.',
- applications: [
- {
- name: 'helm',
- status: APPLICATION_STATUS.INSTALLABLE,
- status_reason: null,
- can_uninstall: false,
- },
- {
- name: 'ingress',
- status: APPLICATION_STATUS.ERROR,
- status_reason: 'Cannot connect',
- external_ip: null,
- external_hostname: null,
- can_uninstall: false,
- },
- {
- name: 'runner',
- status: APPLICATION_STATUS.INSTALLING,
- status_reason: null,
- can_uninstall: false,
- },
- {
- name: 'prometheus',
- status: APPLICATION_STATUS.ERROR,
- status_reason: 'Cannot connect',
- can_uninstall: false,
- },
- {
- name: 'jupyter',
- status: APPLICATION_STATUS.INSTALLING,
- status_reason: 'Cannot connect',
- can_uninstall: false,
- },
- {
- name: 'knative',
- status: APPLICATION_STATUS.INSTALLING,
- status_reason: 'Cannot connect',
- can_uninstall: false,
- },
- {
- name: 'cert_manager',
- status: APPLICATION_STATUS.ERROR,
- status_reason: 'Cannot connect',
- email: 'test@example.com',
- can_uninstall: false,
- },
- {
- name: 'crossplane',
- status: APPLICATION_STATUS.ERROR,
- status_reason: 'Cannot connect',
- can_uninstall: false,
- },
- {
- name: 'elastic_stack',
- status: APPLICATION_STATUS.ERROR,
- status_reason: 'Cannot connect',
- can_uninstall: false,
- },
- ],
},
},
'/gitlab-org/gitlab-shell/clusters/2/status.json': {
data: {
status: 'errored',
status_reason: 'Failed to request to CloudPlatform.',
- applications: [
- {
- name: 'helm',
- status: APPLICATION_STATUS.INSTALLED,
- status_reason: null,
- },
- {
- name: 'ingress',
- status: APPLICATION_STATUS.INSTALLED,
- status_reason: 'Cannot connect',
- external_ip: '1.1.1.1',
- external_hostname: null,
- },
- {
- name: 'runner',
- status: APPLICATION_STATUS.INSTALLING,
- status_reason: null,
- },
- {
- name: 'prometheus',
- status: APPLICATION_STATUS.ERROR,
- status_reason: 'Cannot connect',
- },
- {
- name: 'jupyter',
- status: APPLICATION_STATUS.INSTALLABLE,
- status_reason: 'Cannot connect',
- },
- {
- name: 'knative',
- status: APPLICATION_STATUS.INSTALLABLE,
- status_reason: 'Cannot connect',
- },
- {
- name: 'cert_manager',
- status: APPLICATION_STATUS.ERROR,
- status_reason: 'Cannot connect',
- email: 'test@example.com',
- },
- {
- name: 'crossplane',
- status: APPLICATION_STATUS.ERROR,
- status_reason: 'Cannot connect',
- stack: 'gcp',
- },
- {
- name: 'elastic_stack',
- status: APPLICATION_STATUS.ERROR,
- status_reason: 'Cannot connect',
- },
- ],
},
},
},
- POST: {
- '/gitlab-org/gitlab-shell/clusters/1/applications/helm': {},
- '/gitlab-org/gitlab-shell/clusters/1/applications/ingress': {},
- '/gitlab-org/gitlab-shell/clusters/1/applications/crossplane': {},
- '/gitlab-org/gitlab-shell/clusters/1/applications/cert_manager': {},
- '/gitlab-org/gitlab-shell/clusters/1/applications/runner': {},
- '/gitlab-org/gitlab-shell/clusters/1/applications/prometheus': {},
- '/gitlab-org/gitlab-shell/clusters/1/applications/jupyter': {},
- '/gitlab-org/gitlab-shell/clusters/1/applications/knative': {},
- '/gitlab-org/gitlab-shell/clusters/1/applications/elastic_stack': {},
- },
-};
-
-const DEFAULT_APPLICATION_STATE = {
- id: 'some-app',
- title: 'My App',
- titleLink: 'https://about.gitlab.com/',
- description: 'Some description about this interesting application!',
- status: null,
- statusReason: null,
- requestReason: null,
-};
-
-const APPLICATIONS_MOCK_STATE = {
- helm: { title: 'Helm Tiller', status: 'installable' },
- ingress: {
- title: 'Ingress',
- status: 'installable',
- },
- crossplane: { title: 'Crossplane', status: 'installable', stack: '' },
- cert_manager: { title: 'Cert-Manager', status: 'installable' },
- runner: { title: 'GitLab Runner' },
- prometheus: { title: 'Prometheus' },
- jupyter: { title: 'JupyterHub', status: 'installable', hostname: '' },
- knative: { title: 'Knative ', status: 'installable', hostname: '' },
- elastic_stack: { title: 'Elastic Stack', status: 'installable' },
- cilium: {
- title: 'GitLab Container Network Policies',
- status: 'not_installable',
- },
+ POST: {},
};
-export { CLUSTERS_MOCK_DATA, DEFAULT_APPLICATION_STATE, APPLICATIONS_MOCK_STATE };
+export { CLUSTERS_MOCK_DATA };
diff --git a/spec/frontend/clusters/stores/clusters_store_spec.js b/spec/frontend/clusters/stores/clusters_store_spec.js
index cdba6fc6ab8..5e797bbf8a8 100644
--- a/spec/frontend/clusters/stores/clusters_store_spec.js
+++ b/spec/frontend/clusters/stores/clusters_store_spec.js
@@ -1,4 +1,3 @@
-import { APPLICATION_INSTALLED_STATUSES, APPLICATION_STATUS, RUNNER } from '~/clusters/constants';
import ClustersStore from '~/clusters/stores/clusters_store';
import { CLUSTERS_MOCK_DATA } from '../services/mock_data';
@@ -31,17 +30,6 @@ describe('Clusters Store', () => {
});
});
- describe('updateAppProperty', () => {
- it('should store new request reason', () => {
- expect(store.state.applications.helm.requestReason).toEqual(null);
-
- const newReason = 'We broke it.';
- store.updateAppProperty('helm', 'requestReason', newReason);
-
- expect(store.state.applications.helm.requestReason).toEqual(newReason);
- });
- });
-
describe('updateStateFromServer', () => {
it('should store new polling data from server', () => {
const mockResponseData =
@@ -50,196 +38,16 @@ describe('Clusters Store', () => {
expect(store.state).toEqual({
helpPath: null,
- helmHelpPath: null,
- ingressHelpPath: null,
environmentsHelpPath: null,
clustersHelpPath: null,
deployBoardsHelpPath: null,
- cloudRunHelpPath: null,
status: mockResponseData.status,
statusReason: mockResponseData.status_reason,
providerType: null,
- preInstalledKnative: false,
rbac: false,
- applications: {
- helm: {
- title: 'Legacy Helm Tiller server',
- status: mockResponseData.applications[0].status,
- statusReason: mockResponseData.applications[0].status_reason,
- requestReason: null,
- installable: true,
- installed: false,
- installFailed: false,
- uninstallable: false,
- uninstallSuccessful: false,
- uninstallFailed: false,
- validationError: null,
- },
- ingress: {
- title: 'Ingress',
- status: APPLICATION_STATUS.INSTALLABLE,
- statusReason: mockResponseData.applications[1].status_reason,
- requestReason: null,
- externalIp: null,
- externalHostname: null,
- installable: true,
- installed: false,
- installFailed: true,
- uninstallable: false,
- updateFailed: false,
- uninstallSuccessful: false,
- uninstallFailed: false,
- validationError: null,
- },
- runner: {
- title: 'GitLab Runner',
- status: mockResponseData.applications[2].status,
- statusReason: mockResponseData.applications[2].status_reason,
- requestReason: null,
- version: mockResponseData.applications[2].version,
- updateAvailable: mockResponseData.applications[2].update_available,
- chartRepo: 'https://gitlab.com/gitlab-org/charts/gitlab-runner',
- installable: true,
- installed: false,
- installFailed: false,
- updateFailed: false,
- updateSuccessful: false,
- uninstallable: false,
- uninstallSuccessful: false,
- uninstallFailed: false,
- validationError: null,
- },
- prometheus: {
- title: 'Prometheus',
- status: APPLICATION_STATUS.INSTALLABLE,
- statusReason: mockResponseData.applications[3].status_reason,
- requestReason: null,
- installable: true,
- installed: false,
- installFailed: true,
- uninstallable: false,
- uninstallSuccessful: false,
- uninstallFailed: false,
- validationError: null,
- },
- jupyter: {
- title: 'JupyterHub',
- status: mockResponseData.applications[4].status,
- statusReason: mockResponseData.applications[4].status_reason,
- requestReason: null,
- hostname: '',
- installable: true,
- installed: false,
- installFailed: false,
- uninstallable: false,
- uninstallSuccessful: false,
- uninstallFailed: false,
- validationError: null,
- },
- knative: {
- title: 'Knative',
- status: mockResponseData.applications[5].status,
- statusReason: mockResponseData.applications[5].status_reason,
- requestReason: null,
- hostname: null,
- isEditingDomain: false,
- externalIp: null,
- externalHostname: null,
- installable: true,
- installed: false,
- installFailed: false,
- uninstallable: false,
- uninstallSuccessful: false,
- uninstallFailed: false,
- updateSuccessful: false,
- updateFailed: false,
- validationError: null,
- },
- cert_manager: {
- title: 'Cert-Manager',
- status: APPLICATION_STATUS.INSTALLABLE,
- installFailed: true,
- statusReason: mockResponseData.applications[6].status_reason,
- requestReason: null,
- email: mockResponseData.applications[6].email,
- installable: true,
- installed: false,
- uninstallable: false,
- uninstallSuccessful: false,
- uninstallFailed: false,
- validationError: null,
- },
- elastic_stack: {
- title: 'Elastic Stack',
- status: APPLICATION_STATUS.INSTALLABLE,
- installFailed: true,
- statusReason: mockResponseData.applications[7].status_reason,
- requestReason: null,
- installable: true,
- installed: false,
- uninstallable: false,
- uninstallSuccessful: false,
- uninstallFailed: false,
- validationError: null,
- },
- crossplane: {
- title: 'Crossplane',
- status: APPLICATION_STATUS.INSTALLABLE,
- installFailed: true,
- statusReason: mockResponseData.applications[8].status_reason,
- requestReason: null,
- installable: true,
- installed: false,
- uninstallable: false,
- uninstallSuccessful: false,
- uninstallFailed: false,
- validationError: null,
- },
- cilium: {
- title: 'GitLab Container Network Policies',
- status: null,
- statusReason: null,
- requestReason: null,
- installable: false,
- installed: false,
- installFailed: false,
- uninstallable: false,
- uninstallSuccessful: false,
- uninstallFailed: false,
- validationError: null,
- },
- },
environments: [],
fetchingEnvironments: false,
});
});
-
- describe.each(APPLICATION_INSTALLED_STATUSES)(
- 'given the current app status is %s',
- (status) => {
- it('marks application as installed', () => {
- const mockResponseData =
- CLUSTERS_MOCK_DATA.GET['/gitlab-org/gitlab-shell/clusters/2/status.json'].data;
- const runnerAppIndex = 2;
-
- mockResponseData.applications[runnerAppIndex].status = status;
-
- store.updateStateFromServer(mockResponseData);
-
- expect(store.state.applications[RUNNER].installed).toBe(true);
- });
- },
- );
-
- it('sets default hostname for jupyter when ingress has a ip address', () => {
- const mockResponseData =
- CLUSTERS_MOCK_DATA.GET['/gitlab-org/gitlab-shell/clusters/2/status.json'].data;
-
- store.updateStateFromServer(mockResponseData);
-
- expect(store.state.applications.jupyter.hostname).toEqual(
- `jupyter.${store.state.applications.ingress.externalIp}.nip.io`,
- );
- });
});
});
diff --git a/spec/frontend/clusters_list/store/actions_spec.js b/spec/frontend/clusters_list/store/actions_spec.js
index b2ef3c2138a..f4b69053e14 100644
--- a/spec/frontend/clusters_list/store/actions_spec.js
+++ b/spec/frontend/clusters_list/store/actions_spec.js
@@ -5,7 +5,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { MAX_REQUESTS } from '~/clusters_list/constants';
import * as actions from '~/clusters_list/store/actions';
import * as types from '~/clusters_list/store/mutation_types';
-import { deprecatedCreateFlash as flashError } from '~/flash';
+import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import Poll from '~/lib/utils/poll';
import { apiData } from '../mock_data';
@@ -101,7 +101,9 @@ describe('Clusters store actions', () => {
},
],
() => {
- expect(flashError).toHaveBeenCalledWith(expect.stringMatching('error'));
+ expect(createFlash).toHaveBeenCalledWith({
+ message: expect.stringMatching('error'),
+ });
done();
},
);
diff --git a/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
index b59d1597a12..118d8ceceb9 100644
--- a/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
+++ b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
@@ -13,7 +13,9 @@ exports[`Code navigation popover component renders popover 1`] = `
<gl-tabs-stub
contentclass="gl-py-0"
navclass="gl-hidden"
+ queryparamname="tab"
theme="indigo"
+ value="0"
>
<gl-tab-stub
title="Definition"
diff --git a/spec/frontend/code_quality_walkthrough/components/step_spec.js b/spec/frontend/code_quality_walkthrough/components/step_spec.js
index c397faf1f35..bdbcda5f902 100644
--- a/spec/frontend/code_quality_walkthrough/components/step_spec.js
+++ b/spec/frontend/code_quality_walkthrough/components/step_spec.js
@@ -4,11 +4,11 @@ import Cookies from 'js-cookie';
import Step from '~/code_quality_walkthrough/components/step.vue';
import { EXPERIMENT_NAME, STEPS } from '~/code_quality_walkthrough/constants';
import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
-import { getParameterByName } from '~/lib/utils/common_utils';
+import { getParameterByName } from '~/lib/utils/url_utility';
import Tracking from '~/tracking';
-jest.mock('~/lib/utils/common_utils', () => ({
- ...jest.requireActual('~/lib/utils/common_utils'),
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
getParameterByName: jest.fn(),
}));
diff --git a/spec/frontend/collapsed_sidebar_todo_spec.js b/spec/frontend/collapsed_sidebar_todo_spec.js
deleted file mode 100644
index 7c659822672..00000000000
--- a/spec/frontend/collapsed_sidebar_todo_spec.js
+++ /dev/null
@@ -1,171 +0,0 @@
-/* eslint-disable no-new */
-import MockAdapter from 'axios-mock-adapter';
-import { clone } from 'lodash';
-import waitForPromises from 'helpers/wait_for_promises';
-import { TEST_HOST } from 'spec/test_constants';
-import axios from '~/lib/utils/axios_utils';
-import Sidebar from '~/right_sidebar';
-import { fixTitle } from '~/tooltips';
-
-jest.mock('~/tooltips');
-
-describe('Issuable right sidebar collapsed todo toggle', () => {
- const fixtureName = 'issues/open-issue.html';
- const jsonFixtureName = 'todos/todos.json';
- let mock;
-
- beforeEach(() => {
- const todoData = getJSONFixture(jsonFixtureName);
- new Sidebar();
- loadFixtures(fixtureName);
-
- document.querySelector('.js-right-sidebar').classList.toggle('right-sidebar-expanded');
- document.querySelector('.js-right-sidebar').classList.toggle('right-sidebar-collapsed');
-
- mock = new MockAdapter(axios);
-
- mock.onPost(`${TEST_HOST}/frontend-fixtures/issues-project/todos`).reply(() => {
- const response = clone(todoData);
-
- return [200, response];
- });
-
- mock.onDelete(/(.*)\/dashboard\/todos\/\d+$/).reply(() => {
- const response = clone(todoData);
- delete response.delete_path;
-
- return [200, response];
- });
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- it('shows add todo button', () => {
- expect(document.querySelector('.js-issuable-todo.sidebar-collapsed-icon')).not.toBeNull();
-
- expect(
- document
- .querySelector('.js-issuable-todo.sidebar-collapsed-icon svg')
- .getAttribute('data-testid'),
- ).toBe('todo-add-icon');
-
- expect(
- document.querySelector('.js-issuable-todo.sidebar-collapsed-icon .todo-undone'),
- ).toBeNull();
- });
-
- it('sets default tooltip title', () => {
- expect(
- document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').getAttribute('title'),
- ).toBe('Add a to do');
- });
-
- it('toggle todo state', (done) => {
- document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
-
- setImmediate(() => {
- expect(
- document.querySelector('.js-issuable-todo.sidebar-collapsed-icon .todo-undone'),
- ).not.toBeNull();
-
- expect(
- document
- .querySelector('.js-issuable-todo.sidebar-collapsed-icon svg.todo-undone')
- .getAttribute('data-testid'),
- ).toBe('todo-done-icon');
-
- done();
- });
- });
-
- it('toggle todo state of expanded todo toggle', (done) => {
- document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
-
- setImmediate(() => {
- expect(
- document.querySelector('.issuable-sidebar-header .js-issuable-todo').textContent.trim(),
- ).toBe('Mark as done');
-
- done();
- });
- });
-
- it('toggles todo button tooltip', (done) => {
- document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
-
- setImmediate(() => {
- const el = document.querySelector('.js-issuable-todo.sidebar-collapsed-icon');
-
- expect(el.getAttribute('title')).toBe('Mark as done');
- expect(fixTitle).toHaveBeenCalledWith(el);
-
- done();
- });
- });
-
- it('marks todo as done', (done) => {
- document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
-
- waitForPromises()
- .then(() => {
- expect(
- document.querySelector('.js-issuable-todo.sidebar-collapsed-icon .todo-undone'),
- ).not.toBeNull();
-
- document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
- })
- .then(waitForPromises)
- .then(() => {
- expect(
- document.querySelector('.js-issuable-todo.sidebar-collapsed-icon .todo-undone'),
- ).toBeNull();
-
- expect(
- document.querySelector('.issuable-sidebar-header .js-issuable-todo').textContent.trim(),
- ).toBe('Add a to do');
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('updates aria-label to Mark as done', (done) => {
- document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
-
- setImmediate(() => {
- expect(
- document
- .querySelector('.js-issuable-todo.sidebar-collapsed-icon')
- .getAttribute('aria-label'),
- ).toBe('Mark as done');
-
- done();
- });
- });
-
- it('updates aria-label to add todo', (done) => {
- document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
-
- waitForPromises()
- .then(() => {
- expect(
- document
- .querySelector('.js-issuable-todo.sidebar-collapsed-icon')
- .getAttribute('aria-label'),
- ).toBe('Mark as done');
-
- document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').click();
- })
- .then(waitForPromises)
- .then(() => {
- expect(
- document
- .querySelector('.js-issuable-todo.sidebar-collapsed-icon')
- .getAttribute('aria-label'),
- ).toBe('Add a to do');
- })
- .then(done)
- .catch(done.fail);
- });
-});
diff --git a/spec/frontend/commit/commit_pipeline_status_component_spec.js b/spec/frontend/commit/commit_pipeline_status_component_spec.js
index a56f761269a..8082b8524e7 100644
--- a/spec/frontend/commit/commit_pipeline_status_component_spec.js
+++ b/spec/frontend/commit/commit_pipeline_status_component_spec.js
@@ -2,7 +2,7 @@ import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Visibility from 'visibilityjs';
import { getJSONFixture } from 'helpers/fixtures';
-import { deprecatedCreateFlash as flash } from '~/flash';
+import createFlash from '~/flash';
import Poll from '~/lib/utils/poll';
import CommitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
@@ -170,7 +170,7 @@ describe('Commit pipeline status component', () => {
});
it('displays flash error message', () => {
- expect(flash).toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/commit/pipelines/pipelines_table_spec.js b/spec/frontend/commit/pipelines/pipelines_table_spec.js
index 4bf6727af3b..1defb3d586c 100644
--- a/spec/frontend/commit/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/commit/pipelines/pipelines_table_spec.js
@@ -66,7 +66,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
describe('with pipelines', () => {
beforeEach(async () => {
- mock.onGet('endpoint.json').reply(200, [pipeline]);
+ mock.onGet('endpoint.json').reply(200, [pipeline], { 'x-total': 10 });
createComponent();
@@ -110,7 +110,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
document.body.appendChild(element);
element.addEventListener('update-pipelines-count', (event) => {
- expect(event.detail.pipelines).toEqual([pipeline]);
+ expect(event.detail.pipelineCount).toEqual(10);
done();
});
diff --git a/spec/frontend/content_editor/components/content_editor_spec.js b/spec/frontend/content_editor/components/content_editor_spec.js
index 59c4190ad3a..563e80e04c1 100644
--- a/spec/frontend/content_editor/components/content_editor_spec.js
+++ b/spec/frontend/content_editor/components/content_editor_spec.js
@@ -1,5 +1,7 @@
+import { GlAlert } from '@gitlab/ui';
import { EditorContent } from '@tiptap/vue-2';
-import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ContentEditor from '~/content_editor/components/content_editor.vue';
import TopToolbar from '~/content_editor/components/top_toolbar.vue';
import { createContentEditor } from '~/content_editor/services/create_content_editor';
@@ -8,8 +10,11 @@ describe('ContentEditor', () => {
let wrapper;
let editor;
+ const findEditorElement = () => wrapper.findByTestId('content-editor');
+ const findErrorAlert = () => wrapper.findComponent(GlAlert);
+
const createWrapper = async (contentEditor) => {
- wrapper = shallowMount(ContentEditor, {
+ wrapper = shallowMountExtended(ContentEditor, {
propsData: {
contentEditor,
},
@@ -49,7 +54,7 @@ describe('ContentEditor', () => {
editor.tiptapEditor.isFocused = isFocused;
createWrapper(editor);
- expect(wrapper.classes()).toStrictEqual(classes);
+ expect(findEditorElement().classes()).toStrictEqual(classes);
},
);
@@ -57,6 +62,30 @@ describe('ContentEditor', () => {
editor.tiptapEditor.isFocused = true;
createWrapper(editor);
- expect(wrapper.classes()).toContain('is-focused');
+ expect(findEditorElement().classes()).toContain('is-focused');
+ });
+
+ describe('displaying error', () => {
+ const error = 'Content Editor error';
+
+ beforeEach(async () => {
+ createWrapper(editor);
+
+ editor.tiptapEditor.emit('error', error);
+
+ await nextTick();
+ });
+
+ it('displays error notifications from the tiptap editor', () => {
+ expect(findErrorAlert().text()).toBe(error);
+ });
+
+ it('allows dismissing an error alert', async () => {
+ findErrorAlert().vm.$emit('dismiss');
+
+ await nextTick();
+
+ expect(findErrorAlert().exists()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/content_editor/components/toolbar_button_spec.js b/spec/frontend/content_editor/components/toolbar_button_spec.js
index a49efa34017..d848adcbff8 100644
--- a/spec/frontend/content_editor/components/toolbar_button_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_button_spec.js
@@ -1,33 +1,17 @@
import { GlButton } from '@gitlab/ui';
-import { Extension } from '@tiptap/core';
import { shallowMount } from '@vue/test-utils';
import ToolbarButton from '~/content_editor/components/toolbar_button.vue';
-import { createContentEditor } from '~/content_editor/services/create_content_editor';
+import { createTestEditor, mockChainedCommands } from '../test_utils';
describe('content_editor/components/toolbar_button', () => {
let wrapper;
let tiptapEditor;
- let toggleFooSpy;
const CONTENT_TYPE = 'bold';
const ICON_NAME = 'bold';
const LABEL = 'Bold';
const buildEditor = () => {
- toggleFooSpy = jest.fn();
- tiptapEditor = createContentEditor({
- extensions: [
- {
- tiptapExtension: Extension.create({
- addCommands() {
- return {
- toggleFoo: () => toggleFooSpy,
- };
- },
- }),
- },
- ],
- renderMarkdown: () => true,
- }).tiptapEditor;
+ tiptapEditor = createTestEditor();
jest.spyOn(tiptapEditor, 'isActive');
};
@@ -78,20 +62,28 @@ describe('content_editor/components/toolbar_button', () => {
describe('when button is clicked', () => {
it('executes the content type command when executeCommand = true', async () => {
- buildWrapper({ editorCommand: 'toggleFoo' });
+ const editorCommand = 'toggleFoo';
+ const mockCommands = mockChainedCommands(tiptapEditor, [editorCommand, 'focus', 'run']);
+
+ buildWrapper({ editorCommand });
await findButton().trigger('click');
- expect(toggleFooSpy).toHaveBeenCalled();
+ expect(mockCommands[editorCommand]).toHaveBeenCalled();
+ expect(mockCommands.focus).toHaveBeenCalled();
+ expect(mockCommands.run).toHaveBeenCalled();
expect(wrapper.emitted().execute).toHaveLength(1);
});
it('does not executes the content type command when executeCommand = false', async () => {
+ const editorCommand = 'toggleFoo';
+ const mockCommands = mockChainedCommands(tiptapEditor, [editorCommand, 'run']);
+
buildWrapper();
await findButton().trigger('click');
- expect(toggleFooSpy).not.toHaveBeenCalled();
+ expect(mockCommands[editorCommand]).not.toHaveBeenCalled();
expect(wrapper.emitted().execute).toHaveLength(1);
});
});
diff --git a/spec/frontend/content_editor/components/toolbar_image_button_spec.js b/spec/frontend/content_editor/components/toolbar_image_button_spec.js
new file mode 100644
index 00000000000..701dcf83476
--- /dev/null
+++ b/spec/frontend/content_editor/components/toolbar_image_button_spec.js
@@ -0,0 +1,78 @@
+import { GlButton, GlFormInputGroup } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import ToolbarImageButton from '~/content_editor/components/toolbar_image_button.vue';
+import { configure as configureImageExtension } from '~/content_editor/extensions/image';
+import { createTestEditor, mockChainedCommands } from '../test_utils';
+
+describe('content_editor/components/toolbar_image_button', () => {
+ let wrapper;
+ let editor;
+
+ const buildWrapper = () => {
+ wrapper = mountExtended(ToolbarImageButton, {
+ propsData: {
+ tiptapEditor: editor,
+ },
+ });
+ };
+
+ const findImageURLInput = () =>
+ wrapper.findComponent(GlFormInputGroup).find('input[type="text"]');
+ const findApplyImageButton = () => wrapper.findComponent(GlButton);
+
+ const selectFile = async (file) => {
+ const input = wrapper.find({ ref: 'fileSelector' });
+
+ // override the property definition because `input.files` isn't directly modifyable
+ Object.defineProperty(input.element, 'files', { value: [file], writable: true });
+ await input.trigger('change');
+ };
+
+ beforeEach(() => {
+ const { tiptapExtension: Image } = configureImageExtension({
+ renderMarkdown: jest.fn(),
+ uploadsPath: '/uploads/',
+ });
+
+ editor = createTestEditor({
+ extensions: [Image],
+ });
+
+ buildWrapper();
+ });
+
+ afterEach(() => {
+ editor.destroy();
+ wrapper.destroy();
+ });
+
+ it('sets the image to the value in the URL input when "Insert" button is clicked', async () => {
+ const commands = mockChainedCommands(editor, ['focus', 'setImage', 'run']);
+
+ await findImageURLInput().setValue('https://example.com/img.jpg');
+ await findApplyImageButton().trigger('click');
+
+ expect(commands.focus).toHaveBeenCalled();
+ expect(commands.setImage).toHaveBeenCalledWith({
+ alt: 'img',
+ src: 'https://example.com/img.jpg',
+ canonicalSrc: 'https://example.com/img.jpg',
+ });
+ expect(commands.run).toHaveBeenCalled();
+
+ expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'image', value: 'url' }]);
+ });
+
+ it('uploads the selected image when file input changes', async () => {
+ const commands = mockChainedCommands(editor, ['focus', 'uploadImage', 'run']);
+ const file = new File(['foo'], 'foo.png', { type: 'image/png' });
+
+ await selectFile(file);
+
+ expect(commands.focus).toHaveBeenCalled();
+ expect(commands.uploadImage).toHaveBeenCalledWith({ file });
+ expect(commands.run).toHaveBeenCalled();
+
+ expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'image', value: 'upload' }]);
+ });
+});
diff --git a/spec/frontend/content_editor/components/toolbar_link_button_spec.js b/spec/frontend/content_editor/components/toolbar_link_button_spec.js
index 812e769c891..576a2912f72 100644
--- a/spec/frontend/content_editor/components/toolbar_link_button_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_link_button_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownDivider, GlFormInputGroup, GlButton } from '@gitlab/ui';
+import { GlDropdown, GlDropdownDivider, GlButton, GlFormInputGroup } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import ToolbarLinkButton from '~/content_editor/components/toolbar_link_button.vue';
import { tiptapExtension as Link } from '~/content_editor/extensions/link';
@@ -16,9 +16,6 @@ describe('content_editor/components/toolbar_link_button', () => {
propsData: {
tiptapEditor: editor,
},
- stubs: {
- GlFormInputGroup,
- },
});
};
const findDropdown = () => wrapper.findComponent(GlDropdown);
@@ -45,9 +42,8 @@ describe('content_editor/components/toolbar_link_button', () => {
});
describe('when there is an active link', () => {
- beforeEach(() => {
- jest.spyOn(editor, 'isActive');
- editor.isActive.mockReturnValueOnce(true);
+ beforeEach(async () => {
+ jest.spyOn(editor, 'isActive').mockReturnValueOnce(true);
buildWrapper();
});
@@ -78,8 +74,36 @@ describe('content_editor/components/toolbar_link_button', () => {
expect(commands.focus).toHaveBeenCalled();
expect(commands.unsetLink).toHaveBeenCalled();
- expect(commands.setLink).toHaveBeenCalledWith({ href: 'https://example' });
+ expect(commands.setLink).toHaveBeenCalledWith({
+ href: 'https://example',
+ canonicalSrc: 'https://example',
+ });
expect(commands.run).toHaveBeenCalled();
+
+ expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'link' }]);
+ });
+
+ describe('on selection update', () => {
+ it('updates link input box with canonical-src if present', async () => {
+ jest.spyOn(editor, 'getAttributes').mockReturnValueOnce({
+ canonicalSrc: 'uploads/my-file.zip',
+ href: '/username/my-project/uploads/abcdefgh133535/my-file.zip',
+ });
+
+ await editor.emit('selectionUpdate', { editor });
+
+ expect(findLinkURLInput().element.value).toEqual('uploads/my-file.zip');
+ });
+
+ it('updates link input box with link href otherwise', async () => {
+ jest.spyOn(editor, 'getAttributes').mockReturnValueOnce({
+ href: 'https://gitlab.com',
+ });
+
+ await editor.emit('selectionUpdate', { editor });
+
+ expect(findLinkURLInput().element.value).toEqual('https://gitlab.com');
+ });
});
});
@@ -106,8 +130,13 @@ describe('content_editor/components/toolbar_link_button', () => {
await findApplyLinkButton().trigger('click');
expect(commands.focus).toHaveBeenCalled();
- expect(commands.setLink).toHaveBeenCalledWith({ href: 'https://example' });
+ expect(commands.setLink).toHaveBeenCalledWith({
+ href: 'https://example',
+ canonicalSrc: 'https://example',
+ });
expect(commands.run).toHaveBeenCalled();
+
+ expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'link' }]);
});
});
diff --git a/spec/frontend/content_editor/components/toolbar_table_button_spec.js b/spec/frontend/content_editor/components/toolbar_table_button_spec.js
new file mode 100644
index 00000000000..237b2848246
--- /dev/null
+++ b/spec/frontend/content_editor/components/toolbar_table_button_spec.js
@@ -0,0 +1,109 @@
+import { GlDropdown, GlButton } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import ToolbarTableButton from '~/content_editor/components/toolbar_table_button.vue';
+import { tiptapExtension as Table } from '~/content_editor/extensions/table';
+import { tiptapExtension as TableCell } from '~/content_editor/extensions/table_cell';
+import { tiptapExtension as TableHeader } from '~/content_editor/extensions/table_header';
+import { tiptapExtension as TableRow } from '~/content_editor/extensions/table_row';
+import { createTestEditor, mockChainedCommands } from '../test_utils';
+
+describe('content_editor/components/toolbar_table_button', () => {
+ let wrapper;
+ let editor;
+
+ const buildWrapper = () => {
+ wrapper = mountExtended(ToolbarTableButton, {
+ propsData: {
+ tiptapEditor: editor,
+ },
+ });
+ };
+
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const getNumButtons = () => findDropdown().findAllComponents(GlButton).length;
+
+ beforeEach(() => {
+ editor = createTestEditor({
+ extensions: [Table, TableCell, TableRow, TableHeader],
+ });
+
+ buildWrapper();
+ });
+
+ afterEach(() => {
+ editor.destroy();
+ wrapper.destroy();
+ });
+
+ it('renders a grid of 3x3 buttons to create a table', () => {
+ expect(getNumButtons()).toBe(9); // 3 x 3
+ });
+
+ describe.each`
+ row | col | numButtons | tableSize
+ ${1} | ${2} | ${9} | ${'1x2'}
+ ${2} | ${2} | ${9} | ${'2x2'}
+ ${2} | ${3} | ${12} | ${'2x3'}
+ ${3} | ${2} | ${12} | ${'3x2'}
+ ${3} | ${3} | ${16} | ${'3x3'}
+ `('button($row, $col) in the table creator grid', ({ row, col, numButtons, tableSize }) => {
+ describe('on mouse over', () => {
+ beforeEach(async () => {
+ const button = wrapper.findByTestId(`table-${row}-${col}`);
+ await button.trigger('mouseover');
+ });
+
+ it('marks all rows and cols before it as active', () => {
+ const prevRow = Math.max(1, row - 1);
+ const prevCol = Math.max(1, col - 1);
+ expect(wrapper.findByTestId(`table-${prevRow}-${prevCol}`).element).toHaveClass(
+ 'gl-bg-blue-50!',
+ );
+ });
+
+ it('shows a help text indicating the size of the table being inserted', () => {
+ expect(findDropdown().element).toHaveText(`Insert a ${tableSize} table.`);
+ });
+
+ it('adds another row and col of buttons to create a bigger table', () => {
+ expect(getNumButtons()).toBe(numButtons);
+ });
+ });
+
+ describe('on click', () => {
+ let commands;
+
+ beforeEach(async () => {
+ commands = mockChainedCommands(editor, ['focus', 'insertTable', 'run']);
+
+ const button = wrapper.findByTestId(`table-${row}-${col}`);
+ await button.trigger('mouseover');
+ await button.trigger('click');
+ });
+
+ it('inserts a table with $tableSize rows and cols', () => {
+ expect(commands.focus).toHaveBeenCalled();
+ expect(commands.insertTable).toHaveBeenCalledWith({
+ rows: row,
+ cols: col,
+ withHeaderRow: true,
+ });
+ expect(commands.run).toHaveBeenCalled();
+
+ expect(wrapper.emitted().execute).toHaveLength(1);
+ });
+ });
+ });
+
+ it('does not create more buttons than a 8x8 grid', async () => {
+ for (let i = 3; i < 8; i += 1) {
+ expect(getNumButtons()).toBe(i * i);
+
+ // eslint-disable-next-line no-await-in-loop
+ await wrapper.findByTestId(`table-${i}-${i}`).trigger('mouseover');
+ expect(findDropdown().element).toHaveText(`Insert a ${i}x${i} table.`);
+ }
+
+ expect(getNumButtons()).toBe(64); // 8x8 (and not 9x9)
+ });
+});
diff --git a/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js b/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js
index 8c54f6bb8bb..9a46e27404f 100644
--- a/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js
@@ -2,21 +2,16 @@ import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ToolbarTextStyleDropdown from '~/content_editor/components/toolbar_text_style_dropdown.vue';
import { TEXT_STYLE_DROPDOWN_ITEMS } from '~/content_editor/constants';
-import { createTestContentEditorExtension, createTestEditor } from '../test_utils';
+import { tiptapExtension as Heading } from '~/content_editor/extensions/heading';
+import { createTestEditor, mockChainedCommands } from '../test_utils';
describe('content_editor/components/toolbar_headings_dropdown', () => {
let wrapper;
let tiptapEditor;
- let commandMocks;
const buildEditor = () => {
- const testExtension = createTestContentEditorExtension({
- commands: TEXT_STYLE_DROPDOWN_ITEMS.map((item) => item.editorCommand),
- });
-
- commandMocks = testExtension.commandMocks;
tiptapEditor = createTestEditor({
- extensions: [testExtension.tiptapExtension],
+ extensions: [Heading],
});
jest.spyOn(tiptapEditor, 'isActive');
@@ -104,9 +99,12 @@ describe('content_editor/components/toolbar_headings_dropdown', () => {
TEXT_STYLE_DROPDOWN_ITEMS.forEach((textStyle, index) => {
const { editorCommand, commandParams } = textStyle;
+ const commands = mockChainedCommands(tiptapEditor, [editorCommand, 'focus', 'run']);
wrapper.findAllComponents(GlDropdownItem).at(index).vm.$emit('click');
- expect(commandMocks[editorCommand]).toHaveBeenCalledWith(commandParams || {});
+ expect(commands[editorCommand]).toHaveBeenCalledWith(commandParams || {});
+ expect(commands.focus).toHaveBeenCalled();
+ expect(commands.run).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/content_editor/components/top_toolbar_spec.js b/spec/frontend/content_editor/components/top_toolbar_spec.js
index 0d55fa730ae..5411793cd5e 100644
--- a/spec/frontend/content_editor/components/top_toolbar_spec.js
+++ b/spec/frontend/content_editor/components/top_toolbar_spec.js
@@ -39,17 +39,19 @@ describe('content_editor/components/top_toolbar', () => {
});
describe.each`
- testId | controlProps
- ${'bold'} | ${{ contentType: 'bold', iconName: 'bold', label: 'Bold text', editorCommand: 'toggleBold' }}
- ${'italic'} | ${{ contentType: 'italic', iconName: 'italic', label: 'Italic text', editorCommand: 'toggleItalic' }}
- ${'strike'} | ${{ contentType: 'strike', iconName: 'strikethrough', label: 'Strikethrough', editorCommand: 'toggleStrike' }}
- ${'code'} | ${{ contentType: 'code', iconName: 'code', label: 'Code', editorCommand: 'toggleCode' }}
- ${'blockquote'} | ${{ contentType: 'blockquote', iconName: 'quote', label: 'Insert a quote', editorCommand: 'toggleBlockquote' }}
- ${'bullet-list'} | ${{ contentType: 'bulletList', iconName: 'list-bulleted', label: 'Add a bullet list', editorCommand: 'toggleBulletList' }}
- ${'ordered-list'} | ${{ contentType: 'orderedList', iconName: 'list-numbered', label: 'Add a numbered list', editorCommand: 'toggleOrderedList' }}
- ${'code-block'} | ${{ contentType: 'codeBlock', iconName: 'doc-code', label: 'Insert a code block', editorCommand: 'toggleCodeBlock' }}
- ${'text-styles'} | ${{}}
- ${'link'} | ${{}}
+ testId | controlProps
+ ${'bold'} | ${{ contentType: 'bold', iconName: 'bold', label: 'Bold text', editorCommand: 'toggleBold' }}
+ ${'italic'} | ${{ contentType: 'italic', iconName: 'italic', label: 'Italic text', editorCommand: 'toggleItalic' }}
+ ${'strike'} | ${{ contentType: 'strike', iconName: 'strikethrough', label: 'Strikethrough', editorCommand: 'toggleStrike' }}
+ ${'code'} | ${{ contentType: 'code', iconName: 'code', label: 'Code', editorCommand: 'toggleCode' }}
+ ${'blockquote'} | ${{ contentType: 'blockquote', iconName: 'quote', label: 'Insert a quote', editorCommand: 'toggleBlockquote' }}
+ ${'bullet-list'} | ${{ contentType: 'bulletList', iconName: 'list-bulleted', label: 'Add a bullet list', editorCommand: 'toggleBulletList' }}
+ ${'ordered-list'} | ${{ contentType: 'orderedList', iconName: 'list-numbered', label: 'Add a numbered list', editorCommand: 'toggleOrderedList' }}
+ ${'horizontal-rule'} | ${{ contentType: 'horizontalRule', iconName: 'dash', label: 'Add a horizontal rule', editorCommand: 'setHorizontalRule' }}
+ ${'code-block'} | ${{ contentType: 'codeBlock', iconName: 'doc-code', label: 'Insert a code block', editorCommand: 'toggleCodeBlock' }}
+ ${'text-styles'} | ${{}}
+ ${'link'} | ${{}}
+ ${'image'} | ${{}}
`('given a $testId toolbar control', ({ testId, controlProps }) => {
beforeEach(() => {
buildWrapper();
diff --git a/spec/frontend/content_editor/components/wrappers/image_spec.js b/spec/frontend/content_editor/components/wrappers/image_spec.js
new file mode 100644
index 00000000000..7b057f9cabc
--- /dev/null
+++ b/spec/frontend/content_editor/components/wrappers/image_spec.js
@@ -0,0 +1,66 @@
+import { GlLoadingIcon } from '@gitlab/ui';
+import { NodeViewWrapper } from '@tiptap/vue-2';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ImageWrapper from '~/content_editor/components/wrappers/image.vue';
+
+describe('content/components/wrappers/image', () => {
+ let wrapper;
+
+ const createWrapper = async (nodeAttrs = {}) => {
+ wrapper = shallowMountExtended(ImageWrapper, {
+ propsData: {
+ node: {
+ attrs: nodeAttrs,
+ },
+ },
+ });
+ };
+ const findImage = () => wrapper.findByTestId('image');
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders a node-view-wrapper with display-inline-block class', () => {
+ createWrapper();
+
+ expect(wrapper.findComponent(NodeViewWrapper).classes()).toContain('gl-display-inline-block');
+ });
+
+ it('renders an image that displays the node src', () => {
+ const src = 'foobar.png';
+
+ createWrapper({ src });
+
+ expect(findImage().attributes().src).toBe(src);
+ });
+
+ describe('when uploading', () => {
+ beforeEach(() => {
+ createWrapper({ uploading: true });
+ });
+
+ it('renders a gl-loading-icon component', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('adds gl-opacity-5 class selector to image', () => {
+ expect(findImage().classes()).toContain('gl-opacity-5');
+ });
+ });
+
+ describe('when not uploading', () => {
+ beforeEach(() => {
+ createWrapper({ uploading: false });
+ });
+
+ it('does not render a gl-loading-icon component', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('does not add gl-opacity-5 class selector to image', () => {
+ expect(findImage().classes()).not.toContain('gl-opacity-5');
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/hard_break_spec.js b/spec/frontend/content_editor/extensions/hard_break_spec.js
new file mode 100644
index 00000000000..ebd58e60b0c
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/hard_break_spec.js
@@ -0,0 +1,46 @@
+import { tiptapExtension as HardBreak } from '~/content_editor/extensions/hard_break';
+import { createTestEditor, createDocBuilder } from '../test_utils';
+
+describe('content_editor/extensions/hard_break', () => {
+ let tiptapEditor;
+ let eq;
+ let doc;
+ let p;
+ let hardBreak;
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({ extensions: [HardBreak] });
+
+ ({
+ builders: { doc, p, hardBreak },
+ eq,
+ } = createDocBuilder({
+ tiptapEditor,
+ names: { hardBreak: { nodeType: HardBreak.name } },
+ }));
+ });
+
+ describe('Shift-Enter shortcut', () => {
+ it('inserts a hard break when shortcut is executed', () => {
+ const initialDoc = doc(p(''));
+ const expectedDoc = doc(p(hardBreak()));
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ tiptapEditor.commands.keyboardShortcut('Shift-Enter');
+
+ expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
+ });
+ });
+
+ describe('Mod-Enter shortcut', () => {
+ it('does not insert a hard break when shortcut is executed', () => {
+ const initialDoc = doc(p(''));
+ const expectedDoc = initialDoc;
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ tiptapEditor.commands.keyboardShortcut('Mod-Enter');
+
+ expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/horizontal_rule_spec.js b/spec/frontend/content_editor/extensions/horizontal_rule_spec.js
new file mode 100644
index 00000000000..a1bc7f0e8ed
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/horizontal_rule_spec.js
@@ -0,0 +1,20 @@
+import { hrInputRuleRegExp } from '~/content_editor/extensions/horizontal_rule';
+
+describe('content_editor/extensions/horizontal_rule', () => {
+ describe.each`
+ input | matches
+ ${'---'} | ${true}
+ ${'--'} | ${false}
+ ${'---x'} | ${false}
+ ${' ---x'} | ${false}
+ ${' --- '} | ${false}
+ ${'x---x'} | ${false}
+ ${'x---'} | ${false}
+ `('hrInputRuleRegExp', ({ input, matches }) => {
+ it(`${matches ? 'matches' : 'does not match'}: "${input}"`, () => {
+ const match = new RegExp(hrInputRuleRegExp).test(input);
+
+ expect(match).toBe(matches);
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/image_spec.js b/spec/frontend/content_editor/extensions/image_spec.js
new file mode 100644
index 00000000000..922966b813a
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/image_spec.js
@@ -0,0 +1,193 @@
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import { once } from 'lodash';
+import waitForPromises from 'helpers/wait_for_promises';
+import * as Image from '~/content_editor/extensions/image';
+import httpStatus from '~/lib/utils/http_status';
+import { loadMarkdownApiResult } from '../markdown_processing_examples';
+import { createTestEditor, createDocBuilder } from '../test_utils';
+
+describe('content_editor/extensions/image', () => {
+ let tiptapEditor;
+ let eq;
+ let doc;
+ let p;
+ let image;
+ let renderMarkdown;
+ let mock;
+ const uploadsPath = '/uploads/';
+ const validFile = new File(['foo'], 'foo.png', { type: 'image/png' });
+ const invalidFile = new File(['foo'], 'bar.html', { type: 'text/html' });
+
+ beforeEach(() => {
+ renderMarkdown = jest
+ .fn()
+ .mockResolvedValue(loadMarkdownApiResult('project_wiki_attachment_image').body);
+
+ const { tiptapExtension } = Image.configure({ renderMarkdown, uploadsPath });
+
+ tiptapEditor = createTestEditor({ extensions: [tiptapExtension] });
+
+ ({
+ builders: { doc, p, image },
+ eq,
+ } = createDocBuilder({
+ tiptapEditor,
+ names: { image: { nodeType: tiptapExtension.name } },
+ }));
+
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.reset();
+ });
+
+ it.each`
+ file | valid | description
+ ${validFile} | ${true} | ${'handles paste event when mime type is valid'}
+ ${invalidFile} | ${false} | ${'does not handle paste event when mime type is invalid'}
+ `('$description', ({ file, valid }) => {
+ const pasteEvent = Object.assign(new Event('paste'), {
+ clipboardData: {
+ files: [file],
+ },
+ });
+ let handled;
+
+ tiptapEditor.view.someProp('handlePaste', (eventHandler) => {
+ handled = eventHandler(tiptapEditor.view, pasteEvent);
+ });
+
+ expect(handled).toBe(valid);
+ });
+
+ it.each`
+ file | valid | description
+ ${validFile} | ${true} | ${'handles drop event when mime type is valid'}
+ ${invalidFile} | ${false} | ${'does not handle drop event when mime type is invalid'}
+ `('$description', ({ file, valid }) => {
+ const dropEvent = Object.assign(new Event('drop'), {
+ dataTransfer: {
+ files: [file],
+ },
+ });
+ let handled;
+
+ tiptapEditor.view.someProp('handleDrop', (eventHandler) => {
+ handled = eventHandler(tiptapEditor.view, dropEvent);
+ });
+
+ expect(handled).toBe(valid);
+ });
+
+ it('handles paste event when mime type is correct', () => {
+ const pasteEvent = Object.assign(new Event('paste'), {
+ clipboardData: {
+ files: [new File(['foo'], 'foo.png', { type: 'image/png' })],
+ },
+ });
+ const handled = tiptapEditor.view.someProp('handlePaste', (eventHandler) => {
+ return eventHandler(tiptapEditor.view, pasteEvent);
+ });
+
+ expect(handled).toBe(true);
+ });
+
+ describe('uploadImage command', () => {
+ describe('when file has correct mime type', () => {
+ let initialDoc;
+ const base64EncodedFile = 'data:image/png;base64,Zm9v';
+
+ beforeEach(() => {
+ initialDoc = doc(p(''));
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ });
+
+ describe('when uploading image succeeds', () => {
+ const successResponse = {
+ link: {
+ markdown: '[image](/uploads/25265/image.png)',
+ },
+ };
+
+ beforeEach(() => {
+ mock.onPost().reply(httpStatus.OK, successResponse);
+ });
+
+ it('inserts an image with src set to the encoded image file and uploading true', (done) => {
+ const expectedDoc = doc(p(image({ uploading: true, src: base64EncodedFile })));
+
+ tiptapEditor.on(
+ 'update',
+ once(() => {
+ expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
+ done();
+ }),
+ );
+
+ tiptapEditor.commands.uploadImage({ file: validFile });
+ });
+
+ it('updates the inserted image with canonicalSrc when upload is successful', async () => {
+ const expectedDoc = doc(
+ p(
+ image({
+ canonicalSrc: 'test-file.png',
+ src: base64EncodedFile,
+ alt: 'test file',
+ uploading: false,
+ }),
+ ),
+ );
+
+ tiptapEditor.commands.uploadImage({ file: validFile });
+
+ await waitForPromises();
+
+ expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
+ });
+ });
+
+ describe('when uploading image request fails', () => {
+ beforeEach(() => {
+ mock.onPost().reply(httpStatus.INTERNAL_SERVER_ERROR);
+ });
+
+ it('resets the doc to orginal state', async () => {
+ const expectedDoc = doc(p(''));
+
+ tiptapEditor.commands.uploadImage({ file: validFile });
+
+ await waitForPromises();
+
+ expect(eq(tiptapEditor.state.doc, expectedDoc)).toBe(true);
+ });
+
+ it('emits an error event that includes an error message', (done) => {
+ tiptapEditor.commands.uploadImage({ file: validFile });
+
+ tiptapEditor.on('error', (message) => {
+ expect(message).toBe('An error occurred while uploading the image. Please try again.');
+ done();
+ });
+ });
+ });
+ });
+
+ describe('when file does not have correct mime type', () => {
+ let initialDoc;
+
+ beforeEach(() => {
+ initialDoc = doc(p(''));
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ });
+
+ it('does not start the upload image process', () => {
+ tiptapEditor.commands.uploadImage({ file: invalidFile });
+
+ expect(eq(tiptapEditor.state.doc, initialDoc)).toBe(true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/markdown_processing_examples.js b/spec/frontend/content_editor/markdown_processing_examples.js
index 12bf2cbb747..12eed00f3c6 100644
--- a/spec/frontend/content_editor/markdown_processing_examples.js
+++ b/spec/frontend/content_editor/markdown_processing_examples.js
@@ -1,7 +1,6 @@
import fs from 'fs';
import path from 'path';
import jsYaml from 'js-yaml';
-import { toArray } from 'lodash';
import { getJSONFixture } from 'helpers/fixtures';
export const loadMarkdownApiResult = (testName) => {
@@ -15,5 +14,5 @@ export const loadMarkdownApiExamples = () => {
const apiMarkdownYamlText = fs.readFileSync(apiMarkdownYamlPath);
const apiMarkdownExampleObjects = jsYaml.safeLoad(apiMarkdownYamlText);
- return apiMarkdownExampleObjects.map((example) => toArray(example));
+ return apiMarkdownExampleObjects.map(({ name, context, markdown }) => [name, context, markdown]);
};
diff --git a/spec/frontend/content_editor/markdown_processing_spec.js b/spec/frontend/content_editor/markdown_processing_spec.js
index cb34476d680..028cd6a8271 100644
--- a/spec/frontend/content_editor/markdown_processing_spec.js
+++ b/spec/frontend/content_editor/markdown_processing_spec.js
@@ -3,11 +3,15 @@ import { loadMarkdownApiExamples, loadMarkdownApiResult } from './markdown_proce
describe('markdown processing', () => {
// Ensure we generate same markdown that was provided to Markdown API.
- it.each(loadMarkdownApiExamples())('correctly handles %s', async (testName, markdown) => {
- const { html } = loadMarkdownApiResult(testName);
- const contentEditor = createContentEditor({ renderMarkdown: () => html });
- await contentEditor.setSerializedContent(markdown);
+ it.each(loadMarkdownApiExamples())(
+ 'correctly handles %s (context: %s)',
+ async (name, context, markdown) => {
+ const testName = context ? `${context}_${name}` : name;
+ const { html, body } = loadMarkdownApiResult(testName);
+ const contentEditor = createContentEditor({ renderMarkdown: () => html || body });
+ await contentEditor.setSerializedContent(markdown);
- expect(contentEditor.getSerializedContent()).toBe(markdown);
- });
+ expect(contentEditor.getSerializedContent()).toBe(markdown);
+ },
+ );
});
diff --git a/spec/frontend/content_editor/services/create_content_editor_spec.js b/spec/frontend/content_editor/services/create_content_editor_spec.js
index 59b2fab6d54..b614efd954a 100644
--- a/spec/frontend/content_editor/services/create_content_editor_spec.js
+++ b/spec/frontend/content_editor/services/create_content_editor_spec.js
@@ -5,10 +5,11 @@ import { createTestContentEditorExtension } from '../test_utils';
describe('content_editor/services/create_editor', () => {
let renderMarkdown;
let editor;
+ const uploadsPath = '/uploads';
beforeEach(() => {
renderMarkdown = jest.fn();
- editor = createContentEditor({ renderMarkdown });
+ editor = createContentEditor({ renderMarkdown, uploadsPath });
});
it('sets gl-outline-0! class selector to the tiptapEditor instance', () => {
@@ -48,4 +49,13 @@ describe('content_editor/services/create_editor', () => {
it('throws an error when a renderMarkdown fn is not provided', () => {
expect(() => createContentEditor()).toThrow(PROVIDE_SERIALIZER_OR_RENDERER_ERROR);
});
+
+ it('provides uploadsPath and renderMarkdown function to Image extension', () => {
+ expect(
+ editor.tiptapEditor.extensionManager.extensions.find((e) => e.name === 'image').options,
+ ).toMatchObject({
+ uploadsPath,
+ renderMarkdown,
+ });
+ });
});
diff --git a/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js b/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js
index cf74b5c56c9..64f3d8df6e0 100644
--- a/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js
+++ b/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js
@@ -1,26 +1,23 @@
-import { BulletList } from '@tiptap/extension-bullet-list';
-import { CodeBlockLowlight } from '@tiptap/extension-code-block-lowlight';
-import { Document } from '@tiptap/extension-document';
-import { Heading } from '@tiptap/extension-heading';
-import { ListItem } from '@tiptap/extension-list-item';
-import { Paragraph } from '@tiptap/extension-paragraph';
-import { Text } from '@tiptap/extension-text';
-import { Editor } from '@tiptap/vue-2';
import { mockTracking } from 'helpers/tracking_helper';
import {
KEYBOARD_SHORTCUT_TRACKING_ACTION,
INPUT_RULE_TRACKING_ACTION,
CONTENT_EDITOR_TRACKING_LABEL,
} from '~/content_editor/constants';
+import { tiptapExtension as BulletList } from '~/content_editor/extensions/bullet_list';
+import { tiptapExtension as CodeBlockLowlight } from '~/content_editor/extensions/code_block_highlight';
+import { tiptapExtension as Heading } from '~/content_editor/extensions/heading';
+import { tiptapExtension as ListItem } from '~/content_editor/extensions/list_item';
import trackInputRulesAndShortcuts from '~/content_editor/services/track_input_rules_and_shortcuts';
import { ENTER_KEY, BACKSPACE_KEY } from '~/lib/utils/keys';
+import { createTestEditor } from '../test_utils';
describe('content_editor/services/track_input_rules_and_shortcuts', () => {
let trackingSpy;
let editor;
let trackedExtensions;
const HEADING_TEXT = 'Heading text';
- const extensions = [Document, Paragraph, Text, Heading, CodeBlockLowlight, BulletList, ListItem];
+ const extensions = [Heading, CodeBlockLowlight, BulletList, ListItem];
beforeEach(() => {
trackingSpy = mockTracking(undefined, null, jest.spyOn);
@@ -29,7 +26,7 @@ describe('content_editor/services/track_input_rules_and_shortcuts', () => {
describe('given the heading extension is instrumented', () => {
beforeEach(() => {
trackedExtensions = extensions.map(trackInputRulesAndShortcuts);
- editor = new Editor({
+ editor = createTestEditor({
extensions: extensions.map(trackInputRulesAndShortcuts),
});
});
diff --git a/spec/frontend/content_editor/services/upload_file_spec.js b/spec/frontend/content_editor/services/upload_file_spec.js
new file mode 100644
index 00000000000..87c5298079e
--- /dev/null
+++ b/spec/frontend/content_editor/services/upload_file_spec.js
@@ -0,0 +1,46 @@
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import { uploadFile } from '~/content_editor/services/upload_file';
+import httpStatus from '~/lib/utils/http_status';
+
+describe('content_editor/services/upload_file', () => {
+ const uploadsPath = '/uploads';
+ const file = new File(['content'], 'file.txt');
+ // TODO: Replace with automated fixture
+ const renderedAttachmentLinkFixture =
+ '<a href="/group1/project1/-/wikis/test-file.png" data-canonical-src="test-file.png"><img data-src="/group1/project1/-/wikis/test-file.png" data-canonical-src="test-file.png"></a></p>';
+ const successResponse = {
+ link: {
+ markdown: '[GitLab](https://gitlab.com)',
+ },
+ };
+ const parseHTML = (html) => new DOMParser().parseFromString(html, 'text/html');
+ let mock;
+ let renderMarkdown;
+ let renderedMarkdown;
+
+ beforeEach(() => {
+ const formData = new FormData();
+ formData.append('file', file);
+
+ renderedMarkdown = parseHTML(renderedAttachmentLinkFixture);
+
+ mock = new MockAdapter(axios);
+ mock.onPost(uploadsPath, formData).reply(httpStatus.OK, successResponse);
+ renderMarkdown = jest.fn().mockResolvedValue(renderedAttachmentLinkFixture);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('returns src and canonicalSrc of uploaded file', async () => {
+ const response = await uploadFile({ uploadsPath, renderMarkdown, file });
+
+ expect(renderMarkdown).toHaveBeenCalledWith(successResponse.link.markdown);
+ expect(response).toEqual({
+ src: renderedMarkdown.querySelector('a').getAttribute('href'),
+ canonicalSrc: renderedMarkdown.querySelector('a').dataset.canonicalSrc,
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/test_utils.js b/spec/frontend/content_editor/test_utils.js
index 8e73aef678b..090e1d92218 100644
--- a/spec/frontend/content_editor/test_utils.js
+++ b/spec/frontend/content_editor/test_utils.js
@@ -3,6 +3,16 @@ import { Document } from '@tiptap/extension-document';
import { Paragraph } from '@tiptap/extension-paragraph';
import { Text } from '@tiptap/extension-text';
import { Editor } from '@tiptap/vue-2';
+import { builders, eq } from 'prosemirror-test-builder';
+
+export const createDocBuilder = ({ tiptapEditor, names = {} }) => {
+ const docBuilders = builders(tiptapEditor.schema, {
+ p: { nodeType: 'paragraph' },
+ ...names,
+ });
+
+ return { eq, builders: docBuilders };
+};
/**
* Creates an instance of the Tiptap Editor class
@@ -15,7 +25,7 @@ import { Editor } from '@tiptap/vue-2';
* include in the editor
* @returns An instance of a Tiptap’s Editor class
*/
-export const createTestEditor = ({ extensions = [] }) => {
+export const createTestEditor = ({ extensions = [] } = {}) => {
return new Editor({
extensions: [Document, Text, Paragraph, ...extensions],
});
diff --git a/spec/frontend/contributors/store/actions_spec.js b/spec/frontend/contributors/store/actions_spec.js
index 82b6492b779..a4054ab1fc8 100644
--- a/spec/frontend/contributors/store/actions_spec.js
+++ b/spec/frontend/contributors/store/actions_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/contributors/stores/actions';
import * as types from '~/contributors/stores/mutation_types';
-import { deprecatedCreateFlash as flashError } from '~/flash';
+import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
jest.mock('~/flash.js');
@@ -47,7 +47,9 @@ describe('Contributors store actions', () => {
[{ type: types.SET_LOADING_STATE, payload: true }],
[],
() => {
- expect(flashError).toHaveBeenCalledWith(expect.stringMatching('error'));
+ expect(createFlash).toHaveBeenCalledWith({
+ message: expect.stringMatching('error'),
+ });
mock.restore();
done();
},
diff --git a/spec/frontend/cycle_analytics/filter_bar_spec.js b/spec/frontend/cycle_analytics/filter_bar_spec.js
new file mode 100644
index 00000000000..407f21bd956
--- /dev/null
+++ b/spec/frontend/cycle_analytics/filter_bar_spec.js
@@ -0,0 +1,224 @@
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import Vuex from 'vuex';
+import {
+ filterMilestones,
+ filterLabels,
+} from 'jest/vue_shared/components/filtered_search_bar/store/modules/filters/mock_data';
+import FilterBar from '~/cycle_analytics/components/filter_bar.vue';
+import storeConfig from '~/cycle_analytics/store';
+import * as commonUtils from '~/lib/utils/common_utils';
+import * as urlUtils from '~/lib/utils/url_utility';
+import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
+import * as utils from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
+import initialFiltersState from '~/vue_shared/components/filtered_search_bar/store/modules/filters/state';
+import UrlSync from '~/vue_shared/components/url_sync.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const milestoneTokenType = 'milestone';
+const labelsTokenType = 'labels';
+const authorTokenType = 'author';
+const assigneesTokenType = 'assignees';
+
+const initialFilterBarState = {
+ selectedMilestone: null,
+ selectedAuthor: null,
+ selectedAssigneeList: null,
+ selectedLabelList: null,
+};
+
+const defaultParams = {
+ milestone_title: null,
+ 'not[milestone_title]': null,
+ author_username: null,
+ 'not[author_username]': null,
+ assignee_username: null,
+ 'not[assignee_username]': null,
+ label_name: null,
+ 'not[label_name]': null,
+};
+
+async function shouldMergeUrlParams(wrapper, result) {
+ await wrapper.vm.$nextTick();
+ expect(urlUtils.mergeUrlParams).toHaveBeenCalledWith(result, window.location.href, {
+ spreadArrays: true,
+ });
+ expect(commonUtils.historyPushState).toHaveBeenCalled();
+}
+
+describe('Filter bar', () => {
+ let wrapper;
+ let store;
+ let mock;
+
+ let setFiltersMock;
+
+ const createStore = (initialState = {}) => {
+ setFiltersMock = jest.fn();
+
+ return new Vuex.Store({
+ modules: {
+ filters: {
+ namespaced: true,
+ state: {
+ ...initialFiltersState(),
+ ...initialState,
+ },
+ actions: {
+ setFilters: setFiltersMock,
+ },
+ },
+ },
+ });
+ };
+
+ const createComponent = (initialStore) => {
+ return shallowMount(FilterBar, {
+ localVue,
+ store: initialStore,
+ propsData: {
+ groupPath: 'foo',
+ },
+ stubs: {
+ UrlSync,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ mock.restore();
+ });
+
+ const selectedMilestone = [filterMilestones[0]];
+ const selectedLabelList = [filterLabels[0]];
+
+ const findFilteredSearch = () => wrapper.findComponent(FilteredSearchBar);
+ const getSearchToken = (type) =>
+ findFilteredSearch()
+ .props('tokens')
+ .find((token) => token.type === type);
+
+ describe('default', () => {
+ beforeEach(() => {
+ store = createStore();
+ wrapper = createComponent(store);
+ });
+
+ it('renders FilteredSearchBar component', () => {
+ expect(findFilteredSearch().exists()).toBe(true);
+ });
+ });
+
+ describe('when the state has data', () => {
+ beforeEach(() => {
+ store = createStore({
+ milestones: { data: selectedMilestone },
+ labels: { data: selectedLabelList },
+ authors: { data: [] },
+ assignees: { data: [] },
+ });
+ wrapper = createComponent(store);
+ });
+
+ it('displays the milestone and label token', () => {
+ const tokens = findFilteredSearch().props('tokens');
+
+ expect(tokens).toHaveLength(4);
+ expect(tokens[0].type).toBe(milestoneTokenType);
+ expect(tokens[1].type).toBe(labelsTokenType);
+ expect(tokens[2].type).toBe(authorTokenType);
+ expect(tokens[3].type).toBe(assigneesTokenType);
+ });
+
+ it('provides the initial milestone token', () => {
+ const { initialMilestones: milestoneToken } = getSearchToken(milestoneTokenType);
+
+ expect(milestoneToken).toHaveLength(selectedMilestone.length);
+ });
+
+ it('provides the initial label token', () => {
+ const { initialLabels: labelToken } = getSearchToken(labelsTokenType);
+
+ expect(labelToken).toHaveLength(selectedLabelList.length);
+ });
+ });
+
+ describe('when the user interacts', () => {
+ beforeEach(() => {
+ store = createStore({
+ milestones: { data: filterMilestones },
+ labels: { data: filterLabels },
+ });
+ wrapper = createComponent(store);
+ jest.spyOn(utils, 'processFilters');
+ });
+
+ it('clicks on the search button, setFilters is dispatched', () => {
+ const filters = [
+ { type: 'milestone', value: { data: selectedMilestone[0].title, operator: '=' } },
+ { type: 'labels', value: { data: selectedLabelList[0].title, operator: '=' } },
+ ];
+
+ findFilteredSearch().vm.$emit('onFilter', filters);
+
+ expect(utils.processFilters).toHaveBeenCalledWith(filters);
+
+ expect(setFiltersMock).toHaveBeenCalledWith(expect.anything(), {
+ selectedLabelList: [{ value: selectedLabelList[0].title, operator: '=' }],
+ selectedMilestone: { value: selectedMilestone[0].title, operator: '=' },
+ selectedAssigneeList: [],
+ selectedAuthor: null,
+ });
+ });
+ });
+
+ describe.each([
+ ['selectedMilestone', 'milestone_title', { value: '12.0', operator: '=' }, '12.0'],
+ ['selectedAuthor', 'author_username', { value: 'rootUser', operator: '=' }, 'rootUser'],
+ [
+ 'selectedLabelList',
+ 'label_name',
+ [
+ { value: 'Afternix', operator: '=' },
+ { value: 'Brouceforge', operator: '=' },
+ ],
+ ['Afternix', 'Brouceforge'],
+ ],
+ [
+ 'selectedAssigneeList',
+ 'assignee_username',
+ [
+ { value: 'rootUser', operator: '=' },
+ { value: 'secondaryUser', operator: '=' },
+ ],
+ ['rootUser', 'secondaryUser'],
+ ],
+ ])('with a %s updates the %s url parameter', (stateKey, paramKey, payload, result) => {
+ beforeEach(() => {
+ commonUtils.historyPushState = jest.fn();
+ urlUtils.mergeUrlParams = jest.fn();
+
+ mock = new MockAdapter(axios);
+ wrapper = createComponent(storeConfig);
+
+ wrapper.vm.$store.dispatch('filters/setFilters', {
+ ...initialFilterBarState,
+ [stateKey]: payload,
+ });
+ });
+ it(`sets the ${paramKey} url parameter`, () => {
+ return shouldMergeUrlParams(wrapper, {
+ ...defaultParams,
+ [paramKey]: result,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/cycle_analytics/formatted_stage_count_spec.js b/spec/frontend/cycle_analytics/formatted_stage_count_spec.js
new file mode 100644
index 00000000000..1228b8511ea
--- /dev/null
+++ b/spec/frontend/cycle_analytics/formatted_stage_count_spec.js
@@ -0,0 +1,34 @@
+import { shallowMount } from '@vue/test-utils';
+import Component from '~/cycle_analytics/components/formatted_stage_count.vue';
+
+describe('Formatted Stage Count', () => {
+ let wrapper = null;
+
+ const createComponent = (stageCount = null) => {
+ wrapper = shallowMount(Component, {
+ propsData: {
+ stageCount,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it.each`
+ stageCount | expectedOutput
+ ${null} | ${'-'}
+ ${1} | ${'1 item'}
+ ${10} | ${'10 items'}
+ ${1000} | ${'1,000 items'}
+ ${1001} | ${'1,000+ items'}
+ `('returns "$expectedOutput" for stageCount=$stageCount', ({ stageCount, expectedOutput }) => {
+ createComponent(stageCount);
+ expect(wrapper.text()).toContain(expectedOutput);
+ });
+});
diff --git a/spec/frontend/cycle_analytics/mock_data.js b/spec/frontend/cycle_analytics/mock_data.js
index 242ea1932fb..4e6471d5f7b 100644
--- a/spec/frontend/cycle_analytics/mock_data.js
+++ b/spec/frontend/cycle_analytics/mock_data.js
@@ -1,5 +1,10 @@
-import { DEFAULT_VALUE_STREAM } from '~/cycle_analytics/constants';
+import { TEST_HOST } from 'helpers/test_constants';
+import { DEFAULT_VALUE_STREAM, DEFAULT_DAYS_IN_PAST } from '~/cycle_analytics/constants';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import { getDateInPast } from '~/lib/utils/datetime_utility';
+
+export const createdBefore = new Date(2019, 0, 14);
+export const createdAfter = getDateInPast(createdBefore, DEFAULT_DAYS_IN_PAST);
export const getStageByTitle = (stages, title) =>
stages.find((stage) => stage.title && stage.title.toLowerCase().trim() === title) || {};
@@ -169,6 +174,15 @@ export const stageMedians = {
staging: 388800,
};
+export const formattedStageMedians = {
+ issue: '2d',
+ plan: '1d',
+ review: '1w',
+ code: '1d',
+ test: '3d',
+ staging: '4d',
+};
+
export const allowedStages = [issueStage, planStage, codeStage];
export const transformedProjectStagePathData = [
@@ -212,6 +226,31 @@ export const transformedProjectStagePathData = [
export const selectedValueStream = DEFAULT_VALUE_STREAM;
+export const group = {
+ id: 1,
+ name: 'foo',
+ path: 'foo',
+ full_path: 'foo',
+ avatar_url: `${TEST_HOST}/images/home/nasa.svg`,
+};
+
+export const currentGroup = convertObjectPropsToCamelCase(group, { deep: true });
+
+export const selectedProjects = [
+ {
+ id: 'gid://gitlab/Project/1',
+ name: 'cool project',
+ pathWithNamespace: 'group/cool-project',
+ avatarUrl: null,
+ },
+ {
+ id: 'gid://gitlab/Project/2',
+ name: 'another cool project',
+ pathWithNamespace: 'group/another-cool-project',
+ avatarUrl: null,
+ },
+];
+
export const rawValueStreamStages = [
{
title: 'Issue',
diff --git a/spec/frontend/cycle_analytics/store/actions_spec.js b/spec/frontend/cycle_analytics/store/actions_spec.js
index 4f37e1266fb..8a8dd374f8e 100644
--- a/spec/frontend/cycle_analytics/store/actions_spec.js
+++ b/spec/frontend/cycle_analytics/store/actions_spec.js
@@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/cycle_analytics/store/actions';
import httpStatusCodes from '~/lib/utils/http_status';
-import { selectedStage, selectedValueStream } from '../mock_data';
+import { allowedStages, selectedStage, selectedValueStream } from '../mock_data';
const mockRequestPath = 'some/cool/path';
const mockFullPath = '/namespace/-/analytics/value_stream_analytics/value_streams';
@@ -25,6 +25,10 @@ const mockRequestedDataMutations = [
},
];
+const features = {
+ cycleAnalyticsForGroups: true,
+};
+
describe('Project Value Stream Analytics actions', () => {
let state;
let mock;
@@ -175,6 +179,7 @@ describe('Project Value Stream Analytics actions', () => {
beforeEach(() => {
state = {
+ features,
fullPath: mockFullPath,
};
mock = new MockAdapter(axios);
@@ -187,9 +192,33 @@ describe('Project Value Stream Analytics actions', () => {
state,
payload: {},
expectedMutations: [{ type: 'REQUEST_VALUE_STREAMS' }],
- expectedActions: [{ type: 'receiveValueStreamsSuccess' }, { type: 'setSelectedStage' }],
+ expectedActions: [
+ { type: 'receiveValueStreamsSuccess' },
+ { type: 'setSelectedStage' },
+ { type: 'fetchStageMedians' },
+ ],
}));
+ describe('with cycleAnalyticsForGroups=false', () => {
+ beforeEach(() => {
+ state = {
+ features: { cycleAnalyticsForGroups: false },
+ fullPath: mockFullPath,
+ };
+ mock = new MockAdapter(axios);
+ mock.onGet(mockValueStreamPath).reply(httpStatusCodes.OK);
+ });
+
+ it("does not dispatch the 'fetchStageMedians' request", () =>
+ testAction({
+ action: actions.fetchValueStreams,
+ state,
+ payload: {},
+ expectedMutations: [{ type: 'REQUEST_VALUE_STREAMS' }],
+ expectedActions: [{ type: 'receiveValueStreamsSuccess' }, { type: 'setSelectedStage' }],
+ }));
+ });
+
describe('with a failing request', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
@@ -280,4 +309,59 @@ describe('Project Value Stream Analytics actions', () => {
}));
});
});
+
+ describe('fetchStageMedians', () => {
+ const mockValueStreamPath = /median/;
+
+ const stageMediansPayload = [
+ { id: 'issue', value: null },
+ { id: 'plan', value: null },
+ { id: 'code', value: null },
+ ];
+
+ const stageMedianError = new Error(
+ `Request failed with status code ${httpStatusCodes.BAD_REQUEST}`,
+ );
+
+ beforeEach(() => {
+ state = {
+ fullPath: mockFullPath,
+ selectedValueStream,
+ stages: allowedStages,
+ };
+ mock = new MockAdapter(axios);
+ mock.onGet(mockValueStreamPath).reply(httpStatusCodes.OK);
+ });
+
+ it(`commits the 'REQUEST_STAGE_MEDIANS' and 'RECEIVE_STAGE_MEDIANS_SUCCESS' mutations`, () =>
+ testAction({
+ action: actions.fetchStageMedians,
+ state,
+ payload: {},
+ expectedMutations: [
+ { type: 'REQUEST_STAGE_MEDIANS' },
+ { type: 'RECEIVE_STAGE_MEDIANS_SUCCESS', payload: stageMediansPayload },
+ ],
+ expectedActions: [],
+ }));
+
+ describe('with a failing request', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ mock.onGet(mockValueStreamPath).reply(httpStatusCodes.BAD_REQUEST);
+ });
+
+ it(`commits the 'RECEIVE_VALUE_STREAM_STAGES_ERROR' mutation`, () =>
+ testAction({
+ action: actions.fetchStageMedians,
+ state,
+ payload: {},
+ expectedMutations: [
+ { type: 'REQUEST_STAGE_MEDIANS' },
+ { type: 'RECEIVE_STAGE_MEDIANS_ERROR', payload: stageMedianError },
+ ],
+ expectedActions: [],
+ }));
+ });
+ });
});
diff --git a/spec/frontend/cycle_analytics/store/mutations_spec.js b/spec/frontend/cycle_analytics/store/mutations_spec.js
index 88e1a13f506..77b19280517 100644
--- a/spec/frontend/cycle_analytics/store/mutations_spec.js
+++ b/spec/frontend/cycle_analytics/store/mutations_spec.js
@@ -1,3 +1,5 @@
+import { useFakeDate } from 'helpers/fake_date';
+import { DEFAULT_DAYS_TO_DISPLAY } from '~/cycle_analytics/constants';
import * as types from '~/cycle_analytics/store/mutation_types';
import mutations from '~/cycle_analytics/store/mutations';
import {
@@ -9,15 +11,23 @@ import {
selectedValueStream,
rawValueStreamStages,
valueStreamStages,
+ rawStageMedians,
+ formattedStageMedians,
} from '../mock_data';
let state;
const mockRequestPath = 'fake/request/path';
-const mockStartData = '2021-04-20';
+const mockCreatedAfter = '2020-06-18';
+const mockCreatedBefore = '2020-07-18';
+const features = {
+ cycleAnalyticsForGroups: true,
+};
describe('Project Value Stream Analytics mutations', () => {
+ useFakeDate(2020, 6, 18);
+
beforeEach(() => {
- state = {};
+ state = { features };
});
afterEach(() => {
@@ -46,6 +56,8 @@ describe('Project Value Stream Analytics mutations', () => {
${types.RECEIVE_STAGE_DATA_ERROR} | ${'selectedStageEvents'} | ${[]}
${types.RECEIVE_STAGE_DATA_ERROR} | ${'hasError'} | ${true}
${types.RECEIVE_STAGE_DATA_ERROR} | ${'isEmptyStage'} | ${true}
+ ${types.REQUEST_STAGE_MEDIANS} | ${'medians'} | ${{}}
+ ${types.RECEIVE_STAGE_MEDIANS_ERROR} | ${'medians'} | ${{}}
`('$mutation will set $stateKey to $value', ({ mutation, stateKey, value }) => {
mutations[mutation](state, {});
@@ -53,15 +65,19 @@ describe('Project Value Stream Analytics mutations', () => {
});
it.each`
- mutation | payload | stateKey | value
- ${types.INITIALIZE_VSA} | ${{ requestPath: mockRequestPath }} | ${'requestPath'} | ${mockRequestPath}
- ${types.SET_DATE_RANGE} | ${{ startDate: mockStartData }} | ${'startDate'} | ${mockStartData}
- ${types.SET_LOADING} | ${true} | ${'isLoading'} | ${true}
- ${types.SET_LOADING} | ${false} | ${'isLoading'} | ${false}
- ${types.SET_SELECTED_VALUE_STREAM} | ${selectedValueStream} | ${'selectedValueStream'} | ${selectedValueStream}
- ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${rawData} | ${'summary'} | ${convertedData.summary}
- ${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]}
- ${types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS} | ${{ stages: rawValueStreamStages }} | ${'stages'} | ${valueStreamStages}
+ mutation | payload | stateKey | value
+ ${types.INITIALIZE_VSA} | ${{ requestPath: mockRequestPath }} | ${'requestPath'} | ${mockRequestPath}
+ ${types.SET_DATE_RANGE} | ${{ startDate: DEFAULT_DAYS_TO_DISPLAY }} | ${'startDate'} | ${DEFAULT_DAYS_TO_DISPLAY}
+ ${types.SET_DATE_RANGE} | ${{ startDate: DEFAULT_DAYS_TO_DISPLAY }} | ${'createdAfter'} | ${mockCreatedAfter}
+ ${types.SET_DATE_RANGE} | ${{ startDate: DEFAULT_DAYS_TO_DISPLAY }} | ${'createdBefore'} | ${mockCreatedBefore}
+ ${types.SET_LOADING} | ${true} | ${'isLoading'} | ${true}
+ ${types.SET_LOADING} | ${false} | ${'isLoading'} | ${false}
+ ${types.SET_SELECTED_VALUE_STREAM} | ${selectedValueStream} | ${'selectedValueStream'} | ${selectedValueStream}
+ ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${rawData} | ${'summary'} | ${convertedData.summary}
+ ${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]}
+ ${types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS} | ${{ stages: rawValueStreamStages }} | ${'stages'} | ${valueStreamStages}
+ ${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]}
+ ${types.RECEIVE_STAGE_MEDIANS_SUCCESS} | ${rawStageMedians} | ${'medians'} | ${formattedStageMedians}
`(
'$mutation with $payload will set $stateKey to $value',
({ mutation, payload, stateKey, value }) => {
@@ -92,4 +108,35 @@ describe('Project Value Stream Analytics mutations', () => {
},
);
});
+
+ describe('with cycleAnalyticsForGroups=false', () => {
+ useFakeDate(2020, 6, 18);
+
+ beforeEach(() => {
+ state = { features: { cycleAnalyticsForGroups: false } };
+ });
+
+ const formattedMedians = {
+ code: '2d',
+ issue: '-',
+ plan: '21h',
+ review: '-',
+ staging: '2d',
+ test: '4h',
+ };
+
+ it.each`
+ mutation | payload | stateKey | value
+ ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${rawData} | ${'medians'} | ${formattedMedians}
+ ${types.REQUEST_CYCLE_ANALYTICS_DATA} | ${{}} | ${'medians'} | ${{}}
+ ${types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR} | ${{}} | ${'medians'} | ${{}}
+ `(
+ '$mutation with $payload will set $stateKey to $value',
+ ({ mutation, payload, stateKey, value }) => {
+ mutations[mutation](state, payload);
+
+ expect(state).toMatchObject({ [stateKey]: value });
+ },
+ );
+ });
});
diff --git a/spec/frontend/cycle_analytics/utils_spec.js b/spec/frontend/cycle_analytics/utils_spec.js
index 15137bb0571..1fecdfc0539 100644
--- a/spec/frontend/cycle_analytics/utils_spec.js
+++ b/spec/frontend/cycle_analytics/utils_spec.js
@@ -1,3 +1,4 @@
+import { useFakeDate } from 'helpers/fake_date';
import {
decorateEvents,
decorateData,
@@ -6,6 +7,7 @@ import {
medianTimeToParsedSeconds,
formatMedianValues,
filterStagesByHiddenStatus,
+ calculateFormattedDayInPast,
} from '~/cycle_analytics/utils';
import {
selectedStage,
@@ -149,4 +151,12 @@ describe('Value stream analytics utils', () => {
expect(filterStagesByHiddenStatus(mockStages, isHidden)).toEqual(result);
});
});
+
+ describe('calculateFormattedDayInPast', () => {
+ useFakeDate(1815, 11, 10);
+
+ it('will return 2 dates, now and past', () => {
+ expect(calculateFormattedDayInPast(5)).toEqual({ now: '1815-12-10', past: '1815-12-05' });
+ });
+ });
});
diff --git a/spec/frontend/cycle_analytics/value_stream_filters_spec.js b/spec/frontend/cycle_analytics/value_stream_filters_spec.js
new file mode 100644
index 00000000000..6e96a6d756a
--- /dev/null
+++ b/spec/frontend/cycle_analytics/value_stream_filters_spec.js
@@ -0,0 +1,91 @@
+import { shallowMount } from '@vue/test-utils';
+import Daterange from '~/analytics/shared/components/daterange.vue';
+import ProjectsDropdownFilter from '~/analytics/shared/components/projects_dropdown_filter.vue';
+import FilterBar from '~/cycle_analytics/components/filter_bar.vue';
+import ValueStreamFilters from '~/cycle_analytics/components/value_stream_filters.vue';
+import {
+ createdAfter as startDate,
+ createdBefore as endDate,
+ currentGroup,
+ selectedProjects,
+} from './mock_data';
+
+function createComponent(props = {}) {
+ return shallowMount(ValueStreamFilters, {
+ propsData: {
+ selectedProjects,
+ groupId: currentGroup.id,
+ groupPath: currentGroup.fullPath,
+ startDate,
+ endDate,
+ ...props,
+ },
+ });
+}
+
+describe('ValueStreamFilters', () => {
+ let wrapper;
+
+ const findProjectsDropdown = () => wrapper.findComponent(ProjectsDropdownFilter);
+ const findDateRangePicker = () => wrapper.findComponent(Daterange);
+ const findFilterBar = () => wrapper.findComponent(FilterBar);
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('will render the filter bar', () => {
+ expect(findFilterBar().exists()).toBe(true);
+ });
+
+ it('will render the projects dropdown', () => {
+ expect(findProjectsDropdown().exists()).toBe(true);
+ expect(wrapper.findComponent(ProjectsDropdownFilter).props()).toEqual(
+ expect.objectContaining({
+ queryParams: wrapper.vm.projectsQueryParams,
+ multiSelect: wrapper.vm.$options.multiProjectSelect,
+ }),
+ );
+ });
+
+ it('will render the date range picker', () => {
+ expect(findDateRangePicker().exists()).toBe(true);
+ });
+
+ it('will emit `selectProject` when a project is selected', () => {
+ findProjectsDropdown().vm.$emit('selected');
+
+ expect(wrapper.emitted('selectProject')).not.toBeUndefined();
+ });
+
+ it('will emit `setDateRange` when the date range changes', () => {
+ findDateRangePicker().vm.$emit('change');
+
+ expect(wrapper.emitted('setDateRange')).not.toBeUndefined();
+ });
+
+ describe('hasDateRangeFilter = false', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ hasDateRangeFilter: false });
+ });
+
+ it('will not render the date range picker', () => {
+ expect(findDateRangePicker().exists()).toBe(false);
+ });
+ });
+
+ describe('hasProjectFilter = false', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ hasProjectFilter: false });
+ });
+
+ it('will not render the project dropdown', () => {
+ expect(findProjectsDropdown().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/design_management/components/design_notes/__snapshots__/design_note_spec.js.snap b/spec/frontend/design_management/components/design_notes/__snapshots__/design_note_spec.js.snap
index 084a7e5d712..4ecf82a4714 100644
--- a/spec/frontend/design_management/components/design_notes/__snapshots__/design_note_spec.js.snap
+++ b/spec/frontend/design_management/components/design_notes/__snapshots__/design_note_spec.js.snap
@@ -6,7 +6,7 @@ exports[`Design note component should match the snapshot 1`] = `
id="note_123"
>
<user-avatar-link-stub
- imgalt=""
+ imgalt="foo-bar"
imgcssclasses=""
imgsize="40"
imgsrc=""
@@ -22,7 +22,8 @@ exports[`Design note component should match the snapshot 1`] = `
<div>
<gl-link-stub
class="js-user-link"
- data-user-id="author-id"
+ data-user-id="1"
+ data-username="foo-bar"
>
<span
class="note-header-author-name gl-font-weight-bold"
@@ -35,7 +36,7 @@ exports[`Design note component should match the snapshot 1`] = `
<span
class="note-headline-light"
>
- @
+ @foo-bar
</span>
</gl-link-stub>
diff --git a/spec/frontend/design_management/components/design_notes/design_note_spec.js b/spec/frontend/design_management/components/design_notes/design_note_spec.js
index 1cd556eabb4..3f5f5bcdfa7 100644
--- a/spec/frontend/design_management/components/design_notes/design_note_spec.js
+++ b/spec/frontend/design_management/components/design_notes/design_note_spec.js
@@ -9,7 +9,8 @@ const scrollIntoViewMock = jest.fn();
const note = {
id: 'gid://gitlab/DiffNote/123',
author: {
- id: 'author-id',
+ id: 'gid://gitlab/User/1',
+ username: 'foo-bar',
},
body: 'test',
userPermissions: {
diff --git a/spec/frontend/design_management/components/design_todo_button_spec.js b/spec/frontend/design_management/components/design_todo_button_spec.js
index 20686d0ae6c..757bf50c527 100644
--- a/spec/frontend/design_management/components/design_todo_button_spec.js
+++ b/spec/frontend/design_management/components/design_todo_button_spec.js
@@ -2,7 +2,7 @@ import { shallowMount, mount } from '@vue/test-utils';
import DesignTodoButton from '~/design_management/components/design_todo_button.vue';
import createDesignTodoMutation from '~/design_management/graphql/mutations/create_design_todo.mutation.graphql';
import todoMarkDoneMutation from '~/graphql_shared/mutations/todo_mark_done.mutation.graphql';
-import TodoButton from '~/vue_shared/components/todo_button.vue';
+import TodoButton from '~/vue_shared/components/sidebar/todo_toggle/todo_button.vue';
import mockDesign from '../mock_data/design';
const mockDesignWithPendingTodos = {
diff --git a/spec/frontend/design_management/pages/design/index_spec.js b/spec/frontend/design_management/pages/design/index_spec.js
index 11c88c3d0f5..1332e872246 100644
--- a/spec/frontend/design_management/pages/design/index_spec.js
+++ b/spec/frontend/design_management/pages/design/index_spec.js
@@ -20,7 +20,7 @@ import {
import {
DESIGN_TRACKING_PAGE_NAME,
DESIGN_SNOWPLOW_EVENT_TYPES,
- DESIGN_USAGE_PING_EVENT_TYPES,
+ DESIGN_SERVICE_PING_EVENT_TYPES,
} from '~/design_management/utils/tracking';
import createFlash from '~/flash';
import mockAllVersions from '../../mock_data/all_versions';
@@ -391,7 +391,7 @@ describe('Design management design index page', () => {
});
describe('with usage_data_design_action enabled', () => {
- it('tracks design view usage ping', () => {
+ it('tracks design view service ping', () => {
createComponent(
{ loading: true },
{
@@ -402,13 +402,13 @@ describe('Design management design index page', () => {
);
expect(Api.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
expect(Api.trackRedisHllUserEvent).toHaveBeenCalledWith(
- DESIGN_USAGE_PING_EVENT_TYPES.DESIGN_ACTION,
+ DESIGN_SERVICE_PING_EVENT_TYPES.DESIGN_ACTION,
);
});
});
describe('with usage_data_design_action disabled', () => {
- it("doesn't track design view usage ping", () => {
+ it("doesn't track design view service ping", () => {
createComponent({ loading: true });
expect(Api.trackRedisHllUserEvent).toHaveBeenCalledTimes(0);
});
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index 8a1c5547581..b5eb3e1713c 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -6,14 +6,19 @@ import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import { TEST_HOST } from 'spec/test_constants';
import App from '~/diffs/components/app.vue';
-import CollapsedFilesWarning from '~/diffs/components/collapsed_files_warning.vue';
import CommitWidget from '~/diffs/components/commit_widget.vue';
import CompareVersions from '~/diffs/components/compare_versions.vue';
import DiffFile from '~/diffs/components/diff_file.vue';
-import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
import NoChanges from '~/diffs/components/no_changes.vue';
import TreeList from '~/diffs/components/tree_list.vue';
+/* eslint-disable import/order */
+/* You know what: sometimes alphabetical isn't the best order */
+import CollapsedFilesWarning from '~/diffs/components/collapsed_files_warning.vue';
+import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
+import MergeConflictWarning from '~/diffs/components/merge_conflict_warning.vue';
+/* eslint-enable import/order */
+
import axios from '~/lib/utils/axios_utils';
import * as urlUtils from '~/lib/utils/url_utility';
import createDiffsStore from '../create_diffs_store';
@@ -541,6 +546,43 @@ describe('diffs/components/app', () => {
expect(getCollapsedFilesWarning(wrapper).exists()).toBe(false);
});
});
+
+ describe('merge conflicts', () => {
+ it('should render the merge conflicts banner if viewing the whole changeset and there are conflicts', () => {
+ createComponent({}, ({ state }) => {
+ Object.assign(state.diffs, {
+ latestDiff: true,
+ startVersion: null,
+ hasConflicts: true,
+ canMerge: false,
+ conflictResolutionPath: 'path',
+ });
+ });
+
+ expect(wrapper.find(MergeConflictWarning).exists()).toBe(true);
+ });
+
+ it.each`
+ prop | value
+ ${'latestDiff'} | ${false}
+ ${'startVersion'} | ${'notnull'}
+ ${'hasConflicts'} | ${false}
+ `(
+ "should not render if any of the MR properties aren't correct - like $prop: $value",
+ ({ prop, value }) => {
+ createComponent({}, ({ state }) => {
+ Object.assign(state.diffs, {
+ latestDiff: true,
+ startVersion: null,
+ hasConflicts: true,
+ [prop]: value,
+ });
+ });
+
+ expect(wrapper.find(MergeConflictWarning).exists()).toBe(false);
+ },
+ );
+ });
});
it('should display commit widget if store has a commit', () => {
diff --git a/spec/frontend/diffs/components/collapsed_files_warning_spec.js b/spec/frontend/diffs/components/collapsed_files_warning_spec.js
index 77c2e19cb68..46caeb01132 100644
--- a/spec/frontend/diffs/components/collapsed_files_warning_spec.js
+++ b/spec/frontend/diffs/components/collapsed_files_warning_spec.js
@@ -1,10 +1,13 @@
import { shallowMount, mount, createLocalVue } from '@vue/test-utils';
+import { nextTick } from 'vue';
import Vuex from 'vuex';
import CollapsedFilesWarning from '~/diffs/components/collapsed_files_warning.vue';
import { CENTERED_LIMITED_CONTAINER_CLASSES, EVT_EXPAND_ALL_FILES } from '~/diffs/constants';
import eventHub from '~/diffs/event_hub';
import createStore from '~/diffs/store/modules';
+import file from '../mock_data/diff_file';
+
const propsData = {
limited: true,
mergeable: true,
@@ -12,6 +15,13 @@ const propsData = {
};
const limitedClasses = CENTERED_LIMITED_CONTAINER_CLASSES.split(' ');
+async function files(store, count) {
+ const copies = Array(count).fill(file);
+ store.state.diffs.diffFiles.push(...copies);
+
+ return nextTick();
+}
+
describe('CollapsedFilesWarning', () => {
const localVue = createLocalVue();
let store;
@@ -42,48 +52,63 @@ describe('CollapsedFilesWarning', () => {
wrapper.destroy();
});
- it.each`
- limited | containerClasses
- ${true} | ${limitedClasses}
- ${false} | ${[]}
- `(
- 'has the correct container classes when limited is $limited',
- ({ limited, containerClasses }) => {
- createComponent({ limited });
-
- expect(wrapper.classes()).toEqual(['col-12'].concat(containerClasses));
- },
- );
-
- it.each`
- present | dismissed
- ${false} | ${true}
- ${true} | ${false}
- `('toggles the alert when dismissed is $dismissed', ({ present, dismissed }) => {
- createComponent({ dismissed });
-
- expect(wrapper.find('[data-testid="root"]').exists()).toBe(present);
- });
+ describe('when there is more than one file', () => {
+ it.each`
+ limited | containerClasses
+ ${true} | ${limitedClasses}
+ ${false} | ${[]}
+ `(
+ 'has the correct container classes when limited is $limited',
+ async ({ limited, containerClasses }) => {
+ createComponent({ limited });
+ await files(store, 2);
+
+ expect(wrapper.classes()).toEqual(['col-12'].concat(containerClasses));
+ },
+ );
- it('dismisses the component when the alert "x" is clicked', async () => {
- createComponent({}, { full: true });
+ it.each`
+ present | dismissed
+ ${false} | ${true}
+ ${true} | ${false}
+ `('toggles the alert when dismissed is $dismissed', async ({ present, dismissed }) => {
+ createComponent({ dismissed });
+ await files(store, 2);
- expect(wrapper.find('[data-testid="root"]').exists()).toBe(true);
+ expect(wrapper.find('[data-testid="root"]').exists()).toBe(present);
+ });
- getAlertCloseButton().element.click();
+ it('dismisses the component when the alert "x" is clicked', async () => {
+ createComponent({}, { full: true });
+ await files(store, 2);
- await wrapper.vm.$nextTick();
+ expect(wrapper.find('[data-testid="root"]').exists()).toBe(true);
- expect(wrapper.find('[data-testid="root"]').exists()).toBe(false);
- });
+ getAlertCloseButton().element.click();
- it(`emits the \`${EVT_EXPAND_ALL_FILES}\` event when the alert action button is clicked`, () => {
- createComponent({}, { full: true });
+ await wrapper.vm.$nextTick();
- jest.spyOn(eventHub, '$emit');
+ expect(wrapper.find('[data-testid="root"]').exists()).toBe(false);
+ });
- getAlertActionButton().vm.$emit('click');
+ it(`emits the \`${EVT_EXPAND_ALL_FILES}\` event when the alert action button is clicked`, async () => {
+ createComponent({}, { full: true });
+ await files(store, 2);
- expect(eventHub.$emit).toHaveBeenCalledWith(EVT_EXPAND_ALL_FILES);
+ jest.spyOn(eventHub, '$emit');
+
+ getAlertActionButton().vm.$emit('click');
+
+ expect(eventHub.$emit).toHaveBeenCalledWith(EVT_EXPAND_ALL_FILES);
+ });
+ });
+
+ describe('when there is a single file', () => {
+ it('should not display', async () => {
+ createComponent();
+ await files(store, 1);
+
+ expect(wrapper.find('[data-testid="root"]').exists()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/diffs/components/diff_content_spec.js b/spec/frontend/diffs/components/diff_content_spec.js
index 7012889440c..0a7dfc02c65 100644
--- a/spec/frontend/diffs/components/diff_content_spec.js
+++ b/spec/frontend/diffs/components/diff_content_spec.js
@@ -4,8 +4,6 @@ import Vuex from 'vuex';
import DiffContentComponent from '~/diffs/components/diff_content.vue';
import DiffDiscussions from '~/diffs/components/diff_discussions.vue';
import DiffView from '~/diffs/components/diff_view.vue';
-import InlineDiffView from '~/diffs/components/inline_diff_view.vue';
-import ParallelDiffView from '~/diffs/components/parallel_diff_view.vue';
import { IMAGE_DIFF_POSITION_TYPE } from '~/diffs/constants';
import { diffViewerModes } from '~/ide/constants';
import NoteForm from '~/notes/components/note_form.vue';
@@ -107,25 +105,10 @@ describe('DiffContent', () => {
});
const textDiffFile = { ...defaultProps.diffFile, viewer: { name: diffViewerModes.text } };
- it('should render diff inline view if `isInlineView` is true', () => {
- isInlineViewGetterMock.mockReturnValue(true);
- createComponent({ props: { diffFile: textDiffFile } });
-
- expect(wrapper.find(InlineDiffView).exists()).toBe(true);
- });
-
- it('should render parallel view if `isParallelView` getter is true', () => {
- isParallelViewGetterMock.mockReturnValue(true);
- createComponent({ props: { diffFile: textDiffFile } });
-
- expect(wrapper.find(ParallelDiffView).exists()).toBe(true);
- });
it('should render diff view if `unifiedDiffComponents` are true', () => {
- isParallelViewGetterMock.mockReturnValue(true);
createComponent({
props: { diffFile: textDiffFile },
- provide: { glFeatures: { unifiedDiffComponents: true } },
});
expect(wrapper.find(DiffView).exists()).toBe(true);
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index 1e8ad9344f2..99dda8d5deb 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -110,7 +110,6 @@ const findLoader = (wrapper) => wrapper.find('[data-testid="loader-icon"]');
const findToggleButton = (wrapper) => wrapper.find('[data-testid="expand-button"]');
const toggleFile = (wrapper) => findDiffHeader(wrapper).vm.$emit('toggleFile');
-const isDisplayNone = (element) => element.style.display === 'none';
const getReadableFile = () => JSON.parse(JSON.stringify(diffFileMockDataReadable));
const getUnreadableFile = () => JSON.parse(JSON.stringify(diffFileMockDataUnreadable));
@@ -305,9 +304,7 @@ describe('DiffFile', () => {
it('should not have any content at all', async () => {
await wrapper.vm.$nextTick();
- Array.from(findDiffContentArea(wrapper).element.children).forEach((child) => {
- expect(isDisplayNone(child)).toBe(true);
- });
+ expect(findDiffContentArea(wrapper).element.children.length).toBe(0);
});
it('should not have the class `has-body` to present the header differently', () => {
diff --git a/spec/frontend/diffs/components/diff_row_spec.js b/spec/frontend/diffs/components/diff_row_spec.js
index 137cc7e3f86..c0c92908701 100644
--- a/spec/frontend/diffs/components/diff_row_spec.js
+++ b/spec/frontend/diffs/components/diff_row_spec.js
@@ -8,6 +8,12 @@ import diffsModule from '~/diffs/store/modules';
import { findInteropAttributes } from '../find_interop_attributes';
import diffFileMockData from '../mock_data/diff_file';
+const showCommentForm = jest.fn();
+const enterdragging = jest.fn();
+const stopdragging = jest.fn();
+const setHighlightedRow = jest.fn();
+let wrapper;
+
describe('DiffRow', () => {
const testLines = [
{
@@ -29,7 +35,7 @@ describe('DiffRow', () => {
},
];
- const createWrapper = ({ props, state, actions, isLoggedIn = true }) => {
+ const createWrapper = ({ props, state = {}, actions, isLoggedIn = true }) => {
Vue.use(Vuex);
const diffs = diffsModule();
@@ -43,11 +49,25 @@ describe('DiffRow', () => {
getters,
});
+ window.gon = { current_user_id: isLoggedIn ? 1 : 0 };
+ const coverageFileData = state.coverageFiles?.files ? state.coverageFiles.files : {};
+
const propsData = {
fileHash: 'abc',
filePath: 'abc',
line: {},
index: 0,
+ isHighlighted: false,
+ fileLineCoverage: (file, line) => {
+ const hits = coverageFileData[file]?.[line];
+ if (hits) {
+ return { text: `Test coverage: ${hits} hits`, class: 'coverage' };
+ } else if (hits === 0) {
+ return { text: 'No test coverage', class: 'no-coverage' };
+ }
+
+ return {};
+ },
...props,
};
@@ -55,49 +75,37 @@ describe('DiffRow', () => {
glFeatures: { dragCommentSelection: true },
};
- return shallowMount(DiffRow, { propsData, store, provide });
+ return shallowMount(DiffRow, {
+ propsData,
+ store,
+ provide,
+ listeners: {
+ enterdragging,
+ stopdragging,
+ setHighlightedRow,
+ showCommentForm,
+ },
+ });
};
- it('isHighlighted returns true given line.left', () => {
- const props = {
- line: {
- left: {
- line_code: 'abc',
- },
- },
- };
- const state = { highlightedRow: 'abc' };
- const wrapper = createWrapper({ props, state });
- expect(wrapper.vm.isHighlighted).toBe(true);
- });
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
- it('isHighlighted returns true given line.right', () => {
- const props = {
- line: {
- right: {
- line_code: 'abc',
- },
- },
- };
- const state = { highlightedRow: 'abc' };
- const wrapper = createWrapper({ props, state });
- expect(wrapper.vm.isHighlighted).toBe(true);
- });
+ window.gon = {};
+ showCommentForm.mockReset();
+ enterdragging.mockReset();
+ stopdragging.mockReset();
+ setHighlightedRow.mockReset();
- it('isHighlighted returns false given line.left', () => {
- const props = {
- line: {
- left: {
- line_code: 'abc',
- },
- },
- };
- const wrapper = createWrapper({ props });
- expect(wrapper.vm.isHighlighted).toBe(false);
+ Object.values(DiffRow).forEach(({ cache }) => {
+ if (cache) {
+ cache.clear();
+ }
+ });
});
- const getCommentButton = (wrapper, side) =>
- wrapper.find(`[data-testid="${side}-comment-button"]`);
+ const getCommentButton = (side) => wrapper.find(`[data-testid="${side}-comment-button"]`);
describe.each`
side
@@ -105,33 +113,30 @@ describe('DiffRow', () => {
${'right'}
`('$side side', ({ side }) => {
it(`renders empty cells if ${side} is unavailable`, () => {
- const wrapper = createWrapper({ props: { line: testLines[2], inline: false } });
+ wrapper = createWrapper({ props: { line: testLines[2], inline: false } });
expect(wrapper.find(`[data-testid="${side}-line-number"]`).exists()).toBe(false);
expect(wrapper.find(`[data-testid="${side}-empty-cell"]`).exists()).toBe(true);
});
describe('comment button', () => {
- const showCommentForm = jest.fn();
let line;
beforeEach(() => {
- showCommentForm.mockReset();
// https://eslint.org/docs/rules/prefer-destructuring#when-not-to-use-it
// eslint-disable-next-line prefer-destructuring
line = testLines[3];
});
it('renders', () => {
- const wrapper = createWrapper({ props: { line, inline: false } });
- expect(getCommentButton(wrapper, side).exists()).toBe(true);
+ wrapper = createWrapper({ props: { line, inline: false } });
+ expect(getCommentButton(side).exists()).toBe(true);
});
it('responds to click and keyboard events', async () => {
- const wrapper = createWrapper({
+ wrapper = createWrapper({
props: { line, inline: false },
- actions: { showCommentForm },
});
- const commentButton = getCommentButton(wrapper, side);
+ const commentButton = getCommentButton(side);
await commentButton.trigger('click');
await commentButton.trigger('keydown.enter');
@@ -142,11 +147,10 @@ describe('DiffRow', () => {
it('ignores click and keyboard events when comments are disabled', async () => {
line[side].commentsDisabled = true;
- const wrapper = createWrapper({
+ wrapper = createWrapper({
props: { line, inline: false },
- actions: { showCommentForm },
});
- const commentButton = getCommentButton(wrapper, side);
+ const commentButton = getCommentButton(side);
await commentButton.trigger('click');
await commentButton.trigger('keydown.enter');
@@ -157,19 +161,20 @@ describe('DiffRow', () => {
});
it('renders avatars', () => {
- const wrapper = createWrapper({ props: { line: testLines[0], inline: false } });
+ wrapper = createWrapper({ props: { line: testLines[0], inline: false } });
+
expect(wrapper.find(`[data-testid="${side}-discussions"]`).exists()).toBe(true);
});
});
it('renders left line numbers', () => {
- const wrapper = createWrapper({ props: { line: testLines[0] } });
+ wrapper = createWrapper({ props: { line: testLines[0] } });
const lineNumber = testLines[0].left.old_line;
expect(wrapper.find(`[data-linenumber="${lineNumber}"]`).exists()).toBe(true);
});
it('renders right line numbers', () => {
- const wrapper = createWrapper({ props: { line: testLines[0] } });
+ wrapper = createWrapper({ props: { line: testLines[0] } });
const lineNumber = testLines[0].right.new_line;
expect(wrapper.find(`[data-linenumber="${lineNumber}"]`).exists()).toBe(true);
});
@@ -186,12 +191,10 @@ describe('DiffRow', () => {
${'left'}
${'right'}
`('emits `enterdragging` onDragEnter $side side', ({ side }) => {
- const expectation = { ...line[side], index: 0 };
- const wrapper = createWrapper({ props: { line } });
+ wrapper = createWrapper({ props: { line } });
fireEvent.dragEnter(getByTestId(wrapper.element, `${side}-side`));
- expect(wrapper.emitted().enterdragging).toBeTruthy();
- expect(wrapper.emitted().enterdragging[0]).toEqual([expectation]);
+ expect(enterdragging).toHaveBeenCalledWith({ ...line[side], index: 0 });
});
it.each`
@@ -199,10 +202,10 @@ describe('DiffRow', () => {
${'left'}
${'right'}
`('emits `stopdragging` onDrop $side side', ({ side }) => {
- const wrapper = createWrapper({ props: { line } });
+ wrapper = createWrapper({ props: { line } });
fireEvent.dragEnd(getByTestId(wrapper.element, `${side}-side`));
- expect(wrapper.emitted().stopdragging).toBeTruthy();
+ expect(stopdragging).toHaveBeenCalled();
});
});
@@ -231,7 +234,7 @@ describe('DiffRow', () => {
it('for lines with coverage', () => {
const coverageFiles = { files: { [name]: { [line]: 5 } } };
- const wrapper = createWrapper({ props, state: { coverageFiles } });
+ wrapper = createWrapper({ props, state: { coverageFiles } });
const coverage = wrapper.find('.line-coverage.right-side');
expect(coverage.attributes('title')).toContain('Test coverage: 5 hits');
@@ -240,7 +243,7 @@ describe('DiffRow', () => {
it('for lines without coverage', () => {
const coverageFiles = { files: { [name]: { [line]: 0 } } };
- const wrapper = createWrapper({ props, state: { coverageFiles } });
+ wrapper = createWrapper({ props, state: { coverageFiles } });
const coverage = wrapper.find('.line-coverage.right-side');
expect(coverage.attributes('title')).toContain('No test coverage');
@@ -249,7 +252,7 @@ describe('DiffRow', () => {
it('for unknown lines', () => {
const coverageFiles = {};
- const wrapper = createWrapper({ props, state: { coverageFiles } });
+ wrapper = createWrapper({ props, state: { coverageFiles } });
const coverage = wrapper.find('.line-coverage.right-side');
expect(coverage.attributes('title')).toBeFalsy();
@@ -267,7 +270,7 @@ describe('DiffRow', () => {
${'with parallel and no left side'} | ${{ right: { old_line: 3, new_line: 5 } }} | ${false} | ${null} | ${{ type: 'new', line: '5', newLine: '5' }}
${'with parallel and right side'} | ${{ left: { old_line: 3 }, right: { new_line: 5 } }} | ${false} | ${{ type: 'old', line: '3', oldLine: '3' }} | ${{ type: 'new', line: '5', newLine: '5' }}
`('$desc, sets interop data attributes', ({ line, inline, leftSide, rightSide }) => {
- const wrapper = createWrapper({ props: { line, inline } });
+ wrapper = createWrapper({ props: { line, inline } });
expect(findInteropAttributes(wrapper, '[data-testid="left-side"]')).toEqual(leftSide);
expect(findInteropAttributes(wrapper, '[data-testid="right-side"]')).toEqual(rightSide);
diff --git a/spec/frontend/diffs/components/diff_row_utils_spec.js b/spec/frontend/diffs/components/diff_row_utils_spec.js
index 47ae3cd5867..930b8bcdb08 100644
--- a/spec/frontend/diffs/components/diff_row_utils_spec.js
+++ b/spec/frontend/diffs/components/diff_row_utils_spec.js
@@ -11,24 +11,21 @@ const LINE_CODE = 'abc123';
describe('isHighlighted', () => {
it('should return true if line is highlighted', () => {
- const state = { diffs: { highlightedRow: LINE_CODE } };
const line = { line_code: LINE_CODE };
const isCommented = false;
- expect(utils.isHighlighted(state, line, isCommented)).toBe(true);
+ expect(utils.isHighlighted(LINE_CODE, line, isCommented)).toBe(true);
});
it('should return false if line is not highlighted', () => {
- const state = { diffs: { highlightedRow: 'xxx' } };
const line = { line_code: LINE_CODE };
const isCommented = false;
- expect(utils.isHighlighted(state, line, isCommented)).toBe(false);
+ expect(utils.isHighlighted('xxx', line, isCommented)).toBe(false);
});
it('should return true if isCommented is true', () => {
- const state = { diffs: { highlightedRow: 'xxx' } };
const line = { line_code: LINE_CODE };
const isCommented = true;
- expect(utils.isHighlighted(state, line, isCommented)).toBe(true);
+ expect(utils.isHighlighted('xxx', line, isCommented)).toBe(true);
});
});
@@ -143,19 +140,14 @@ describe('addCommentTooltip', () => {
'Commenting on symbolic links that replace or are replaced by files is currently not supported.';
const brokenRealTooltip =
'Commenting on files that replace or are replaced by symbolic links is currently not supported.';
- const commentTooltip = 'Add a comment to this line';
const dragTooltip = 'Add a comment to this line or drag for multiple lines';
it('should return default tooltip', () => {
expect(utils.addCommentTooltip()).toBeUndefined();
});
- it('should return comment tooltip', () => {
- expect(utils.addCommentTooltip({})).toEqual(commentTooltip);
- });
-
it('should return drag comment tooltip when dragging is enabled', () => {
- expect(utils.addCommentTooltip({}, true)).toEqual(dragTooltip);
+ expect(utils.addCommentTooltip({})).toEqual(dragTooltip);
});
it('should return broken symlink tooltip', () => {
@@ -258,30 +250,3 @@ describe('mapParallel', () => {
expect(mapped.right).toMatchObject(rightExpectation);
});
});
-
-describe('mapInline', () => {
- it('should assign computed properties to the line object', () => {
- const content = {
- diffFile: {},
- shouldRenderDraftRow: () => false,
- };
- const line = {
- discussions: [{}],
- discussionsExpanded: true,
- hasForm: true,
- };
- const expectation = {
- commentRowClasses: '',
- hasDiscussions: true,
- isContextLine: false,
- isMatchLine: false,
- isMetaLine: false,
- renderDiscussion: true,
- hasDraft: false,
- hasCommentForm: true,
- };
- const mapped = utils.mapInline(content)(line);
-
- expect(mapped).toMatchObject(expectation);
- });
-});
diff --git a/spec/frontend/diffs/components/diff_view_spec.js b/spec/frontend/diffs/components/diff_view_spec.js
index 83b173c1f5d..3af66526050 100644
--- a/spec/frontend/diffs/components/diff_view_spec.js
+++ b/spec/frontend/diffs/components/diff_view_spec.js
@@ -28,7 +28,7 @@ describe('DiffView', () => {
};
const diffs = {
actions: { showCommentForm },
- getters: { commitId: () => 'abc123' },
+ getters: { commitId: () => 'abc123', fileLineCoverage: () => ({}) },
namespaced: true,
};
const notes = {
@@ -41,7 +41,7 @@ describe('DiffView', () => {
});
const propsData = {
- diffFile: {},
+ diffFile: { file_hash: '123' },
diffLines: [],
...props,
};
@@ -84,15 +84,15 @@ describe('DiffView', () => {
it('sets `dragStart` onStartDragging', () => {
const wrapper = createWrapper({ diffLines: [{}] });
- wrapper.findComponent(DiffRow).vm.$emit('startdragging', { test: true });
- expect(wrapper.vm.dragStart).toEqual({ test: true });
+ wrapper.findComponent(DiffRow).vm.$emit('startdragging', { line: { test: true } });
+ expect(wrapper.vm.idState.dragStart).toEqual({ test: true });
});
it('does not call `setSelectedCommentPosition` on different chunks onDragOver', () => {
const wrapper = createWrapper({ diffLines: [{}] });
const diffRow = getDiffRow(wrapper);
- diffRow.$emit('startdragging', { chunk: 0 });
+ diffRow.$emit('startdragging', { line: { chunk: 0 } });
diffRow.$emit('enterdragging', { chunk: 1 });
expect(setSelectedCommentPosition).not.toHaveBeenCalled();
@@ -109,7 +109,7 @@ describe('DiffView', () => {
const wrapper = createWrapper({ diffLines: [{}] });
const diffRow = getDiffRow(wrapper);
- diffRow.$emit('startdragging', { chunk: 1, index: start });
+ diffRow.$emit('startdragging', { line: { chunk: 1, index: start } });
diffRow.$emit('enterdragging', { chunk: 1, index: end });
const arg = setSelectedCommentPosition.mock.calls[0][1];
@@ -122,11 +122,11 @@ describe('DiffView', () => {
const wrapper = createWrapper({ diffLines: [{}] });
const diffRow = getDiffRow(wrapper);
- diffRow.$emit('startdragging', { test: true });
- expect(wrapper.vm.dragStart).toEqual({ test: true });
+ diffRow.$emit('startdragging', { line: { test: true } });
+ expect(wrapper.vm.idState.dragStart).toEqual({ test: true });
diffRow.$emit('stopdragging');
- expect(wrapper.vm.dragStart).toBeNull();
+ expect(wrapper.vm.idState.dragStart).toBeNull();
expect(showCommentForm).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/diffs/components/inline_diff_table_row_spec.js b/spec/frontend/diffs/components/inline_diff_table_row_spec.js
deleted file mode 100644
index 9c3e00cd6cf..00000000000
--- a/spec/frontend/diffs/components/inline_diff_table_row_spec.js
+++ /dev/null
@@ -1,325 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import DiffGutterAvatars from '~/diffs/components/diff_gutter_avatars.vue';
-import { mapInline } from '~/diffs/components/diff_row_utils';
-import InlineDiffTableRow from '~/diffs/components/inline_diff_table_row.vue';
-import { createStore } from '~/mr_notes/stores';
-import { findInteropAttributes } from '../find_interop_attributes';
-import discussionsMockData from '../mock_data/diff_discussions';
-import diffFileMockData from '../mock_data/diff_file';
-
-const TEST_USER_ID = 'abc123';
-const TEST_USER = { id: TEST_USER_ID };
-
-describe('InlineDiffTableRow', () => {
- let wrapper;
- let store;
- const mockDiffContent = {
- diffFile: diffFileMockData,
- shouldRenderDraftRow: jest.fn(),
- hasParallelDraftLeft: jest.fn(),
- hasParallelDraftRight: jest.fn(),
- draftForLine: jest.fn(),
- };
-
- const applyMap = mapInline(mockDiffContent);
- const thisLine = applyMap(diffFileMockData.highlighted_diff_lines[0]);
-
- const createComponent = (props = {}, propsStore = store) => {
- wrapper = shallowMount(InlineDiffTableRow, {
- store: propsStore,
- propsData: {
- line: thisLine,
- fileHash: diffFileMockData.file_hash,
- filePath: diffFileMockData.file_path,
- contextLinesPath: 'contextLinesPath',
- isHighlighted: false,
- ...props,
- },
- });
- };
-
- beforeEach(() => {
- store = createStore();
- store.state.notes.userData = TEST_USER;
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('does not add hll class to line content when line does not match highlighted row', () => {
- createComponent();
- expect(wrapper.find('.line_content').classes('hll')).toBe(false);
- });
-
- it('adds hll class to lineContent when line is the highlighted row', () => {
- store.state.diffs.highlightedRow = thisLine.line_code;
- createComponent({}, store);
- expect(wrapper.find('.line_content').classes('hll')).toBe(true);
- });
-
- it('adds hll class to lineContent when line is part of a multiline comment', () => {
- createComponent({ isCommented: true });
- expect(wrapper.find('.line_content').classes('hll')).toBe(true);
- });
-
- describe('sets coverage title and class', () => {
- it('for lines with coverage', () => {
- const name = diffFileMockData.file_path;
- const line = thisLine.new_line;
-
- store.state.diffs.coverageFiles = { files: { [name]: { [line]: 5 } } };
- createComponent({}, store);
- const coverage = wrapper.find('.line-coverage');
-
- expect(coverage.attributes('title')).toContain('Test coverage: 5 hits');
- expect(coverage.classes('coverage')).toBe(true);
- });
-
- it('for lines without coverage', () => {
- const name = diffFileMockData.file_path;
- const line = thisLine.new_line;
-
- store.state.diffs.coverageFiles = { files: { [name]: { [line]: 0 } } };
- createComponent({}, store);
- const coverage = wrapper.find('.line-coverage');
-
- expect(coverage.attributes('title')).toContain('No test coverage');
- expect(coverage.classes('no-coverage')).toBe(true);
- });
-
- it('for unknown lines', () => {
- store.state.diffs.coverageFiles = {};
- createComponent({}, store);
-
- const coverage = wrapper.find('.line-coverage');
-
- expect(coverage.attributes('title')).toBeUndefined();
- expect(coverage.classes('coverage')).toBe(false);
- expect(coverage.classes('no-coverage')).toBe(false);
- });
- });
-
- describe('Table Cells', () => {
- const findNewTd = () => wrapper.find({ ref: 'newTd' });
- const findOldTd = () => wrapper.find({ ref: 'oldTd' });
-
- describe('td', () => {
- it('highlights when isHighlighted true', () => {
- store.state.diffs.highlightedRow = thisLine.line_code;
- createComponent({}, store);
-
- expect(findNewTd().classes()).toContain('hll');
- expect(findOldTd().classes()).toContain('hll');
- });
-
- it('does not highlight when isHighlighted false', () => {
- createComponent();
-
- expect(findNewTd().classes()).not.toContain('hll');
- expect(findOldTd().classes()).not.toContain('hll');
- });
- });
-
- describe('comment button', () => {
- const findNoteButton = () => wrapper.find({ ref: 'addDiffNoteButton' });
-
- it.each`
- userData | expectation
- ${TEST_USER} | ${true}
- ${null} | ${false}
- `('exists is $expectation - with userData ($userData)', ({ userData, expectation }) => {
- store.state.notes.userData = userData;
- createComponent({}, store);
-
- expect(findNoteButton().exists()).toBe(expectation);
- });
-
- it.each`
- isHover | line | expectation
- ${true} | ${{ ...thisLine, discussions: [] }} | ${true}
- ${false} | ${{ ...thisLine, discussions: [] }} | ${false}
- ${true} | ${{ ...thisLine, type: 'context', discussions: [] }} | ${false}
- ${true} | ${{ ...thisLine, type: 'old-nonewline', discussions: [] }} | ${false}
- ${true} | ${{ ...thisLine, discussions: [{}] }} | ${false}
- `('visible is $expectation - line ($line)', ({ isHover, line, expectation }) => {
- createComponent({ line: applyMap(line) });
- wrapper.setData({ isHover });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findNoteButton().isVisible()).toBe(expectation);
- });
- });
-
- it.each`
- disabled | commentsDisabled
- ${'disabled'} | ${true}
- ${undefined} | ${false}
- `(
- 'has attribute disabled=$disabled when the outer component has prop commentsDisabled=$commentsDisabled',
- ({ disabled, commentsDisabled }) => {
- createComponent({
- line: applyMap({ ...thisLine, commentsDisabled }),
- });
-
- wrapper.setData({ isHover: true });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findNoteButton().attributes('disabled')).toBe(disabled);
- });
- },
- );
-
- const symlinkishFileTooltip =
- 'Commenting on symbolic links that replace or are replaced by files is currently not supported.';
- const realishFileTooltip =
- 'Commenting on files that replace or are replaced by symbolic links is currently not supported.';
- const otherFileTooltip = 'Add a comment to this line';
- const findTooltip = () => wrapper.find({ ref: 'addNoteTooltip' });
-
- it.each`
- tooltip | commentsDisabled
- ${symlinkishFileTooltip} | ${{ wasSymbolic: true }}
- ${symlinkishFileTooltip} | ${{ isSymbolic: true }}
- ${realishFileTooltip} | ${{ wasReal: true }}
- ${realishFileTooltip} | ${{ isReal: true }}
- ${otherFileTooltip} | ${false}
- `(
- 'has the correct tooltip when commentsDisabled=$commentsDisabled',
- ({ tooltip, commentsDisabled }) => {
- createComponent({
- line: applyMap({ ...thisLine, commentsDisabled }),
- });
-
- wrapper.setData({ isHover: true });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findTooltip().attributes('title')).toBe(tooltip);
- });
- },
- );
- });
-
- describe('line number', () => {
- const findLineNumberOld = () => wrapper.find({ ref: 'lineNumberRefOld' });
- const findLineNumberNew = () => wrapper.find({ ref: 'lineNumberRefNew' });
-
- it('renders line numbers in correct cells', () => {
- createComponent();
-
- expect(findLineNumberOld().exists()).toBe(false);
- expect(findLineNumberNew().exists()).toBe(true);
- });
-
- describe('with lineNumber prop', () => {
- const TEST_LINE_CODE = 'LC_42';
- const TEST_LINE_NUMBER = 1;
-
- describe.each`
- lineProps | findLineNumber | expectedHref | expectedClickArg
- ${{ line_code: TEST_LINE_CODE, old_line: TEST_LINE_NUMBER }} | ${findLineNumberOld} | ${`#${TEST_LINE_CODE}`} | ${TEST_LINE_CODE}
- ${{ line_code: undefined, old_line: TEST_LINE_NUMBER }} | ${findLineNumberOld} | ${'#'} | ${undefined}
- ${{ line_code: undefined, left: { line_code: TEST_LINE_CODE }, old_line: TEST_LINE_NUMBER }} | ${findLineNumberOld} | ${'#'} | ${TEST_LINE_CODE}
- ${{ line_code: undefined, right: { line_code: TEST_LINE_CODE }, new_line: TEST_LINE_NUMBER }} | ${findLineNumberNew} | ${'#'} | ${TEST_LINE_CODE}
- `(
- 'with line ($lineProps)',
- ({ lineProps, findLineNumber, expectedHref, expectedClickArg }) => {
- beforeEach(() => {
- jest.spyOn(store, 'dispatch').mockImplementation();
- createComponent({
- line: applyMap({ ...thisLine, ...lineProps }),
- });
- });
-
- it('renders', () => {
- expect(findLineNumber().exists()).toBe(true);
- expect(findLineNumber().attributes()).toEqual({
- href: expectedHref,
- 'data-linenumber': TEST_LINE_NUMBER.toString(),
- });
- });
-
- it('on click, dispatches setHighlightedRow', () => {
- expect(store.dispatch).toHaveBeenCalledTimes(1);
-
- findLineNumber().trigger('click');
-
- expect(store.dispatch).toHaveBeenCalledWith(
- 'diffs/setHighlightedRow',
- expectedClickArg,
- );
- expect(store.dispatch).toHaveBeenCalledTimes(2);
- });
- },
- );
- });
- });
-
- describe('diff-gutter-avatars', () => {
- const TEST_LINE_CODE = 'LC_42';
- const TEST_FILE_HASH = diffFileMockData.file_hash;
- const findAvatars = () => wrapper.find(DiffGutterAvatars);
- let line;
-
- beforeEach(() => {
- jest.spyOn(store, 'dispatch').mockImplementation();
-
- line = {
- line_code: TEST_LINE_CODE,
- type: 'new',
- old_line: null,
- new_line: 1,
- discussions: [{ ...discussionsMockData }],
- discussionsExpanded: true,
- text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- rich_text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- meta_data: null,
- };
- });
-
- describe('with showCommentButton', () => {
- it('renders if line has discussions', () => {
- createComponent({ line: applyMap(line) });
-
- expect(findAvatars().props()).toEqual({
- discussions: line.discussions,
- discussionsExpanded: line.discussionsExpanded,
- });
- });
-
- it('does notrender if line has no discussions', () => {
- line.discussions = [];
- createComponent({ line: applyMap(line) });
-
- expect(findAvatars().exists()).toEqual(false);
- });
-
- it('toggles line discussion', () => {
- createComponent({ line: applyMap(line) });
-
- expect(store.dispatch).toHaveBeenCalledTimes(1);
-
- findAvatars().vm.$emit('toggleLineDiscussions');
-
- expect(store.dispatch).toHaveBeenCalledWith('diffs/toggleLineDiscussions', {
- lineCode: TEST_LINE_CODE,
- fileHash: TEST_FILE_HASH,
- expanded: !line.discussionsExpanded,
- });
- });
- });
- });
- });
-
- describe('interoperability', () => {
- it.each`
- desc | line | expectation
- ${'with type old'} | ${{ ...thisLine, type: 'old', old_line: 3, new_line: 5 }} | ${{ type: 'old', line: '3', oldLine: '3', newLine: '5' }}
- ${'with type new'} | ${{ ...thisLine, type: 'new', old_line: 3, new_line: 5 }} | ${{ type: 'new', line: '5', oldLine: '3', newLine: '5' }}
- `('$desc, sets interop data attributes', ({ line, expectation }) => {
- createComponent({ line });
-
- expect(findInteropAttributes(wrapper)).toEqual(expectation);
- });
- });
-});
diff --git a/spec/frontend/diffs/components/inline_diff_view_spec.js b/spec/frontend/diffs/components/inline_diff_view_spec.js
deleted file mode 100644
index 27834804f77..00000000000
--- a/spec/frontend/diffs/components/inline_diff_view_spec.js
+++ /dev/null
@@ -1,57 +0,0 @@
-import '~/behaviors/markdown/render_gfm';
-import { getByText } from '@testing-library/dom';
-import { mount } from '@vue/test-utils';
-import { mapInline } from '~/diffs/components/diff_row_utils';
-import InlineDiffView from '~/diffs/components/inline_diff_view.vue';
-import { createStore } from '~/mr_notes/stores';
-import discussionsMockData from '../mock_data/diff_discussions';
-import diffFileMockData from '../mock_data/diff_file';
-
-describe('InlineDiffView', () => {
- let wrapper;
- const getDiffFileMock = () => ({ ...diffFileMockData });
- const getDiscussionsMockData = () => [{ ...discussionsMockData }];
- const notesLength = getDiscussionsMockData()[0].notes.length;
-
- const setup = (diffFile, diffLines) => {
- const mockDiffContent = {
- diffFile,
- shouldRenderDraftRow: jest.fn(),
- };
-
- const store = createStore();
-
- store.dispatch('diffs/setInlineDiffViewType');
- wrapper = mount(InlineDiffView, {
- store,
- propsData: {
- diffFile,
- diffLines: diffLines.map(mapInline(mockDiffContent)),
- },
- });
- };
-
- describe('template', () => {
- it('should have rendered diff lines', () => {
- const diffFile = getDiffFileMock();
- setup(diffFile, diffFile.highlighted_diff_lines);
-
- expect(wrapper.findAll('tr.line_holder').length).toEqual(8);
- expect(wrapper.findAll('tr.line_holder.new').length).toEqual(4);
- expect(wrapper.findAll('tr.line_expansion.match').length).toEqual(1);
- getByText(wrapper.element, /Bad dates/i);
- });
-
- it('should render discussions', () => {
- const diffFile = getDiffFileMock();
- diffFile.highlighted_diff_lines[1].discussions = getDiscussionsMockData();
- diffFile.highlighted_diff_lines[1].discussionsExpanded = true;
- setup(diffFile, diffFile.highlighted_diff_lines);
-
- expect(wrapper.findAll('.notes_holder').length).toEqual(1);
- expect(wrapper.findAll('.notes_holder .note').length).toEqual(notesLength + 1);
- getByText(wrapper.element, 'comment 5');
- wrapper.vm.$store.dispatch('setInitialNotes', []);
- });
- });
-});
diff --git a/spec/frontend/diffs/components/parallel_diff_table_row_spec.js b/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
deleted file mode 100644
index ed191d849fd..00000000000
--- a/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
+++ /dev/null
@@ -1,445 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import DiffGutterAvatars from '~/diffs/components/diff_gutter_avatars.vue';
-import { mapParallel } from '~/diffs/components/diff_row_utils';
-import ParallelDiffTableRow from '~/diffs/components/parallel_diff_table_row.vue';
-import { createStore } from '~/mr_notes/stores';
-import { findInteropAttributes } from '../find_interop_attributes';
-import discussionsMockData from '../mock_data/diff_discussions';
-import diffFileMockData from '../mock_data/diff_file';
-
-describe('ParallelDiffTableRow', () => {
- const mockDiffContent = {
- diffFile: diffFileMockData,
- shouldRenderDraftRow: jest.fn(),
- hasParallelDraftLeft: jest.fn(),
- hasParallelDraftRight: jest.fn(),
- draftForLine: jest.fn(),
- };
-
- const applyMap = mapParallel(mockDiffContent);
-
- describe('when one side is empty', () => {
- let wrapper;
- let vm;
- const thisLine = diffFileMockData.parallel_diff_lines[0];
- const rightLine = diffFileMockData.parallel_diff_lines[0].right;
-
- beforeEach(() => {
- wrapper = shallowMount(ParallelDiffTableRow, {
- store: createStore(),
- propsData: {
- line: applyMap(thisLine),
- fileHash: diffFileMockData.file_hash,
- filePath: diffFileMockData.file_path,
- contextLinesPath: 'contextLinesPath',
- isHighlighted: false,
- },
- });
-
- vm = wrapper.vm;
- });
-
- it('does not highlight non empty line content when line does not match highlighted row', (done) => {
- vm.$nextTick()
- .then(() => {
- expect(vm.$el.querySelector('.line_content.right-side').classList).not.toContain('hll');
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('highlights nonempty line content when line is the highlighted row', (done) => {
- vm.$nextTick()
- .then(() => {
- vm.$store.state.diffs.highlightedRow = rightLine.line_code;
-
- return vm.$nextTick();
- })
- .then(() => {
- expect(vm.$el.querySelector('.line_content.right-side').classList).toContain('hll');
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('highlights nonempty line content when line is part of a multiline comment', () => {
- wrapper.setProps({ isCommented: true });
- return vm.$nextTick().then(() => {
- expect(vm.$el.querySelector('.line_content.right-side').classList).toContain('hll');
- });
- });
- });
-
- describe('when both sides have content', () => {
- let vm;
- const thisLine = diffFileMockData.parallel_diff_lines[2];
- const rightLine = diffFileMockData.parallel_diff_lines[2].right;
-
- beforeEach(() => {
- vm = createComponentWithStore(Vue.extend(ParallelDiffTableRow), createStore(), {
- line: applyMap(thisLine),
- fileHash: diffFileMockData.file_hash,
- filePath: diffFileMockData.file_path,
- contextLinesPath: 'contextLinesPath',
- isHighlighted: false,
- }).$mount();
- });
-
- it('does not highlight either line when line does not match highlighted row', (done) => {
- vm.$nextTick()
- .then(() => {
- expect(vm.$el.querySelector('.line_content.right-side').classList).not.toContain('hll');
- expect(vm.$el.querySelector('.line_content.left-side').classList).not.toContain('hll');
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('adds hll class to lineContent when line is the highlighted row', (done) => {
- vm.$nextTick()
- .then(() => {
- vm.$store.state.diffs.highlightedRow = rightLine.line_code;
-
- return vm.$nextTick();
- })
- .then(() => {
- expect(vm.$el.querySelector('.line_content.right-side').classList).toContain('hll');
- expect(vm.$el.querySelector('.line_content.left-side').classList).toContain('hll');
- })
- .then(done)
- .catch(done.fail);
- });
-
- describe('sets coverage title and class', () => {
- it('for lines with coverage', (done) => {
- vm.$nextTick()
- .then(() => {
- const name = diffFileMockData.file_path;
- const line = rightLine.new_line;
-
- vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 5 } } };
-
- return vm.$nextTick();
- })
- .then(() => {
- const coverage = vm.$el.querySelector('.line-coverage.right-side');
-
- expect(coverage.title).toContain('Test coverage: 5 hits');
- expect(coverage.classList).toContain('coverage');
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('for lines without coverage', (done) => {
- vm.$nextTick()
- .then(() => {
- const name = diffFileMockData.file_path;
- const line = rightLine.new_line;
-
- vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 0 } } };
-
- return vm.$nextTick();
- })
- .then(() => {
- const coverage = vm.$el.querySelector('.line-coverage.right-side');
-
- expect(coverage.title).toContain('No test coverage');
- expect(coverage.classList).toContain('no-coverage');
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('for unknown lines', (done) => {
- vm.$nextTick()
- .then(() => {
- vm.$store.state.diffs.coverageFiles = {};
-
- return vm.$nextTick();
- })
- .then(() => {
- const coverage = vm.$el.querySelector('.line-coverage.right-side');
-
- expect(coverage.title).not.toContain('Coverage');
- expect(coverage.classList).not.toContain('coverage');
- expect(coverage.classList).not.toContain('no-coverage');
- })
- .then(done)
- .catch(done.fail);
- });
- });
- });
-
- describe('Table Cells', () => {
- let wrapper;
- let store;
- let thisLine;
- const TEST_USER_ID = 'abc123';
- const TEST_USER = { id: TEST_USER_ID };
-
- const createComponent = (props = {}, propsStore = store, data = {}) => {
- wrapper = shallowMount(ParallelDiffTableRow, {
- store: propsStore,
- propsData: {
- line: thisLine,
- fileHash: diffFileMockData.file_hash,
- filePath: diffFileMockData.file_path,
- contextLinesPath: 'contextLinesPath',
- isHighlighted: false,
- ...props,
- },
- data() {
- return data;
- },
- });
- };
-
- beforeEach(() => {
- // eslint-disable-next-line prefer-destructuring
- thisLine = diffFileMockData.parallel_diff_lines[2];
- store = createStore();
- store.state.notes.userData = TEST_USER;
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- const findNewTd = () => wrapper.find({ ref: 'newTd' });
- const findOldTd = () => wrapper.find({ ref: 'oldTd' });
-
- describe('td', () => {
- it('highlights when isHighlighted true', () => {
- store.state.diffs.highlightedRow = thisLine.left.line_code;
- createComponent({}, store);
-
- expect(findNewTd().classes()).toContain('hll');
- expect(findOldTd().classes()).toContain('hll');
- });
-
- it('does not highlight when isHighlighted false', () => {
- createComponent();
-
- expect(findNewTd().classes()).not.toContain('hll');
- expect(findOldTd().classes()).not.toContain('hll');
- });
- });
-
- describe('comment button', () => {
- const findNoteButton = () => wrapper.find({ ref: 'addDiffNoteButtonLeft' });
-
- it.each`
- hover | line | userData | expectation
- ${true} | ${{}} | ${TEST_USER} | ${true}
- ${true} | ${{ line: { left: null } }} | ${TEST_USER} | ${false}
- ${true} | ${{}} | ${null} | ${false}
- ${false} | ${{}} | ${TEST_USER} | ${false}
- `(
- 'exists is $expectation - with userData ($userData)',
- async ({ hover, line, userData, expectation }) => {
- store.state.notes.userData = userData;
- createComponent(line, store);
- if (hover) await wrapper.find('.line_holder').trigger('mouseover');
-
- expect(findNoteButton().exists()).toBe(expectation);
- },
- );
-
- it.each`
- line | expectation
- ${{ ...thisLine, left: { discussions: [] } }} | ${true}
- ${{ ...thisLine, left: { type: 'context', discussions: [] } }} | ${false}
- ${{ ...thisLine, left: { type: 'old-nonewline', discussions: [] } }} | ${false}
- ${{ ...thisLine, left: { discussions: [{}] } }} | ${false}
- `('visible is $expectation - line ($line)', async ({ line, expectation }) => {
- createComponent({ line: applyMap(line) }, store, {
- isLeftHover: true,
- isCommentButtonRendered: true,
- });
-
- expect(findNoteButton().isVisible()).toBe(expectation);
- });
-
- it.each`
- disabled | commentsDisabled
- ${'disabled'} | ${true}
- ${undefined} | ${false}
- `(
- 'has attribute disabled=$disabled when the outer component has prop commentsDisabled=$commentsDisabled',
- ({ disabled, commentsDisabled }) => {
- thisLine.left.commentsDisabled = commentsDisabled;
- createComponent({ line: { ...thisLine } }, store, {
- isLeftHover: true,
- isCommentButtonRendered: true,
- });
-
- expect(findNoteButton().attributes('disabled')).toBe(disabled);
- },
- );
-
- const symlinkishFileTooltip =
- 'Commenting on symbolic links that replace or are replaced by files is currently not supported.';
- const realishFileTooltip =
- 'Commenting on files that replace or are replaced by symbolic links is currently not supported.';
- const otherFileTooltip = 'Add a comment to this line';
- const findTooltip = () => wrapper.find({ ref: 'addNoteTooltipLeft' });
-
- it.each`
- tooltip | commentsDisabled
- ${symlinkishFileTooltip} | ${{ wasSymbolic: true }}
- ${symlinkishFileTooltip} | ${{ isSymbolic: true }}
- ${realishFileTooltip} | ${{ wasReal: true }}
- ${realishFileTooltip} | ${{ isReal: true }}
- ${otherFileTooltip} | ${false}
- `(
- 'has the correct tooltip when commentsDisabled=$commentsDisabled',
- ({ tooltip, commentsDisabled }) => {
- thisLine.left.commentsDisabled = commentsDisabled;
- createComponent({ line: { ...thisLine } }, store, {
- isLeftHover: true,
- isCommentButtonRendered: true,
- });
-
- expect(findTooltip().attributes('title')).toBe(tooltip);
- },
- );
- });
-
- describe('line number', () => {
- const findLineNumberOld = () => wrapper.find({ ref: 'lineNumberRefOld' });
- const findLineNumberNew = () => wrapper.find({ ref: 'lineNumberRefNew' });
-
- it('renders line numbers in correct cells', () => {
- createComponent();
-
- expect(findLineNumberOld().exists()).toBe(true);
- expect(findLineNumberNew().exists()).toBe(true);
- });
-
- describe('with lineNumber prop', () => {
- const TEST_LINE_CODE = 'LC_42';
- const TEST_LINE_NUMBER = 1;
-
- describe.each`
- lineProps | findLineNumber | expectedHref | expectedClickArg
- ${{ line_code: TEST_LINE_CODE, old_line: TEST_LINE_NUMBER }} | ${findLineNumberOld} | ${`#${TEST_LINE_CODE}`} | ${TEST_LINE_CODE}
- ${{ line_code: undefined, old_line: TEST_LINE_NUMBER }} | ${findLineNumberOld} | ${'#'} | ${undefined}
- `(
- 'with line ($lineProps)',
- ({ lineProps, findLineNumber, expectedHref, expectedClickArg }) => {
- beforeEach(() => {
- jest.spyOn(store, 'dispatch').mockImplementation();
- Object.assign(thisLine.left, lineProps);
- Object.assign(thisLine.right, lineProps);
- createComponent({
- line: applyMap({ ...thisLine }),
- });
- });
-
- it('renders', () => {
- expect(findLineNumber().exists()).toBe(true);
- expect(findLineNumber().attributes()).toEqual({
- href: expectedHref,
- 'data-linenumber': TEST_LINE_NUMBER.toString(),
- });
- });
-
- it('on click, dispatches setHighlightedRow', () => {
- expect(store.dispatch).toHaveBeenCalledTimes(1);
-
- findLineNumber().trigger('click');
-
- expect(store.dispatch).toHaveBeenCalledWith(
- 'diffs/setHighlightedRow',
- expectedClickArg,
- );
- expect(store.dispatch).toHaveBeenCalledTimes(2);
- });
- },
- );
- });
- });
-
- describe('diff-gutter-avatars', () => {
- const TEST_LINE_CODE = 'LC_42';
- const TEST_FILE_HASH = diffFileMockData.file_hash;
- const findAvatars = () => wrapper.find(DiffGutterAvatars);
- let line;
-
- beforeEach(() => {
- jest.spyOn(store, 'dispatch').mockImplementation();
-
- line = applyMap({
- left: {
- line_code: TEST_LINE_CODE,
- type: 'new',
- old_line: null,
- new_line: 1,
- discussions: [{ ...discussionsMockData }],
- discussionsExpanded: true,
- text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- rich_text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- meta_data: null,
- },
- });
- });
-
- describe('with showCommentButton', () => {
- it('renders if line has discussions', () => {
- createComponent({ line });
-
- expect(findAvatars().props()).toEqual({
- discussions: line.left.discussions,
- discussionsExpanded: line.left.discussionsExpanded,
- });
- });
-
- it('does notrender if line has no discussions', () => {
- line.left.discussions = [];
- createComponent({ line: applyMap(line) });
-
- expect(findAvatars().exists()).toEqual(false);
- });
-
- it('toggles line discussion', () => {
- createComponent({ line });
-
- expect(store.dispatch).toHaveBeenCalledTimes(1);
-
- findAvatars().vm.$emit('toggleLineDiscussions');
-
- expect(store.dispatch).toHaveBeenCalledWith('diffs/toggleLineDiscussions', {
- lineCode: TEST_LINE_CODE,
- fileHash: TEST_FILE_HASH,
- expanded: !line.left.discussionsExpanded,
- });
- });
- });
- });
-
- describe('interoperability', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('adds old side interoperability data attributes', () => {
- expect(findInteropAttributes(wrapper, '.line_content.left-side')).toEqual({
- type: 'old',
- line: thisLine.left.old_line.toString(),
- oldLine: thisLine.left.old_line.toString(),
- });
- });
-
- it('adds new side interoperability data attributes', () => {
- expect(findInteropAttributes(wrapper, '.line_content.right-side')).toEqual({
- type: 'new',
- line: thisLine.right.new_line.toString(),
- newLine: thisLine.right.new_line.toString(),
- });
- });
- });
- });
-});
diff --git a/spec/frontend/diffs/components/parallel_diff_view_spec.js b/spec/frontend/diffs/components/parallel_diff_view_spec.js
deleted file mode 100644
index 452e1f58551..00000000000
--- a/spec/frontend/diffs/components/parallel_diff_view_spec.js
+++ /dev/null
@@ -1,37 +0,0 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
-import parallelDiffTableRow from '~/diffs/components/parallel_diff_table_row.vue';
-import ParallelDiffView from '~/diffs/components/parallel_diff_view.vue';
-import { createStore } from '~/mr_notes/stores';
-import diffFileMockData from '../mock_data/diff_file';
-
-let wrapper;
-const localVue = createLocalVue();
-
-localVue.use(Vuex);
-
-function factory() {
- const diffFile = { ...diffFileMockData };
- const store = createStore();
-
- wrapper = shallowMount(ParallelDiffView, {
- localVue,
- store,
- propsData: {
- diffFile,
- diffLines: diffFile.parallel_diff_lines,
- },
- });
-}
-
-describe('ParallelDiffView', () => {
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders diff lines', () => {
- factory();
-
- expect(wrapper.findAll(parallelDiffTableRow).length).toBe(8);
- });
-});
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index 14f8e090be9..c2e5d07bcfd 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -8,7 +8,6 @@ import {
DIFF_VIEW_COOKIE_NAME,
INLINE_DIFF_VIEW_TYPE,
PARALLEL_DIFF_VIEW_TYPE,
- DIFFS_PER_PAGE,
} from '~/diffs/constants';
import {
setBaseConfig,
@@ -154,16 +153,16 @@ describe('DiffsStoreActions', () => {
it('should fetch batch diff files', (done) => {
const endpointBatch = '/fetch/diffs_batch';
- const res1 = { diff_files: [{ file_hash: 'test' }], pagination: { next_page: 2 } };
- const res2 = { diff_files: [{ file_hash: 'test2' }], pagination: {} };
+ const res1 = { diff_files: [{ file_hash: 'test' }], pagination: { total_pages: 7 } };
+ const res2 = { diff_files: [{ file_hash: 'test2' }], pagination: { total_pages: 7 } };
mock
.onGet(
mergeUrlParams(
{
w: '1',
view: 'inline',
- page: 1,
- per_page: DIFFS_PER_PAGE,
+ page: 0,
+ per_page: 5,
},
endpointBatch,
),
@@ -174,8 +173,8 @@ describe('DiffsStoreActions', () => {
{
w: '1',
view: 'inline',
- page: 2,
- per_page: DIFFS_PER_PAGE,
+ page: 5,
+ per_page: 7,
},
endpointBatch,
),
@@ -1020,10 +1019,12 @@ describe('DiffsStoreActions', () => {
const endpointUpdateUser = 'user/prefs';
let putSpy;
let mock;
+ let gon;
beforeEach(() => {
mock = new MockAdapter(axios);
putSpy = jest.spyOn(axios, 'put');
+ gon = window.gon;
mock.onPut(endpointUpdateUser).reply(200, {});
jest.spyOn(eventHub, '$emit').mockImplementation();
@@ -1031,6 +1032,7 @@ describe('DiffsStoreActions', () => {
afterEach(() => {
mock.restore();
+ window.gon = gon;
});
it('commits SET_SHOW_WHITESPACE', (done) => {
@@ -1044,7 +1046,9 @@ describe('DiffsStoreActions', () => {
);
});
- it('saves to the database', async () => {
+ it('saves to the database when the user is logged in', async () => {
+ window.gon = { current_user_id: 12345 };
+
await setShowWhitespace(
{ state: { endpointUpdateUser }, commit() {} },
{ showWhitespace: true, updateDatabase: true },
@@ -1053,6 +1057,17 @@ describe('DiffsStoreActions', () => {
expect(putSpy).toHaveBeenCalledWith(endpointUpdateUser, { show_whitespace_in_diffs: true });
});
+ it('does not try to save to the API if the user is not logged in', async () => {
+ window.gon = {};
+
+ await setShowWhitespace(
+ { state: { endpointUpdateUser }, commit() {} },
+ { showWhitespace: true, updateDatabase: true },
+ );
+
+ expect(putSpy).not.toHaveBeenCalled();
+ });
+
it('emits eventHub event', async () => {
await setShowWhitespace(
{ state: {}, commit() {} },
diff --git a/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js b/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
index dbef547c297..99f13a1c84c 100644
--- a/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
+++ b/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
@@ -54,7 +54,7 @@ describe('Compare diff version dropdowns', () => {
Object.defineProperty(window, 'location', {
writable: true,
- value: { href: `https://example.gitlab.com${diffHeadParam}` },
+ value: { search: diffHeadParam },
});
expectedFirstVersion = {
diff --git a/spec/frontend/editor/editor_ci_schema_ext_spec.js b/spec/frontend/editor/source_editor_ci_schema_ext_spec.js
index 2f0ecfb151e..07ac080fe08 100644
--- a/spec/frontend/editor/editor_ci_schema_ext_spec.js
+++ b/spec/frontend/editor/source_editor_ci_schema_ext_spec.js
@@ -1,8 +1,8 @@
import { languages } from 'monaco-editor';
import { TEST_HOST } from 'helpers/test_constants';
import { EXTENSION_CI_SCHEMA_FILE_NAME_MATCH } from '~/editor/constants';
-import EditorLite from '~/editor/editor_lite';
-import { CiSchemaExtension } from '~/editor/extensions/editor_ci_schema_ext';
+import { CiSchemaExtension } from '~/editor/extensions/source_editor_ci_schema_ext';
+import SourceEditor from '~/editor/source_editor';
const mockRef = 'AABBCCDD';
@@ -17,7 +17,7 @@ describe('~/editor/editor_ci_config_ext', () => {
const createMockEditor = ({ blobPath = defaultBlobPath } = {}) => {
setFixtures('<div id="editor"></div>');
editorEl = document.getElementById('editor');
- editor = new EditorLite();
+ editor = new SourceEditor();
instance = editor.createInstance({
el: editorEl,
blobPath,
diff --git a/spec/frontend/editor/editor_lite_extension_base_spec.js b/spec/frontend/editor/source_editor_extension_base_spec.js
index 59e1b8968eb..352db9d0d51 100644
--- a/spec/frontend/editor/editor_lite_extension_base_spec.js
+++ b/spec/frontend/editor/source_editor_extension_base_spec.js
@@ -5,7 +5,7 @@ import {
EDITOR_TYPE_CODE,
EDITOR_TYPE_DIFF,
} from '~/editor/constants';
-import { EditorLiteExtension } from '~/editor/extensions/editor_lite_extension_base';
+import { SourceEditorExtension } from '~/editor/extensions/source_editor_extension_base';
jest.mock('~/helpers/startup_css_helper', () => {
return {
@@ -22,7 +22,7 @@ jest.mock('~/helpers/startup_css_helper', () => {
};
});
-describe('The basis for an Editor Lite extension', () => {
+describe('The basis for an Source Editor extension', () => {
const defaultLine = 3;
let ext;
let event;
@@ -63,7 +63,7 @@ describe('The basis for an Editor Lite extension', () => {
const instance = {
layout: jest.fn(),
};
- ext = new EditorLiteExtension({ instance });
+ ext = new SourceEditorExtension({ instance });
expect(instance.layout).not.toHaveBeenCalled();
// We're waiting for the waitForCSSLoaded mock to kick in
@@ -79,7 +79,7 @@ describe('The basis for an Editor Lite extension', () => {
${'does not fail if both instance and the options are omitted'} | ${undefined} | ${undefined}
${'throws if only options are passed'} | ${undefined} | ${defaultOptions}
`('$description', ({ instance, options } = {}) => {
- EditorLiteExtension.deferRerender = jest.fn();
+ SourceEditorExtension.deferRerender = jest.fn();
const originalInstance = { ...instance };
if (instance) {
@@ -88,54 +88,54 @@ describe('The basis for an Editor Lite extension', () => {
expect(instance[prop]).toBeUndefined();
});
// Both instance and options are passed
- ext = new EditorLiteExtension({ instance, ...options });
+ ext = new SourceEditorExtension({ instance, ...options });
Object.entries(options).forEach(([prop, value]) => {
expect(ext[prop]).toBeUndefined();
expect(instance[prop]).toBe(value);
});
} else {
- ext = new EditorLiteExtension({ instance });
+ ext = new SourceEditorExtension({ instance });
expect(instance).toEqual(originalInstance);
}
} else if (options) {
// Options are passed without instance
expect(() => {
- ext = new EditorLiteExtension({ ...options });
+ ext = new SourceEditorExtension({ ...options });
}).toThrow(ERROR_INSTANCE_REQUIRED_FOR_EXTENSION);
} else {
// Neither options nor instance are passed
expect(() => {
- ext = new EditorLiteExtension();
+ ext = new SourceEditorExtension();
}).not.toThrow();
}
});
it('initializes the line highlighting', () => {
- EditorLiteExtension.deferRerender = jest.fn();
- const spy = jest.spyOn(EditorLiteExtension, 'highlightLines');
- ext = new EditorLiteExtension({ instance: {} });
+ SourceEditorExtension.deferRerender = jest.fn();
+ const spy = jest.spyOn(SourceEditorExtension, 'highlightLines');
+ ext = new SourceEditorExtension({ instance: {} });
expect(spy).toHaveBeenCalled();
});
it('sets up the line linking for code instance', () => {
- EditorLiteExtension.deferRerender = jest.fn();
- const spy = jest.spyOn(EditorLiteExtension, 'setupLineLinking');
+ SourceEditorExtension.deferRerender = jest.fn();
+ const spy = jest.spyOn(SourceEditorExtension, 'setupLineLinking');
const instance = {
getEditorType: jest.fn().mockReturnValue(EDITOR_TYPE_CODE),
onMouseMove: jest.fn(),
onMouseDown: jest.fn(),
};
- ext = new EditorLiteExtension({ instance });
+ ext = new SourceEditorExtension({ instance });
expect(spy).toHaveBeenCalledWith(instance);
});
it('does not set up the line linking for diff instance', () => {
- EditorLiteExtension.deferRerender = jest.fn();
- const spy = jest.spyOn(EditorLiteExtension, 'setupLineLinking');
+ SourceEditorExtension.deferRerender = jest.fn();
+ const spy = jest.spyOn(SourceEditorExtension, 'setupLineLinking');
const instance = {
getEditorType: jest.fn().mockReturnValue(EDITOR_TYPE_DIFF),
};
- ext = new EditorLiteExtension({ instance });
+ ext = new SourceEditorExtension({ instance });
expect(spy).not.toHaveBeenCalled();
});
});
@@ -172,7 +172,7 @@ describe('The basis for an Editor Lite extension', () => {
${'does not highlight if hash is incomplete 2'} | ${'#L-'} | ${false} | ${null}
`('$desc', ({ hash, shouldReveal, expectedRange } = {}) => {
window.location.hash = hash;
- EditorLiteExtension.highlightLines(instance);
+ SourceEditorExtension.highlightLines(instance);
if (!shouldReveal) {
expect(revealSpy).not.toHaveBeenCalled();
expect(decorationsSpy).not.toHaveBeenCalled();
@@ -194,7 +194,7 @@ describe('The basis for an Editor Lite extension', () => {
decorationsSpy.mockReturnValue('foo');
window.location.hash = '#L10';
expect(instance.lineDecorations).toBeUndefined();
- EditorLiteExtension.highlightLines(instance);
+ SourceEditorExtension.highlightLines(instance);
expect(instance.lineDecorations).toBe('foo');
});
});
@@ -208,7 +208,7 @@ describe('The basis for an Editor Lite extension', () => {
};
beforeEach(() => {
- EditorLiteExtension.onMouseMoveHandler(event); // generate the anchor
+ SourceEditorExtension.onMouseMoveHandler(event); // generate the anchor
});
it.each`
@@ -216,7 +216,7 @@ describe('The basis for an Editor Lite extension', () => {
${'onMouseMove'} | ${instance.onMouseMove}
${'onMouseDown'} | ${instance.onMouseDown}
`('sets up the $desc listener', ({ spy } = {}) => {
- EditorLiteExtension.setupLineLinking(instance);
+ SourceEditorExtension.setupLineLinking(instance);
expect(spy).toHaveBeenCalled();
});
@@ -230,7 +230,7 @@ describe('The basis for an Editor Lite extension', () => {
fn(event);
});
- EditorLiteExtension.setupLineLinking(instance);
+ SourceEditorExtension.setupLineLinking(instance);
if (shouldRemove) {
expect(instance.deltaDecorations).toHaveBeenCalledWith(instance.lineDecorations, []);
} else {
@@ -241,7 +241,7 @@ describe('The basis for an Editor Lite extension', () => {
describe('onMouseMoveHandler', () => {
it('stops propagation for contextmenu event on the generated anchor', () => {
- EditorLiteExtension.onMouseMoveHandler(event);
+ SourceEditorExtension.onMouseMoveHandler(event);
const anchor = findLine(defaultLine).querySelector('a');
const contextMenuEvent = new Event('contextmenu');
@@ -253,27 +253,27 @@ describe('The basis for an Editor Lite extension', () => {
it('creates an anchor if it does not exist yet', () => {
expect(findLine(defaultLine).querySelector('a')).toBe(null);
- EditorLiteExtension.onMouseMoveHandler(event);
+ SourceEditorExtension.onMouseMoveHandler(event);
expect(findLine(defaultLine).querySelector('a')).not.toBe(null);
});
it('does not create a new anchor if it exists', () => {
- EditorLiteExtension.onMouseMoveHandler(event);
+ SourceEditorExtension.onMouseMoveHandler(event);
expect(findLine(defaultLine).querySelector('a')).not.toBe(null);
- EditorLiteExtension.createAnchor = jest.fn();
- EditorLiteExtension.onMouseMoveHandler(event);
- expect(EditorLiteExtension.createAnchor).not.toHaveBeenCalled();
+ SourceEditorExtension.createAnchor = jest.fn();
+ SourceEditorExtension.onMouseMoveHandler(event);
+ expect(SourceEditorExtension.createAnchor).not.toHaveBeenCalled();
expect(findLine(defaultLine).querySelectorAll('a')).toHaveLength(1);
});
it('does not create a link if the event is triggered on a wrong node', () => {
setFixtures('<div class="wrong-class">3</div>');
- EditorLiteExtension.createAnchor = jest.fn();
+ SourceEditorExtension.createAnchor = jest.fn();
const wrongEvent = generateEventMock({ el: document.querySelector('.wrong-class') });
- EditorLiteExtension.onMouseMoveHandler(wrongEvent);
- expect(EditorLiteExtension.createAnchor).not.toHaveBeenCalled();
+ SourceEditorExtension.onMouseMoveHandler(wrongEvent);
+ expect(SourceEditorExtension.createAnchor).not.toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/editor/editor_markdown_ext_spec.js b/spec/frontend/editor/source_editor_markdown_ext_spec.js
index 3f64dcfd7a0..943e21250b4 100644
--- a/spec/frontend/editor/editor_markdown_ext_spec.js
+++ b/spec/frontend/editor/source_editor_markdown_ext_spec.js
@@ -1,8 +1,8 @@
import { Range, Position } from 'monaco-editor';
-import EditorLite from '~/editor/editor_lite';
-import { EditorMarkdownExtension } from '~/editor/extensions/editor_markdown_ext';
+import { EditorMarkdownExtension } from '~/editor/extensions/source_editor_markdown_ext';
+import SourceEditor from '~/editor/source_editor';
-describe('Markdown Extension for Editor Lite', () => {
+describe('Markdown Extension for Source Editor', () => {
let editor;
let instance;
let editorEl;
@@ -25,7 +25,7 @@ describe('Markdown Extension for Editor Lite', () => {
beforeEach(() => {
setFixtures('<div id="editor" data-editor-loading></div>');
editorEl = document.getElementById('editor');
- editor = new EditorLite();
+ editor = new SourceEditor();
instance = editor.createInstance({
el: editorEl,
blobPath: filePath,
diff --git a/spec/frontend/editor/editor_lite_spec.js b/spec/frontend/editor/source_editor_spec.js
index 815457e012f..d87d373c952 100644
--- a/spec/frontend/editor/editor_lite_spec.js
+++ b/spec/frontend/editor/source_editor_spec.js
@@ -2,12 +2,12 @@
import { editor as monacoEditor, languages as monacoLanguages } from 'monaco-editor';
import waitForPromises from 'helpers/wait_for_promises';
import {
- EDITOR_LITE_INSTANCE_ERROR_NO_EL,
+ SOURCE_EDITOR_INSTANCE_ERROR_NO_EL,
URI_PREFIX,
EDITOR_READY_EVENT,
} from '~/editor/constants';
-import EditorLite from '~/editor/editor_lite';
-import { EditorLiteExtension } from '~/editor/extensions/editor_lite_extension_base';
+import { SourceEditorExtension } from '~/editor/extensions/source_editor_extension_base';
+import SourceEditor from '~/editor/source_editor';
import { DEFAULT_THEME, themes } from '~/ide/lib/themes';
import { joinPaths } from '~/lib/utils/url_utility';
@@ -25,7 +25,7 @@ describe('Base editor', () => {
setFixtures('<div id="editor" data-editor-loading></div>');
editorEl = document.getElementById('editor');
defaultArguments = { el: editorEl, blobPath, blobContent, blobGlobalId };
- editor = new EditorLite();
+ editor = new SourceEditor();
});
afterEach(() => {
@@ -49,7 +49,7 @@ describe('Base editor', () => {
expect(editorEl.dataset.editorLoading).toBeUndefined();
});
- describe('instance of the Editor Lite', () => {
+ describe('instance of the Source Editor', () => {
let modelSpy;
let instanceSpy;
const setModel = jest.fn();
@@ -58,7 +58,7 @@ describe('Base editor', () => {
modelSpy = jest.spyOn(monacoEditor, 'createModel').mockImplementation(() => res);
};
const mockDecorateInstance = (decorations = {}) => {
- jest.spyOn(EditorLite, 'convertMonacoToELInstance').mockImplementation((inst) => {
+ jest.spyOn(SourceEditor, 'convertMonacoToELInstance').mockImplementation((inst) => {
return Object.assign(inst, decorations);
});
};
@@ -76,11 +76,11 @@ describe('Base editor', () => {
mockDecorateInstance();
expect(() => {
editor.createInstance();
- }).toThrow(EDITOR_LITE_INSTANCE_ERROR_NO_EL);
+ }).toThrow(SOURCE_EDITOR_INSTANCE_ERROR_NO_EL);
expect(modelSpy).not.toHaveBeenCalled();
expect(instanceSpy).not.toHaveBeenCalled();
- expect(EditorLite.convertMonacoToELInstance).not.toHaveBeenCalled();
+ expect(SourceEditor.convertMonacoToELInstance).not.toHaveBeenCalled();
});
it('creates model to be supplied to Monaco editor', () => {
@@ -246,7 +246,7 @@ describe('Base editor', () => {
let editorEl2;
let inst1;
let inst2;
- const readOnlyIndex = '68'; // readOnly option has the internal index of 68 in the editor's options
+ const readOnlyIndex = '78'; // readOnly option has the internal index of 78 in the editor's options
beforeEach(() => {
setFixtures('<div id="editor1"></div><div id="editor2"></div>');
@@ -261,7 +261,7 @@ describe('Base editor', () => {
blobPath,
};
- editor = new EditorLite();
+ editor = new SourceEditor();
instanceSpy = jest.spyOn(monacoEditor, 'create');
});
@@ -304,7 +304,7 @@ describe('Base editor', () => {
});
it('shares global editor options among all instances', () => {
- editor = new EditorLite({
+ editor = new SourceEditor({
readOnly: true,
});
@@ -316,7 +316,7 @@ describe('Base editor', () => {
});
it('allows overriding editor options on the instance level', () => {
- editor = new EditorLite({
+ editor = new SourceEditor({
readOnly: true,
});
inst1 = editor.createInstance({
@@ -410,7 +410,7 @@ describe('Base editor', () => {
return WithStaticMethod.computeBoo(this.base);
}
}
- class WithStaticMethodExtended extends EditorLiteExtension {
+ class WithStaticMethodExtended extends SourceEditorExtension {
static computeBoo(a) {
return a + 1;
}
@@ -546,7 +546,7 @@ describe('Base editor', () => {
beforeEach(() => {
editorExtensionSpy = jest
- .spyOn(EditorLite, 'pushToImportsArray')
+ .spyOn(SourceEditor, 'pushToImportsArray')
.mockImplementation((arr) => {
arr.push(
Promise.resolve({
@@ -593,7 +593,7 @@ describe('Base editor', () => {
const useSpy = jest.fn().mockImplementation(() => {
calls.push('use');
});
- jest.spyOn(EditorLite, 'convertMonacoToELInstance').mockImplementation((inst) => {
+ jest.spyOn(SourceEditor, 'convertMonacoToELInstance').mockImplementation((inst) => {
const decoratedInstance = inst;
decoratedInstance.use = useSpy;
return decoratedInstance;
@@ -664,7 +664,7 @@ describe('Base editor', () => {
it('sets default syntax highlighting theme', () => {
const expectedTheme = themes.find((t) => t.name === DEFAULT_THEME);
- editor = new EditorLite();
+ editor = new SourceEditor();
expect(themeDefineSpy).toHaveBeenCalledWith(DEFAULT_THEME, expectedTheme.data);
expect(themeSetSpy).toHaveBeenCalledWith(DEFAULT_THEME);
@@ -676,7 +676,7 @@ describe('Base editor', () => {
expect(expectedTheme.name).not.toBe(DEFAULT_THEME);
window.gon.user_color_scheme = expectedTheme.name;
- editor = new EditorLite();
+ editor = new SourceEditor();
expect(themeDefineSpy).toHaveBeenCalledWith(expectedTheme.name, expectedTheme.data);
expect(themeSetSpy).toHaveBeenCalledWith(expectedTheme.name);
@@ -687,7 +687,7 @@ describe('Base editor', () => {
const nonExistentTheme = { name };
window.gon.user_color_scheme = nonExistentTheme.name;
- editor = new EditorLite();
+ editor = new SourceEditor();
expect(themeDefineSpy).not.toHaveBeenCalled();
expect(themeSetSpy).toHaveBeenCalledWith(DEFAULT_THEME);
diff --git a/spec/frontend/emoji/awards_app/store/actions_spec.js b/spec/frontend/emoji/awards_app/store/actions_spec.js
index e96920d1112..02b643244d2 100644
--- a/spec/frontend/emoji/awards_app/store/actions_spec.js
+++ b/spec/frontend/emoji/awards_app/store/actions_spec.js
@@ -5,6 +5,7 @@ import * as actions from '~/emoji/awards_app/store/actions';
import axios from '~/lib/utils/axios_utils';
jest.mock('@sentry/browser');
+jest.mock('~/vue_shared/plugins/global_toast');
describe('Awards app actions', () => {
afterEach(() => {
diff --git a/spec/frontend/environment.js b/spec/frontend/environment.js
index d1bc11538a3..29aa416149c 100644
--- a/spec/frontend/environment.js
+++ b/spec/frontend/environment.js
@@ -43,6 +43,9 @@ class CustomEnvironment extends JSDOMEnvironment {
};
this.global.IS_EE = IS_EE;
+ // Set up global `gl` object
+ this.global.gl = {};
+
this.rejectedPromises = [];
this.global.promiseRejectionHandler = (error) => {
@@ -67,6 +70,24 @@ class CustomEnvironment extends JSDOMEnvironment {
getEntriesByName: () => [],
});
+ //
+ // Monaco-related environment variables
+ //
+ this.global.MonacoEnvironment = { globalAPI: true };
+ Object.defineProperty(this.global, 'matchMedia', {
+ writable: true,
+ value: (query) => ({
+ matches: false,
+ media: query,
+ onchange: null,
+ addListener: () => null, // deprecated
+ removeListener: () => null, // deprecated
+ addEventListener: () => null,
+ removeEventListener: () => null,
+ dispatchEvent: () => null,
+ }),
+ });
+
this.global.PerformanceObserver = class {
/* eslint-disable no-useless-constructor, no-unused-vars, no-empty-function, class-methods-use-this */
constructor(callback) {}
diff --git a/spec/frontend/environments/environment_item_spec.js b/spec/frontend/environments/environment_item_spec.js
index 09ab1223fd1..62806c9e44c 100644
--- a/spec/frontend/environments/environment_item_spec.js
+++ b/spec/frontend/environments/environment_item_spec.js
@@ -285,6 +285,17 @@ describe('Environment item', () => {
it('should not render the "Upcoming deployment" column', () => {
expect(findUpcomingDeployment().exists()).toBe(false);
});
+
+ it('should set the name cell to be full width', () => {
+ expect(wrapper.find('[data-testid="environment-name-cell"]').classes('section-100')).toBe(
+ true,
+ );
+ });
+
+ it('should hide non-folder properties', () => {
+ expect(wrapper.find('[data-testid="environment-deployment-id-cell"]').exists()).toBe(false);
+ expect(wrapper.find('[data-testid="environment-build-cell"]').exists()).toBe(false);
+ });
});
describe('When environment can be deleted', () => {
diff --git a/spec/frontend/environments/environments_app_spec.js b/spec/frontend/environments/environments_app_spec.js
index 542cf58b079..1abdeff614c 100644
--- a/spec/frontend/environments/environments_app_spec.js
+++ b/spec/frontend/environments/environments_app_spec.js
@@ -1,3 +1,4 @@
+import { GlTabs } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
@@ -7,6 +8,7 @@ import EmptyState from '~/environments/components/empty_state.vue';
import EnableReviewAppModal from '~/environments/components/enable_review_app_modal.vue';
import EnvironmentsApp from '~/environments/components/environments_app.vue';
import axios from '~/lib/utils/axios_utils';
+import * as urlUtils from '~/lib/utils/url_utility';
import { environment, folder } from './mock_data';
describe('Environment', () => {
@@ -264,4 +266,18 @@ describe('Environment', () => {
});
});
});
+
+ describe('tabs', () => {
+ beforeEach(() => {
+ mockRequest(200, { environments: [] });
+ jest
+ .spyOn(urlUtils, 'getParameterByName')
+ .mockImplementation((param) => (param === 'scope' ? 'stopped' : null));
+ return createWrapper(true);
+ });
+
+ it('selects the tab for the parameter', () => {
+ expect(wrapper.findComponent(GlTabs).attributes('value')).toBe('1');
+ });
+ });
});
diff --git a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
index 0948b08f942..799b567a2c0 100644
--- a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
+++ b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
@@ -1,21 +1,16 @@
import { GlToggle, GlAlert } from '@gitlab/ui';
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
+import Vue from 'vue';
import Vuex from 'vuex';
import { mockTracking } from 'helpers/tracking_helper';
import { TEST_HOST } from 'spec/test_constants';
import EditFeatureFlag from '~/feature_flags/components/edit_feature_flag.vue';
import Form from '~/feature_flags/components/form.vue';
-import { LEGACY_FLAG, NEW_VERSION_FLAG } from '~/feature_flags/constants';
import createStore from '~/feature_flags/store/edit';
import axios from '~/lib/utils/axios_utils';
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-const userCalloutId = 'feature_flags_new_version';
-const userCalloutsPath = `${TEST_HOST}/user_callouts`;
-
+Vue.use(Vuex);
describe('Edit feature flag form', () => {
let wrapper;
let mock;
@@ -25,20 +20,14 @@ describe('Edit feature flag form', () => {
endpoint: `${TEST_HOST}/feature_flags.json`,
});
- const factory = (opts = {}) => {
+ const factory = (provide = {}) => {
if (wrapper) {
wrapper.destroy();
wrapper = null;
}
wrapper = shallowMount(EditFeatureFlag, {
- localVue,
store,
- provide: {
- showUserCallout: true,
- userCalloutId,
- userCalloutsPath,
- ...opts,
- },
+ provide,
});
};
@@ -52,18 +41,8 @@ describe('Edit feature flag form', () => {
updated_at: '2019-01-17T17:27:39.778Z',
name: 'feature_flag',
description: '',
- version: LEGACY_FLAG,
edit_path: '/h5bp/html5-boilerplate/-/feature_flags/21/edit',
destroy_path: '/h5bp/html5-boilerplate/-/feature_flags/21',
- scopes: [
- {
- id: 21,
- active: false,
- environment_scope: '*',
- created_at: '2019-01-17T17:27:39.778Z',
- updated_at: '2019-01-17T17:27:39.778Z',
- },
- ],
});
factory();
setImmediate(() => done());
@@ -74,9 +53,7 @@ describe('Edit feature flag form', () => {
mock.restore();
});
- const findAlert = () => wrapper.find(GlAlert);
- const findWarningGlAlert = () =>
- wrapper.findAll(GlAlert).filter((c) => c.props('variant') === 'warning');
+ const findWarningGlAlert = () => wrapper.findComponent(GlAlert);
it('should display the iid', () => {
expect(wrapper.find('h3').text()).toContain('^5');
@@ -86,21 +63,13 @@ describe('Edit feature flag form', () => {
expect(wrapper.find(GlToggle).exists()).toBe(true);
});
- it('should set the value of the toggle to whether or not the flag is active', () => {
- expect(wrapper.find(GlToggle).props('value')).toBe(true);
- });
-
- it('should alert users the flag is read-only', () => {
- expect(findAlert().text()).toContain('GitLab is moving to a new way of managing feature flags');
- });
-
describe('with error', () => {
it('should render the error', () => {
store.dispatch('receiveUpdateFeatureFlagError', { message: ['The name is required'] });
return wrapper.vm.$nextTick(() => {
const warningGlAlert = findWarningGlAlert();
- expect(warningGlAlert.at(1).exists()).toEqual(true);
- expect(warningGlAlert.at(1).text()).toContain('The name is required');
+ expect(warningGlAlert.exists()).toEqual(true);
+ expect(warningGlAlert.text()).toContain('The name is required');
});
});
});
@@ -114,32 +83,6 @@ describe('Edit feature flag form', () => {
expect(wrapper.find(Form).exists()).toEqual(true);
});
- it('should set the version of the form from the feature flag', () => {
- expect(wrapper.find(Form).props('version')).toBe(LEGACY_FLAG);
-
- mock.resetHandlers();
-
- mock.onGet(`${TEST_HOST}/feature_flags.json`).replyOnce(200, {
- id: 21,
- iid: 5,
- active: true,
- created_at: '2019-01-17T17:27:39.778Z',
- updated_at: '2019-01-17T17:27:39.778Z',
- name: 'feature_flag',
- description: '',
- version: NEW_VERSION_FLAG,
- edit_path: '/h5bp/html5-boilerplate/-/feature_flags/21/edit',
- destroy_path: '/h5bp/html5-boilerplate/-/feature_flags/21',
- strategies: [],
- });
-
- factory();
-
- return axios.waitForAll().then(() => {
- expect(wrapper.find(Form).props('version')).toBe(NEW_VERSION_FLAG);
- });
- });
-
it('should track when the toggle is clicked', () => {
const toggle = wrapper.find(GlToggle);
const spy = mockTracking('_category_', toggle.element, jest.spyOn);
diff --git a/spec/frontend/feature_flags/components/feature_flags_table_spec.js b/spec/frontend/feature_flags/components/feature_flags_table_spec.js
index 816bc9b9707..d06d60ae310 100644
--- a/spec/frontend/feature_flags/components/feature_flags_table_spec.js
+++ b/spec/frontend/feature_flags/components/feature_flags_table_spec.js
@@ -8,9 +8,6 @@ import {
ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
ROLLOUT_STRATEGY_USER_ID,
ROLLOUT_STRATEGY_GITLAB_USER_LIST,
- NEW_VERSION_FLAG,
- LEGACY_FLAG,
- DEFAULT_PERCENT_ROLLOUT,
} from '~/feature_flags/constants';
const getDefaultProps = () => ({
@@ -23,17 +20,28 @@ const getDefaultProps = () => ({
description: 'flag description',
destroy_path: 'destroy/path',
edit_path: 'edit/path',
- version: LEGACY_FLAG,
- scopes: [
+ scopes: [],
+ strategies: [
{
- id: 1,
- active: true,
- environmentScope: 'scope',
- canUpdate: true,
- protected: false,
- rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
- rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
- shouldBeDestroyed: false,
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ scopes: [{ environment_scope: '*' }],
+ },
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50' },
+ scopes: [{ environment_scope: 'production' }, { environment_scope: 'staging' }],
+ },
+ {
+ name: ROLLOUT_STRATEGY_USER_ID,
+ parameters: { userIds: '1,2,3,4' },
+ scopes: [{ environment_scope: 'review/*' }],
+ },
+ {
+ name: ROLLOUT_STRATEGY_GITLAB_USER_LIST,
+ parameters: {},
+ user_list: { name: 'test list' },
+ scopes: [{ environment_scope: '*' }],
},
],
},
@@ -43,6 +51,7 @@ const getDefaultProps = () => ({
describe('Feature flag table', () => {
let wrapper;
let props;
+ let badges;
const createWrapper = (propsData, opts = {}) => {
wrapper = shallowMount(FeatureFlagsTable, {
@@ -56,6 +65,15 @@ describe('Feature flag table', () => {
beforeEach(() => {
props = getDefaultProps();
+ createWrapper(props, {
+ provide: { csrfToken: 'fakeToken' },
+ });
+
+ badges = wrapper.findAll('[data-testid="strategy-badge"]');
+ });
+
+ beforeEach(() => {
+ props = getDefaultProps();
});
afterEach(() => {
@@ -97,17 +115,10 @@ describe('Feature flag table', () => {
);
});
- it('should render an environments specs column', () => {
- const envColumn = wrapper.find('.js-feature-flag-environments');
-
- expect(envColumn).toBeDefined();
- expect(trimText(envColumn.text())).toBe('scope');
- });
-
it('should render an environments specs badge with active class', () => {
const envColumn = wrapper.find('.js-feature-flag-environments');
- expect(trimText(envColumn.find(GlBadge).text())).toBe('scope');
+ expect(trimText(envColumn.find(GlBadge).text())).toBe('All Users: All Environments');
});
it('should render an actions column', () => {
@@ -120,11 +131,13 @@ describe('Feature flag table', () => {
describe('when active and with an update toggle', () => {
let toggle;
+ let spy;
beforeEach(() => {
props.featureFlags[0].update_path = props.featureFlags[0].destroy_path;
createWrapper(props);
toggle = wrapper.find(GlToggle);
+ spy = mockTracking('_category_', toggle.element, jest.spyOn);
});
it('should have a toggle', () => {
@@ -143,123 +156,40 @@ describe('Feature flag table', () => {
expect(wrapper.emitted('toggle-flag')).toEqual([[flag]]);
});
});
- });
-
- describe('with an active scope and a percentage rollout strategy', () => {
- beforeEach(() => {
- props.featureFlags[0].scopes[0].rolloutStrategy = ROLLOUT_STRATEGY_PERCENT_ROLLOUT;
- props.featureFlags[0].scopes[0].rolloutPercentage = '54';
- createWrapper(props);
- });
- it('should render an environments specs badge with percentage', () => {
- const envColumn = wrapper.find('.js-feature-flag-environments');
+ it('tracks a click', () => {
+ toggle.trigger('click');
- expect(trimText(envColumn.find(GlBadge).text())).toBe('scope: 54%');
+ expect(spy).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'feature_flag_toggle',
+ });
});
});
- describe('with an inactive scope', () => {
- beforeEach(() => {
- props.featureFlags[0].scopes[0].active = false;
- createWrapper(props);
- });
-
- it('should render an environments specs badge with inactive class', () => {
- const envColumn = wrapper.find('.js-feature-flag-environments');
-
- expect(trimText(envColumn.find(GlBadge).text())).toBe('scope');
- });
+ it('shows All Environments if the environment scope is *', () => {
+ expect(badges.at(0).text()).toContain('All Environments');
});
- describe('with a new version flag', () => {
- let toggle;
- let spy;
- let badges;
-
- beforeEach(() => {
- const newVersionProps = {
- ...props,
- featureFlags: [
- {
- id: 1,
- iid: 1,
- active: true,
- name: 'flag name',
- description: 'flag description',
- destroy_path: 'destroy/path',
- edit_path: 'edit/path',
- update_path: 'update/path',
- version: NEW_VERSION_FLAG,
- scopes: [],
- strategies: [
- {
- name: ROLLOUT_STRATEGY_ALL_USERS,
- parameters: {},
- scopes: [{ environment_scope: '*' }],
- },
- {
- name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- parameters: { percentage: '50' },
- scopes: [{ environment_scope: 'production' }, { environment_scope: 'staging' }],
- },
- {
- name: ROLLOUT_STRATEGY_USER_ID,
- parameters: { userIds: '1,2,3,4' },
- scopes: [{ environment_scope: 'review/*' }],
- },
- {
- name: ROLLOUT_STRATEGY_GITLAB_USER_LIST,
- parameters: {},
- user_list: { name: 'test list' },
- scopes: [{ environment_scope: '*' }],
- },
- ],
- },
- ],
- };
- createWrapper(newVersionProps, {
- provide: { csrfToken: 'fakeToken', glFeatures: { featureFlagsNewVersion: true } },
- });
-
- toggle = wrapper.find(GlToggle);
- spy = mockTracking('_category_', toggle.element, jest.spyOn);
- badges = wrapper.findAll('[data-testid="strategy-badge"]');
- });
-
- it('shows All Environments if the environment scope is *', () => {
- expect(badges.at(0).text()).toContain('All Environments');
- });
-
- it('shows the environment scope if another is set', () => {
- expect(badges.at(1).text()).toContain('production');
- expect(badges.at(1).text()).toContain('staging');
- expect(badges.at(2).text()).toContain('review/*');
- });
-
- it('shows All Users for the default strategy', () => {
- expect(badges.at(0).text()).toContain('All Users');
- });
-
- it('shows the percent for a percent rollout', () => {
- expect(badges.at(1).text()).toContain('Percent of users - 50%');
- });
+ it('shows the environment scope if another is set', () => {
+ expect(badges.at(1).text()).toContain('production');
+ expect(badges.at(1).text()).toContain('staging');
+ expect(badges.at(2).text()).toContain('review/*');
+ });
- it('shows the number of users for users with ID', () => {
- expect(badges.at(2).text()).toContain('User IDs - 4 users');
- });
+ it('shows All Users for the default strategy', () => {
+ expect(badges.at(0).text()).toContain('All Users');
+ });
- it('shows the name of a user list for user list', () => {
- expect(badges.at(3).text()).toContain('User List - test list');
- });
+ it('shows the percent for a percent rollout', () => {
+ expect(badges.at(1).text()).toContain('Percent of users - 50%');
+ });
- it('tracks a click', () => {
- toggle.trigger('click');
+ it('shows the number of users for users with ID', () => {
+ expect(badges.at(2).text()).toContain('User IDs - 4 users');
+ });
- expect(spy).toHaveBeenCalledWith('_category_', 'click_button', {
- label: 'feature_flag_toggle',
- });
- });
+ it('shows the name of a user list for user list', () => {
+ expect(badges.at(3).text()).toContain('User List - test list');
});
it('renders a feature flag without an iid', () => {
diff --git a/spec/frontend/feature_flags/components/form_spec.js b/spec/frontend/feature_flags/components/form_spec.js
index 6c3fce68618..c0f9638390a 100644
--- a/spec/frontend/feature_flags/components/form_spec.js
+++ b/spec/frontend/feature_flags/components/form_spec.js
@@ -1,18 +1,12 @@
-import { GlFormTextarea, GlFormCheckbox, GlButton, GlToggle } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import { uniqueId } from 'lodash';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import Api from '~/api';
-import EnvironmentsDropdown from '~/feature_flags/components/environments_dropdown.vue';
import Form from '~/feature_flags/components/form.vue';
import Strategy from '~/feature_flags/components/strategy.vue';
import {
ROLLOUT_STRATEGY_ALL_USERS,
ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- INTERNAL_ID_PREFIX,
- DEFAULT_PERCENT_ROLLOUT,
- LEGACY_FLAG,
- NEW_VERSION_FLAG,
} from '~/feature_flags/constants';
import RelatedIssuesRoot from '~/related_issues/components/related_issues_root.vue';
import { featureFlag, userList, allUsersStrategy } from '../mock_data';
@@ -29,15 +23,8 @@ describe('feature flag form', () => {
const requiredInjections = {
environmentsEndpoint: '/environments.json',
projectId: '1',
- glFeatures: {
- featureFlagPermissions: true,
- featureFlagsNewVersion: true,
- },
};
- const findAddNewScopeRow = () => wrapper.findByTestId('add-new-scope');
- const findGlToggle = () => wrapper.find(GlToggle);
-
const factory = (props = {}, provide = {}) => {
wrapper = extendedWrapper(
shallowMount(Form, {
@@ -100,328 +87,6 @@ describe('feature flag form', () => {
it('should render description textarea', () => {
expect(wrapper.find('#feature-flag-description').exists()).toBe(true);
});
-
- describe('scopes', () => {
- it('should render scopes table', () => {
- expect(wrapper.find('.js-scopes-table').exists()).toBe(true);
- });
-
- it('should render scopes table with a new row ', () => {
- expect(findAddNewScopeRow().exists()).toBe(true);
- });
-
- describe('status toggle', () => {
- describe('without filled text input', () => {
- it('should add a new scope with the text value empty and the status', () => {
- findGlToggle().vm.$emit('change', true);
-
- expect(wrapper.vm.formScopes).toHaveLength(1);
- expect(wrapper.vm.formScopes[0].active).toEqual(true);
- expect(wrapper.vm.formScopes[0].environmentScope).toEqual('');
-
- expect(wrapper.vm.newScope).toEqual('');
- });
- });
-
- it('has label', () => {
- expect(findGlToggle().props('label')).toBe(Form.i18n.statusLabel);
- });
-
- it('should be disabled if the feature flag is not active', (done) => {
- wrapper.setProps({ active: false });
- wrapper.vm.$nextTick(() => {
- expect(findGlToggle().props('disabled')).toBe(true);
- done();
- });
- });
- });
- });
- });
-
- describe('with provided data', () => {
- beforeEach(() => {
- factory({
- ...requiredProps,
- name: featureFlag.name,
- description: featureFlag.description,
- active: true,
- version: LEGACY_FLAG,
- scopes: [
- {
- id: 1,
- active: true,
- environmentScope: 'scope',
- canUpdate: true,
- protected: false,
- rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- rolloutPercentage: '54',
- rolloutUserIds: '123',
- shouldIncludeUserIds: true,
- },
- {
- id: 2,
- active: true,
- environmentScope: 'scope',
- canUpdate: false,
- protected: true,
- rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- rolloutPercentage: '54',
- rolloutUserIds: '123',
- shouldIncludeUserIds: true,
- },
- ],
- });
- });
-
- describe('scopes', () => {
- it('should be possible to remove a scope', () => {
- expect(wrapper.findByTestId('feature-flag-delete').exists()).toEqual(true);
- });
-
- it('renders empty row to add a new scope', () => {
- expect(findAddNewScopeRow().exists()).toEqual(true);
- });
-
- it('renders the user id checkbox', () => {
- expect(wrapper.find(GlFormCheckbox).exists()).toBe(true);
- });
-
- it('renders the user id text area', () => {
- expect(wrapper.find(GlFormTextarea).exists()).toBe(true);
-
- expect(wrapper.find(GlFormTextarea).vm.value).toBe('123');
- });
-
- describe('update scope', () => {
- describe('on click on toggle', () => {
- it('should update the scope', () => {
- findGlToggle().vm.$emit('change', false);
-
- expect(wrapper.vm.formScopes[0].active).toBe(false);
- });
-
- it('should be disabled if the feature flag is not active', (done) => {
- wrapper.setProps({ active: false });
-
- wrapper.vm.$nextTick(() => {
- expect(findGlToggle().props('disabled')).toBe(true);
- done();
- });
- });
- });
- describe('on strategy change', () => {
- it('should not include user IDs if All Users is selected', () => {
- const scope = wrapper.find({ ref: 'scopeRow' });
- scope.find('select').setValue(ROLLOUT_STRATEGY_ALL_USERS);
- return wrapper.vm.$nextTick().then(() => {
- expect(scope.find('#rollout-user-id-0').exists()).toBe(false);
- });
- });
- });
- });
-
- describe('deleting an existing scope', () => {
- beforeEach(() => {
- wrapper.find('.js-delete-scope').vm.$emit('click');
- });
-
- it('should add `shouldBeDestroyed` key the clicked scope', () => {
- expect(wrapper.vm.formScopes[0].shouldBeDestroyed).toBe(true);
- });
-
- it('should not render deleted scopes', () => {
- expect(wrapper.vm.filteredScopes).toEqual([expect.objectContaining({ id: 2 })]);
- });
- });
-
- describe('deleting a new scope', () => {
- it('should remove the scope from formScopes', () => {
- factory({
- ...requiredProps,
- name: 'feature_flag_1',
- description: 'this is a feature flag',
- scopes: [
- {
- environmentScope: 'new_scope',
- active: false,
- id: uniqueId(INTERNAL_ID_PREFIX),
- canUpdate: true,
- protected: false,
- strategies: [
- {
- name: ROLLOUT_STRATEGY_ALL_USERS,
- parameters: {},
- },
- ],
- },
- ],
- });
-
- wrapper.find('.js-delete-scope').vm.$emit('click');
-
- expect(wrapper.vm.formScopes).toEqual([]);
- });
- });
-
- describe('with * scope', () => {
- beforeEach(() => {
- factory({
- ...requiredProps,
- name: 'feature_flag_1',
- description: 'this is a feature flag',
- scopes: [
- {
- environmentScope: '*',
- active: false,
- canUpdate: false,
- rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
- rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
- },
- ],
- });
- });
-
- it('renders read-only name', () => {
- expect(wrapper.find('.js-scope-all').exists()).toEqual(true);
- });
- });
-
- describe('without permission to update', () => {
- it('should have the flag name input disabled', () => {
- const input = wrapper.find('#feature-flag-name');
-
- expect(input.element.disabled).toBe(true);
- });
-
- it('should have the flag discription text area disabled', () => {
- const textarea = wrapper.find('#feature-flag-description');
-
- expect(textarea.element.disabled).toBe(true);
- });
-
- it('should have the scope that cannot be updated be disabled', () => {
- const row = wrapper.findAll('.gl-responsive-table-row').at(2);
-
- expect(row.find(EnvironmentsDropdown).vm.disabled).toBe(true);
- expect(row.find(GlToggle).props('disabled')).toBe(true);
- expect(row.find('.js-delete-scope').exists()).toBe(false);
- });
- });
- });
-
- describe('on submit', () => {
- const selectFirstRolloutStrategyOption = (dropdownIndex) => {
- wrapper
- .findAll('select.js-rollout-strategy')
- .at(dropdownIndex)
- .findAll('option')
- .at(1)
- .setSelected();
- };
-
- beforeEach(() => {
- factory({
- ...requiredProps,
- name: 'feature_flag_1',
- active: true,
- description: 'this is a feature flag',
- scopes: [
- {
- id: 1,
- environmentScope: 'production',
- canUpdate: true,
- protected: true,
- active: false,
- rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
- rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
- rolloutUserIds: '',
- },
- ],
- });
-
- return wrapper.vm.$nextTick();
- });
-
- it('should emit handleSubmit with the updated data', () => {
- wrapper.find('#feature-flag-name').setValue('feature_flag_2');
-
- return wrapper.vm
- .$nextTick()
- .then(() => {
- wrapper
- .find('.js-new-scope-name')
- .find(EnvironmentsDropdown)
- .vm.$emit('selectEnvironment', 'review');
-
- return wrapper.vm.$nextTick();
- })
- .then(() => {
- findAddNewScopeRow().find(GlToggle).vm.$emit('change', true);
- })
- .then(() => {
- findGlToggle().vm.$emit('change', true);
- return wrapper.vm.$nextTick();
- })
-
- .then(() => {
- selectFirstRolloutStrategyOption(0);
- return wrapper.vm.$nextTick();
- })
- .then(() => {
- selectFirstRolloutStrategyOption(2);
- return wrapper.vm.$nextTick();
- })
- .then(() => {
- wrapper.find('.js-rollout-percentage').setValue('55');
-
- return wrapper.vm.$nextTick();
- })
- .then(() => {
- wrapper.find({ ref: 'submitButton' }).vm.$emit('click');
-
- const data = wrapper.emitted().handleSubmit[0][0];
-
- expect(data.name).toEqual('feature_flag_2');
- expect(data.description).toEqual('this is a feature flag');
- expect(data.active).toBe(true);
-
- expect(data.scopes).toEqual([
- {
- id: 1,
- active: true,
- environmentScope: 'production',
- canUpdate: true,
- protected: true,
- rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- rolloutPercentage: '55',
- rolloutUserIds: '',
- shouldIncludeUserIds: false,
- },
- {
- id: expect.any(String),
- active: false,
- environmentScope: 'review',
- canUpdate: true,
- protected: false,
- rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
- rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
- rolloutUserIds: '',
- },
- {
- id: expect.any(String),
- active: true,
- environmentScope: '',
- canUpdate: true,
- protected: false,
- rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
- rolloutUserIds: '',
- shouldIncludeUserIds: false,
- },
- ]);
- });
- });
- });
});
describe('with strategies', () => {
@@ -432,7 +97,6 @@ describe('feature flag form', () => {
name: featureFlag.name,
description: featureFlag.description,
active: true,
- version: NEW_VERSION_FLAG,
strategies: [
{
type: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
diff --git a/spec/frontend/feature_flags/components/new_feature_flag_spec.js b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
index e209c14d8c7..fe98b6421d4 100644
--- a/spec/frontend/feature_flags/components/new_feature_flag_spec.js
+++ b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
@@ -4,7 +4,6 @@ import Vuex from 'vuex';
import { TEST_HOST } from 'spec/test_constants';
import Form from '~/feature_flags/components/form.vue';
import NewFeatureFlag from '~/feature_flags/components/new_feature_flag.vue';
-import { ROLLOUT_STRATEGY_ALL_USERS, DEFAULT_PERCENT_ROLLOUT } from '~/feature_flags/constants';
import createStore from '~/feature_flags/store/new';
import { allUsersStrategy } from '../mock_data';
@@ -71,20 +70,6 @@ describe('New feature flag form', () => {
expect(wrapper.find(Form).exists()).toEqual(true);
});
- it('should render default * row', () => {
- const defaultScope = {
- id: expect.any(String),
- environmentScope: '*',
- active: true,
- rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
- rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
- rolloutUserIds: '',
- };
- expect(wrapper.vm.scopes).toEqual([defaultScope]);
-
- expect(wrapper.find(Form).props('scopes')).toContainEqual(defaultScope);
- });
-
it('has an all users strategy by default', () => {
const strategies = wrapper.find(Form).props('strategies');
diff --git a/spec/frontend/feature_flags/mock_data.js b/spec/frontend/feature_flags/mock_data.js
index 11a91e5b2a8..b5f09ac1957 100644
--- a/spec/frontend/feature_flags/mock_data.js
+++ b/spec/frontend/feature_flags/mock_data.js
@@ -16,86 +16,24 @@ export const featureFlag = {
destroy_path: 'feature_flags/1',
update_path: 'feature_flags/1',
edit_path: 'feature_flags/1/edit',
- scopes: [
+ strategies: [
{
- id: 1,
- active: true,
- environment_scope: '*',
- can_update: true,
- protected: false,
- created_at: '2019-01-14T06:41:40.987Z',
- updated_at: '2019-01-14T06:41:40.987Z',
- strategies: [
- {
- name: ROLLOUT_STRATEGY_ALL_USERS,
- parameters: {},
- },
- ],
+ id: 9,
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ scopes: [{ id: 17, environment_scope: '*' }],
},
{
- id: 2,
- active: false,
- environment_scope: 'production',
- can_update: true,
- protected: false,
- created_at: '2019-01-14T06:41:40.987Z',
- updated_at: '2019-01-14T06:41:40.987Z',
- strategies: [
- {
- name: ROLLOUT_STRATEGY_ALL_USERS,
- parameters: {},
- },
- ],
+ id: 8,
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: {},
+ scopes: [{ id: 18, environment_scope: 'review/*' }],
},
{
- id: 3,
- active: false,
- environment_scope: 'review/*',
- can_update: true,
- protected: false,
- created_at: '2019-01-14T06:41:40.987Z',
- updated_at: '2019-01-14T06:41:40.987Z',
- strategies: [
- {
- name: ROLLOUT_STRATEGY_ALL_USERS,
- parameters: {},
- },
- ],
- },
- {
- id: 4,
- active: true,
- environment_scope: 'development',
- can_update: true,
- protected: false,
- created_at: '2019-01-14T06:41:40.987Z',
- updated_at: '2019-01-14T06:41:40.987Z',
- strategies: [
- {
- name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- parameters: {
- percentage: '86',
- },
- },
- ],
- },
- {
- id: 5,
- active: true,
- environment_scope: 'development',
- can_update: true,
- protected: false,
- created_at: '2019-01-14T06:41:40.987Z',
- updated_at: '2019-01-14T06:41:40.987Z',
- strategies: [
- {
- name: ROLLOUT_STRATEGY_FLEXIBLE_ROLLOUT,
- parameters: {
- rollout: '42',
- stickiness: 'DEFAULT',
- },
- },
- ],
+ id: 7,
+ name: ROLLOUT_STRATEGY_USER_ID,
+ parameters: { userIds: '1,2,3,4' },
+ scopes: [{ id: 19, environment_scope: 'production' }],
},
],
};
diff --git a/spec/frontend/feature_flags/store/edit/actions_spec.js b/spec/frontend/feature_flags/store/edit/actions_spec.js
index afcac53468c..12fccd79170 100644
--- a/spec/frontend/feature_flags/store/edit/actions_spec.js
+++ b/spec/frontend/feature_flags/store/edit/actions_spec.js
@@ -1,11 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import { TEST_HOST } from 'spec/test_constants';
-import {
- NEW_VERSION_FLAG,
- LEGACY_FLAG,
- ROLLOUT_STRATEGY_ALL_USERS,
-} from '~/feature_flags/constants';
+import { ROLLOUT_STRATEGY_ALL_USERS } from '~/feature_flags/constants';
import {
updateFeatureFlag,
requestUpdateFeatureFlag,
@@ -19,7 +15,7 @@ import {
} from '~/feature_flags/store/edit/actions';
import * as types from '~/feature_flags/store/edit/mutation_types';
import state from '~/feature_flags/store/edit/state';
-import { mapStrategiesToRails, mapFromScopesViewModel } from '~/feature_flags/store/helpers';
+import { mapStrategiesToRails } from '~/feature_flags/store/helpers';
import axios from '~/lib/utils/axios_utils';
jest.mock('~/lib/utils/url_utility');
@@ -46,46 +42,9 @@ describe('Feature flags Edit Module actions', () => {
describe('success', () => {
it('dispatches requestUpdateFeatureFlag and receiveUpdateFeatureFlagSuccess ', (done) => {
const featureFlag = {
- name: 'feature_flag',
- description: 'feature flag',
- scopes: [
- {
- id: '1',
- environmentScope: '*',
- active: true,
- shouldBeDestroyed: false,
- canUpdate: true,
- protected: false,
- rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
- },
- ],
- version: LEGACY_FLAG,
- active: true,
- };
- mock.onPut(mockedState.endpoint, mapFromScopesViewModel(featureFlag)).replyOnce(200);
-
- testAction(
- updateFeatureFlag,
- featureFlag,
- mockedState,
- [],
- [
- {
- type: 'requestUpdateFeatureFlag',
- },
- {
- type: 'receiveUpdateFeatureFlagSuccess',
- },
- ],
- done,
- );
- });
- it('handles new version flags as well', (done) => {
- const featureFlag = {
name: 'name',
description: 'description',
active: true,
- version: NEW_VERSION_FLAG,
strategies: [
{
name: ROLLOUT_STRATEGY_ALL_USERS,
diff --git a/spec/frontend/feature_flags/store/helpers_spec.js b/spec/frontend/feature_flags/store/helpers_spec.js
index 711e2a1286e..2a6211c8cc1 100644
--- a/spec/frontend/feature_flags/store/helpers_spec.js
+++ b/spec/frontend/feature_flags/store/helpers_spec.js
@@ -1,351 +1,7 @@
-import { uniqueId } from 'lodash';
-import {
- ROLLOUT_STRATEGY_ALL_USERS,
- ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- ROLLOUT_STRATEGY_USER_ID,
- PERCENT_ROLLOUT_GROUP_ID,
- INTERNAL_ID_PREFIX,
- DEFAULT_PERCENT_ROLLOUT,
- LEGACY_FLAG,
- NEW_VERSION_FLAG,
-} from '~/feature_flags/constants';
-import {
- mapToScopesViewModel,
- mapFromScopesViewModel,
- createNewEnvironmentScope,
- mapStrategiesToViewModel,
- mapStrategiesToRails,
-} from '~/feature_flags/store/helpers';
+import { NEW_VERSION_FLAG } from '~/feature_flags/constants';
+import { mapStrategiesToViewModel, mapStrategiesToRails } from '~/feature_flags/store/helpers';
describe('feature flags helpers spec', () => {
- describe('mapToScopesViewModel', () => {
- it('converts the data object from the Rails API into something more usable by Vue', () => {
- const input = [
- {
- id: 3,
- environment_scope: 'environment_scope',
- active: true,
- can_update: true,
- protected: true,
- strategies: [
- {
- name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- parameters: {
- percentage: '56',
- },
- },
- {
- name: ROLLOUT_STRATEGY_USER_ID,
- parameters: {
- userIds: '123,234',
- },
- },
- ],
-
- _destroy: true,
- },
- ];
-
- const expected = [
- expect.objectContaining({
- id: 3,
- environmentScope: 'environment_scope',
- active: true,
- canUpdate: true,
- protected: true,
- rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- rolloutPercentage: '56',
- rolloutUserIds: '123, 234',
- shouldBeDestroyed: true,
- }),
- ];
-
- const actual = mapToScopesViewModel(input);
-
- expect(actual).toEqual(expected);
- });
-
- it('returns Boolean properties even when their Rails counterparts were not provided (are `undefined`)', () => {
- const input = [
- {
- id: 3,
- environment_scope: 'environment_scope',
- },
- ];
-
- const [result] = mapToScopesViewModel(input);
-
- expect(result).toEqual(
- expect.objectContaining({
- active: false,
- canUpdate: false,
- protected: false,
- shouldBeDestroyed: false,
- }),
- );
- });
-
- it('returns an empty array if null or undefined is provided as a parameter', () => {
- expect(mapToScopesViewModel(null)).toEqual([]);
- expect(mapToScopesViewModel(undefined)).toEqual([]);
- });
-
- describe('with user IDs per environment', () => {
- let oldGon;
-
- beforeEach(() => {
- oldGon = window.gon;
- window.gon = { features: { featureFlagsUsersPerEnvironment: true } };
- });
-
- afterEach(() => {
- window.gon = oldGon;
- });
-
- it('sets the user IDs as a comma separated string', () => {
- const input = [
- {
- id: 3,
- environment_scope: 'environment_scope',
- active: true,
- can_update: true,
- protected: true,
- strategies: [
- {
- name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- parameters: {
- percentage: '56',
- },
- },
- {
- name: ROLLOUT_STRATEGY_USER_ID,
- parameters: {
- userIds: '123,234',
- },
- },
- ],
-
- _destroy: true,
- },
- ];
-
- const expected = [
- {
- id: 3,
- environmentScope: 'environment_scope',
- active: true,
- canUpdate: true,
- protected: true,
- rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- rolloutPercentage: '56',
- rolloutUserIds: '123, 234',
- shouldBeDestroyed: true,
- shouldIncludeUserIds: true,
- },
- ];
-
- const actual = mapToScopesViewModel(input);
-
- expect(actual).toEqual(expected);
- });
- });
- });
-
- describe('mapFromScopesViewModel', () => {
- it('converts the object emitted from the Vue component into an object than is in the right format to be submitted to the Rails API', () => {
- const input = {
- name: 'name',
- description: 'description',
- active: true,
- scopes: [
- {
- id: 4,
- environmentScope: 'environmentScope',
- active: true,
- canUpdate: true,
- protected: true,
- shouldBeDestroyed: true,
- shouldIncludeUserIds: true,
- rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- rolloutPercentage: '48',
- rolloutUserIds: '123, 234',
- },
- ],
- };
-
- const expected = {
- operations_feature_flag: {
- name: 'name',
- description: 'description',
- active: true,
- version: LEGACY_FLAG,
- scopes_attributes: [
- {
- id: 4,
- environment_scope: 'environmentScope',
- active: true,
- can_update: true,
- protected: true,
- _destroy: true,
- strategies: [
- {
- name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- parameters: {
- groupId: PERCENT_ROLLOUT_GROUP_ID,
- percentage: '48',
- },
- },
- {
- name: ROLLOUT_STRATEGY_USER_ID,
- parameters: {
- userIds: '123,234',
- },
- },
- ],
- },
- ],
- },
- };
-
- const actual = mapFromScopesViewModel(input);
-
- expect(actual).toEqual(expected);
- });
-
- it('should strip out internal IDs', () => {
- const input = {
- scopes: [{ id: 3 }, { id: uniqueId(INTERNAL_ID_PREFIX) }],
- };
-
- const result = mapFromScopesViewModel(input);
- const [realId, internalId] = result.operations_feature_flag.scopes_attributes;
-
- expect(realId.id).toBe(3);
- expect(internalId.id).toBeUndefined();
- });
-
- it('returns scopes_attributes as [] if param.scopes is null or undefined', () => {
- let {
- operations_feature_flag: { scopes_attributes: actualScopes },
- } = mapFromScopesViewModel({ scopes: null });
-
- expect(actualScopes).toEqual([]);
-
- ({
- operations_feature_flag: { scopes_attributes: actualScopes },
- } = mapFromScopesViewModel({ scopes: undefined }));
-
- expect(actualScopes).toEqual([]);
- });
- describe('with user IDs per environment', () => {
- it('sets the user IDs as a comma separated string', () => {
- const input = {
- name: 'name',
- description: 'description',
- active: true,
- scopes: [
- {
- id: 4,
- environmentScope: 'environmentScope',
- active: true,
- canUpdate: true,
- protected: true,
- shouldBeDestroyed: true,
- rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- rolloutPercentage: '48',
- rolloutUserIds: '123, 234',
- shouldIncludeUserIds: true,
- },
- ],
- };
-
- const expected = {
- operations_feature_flag: {
- name: 'name',
- description: 'description',
- version: LEGACY_FLAG,
- active: true,
- scopes_attributes: [
- {
- id: 4,
- environment_scope: 'environmentScope',
- active: true,
- can_update: true,
- protected: true,
- _destroy: true,
- strategies: [
- {
- name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- parameters: {
- groupId: PERCENT_ROLLOUT_GROUP_ID,
- percentage: '48',
- },
- },
- {
- name: ROLLOUT_STRATEGY_USER_ID,
- parameters: {
- userIds: '123,234',
- },
- },
- ],
- },
- ],
- },
- };
-
- const actual = mapFromScopesViewModel(input);
-
- expect(actual).toEqual(expected);
- });
- });
- });
-
- describe('createNewEnvironmentScope', () => {
- it('should return a new environment scope object populated with the default options', () => {
- const expected = {
- environmentScope: '',
- active: false,
- id: expect.stringContaining(INTERNAL_ID_PREFIX),
- rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
- rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
- rolloutUserIds: '',
- };
-
- const actual = createNewEnvironmentScope();
-
- expect(actual).toEqual(expected);
- });
-
- it('should return a new environment scope object with overrides applied', () => {
- const overrides = {
- environmentScope: 'environmentScope',
- active: true,
- };
-
- const expected = {
- environmentScope: 'environmentScope',
- active: true,
- id: expect.stringContaining(INTERNAL_ID_PREFIX),
- rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
- rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
- rolloutUserIds: '',
- };
-
- const actual = createNewEnvironmentScope(overrides);
-
- expect(actual).toEqual(expected);
- });
-
- it('sets canUpdate and protected when called with featureFlagPermissions=true', () => {
- expect(createNewEnvironmentScope({}, true)).toEqual(
- expect.objectContaining({
- canUpdate: true,
- protected: false,
- }),
- );
- });
- });
-
describe('mapStrategiesToViewModel', () => {
it('should map rails casing to view model casing', () => {
expect(
@@ -380,14 +36,14 @@ describe('feature flags helpers spec', () => {
});
it('inserts spaces between user ids', () => {
- const strategy = mapStrategiesToViewModel([
+ const [strategy] = mapStrategiesToViewModel([
{
id: '1',
name: 'userWithId',
parameters: { userIds: 'user1,user2,user3' },
scopes: [],
},
- ])[0];
+ ]);
expect(strategy.parameters).toEqual({ userIds: 'user1, user2, user3' });
});
@@ -399,7 +55,6 @@ describe('feature flags helpers spec', () => {
mapStrategiesToRails({
name: 'test',
description: 'test description',
- version: NEW_VERSION_FLAG,
active: true,
strategies: [
{
@@ -421,8 +76,8 @@ describe('feature flags helpers spec', () => {
operations_feature_flag: {
name: 'test',
description: 'test description',
- version: NEW_VERSION_FLAG,
active: true,
+ version: NEW_VERSION_FLAG,
strategies_attributes: [
{
id: '1',
@@ -447,7 +102,6 @@ describe('feature flags helpers spec', () => {
mapStrategiesToRails({
name: 'test',
description: 'test description',
- version: NEW_VERSION_FLAG,
active: true,
strategies: [
{
@@ -462,8 +116,8 @@ describe('feature flags helpers spec', () => {
operations_feature_flag: {
name: 'test',
description: 'test description',
- version: NEW_VERSION_FLAG,
active: true,
+ version: NEW_VERSION_FLAG,
strategies_attributes: [
{
id: '1',
@@ -483,7 +137,6 @@ describe('feature flags helpers spec', () => {
it('removes white space between user ids', () => {
const result = mapStrategiesToRails({
name: 'test',
- version: NEW_VERSION_FLAG,
active: true,
strategies: [
{
@@ -503,7 +156,6 @@ describe('feature flags helpers spec', () => {
it('preserves the value of active', () => {
const result = mapStrategiesToRails({
name: 'test',
- version: NEW_VERSION_FLAG,
active: false,
strategies: [],
});
diff --git a/spec/frontend/feature_flags/store/index/actions_spec.js b/spec/frontend/feature_flags/store/index/actions_spec.js
index ec311ef92a3..a59f99f538c 100644
--- a/spec/frontend/feature_flags/store/index/actions_spec.js
+++ b/spec/frontend/feature_flags/store/index/actions_spec.js
@@ -1,7 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import { TEST_HOST } from 'spec/test_constants';
-import { mapToScopesViewModel } from '~/feature_flags/store/helpers';
import {
requestFeatureFlags,
receiveFeatureFlagsSuccess,
@@ -255,7 +254,6 @@ describe('Feature flags actions', () => {
beforeEach(() => {
mockedState.featureFlags = getRequestData.feature_flags.map((flag) => ({
...flag,
- scopes: mapToScopesViewModel(flag.scopes || []),
}));
mock = new MockAdapter(axios);
});
@@ -314,7 +312,6 @@ describe('Feature flags actions', () => {
beforeEach(() => {
mockedState.featureFlags = getRequestData.feature_flags.map((f) => ({
...f,
- scopes: mapToScopesViewModel(f.scopes || []),
}));
});
@@ -338,7 +335,6 @@ describe('Feature flags actions', () => {
beforeEach(() => {
mockedState.featureFlags = getRequestData.feature_flags.map((f) => ({
...f,
- scopes: mapToScopesViewModel(f.scopes || []),
}));
});
@@ -362,7 +358,6 @@ describe('Feature flags actions', () => {
beforeEach(() => {
mockedState.featureFlags = getRequestData.feature_flags.map((f) => ({
...f,
- scopes: mapToScopesViewModel(f.scopes || []),
}));
});
diff --git a/spec/frontend/feature_flags/store/index/mutations_spec.js b/spec/frontend/feature_flags/store/index/mutations_spec.js
index b9354196c68..c19f459e124 100644
--- a/spec/frontend/feature_flags/store/index/mutations_spec.js
+++ b/spec/frontend/feature_flags/store/index/mutations_spec.js
@@ -1,4 +1,3 @@
-import { mapToScopesViewModel } from '~/feature_flags/store/helpers';
import * as types from '~/feature_flags/store/index/mutation_types';
import mutations from '~/feature_flags/store/index/mutations';
import state from '~/feature_flags/store/index/state';
@@ -49,15 +48,6 @@ describe('Feature flags store Mutations', () => {
expect(stateCopy.hasError).toEqual(false);
});
- it('should set featureFlags with the transformed data', () => {
- const expected = getRequestData.feature_flags.map((flag) => ({
- ...flag,
- scopes: mapToScopesViewModel(flag.scopes || []),
- }));
-
- expect(stateCopy.featureFlags).toEqual(expected);
- });
-
it('should set count with the given data', () => {
expect(stateCopy.count).toEqual(37);
});
@@ -131,13 +121,11 @@ describe('Feature flags store Mutations', () => {
beforeEach(() => {
stateCopy.featureFlags = getRequestData.feature_flags.map((flag) => ({
...flag,
- scopes: mapToScopesViewModel(flag.scopes || []),
}));
stateCopy.count = { featureFlags: 1, userLists: 0 };
mutations[types.UPDATE_FEATURE_FLAG](stateCopy, {
...featureFlag,
- scopes: mapToScopesViewModel(featureFlag.scopes || []),
active: false,
});
});
@@ -146,7 +134,6 @@ describe('Feature flags store Mutations', () => {
expect(stateCopy.featureFlags).toEqual([
{
...featureFlag,
- scopes: mapToScopesViewModel(featureFlag.scopes || []),
active: false,
},
]);
@@ -158,7 +145,6 @@ describe('Feature flags store Mutations', () => {
stateCopy.featureFlags = getRequestData.feature_flags.map((flag) => ({
...flag,
...flagState,
- scopes: mapToScopesViewModel(flag.scopes || []),
}));
stateCopy.count = stateCount;
@@ -174,7 +160,6 @@ describe('Feature flags store Mutations', () => {
expect(stateCopy.featureFlags).toEqual([
{
...featureFlag,
- scopes: mapToScopesViewModel(featureFlag.scopes || []),
active: false,
},
]);
@@ -185,7 +170,6 @@ describe('Feature flags store Mutations', () => {
beforeEach(() => {
stateCopy.featureFlags = getRequestData.feature_flags.map((flag) => ({
...flag,
- scopes: mapToScopesViewModel(flag.scopes || []),
}));
mutations[types.RECEIVE_UPDATE_FEATURE_FLAG_ERROR](stateCopy, featureFlag.id);
});
@@ -194,7 +178,6 @@ describe('Feature flags store Mutations', () => {
expect(stateCopy.featureFlags).toEqual([
{
...featureFlag,
- scopes: mapToScopesViewModel(featureFlag.scopes || []),
active: false,
},
]);
diff --git a/spec/frontend/feature_flags/store/new/actions_spec.js b/spec/frontend/feature_flags/store/new/actions_spec.js
index 00dfb982ded..7900b200eb2 100644
--- a/spec/frontend/feature_flags/store/new/actions_spec.js
+++ b/spec/frontend/feature_flags/store/new/actions_spec.js
@@ -1,13 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import { TEST_HOST } from 'spec/test_constants';
-import {
- ROLLOUT_STRATEGY_ALL_USERS,
- ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- LEGACY_FLAG,
- NEW_VERSION_FLAG,
-} from '~/feature_flags/constants';
-import { mapFromScopesViewModel, mapStrategiesToRails } from '~/feature_flags/store/helpers';
+import { ROLLOUT_STRATEGY_ALL_USERS } from '~/feature_flags/constants';
+import { mapStrategiesToRails } from '~/feature_flags/store/helpers';
import {
createFeatureFlag,
requestCreateFeatureFlag,
@@ -24,33 +18,13 @@ describe('Feature flags New Module Actions', () => {
let mockedState;
beforeEach(() => {
- mockedState = state({ endpoint: 'feature_flags.json', path: '/feature_flags' });
+ mockedState = state({ endpoint: '/feature_flags.json', path: '/feature_flags' });
});
describe('createFeatureFlag', () => {
let mock;
- const actionParams = {
- name: 'name',
- description: 'description',
- active: true,
- version: LEGACY_FLAG,
- scopes: [
- {
- id: 1,
- environmentScope: 'environmentScope',
- active: true,
- canUpdate: true,
- protected: true,
- shouldBeDestroyed: false,
- rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
- rolloutPercentage: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
- },
- ],
- };
-
beforeEach(() => {
- mockedState.endpoint = `${TEST_HOST}/endpoint.json`;
mock = new MockAdapter(axios);
});
@@ -60,33 +34,10 @@ describe('Feature flags New Module Actions', () => {
describe('success', () => {
it('dispatches requestCreateFeatureFlag and receiveCreateFeatureFlagSuccess ', (done) => {
- const convertedActionParams = mapFromScopesViewModel(actionParams);
-
- mock.onPost(`${TEST_HOST}/endpoint.json`, convertedActionParams).replyOnce(200);
-
- testAction(
- createFeatureFlag,
- actionParams,
- mockedState,
- [],
- [
- {
- type: 'requestCreateFeatureFlag',
- },
- {
- type: 'receiveCreateFeatureFlagSuccess',
- },
- ],
- done,
- );
- });
-
- it('sends strategies for new style feature flags', (done) => {
- const newVersionFlagParams = {
+ const actionParams = {
name: 'name',
description: 'description',
active: true,
- version: NEW_VERSION_FLAG,
strategies: [
{
name: ROLLOUT_STRATEGY_ALL_USERS,
@@ -97,13 +48,11 @@ describe('Feature flags New Module Actions', () => {
},
],
};
- mock
- .onPost(`${TEST_HOST}/endpoint.json`, mapStrategiesToRails(newVersionFlagParams))
- .replyOnce(200);
+ mock.onPost(mockedState.endpoint, mapStrategiesToRails(actionParams)).replyOnce(200);
testAction(
createFeatureFlag,
- newVersionFlagParams,
+ actionParams,
mockedState,
[],
[
@@ -121,10 +70,22 @@ describe('Feature flags New Module Actions', () => {
describe('error', () => {
it('dispatches requestCreateFeatureFlag and receiveCreateFeatureFlagError ', (done) => {
- const convertedActionParams = mapFromScopesViewModel(actionParams);
-
+ const actionParams = {
+ name: 'name',
+ description: 'description',
+ active: true,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ id: 1,
+ scopes: [{ id: 1, environmentScope: 'environmentScope', shouldBeDestroyed: false }],
+ shouldBeDestroyed: false,
+ },
+ ],
+ };
mock
- .onPost(`${TEST_HOST}/endpoint.json`, convertedActionParams)
+ .onPost(mockedState.endpoint, mapStrategiesToRails(actionParams))
.replyOnce(500, { message: [] });
testAction(
diff --git a/spec/frontend/feature_highlight/feature_highlight_helper_spec.js b/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
index 1b5bffc1f9b..b87571830ca 100644
--- a/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
+++ b/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import { dismiss } from '~/feature_highlight/feature_highlight_helper';
-import { deprecatedCreateFlash as Flash } from '~/flash';
+import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import httpStatusCodes from '~/lib/utils/http_status';
@@ -32,9 +32,10 @@ describe('feature highlight helper', () => {
await dismiss(endpoint, highlightId);
- expect(Flash).toHaveBeenCalledWith(
- 'An error occurred while dismissing the feature highlight. Refresh the page and try dismissing again.',
- );
+ expect(createFlash).toHaveBeenCalledWith({
+ message:
+ 'An error occurred while dismissing the feature highlight. Refresh the page and try dismissing again.',
+ });
});
});
});
diff --git a/spec/frontend/filtered_search/filtered_search_manager_spec.js b/spec/frontend/filtered_search/filtered_search_manager_spec.js
index c03c8f6c529..83e7f6c9b3f 100644
--- a/spec/frontend/filtered_search/filtered_search_manager_spec.js
+++ b/spec/frontend/filtered_search/filtered_search_manager_spec.js
@@ -8,12 +8,14 @@ import IssuableFilteredSearchTokenKeys from '~/filtered_search/issuable_filtered
import RecentSearchesRoot from '~/filtered_search/recent_searches_root';
import RecentSearchesService from '~/filtered_search/services/recent_searches_service';
import RecentSearchesServiceError from '~/filtered_search/services/recent_searches_service_error';
-import * as commonUtils from '~/lib/utils/common_utils';
+import createFlash from '~/flash';
import { BACKSPACE_KEY_CODE, DELETE_KEY_CODE } from '~/lib/utils/keycodes';
-import { visitUrl } from '~/lib/utils/url_utility';
+import { visitUrl, getParameterByName } from '~/lib/utils/url_utility';
+jest.mock('~/flash');
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
+ getParameterByName: jest.fn(),
visitUrl: jest.fn(),
}));
@@ -84,9 +86,10 @@ describe('Filtered Search Manager', () => {
jest
.spyOn(FilteredSearchDropdownManager.prototype, 'updateDropdownOffset')
.mockImplementation();
- jest.spyOn(commonUtils, 'getParameterByName').mockReturnValue(null);
jest.spyOn(FilteredSearchVisualTokens, 'unselectTokens');
+ getParameterByName.mockReturnValue(null);
+
input = document.querySelector('.filtered-search');
tokensContainer = document.querySelector('.tokens-container');
manager = new FilteredSearchManager({ page, useDefaultState });
@@ -127,11 +130,10 @@ describe('Filtered Search Manager', () => {
jest
.spyOn(RecentSearchesService.prototype, 'fetch')
.mockImplementation(() => Promise.reject(new RecentSearchesServiceError()));
- jest.spyOn(window, 'Flash').mockImplementation();
manager.setup();
- expect(window.Flash).not.toHaveBeenCalled();
+ expect(createFlash).not.toHaveBeenCalled();
});
});
diff --git a/spec/frontend/filtered_search/visual_token_value_spec.js b/spec/frontend/filtered_search/visual_token_value_spec.js
index 772fa7d07ed..7185f382fc1 100644
--- a/spec/frontend/filtered_search/visual_token_value_spec.js
+++ b/spec/frontend/filtered_search/visual_token_value_spec.js
@@ -1,11 +1,14 @@
import { escape } from 'lodash';
import FilteredSearchSpecHelper from 'helpers/filtered_search_spec_helper';
import { TEST_HOST } from 'helpers/test_constants';
-import DropdownUtils from '~/filtered_search//dropdown_utils';
+import DropdownUtils from '~/filtered_search/dropdown_utils';
import VisualTokenValue from '~/filtered_search/visual_token_value';
+import createFlash from '~/flash';
import AjaxCache from '~/lib/utils/ajax_cache';
import UsersCache from '~/lib/utils/users_cache';
+jest.mock('~/flash');
+
describe('Filtered Search Visual Tokens', () => {
const findElements = (tokenElement) => {
const tokenNameElement = tokenElement.querySelector('.name');
@@ -43,7 +46,6 @@ describe('Filtered Search Visual Tokens', () => {
});
it('ignores error if UsersCache throws', (done) => {
- jest.spyOn(window, 'Flash').mockImplementation(() => {});
const dummyError = new Error('Earth rotated backwards');
const { subject, tokenValueContainer, tokenValueElement } = findElements(authorToken);
const tokenValue = tokenValueElement.innerText;
@@ -55,7 +57,7 @@ describe('Filtered Search Visual Tokens', () => {
subject
.updateUserTokenAppearance(tokenValueContainer, tokenValueElement, tokenValue)
.then(() => {
- expect(window.Flash.mock.calls.length).toBe(0);
+ expect(createFlash.mock.calls.length).toBe(0);
})
.then(done)
.catch(done.fail);
diff --git a/spec/frontend/fixtures/api_markdown.rb b/spec/frontend/fixtures/api_markdown.rb
index 1c3967b2c36..94db262e4fd 100644
--- a/spec/frontend/fixtures/api_markdown.rb
+++ b/spec/frontend/fixtures/api_markdown.rb
@@ -4,12 +4,29 @@ require 'spec_helper'
RSpec.describe API::MergeRequests, '(JavaScript fixtures)', type: :request do
include ApiHelpers
+ include WikiHelpers
include JavaScriptFixturesHelpers
+ let_it_be(:user) { create(:user) }
+
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :public, :repository, group: group) }
+
+ let_it_be(:project_wiki) { create(:project_wiki, user: user) }
+
+ let(:project_wiki_page) { create(:wiki_page, wiki: project_wiki) }
+
fixture_subdir = 'api/markdown'
before(:all) do
clean_frontend_fixtures(fixture_subdir)
+
+ group.add_owner(user)
+ project.add_maintainer(user)
+ end
+
+ before do
+ sign_in(user)
end
markdown_examples = begin
@@ -19,14 +36,27 @@ RSpec.describe API::MergeRequests, '(JavaScript fixtures)', type: :request do
end
markdown_examples.each do |markdown_example|
+ context = markdown_example.fetch(:context, '')
name = markdown_example.fetch(:name)
- context "for #{name}" do
+ context "for #{name}#{!context.empty? ? " (context: #{context})" : ''}" do
let(:markdown) { markdown_example.fetch(:markdown) }
+ name = "#{context}_#{name}" unless context.empty?
+
it "#{fixture_subdir}/#{name}.json" do
- post api("/markdown"), params: { text: markdown, gfm: true }
+ api_url = case context
+ when 'project'
+ "/#{project.full_path}/preview_markdown"
+ when 'group'
+ "/groups/#{group.full_path}/preview_markdown"
+ when 'project_wiki'
+ "/#{project.full_path}/-/wikis/#{project_wiki_page.slug}/preview_markdown"
+ else
+ api "/markdown"
+ end
+ post api_url, params: { text: markdown, gfm: true }
expect(response).to be_successful
end
end
diff --git a/spec/frontend/fixtures/api_markdown.yml b/spec/frontend/fixtures/api_markdown.yml
index 3274e914f03..8d8c9a1d902 100644
--- a/spec/frontend/fixtures/api_markdown.yml
+++ b/spec/frontend/fixtures/api_markdown.yml
@@ -10,8 +10,28 @@
markdown: '`code`'
- name: strike
markdown: '~~del~~'
+- name: horizontal_rule
+ markdown: '---'
- name: link
markdown: '[GitLab](https://gitlab.com)'
+- name: attachment_link
+ context: project_wiki
+ markdown: '[test-file](test-file.zip)'
+- name: attachment_link
+ context: project
+ markdown: '[test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip)'
+- name: attachment_link
+ context: group
+ markdown: '[test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip)'
+- name: attachment_image
+ context: project_wiki
+ markdown: '![test-file](test-file.png)'
+- name: attachment_image
+ context: project
+ markdown: '![test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png)'
+- name: attachment_image
+ context: group
+ markdown: '![test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png)'
- name: code_block
markdown: |-
```javascript
@@ -54,3 +74,16 @@
markdown: |-
This is a line after a\
hard break
+- name: table
+ markdown: |-
+ | header | header |
+ |--------|--------|
+ | cell | cell |
+ | cell | cell |
+- name: table_with_alignment
+ markdown: |-
+ | header | : header : | header : |
+ |--------|------------|----------|
+ | cell | cell | cell |
+ | cell | cell | cell |
+
diff --git a/spec/frontend/fixtures/application_settings.rb b/spec/frontend/fixtures/application_settings.rb
index ebccecb32ba..b09bea56b94 100644
--- a/spec/frontend/fixtures/application_settings.rb
+++ b/spec/frontend/fixtures/application_settings.rb
@@ -34,4 +34,12 @@ RSpec.describe Admin::ApplicationSettingsController, '(JavaScript fixtures)', ty
expect(response).to be_successful
end
+
+ it 'application_settings/usage.html' do
+ stub_application_setting(usage_ping_enabled: false)
+
+ get :metrics_and_profiling
+
+ expect(response).to be_successful
+ end
end
diff --git a/spec/frontend/fixtures/pipelines.rb b/spec/frontend/fixtures/pipelines.rb
index 2a538352abe..f695b74ec87 100644
--- a/spec/frontend/fixtures/pipelines.rb
+++ b/spec/frontend/fixtures/pipelines.rb
@@ -13,6 +13,7 @@ RSpec.describe Projects::PipelinesController, '(JavaScript fixtures)', type: :co
let!(:build_pipeline_without_author) { create(:ci_build, pipeline: pipeline_without_author, stage: 'test') }
let_it_be(:pipeline_without_commit) { create(:ci_pipeline, status: :success, project: project, sha: '0000') }
+
let!(:build_pipeline_without_commit) { create(:ci_build, pipeline: pipeline_without_commit, stage: 'test') }
let(:commit) { create(:commit, project: project) }
diff --git a/spec/frontend/fixtures/projects.rb b/spec/frontend/fixtures/projects.rb
index 778ae218160..7873d59dbad 100644
--- a/spec/frontend/fixtures/projects.rb
+++ b/spec/frontend/fixtures/projects.rb
@@ -61,13 +61,12 @@ RSpec.describe 'Projects (JavaScript fixtures)', type: :controller do
clean_frontend_fixtures('graphql/projects/access_tokens')
end
- fragment_paths = ['graphql_shared/fragments/pageInfo.fragment.graphql']
base_input_path = 'access_tokens/graphql/queries/'
base_output_path = 'graphql/projects/access_tokens/'
query_name = 'get_projects.query.graphql'
it "#{base_output_path}#{query_name}.json" do
- query = get_graphql_query_as_string("#{base_input_path}#{query_name}", fragment_paths)
+ query = get_graphql_query_as_string("#{base_input_path}#{query_name}")
post_graphql(query, current_user: user, variables: { search: '', first: 2 })
diff --git a/spec/frontend/fixtures/prometheus_service.rb b/spec/frontend/fixtures/prometheus_service.rb
index 3a59ecf3868..c349f2a24bc 100644
--- a/spec/frontend/fixtures/prometheus_service.rb
+++ b/spec/frontend/fixtures/prometheus_service.rb
@@ -7,7 +7,7 @@ RSpec.describe Projects::ServicesController, '(JavaScript fixtures)', type: :con
let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
let(:project) { create(:project_empty_repo, namespace: namespace, path: 'services-project') }
- let!(:service) { create(:prometheus_service, project: project) }
+ let!(:integration) { create(:prometheus_integration, project: project) }
let(:user) { project.owner }
render_views
@@ -28,7 +28,7 @@ RSpec.describe Projects::ServicesController, '(JavaScript fixtures)', type: :con
get :edit, params: {
namespace_id: namespace,
project_id: project,
- id: service.to_param
+ id: integration.to_param
}
expect(response).to be_successful
diff --git a/spec/frontend/fixtures/releases.rb b/spec/frontend/fixtures/releases.rb
index ac34400bc01..e8f259fba15 100644
--- a/spec/frontend/fixtures/releases.rb
+++ b/spec/frontend/fixtures/releases.rb
@@ -133,15 +133,13 @@ RSpec.describe 'Releases (JavaScript fixtures)' do
all_releases_query_path = 'releases/graphql/queries/all_releases.query.graphql'
one_release_query_path = 'releases/graphql/queries/one_release.query.graphql'
one_release_for_editing_query_path = 'releases/graphql/queries/one_release_for_editing.query.graphql'
- release_fragment_path = 'releases/graphql/fragments/release.fragment.graphql'
- release_for_editing_fragment_path = 'releases/graphql/fragments/release_for_editing.fragment.graphql'
before(:all) do
clean_frontend_fixtures('graphql/releases/')
end
it "graphql/#{all_releases_query_path}.json" do
- query = get_graphql_query_as_string(all_releases_query_path, [release_fragment_path])
+ query = get_graphql_query_as_string(all_releases_query_path)
post_graphql(query, current_user: admin, variables: { fullPath: project.full_path })
@@ -150,7 +148,7 @@ RSpec.describe 'Releases (JavaScript fixtures)' do
end
it "graphql/#{one_release_query_path}.json" do
- query = get_graphql_query_as_string(one_release_query_path, [release_fragment_path])
+ query = get_graphql_query_as_string(one_release_query_path)
post_graphql(query, current_user: admin, variables: { fullPath: project.full_path, tagName: release.tag })
@@ -159,7 +157,7 @@ RSpec.describe 'Releases (JavaScript fixtures)' do
end
it "graphql/#{one_release_for_editing_query_path}.json" do
- query = get_graphql_query_as_string(one_release_for_editing_query_path, [release_for_editing_fragment_path])
+ query = get_graphql_query_as_string(one_release_for_editing_query_path)
post_graphql(query, current_user: admin, variables: { fullPath: project.full_path, tagName: release.tag })
diff --git a/spec/frontend/fixtures/runner.rb b/spec/frontend/fixtures/runner.rb
index b88fb840137..e29a58f43b9 100644
--- a/spec/frontend/fixtures/runner.rb
+++ b/spec/frontend/fixtures/runner.rb
@@ -36,10 +36,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
get_runners_query_name = 'get_runners.query.graphql'
let_it_be(:query) do
- get_graphql_query_as_string("#{query_path}#{get_runners_query_name}", [
- 'runner/graphql/runner_node.fragment.graphql',
- 'graphql_shared/fragments/pageInfo.fragment.graphql'
- ])
+ get_graphql_query_as_string("#{query_path}#{get_runners_query_name}")
end
it "#{fixtures_path}#{get_runners_query_name}.json" do
@@ -59,9 +56,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
get_runner_query_name = 'get_runner.query.graphql'
let_it_be(:query) do
- get_graphql_query_as_string("#{query_path}#{get_runner_query_name}", [
- 'runner/graphql/runner_details.fragment.graphql'
- ])
+ get_graphql_query_as_string("#{query_path}#{get_runner_query_name}")
end
it "#{fixtures_path}#{get_runner_query_name}.json" do
diff --git a/spec/frontend/flash_spec.js b/spec/frontend/flash_spec.js
index 28e8522cc12..96e5202780b 100644
--- a/spec/frontend/flash_spec.js
+++ b/spec/frontend/flash_spec.js
@@ -1,5 +1,4 @@
import createFlash, {
- deprecatedCreateFlash,
createFlashEl,
createAction,
hideFlash,
@@ -125,120 +124,6 @@ describe('Flash', () => {
});
});
- describe('deprecatedCreateFlash', () => {
- const message = 'test';
- const type = 'alert';
- const parent = document;
- const actionConfig = null;
- const fadeTransition = false;
- const addBodyClass = true;
- const defaultParams = [message, type, parent, actionConfig, fadeTransition, addBodyClass];
-
- describe('no flash-container', () => {
- it('does not add to the DOM', () => {
- const flashEl = deprecatedCreateFlash(message);
-
- expect(flashEl).toBeNull();
-
- expect(document.querySelector('.flash-alert')).toBeNull();
- });
- });
-
- describe('with flash-container', () => {
- beforeEach(() => {
- setFixtures(
- '<div class="content-wrapper js-content-wrapper"><div class="flash-container"></div></div>',
- );
- });
-
- afterEach(() => {
- document.querySelector('.js-content-wrapper').remove();
- });
-
- it('adds flash element into container', () => {
- deprecatedCreateFlash(...defaultParams);
-
- expect(document.querySelector('.flash-alert')).not.toBeNull();
-
- expect(document.body.className).toContain('flash-shown');
- });
-
- it('adds flash into specified parent', () => {
- deprecatedCreateFlash(
- message,
- type,
- document.querySelector('.content-wrapper'),
- actionConfig,
- fadeTransition,
- addBodyClass,
- );
-
- expect(document.querySelector('.content-wrapper .flash-alert')).not.toBeNull();
- expect(document.querySelector('.content-wrapper').innerText.trim()).toEqual(message);
- });
-
- it('adds container classes when inside content-wrapper', () => {
- deprecatedCreateFlash(...defaultParams);
-
- expect(document.querySelector('.flash-text').className).toBe('flash-text');
- expect(document.querySelector('.content-wrapper').innerText.trim()).toEqual(message);
- });
-
- it('does not add container when outside of content-wrapper', () => {
- document.querySelector('.content-wrapper').className = 'js-content-wrapper';
- deprecatedCreateFlash(...defaultParams);
-
- expect(document.querySelector('.flash-text').className.trim()).toContain('flash-text');
- });
-
- it('removes element after clicking', () => {
- deprecatedCreateFlash(...defaultParams);
-
- document.querySelector('.flash-alert .js-close-icon').click();
-
- expect(document.querySelector('.flash-alert')).toBeNull();
-
- expect(document.body.className).not.toContain('flash-shown');
- });
-
- describe('with actionConfig', () => {
- it('adds action link', () => {
- const newActionConfig = { title: 'test' };
- deprecatedCreateFlash(
- message,
- type,
- parent,
- newActionConfig,
- fadeTransition,
- addBodyClass,
- );
-
- expect(document.querySelector('.flash-action')).not.toBeNull();
- });
-
- it('calls actionConfig clickHandler on click', () => {
- const newActionConfig = {
- title: 'test',
- clickHandler: jest.fn(),
- };
-
- deprecatedCreateFlash(
- message,
- type,
- parent,
- newActionConfig,
- fadeTransition,
- addBodyClass,
- );
-
- document.querySelector('.flash-action').click();
-
- expect(newActionConfig.clickHandler).toHaveBeenCalled();
- });
- });
- });
- });
-
describe('createFlash', () => {
const message = 'test';
const type = 'alert';
diff --git a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
index 9a68115e4f6..5a05265afdc 100644
--- a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
@@ -1,9 +1,11 @@
+import { GlButton } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { trimText } from 'helpers/text_helper';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import frequentItemsListItemComponent from '~/frequent_items/components/frequent_items_list_item.vue';
import { createStore } from '~/frequent_items/store';
+import ProjectAvatar from '~/vue_shared/components/project_avatar.vue';
import { mockProject } from '../mock_data';
const localVue = createLocalVue();
@@ -15,12 +17,12 @@ describe('FrequentItemsListItemComponent', () => {
let store;
const findTitle = () => wrapper.find({ ref: 'frequentItemsItemTitle' });
- const findAvatar = () => wrapper.find({ ref: 'frequentItemsItemAvatar' });
+ const findAvatar = () => wrapper.findComponent(ProjectAvatar);
const findAllTitles = () => wrapper.findAll({ ref: 'frequentItemsItemTitle' });
const findNamespace = () => wrapper.find({ ref: 'frequentItemsItemNamespace' });
- const findAllAnchors = () => wrapper.findAll('a');
+ const findAllButtons = () => wrapper.findAllComponents(GlButton);
const findAllNamespace = () => wrapper.findAll({ ref: 'frequentItemsItemNamespace' });
- const findAvatarContainer = () => wrapper.findAll({ ref: 'frequentItemsItemAvatarContainer' });
+ const findAllAvatars = () => wrapper.findAllComponents(ProjectAvatar);
const findAllMetadataContainers = () =>
wrapper.findAll({ ref: 'frequentItemsItemMetadataContainer' });
@@ -91,16 +93,8 @@ describe('FrequentItemsListItemComponent', () => {
createComponent();
});
- it('should render avatar if avatarUrl is present', () => {
- wrapper.setProps({ avatarUrl: 'path/to/avatar.png' });
-
- return wrapper.vm.$nextTick(() => {
- expect(findAvatar().exists()).toBe(true);
- });
- });
-
- it('should not render avatar if avatarUrl is not present', () => {
- expect(findAvatar().exists()).toBe(false);
+ it('renders avatar', () => {
+ expect(findAvatar().exists()).toBe(true);
});
it('renders root element with the right classes', () => {
@@ -109,8 +103,8 @@ describe('FrequentItemsListItemComponent', () => {
it.each`
name | selector | expected
- ${'anchor'} | ${findAllAnchors} | ${1}
- ${'avatar container'} | ${findAvatarContainer} | ${1}
+ ${'button'} | ${findAllButtons} | ${1}
+ ${'avatar container'} | ${findAllAvatars} | ${1}
${'metadata container'} | ${findAllMetadataContainers} | ${1}
${'title'} | ${findAllTitles} | ${1}
${'namespace'} | ${findAllNamespace} | ${1}
@@ -119,13 +113,10 @@ describe('FrequentItemsListItemComponent', () => {
});
it('tracks when item link is clicked', () => {
- const link = wrapper.find('a');
- // NOTE: this listener is required to prevent the click from going through and causing:
- // `Error: Not implemented: navigation ...`
- link.element.addEventListener('click', (e) => {
- e.preventDefault();
- });
- link.trigger('click');
+ const link = wrapper.findComponent(GlButton);
+
+ link.vm.$emit('click');
+
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_link', {
label: 'projects_dropdown_frequent_items_list_item',
});
diff --git a/spec/frontend/gpg_badges_spec.js b/spec/frontend/gpg_badges_spec.js
index cd2cc88fa5a..44c70f1ad4d 100644
--- a/spec/frontend/gpg_badges_spec.js
+++ b/spec/frontend/gpg_badges_spec.js
@@ -17,19 +17,23 @@ describe('GpgBadges', () => {
};
const dummyUrl = `${TEST_HOST}/dummy/signatures`;
- beforeEach(() => {
- mock = new MockAdapter(axios);
+ const setForm = ({ utf8 = '✓', search = '' } = {}) => {
setFixtures(`
<form
class="commits-search-form js-signature-container" data-signatures-path="${dummyUrl}" action="${dummyUrl}"
method="get">
- <input name="utf8" type="hidden" value="✓">
- <input type="search" name="search" id="commits-search"class="form-control search-text-input input-short">
+ <input name="utf8" type="hidden" value="${utf8}">
+ <input type="search" name="search" value="${search}" id="commits-search"class="form-control search-text-input input-short">
</form>
<div class="parent-container">
<div class="js-loading-gpg-badge" data-commit-sha="${dummyCommitSha}"></div>
</div>
`);
+ };
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ setForm();
});
afterEach(() => {
@@ -62,6 +66,44 @@ describe('GpgBadges', () => {
.catch(done.fail);
});
+ it('fetches commit signatures', async () => {
+ mock.onGet(dummyUrl).replyOnce(200);
+
+ await GpgBadges.fetch();
+
+ expect(mock.history.get).toHaveLength(1);
+ expect(mock.history.get[0]).toMatchObject({
+ params: { search: '', utf8: '✓' },
+ url: dummyUrl,
+ });
+ });
+
+ it('fetches commit signatures with search parameters with spaces', async () => {
+ mock.onGet(dummyUrl).replyOnce(200);
+ setForm({ search: 'my search' });
+
+ await GpgBadges.fetch();
+
+ expect(mock.history.get).toHaveLength(1);
+ expect(mock.history.get[0]).toMatchObject({
+ params: { search: 'my search', utf8: '✓' },
+ url: dummyUrl,
+ });
+ });
+
+ it('fetches commit signatures with search parameters with plus symbols', async () => {
+ mock.onGet(dummyUrl).replyOnce(200);
+ setForm({ search: 'my+search' });
+
+ await GpgBadges.fetch();
+
+ expect(mock.history.get).toHaveLength(1);
+ expect(mock.history.get[0]).toMatchObject({
+ params: { search: 'my+search', utf8: '✓' },
+ url: dummyUrl,
+ });
+ });
+
it('displays a loading spinner', (done) => {
mock.onGet(dummyUrl).replyOnce(200);
diff --git a/spec/frontend/grafana_integration/components/grafana_integration_spec.js b/spec/frontend/grafana_integration/components/grafana_integration_spec.js
index 3cb4dd41574..d5338430054 100644
--- a/spec/frontend/grafana_integration/components/grafana_integration_spec.js
+++ b/spec/frontend/grafana_integration/components/grafana_integration_spec.js
@@ -114,7 +114,6 @@ describe('grafana integration component', () => {
.then(() =>
expect(createFlash).toHaveBeenCalledWith({
message: `There was an error saving your changes. ${message}`,
- type: 'alert',
}),
);
});
diff --git a/spec/frontend/groups/components/app_spec.js b/spec/frontend/groups/components/app_spec.js
index e559c9519f2..da0ff2a64ec 100644
--- a/spec/frontend/groups/components/app_spec.js
+++ b/spec/frontend/groups/components/app_spec.js
@@ -1,9 +1,9 @@
-import '~/flash';
import { GlModal, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import Vue from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
import appComponent from '~/groups/components/app.vue';
import groupFolderComponent from '~/groups/components/group_folder.vue';
import groupItemComponent from '~/groups/components/group_item.vue';
@@ -27,6 +27,7 @@ import {
const $toast = {
show: jest.fn(),
};
+jest.mock('~/flash');
describe('AppComponent', () => {
let wrapper;
@@ -123,12 +124,12 @@ describe('AppComponent', () => {
mock.onGet('/dashboard/groups.json').reply(400);
jest.spyOn(window, 'scrollTo').mockImplementation(() => {});
- jest.spyOn(window, 'Flash').mockImplementation(() => {});
-
return vm.fetchGroups({}).then(() => {
expect(vm.isLoading).toBe(false);
expect(window.scrollTo).toHaveBeenCalledWith({ behavior: 'smooth', top: 0 });
- expect(window.Flash).toHaveBeenCalledWith('An error occurred. Please try again.');
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'An error occurred. Please try again.',
+ });
});
});
});
@@ -324,15 +325,13 @@ describe('AppComponent', () => {
const message = 'An error occurred. Please try again.';
jest.spyOn(vm.service, 'leaveGroup').mockRejectedValue({ status: 500 });
jest.spyOn(vm.store, 'removeGroup');
- jest.spyOn(window, 'Flash').mockImplementation(() => {});
-
vm.leaveGroup();
expect(vm.targetGroup.isBeingRemoved).toBe(true);
expect(vm.service.leaveGroup).toHaveBeenCalledWith(childGroupItem.leavePath);
return waitForPromises().then(() => {
expect(vm.store.removeGroup).not.toHaveBeenCalled();
- expect(window.Flash).toHaveBeenCalledWith(message);
+ expect(createFlash).toHaveBeenCalledWith({ message });
expect(vm.targetGroup.isBeingRemoved).toBe(false);
});
});
@@ -341,15 +340,13 @@ describe('AppComponent', () => {
const message = 'Failed to leave the group. Please make sure you are not the only owner.';
jest.spyOn(vm.service, 'leaveGroup').mockRejectedValue({ status: 403 });
jest.spyOn(vm.store, 'removeGroup');
- jest.spyOn(window, 'Flash').mockImplementation(() => {});
-
vm.leaveGroup(childGroupItem, groupItem);
expect(vm.targetGroup.isBeingRemoved).toBe(true);
expect(vm.service.leaveGroup).toHaveBeenCalledWith(childGroupItem.leavePath);
return waitForPromises().then(() => {
expect(vm.store.removeGroup).not.toHaveBeenCalled();
- expect(window.Flash).toHaveBeenCalledWith(message);
+ expect(createFlash).toHaveBeenCalledWith({ message });
expect(vm.targetGroup.isBeingRemoved).toBe(false);
});
});
diff --git a/spec/frontend/groups/components/group_item_spec.js b/spec/frontend/groups/components/group_item_spec.js
index 546cdd3cd6f..2369685f506 100644
--- a/spec/frontend/groups/components/group_item_spec.js
+++ b/spec/frontend/groups/components/group_item_spec.js
@@ -162,11 +162,11 @@ describe('GroupItemComponent', () => {
wrapper = createComponent({ group });
});
- it('renders the group pending removal badge', () => {
+ it('renders the group pending deletion badge', () => {
const badgeEl = wrapper.vm.$el.querySelector('.badge-warning');
expect(badgeEl).toBeDefined();
- expect(badgeEl.innerHTML).toContain('pending removal');
+ expect(badgeEl.innerHTML).toContain('pending deletion');
});
});
@@ -176,10 +176,10 @@ describe('GroupItemComponent', () => {
wrapper = createComponent({ group });
});
- it('does not render the group pending removal badge', () => {
+ it('does not render the group pending deletion badge', () => {
const groupTextContainer = wrapper.vm.$el.querySelector('.group-text-container');
- expect(groupTextContainer).not.toContain('pending removal');
+ expect(groupTextContainer).not.toContain('pending deletion');
});
it('renders `item-actions` component and passes correct props to it', () => {
@@ -236,13 +236,13 @@ describe('GroupItemComponent', () => {
describe('schema.org props', () => {
describe('when showSchemaMarkup is disabled on the group', () => {
it.each(['itemprop', 'itemtype', 'itemscope'], 'it does not set %s', (attr) => {
- expect(wrapper.vm.$el.getAttribute(attr)).toBeNull();
+ expect(wrapper.attributes(attr)).toBeUndefined();
});
it.each(
['.js-group-avatar', '.js-group-name', '.js-group-description'],
'it does not set `itemprop` on sub-nodes',
(selector) => {
- expect(wrapper.vm.$el.querySelector(selector).getAttribute('itemprop')).toBeNull();
+ expect(wrapper.find(selector).attributes('itemprop')).toBeUndefined();
},
);
});
@@ -263,16 +263,16 @@ describe('GroupItemComponent', () => {
${'itemtype'} | ${'https://schema.org/Organization'}
${'itemprop'} | ${'subOrganization'}
`('it does set correct $attr', ({ attr, value } = {}) => {
- expect(wrapper.vm.$el.getAttribute(attr)).toBe(value);
+ expect(wrapper.attributes(attr)).toBe(value);
});
it.each`
selector | propValue
- ${'[data-testid="group-avatar"]'} | ${'logo'}
+ ${'img'} | ${'logo'}
${'[data-testid="group-name"]'} | ${'name'}
${'[data-testid="group-description"]'} | ${'description'}
`('it does set correct $selector', ({ selector, propValue } = {}) => {
- expect(wrapper.vm.$el.querySelector(selector).getAttribute('itemprop')).toBe(propValue);
+ expect(wrapper.find(selector).attributes('itemprop')).toBe(propValue);
});
});
});
diff --git a/spec/frontend/ide/components/ide_project_header_spec.js b/spec/frontend/ide/components/ide_project_header_spec.js
new file mode 100644
index 00000000000..fc39651c661
--- /dev/null
+++ b/spec/frontend/ide/components/ide_project_header_spec.js
@@ -0,0 +1,44 @@
+import { shallowMount } from '@vue/test-utils';
+import IDEProjectHeader from '~/ide/components/ide_project_header.vue';
+import ProjectAvatar from '~/vue_shared/components/project_avatar.vue';
+
+const mockProject = {
+ name: 'test proj',
+ avatar_url: 'https://gitlab.com',
+ path_with_namespace: 'path/with-namespace',
+ web_url: 'https://gitlab.com/project',
+};
+
+describe('IDE project header', () => {
+ let wrapper;
+
+ const findProjectAvatar = () => wrapper.findComponent(ProjectAvatar);
+ const findProjectLink = () => wrapper.find('[data-testid="go-to-project-link"');
+
+ const createComponent = () => {
+ wrapper = shallowMount(IDEProjectHeader, { propsData: { project: mockProject } });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders ProjectAvatar with correct props', () => {
+ expect(findProjectAvatar().props()).toMatchObject({
+ projectName: mockProject.name,
+ projectAvatarUrl: mockProject.avatar_url,
+ });
+ });
+
+ it('renders a link to the project URL', () => {
+ const link = findProjectLink();
+ expect(link.exists()).toBe(true);
+ expect(link.attributes('href')).toBe(mockProject.web_url);
+ });
+ });
+});
diff --git a/spec/frontend/ide/components/new_dropdown/modal_spec.js b/spec/frontend/ide/components/new_dropdown/modal_spec.js
index fce6ccf4b58..41111f5dbb4 100644
--- a/spec/frontend/ide/components/new_dropdown/modal_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/modal_spec.js
@@ -184,9 +184,6 @@ describe('new file modal component', () => {
expect(createFlash).toHaveBeenCalledWith({
message: 'The name "test-path/test" is already taken in this directory.',
- type: 'alert',
- parent: expect.anything(),
- actionConfig: null,
fadeTransition: false,
addBodyClass: true,
});
diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js
index 8e8fb31b15a..4bf3334ae6b 100644
--- a/spec/frontend/ide/components/repo_editor_spec.js
+++ b/spec/frontend/ide/components/repo_editor_spec.js
@@ -8,8 +8,8 @@ import waitForPromises from 'helpers/wait_for_promises';
import waitUsingRealTimer from 'helpers/wait_using_real_timer';
import { exampleConfigs, exampleFiles } from 'jest/ide/lib/editorconfig/mock_data';
import { EDITOR_CODE_INSTANCE_FN, EDITOR_DIFF_INSTANCE_FN } from '~/editor/constants';
-import EditorLite from '~/editor/editor_lite';
-import { EditorWebIdeExtension } from '~/editor/extensions/editor_lite_webide_ext';
+import { EditorWebIdeExtension } from '~/editor/extensions/source_editor_webide_ext';
+import SourceEditor from '~/editor/source_editor';
import RepoEditor from '~/ide/components/repo_editor.vue';
import {
leftSidebarViews,
@@ -123,8 +123,8 @@ describe('RepoEditor', () => {
const findPreviewTab = () => wrapper.find('[data-testid="preview-tab"]');
beforeEach(() => {
- createInstanceSpy = jest.spyOn(EditorLite.prototype, EDITOR_CODE_INSTANCE_FN);
- createDiffInstanceSpy = jest.spyOn(EditorLite.prototype, EDITOR_DIFF_INSTANCE_FN);
+ createInstanceSpy = jest.spyOn(SourceEditor.prototype, EDITOR_CODE_INSTANCE_FN);
+ createDiffInstanceSpy = jest.spyOn(SourceEditor.prototype, EDITOR_DIFF_INSTANCE_FN);
createModelSpy = jest.spyOn(monacoEditor, 'createModel');
jest.spyOn(service, 'getFileData').mockResolvedValue();
jest.spyOn(service, 'getRawFileData').mockResolvedValue();
@@ -252,7 +252,7 @@ describe('RepoEditor', () => {
);
it('installs the WebIDE extension', async () => {
- const extensionSpy = jest.spyOn(EditorLite, 'instanceApplyExtension');
+ const extensionSpy = jest.spyOn(SourceEditor, 'instanceApplyExtension');
await createComponent();
expect(extensionSpy).toHaveBeenCalled();
Reflect.ownKeys(EditorWebIdeExtension.prototype)
@@ -640,11 +640,12 @@ describe('RepoEditor', () => {
pasteImage();
await waitForFileContentChange();
+ expect(vm.$store.state.entries['foo/foo.png'].rawPath.startsWith('blob:')).toBe(true);
expect(vm.$store.state.entries['foo/foo.png']).toMatchObject({
path: 'foo/foo.png',
type: 'blob',
- content: 'Zm9v',
- rawPath: 'data:image/png;base64,Zm9v',
+ content: 'foo',
+ rawPath: vm.$store.state.entries['foo/foo.png'].rawPath,
});
});
diff --git a/spec/frontend/ide/services/index_spec.js b/spec/frontend/ide/services/index_spec.js
index 925446aa280..eacf1244d55 100644
--- a/spec/frontend/ide/services/index_spec.js
+++ b/spec/frontend/ide/services/index_spec.js
@@ -292,7 +292,7 @@ describe('IDE services', () => {
it('posts to usage endpoint', () => {
const TEST_PROJECT_PATH = 'foo/bar';
- const axiosURL = `${TEST_RELATIVE_URL_ROOT}/${TEST_PROJECT_PATH}/usage_ping/web_ide_pipelines_count`;
+ const axiosURL = `${TEST_RELATIVE_URL_ROOT}/${TEST_PROJECT_PATH}/service_ping/web_ide_pipelines_count`;
mock.onPost(axiosURL).reply(200);
diff --git a/spec/frontend/ide/stores/modules/clientside/actions_spec.js b/spec/frontend/ide/stores/modules/clientside/actions_spec.js
index c167d056039..88d7a630a90 100644
--- a/spec/frontend/ide/stores/modules/clientside/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/clientside/actions_spec.js
@@ -5,7 +5,7 @@ import * as actions from '~/ide/stores/modules/clientside/actions';
import axios from '~/lib/utils/axios_utils';
const TEST_PROJECT_URL = `${TEST_HOST}/lorem/ipsum`;
-const TEST_USAGE_URL = `${TEST_PROJECT_URL}/usage_ping/web_ide_clientside_preview`;
+const TEST_USAGE_URL = `${TEST_PROJECT_URL}/service_ping/web_ide_clientside_preview`;
describe('IDE store module clientside actions', () => {
let rootGetters;
diff --git a/spec/frontend/ide/stores/utils_spec.js b/spec/frontend/ide/stores/utils_spec.js
index 8f7b8c5e311..79b6b66319e 100644
--- a/spec/frontend/ide/stores/utils_spec.js
+++ b/spec/frontend/ide/stores/utils_spec.js
@@ -604,7 +604,7 @@ describe('Multi-file store utils', () => {
let entries;
beforeEach(() => {
- const img = { content: '/base64/encoded/image+' };
+ const img = { content: 'png-gibberish', rawPath: 'blob:1234' };
mdFile = { path: 'path/to/some/directory/myfile.md' };
entries = {
// invalid (or lack of) extensions are also supported as long as there's
@@ -637,14 +637,14 @@ describe('Multi-file store utils', () => {
${'* ![img](img.png "title here")'} | ${'png'} | ${'img'} | ${'title here'}
`(
'correctly transforms markdown with uncommitted images: $markdownBefore',
- ({ markdownBefore, ext, imgAlt, imgTitle }) => {
+ ({ markdownBefore, imgAlt, imgTitle }) => {
mdFile.content = markdownBefore;
expect(utils.extractMarkdownImagesFromEntries(mdFile, entries)).toEqual({
content: '* {{gl_md_img_1}}',
images: {
'{{gl_md_img_1}}': {
- src: `data:image/${ext};base64,/base64/encoded/image+`,
+ src: 'blob:1234',
alt: imgAlt,
title: imgTitle,
},
diff --git a/spec/frontend/import_entities/components/group_dropdown_spec.js b/spec/frontend/import_entities/components/group_dropdown_spec.js
new file mode 100644
index 00000000000..f7aa0e889ea
--- /dev/null
+++ b/spec/frontend/import_entities/components/group_dropdown_spec.js
@@ -0,0 +1,44 @@
+import { GlSearchBoxByType, GlDropdown } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import GroupDropdown from '~/import_entities/components/group_dropdown.vue';
+
+describe('Import entities group dropdown component', () => {
+ let wrapper;
+ let namespacesTracker;
+
+ const createComponent = (propsData) => {
+ namespacesTracker = jest.fn();
+
+ wrapper = shallowMount(GroupDropdown, {
+ scopedSlots: {
+ default: namespacesTracker,
+ },
+ stubs: { GlDropdown },
+ propsData,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('passes namespaces from props to default slot', () => {
+ const namespaces = ['ns1', 'ns2'];
+ createComponent({ namespaces });
+
+ expect(namespacesTracker).toHaveBeenCalledWith({ namespaces });
+ });
+
+ it('filters namespaces based on user input', async () => {
+ const namespaces = ['match1', 'some unrelated', 'match2'];
+ createComponent({ namespaces });
+
+ namespacesTracker.mockReset();
+ wrapper.find(GlSearchBoxByType).vm.$emit('input', 'match');
+
+ await nextTick();
+
+ expect(namespacesTracker).toHaveBeenCalledWith({ namespaces: ['match1', 'match2'] });
+ });
+});
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js
index aa6a40cad18..654a8fd00d3 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js
@@ -1,8 +1,9 @@
-import { GlButton, GlDropdown, GlDropdownItem, GlLink, GlFormInput } from '@gitlab/ui';
+import { GlButton, GlDropdownItem, GlLink, GlFormInput } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
+import ImportGroupDropdown from '~/import_entities/components/group_dropdown.vue';
import { STATUSES } from '~/import_entities/constants';
import ImportTableRow from '~/import_entities/import_groups/components/import_table_row.vue';
import addValidationErrorMutation from '~/import_entities/import_groups/graphql/mutations/add_validation_error.mutation.graphql';
@@ -41,7 +42,7 @@ describe('import table row', () => {
};
const findImportButton = () => findByText(GlButton, 'Import');
const findNameInput = () => wrapper.find(GlFormInput);
- const findNamespaceDropdown = () => wrapper.find(GlDropdown);
+ const findNamespaceDropdown = () => wrapper.find(ImportGroupDropdown);
const createComponent = (props) => {
apolloProvider = createMockApollo([
@@ -65,6 +66,7 @@ describe('import table row', () => {
wrapper = shallowMount(ImportTableRow, {
apolloProvider,
+ stubs: { ImportGroupDropdown },
propsData: {
availableNamespaces: availableNamespacesFixture,
groupPathRegex: /.*/,
diff --git a/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js b/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
index d9f4168f1a5..0e748baa313 100644
--- a/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
+++ b/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
@@ -11,6 +11,8 @@ import state from '~/import_entities/import_projects/store/state';
describe('ImportProjectsTable', () => {
let wrapper;
+ const USER_NAMESPACE = 'root';
+
const findFilterField = () =>
wrapper
.findAllComponents(GlFormInput)
@@ -48,7 +50,7 @@ describe('ImportProjectsTable', () => {
localVue.use(Vuex);
const store = new Vuex.Store({
- state: { ...state(), ...initialState },
+ state: { ...state(), defaultTargetNamespace: USER_NAMESPACE, ...initialState },
getters: {
...getters,
...customGetters,
diff --git a/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js b/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js
index e15389be53a..72640f3d601 100644
--- a/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js
+++ b/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js
@@ -1,11 +1,11 @@
-import { GlBadge, GlButton } from '@gitlab/ui';
+import { GlBadge, GlButton, GlDropdown } from '@gitlab/ui';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import Vuex from 'vuex';
import { STATUSES } from '~/import_entities//constants';
+import ImportGroupDropdown from '~/import_entities/components/group_dropdown.vue';
import ImportStatus from '~/import_entities/components/import_status.vue';
import ProviderRepoTableRow from '~/import_entities/import_projects/components/provider_repo_table_row.vue';
-import Select2Select from '~/vue_shared/components/select2_select.vue';
describe('ProviderRepoTableRow', () => {
let wrapper;
@@ -16,10 +16,8 @@ describe('ProviderRepoTableRow', () => {
newName: 'newName',
};
- const availableNamespaces = [
- { text: 'Groups', children: [{ id: 'test', text: 'test' }] },
- { text: 'Users', children: [{ id: 'root', text: 'root' }] },
- ];
+ const availableNamespaces = ['test'];
+ const userNamespace = 'root';
function initStore(initialState) {
const store = new Vuex.Store({
@@ -48,7 +46,7 @@ describe('ProviderRepoTableRow', () => {
wrapper = shallowMount(ProviderRepoTableRow, {
localVue,
store,
- propsData: { availableNamespaces, ...props },
+ propsData: { availableNamespaces, userNamespace, ...props },
});
}
@@ -81,9 +79,8 @@ describe('ProviderRepoTableRow', () => {
expect(wrapper.find(ImportStatus).props().status).toBe(STATUSES.NONE);
});
- it('renders a select2 namespace select', () => {
- expect(wrapper.find(Select2Select).exists()).toBe(true);
- expect(wrapper.find(Select2Select).props().options.data).toBe(availableNamespaces);
+ it('renders a group namespace select', () => {
+ expect(wrapper.find(ImportGroupDropdown).props().namespaces).toBe(availableNamespaces);
});
it('renders import button', () => {
@@ -133,7 +130,7 @@ describe('ProviderRepoTableRow', () => {
});
it('does not renders a namespace select', () => {
- expect(wrapper.find(Select2Select).exists()).toBe(false);
+ expect(wrapper.find(GlDropdown).exists()).toBe(false);
});
it('does not render import button', () => {
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
index 4f70f908c4a..1e3c344ce65 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
@@ -39,7 +39,9 @@ exports[`IncidentsSettingTabs should render the component 1`] = `
class="settings-content"
>
<gl-tabs-stub
+ queryparamname="tab"
theme="indigo"
+ value="0"
>
<!---->
diff --git a/spec/frontend/incidents_settings/components/incidents_settings_service_spec.js b/spec/frontend/incidents_settings/components/incidents_settings_service_spec.js
index f4342c56f98..1b0253480e0 100644
--- a/spec/frontend/incidents_settings/components/incidents_settings_service_spec.js
+++ b/spec/frontend/incidents_settings/components/incidents_settings_service_spec.js
@@ -39,7 +39,6 @@ describe('IncidentsSettingsService', () => {
return service.updateSettings({}).then(() => {
expect(createFlash).toHaveBeenCalledWith({
message: expect.stringContaining(ERROR_MSG),
- type: 'alert',
});
});
});
diff --git a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
index eb5f7e9fe40..2860d3cc37a 100644
--- a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
@@ -2,7 +2,6 @@ import { GlFormCheckbox, GlFormInput } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import JiraIssuesFields from '~/integrations/edit/components/jira_issues_fields.vue';
-import JiraUpgradeCta from '~/integrations/edit/components/jira_upgrade_cta.vue';
import eventHub from '~/integrations/edit/event_hub';
import { createStore } from '~/integrations/edit/store';
@@ -14,6 +13,7 @@ describe('JiraIssuesFields', () => {
editProjectPath: '/edit',
showJiraIssuesIntegration: true,
showJiraVulnerabilitiesIntegration: true,
+ upgradePlanPath: 'https://gitlab.com',
};
const createComponent = ({ isInheriting = false, props, ...options } = {}) => {
@@ -37,60 +37,79 @@ describe('JiraIssuesFields', () => {
const findEnableCheckboxDisabled = () =>
findEnableCheckbox().find('[type=checkbox]').attributes('disabled');
const findProjectKey = () => wrapper.findComponent(GlFormInput);
- const findJiraUpgradeCta = () => wrapper.findComponent(JiraUpgradeCta);
+ const findPremiumUpgradeCTA = () => wrapper.findByTestId('premium-upgrade-cta');
+ const findUltimateUpgradeCTA = () => wrapper.findByTestId('ultimate-upgrade-cta');
const findJiraForVulnerabilities = () => wrapper.findByTestId('jira-for-vulnerabilities');
const setEnableCheckbox = async (isEnabled = true) =>
findEnableCheckbox().vm.$emit('input', isEnabled);
- describe('jira issues call to action', () => {
- it('shows the premium message', () => {
- createComponent({
- props: { showJiraIssuesIntegration: false },
- });
-
- expect(findJiraUpgradeCta().props()).toMatchObject({
- showPremiumMessage: true,
- showUltimateMessage: false,
- });
- });
-
- it('shows the ultimate message', () => {
- createComponent({
- props: {
- showJiraIssuesIntegration: true,
- showJiraVulnerabilitiesIntegration: false,
- },
- });
-
- expect(findJiraUpgradeCta().props()).toMatchObject({
- showPremiumMessage: false,
- showUltimateMessage: true,
- });
- });
- });
-
describe('template', () => {
- describe('upgrade banner for non-Premium user', () => {
- beforeEach(() => {
- createComponent({ props: { initialProjectKey: '', showJiraIssuesIntegration: false } });
- });
+ describe.each`
+ showJiraIssuesIntegration | showJiraVulnerabilitiesIntegration
+ ${false} | ${false}
+ ${false} | ${true}
+ ${true} | ${false}
+ ${true} | ${true}
+ `(
+ 'when `showJiraIssuesIntegration` is $jiraIssues and `showJiraVulnerabilitiesIntegration` is $jiraVulnerabilities',
+ ({ showJiraIssuesIntegration, showJiraVulnerabilitiesIntegration }) => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ showJiraIssuesIntegration,
+ showJiraVulnerabilitiesIntegration,
+ },
+ });
+ });
- it('does not show checkbox and input field', () => {
- expect(findEnableCheckbox().exists()).toBe(false);
- expect(findProjectKey().exists()).toBe(false);
- });
- });
+ if (showJiraIssuesIntegration) {
+ it('renders checkbox and input field', () => {
+ expect(findEnableCheckbox().exists()).toBe(true);
+ expect(findEnableCheckboxDisabled()).toBeUndefined();
+ expect(findProjectKey().exists()).toBe(true);
+ });
+
+ it('does not render the Premium CTA', () => {
+ expect(findPremiumUpgradeCTA().exists()).toBe(false);
+ });
+
+ if (!showJiraVulnerabilitiesIntegration) {
+ it.each`
+ scenario | enableJiraIssues
+ ${'when "Enable Jira issues" is checked, renders Ultimate upgrade CTA'} | ${true}
+ ${'when "Enable Jira issues" is unchecked, does not render Ultimate upgrade CTA'} | ${false}
+ `('$scenario', async ({ enableJiraIssues }) => {
+ if (enableJiraIssues) {
+ await setEnableCheckbox();
+ }
+ expect(findUltimateUpgradeCTA().exists()).toBe(enableJiraIssues);
+ });
+ }
+ } else {
+ it('does not render checkbox and input field', () => {
+ expect(findEnableCheckbox().exists()).toBe(false);
+ expect(findProjectKey().exists()).toBe(false);
+ });
+
+ it('renders the Premium CTA', () => {
+ const premiumUpgradeCTA = findPremiumUpgradeCTA();
+
+ expect(premiumUpgradeCTA.exists()).toBe(true);
+ expect(premiumUpgradeCTA.props('upgradePlanPath')).toBe(defaultProps.upgradePlanPath);
+ });
+ }
+
+ it('does not render the Ultimate CTA', () => {
+ expect(findUltimateUpgradeCTA().exists()).toBe(false);
+ });
+ },
+ );
describe('Enable Jira issues checkbox', () => {
beforeEach(() => {
createComponent({ props: { initialProjectKey: '' } });
});
- it('renders enabled checkbox', () => {
- expect(findEnableCheckbox().exists()).toBe(true);
- expect(findEnableCheckboxDisabled()).toBeUndefined();
- });
-
it('renders disabled project_key input', () => {
const projectKey = findProjectKey();
@@ -99,10 +118,6 @@ describe('JiraIssuesFields', () => {
expect(projectKey.attributes('required')).toBeUndefined();
});
- it('does not show upgrade banner', () => {
- expect(findJiraUpgradeCta().exists()).toBe(false);
- });
-
// As per https://vuejs.org/v2/guide/forms.html#Checkbox-1,
// browsers don't include unchecked boxes in form submissions.
it('includes issues_enabled as false even if unchecked', () => {
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
index eabbea84234..b828b5d8a04 100644
--- a/spec/frontend/invite_members/components/invite_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -1,11 +1,27 @@
-import { GlDropdown, GlDropdownItem, GlDatepicker, GlSprintf, GlLink, GlModal } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import {
+ GlDropdown,
+ GlDropdownItem,
+ GlDatepicker,
+ GlFormGroup,
+ GlSprintf,
+ GlLink,
+ GlModal,
+} from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
import { stubComponent } from 'helpers/stub_component';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
import ExperimentTracking from '~/experimentation/experiment_tracking';
import InviteMembersModal from '~/invite_members/components/invite_members_modal.vue';
+import MembersTokenSelect from '~/invite_members/components/members_token_select.vue';
import { INVITE_MEMBERS_IN_COMMENT } from '~/invite_members/constants';
+import axios from '~/lib/utils/axios_utils';
+import httpStatus from '~/lib/utils/http_status';
+import { apiPaths, membersApiResponse, invitationsApiResponse } from '../mock_data/api_responses';
+
+let wrapper;
+let mock;
jest.mock('~/experimentation/experiment_tracking');
@@ -26,10 +42,16 @@ const user3 = {
username: 'one_2',
avatar_url: '',
};
+const user4 = {
+ id: 'user-defined-token',
+ name: 'email4@example.com',
+ username: 'one_4',
+ avatar_url: '',
+};
const sharedGroup = { id: '981' };
const createComponent = (data = {}, props = {}) => {
- return shallowMount(InviteMembersModal, {
+ wrapper = shallowMountExtended(InviteMembersModal, {
propsData: {
id,
name,
@@ -51,46 +73,56 @@ const createComponent = (data = {}, props = {}) => {
GlDropdown: true,
GlDropdownItem: true,
GlSprintf,
+ GlFormGroup: stubComponent(GlFormGroup, {
+ props: ['state', 'invalidFeedback'],
+ }),
},
});
};
const createInviteMembersToProjectWrapper = () => {
- return createComponent({ inviteeType: 'members' }, { isProject: true });
+ createComponent({ inviteeType: 'members' }, { isProject: true });
};
const createInviteMembersToGroupWrapper = () => {
- return createComponent({ inviteeType: 'members' }, { isProject: false });
+ createComponent({ inviteeType: 'members' }, { isProject: false });
};
const createInviteGroupToProjectWrapper = () => {
- return createComponent({ inviteeType: 'group' }, { isProject: true });
+ createComponent({ inviteeType: 'group' }, { isProject: true });
};
const createInviteGroupToGroupWrapper = () => {
- return createComponent({ inviteeType: 'group' }, { isProject: false });
+ createComponent({ inviteeType: 'group' }, { isProject: false });
};
-describe('InviteMembersModal', () => {
- let wrapper;
+beforeEach(() => {
+ gon.api_version = 'v4';
+ mock = new MockAdapter(axios);
+});
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
+afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ mock.restore();
+});
+describe('InviteMembersModal', () => {
const findDropdown = () => wrapper.findComponent(GlDropdown);
const findDropdownItems = () => findDropdown().findAllComponents(GlDropdownItem);
const findDatepicker = () => wrapper.findComponent(GlDatepicker);
const findLink = () => wrapper.findComponent(GlLink);
const findIntroText = () => wrapper.find({ ref: 'introText' }).text();
- const findCancelButton = () => wrapper.findComponent({ ref: 'cancelButton' });
- const findInviteButton = () => wrapper.findComponent({ ref: 'inviteButton' });
+ const findCancelButton = () => wrapper.findByTestId('cancel-button');
+ const findInviteButton = () => wrapper.findByTestId('invite-button');
const clickInviteButton = () => findInviteButton().vm.$emit('click');
+ const findMembersFormGroup = () => wrapper.findByTestId('members-form-group');
+ const membersFormGroupInvalidFeedback = () => findMembersFormGroup().props('invalidFeedback');
+ const findMembersSelect = () => wrapper.findComponent(MembersTokenSelect);
describe('rendering the modal', () => {
beforeEach(() => {
- wrapper = createComponent();
+ createComponent();
});
it('renders the modal with the correct title', () => {
@@ -132,7 +164,7 @@ describe('InviteMembersModal', () => {
describe('when inviting to a project', () => {
describe('when inviting members', () => {
it('includes the correct invitee, type, and formatted name', () => {
- wrapper = createInviteMembersToProjectWrapper();
+ createInviteMembersToProjectWrapper();
expect(findIntroText()).toBe("You're inviting members to the test name project.");
});
@@ -140,7 +172,7 @@ describe('InviteMembersModal', () => {
describe('when sharing with a group', () => {
it('includes the correct invitee, type, and formatted name', () => {
- wrapper = createInviteGroupToProjectWrapper();
+ createInviteGroupToProjectWrapper();
expect(findIntroText()).toBe("You're inviting a group to the test name project.");
});
@@ -150,7 +182,7 @@ describe('InviteMembersModal', () => {
describe('when inviting to a group', () => {
describe('when inviting members', () => {
it('includes the correct invitee, type, and formatted name', () => {
- wrapper = createInviteMembersToGroupWrapper();
+ createInviteMembersToGroupWrapper();
expect(findIntroText()).toBe("You're inviting members to the test name group.");
});
@@ -158,7 +190,7 @@ describe('InviteMembersModal', () => {
describe('when sharing with a group', () => {
it('includes the correct invitee, type, and formatted name', () => {
- wrapper = createInviteGroupToGroupWrapper();
+ createInviteGroupToGroupWrapper();
expect(findIntroText()).toBe("You're inviting a group to the test name group.");
});
@@ -167,22 +199,30 @@ describe('InviteMembersModal', () => {
});
describe('submitting the invite form', () => {
- const apiErrorMessage = 'Member already exists';
+ const mockMembersApi = (code, data) => {
+ mock.onPost(apiPaths.GROUPS_MEMBERS).reply(code, data);
+ };
+ const mockInvitationsApi = (code, data) => {
+ mock.onPost(apiPaths.GROUPS_INVITATIONS).reply(code, data);
+ };
+
+ const expectedEmailRestrictedError =
+ "email 'email@example.com' does not match the allowed domains: example1.org";
+ const expectedSyntaxError = 'email contains an invalid email address';
describe('when inviting an existing user to group by user ID', () => {
const postData = {
- user_id: '1',
+ user_id: '1,2',
access_level: defaultAccessLevel,
expires_at: undefined,
invite_source: inviteSource,
format: 'json',
};
- describe('when invites are sent successfully', () => {
+ describe('when member is added successfully', () => {
beforeEach(() => {
- wrapper = createInviteMembersToGroupWrapper();
+ createComponent({ newUsersToInvite: [user1, user2] });
- wrapper.setData({ newUsersToInvite: [user1] });
wrapper.vm.$toast = { show: jest.fn() };
jest.spyOn(Api, 'addGroupMembersByUserId').mockResolvedValue({ data: postData });
jest.spyOn(wrapper.vm, 'showToastMessageSuccess');
@@ -190,54 +230,102 @@ describe('InviteMembersModal', () => {
clickInviteButton();
});
- it('calls Api addGroupMembersByUserId with the correct params', () => {
+ it('calls Api addGroupMembersByUserId with the correct params', async () => {
+ await waitForPromises;
+
expect(Api.addGroupMembersByUserId).toHaveBeenCalledWith(id, postData);
});
- it('displays the successful toastMessage', () => {
+ it('displays the successful toastMessage', async () => {
+ await waitForPromises;
+
expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled();
});
});
- describe('when the invite received an api error message', () => {
+ describe('when member is not added successfully', () => {
beforeEach(() => {
- wrapper = createComponent({ newUsersToInvite: [user1] });
+ createInviteMembersToGroupWrapper();
- wrapper.vm.$toast = { show: jest.fn() };
- jest
- .spyOn(Api, 'addGroupMembersByUserId')
- .mockRejectedValue({ response: { data: { message: apiErrorMessage } } });
- jest.spyOn(wrapper.vm, 'showToastMessageError');
+ wrapper.setData({ newUsersToInvite: [user1] });
+ });
+
+ it('displays "Member already exists" api message for http status conflict', async () => {
+ mockMembersApi(httpStatus.CONFLICT, membersApiResponse.MEMBER_ALREADY_EXISTS);
clickInviteButton();
+
+ await waitForPromises();
+
+ expect(membersFormGroupInvalidFeedback()).toBe('Member already exists');
+ expect(findMembersFormGroup().props('state')).toBe(false);
+ expect(findMembersSelect().props('validationState')).toBe(false);
});
- it('displays the apiErrorMessage in the toastMessage', async () => {
+ it('clears the invalid state and message once the list of members to invite is cleared', async () => {
+ mockMembersApi(httpStatus.CONFLICT, membersApiResponse.MEMBER_ALREADY_EXISTS);
+
+ clickInviteButton();
+
await waitForPromises();
- expect(wrapper.vm.showToastMessageError).toHaveBeenCalledWith({
- response: { data: { message: apiErrorMessage } },
- });
+ expect(membersFormGroupInvalidFeedback()).toBe('Member already exists');
+ expect(findMembersFormGroup().props('state')).toBe(false);
+ expect(findMembersSelect().props('validationState')).toBe(false);
+
+ findMembersSelect().vm.$emit('clear');
+
+ await waitForPromises();
+
+ expect(membersFormGroupInvalidFeedback()).toBe('');
+ expect(findMembersFormGroup().props('state')).not.toBe(false);
+ expect(findMembersSelect().props('validationState')).not.toBe(false);
});
- });
- describe('when any invite failed for any other reason', () => {
- beforeEach(() => {
- wrapper = createComponent({ newUsersToInvite: [user1, user2] });
+ it('displays the generic error for http server error', async () => {
+ mockMembersApi(httpStatus.INTERNAL_SERVER_ERROR, 'Request failed with status code 500');
- wrapper.vm.$toast = { show: jest.fn() };
- jest
- .spyOn(Api, 'addGroupMembersByUserId')
- .mockRejectedValue({ response: { data: { success: false } } });
- jest.spyOn(wrapper.vm, 'showToastMessageError');
+ clickInviteButton();
+
+ await waitForPromises();
+
+ expect(membersFormGroupInvalidFeedback()).toBe('Something went wrong');
+ });
+
+ it('displays the restricted user api message for response with bad request', async () => {
+ mockMembersApi(httpStatus.BAD_REQUEST, membersApiResponse.SINGLE_USER_RESTRICTED);
clickInviteButton();
+
+ await waitForPromises();
+
+ expect(membersFormGroupInvalidFeedback()).toBe(expectedEmailRestrictedError);
});
- it('displays the generic error toastMessage', async () => {
+ it('displays the first part of the error when multiple existing users are restricted by email', async () => {
+ mockMembersApi(httpStatus.CREATED, membersApiResponse.MULTIPLE_USERS_RESTRICTED);
+
+ clickInviteButton();
+
await waitForPromises();
- expect(wrapper.vm.showToastMessageError).toHaveBeenCalled();
+ expect(membersFormGroupInvalidFeedback()).toBe(
+ "root: User email 'admin@example.com' does not match the allowed domain of example2.com",
+ );
+ expect(findMembersSelect().props('validationState')).toBe(false);
+ });
+
+ it('displays an access_level error message received for the existing user', async () => {
+ mockMembersApi(httpStatus.BAD_REQUEST, membersApiResponse.SINGLE_USER_ACCESS_LEVEL);
+
+ clickInviteButton();
+
+ await waitForPromises();
+
+ expect(membersFormGroupInvalidFeedback()).toBe(
+ 'should be greater than or equal to Owner inherited membership from group Gitlab Org',
+ );
+ expect(findMembersSelect().props('validationState')).toBe(false);
});
});
});
@@ -253,7 +341,7 @@ describe('InviteMembersModal', () => {
describe('when invites are sent successfully', () => {
beforeEach(() => {
- wrapper = createComponent({ newUsersToInvite: [user3] });
+ createComponent({ newUsersToInvite: [user3] });
wrapper.vm.$toast = { show: jest.fn() };
jest.spyOn(Api, 'inviteGroupMembersByEmail').mockResolvedValue({ data: postData });
@@ -271,23 +359,84 @@ describe('InviteMembersModal', () => {
});
});
- describe('when any invite failed for any reason', () => {
+ describe('when invites are not sent successfully', () => {
beforeEach(() => {
- wrapper = createComponent({ newUsersToInvite: [user1, user2] });
+ createInviteMembersToGroupWrapper();
+
+ wrapper.setData({ newUsersToInvite: [user3] });
+ });
+
+ it('displays the api error for invalid email syntax', async () => {
+ mockInvitationsApi(httpStatus.BAD_REQUEST, invitationsApiResponse.EMAIL_INVALID);
+
+ clickInviteButton();
+
+ await waitForPromises();
+
+ expect(membersFormGroupInvalidFeedback()).toBe(expectedSyntaxError);
+ expect(findMembersSelect().props('validationState')).toBe(false);
+ });
+ it('displays the restricted email error when restricted email is invited', async () => {
+ mockInvitationsApi(httpStatus.CREATED, invitationsApiResponse.EMAIL_RESTRICTED);
+
+ clickInviteButton();
+
+ await waitForPromises();
+
+ expect(membersFormGroupInvalidFeedback()).toContain(expectedEmailRestrictedError);
+ expect(findMembersSelect().props('validationState')).toBe(false);
+ });
+
+ it('displays the successful toast message when email has already been invited', async () => {
+ mockInvitationsApi(httpStatus.CREATED, invitationsApiResponse.EMAIL_TAKEN);
wrapper.vm.$toast = { show: jest.fn() };
- jest
- .spyOn(Api, 'addGroupMembersByUserId')
- .mockRejectedValue({ response: { data: { success: false } } });
- jest.spyOn(wrapper.vm, 'showToastMessageError');
+ jest.spyOn(wrapper.vm, 'showToastMessageSuccess');
+
+ clickInviteButton();
+
+ await waitForPromises();
+
+ expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled();
+ expect(findMembersSelect().props('validationState')).toBe(null);
+ });
+
+ it('displays the first error message when multiple emails return a restricted error message', async () => {
+ mockInvitationsApi(httpStatus.CREATED, invitationsApiResponse.MULTIPLE_EMAIL_RESTRICTED);
clickInviteButton();
+
+ await waitForPromises();
+
+ expect(membersFormGroupInvalidFeedback()).toContain(expectedEmailRestrictedError);
+ expect(findMembersSelect().props('validationState')).toBe(false);
+ });
+
+ it('displays the invalid syntax error for bad request', async () => {
+ mockInvitationsApi(httpStatus.BAD_REQUEST, invitationsApiResponse.ERROR_EMAIL_INVALID);
+
+ clickInviteButton();
+
+ await waitForPromises();
+
+ expect(membersFormGroupInvalidFeedback()).toBe(expectedSyntaxError);
+ expect(findMembersSelect().props('validationState')).toBe(false);
});
+ });
+
+ describe('when multiple emails are invited at the same time', () => {
+ it('displays the invalid syntax error if one of the emails is invalid', async () => {
+ createInviteMembersToGroupWrapper();
+
+ wrapper.setData({ newUsersToInvite: [user3, user4] });
+ mockInvitationsApi(httpStatus.CREATED, invitationsApiResponse.ERROR_EMAIL_INVALID);
+
+ clickInviteButton();
- it('displays the generic error toastMessage', async () => {
await waitForPromises();
- expect(wrapper.vm.showToastMessageError).toHaveBeenCalled();
+ expect(membersFormGroupInvalidFeedback()).toBe(expectedSyntaxError);
+ expect(findMembersSelect().props('validationState')).toBe(false);
});
});
});
@@ -305,7 +454,7 @@ describe('InviteMembersModal', () => {
describe('when invites are sent successfully', () => {
beforeEach(() => {
- wrapper = createComponent({ newUsersToInvite: [user1, user3] });
+ createComponent({ newUsersToInvite: [user1, user3] });
wrapper.vm.$toast = { show: jest.fn() };
jest.spyOn(Api, 'inviteGroupMembersByEmail').mockResolvedValue({ data: postData });
@@ -350,24 +499,20 @@ describe('InviteMembersModal', () => {
describe('when any invite failed for any reason', () => {
beforeEach(() => {
- wrapper = createComponent({ newUsersToInvite: [user1, user3] });
+ createInviteMembersToGroupWrapper();
- wrapper.vm.$toast = { show: jest.fn() };
-
- jest
- .spyOn(Api, 'inviteGroupMembersByEmail')
- .mockRejectedValue({ response: { data: { success: false } } });
+ wrapper.setData({ newUsersToInvite: [user1, user3] });
- jest.spyOn(Api, 'addGroupMembersByUserId').mockResolvedValue({ data: postData });
- jest.spyOn(wrapper.vm, 'showToastMessageError');
+ mockInvitationsApi(httpStatus.BAD_REQUEST, invitationsApiResponse.EMAIL_INVALID);
+ mockMembersApi(httpStatus.OK, '200 OK');
clickInviteButton();
});
- it('displays the generic error toastMessage', async () => {
+ it('displays the first error message', async () => {
await waitForPromises();
- expect(wrapper.vm.showToastMessageError).toHaveBeenCalled();
+ expect(membersFormGroupInvalidFeedback()).toBe(expectedSyntaxError);
});
});
});
@@ -382,7 +527,7 @@ describe('InviteMembersModal', () => {
};
beforeEach(() => {
- wrapper = createComponent({ groupToBeSharedWith: sharedGroup });
+ createComponent({ groupToBeSharedWith: sharedGroup });
wrapper.setData({ inviteeType: 'group' });
wrapper.vm.$toast = { show: jest.fn() };
@@ -403,7 +548,7 @@ describe('InviteMembersModal', () => {
describe('when sharing the group fails', () => {
beforeEach(() => {
- wrapper = createComponent({ groupToBeSharedWith: sharedGroup });
+ createComponent({ groupToBeSharedWith: sharedGroup });
wrapper.setData({ inviteeType: 'group' });
wrapper.vm.$toast = { show: jest.fn() };
@@ -412,22 +557,20 @@ describe('InviteMembersModal', () => {
.spyOn(Api, 'groupShareWithGroup')
.mockRejectedValue({ response: { data: { success: false } } });
- jest.spyOn(wrapper.vm, 'showToastMessageError');
-
clickInviteButton();
});
- it('displays the generic error toastMessage', async () => {
+ it('displays the generic error message', async () => {
await waitForPromises();
- expect(wrapper.vm.showToastMessageError).toHaveBeenCalled();
+ expect(membersFormGroupInvalidFeedback()).toBe('Something went wrong');
});
});
});
describe('tracking', () => {
beforeEach(() => {
- wrapper = createComponent({ newUsersToInvite: [user3] });
+ createComponent({ newUsersToInvite: [user3] });
wrapper.vm.$toast = { show: jest.fn() };
jest.spyOn(Api, 'inviteGroupMembersByEmail').mockResolvedValue({});
diff --git a/spec/frontend/invite_members/components/members_token_select_spec.js b/spec/frontend/invite_members/components/members_token_select_spec.js
index f6e79d3607f..12db7e42464 100644
--- a/spec/frontend/invite_members/components/members_token_select_spec.js
+++ b/spec/frontend/invite_members/components/members_token_select_spec.js
@@ -115,6 +115,21 @@ describe('MembersTokenSelect', () => {
expect(wrapper.emitted().input[0][0]).toEqual([user1, user2]);
});
});
+
+ describe('when user is removed', () => {
+ it('emits `clear` event', () => {
+ findTokenSelector().vm.$emit('token-remove', [user1]);
+
+ expect(wrapper.emitted('clear')).toEqual([[]]);
+ });
+
+ it('does not emit `clear` event when there are still tokens selected', () => {
+ findTokenSelector().vm.$emit('input', [user1, user2]);
+ findTokenSelector().vm.$emit('token-remove', [user1]);
+
+ expect(wrapper.emitted('clear')).toBeUndefined();
+ });
+ });
});
describe('when text input is blurred', () => {
diff --git a/spec/frontend/invite_members/mock_data/api_responses.js b/spec/frontend/invite_members/mock_data/api_responses.js
new file mode 100644
index 00000000000..79b56a33708
--- /dev/null
+++ b/spec/frontend/invite_members/mock_data/api_responses.js
@@ -0,0 +1,74 @@
+const INVITATIONS_API_EMAIL_INVALID = {
+ message: { error: 'email contains an invalid email address' },
+};
+
+const INVITATIONS_API_ERROR_EMAIL_INVALID = {
+ error: 'email contains an invalid email address',
+};
+
+const INVITATIONS_API_EMAIL_RESTRICTED = {
+ message: {
+ 'email@example.com':
+ "Invite email 'email@example.com' does not match the allowed domains: example1.org",
+ },
+ status: 'error',
+};
+
+const INVITATIONS_API_MULTIPLE_EMAIL_RESTRICTED = {
+ message: {
+ 'email@example.com':
+ "Invite email email 'email@example.com' does not match the allowed domains: example1.org",
+ 'email4@example.com':
+ "Invite email email 'email4@example.com' does not match the allowed domains: example1.org",
+ },
+ status: 'error',
+};
+
+const INVITATIONS_API_EMAIL_TAKEN = {
+ message: {
+ 'email@example2.com': 'Invite email has already been taken',
+ },
+ status: 'error',
+};
+
+const MEMBERS_API_MEMBER_ALREADY_EXISTS = {
+ message: 'Member already exists',
+};
+
+const MEMBERS_API_SINGLE_USER_RESTRICTED = {
+ message: { user: ["email 'email@example.com' does not match the allowed domains: example1.org"] },
+};
+
+const MEMBERS_API_SINGLE_USER_ACCESS_LEVEL = {
+ message: {
+ access_level: [
+ 'should be greater than or equal to Owner inherited membership from group Gitlab Org',
+ ],
+ },
+};
+
+const MEMBERS_API_MULTIPLE_USERS_RESTRICTED = {
+ message:
+ "root: User email 'admin@example.com' does not match the allowed domain of example2.com and user18: User email 'user18@example.org' does not match the allowed domain of example2.com",
+ status: 'error',
+};
+
+export const apiPaths = {
+ GROUPS_MEMBERS: '/api/v4/groups/1/members',
+ GROUPS_INVITATIONS: '/api/v4/groups/1/invitations',
+};
+
+export const membersApiResponse = {
+ MEMBER_ALREADY_EXISTS: MEMBERS_API_MEMBER_ALREADY_EXISTS,
+ SINGLE_USER_ACCESS_LEVEL: MEMBERS_API_SINGLE_USER_ACCESS_LEVEL,
+ SINGLE_USER_RESTRICTED: MEMBERS_API_SINGLE_USER_RESTRICTED,
+ MULTIPLE_USERS_RESTRICTED: MEMBERS_API_MULTIPLE_USERS_RESTRICTED,
+};
+
+export const invitationsApiResponse = {
+ EMAIL_INVALID: INVITATIONS_API_EMAIL_INVALID,
+ ERROR_EMAIL_INVALID: INVITATIONS_API_ERROR_EMAIL_INVALID,
+ EMAIL_RESTRICTED: INVITATIONS_API_EMAIL_RESTRICTED,
+ MULTIPLE_EMAIL_RESTRICTED: INVITATIONS_API_MULTIPLE_EMAIL_RESTRICTED,
+ EMAIL_TAKEN: INVITATIONS_API_EMAIL_TAKEN,
+};
diff --git a/spec/frontend/invite_members/utils/response_message_parser_spec.js b/spec/frontend/invite_members/utils/response_message_parser_spec.js
new file mode 100644
index 00000000000..3c88b5a2418
--- /dev/null
+++ b/spec/frontend/invite_members/utils/response_message_parser_spec.js
@@ -0,0 +1,36 @@
+import {
+ responseMessageFromSuccess,
+ responseMessageFromError,
+} from '~/invite_members/utils/response_message_parser';
+
+describe('Response message parser', () => {
+ const expectedMessage = 'expected display message';
+
+ describe('parse message from successful response', () => {
+ const exampleKeyedMsg = { 'email@example.com': expectedMessage };
+ const exampleUserMsgMultiple =
+ ' and username1: id not found and username2: email is restricted';
+
+ it.each([
+ [[{ data: { message: expectedMessage } }]],
+ [[{ data: { message: expectedMessage + exampleUserMsgMultiple } }]],
+ [[{ data: { error: expectedMessage } }]],
+ [[{ data: { message: [expectedMessage] } }]],
+ [[{ data: { message: exampleKeyedMsg } }]],
+ ])(`returns "${expectedMessage}" from success response: %j`, (successResponse) => {
+ expect(responseMessageFromSuccess(successResponse)).toBe(expectedMessage);
+ });
+ });
+
+ describe('message from error response', () => {
+ it.each([
+ [{ response: { data: { error: expectedMessage } } }],
+ [{ response: { data: { message: { user: [expectedMessage] } } } }],
+ [{ response: { data: { message: { access_level: [expectedMessage] } } } }],
+ [{ response: { data: { message: { error: expectedMessage } } } }],
+ [{ response: { data: { message: expectedMessage } } }],
+ ])(`returns "${expectedMessage}" from error response: %j`, (errorResponse) => {
+ expect(responseMessageFromError(errorResponse)).toBe(expectedMessage);
+ });
+ });
+});
diff --git a/spec/frontend/issuable/components/issuable_by_email_spec.js b/spec/frontend/issuable/components/issuable_by_email_spec.js
index f11c41fe25d..01abf239e57 100644
--- a/spec/frontend/issuable/components/issuable_by_email_spec.js
+++ b/spec/frontend/issuable/components/issuable_by_email_spec.js
@@ -154,10 +154,7 @@ describe('IssuableByEmail', () => {
await clickResetEmail();
- expect(mockToastShow).toHaveBeenCalledWith(
- 'There was an error when reseting email token.',
- { type: 'error' },
- );
+ expect(mockToastShow).toHaveBeenCalledWith('There was an error when reseting email token.');
expect(findFormInputGroup().props('value')).toBe('user@gitlab.com');
});
});
diff --git a/spec/frontend/issuable_bulk_update_sidebar/components/status_select_spec.js b/spec/frontend/issuable_bulk_update_sidebar/components/status_select_spec.js
new file mode 100644
index 00000000000..09dcb963154
--- /dev/null
+++ b/spec/frontend/issuable_bulk_update_sidebar/components/status_select_spec.js
@@ -0,0 +1,77 @@
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import StatusSelect from '~/issuable_bulk_update_sidebar/components/status_select.vue';
+import { ISSUE_STATUS_SELECT_OPTIONS } from '~/issuable_bulk_update_sidebar/constants';
+
+describe('StatusSelect', () => {
+ let wrapper;
+
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findHiddenInput = () => wrapper.find('input');
+
+ function createComponent() {
+ wrapper = shallowMount(StatusSelect);
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('with no value selected', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders default text', () => {
+ expect(findDropdown().props('text')).toBe('Select status');
+ });
+
+ it('renders dropdown items with `is-checked` prop set to `false`', () => {
+ const dropdownItems = findAllDropdownItems();
+
+ expect(dropdownItems.at(0).props('isChecked')).toBe(false);
+ expect(dropdownItems.at(1).props('isChecked')).toBe(false);
+ });
+ });
+
+ describe('when selecting a value', () => {
+ const selectItemAtIndex = 0;
+
+ beforeEach(async () => {
+ createComponent();
+ await findAllDropdownItems().at(selectItemAtIndex).vm.$emit('click');
+ });
+
+ it('updates value of the hidden input', () => {
+ expect(findHiddenInput().attributes('value')).toBe(
+ ISSUE_STATUS_SELECT_OPTIONS[selectItemAtIndex].value,
+ );
+ });
+
+ it('updates the dropdown text prop', () => {
+ expect(findDropdown().props('text')).toBe(
+ ISSUE_STATUS_SELECT_OPTIONS[selectItemAtIndex].text,
+ );
+ });
+
+ it('sets dropdown item `is-checked` prop to `true`', () => {
+ const dropdownItems = findAllDropdownItems();
+
+ expect(dropdownItems.at(0).props('isChecked')).toBe(true);
+ expect(dropdownItems.at(1).props('isChecked')).toBe(false);
+ });
+
+ describe('when selecting the value that is already selected', () => {
+ it('clears dropdown selection', async () => {
+ await findAllDropdownItems().at(selectItemAtIndex).vm.$emit('click');
+
+ const dropdownItems = findAllDropdownItems();
+
+ expect(dropdownItems.at(0).props('isChecked')).toBe(false);
+ expect(dropdownItems.at(1).props('isChecked')).toBe(false);
+ expect(findDropdown().props('text')).toBe('Select status');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/issuable_create/components/issuable_form_spec.js b/spec/frontend/issuable_create/components/issuable_form_spec.js
index a074fddf091..30b116bc35c 100644
--- a/spec/frontend/issuable_create/components/issuable_form_spec.js
+++ b/spec/frontend/issuable_create/components/issuable_form_spec.js
@@ -23,6 +23,9 @@ const createComponent = ({
<button class="js-issuable-save">Submit issuable</button>
`,
},
+ stubs: {
+ MarkdownField,
+ },
});
};
diff --git a/spec/frontend/issuable_show/components/issuable_show_root_spec.js b/spec/frontend/issuable_show/components/issuable_show_root_spec.js
index b4c125f4910..7ad409c3a74 100644
--- a/spec/frontend/issuable_show/components/issuable_show_root_spec.js
+++ b/spec/frontend/issuable_show/components/issuable_show_root_spec.js
@@ -133,14 +133,6 @@ describe('IssuableShowRoot', () => {
expect(wrapper.emitted('task-list-update-failure')).toBeTruthy();
});
- it('component emits `sidebar-toggle` event bubbled via issuable-sidebar', () => {
- const issuableSidebar = wrapper.find(IssuableSidebar);
-
- issuableSidebar.vm.$emit('sidebar-toggle', true);
-
- expect(wrapper.emitted('sidebar-toggle')).toBeTruthy();
- });
-
it.each(['keydown-title', 'keydown-description'])(
'component emits `%s` event with event object and issuableMeta params via issuable-body',
(eventName) => {
diff --git a/spec/frontend/issuable_sidebar/components/issuable_sidebar_root_spec.js b/spec/frontend/issuable_sidebar/components/issuable_sidebar_root_spec.js
index 62a0016d67b..c872925cca2 100644
--- a/spec/frontend/issuable_sidebar/components/issuable_sidebar_root_spec.js
+++ b/spec/frontend/issuable_sidebar/components/issuable_sidebar_root_spec.js
@@ -1,88 +1,80 @@
import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
-import { shallowMount } from '@vue/test-utils';
import Cookies from 'js-cookie';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import IssuableSidebarRoot from '~/issuable_sidebar/components/issuable_sidebar_root.vue';
+import { USER_COLLAPSED_GUTTER_COOKIE } from '~/issuable_sidebar/constants';
-const createComponent = (expanded = true) =>
- shallowMount(IssuableSidebarRoot, {
- propsData: {
- expanded,
- },
+const MOCK_LAYOUT_PAGE_CLASS = 'layout-page';
+
+const createComponent = () => {
+ setFixtures(`<div class="${MOCK_LAYOUT_PAGE_CLASS}"></div>`);
+
+ return shallowMountExtended(IssuableSidebarRoot, {
slots: {
'right-sidebar-items': `
<button class="js-todo">Todo</button>
`,
},
});
+};
describe('IssuableSidebarRoot', () => {
let wrapper;
- beforeEach(() => {
- wrapper = createComponent();
- });
+ const findToggleSidebarButton = () => wrapper.findByTestId('toggle-right-sidebar-button');
+
+ const assertPageLayoutClasses = ({ isExpanded }) => {
+ const { classList } = document.querySelector(`.${MOCK_LAYOUT_PAGE_CLASS}`);
+ if (isExpanded) {
+ expect(classList).toContain('right-sidebar-expanded');
+ expect(classList).not.toContain('right-sidebar-collapsed');
+ } else {
+ expect(classList).toContain('right-sidebar-collapsed');
+ expect(classList).not.toContain('right-sidebar-expanded');
+ }
+ };
afterEach(() => {
wrapper.destroy();
});
- describe('watch', () => {
- describe('isExpanded', () => {
- it('emits `sidebar-toggle` event on component', async () => {
- wrapper.setData({
- isExpanded: false,
- });
-
- await wrapper.vm.$nextTick();
-
- expect(wrapper.emitted('sidebar-toggle')).toBeTruthy();
- expect(wrapper.emitted('sidebar-toggle')[0]).toEqual([
- {
- expanded: false,
- },
- ]);
- });
- });
- });
+ describe('when sidebar is expanded', () => {
+ beforeEach(() => {
+ jest.spyOn(Cookies, 'set').mockImplementation(jest.fn());
+ jest.spyOn(Cookies, 'get').mockReturnValue(false);
+ jest.spyOn(bp, 'isDesktop').mockReturnValue(true);
- describe('methods', () => {
- describe('updatePageContainerClass', () => {
- beforeEach(() => {
- setFixtures('<div class="layout-page"></div>');
- });
+ wrapper = createComponent();
+ });
- it.each`
- isExpanded | layoutPageClass
- ${true} | ${'right-sidebar-expanded'}
- ${false} | ${'right-sidebar-collapsed'}
- `(
- 'set class $layoutPageClass to container element when `isExpanded` prop is $isExpanded',
- async ({ isExpanded, layoutPageClass }) => {
- wrapper.setData({
- isExpanded,
- });
+ it('renders component container element with class `right-sidebar-expanded`', () => {
+ expect(wrapper.classes()).toContain('right-sidebar-expanded');
+ });
- await wrapper.vm.$nextTick();
+ it('sets layout class to reflect expanded state', () => {
+ assertPageLayoutClasses({ isExpanded: true });
+ });
- wrapper.vm.updatePageContainerClass();
+ it('renders sidebar toggle button with text and icon', () => {
+ const buttonEl = findToggleSidebarButton();
- expect(document.querySelector('.layout-page').classList.contains(layoutPageClass)).toBe(
- true,
- );
- },
- );
+ expect(buttonEl.exists()).toBe(true);
+ expect(buttonEl.attributes('title')).toBe('Toggle sidebar');
+ expect(buttonEl.find('span').text()).toBe('Collapse sidebar');
+ expect(wrapper.findByTestId('icon-collapse').isVisible()).toBe(true);
});
- describe('handleWindowResize', () => {
- beforeEach(async () => {
- wrapper.setData({
- userExpanded: true,
- });
+ describe('when collapsing the sidebar', () => {
+ it('updates "collapsed_gutter" cookie value and layout classes', async () => {
+ await findToggleSidebarButton().trigger('click');
- await wrapper.vm.$nextTick();
+ expect(Cookies.set).toHaveBeenCalledWith(USER_COLLAPSED_GUTTER_COOKIE, true);
+ assertPageLayoutClasses({ isExpanded: false });
});
+ });
+ describe('when window `resize` event is triggered', () => {
it.each`
breakpoint | isExpandedValue
${'xs'} | ${false}
@@ -91,109 +83,49 @@ describe('IssuableSidebarRoot', () => {
${'lg'} | ${true}
${'xl'} | ${true}
`(
- 'sets `isExpanded` prop to $isExpandedValue only when current screen size is `lg` or `xl`',
+ 'sets page layout classes correctly when current screen size is `$breakpoint`',
async ({ breakpoint, isExpandedValue }) => {
jest.spyOn(bp, 'isDesktop').mockReturnValue(breakpoint === 'lg' || breakpoint === 'xl');
- wrapper.vm.handleWindowResize();
+ window.dispatchEvent(new Event('resize'));
+ await wrapper.vm.$nextTick();
- expect(wrapper.vm.isExpanded).toBe(isExpandedValue);
+ assertPageLayoutClasses({ isExpanded: isExpandedValue });
},
);
-
- it('calls `updatePageContainerClass` method', () => {
- jest.spyOn(wrapper.vm, 'updatePageContainerClass');
-
- wrapper.vm.handleWindowResize();
-
- expect(wrapper.vm.updatePageContainerClass).toHaveBeenCalled();
- });
- });
-
- describe('handleToggleSidebarClick', () => {
- beforeEach(async () => {
- jest.spyOn(Cookies, 'set').mockImplementation(jest.fn());
- wrapper.setData({
- isExpanded: true,
- });
-
- await wrapper.vm.$nextTick();
- });
-
- it('flips value of `isExpanded`', () => {
- wrapper.vm.handleToggleSidebarClick();
-
- expect(wrapper.vm.isExpanded).toBe(false);
- expect(wrapper.vm.userExpanded).toBe(false);
- });
-
- it('updates "collapsed_gutter" cookie value', () => {
- wrapper.vm.handleToggleSidebarClick();
-
- expect(Cookies.set).toHaveBeenCalledWith('collapsed_gutter', true);
- });
-
- it('calls `updatePageContainerClass` method', () => {
- jest.spyOn(wrapper.vm, 'updatePageContainerClass');
-
- wrapper.vm.handleWindowResize();
-
- expect(wrapper.vm.updatePageContainerClass).toHaveBeenCalled();
- });
});
});
- describe('template', () => {
- describe('sidebar expanded', () => {
- beforeEach(async () => {
- wrapper.setData({
- isExpanded: true,
- });
+ describe('when sidebar is collapsed', () => {
+ beforeEach(() => {
+ jest.spyOn(Cookies, 'get').mockReturnValue(true);
- await wrapper.vm.$nextTick();
- });
-
- it('renders component container element with class `right-sidebar-expanded` when `isExpanded` prop is true', () => {
- expect(wrapper.classes()).toContain('right-sidebar-expanded');
- });
-
- it('renders sidebar toggle button with text and icon', () => {
- const buttonEl = wrapper.find('button');
-
- expect(buttonEl.exists()).toBe(true);
- expect(buttonEl.attributes('title')).toBe('Toggle sidebar');
- expect(buttonEl.find('span').text()).toBe('Collapse sidebar');
- expect(buttonEl.find('[data-testid="icon-collapse"]').isVisible()).toBe(true);
- });
+ wrapper = createComponent();
});
- describe('sidebar collapsed', () => {
- beforeEach(async () => {
- wrapper.setData({
- isExpanded: false,
- });
-
- await wrapper.vm.$nextTick();
- });
+ it('renders component container element with class `right-sidebar-collapsed`', () => {
+ expect(wrapper.classes()).toContain('right-sidebar-collapsed');
+ });
- it('renders component container element with class `right-sidebar-collapsed` when `isExpanded` prop is false', () => {
- expect(wrapper.classes()).toContain('right-sidebar-collapsed');
- });
+ it('sets layout class to reflect collapsed state', () => {
+ assertPageLayoutClasses({ isExpanded: false });
+ });
- it('renders sidebar toggle button with text and icon', () => {
- const buttonEl = wrapper.find('button');
+ it('renders sidebar toggle button with text and icon', () => {
+ const buttonEl = findToggleSidebarButton();
- expect(buttonEl.exists()).toBe(true);
- expect(buttonEl.attributes('title')).toBe('Toggle sidebar');
- expect(buttonEl.find('[data-testid="icon-expand"]').isVisible()).toBe(true);
- });
+ expect(buttonEl.exists()).toBe(true);
+ expect(buttonEl.attributes('title')).toBe('Toggle sidebar');
+ expect(wrapper.findByTestId('icon-expand').isVisible()).toBe(true);
});
+ });
- it('renders sidebar items', () => {
- const sidebarItemsEl = wrapper.find('[data-testid="sidebar-items"]');
+ it('renders slotted sidebar items', () => {
+ wrapper = createComponent();
- expect(sidebarItemsEl.exists()).toBe(true);
- expect(sidebarItemsEl.find('button.js-todo').exists()).toBe(true);
- });
+ const sidebarItemsEl = wrapper.findByTestId('sidebar-items');
+
+ expect(sidebarItemsEl.exists()).toBe(true);
+ expect(sidebarItemsEl.find('button.js-todo').exists()).toBe(true);
});
});
diff --git a/spec/frontend/issuable_spec.js b/spec/frontend/issuable_spec.js
index 9c8f1e04609..e0bd7b802c9 100644
--- a/spec/frontend/issuable_spec.js
+++ b/spec/frontend/issuable_spec.js
@@ -1,5 +1,5 @@
+import issuableInitBulkUpdateSidebar from '~/issuable_bulk_update_sidebar/issuable_init_bulk_update_sidebar';
import IssuableIndex from '~/issuable_index';
-import issuableInitBulkUpdateSidebar from '~/issuable_init_bulk_update_sidebar';
describe('Issuable', () => {
describe('initBulkUpdate', () => {
diff --git a/spec/frontend/issues_list/components/issuables_list_app_spec.js b/spec/frontend/issues_list/components/issuables_list_app_spec.js
index a7f3dd81517..86112dad444 100644
--- a/spec/frontend/issues_list/components/issuables_list_app_spec.js
+++ b/spec/frontend/issues_list/components/issuables_list_app_spec.js
@@ -8,7 +8,7 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
-import { deprecatedCreateFlash as flash } from '~/flash';
+import createFlash from '~/flash';
import Issuable from '~/issues_list/components/issuable.vue';
import IssuablesListApp from '~/issues_list/components/issuables_list_app.vue';
import { PAGE_SIZE, PAGE_SIZE_MANUAL, RELATIVE_POSITION } from '~/issues_list/constants';
@@ -104,7 +104,7 @@ describe('Issuables list component', () => {
});
it('flashes an error', () => {
- expect(flash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalledTimes(1);
});
});
diff --git a/spec/frontend/issues_list/components/issues_list_app_spec.js b/spec/frontend/issues_list/components/issues_list_app_spec.js
index a3ac57ee1bb..846236e1fb5 100644
--- a/spec/frontend/issues_list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues_list/components/issues_list_app_spec.js
@@ -5,6 +5,7 @@ import { cloneDeep } from 'lodash';
import { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import getIssuesQuery from 'ee_else_ce/issues_list/queries/get_issues.query.graphql';
+import getIssuesCountQuery from 'ee_else_ce/issues_list/queries/get_issues_count.query.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
@@ -13,15 +14,16 @@ import {
filteredTokens,
locationSearch,
urlParams,
+ getIssuesCountQueryResponse,
} from 'jest/issues_list/mock_data';
import createFlash from '~/flash';
+import { convertToGraphQLId } from '~/graphql_shared/utils';
import CsvImportExportButtons from '~/issuable/components/csv_import_export_buttons.vue';
import IssuableByEmail from '~/issuable/components/issuable_by_email.vue';
import IssuableList from '~/issuable_list/components/issuable_list_root.vue';
import { IssuableListTabs, IssuableStates } from '~/issuable_list/constants';
import IssuesListApp from '~/issues_list/components/issues_list_app.vue';
import {
- apiSortParams,
CREATED_DESC,
DUE_DATE_OVERDUE,
PARAM_DUE_DATE,
@@ -55,19 +57,18 @@ describe('IssuesListApp component', () => {
localVue.use(VueApollo);
const defaultProvide = {
- autocompleteUsersPath: 'autocomplete/users/path',
calendarPath: 'calendar/path',
canBulkUpdate: false,
emptyStateSvgPath: 'empty-state.svg',
exportCsvPath: 'export/csv/path',
hasBlockedIssuesFeature: true,
hasIssueWeightsFeature: true,
+ hasIterationsFeature: true,
hasProjectIssues: true,
- isSignedIn: false,
+ isSignedIn: true,
issuesPath: 'path/to/issues',
jiraIntegrationPath: 'jira/integration/path',
newIssuePath: 'new/issue/path',
- projectLabelsPath: 'project/labels/path',
projectPath: 'path/to/project',
rssPath: 'rss/path',
showNewIssueLink: true,
@@ -77,7 +78,7 @@ describe('IssuesListApp component', () => {
let defaultQueryResponse = getIssuesQueryResponse;
if (IS_EE) {
defaultQueryResponse = cloneDeep(getIssuesQueryResponse);
- defaultQueryResponse.data.project.issues.nodes[0].blockedByCount = 1;
+ defaultQueryResponse.data.project.issues.nodes[0].blockingCount = 1;
defaultQueryResponse.data.project.issues.nodes[0].healthStatus = null;
defaultQueryResponse.data.project.issues.nodes[0].weight = 5;
}
@@ -93,10 +94,14 @@ describe('IssuesListApp component', () => {
const mountComponent = ({
provide = {},
- response = defaultQueryResponse,
+ issuesQueryResponse = jest.fn().mockResolvedValue(defaultQueryResponse),
+ issuesQueryCountResponse = jest.fn().mockResolvedValue(getIssuesCountQueryResponse),
mountFn = shallowMount,
} = {}) => {
- const requestHandlers = [[getIssuesQuery, jest.fn().mockResolvedValue(response)]];
+ const requestHandlers = [
+ [getIssuesQuery, issuesQueryResponse],
+ [getIssuesCountQuery, issuesQueryCountResponse],
+ ];
const apolloProvider = createMockApollo(requestHandlers);
return mountFn(IssuesListApp, {
@@ -137,8 +142,8 @@ describe('IssuesListApp component', () => {
currentTab: IssuableStates.Opened,
tabCounts: {
opened: 1,
- closed: undefined,
- all: undefined,
+ closed: 1,
+ all: 1,
},
issuablesLoading: false,
isManualOrdering: false,
@@ -148,8 +153,8 @@ describe('IssuesListApp component', () => {
hasPreviousPage: getIssuesQueryResponse.data.project.issues.pageInfo.hasPreviousPage,
hasNextPage: getIssuesQueryResponse.data.project.issues.pageInfo.hasNextPage,
urlParams: {
+ sort: urlSortParams[CREATED_DESC],
state: IssuableStates.Opened,
- ...urlSortParams[CREATED_DESC],
},
});
});
@@ -178,7 +183,7 @@ describe('IssuesListApp component', () => {
describe('csv import/export component', () => {
describe('when user is signed in', () => {
- const search = '?search=refactor&state=opened&sort=created_date';
+ const search = '?search=refactor&sort=created_date&state=opened';
beforeEach(() => {
global.jsdom.reconfigure({ url: `${TEST_HOST}${search}` });
@@ -273,13 +278,17 @@ describe('IssuesListApp component', () => {
describe('sort', () => {
it.each(Object.keys(urlSortParams))('is set as %s from the url params', (sortKey) => {
- global.jsdom.reconfigure({ url: setUrlParams(urlSortParams[sortKey], TEST_HOST) });
+ global.jsdom.reconfigure({
+ url: setUrlParams({ sort: urlSortParams[sortKey] }, TEST_HOST),
+ });
wrapper = mountComponent();
expect(findIssuableList().props()).toMatchObject({
initialSortBy: sortKey,
- urlParams: urlSortParams[sortKey],
+ urlParams: {
+ sort: urlSortParams[sortKey],
+ },
});
});
});
@@ -542,9 +551,13 @@ describe('IssuesListApp component', () => {
});
it('renders all tokens', () => {
+ const preloadedAuthors = [
+ { ...mockCurrentUser, id: convertToGraphQLId('User', mockCurrentUser.id) },
+ ];
+
expect(findIssuableList().props('searchTokens')).toMatchObject([
- { type: TOKEN_TYPE_AUTHOR, preloadedAuthors: [mockCurrentUser] },
- { type: TOKEN_TYPE_ASSIGNEE, preloadedAuthors: [mockCurrentUser] },
+ { type: TOKEN_TYPE_AUTHOR, preloadedAuthors },
+ { type: TOKEN_TYPE_ASSIGNEE, preloadedAuthors },
{ type: TOKEN_TYPE_MILESTONE },
{ type: TOKEN_TYPE_LABEL },
{ type: TOKEN_TYPE_MY_REACTION },
@@ -557,6 +570,29 @@ describe('IssuesListApp component', () => {
});
});
+ describe('errors', () => {
+ describe.each`
+ error | mountOption | message
+ ${'fetching issues'} | ${'issuesQueryResponse'} | ${IssuesListApp.i18n.errorFetchingIssues}
+ ${'fetching issue counts'} | ${'issuesQueryCountResponse'} | ${IssuesListApp.i18n.errorFetchingCounts}
+ `('when there is an error $error', ({ mountOption, message }) => {
+ beforeEach(() => {
+ wrapper = mountComponent({
+ [mountOption]: jest.fn().mockRejectedValue(new Error('ERROR')),
+ });
+ jest.runOnlyPendingTimers();
+ });
+
+ it('shows an error message', () => {
+ expect(createFlash).toHaveBeenCalledWith({
+ captureError: true,
+ error: new Error('Network error: ERROR'),
+ message,
+ });
+ });
+ });
+ });
+
describe('events', () => {
describe('when "click-tab" event is emitted by IssuableList', () => {
beforeEach(() => {
@@ -622,7 +658,7 @@ describe('IssuesListApp component', () => {
};
beforeEach(() => {
- wrapper = mountComponent({ response });
+ wrapper = mountComponent({ issuesQueryResponse: jest.fn().mockResolvedValue(response) });
jest.runOnlyPendingTimers();
});
@@ -640,7 +676,7 @@ describe('IssuesListApp component', () => {
});
describe('when "sort" event is emitted by IssuableList', () => {
- it.each(Object.keys(apiSortParams))(
+ it.each(Object.keys(urlSortParams))(
'updates to the new sort when payload is `%s`',
async (sortKey) => {
wrapper = mountComponent();
@@ -650,7 +686,9 @@ describe('IssuesListApp component', () => {
jest.runOnlyPendingTimers();
await nextTick();
- expect(findIssuableList().props('urlParams')).toMatchObject(urlSortParams[sortKey]);
+ expect(findIssuableList().props('urlParams')).toMatchObject({
+ sort: urlSortParams[sortKey],
+ });
},
);
});
diff --git a/spec/frontend/issues_list/mock_data.js b/spec/frontend/issues_list/mock_data.js
index 6c669e02070..fd59241fd1d 100644
--- a/spec/frontend/issues_list/mock_data.js
+++ b/spec/frontend/issues_list/mock_data.js
@@ -7,9 +7,8 @@ export const getIssuesQueryResponse = {
data: {
project: {
issues: {
- count: 1,
pageInfo: {
- hasNextPage: false,
+ hasNextPage: true,
hasPreviousPage: false,
startCursor: 'startcursor',
endCursor: 'endcursor',
@@ -70,6 +69,16 @@ export const getIssuesQueryResponse = {
},
};
+export const getIssuesCountQueryResponse = {
+ data: {
+ project: {
+ issues: {
+ count: 1,
+ },
+ },
+ },
+};
+
export const locationSearch = [
'?search=find+issues',
'author_username=homer',
@@ -86,10 +95,10 @@ export const locationSearch = [
'not[label_name][]=drama',
'my_reaction_emoji=thumbsup',
'confidential=no',
- 'iteration_title=season:+%234',
- 'not[iteration_title]=season:+%2320',
- 'epic_id=gitlab-org%3A%3A%2612',
- 'not[epic_id]=gitlab-org%3A%3A%2634',
+ 'iteration_id=4',
+ 'not[iteration_id]=20',
+ 'epic_id=12',
+ 'not[epic_id]=34',
'weight=1',
'not[weight]=3',
].join('&');
@@ -118,10 +127,10 @@ export const filteredTokens = [
{ type: 'labels', value: { data: 'drama', operator: OPERATOR_IS_NOT } },
{ type: 'my_reaction_emoji', value: { data: 'thumbsup', operator: OPERATOR_IS } },
{ type: 'confidential', value: { data: 'no', operator: OPERATOR_IS } },
- { type: 'iteration', value: { data: 'season: #4', operator: OPERATOR_IS } },
- { type: 'iteration', value: { data: 'season: #20', operator: OPERATOR_IS_NOT } },
- { type: 'epic_id', value: { data: 'gitlab-org::&12', operator: OPERATOR_IS } },
- { type: 'epic_id', value: { data: 'gitlab-org::&34', operator: OPERATOR_IS_NOT } },
+ { type: 'iteration', value: { data: '4', operator: OPERATOR_IS } },
+ { type: 'iteration', value: { data: '20', operator: OPERATOR_IS_NOT } },
+ { type: 'epic_id', value: { data: '12', operator: OPERATOR_IS } },
+ { type: 'epic_id', value: { data: '34', operator: OPERATOR_IS_NOT } },
{ type: 'weight', value: { data: '1', operator: OPERATOR_IS } },
{ type: 'weight', value: { data: '3', operator: OPERATOR_IS_NOT } },
{ type: 'filtered-search-term', value: { data: 'find' } },
@@ -138,30 +147,32 @@ export const filteredTokensWithSpecialValues = [
];
export const apiParams = {
- author_username: 'homer',
- 'not[author_username]': 'marge',
- assignee_username: ['bart', 'lisa'],
- 'not[assignee_username]': ['patty', 'selma'],
- milestone: 'season 4',
- 'not[milestone]': 'season 20',
- labels: ['cartoon', 'tv'],
- 'not[labels]': ['live action', 'drama'],
- my_reaction_emoji: 'thumbsup',
+ authorUsername: 'homer',
+ assigneeUsernames: ['bart', 'lisa'],
+ milestoneTitle: 'season 4',
+ labelName: ['cartoon', 'tv'],
+ myReactionEmoji: 'thumbsup',
confidential: 'no',
- iteration_title: 'season: #4',
- 'not[iteration_title]': 'season: #20',
- epic_id: '12',
- 'not[epic_id]': 'gitlab-org::&34',
+ iterationId: '4',
+ epicId: '12',
weight: '1',
- 'not[weight]': '3',
+ not: {
+ authorUsername: 'marge',
+ assigneeUsernames: ['patty', 'selma'],
+ milestoneTitle: 'season 20',
+ labelName: ['live action', 'drama'],
+ iterationId: '20',
+ epicId: '34',
+ weight: '3',
+ },
};
export const apiParamsWithSpecialValues = {
- assignee_id: '123',
- assignee_username: 'bart',
- my_reaction_emoji: 'None',
- iteration_id: 'Current',
- epic_id: 'None',
+ assigneeId: '123',
+ assigneeUsernames: 'bart',
+ myReactionEmoji: 'None',
+ iterationWildcardId: 'CURRENT',
+ epicId: 'None',
weight: 'None',
};
@@ -176,10 +187,10 @@ export const urlParams = {
'not[label_name][]': ['live action', 'drama'],
my_reaction_emoji: 'thumbsup',
confidential: 'no',
- iteration_title: 'season: #4',
- 'not[iteration_title]': 'season: #20',
- epic_id: 'gitlab-org%3A%3A%2612',
- 'not[epic_id]': 'gitlab-org::&34',
+ iteration_id: '4',
+ 'not[iteration_id]': '20',
+ epic_id: '12',
+ 'not[epic_id]': '34',
weight: '1',
'not[weight]': '3',
};
diff --git a/spec/frontend/issues_list/utils_spec.js b/spec/frontend/issues_list/utils_spec.js
index e377c35a0aa..b7863068570 100644
--- a/spec/frontend/issues_list/utils_spec.js
+++ b/spec/frontend/issues_list/utils_spec.js
@@ -8,10 +8,11 @@ import {
urlParams,
urlParamsWithSpecialValues,
} from 'jest/issues_list/mock_data';
-import { API_PARAM, DUE_DATE_VALUES, URL_PARAM, urlSortParams } from '~/issues_list/constants';
+import { DUE_DATE_VALUES, urlSortParams } from '~/issues_list/constants';
import {
- convertToParams,
+ convertToApiParams,
convertToSearchQuery,
+ convertToUrlParams,
getDueDateValue,
getFilterTokens,
getSortKey,
@@ -20,7 +21,7 @@ import {
describe('getSortKey', () => {
it.each(Object.keys(urlSortParams))('returns %s given the correct inputs', (sortKey) => {
- const { sort } = urlSortParams[sortKey];
+ const sort = urlSortParams[sortKey];
expect(getSortKey(sort)).toBe(sortKey);
});
});
@@ -80,31 +81,23 @@ describe('getFilterTokens', () => {
});
});
-describe('convertToParams', () => {
+describe('convertToApiParams', () => {
it('returns api params given filtered tokens', () => {
- expect(convertToParams(filteredTokens, API_PARAM)).toEqual({
- ...apiParams,
- epic_id: 'gitlab-org::&12',
- });
+ expect(convertToApiParams(filteredTokens)).toEqual(apiParams);
});
it('returns api params given filtered tokens with special values', () => {
- expect(convertToParams(filteredTokensWithSpecialValues, API_PARAM)).toEqual(
- apiParamsWithSpecialValues,
- );
+ expect(convertToApiParams(filteredTokensWithSpecialValues)).toEqual(apiParamsWithSpecialValues);
});
+});
+describe('convertToUrlParams', () => {
it('returns url params given filtered tokens', () => {
- expect(convertToParams(filteredTokens, URL_PARAM)).toEqual({
- ...urlParams,
- epic_id: 'gitlab-org::&12',
- });
+ expect(convertToUrlParams(filteredTokens)).toEqual(urlParams);
});
it('returns url params given filtered tokens with special values', () => {
- expect(convertToParams(filteredTokensWithSpecialValues, URL_PARAM)).toEqual(
- urlParamsWithSpecialValues,
- );
+ expect(convertToUrlParams(filteredTokensWithSpecialValues)).toEqual(urlParamsWithSpecialValues);
});
});
diff --git a/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js b/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js
new file mode 100644
index 00000000000..ec4cb2739f8
--- /dev/null
+++ b/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js
@@ -0,0 +1,180 @@
+import { GlDropdown, GlDropdownItem, GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
+import { mount, shallowMount, createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import ProjectDropdown from '~/jira_connect/branches/components/project_dropdown.vue';
+import { PROJECTS_PER_PAGE } from '~/jira_connect/branches/constants';
+import getProjectsQuery from '~/jira_connect/branches/graphql/queries/get_projects.query.graphql';
+
+const localVue = createLocalVue();
+
+const mockProjects = [
+ {
+ id: 'test',
+ name: 'test',
+ nameWithNamespace: 'test',
+ avatarUrl: 'https://gitlab.com',
+ path: 'test-path',
+ fullPath: 'test-path',
+ repository: {
+ empty: false,
+ },
+ },
+ {
+ id: 'gitlab',
+ name: 'GitLab',
+ nameWithNamespace: 'gitlab-org/gitlab',
+ avatarUrl: 'https://gitlab.com',
+ path: 'gitlab',
+ fullPath: 'gitlab-org/gitlab',
+ repository: {
+ empty: false,
+ },
+ },
+];
+
+const mockProjectsQueryResponse = {
+ data: {
+ projects: {
+ nodes: mockProjects,
+ pageInfo: {
+ hasNextPage: false,
+ hasPreviousPage: false,
+ startCursor: '',
+ endCursor: '',
+ },
+ },
+ },
+};
+const mockGetProjectsQuerySuccess = jest.fn().mockResolvedValue(mockProjectsQueryResponse);
+const mockGetProjectsQueryFailed = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+const mockQueryLoading = jest.fn().mockReturnValue(new Promise(() => {}));
+
+describe('ProjectDropdown', () => {
+ let wrapper;
+
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findDropdownItemByText = (text) =>
+ findAllDropdownItems().wrappers.find((item) => item.text() === text);
+ const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
+
+ function createMockApolloProvider({ mockGetProjectsQuery = mockGetProjectsQuerySuccess } = {}) {
+ localVue.use(VueApollo);
+
+ const mockApollo = createMockApollo([[getProjectsQuery, mockGetProjectsQuery]]);
+
+ return mockApollo;
+ }
+
+ function createComponent({ mockApollo, props, mountFn = shallowMount } = {}) {
+ wrapper = mountFn(ProjectDropdown, {
+ localVue,
+ apolloProvider: mockApollo || createMockApolloProvider(),
+ propsData: props,
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when loading projects', () => {
+ beforeEach(() => {
+ createComponent({
+ mockApollo: createMockApolloProvider({ mockGetProjectsQuery: mockQueryLoading }),
+ });
+ });
+
+ it('sets dropdown `loading` prop to `true`', () => {
+ expect(findDropdown().props('loading')).toBe(true);
+ });
+
+ it('renders loading icon in dropdown', () => {
+ expect(findLoadingIcon().isVisible()).toBe(true);
+ });
+ });
+
+ describe('when projects query succeeds', () => {
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('sets dropdown `loading` prop to `false`', () => {
+ expect(findDropdown().props('loading')).toBe(false);
+ });
+
+ it('renders dropdown items', () => {
+ const dropdownItems = findAllDropdownItems();
+ expect(dropdownItems.wrappers).toHaveLength(mockProjects.length);
+ expect(dropdownItems.wrappers.map((item) => item.text())).toEqual(
+ mockProjects.map((project) => project.nameWithNamespace),
+ );
+ });
+
+ describe('when selecting a dropdown item', () => {
+ it('emits `change` event with the selected project', async () => {
+ const mockProject = mockProjects[0];
+ const itemToSelect = findDropdownItemByText(mockProject.nameWithNamespace);
+ await itemToSelect.vm.$emit('click');
+
+ expect(wrapper.emitted('change')[0]).toEqual([mockProject]);
+ });
+ });
+
+ describe('when `selectedProject` prop is specified', () => {
+ const mockProject = mockProjects[0];
+
+ beforeEach(async () => {
+ wrapper.setProps({
+ selectedProject: mockProject,
+ });
+ });
+
+ it('sets `isChecked` prop of the corresponding dropdown item to `true`', () => {
+ expect(findDropdownItemByText(mockProject.nameWithNamespace).props('isChecked')).toBe(true);
+ });
+
+ it('sets dropdown text to the selected project name', () => {
+ expect(findDropdown().props('text')).toBe(mockProject.nameWithNamespace);
+ });
+ });
+ });
+
+ describe('when projects query fails', () => {
+ beforeEach(async () => {
+ createComponent({
+ mockApollo: createMockApolloProvider({ mockGetProjectsQuery: mockGetProjectsQueryFailed }),
+ });
+ await waitForPromises();
+ });
+
+ it('emits `error` event', () => {
+ expect(wrapper.emitted('error')).toBeTruthy();
+ });
+ });
+
+ describe('when searching projects', () => {
+ it('triggers a refetch', async () => {
+ createComponent({ mountFn: mount });
+ await waitForPromises();
+ jest.clearAllMocks();
+
+ const mockSearchTerm = 'gitl';
+ await findSearchBox().vm.$emit('input', mockSearchTerm);
+
+ expect(mockGetProjectsQuerySuccess).toHaveBeenCalledWith({
+ after: '',
+ first: PROJECTS_PER_PAGE,
+ membership: true,
+ search: mockSearchTerm,
+ searchNamespaces: true,
+ sort: 'similarity',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/jira_connect/branches/components/source_branch_dropdown_spec.js b/spec/frontend/jira_connect/branches/components/source_branch_dropdown_spec.js
new file mode 100644
index 00000000000..9dd11dd6345
--- /dev/null
+++ b/spec/frontend/jira_connect/branches/components/source_branch_dropdown_spec.js
@@ -0,0 +1,192 @@
+import { GlDropdown, GlDropdownItem, GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
+import { mount, shallowMount, createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import SourceBranchDropdown from '~/jira_connect/branches/components/source_branch_dropdown.vue';
+import { BRANCHES_PER_PAGE } from '~/jira_connect/branches/constants';
+import getProjectQuery from '~/jira_connect/branches/graphql/queries/get_project.query.graphql';
+
+const localVue = createLocalVue();
+
+const mockProject = {
+ id: 'test',
+ fullPath: 'test-path',
+ repository: {
+ branchNames: ['main', 'f-test', 'release'],
+ rootRef: 'main',
+ },
+};
+
+const mockProjectQueryResponse = {
+ data: {
+ project: mockProject,
+ },
+};
+const mockGetProjectQuery = jest.fn().mockResolvedValue(mockProjectQueryResponse);
+const mockQueryLoading = jest.fn().mockReturnValue(new Promise(() => {}));
+
+describe('SourceBranchDropdown', () => {
+ let wrapper;
+
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findDropdownItemByText = (text) =>
+ findAllDropdownItems().wrappers.find((item) => item.text() === text);
+ const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
+
+ const assertDropdownItems = () => {
+ const dropdownItems = findAllDropdownItems();
+ expect(dropdownItems.wrappers).toHaveLength(mockProject.repository.branchNames.length);
+ expect(dropdownItems.wrappers.map((item) => item.text())).toEqual(
+ mockProject.repository.branchNames,
+ );
+ };
+
+ function createMockApolloProvider({ getProjectQueryLoading = false } = {}) {
+ localVue.use(VueApollo);
+
+ const mockApollo = createMockApollo([
+ [getProjectQuery, getProjectQueryLoading ? mockQueryLoading : mockGetProjectQuery],
+ ]);
+
+ return mockApollo;
+ }
+
+ function createComponent({ mockApollo, props, mountFn = shallowMount } = {}) {
+ wrapper = mountFn(SourceBranchDropdown, {
+ localVue,
+ apolloProvider: mockApollo || createMockApolloProvider(),
+ propsData: props,
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when `selectedProject` prop is not specified', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('sets dropdown `disabled` prop to `true`', () => {
+ expect(findDropdown().props('disabled')).toBe(true);
+ });
+
+ describe('when `selectedProject` becomes specified', () => {
+ beforeEach(async () => {
+ wrapper.setProps({
+ selectedProject: mockProject,
+ });
+
+ await waitForPromises();
+ });
+
+ it('sets dropdown props correctly', () => {
+ expect(findDropdown().props()).toMatchObject({
+ loading: false,
+ disabled: false,
+ text: 'Select a branch',
+ });
+ });
+
+ it('renders available source branches as dropdown items', () => {
+ assertDropdownItems();
+ });
+ });
+ });
+
+ describe('when `selectedProject` prop is specified', () => {
+ describe('when branches are loading', () => {
+ it('renders loading icon in dropdown', () => {
+ createComponent({
+ mockApollo: createMockApolloProvider({ getProjectQueryLoading: true }),
+ props: { selectedProject: mockProject },
+ });
+
+ expect(findLoadingIcon().isVisible()).toBe(true);
+ });
+ });
+
+ describe('when branches have loaded', () => {
+ describe('when searching branches', () => {
+ it('triggers a refetch', async () => {
+ createComponent({ mountFn: mount, props: { selectedProject: mockProject } });
+ await waitForPromises();
+ jest.clearAllMocks();
+
+ const mockSearchTerm = 'mai';
+ await findSearchBox().vm.$emit('input', mockSearchTerm);
+
+ expect(mockGetProjectQuery).toHaveBeenCalledWith({
+ branchNamesLimit: BRANCHES_PER_PAGE,
+ branchNamesOffset: 0,
+ branchNamesSearchPattern: `*${mockSearchTerm}*`,
+ projectPath: 'test-path',
+ });
+ });
+ });
+
+ describe('template', () => {
+ beforeEach(async () => {
+ createComponent({ props: { selectedProject: mockProject } });
+ await waitForPromises();
+ });
+
+ it('sets dropdown props correctly', () => {
+ expect(findDropdown().props()).toMatchObject({
+ loading: false,
+ disabled: false,
+ text: 'Select a branch',
+ });
+ });
+
+ it('omits monospace styling from dropdown', () => {
+ expect(findDropdown().classes()).not.toContain('gl-font-monospace');
+ });
+
+ it('renders available source branches as dropdown items', () => {
+ assertDropdownItems();
+ });
+
+ it("emits `change` event with the repository's `rootRef` by default", () => {
+ expect(wrapper.emitted('change')[0]).toEqual([mockProject.repository.rootRef]);
+ });
+
+ describe('when selecting a dropdown item', () => {
+ it('emits `change` event with the selected branch name', async () => {
+ const mockBranchName = mockProject.repository.branchNames[1];
+ const itemToSelect = findDropdownItemByText(mockBranchName);
+ await itemToSelect.vm.$emit('click');
+
+ expect(wrapper.emitted('change')[1]).toEqual([mockBranchName]);
+ });
+ });
+
+ describe('when `selectedBranchName` prop is specified', () => {
+ const mockBranchName = mockProject.repository.branchNames[2];
+
+ beforeEach(async () => {
+ wrapper.setProps({
+ selectedBranchName: mockBranchName,
+ });
+ });
+
+ it('sets `isChecked` prop of the corresponding dropdown item to `true`', () => {
+ expect(findDropdownItemByText(mockBranchName).props('isChecked')).toBe(true);
+ });
+
+ it('sets dropdown text to `selectedBranchName` value', () => {
+ expect(findDropdown().props('text')).toBe(mockBranchName);
+ });
+
+ it('adds monospace styling to dropdown', () => {
+ expect(findDropdown().classes()).toContain('gl-font-monospace');
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/jira_connect/components/groups_list_spec.js b/spec/frontend/jira_connect/components/groups_list_spec.js
index 4b875928a90..d583fb68771 100644
--- a/spec/frontend/jira_connect/components/groups_list_spec.js
+++ b/spec/frontend/jira_connect/components/groups_list_spec.js
@@ -160,9 +160,13 @@ describe('GroupsList', () => {
expect(findGroupsList().classes()).toContain('gl-opacity-5');
});
- it('sets loading prop of ths search box', () => {
+ it('sets loading prop of the search box', () => {
expect(findSearchBox().props('isLoading')).toBe(true);
});
+
+ it('sets value prop of the search box to the search term', () => {
+ expect(findSearchBox().props('value')).toBe(mockSearchTeam);
+ });
});
describe('when group search finishes loading', () => {
diff --git a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
index 172b6e4831c..f2142ce1fcf 100644
--- a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
+++ b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
@@ -176,7 +176,6 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
<!---->
</div>
-
</ul>
</div>
</td>
@@ -304,7 +303,6 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
<!---->
</div>
-
</ul>
</div>
</td>
diff --git a/spec/frontend/jobs/components/empty_state_spec.js b/spec/frontend/jobs/components/empty_state_spec.js
index c9de110ce06..9738fd14275 100644
--- a/spec/frontend/jobs/components/empty_state_spec.js
+++ b/spec/frontend/jobs/components/empty_state_spec.js
@@ -9,7 +9,6 @@ describe('Empty State', () => {
illustrationSizeClass: 'svg-430',
title: 'This job has not started yet',
playable: false,
- variablesSettingsUrl: '',
};
const createWrapper = (props) => {
diff --git a/spec/frontend/jobs/components/job_app_spec.js b/spec/frontend/jobs/components/job_app_spec.js
index 3fcefde1aba..1f4dd7d6216 100644
--- a/spec/frontend/jobs/components/job_app_spec.js
+++ b/spec/frontend/jobs/components/job_app_spec.js
@@ -24,6 +24,7 @@ describe('Job App', () => {
let store;
let wrapper;
let mock;
+ let origGon;
const initSettings = {
endpoint: `${TEST_HOST}jobs/123.json`,
@@ -37,7 +38,6 @@ describe('Job App', () => {
deploymentHelpUrl: 'help/deployment',
codeQualityHelpPath: '/help/code_quality',
runnerSettingsUrl: 'settings/ci-cd/runners',
- variablesSettingsUrl: 'settings/ci-cd/variables',
terminalPath: 'jobs/123/terminal',
projectPath: 'user-name/project-name',
subscriptionsMoreMinutesUrl: 'https://customers.gitlab.com/buy_pipeline_minutes',
@@ -86,11 +86,17 @@ describe('Job App', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
store = createStore();
+
+ origGon = window.gon;
+
+ window.gon = { features: { infinitelyCollapsibleSections: false } }; // NOTE: All of this passes with the feature flag
});
afterEach(() => {
wrapper.destroy();
mock.restore();
+
+ window.gon = origGon;
});
describe('while loading', () => {
diff --git a/spec/frontend/jobs/components/log/collapsible_section_spec.js b/spec/frontend/jobs/components/log/collapsible_section_spec.js
index 66f22162c97..4e23a3ba7b8 100644
--- a/spec/frontend/jobs/components/log/collapsible_section_spec.js
+++ b/spec/frontend/jobs/components/log/collapsible_section_spec.js
@@ -4,6 +4,7 @@ import { collapsibleSectionClosed, collapsibleSectionOpened } from './mock_data'
describe('Job Log Collapsible Section', () => {
let wrapper;
+ let origGon;
const traceEndpoint = 'jobs/335';
@@ -18,8 +19,16 @@ describe('Job Log Collapsible Section', () => {
});
};
+ beforeEach(() => {
+ origGon = window.gon;
+
+ window.gon = { features: { infinitelyCollapsibleSections: false } }; // NOTE: This also works with true
+ });
+
afterEach(() => {
wrapper.destroy();
+
+ window.gon = origGon;
});
describe('with closed section', () => {
diff --git a/spec/frontend/jobs/components/log/line_spec.js b/spec/frontend/jobs/components/log/line_spec.js
index 367154e7f82..d184696cd1f 100644
--- a/spec/frontend/jobs/components/log/line_spec.js
+++ b/spec/frontend/jobs/components/log/line_spec.js
@@ -94,6 +94,16 @@ describe('Job Log Line', () => {
expect(findLinkAttributeByIndex(0).href).toBe(queryUrl);
});
+ it('renders links that have brackets `[]` in their parameters', () => {
+ const url = `${httpUrl}?label_name[]=frontend`;
+
+ createComponent(mockProps({ text: url }));
+
+ expect(findLine().text()).toBe(url);
+ expect(findLinks().at(0).text()).toBe(url);
+ expect(findLinks().at(0).attributes('href')).toBe(url);
+ });
+
it('renders multiple links surrounded by text', () => {
createComponent(
mockProps({ text: `Well, my HTTP url: ${httpUrl} and my HTTPS url: ${httpsUrl}` }),
@@ -125,6 +135,26 @@ describe('Job Log Line', () => {
expect(findLinkAttributeByIndex(4).href).toBe(httpsUrl);
});
+ it('renders multiple links surrounded by brackets', () => {
+ createComponent(mockProps({ text: `(${httpUrl}) <${httpUrl}> {${httpsUrl}}` }));
+ expect(findLine().text()).toBe(
+ '(http://example.com) <http://example.com> {https://example.com}',
+ );
+
+ const links = findLinks();
+
+ expect(links).toHaveLength(3);
+
+ expect(links.at(0).text()).toBe(httpUrl);
+ expect(links.at(0).attributes('href')).toBe(httpUrl);
+
+ expect(links.at(1).text()).toBe(httpUrl);
+ expect(links.at(1).attributes('href')).toBe(httpUrl);
+
+ expect(links.at(2).text()).toBe(httpsUrl);
+ expect(links.at(2).attributes('href')).toBe(httpsUrl);
+ });
+
it('renders text with symbols in it', () => {
const text = 'apt-get update < /dev/null > /dev/null';
createComponent(mockProps({ text }));
diff --git a/spec/frontend/jobs/components/log/log_spec.js b/spec/frontend/jobs/components/log/log_spec.js
index b7aff1f3e3b..99fb6846ce5 100644
--- a/spec/frontend/jobs/components/log/log_spec.js
+++ b/spec/frontend/jobs/components/log/log_spec.js
@@ -1,7 +1,7 @@
import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import Log from '~/jobs/components/log/log.vue';
-import { logLinesParser } from '~/jobs/store/utils';
+import { logLinesParserLegacy, logLinesParser } from '~/jobs/store/utils';
import { jobLog } from './mock_data';
describe('Job Log', () => {
@@ -9,6 +9,7 @@ describe('Job Log', () => {
let actions;
let state;
let store;
+ let origGon;
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -25,8 +26,12 @@ describe('Job Log', () => {
toggleCollapsibleLine: () => {},
};
+ origGon = window.gon;
+
+ window.gon = { features: { infinitelyCollapsibleSections: false } };
+
state = {
- trace: logLinesParser(jobLog),
+ trace: logLinesParserLegacy(jobLog),
traceEndpoint: 'jobs/id',
};
@@ -40,6 +45,88 @@ describe('Job Log', () => {
afterEach(() => {
wrapper.destroy();
+
+ window.gon = origGon;
+ });
+
+ const findCollapsibleLine = () => wrapper.find('.collapsible-line');
+
+ describe('line numbers', () => {
+ it('renders a line number for each open line', () => {
+ expect(wrapper.find('#L1').text()).toBe('1');
+ expect(wrapper.find('#L2').text()).toBe('2');
+ expect(wrapper.find('#L3').text()).toBe('3');
+ });
+
+ it('links to the provided path and correct line number', () => {
+ expect(wrapper.find('#L1').attributes('href')).toBe(`${state.traceEndpoint}#L1`);
+ });
+ });
+
+ describe('collapsible sections', () => {
+ it('renders a clickable header section', () => {
+ expect(findCollapsibleLine().attributes('role')).toBe('button');
+ });
+
+ it('renders an icon with the open state', () => {
+ expect(findCollapsibleLine().find('[data-testid="angle-down-icon"]').exists()).toBe(true);
+ });
+
+ describe('on click header section', () => {
+ it('calls toggleCollapsibleLine', () => {
+ jest.spyOn(wrapper.vm, 'toggleCollapsibleLine');
+
+ findCollapsibleLine().trigger('click');
+
+ expect(wrapper.vm.toggleCollapsibleLine).toHaveBeenCalled();
+ });
+ });
+ });
+});
+
+describe('Job Log, infinitelyCollapsibleSections feature flag enabled', () => {
+ let wrapper;
+ let actions;
+ let state;
+ let store;
+ let origGon;
+
+ const localVue = createLocalVue();
+ localVue.use(Vuex);
+
+ const createComponent = () => {
+ wrapper = mount(Log, {
+ localVue,
+ store,
+ });
+ };
+
+ beforeEach(() => {
+ actions = {
+ toggleCollapsibleLine: () => {},
+ };
+
+ origGon = window.gon;
+
+ window.gon = { features: { infinitelyCollapsibleSections: true } };
+
+ state = {
+ trace: logLinesParser(jobLog).parsedLines,
+ traceEndpoint: 'jobs/id',
+ };
+
+ store = new Vuex.Store({
+ actions,
+ state,
+ });
+
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+
+ window.gon = origGon;
});
const findCollapsibleLine = () => wrapper.find('.collapsible-line');
diff --git a/spec/frontend/jobs/components/log/mock_data.js b/spec/frontend/jobs/components/log/mock_data.js
index eb8c4fe8bc9..76c35703106 100644
--- a/spec/frontend/jobs/components/log/mock_data.js
+++ b/spec/frontend/jobs/components/log/mock_data.js
@@ -58,6 +58,71 @@ export const utilsMockData = [
},
];
+export const multipleCollapsibleSectionsMockData = [
+ {
+ offset: 1001,
+ content: [{ text: ' on docker-auto-scale-com 8a6210b8' }],
+ },
+ {
+ offset: 1002,
+ content: [
+ {
+ text: 'Executing "step_script" stage of the job script',
+ },
+ ],
+ section: 'step-script',
+ section_header: true,
+ },
+ {
+ offset: 1003,
+ content: [{ text: 'sleep 60' }],
+ section: 'step-script',
+ },
+ {
+ offset: 1004,
+ content: [
+ {
+ text:
+ 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam lorem dolor, congue ac condimentum vitae',
+ },
+ ],
+ section: 'step-script',
+ },
+ {
+ offset: 1005,
+ content: [{ text: 'executing...' }],
+ section: 'step-script',
+ },
+ {
+ offset: 1006,
+ content: [{ text: '1st collapsible section' }],
+ section: 'collapsible-1',
+ section_header: true,
+ },
+ {
+ offset: 1007,
+ content: [
+ {
+ text:
+ 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam lorem dolor, congue ac condimentum vitae',
+ },
+ ],
+ section: 'collapsible-1',
+ },
+ {
+ offset: 1008,
+ content: [],
+ section: 'collapsible-1',
+ section_duration: '01:00',
+ },
+ {
+ offset: 1009,
+ content: [],
+ section: 'step-script',
+ section_duration: '10:00',
+ },
+];
+
export const originalTrace = [
{
offset: 1,
diff --git a/spec/frontend/jobs/components/manual_variables_form_spec.js b/spec/frontend/jobs/components/manual_variables_form_spec.js
index 376a822dde5..7e42ee957d3 100644
--- a/spec/frontend/jobs/components/manual_variables_form_spec.js
+++ b/spec/frontend/jobs/components/manual_variables_form_spec.js
@@ -1,3 +1,4 @@
+import { GlSprintf, GlLink } from '@gitlab/ui';
import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
@@ -18,7 +19,6 @@ describe('Manual Variables Form', () => {
method: 'post',
button_title: 'Trigger this manual action',
},
- variablesSettingsUrl: '/settings',
};
const createComponent = ({ props = {}, mountFn = shallowMount } = {}) => {
@@ -33,15 +33,19 @@ describe('Manual Variables Form', () => {
propsData: { ...requiredProps, ...props },
localVue,
store,
+ stubs: {
+ GlSprintf,
+ },
}),
);
};
const findInputKey = () => wrapper.findComponent({ ref: 'inputKey' });
const findInputValue = () => wrapper.findComponent({ ref: 'inputSecretValue' });
+ const findHelpText = () => wrapper.findComponent(GlSprintf);
+ const findHelpLink = () => wrapper.findComponent(GlLink);
const findTriggerBtn = () => wrapper.findByTestId('trigger-manual-job-btn');
- const findHelpText = () => wrapper.findByTestId('form-help-text');
const findDeleteVarBtn = () => wrapper.findByTestId('delete-variable-btn');
const findCiVariableKey = () => wrapper.findByTestId('ci-variable-key');
const findCiVariableValue = () => wrapper.findByTestId('ci-variable-value');
@@ -62,11 +66,10 @@ describe('Manual Variables Form', () => {
});
it('renders help text with provided link', () => {
- expect(findHelpText().text()).toBe(
- 'Specify variable values to be used in this run. The values specified in CI/CD settings will be used as default',
+ expect(findHelpText().exists()).toBe(true);
+ expect(findHelpLink().attributes('href')).toBe(
+ '/help/ci/variables/index#add-a-cicd-variable-to-a-project',
);
-
- expect(wrapper.find('a').attributes('href')).toBe(requiredProps.variablesSettingsUrl);
});
describe('when adding a new variable', () => {
diff --git a/spec/frontend/jobs/components/sidebar_detail_row_spec.js b/spec/frontend/jobs/components/sidebar_detail_row_spec.js
index bae4d6cf837..43f2e022dd8 100644
--- a/spec/frontend/jobs/components/sidebar_detail_row_spec.js
+++ b/spec/frontend/jobs/components/sidebar_detail_row_spec.js
@@ -7,7 +7,7 @@ describe('Sidebar detail row', () => {
const title = 'this is the title';
const value = 'this is the value';
- const helpUrl = '/help/ci/runners/README.html';
+ const helpUrl = '/help/ci/runners/index.html';
const findHelpLink = () => wrapper.findComponent(GlLink);
diff --git a/spec/frontend/jobs/store/mutations_spec.js b/spec/frontend/jobs/store/mutations_spec.js
index 1c7e45dfb3d..159315330e4 100644
--- a/spec/frontend/jobs/store/mutations_spec.js
+++ b/spec/frontend/jobs/store/mutations_spec.js
@@ -4,12 +4,21 @@ import state from '~/jobs/store/state';
describe('Jobs Store Mutations', () => {
let stateCopy;
+ let origGon;
const html =
'I, [2018-08-17T22:57:45.707325 #1841] INFO -- : Writing /builds/ab89e95b0fa0b9272ea0c797b76908f24d36992630e9325273a4ce3.png<br>I';
beforeEach(() => {
stateCopy = state();
+
+ origGon = window.gon;
+
+ window.gon = { features: { infinitelyCollapsibleSections: false } };
+ });
+
+ afterEach(() => {
+ window.gon = origGon;
});
describe('SET_JOB_ENDPOINT', () => {
@@ -267,3 +276,88 @@ describe('Jobs Store Mutations', () => {
});
});
});
+
+describe('Job Store mutations, feature flag ON', () => {
+ let stateCopy;
+ let origGon;
+
+ const html =
+ 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- : Writing /builds/ab89e95b0fa0b9272ea0c797b76908f24d36992630e9325273a4ce3.png<br>I';
+
+ beforeEach(() => {
+ stateCopy = state();
+
+ origGon = window.gon;
+
+ window.gon = { features: { infinitelyCollapsibleSections: true } };
+ });
+
+ afterEach(() => {
+ window.gon = origGon;
+ });
+
+ describe('RECEIVE_TRACE_SUCCESS', () => {
+ describe('with new job log', () => {
+ describe('log.lines', () => {
+ describe('when append is true', () => {
+ it('sets the parsed log ', () => {
+ mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
+ append: true,
+ size: 511846,
+ complete: true,
+ lines: [
+ {
+ offset: 1,
+ content: [{ text: 'Running with gitlab-runner 11.12.1 (5a147c92)' }],
+ },
+ ],
+ });
+
+ expect(stateCopy.trace).toEqual([
+ {
+ offset: 1,
+ content: [{ text: 'Running with gitlab-runner 11.12.1 (5a147c92)' }],
+ lineNumber: 1,
+ },
+ ]);
+ });
+ });
+
+ describe('when lines are defined', () => {
+ it('sets the parsed log ', () => {
+ mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
+ append: false,
+ size: 511846,
+ complete: true,
+ lines: [
+ { offset: 0, content: [{ text: 'Running with gitlab-runner 11.11.1 (5a147c92)' }] },
+ ],
+ });
+
+ expect(stateCopy.trace).toEqual([
+ {
+ offset: 0,
+ content: [{ text: 'Running with gitlab-runner 11.11.1 (5a147c92)' }],
+ lineNumber: 1,
+ },
+ ]);
+ });
+ });
+
+ describe('when lines are null', () => {
+ it('sets the default value', () => {
+ mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
+ append: true,
+ html,
+ size: 511846,
+ complete: false,
+ lines: null,
+ });
+
+ expect(stateCopy.trace).toEqual([]);
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/jobs/store/utils_spec.js b/spec/frontend/jobs/store/utils_spec.js
index e50d304bb08..35ac2945ab5 100644
--- a/spec/frontend/jobs/store/utils_spec.js
+++ b/spec/frontend/jobs/store/utils_spec.js
@@ -1,5 +1,6 @@
import {
logLinesParser,
+ logLinesParserLegacy,
updateIncrementalTrace,
parseHeaderLine,
parseLine,
@@ -17,6 +18,7 @@ import {
headerTraceIncremental,
collapsibleTrace,
collapsibleTraceIncremental,
+ multipleCollapsibleSectionsMockData,
} from '../components/log/mock_data';
describe('Jobs Store Utils', () => {
@@ -175,11 +177,11 @@ describe('Jobs Store Utils', () => {
expect(isCollapsibleSection()).toEqual(false);
});
});
- describe('logLinesParser', () => {
+ describe('logLinesParserLegacy', () => {
let result;
beforeEach(() => {
- result = logLinesParser(utilsMockData);
+ result = logLinesParserLegacy(utilsMockData);
});
describe('regular line', () => {
@@ -216,6 +218,87 @@ describe('Jobs Store Utils', () => {
});
});
+ describe('logLinesParser', () => {
+ let result;
+
+ beforeEach(() => {
+ result = logLinesParser(utilsMockData);
+ });
+
+ describe('regular line', () => {
+ it('adds a lineNumber property with correct index', () => {
+ expect(result.parsedLines[0].lineNumber).toEqual(1);
+ expect(result.parsedLines[1].line.lineNumber).toEqual(2);
+ });
+ });
+
+ describe('collapsible section', () => {
+ it('adds a `isClosed` property', () => {
+ expect(result.parsedLines[1].isClosed).toEqual(false);
+ });
+
+ it('adds a `isHeader` property', () => {
+ expect(result.parsedLines[1].isHeader).toEqual(true);
+ });
+
+ it('creates a lines array property with the content of the collapsible section', () => {
+ expect(result.parsedLines[1].lines.length).toEqual(2);
+ expect(result.parsedLines[1].lines[0].content).toEqual(utilsMockData[2].content);
+ expect(result.parsedLines[1].lines[1].content).toEqual(utilsMockData[3].content);
+ });
+ });
+
+ describe('section duration', () => {
+ it('adds the section information to the header section', () => {
+ expect(result.parsedLines[1].line.section_duration).toEqual(
+ utilsMockData[4].section_duration,
+ );
+ });
+
+ it('does not add section duration as a line', () => {
+ expect(result.parsedLines[1].lines.includes(utilsMockData[4])).toEqual(false);
+ });
+ });
+
+ describe('multiple collapsible sections', () => {
+ beforeEach(() => {
+ result = logLinesParser(multipleCollapsibleSectionsMockData);
+ });
+
+ it('should contain a section inside another section', () => {
+ const innerSection = [
+ {
+ isClosed: false,
+ isHeader: true,
+ line: {
+ content: [{ text: '1st collapsible section' }],
+ lineNumber: 6,
+ offset: 1006,
+ section: 'collapsible-1',
+ section_duration: '01:00',
+ section_header: true,
+ },
+ lines: [
+ {
+ content: [
+ {
+ text:
+ 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam lorem dolor, congue ac condimentum vitae',
+ },
+ ],
+ lineNumber: 7,
+ offset: 1007,
+ section: 'collapsible-1',
+ },
+ ],
+ },
+ ];
+
+ expect(result.parsedLines[1].lines).toEqual(expect.arrayContaining(innerSection));
+ });
+ });
+ });
+
describe('findOffsetAndRemove', () => {
describe('when last item is header', () => {
const existingLog = [
@@ -391,7 +474,7 @@ describe('Jobs Store Utils', () => {
describe('updateIncrementalTrace', () => {
describe('without repeated section', () => {
it('concats and parses both arrays', () => {
- const oldLog = logLinesParser(originalTrace);
+ const oldLog = logLinesParserLegacy(originalTrace);
const result = updateIncrementalTrace(regularIncremental, oldLog);
expect(result).toEqual([
@@ -419,7 +502,7 @@ describe('Jobs Store Utils', () => {
describe('with regular line repeated offset', () => {
it('updates the last line and formats with the incremental part', () => {
- const oldLog = logLinesParser(originalTrace);
+ const oldLog = logLinesParserLegacy(originalTrace);
const result = updateIncrementalTrace(regularIncrementalRepeated, oldLog);
expect(result).toEqual([
@@ -438,7 +521,7 @@ describe('Jobs Store Utils', () => {
describe('with header line repeated', () => {
it('updates the header line and formats with the incremental part', () => {
- const oldLog = logLinesParser(headerTrace);
+ const oldLog = logLinesParserLegacy(headerTrace);
const result = updateIncrementalTrace(headerTraceIncremental, oldLog);
expect(result).toEqual([
@@ -464,7 +547,7 @@ describe('Jobs Store Utils', () => {
describe('with collapsible line repeated', () => {
it('updates the collapsible line and formats with the incremental part', () => {
- const oldLog = logLinesParser(collapsibleTrace);
+ const oldLog = logLinesParserLegacy(collapsibleTrace);
const result = updateIncrementalTrace(collapsibleTraceIncremental, oldLog);
expect(result).toEqual([
diff --git a/spec/frontend/lib/dompurify_spec.js b/spec/frontend/lib/dompurify_spec.js
index a01f86678e9..fa8dbb12a08 100644
--- a/spec/frontend/lib/dompurify_spec.js
+++ b/spec/frontend/lib/dompurify_spec.js
@@ -30,6 +30,9 @@ const unsafeUrls = [
`https://evil.url/${absoluteGon.sprite_file_icons}`,
];
+const forbiddenDataAttrs = ['data-remote', 'data-url', 'data-type', 'data-method'];
+const acceptedDataAttrs = ['data-random', 'data-custom'];
+
describe('~/lib/dompurify', () => {
let originalGon;
@@ -95,4 +98,17 @@ describe('~/lib/dompurify', () => {
expect(sanitize(htmlXlink)).toBe(expectedSanitized);
});
});
+
+ describe('handles data attributes correctly', () => {
+ it.each(forbiddenDataAttrs)('removes %s attributes', (attr) => {
+ const htmlHref = `<a ${attr}="true">hello</a>`;
+ expect(sanitize(htmlHref)).toBe('<a>hello</a>');
+ });
+
+ it.each(acceptedDataAttrs)('does not remove %s attributes', (attr) => {
+ const attrWithValue = `${attr}="true"`;
+ const htmlHref = `<a ${attrWithValue}>hello</a>`;
+ expect(sanitize(htmlHref)).toBe(`<a ${attrWithValue}>hello</a>`);
+ });
+ });
});
diff --git a/spec/frontend/lib/graphql_spec.js b/spec/frontend/lib/graphql_spec.js
new file mode 100644
index 00000000000..a39ce2ffd99
--- /dev/null
+++ b/spec/frontend/lib/graphql_spec.js
@@ -0,0 +1,54 @@
+import getPipelineDetails from 'shared_queries/pipelines/get_pipeline_details.query.graphql';
+import { stripWhitespaceFromQuery } from '~/lib/graphql';
+import { queryToObject } from '~/lib/utils/url_utility';
+
+describe('stripWhitespaceFromQuery', () => {
+ const operationName = 'getPipelineDetails';
+ const variables = `{
+ projectPath: 'root/abcd-dag',
+ iid: '44'
+ }`;
+
+ const testQuery = getPipelineDetails.loc.source.body;
+ const defaultPath = '/api/graphql';
+ const encodedVariables = encodeURIComponent(variables);
+
+ it('shortens the query argument by replacing multiple spaces and newlines with a single space', () => {
+ const testString = `${defaultPath}?query=${encodeURIComponent(testQuery)}`;
+ expect(testString.length > stripWhitespaceFromQuery(testString, defaultPath).length).toBe(true);
+ });
+
+ it('does not contract a single space', () => {
+ const simpleSingleString = `${defaultPath}?query=${encodeURIComponent('fragment Nonsense')}`;
+ expect(stripWhitespaceFromQuery(simpleSingleString, defaultPath)).toEqual(simpleSingleString);
+ });
+
+ it('works with a non-default path', () => {
+ const newPath = 'another/graphql/path';
+ const newPathSingleString = `${newPath}?query=${encodeURIComponent('fragment Nonsense')}`;
+ expect(stripWhitespaceFromQuery(newPathSingleString, newPath)).toEqual(newPathSingleString);
+ });
+
+ it('does not alter other arguments', () => {
+ const bareParams = `?query=${encodeURIComponent(
+ testQuery,
+ )}&operationName=${operationName}&variables=${encodedVariables}`;
+ const testLongString = `${defaultPath}${bareParams}`;
+
+ const processed = stripWhitespaceFromQuery(testLongString, defaultPath);
+ const decoded = decodeURIComponent(processed);
+ const params = queryToObject(decoded);
+
+ expect(params.operationName).toBe(operationName);
+ expect(params.variables).toBe(variables);
+ });
+
+ it('works when there are no query params', () => {
+ expect(stripWhitespaceFromQuery(defaultPath, defaultPath)).toEqual(defaultPath);
+ });
+
+ it('works when the params do not include a query', () => {
+ const paramsWithoutQuery = `${defaultPath}&variables=${encodedVariables}`;
+ expect(stripWhitespaceFromQuery(paramsWithoutQuery, defaultPath)).toEqual(paramsWithoutQuery);
+ });
+});
diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js
index e03d1ef7295..f5a74ee7f09 100644
--- a/spec/frontend/lib/utils/common_utils_spec.js
+++ b/spec/frontend/lib/utils/common_utils_spec.js
@@ -1,6 +1,56 @@
import * as commonUtils from '~/lib/utils/common_utils';
describe('common_utils', () => {
+ describe('getPagePath', () => {
+ const { getPagePath } = commonUtils;
+
+ let originalBody;
+
+ beforeEach(() => {
+ originalBody = document.body;
+ document.body = document.createElement('body');
+ });
+
+ afterEach(() => {
+ document.body = originalBody;
+ });
+
+ it('returns an empty path if none is defined', () => {
+ expect(getPagePath()).toBe('');
+ expect(getPagePath(0)).toBe('');
+ });
+
+ describe('returns a path', () => {
+ const mockSection = 'my_section';
+ const mockSubSection = 'my_sub_section';
+ const mockPage = 'my_page';
+
+ it('returns a page', () => {
+ document.body.dataset.page = mockPage;
+
+ expect(getPagePath()).toBe(mockPage);
+ expect(getPagePath(0)).toBe(mockPage);
+ });
+
+ it('returns a section and page', () => {
+ document.body.dataset.page = `${mockSection}:${mockPage}`;
+
+ expect(getPagePath()).toBe(mockSection);
+ expect(getPagePath(0)).toBe(mockSection);
+ expect(getPagePath(1)).toBe(mockPage);
+ });
+
+ it('returns a section and subsection', () => {
+ document.body.dataset.page = `${mockSection}:${mockSubSection}:${mockPage}`;
+
+ expect(getPagePath()).toBe(mockSection);
+ expect(getPagePath(0)).toBe(mockSection);
+ expect(getPagePath(1)).toBe(mockSubSection);
+ expect(getPagePath(2)).toBe(mockPage);
+ });
+ });
+ });
+
describe('parseUrl', () => {
it('returns an anchor tag with url', () => {
expect(commonUtils.parseUrl('/some/absolute/url').pathname).toContain('some/absolute/url');
@@ -26,42 +76,6 @@ describe('common_utils', () => {
});
});
- describe('urlParamsToArray', () => {
- it('returns empty array for empty querystring', () => {
- expect(commonUtils.urlParamsToArray('')).toEqual([]);
- });
-
- it('should decode params', () => {
- expect(commonUtils.urlParamsToArray('?label_name%5B%5D=test')[0]).toBe('label_name[]=test');
- });
-
- it('should remove the question mark from the search params', () => {
- const paramsArray = commonUtils.urlParamsToArray('?test=thing');
-
- expect(paramsArray[0][0]).not.toBe('?');
- });
- });
-
- describe('urlParamsToObject', () => {
- it('parses path for label with trailing +', () => {
- expect(commonUtils.urlParamsToObject('label_name[]=label%2B', {})).toEqual({
- label_name: ['label+'],
- });
- });
-
- it('parses path for milestone with trailing +', () => {
- expect(commonUtils.urlParamsToObject('milestone_title=A%2B', {})).toEqual({
- milestone_title: 'A+',
- });
- });
-
- it('parses path for search terms with spaces', () => {
- expect(commonUtils.urlParamsToObject('search=two+words', {})).toEqual({
- search: 'two words',
- });
- });
- });
-
describe('handleLocationHash', () => {
beforeEach(() => {
jest.spyOn(window.document, 'getElementById');
@@ -175,33 +189,6 @@ describe('common_utils', () => {
});
});
- describe('parseQueryStringIntoObject', () => {
- it('should return object with query parameters', () => {
- expect(commonUtils.parseQueryStringIntoObject('scope=all&page=2')).toEqual({
- scope: 'all',
- page: '2',
- });
-
- expect(commonUtils.parseQueryStringIntoObject('scope=all')).toEqual({ scope: 'all' });
- expect(commonUtils.parseQueryStringIntoObject()).toEqual({});
- });
- });
-
- describe('objectToQueryString', () => {
- it('returns empty string when `param` is undefined, null or empty string', () => {
- expect(commonUtils.objectToQueryString()).toBe('');
- expect(commonUtils.objectToQueryString('')).toBe('');
- });
-
- it('returns query string with values of `params`', () => {
- const singleQueryParams = { foo: true };
- const multipleQueryParams = { foo: true, bar: true };
-
- expect(commonUtils.objectToQueryString(singleQueryParams)).toBe('foo=true');
- expect(commonUtils.objectToQueryString(multipleQueryParams)).toBe('foo=true&bar=true');
- });
- });
-
describe('buildUrlWithCurrentLocation', () => {
it('should build an url with current location and given parameters', () => {
expect(commonUtils.buildUrlWithCurrentLocation()).toEqual(window.location.pathname);
@@ -310,39 +297,6 @@ describe('common_utils', () => {
});
});
- describe('getParameterByName', () => {
- beforeEach(() => {
- window.history.pushState({}, null, '?scope=all&p=2');
- });
-
- afterEach(() => {
- window.history.replaceState({}, null, null);
- });
-
- it('should return valid parameter', () => {
- const value = commonUtils.getParameterByName('scope');
-
- expect(commonUtils.getParameterByName('p')).toEqual('2');
- expect(value).toBe('all');
- });
-
- it('should return invalid parameter', () => {
- const value = commonUtils.getParameterByName('fakeParameter');
-
- expect(value).toBe(null);
- });
-
- it('should return valid paramentes if URL is provided', () => {
- let value = commonUtils.getParameterByName('foo', 'http://cocteau.twins/?foo=bar');
-
- expect(value).toBe('bar');
-
- value = commonUtils.getParameterByName('manan', 'http://cocteau.twins/?foo=bar&manan=canchu');
-
- expect(value).toBe('canchu');
- });
- });
-
describe('normalizedHeaders', () => {
it('should upperCase all the header keys to keep them consistent', () => {
const apiHeaders = {
diff --git a/spec/frontend/lib/utils/datetime/timeago_utility_spec.js b/spec/frontend/lib/utils/datetime/timeago_utility_spec.js
new file mode 100644
index 00000000000..2314ec678d3
--- /dev/null
+++ b/spec/frontend/lib/utils/datetime/timeago_utility_spec.js
@@ -0,0 +1,103 @@
+import { getTimeago, localTimeAgo, timeFor } from '~/lib/utils/datetime/timeago_utility';
+import { s__ } from '~/locale';
+import '~/commons/bootstrap';
+
+describe('TimeAgo utils', () => {
+ let oldGon;
+
+ afterEach(() => {
+ window.gon = oldGon;
+ });
+
+ beforeEach(() => {
+ oldGon = window.gon;
+ });
+
+ describe('getTimeago', () => {
+ describe('with User Setting timeDisplayRelative: true', () => {
+ beforeEach(() => {
+ window.gon = { time_display_relative: true };
+ });
+
+ it.each([
+ [new Date().toISOString(), 'just now'],
+ [new Date().getTime(), 'just now'],
+ [new Date(), 'just now'],
+ [null, 'just now'],
+ ])('formats date `%p` as `%p`', (date, result) => {
+ expect(getTimeago().format(date)).toEqual(result);
+ });
+ });
+
+ describe('with User Setting timeDisplayRelative: false', () => {
+ beforeEach(() => {
+ window.gon = { time_display_relative: false };
+ });
+
+ it.each([
+ [new Date().toISOString(), 'Jul 6, 2020, 12:00 AM'],
+ [new Date(), 'Jul 6, 2020, 12:00 AM'],
+ [new Date().getTime(), 'Jul 6, 2020, 12:00 AM'],
+ // Slightly different behaviour when `null` is passed :see_no_evil`
+ [null, 'Jan 1, 1970, 12:00 AM'],
+ ])('formats date `%p` as `%p`', (date, result) => {
+ expect(getTimeago().format(date)).toEqual(result);
+ });
+ });
+ });
+
+ describe('timeFor', () => {
+ it('returns localize `past due` when in past', () => {
+ const date = new Date();
+ date.setFullYear(date.getFullYear() - 1);
+
+ expect(timeFor(date)).toBe(s__('Timeago|Past due'));
+ });
+
+ it('returns localized remaining time when in the future', () => {
+ const date = new Date();
+ date.setFullYear(date.getFullYear() + 1);
+
+ // Add a day to prevent a transient error. If date is even 1 second
+ // short of a full year, timeFor will return '11 months remaining'
+ date.setDate(date.getDate() + 1);
+
+ expect(timeFor(date)).toBe(s__('Timeago|1 year remaining'));
+ });
+ });
+
+ describe('localTimeAgo', () => {
+ beforeEach(() => {
+ document.body.innerHTML =
+ '<time title="some time" datetime="2020-02-18T22:22:32Z">1 hour ago</time>';
+ });
+
+ describe.each`
+ timeDisplayRelative | text
+ ${true} | ${'4 months ago'}
+ ${false} | ${'Feb 18, 2020, 10:22 PM'}
+ `(
+ `With User Setting timeDisplayRelative: $timeDisplayRelative`,
+ ({ timeDisplayRelative, text }) => {
+ it.each`
+ updateTooltip | title
+ ${false} | ${'some time'}
+ ${true} | ${'Feb 18, 2020 10:22pm UTC'}
+ `(
+ `has content: '${text}' and tooltip: '$title' with updateTooltip = $updateTooltip`,
+ ({ updateTooltip, title }) => {
+ window.gon = { time_display_relative: timeDisplayRelative };
+
+ const element = document.querySelector('time');
+ localTimeAgo([element], updateTooltip);
+
+ jest.runAllTimers();
+
+ expect(element.getAttribute('title')).toBe(title);
+ expect(element.innerText).toBe(text);
+ },
+ );
+ },
+ );
+ });
+});
diff --git a/spec/frontend/lib/utils/datetime_utility_spec.js b/spec/frontend/lib/utils/datetime_utility_spec.js
index df0ccb19cb7..f6ad41d5478 100644
--- a/spec/frontend/lib/utils/datetime_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime_utility_spec.js
@@ -1,30 +1,9 @@
-import $ from 'jquery';
import timezoneMock from 'timezone-mock';
import * as datetimeUtility from '~/lib/utils/datetime_utility';
import { __, s__ } from '~/locale';
import '~/commons/bootstrap';
describe('Date time utils', () => {
- describe('timeFor', () => {
- it('returns localize `past due` when in past', () => {
- const date = new Date();
- date.setFullYear(date.getFullYear() - 1);
-
- expect(datetimeUtility.timeFor(date)).toBe(s__('Timeago|Past due'));
- });
-
- it('returns localized remaining time when in the future', () => {
- const date = new Date();
- date.setFullYear(date.getFullYear() + 1);
-
- // Add a day to prevent a transient error. If date is even 1 second
- // short of a full year, timeFor will return '11 months remaining'
- date.setDate(date.getDate() + 1);
-
- expect(datetimeUtility.timeFor(date)).toBe(s__('Timeago|1 year remaining'));
- });
- });
-
describe('get localized day name', () => {
it('should return Sunday', () => {
const day = datetimeUtility.getDayName(new Date('07/17/2016'));
@@ -870,25 +849,6 @@ describe('approximateDuration', () => {
});
});
-describe('localTimeAgo', () => {
- beforeEach(() => {
- document.body.innerHTML = `<time title="some time" datetime="2020-02-18T22:22:32Z">1 hour ago</time>`;
- });
-
- it.each`
- timeagoArg | title
- ${false} | ${'some time'}
- ${true} | ${'Feb 18, 2020 10:22pm UTC'}
- `('converts $seconds seconds to $approximation', ({ timeagoArg, title }) => {
- const element = document.querySelector('time');
- datetimeUtility.localTimeAgo($(element), timeagoArg);
-
- jest.runAllTimers();
-
- expect(element.getAttribute('title')).toBe(title);
- });
-});
-
describe('differenceInSeconds', () => {
const startDateTime = new Date('2019-07-17T00:00:00.000Z');
diff --git a/spec/frontend/lib/utils/finite_state_machine_spec.js b/spec/frontend/lib/utils/finite_state_machine_spec.js
new file mode 100644
index 00000000000..441dd24c758
--- /dev/null
+++ b/spec/frontend/lib/utils/finite_state_machine_spec.js
@@ -0,0 +1,293 @@
+import { machine, transition } from '~/lib/utils/finite_state_machine';
+
+describe('Finite State Machine', () => {
+ const STATE_IDLE = 'idle';
+ const STATE_LOADING = 'loading';
+ const STATE_ERRORED = 'errored';
+
+ const TRANSITION_START_LOAD = 'START_LOAD';
+ const TRANSITION_LOAD_ERROR = 'LOAD_ERROR';
+ const TRANSITION_LOAD_SUCCESS = 'LOAD_SUCCESS';
+ const TRANSITION_ACKNOWLEDGE_ERROR = 'ACKNOWLEDGE_ERROR';
+
+ const definition = {
+ initial: STATE_IDLE,
+ states: {
+ [STATE_IDLE]: {
+ on: {
+ [TRANSITION_START_LOAD]: STATE_LOADING,
+ },
+ },
+ [STATE_LOADING]: {
+ on: {
+ [TRANSITION_LOAD_ERROR]: STATE_ERRORED,
+ [TRANSITION_LOAD_SUCCESS]: STATE_IDLE,
+ },
+ },
+ [STATE_ERRORED]: {
+ on: {
+ [TRANSITION_ACKNOWLEDGE_ERROR]: STATE_IDLE,
+ [TRANSITION_START_LOAD]: STATE_LOADING,
+ },
+ },
+ },
+ };
+
+ describe('machine', () => {
+ const STATE_IMPOSSIBLE = 'impossible';
+ const badDefinition = {
+ init: definition.initial,
+ badKeyShouldBeStates: definition.states,
+ };
+ const unstartableDefinition = {
+ initial: STATE_IMPOSSIBLE,
+ states: definition.states,
+ };
+ let liveMachine;
+
+ beforeEach(() => {
+ liveMachine = machine(definition);
+ });
+
+ it('throws an error if the machine definition is invalid', () => {
+ expect(() => machine(badDefinition)).toThrowError(
+ 'A state machine must have an initial state (`.initial`) and a dictionary of possible states (`.states`)',
+ );
+ });
+
+ it('throws an error if the initial state is invalid', () => {
+ expect(() => machine(unstartableDefinition)).toThrowError(
+ `Cannot initialize the state machine to state '${STATE_IMPOSSIBLE}'. Is that one of the machine's defined states?`,
+ );
+ });
+
+ it.each`
+ partOfMachine | equals | description | eqDescription
+ ${'keys'} | ${['is', 'send', 'value', 'states']} | ${'keys'} | ${'the correct array'}
+ ${'is'} | ${expect.any(Function)} | ${'`is` property'} | ${'a function'}
+ ${'send'} | ${expect.any(Function)} | ${'`send` property'} | ${'a function'}
+ ${'value'} | ${definition.initial} | ${'`value` property'} | ${'the same as the `initial` value of the machine definition'}
+ ${'states'} | ${definition.states} | ${'`states` property'} | ${'the same as the `states` value of the machine definition'}
+ `("The machine's $description should be $eqDescription", ({ partOfMachine, equals }) => {
+ const test = partOfMachine === 'keys' ? Object.keys(liveMachine) : liveMachine[partOfMachine];
+
+ expect(test).toEqual(equals);
+ });
+
+ it.each`
+ initialState | transitionEvent | expectedState
+ ${definition.initial} | ${TRANSITION_START_LOAD} | ${STATE_LOADING}
+ ${STATE_LOADING} | ${TRANSITION_LOAD_ERROR} | ${STATE_ERRORED}
+ ${STATE_ERRORED} | ${TRANSITION_ACKNOWLEDGE_ERROR} | ${STATE_IDLE}
+ ${STATE_IDLE} | ${TRANSITION_START_LOAD} | ${STATE_LOADING}
+ ${STATE_LOADING} | ${TRANSITION_LOAD_SUCCESS} | ${STATE_IDLE}
+ `(
+ 'properly steps from $initialState to $expectedState when the event "$transitionEvent" is sent',
+ ({ initialState, transitionEvent, expectedState }) => {
+ liveMachine.value = initialState;
+
+ liveMachine.send(transitionEvent);
+
+ expect(liveMachine.is(expectedState)).toBe(true);
+ expect(liveMachine.value).toBe(expectedState);
+ },
+ );
+
+ it.each`
+ initialState | transitionEvent
+ ${STATE_IDLE} | ${TRANSITION_ACKNOWLEDGE_ERROR}
+ ${STATE_IDLE} | ${TRANSITION_LOAD_SUCCESS}
+ ${STATE_IDLE} | ${TRANSITION_LOAD_ERROR}
+ ${STATE_IDLE} | ${'RANDOM_FOO'}
+ ${STATE_LOADING} | ${TRANSITION_START_LOAD}
+ ${STATE_LOADING} | ${TRANSITION_ACKNOWLEDGE_ERROR}
+ ${STATE_LOADING} | ${'RANDOM_FOO'}
+ ${STATE_ERRORED} | ${TRANSITION_LOAD_ERROR}
+ ${STATE_ERRORED} | ${TRANSITION_LOAD_SUCCESS}
+ ${STATE_ERRORED} | ${'RANDOM_FOO'}
+ `(
+ `does not perform any transition if the machine can't move from "$initialState" using the "$transitionEvent" event`,
+ ({ initialState, transitionEvent }) => {
+ liveMachine.value = initialState;
+
+ liveMachine.send(transitionEvent);
+
+ expect(liveMachine.is(initialState)).toBe(true);
+ expect(liveMachine.value).toBe(initialState);
+ },
+ );
+
+ describe('send', () => {
+ it.each`
+ startState | transitionEvent | result
+ ${STATE_IDLE} | ${TRANSITION_START_LOAD} | ${STATE_LOADING}
+ ${STATE_LOADING} | ${TRANSITION_LOAD_SUCCESS} | ${STATE_IDLE}
+ ${STATE_LOADING} | ${TRANSITION_LOAD_ERROR} | ${STATE_ERRORED}
+ ${STATE_ERRORED} | ${TRANSITION_ACKNOWLEDGE_ERROR} | ${STATE_IDLE}
+ ${STATE_ERRORED} | ${TRANSITION_START_LOAD} | ${STATE_LOADING}
+ `(
+ 'successfully transitions to $result from $startState when the transition $transitionEvent is received',
+ ({ startState, transitionEvent, result }) => {
+ liveMachine.value = startState;
+
+ expect(liveMachine.send(transitionEvent)).toEqual(result);
+ },
+ );
+
+ it.each`
+ startState | transitionEvent
+ ${STATE_IDLE} | ${TRANSITION_ACKNOWLEDGE_ERROR}
+ ${STATE_IDLE} | ${TRANSITION_LOAD_SUCCESS}
+ ${STATE_IDLE} | ${TRANSITION_LOAD_ERROR}
+ ${STATE_IDLE} | ${'RANDOM_FOO'}
+ ${STATE_LOADING} | ${TRANSITION_START_LOAD}
+ ${STATE_LOADING} | ${TRANSITION_ACKNOWLEDGE_ERROR}
+ ${STATE_LOADING} | ${'RANDOM_FOO'}
+ ${STATE_ERRORED} | ${TRANSITION_LOAD_ERROR}
+ ${STATE_ERRORED} | ${TRANSITION_LOAD_SUCCESS}
+ ${STATE_ERRORED} | ${'RANDOM_FOO'}
+ `(
+ 'remains as $startState if an undefined transition ($transitionEvent) is received',
+ ({ startState, transitionEvent }) => {
+ liveMachine.value = startState;
+
+ expect(liveMachine.send(transitionEvent)).toEqual(startState);
+ },
+ );
+
+ describe('detached', () => {
+ it.each`
+ startState | transitionEvent | result
+ ${STATE_IDLE} | ${TRANSITION_START_LOAD} | ${STATE_LOADING}
+ ${STATE_LOADING} | ${TRANSITION_LOAD_SUCCESS} | ${STATE_IDLE}
+ ${STATE_LOADING} | ${TRANSITION_LOAD_ERROR} | ${STATE_ERRORED}
+ ${STATE_ERRORED} | ${TRANSITION_ACKNOWLEDGE_ERROR} | ${STATE_IDLE}
+ ${STATE_ERRORED} | ${TRANSITION_START_LOAD} | ${STATE_LOADING}
+ `(
+ 'successfully transitions to $result from $startState when the transition $transitionEvent is received outside the context of the machine',
+ ({ startState, transitionEvent, result }) => {
+ const liveSend = machine({
+ ...definition,
+ initial: startState,
+ }).send;
+
+ expect(liveSend(transitionEvent)).toEqual(result);
+ },
+ );
+
+ it.each`
+ startState | transitionEvent
+ ${STATE_IDLE} | ${TRANSITION_ACKNOWLEDGE_ERROR}
+ ${STATE_IDLE} | ${TRANSITION_LOAD_SUCCESS}
+ ${STATE_IDLE} | ${TRANSITION_LOAD_ERROR}
+ ${STATE_IDLE} | ${'RANDOM_FOO'}
+ ${STATE_LOADING} | ${TRANSITION_START_LOAD}
+ ${STATE_LOADING} | ${TRANSITION_ACKNOWLEDGE_ERROR}
+ ${STATE_LOADING} | ${'RANDOM_FOO'}
+ ${STATE_ERRORED} | ${TRANSITION_LOAD_ERROR}
+ ${STATE_ERRORED} | ${TRANSITION_LOAD_SUCCESS}
+ ${STATE_ERRORED} | ${'RANDOM_FOO'}
+ `(
+ 'remains as $startState if an undefined transition ($transitionEvent) is received',
+ ({ startState, transitionEvent }) => {
+ const liveSend = machine({
+ ...definition,
+ initial: startState,
+ }).send;
+
+ expect(liveSend(transitionEvent)).toEqual(startState);
+ },
+ );
+ });
+ });
+
+ describe('is', () => {
+ it.each`
+ bool | test | actual
+ ${true} | ${STATE_IDLE} | ${STATE_IDLE}
+ ${false} | ${STATE_LOADING} | ${STATE_IDLE}
+ ${false} | ${STATE_ERRORED} | ${STATE_IDLE}
+ ${true} | ${STATE_LOADING} | ${STATE_LOADING}
+ ${false} | ${STATE_IDLE} | ${STATE_LOADING}
+ ${false} | ${STATE_ERRORED} | ${STATE_LOADING}
+ ${true} | ${STATE_ERRORED} | ${STATE_ERRORED}
+ ${false} | ${STATE_IDLE} | ${STATE_ERRORED}
+ ${false} | ${STATE_LOADING} | ${STATE_ERRORED}
+ `(
+ 'returns "$bool" for "$test" when the current state is "$actual"',
+ ({ bool, test, actual }) => {
+ liveMachine = machine({
+ ...definition,
+ initial: actual,
+ });
+
+ expect(liveMachine.is(test)).toEqual(bool);
+ },
+ );
+
+ describe('detached', () => {
+ it.each`
+ bool | test | actual
+ ${true} | ${STATE_IDLE} | ${STATE_IDLE}
+ ${false} | ${STATE_LOADING} | ${STATE_IDLE}
+ ${false} | ${STATE_ERRORED} | ${STATE_IDLE}
+ ${true} | ${STATE_LOADING} | ${STATE_LOADING}
+ ${false} | ${STATE_IDLE} | ${STATE_LOADING}
+ ${false} | ${STATE_ERRORED} | ${STATE_LOADING}
+ ${true} | ${STATE_ERRORED} | ${STATE_ERRORED}
+ ${false} | ${STATE_IDLE} | ${STATE_ERRORED}
+ ${false} | ${STATE_LOADING} | ${STATE_ERRORED}
+ `(
+ 'returns "$bool" for "$test" when the current state is "$actual"',
+ ({ bool, test, actual }) => {
+ const liveIs = machine({
+ ...definition,
+ initial: actual,
+ }).is;
+
+ expect(liveIs(test)).toEqual(bool);
+ },
+ );
+ });
+ });
+ });
+
+ describe('transition', () => {
+ it.each`
+ startState | transitionEvent | result
+ ${STATE_IDLE} | ${TRANSITION_START_LOAD} | ${STATE_LOADING}
+ ${STATE_LOADING} | ${TRANSITION_LOAD_SUCCESS} | ${STATE_IDLE}
+ ${STATE_LOADING} | ${TRANSITION_LOAD_ERROR} | ${STATE_ERRORED}
+ ${STATE_ERRORED} | ${TRANSITION_ACKNOWLEDGE_ERROR} | ${STATE_IDLE}
+ ${STATE_ERRORED} | ${TRANSITION_START_LOAD} | ${STATE_LOADING}
+ `(
+ 'successfully transitions to $result from $startState when the transition $transitionEvent is received',
+ ({ startState, transitionEvent, result }) => {
+ expect(transition(definition, startState, transitionEvent)).toEqual(result);
+ },
+ );
+
+ it.each`
+ startState | transitionEvent
+ ${STATE_IDLE} | ${TRANSITION_ACKNOWLEDGE_ERROR}
+ ${STATE_IDLE} | ${TRANSITION_LOAD_SUCCESS}
+ ${STATE_IDLE} | ${TRANSITION_LOAD_ERROR}
+ ${STATE_IDLE} | ${'RANDOM_FOO'}
+ ${STATE_LOADING} | ${TRANSITION_START_LOAD}
+ ${STATE_LOADING} | ${TRANSITION_ACKNOWLEDGE_ERROR}
+ ${STATE_LOADING} | ${'RANDOM_FOO'}
+ ${STATE_ERRORED} | ${TRANSITION_LOAD_ERROR}
+ ${STATE_ERRORED} | ${TRANSITION_LOAD_SUCCESS}
+ ${STATE_ERRORED} | ${'RANDOM_FOO'}
+ `(
+ 'remains as $startState if an undefined transition ($transitionEvent) is received',
+ ({ startState, transitionEvent }) => {
+ expect(transition(definition, startState, transitionEvent)).toEqual(startState);
+ },
+ );
+
+ it('remains as the provided starting state if it is an unrecognized state', () => {
+ expect(transition(definition, 'RANDOM_FOO', TRANSITION_START_LOAD)).toEqual('RANDOM_FOO');
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/text_markdown_spec.js b/spec/frontend/lib/utils/text_markdown_spec.js
index cad500039c0..beedb9b2eba 100644
--- a/spec/frontend/lib/utils/text_markdown_spec.js
+++ b/spec/frontend/lib/utils/text_markdown_spec.js
@@ -300,7 +300,7 @@ describe('init markdown', () => {
});
});
- describe('Editor Lite', () => {
+ describe('Source Editor', () => {
let editor;
beforeEach(() => {
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index 31c78681994..66d0faa95e7 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -24,6 +24,16 @@ const setWindowLocation = (value) => {
};
describe('URL utility', () => {
+ let originalLocation;
+
+ beforeAll(() => {
+ originalLocation = window.location;
+ });
+
+ afterAll(() => {
+ window.location = originalLocation;
+ });
+
describe('webIDEUrl', () => {
afterEach(() => {
gon.relative_url_root = '';
@@ -319,19 +329,17 @@ describe('URL utility', () => {
});
describe('doesHashExistInUrl', () => {
- it('should return true when the given string exists in the URL hash', () => {
+ beforeEach(() => {
setWindowLocation({
- href: 'https://gitlab.com/gitlab-org/gitlab-test/issues/1#note_1',
+ hash: 'https://gitlab.com/gitlab-org/gitlab-test/issues/1#note_1',
});
+ });
+ it('should return true when the given string exists in the URL hash', () => {
expect(urlUtils.doesHashExistInUrl('note_')).toBe(true);
});
it('should return false when the given string does not exist in the URL hash', () => {
- setWindowLocation({
- href: 'https://gitlab.com/gitlab-org/gitlab-test/issues/1#note_1',
- });
-
expect(urlUtils.doesHashExistInUrl('doesnotexist')).toBe(false);
});
});
@@ -651,6 +659,45 @@ describe('URL utility', () => {
});
});
+ describe('urlParamsToArray', () => {
+ it('returns empty array for empty querystring', () => {
+ expect(urlUtils.urlParamsToArray('')).toEqual([]);
+ });
+
+ it('should decode params', () => {
+ expect(urlUtils.urlParamsToArray('?label_name%5B%5D=test')[0]).toBe('label_name[]=test');
+ });
+
+ it('should remove the question mark from the search params', () => {
+ const paramsArray = urlUtils.urlParamsToArray('?test=thing');
+
+ expect(paramsArray[0][0]).not.toBe('?');
+ });
+ });
+
+ describe('urlParamsToObject', () => {
+ it('parses path for label with trailing +', () => {
+ // eslint-disable-next-line import/no-deprecated
+ expect(urlUtils.urlParamsToObject('label_name[]=label%2B', {})).toEqual({
+ label_name: ['label+'],
+ });
+ });
+
+ it('parses path for milestone with trailing +', () => {
+ // eslint-disable-next-line import/no-deprecated
+ expect(urlUtils.urlParamsToObject('milestone_title=A%2B', {})).toEqual({
+ milestone_title: 'A+',
+ });
+ });
+
+ it('parses path for search terms with spaces', () => {
+ // eslint-disable-next-line import/no-deprecated
+ expect(urlUtils.urlParamsToObject('search=two+words', {})).toEqual({
+ search: 'two words',
+ });
+ });
+ });
+
describe('queryToObject', () => {
it.each`
case | query | options | result
@@ -673,12 +720,68 @@ describe('URL utility', () => {
});
});
+ describe('getParameterByName', () => {
+ const { getParameterByName } = urlUtils;
+
+ it('should return valid parameter', () => {
+ setWindowLocation({ search: '?scope=all&p=2' });
+
+ expect(getParameterByName('p')).toEqual('2');
+ expect(getParameterByName('scope')).toBe('all');
+ });
+
+ it('should return invalid parameter', () => {
+ setWindowLocation({ search: '?scope=all&p=2' });
+
+ expect(getParameterByName('fakeParameter')).toBe(null);
+ });
+
+ it('should return a parameter with spaces', () => {
+ setWindowLocation({ search: '?search=my terms' });
+
+ expect(getParameterByName('search')).toBe('my terms');
+ });
+
+ it('should return a parameter with encoded spaces', () => {
+ setWindowLocation({ search: '?search=my%20terms' });
+
+ expect(getParameterByName('search')).toBe('my terms');
+ });
+
+ it('should return a parameter with plus signs as spaces', () => {
+ setWindowLocation({ search: '?search=my+terms' });
+
+ expect(getParameterByName('search')).toBe('my terms');
+ });
+
+ it('should return valid parameters if search is provided', () => {
+ expect(getParameterByName('foo', 'foo=bar')).toBe('bar');
+ expect(getParameterByName('foo', '?foo=bar')).toBe('bar');
+
+ expect(getParameterByName('manan', 'foo=bar&manan=canchu')).toBe('canchu');
+ expect(getParameterByName('manan', '?foo=bar&manan=canchu')).toBe('canchu');
+ });
+ });
+
describe('objectToQuery', () => {
it('converts search query object back into a search query', () => {
const searchQueryObject = { one: '1', two: '2' };
expect(urlUtils.objectToQuery(searchQueryObject)).toEqual('one=1&two=2');
});
+
+ it('returns empty string when `params` is undefined, null or empty string', () => {
+ expect(urlUtils.objectToQuery()).toBe('');
+ expect(urlUtils.objectToQuery('')).toBe('');
+ });
+
+ it('returns query string with values of `params`', () => {
+ const singleQueryParams = { foo: true };
+ const multipleQueryParams = { foo: true, bar: true };
+
+ expect(urlUtils.objectToQuery(singleQueryParams)).toBe('foo=true');
+ expect(urlUtils.objectToQuery(multipleQueryParams)).toBe('foo=true&bar=true');
+ });
});
describe('cleanLeadingSeparator', () => {
diff --git a/spec/frontend/line_highlighter_spec.js b/spec/frontend/line_highlighter_spec.js
index b5a0adc9d49..97ae6c0e3b7 100644
--- a/spec/frontend/line_highlighter_spec.js
+++ b/spec/frontend/line_highlighter_spec.js
@@ -49,6 +49,15 @@ describe('LineHighlighter', () => {
}
});
+ it('highlights a range of lines given in the URL hash using GitHub format', () => {
+ new LineHighlighter({ hash: '#L5-L25' });
+
+ expect($(`.${testContext.css}`).length).toBe(21);
+ for (let line = 5; line <= 25; line += 1) {
+ expect($(`#LC${line}`)).toHaveClass(testContext.css);
+ }
+ });
+
it('scrolls to the first highlighted line on initial load', () => {
jest.spyOn(utils, 'scrollToElement');
new LineHighlighter({ hash: '#L5-25' });
diff --git a/spec/frontend/locale/index_spec.js b/spec/frontend/locale/index_spec.js
index a08be502735..220061fc64a 100644
--- a/spec/frontend/locale/index_spec.js
+++ b/spec/frontend/locale/index_spec.js
@@ -1,5 +1,5 @@
import { setLanguage } from 'helpers/locale_helper';
-import { createDateTimeFormat, formatNumber, languageCode } from '~/locale';
+import { createDateTimeFormat, formatNumber, languageCode, getPreferredLocales } from '~/locale';
describe('locale', () => {
afterEach(() => setLanguage(null));
@@ -18,13 +18,91 @@ describe('locale', () => {
});
});
+ describe('getPreferredLocales', () => {
+ beforeEach(() => {
+ // Need to spy on window.navigator.languages as it is read-only
+ jest
+ .spyOn(window.navigator, 'languages', 'get')
+ .mockReturnValueOnce(['en-GB', 'en-US', 'de-AT']);
+ });
+
+ it('filters navigator.languages by GitLab language', () => {
+ setLanguage('en');
+
+ expect(getPreferredLocales()).toEqual(['en-GB', 'en-US', 'en']);
+ });
+
+ it('filters navigator.languages by GitLab language without locale and sets English Fallback', () => {
+ setLanguage('de');
+
+ expect(getPreferredLocales()).toEqual(['de-AT', 'de', 'en']);
+ });
+
+ it('filters navigator.languages by GitLab language with locale and sets English Fallback', () => {
+ setLanguage('de-DE');
+
+ expect(getPreferredLocales()).toEqual(['de-AT', 'de-DE', 'de', 'en']);
+ });
+
+ it('adds GitLab language if navigator.languages does not contain it', () => {
+ setLanguage('es-ES');
+
+ expect(getPreferredLocales()).toEqual(['es-ES', 'es', 'en']);
+ });
+ });
+
describe('createDateTimeFormat', () => {
- beforeEach(() => setLanguage('en'));
+ const date = new Date(2015, 0, 3, 15, 13, 22);
+ const formatOptions = { dateStyle: 'long', timeStyle: 'medium' };
it('creates an instance of Intl.DateTimeFormat', () => {
- const dateFormat = createDateTimeFormat({ year: 'numeric', month: 'long', day: 'numeric' });
+ const dateFormat = createDateTimeFormat(formatOptions);
+
+ expect(dateFormat).toBeInstanceOf(Intl.DateTimeFormat);
+ });
+
+ it('falls back to `en` and GitLab language is default', () => {
+ setLanguage(null);
+ jest.spyOn(window.navigator, 'languages', 'get').mockReturnValueOnce(['de-AT', 'en-GB']);
+
+ const dateFormat = createDateTimeFormat(formatOptions);
+ expect(dateFormat.format(date)).toBe(
+ new Intl.DateTimeFormat('en-GB', formatOptions).format(date),
+ );
+ });
+
+ it('falls back to `en` locale if browser languages are empty', () => {
+ setLanguage('en');
+ jest.spyOn(window.navigator, 'languages', 'get').mockReturnValueOnce([]);
+
+ const dateFormat = createDateTimeFormat(formatOptions);
+ expect(dateFormat.format(date)).toBe(
+ new Intl.DateTimeFormat('en', formatOptions).format(date),
+ );
+ });
+
+ it('prefers `en-GB` if it is the preferred language and GitLab language is `en`', () => {
+ setLanguage('en');
+ jest
+ .spyOn(window.navigator, 'languages', 'get')
+ .mockReturnValueOnce(['en-GB', 'en-US', 'en']);
+
+ const dateFormat = createDateTimeFormat(formatOptions);
+ expect(dateFormat.format(date)).toBe(
+ new Intl.DateTimeFormat('en-GB', formatOptions).format(date),
+ );
+ });
+
+ it('prefers `de-AT` if it is GitLab language and not part of the browser languages', () => {
+ setLanguage('de-AT');
+ jest
+ .spyOn(window.navigator, 'languages', 'get')
+ .mockReturnValueOnce(['en-GB', 'en-US', 'en']);
- expect(dateFormat.format(new Date(2015, 6, 3))).toBe('July 3, 2015');
+ const dateFormat = createDateTimeFormat(formatOptions);
+ expect(dateFormat.format(date)).toBe(
+ new Intl.DateTimeFormat('de-AT', formatOptions).format(date),
+ );
});
});
diff --git a/spec/frontend/logs/stores/actions_spec.js b/spec/frontend/logs/stores/actions_spec.js
index 9307a3b62fb..46ef1500a20 100644
--- a/spec/frontend/logs/stores/actions_spec.js
+++ b/spec/frontend/logs/stores/actions_spec.js
@@ -1,6 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import { deprecatedCreateFlash as flash } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { convertToFixedRange } from '~/lib/utils/datetime_range';
import { TOKEN_TYPE_POD_NAME } from '~/logs/constants';
@@ -32,7 +31,6 @@ import {
mockNextCursor,
} from '../mock_data';
-jest.mock('~/flash');
jest.mock('~/lib/utils/datetime_range');
jest.mock('~/logs/utils');
@@ -75,10 +73,6 @@ describe('Logs Store actions', () => {
state = logsPageState();
});
- afterEach(() => {
- flash.mockClear();
- });
-
describe('setInitData', () => {
it('should commit environment and pod name mutation', () =>
testAction(
diff --git a/spec/frontend/members/components/app_spec.js b/spec/frontend/members/components/app_spec.js
index b9fdf8792fd..9590cd9d8d4 100644
--- a/spec/frontend/members/components/app_spec.js
+++ b/spec/frontend/members/components/app_spec.js
@@ -5,7 +5,8 @@ import Vuex from 'vuex';
import * as commonUtils from '~/lib/utils/common_utils';
import MembersApp from '~/members/components/app.vue';
import FilterSortContainer from '~/members/components/filter_sort/filter_sort_container.vue';
-import { MEMBER_TYPES } from '~/members/constants';
+import MembersTable from '~/members/components/table/members_table.vue';
+import { MEMBER_TYPES, TAB_QUERY_PARAM_VALUES } from '~/members/constants';
import { RECEIVE_MEMBER_ROLE_ERROR, HIDE_ERROR } from '~/members/store/mutation_types';
import mutations from '~/members/store/mutations';
@@ -19,7 +20,7 @@ describe('MembersApp', () => {
const createComponent = (state = {}, options = {}) => {
store = new Vuex.Store({
modules: {
- [MEMBER_TYPES.user]: {
+ [MEMBER_TYPES.group]: {
namespaced: true,
state: {
showError: true,
@@ -34,7 +35,8 @@ describe('MembersApp', () => {
wrapper = shallowMount(MembersApp, {
localVue,
propsData: {
- namespace: MEMBER_TYPES.user,
+ namespace: MEMBER_TYPES.group,
+ tabQueryParamValue: TAB_QUERY_PARAM_VALUES.group,
},
store,
...options,
@@ -57,7 +59,7 @@ describe('MembersApp', () => {
it('renders and scrolls to error alert', async () => {
createComponent({ showError: false, errorMessage: '' });
- store.commit(`${MEMBER_TYPES.user}/${RECEIVE_MEMBER_ROLE_ERROR}`, {
+ store.commit(`${MEMBER_TYPES.group}/${RECEIVE_MEMBER_ROLE_ERROR}`, {
error: new Error('Network Error'),
});
@@ -77,7 +79,7 @@ describe('MembersApp', () => {
it('does not render and scroll to error alert', async () => {
createComponent();
- store.commit(`${MEMBER_TYPES.user}/${HIDE_ERROR}`);
+ store.commit(`${MEMBER_TYPES.group}/${HIDE_ERROR}`);
await nextTick();
@@ -103,4 +105,13 @@ describe('MembersApp', () => {
expect(findFilterSortContainer().exists()).toBe(true);
});
+
+ it('renders `MembersTable` component and passes `tabQueryParamValue` prop', () => {
+ createComponent();
+
+ const membersTableComponent = wrapper.findComponent(MembersTable);
+
+ expect(membersTableComponent.exists()).toBe(true);
+ expect(membersTableComponent.props('tabQueryParamValue')).toBe(TAB_QUERY_PARAM_VALUES.group);
+ });
});
diff --git a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
index 5e04e20801a..a3b91cb20bb 100644
--- a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
+++ b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
@@ -216,5 +216,17 @@ describe('MembersFilteredSearchBar', () => {
'https://localhost/?two_factor=enabled&search=foobar&sort=name_asc',
);
});
+
+ it('adds active tab query param', () => {
+ window.location.search = '?tab=invited';
+
+ createComponent();
+
+ findFilteredSearchBar().vm.$emit('onFilter', [
+ { type: 'filtered-search-term', value: { data: 'foobar' } },
+ ]);
+
+ expect(window.location.href).toBe('https://localhost/?search=foobar&tab=invited');
+ });
});
});
diff --git a/spec/frontend/members/components/members_tabs_spec.js b/spec/frontend/members/components/members_tabs_spec.js
index 6f1a6d0c223..33d8eebf7eb 100644
--- a/spec/frontend/members/components/members_tabs_spec.js
+++ b/spec/frontend/members/components/members_tabs_spec.js
@@ -1,9 +1,14 @@
+import { GlTabs } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import MembersApp from '~/members/components/app.vue';
import MembersTabs from '~/members/components/members_tabs.vue';
-import { MEMBER_TYPES } from '~/members/constants';
+import {
+ MEMBER_TYPES,
+ TAB_QUERY_PARAM_VALUES,
+ ACTIVE_TAB_QUERY_PARAM_NAME,
+} from '~/members/constants';
import { pagination } from '../mock_data';
describe('MembersTabs', () => {
@@ -93,6 +98,18 @@ describe('MembersTabs', () => {
wrapper.destroy();
});
+ it('renders `GlTabs` with `syncActiveTabWithQueryParams` and `queryParamName` props set', async () => {
+ await createComponent();
+
+ const glTabsComponent = wrapper.findComponent(GlTabs);
+
+ expect(glTabsComponent.exists()).toBe(true);
+ expect(glTabsComponent.props()).toMatchObject({
+ syncActiveTabWithQueryParams: true,
+ queryParamName: ACTIVE_TAB_QUERY_PARAM_NAME,
+ });
+ });
+
describe('when tabs have a count', () => {
it('renders tabs with count', async () => {
await createComponent();
@@ -106,7 +123,7 @@ describe('MembersTabs', () => {
expect(findActiveTab().text()).toContain('Members');
});
- it('renders `MembersApp` and passes `namespace` prop', async () => {
+ it('renders `MembersApp` and passes `namespace` and `tabQueryParamValue` props', async () => {
await createComponent();
const membersApps = wrapper.findAllComponents(MembersApp).wrappers;
@@ -115,6 +132,10 @@ describe('MembersTabs', () => {
expect(membersApps[1].props('namespace')).toBe(MEMBER_TYPES.group);
expect(membersApps[2].props('namespace')).toBe(MEMBER_TYPES.invite);
expect(membersApps[3].props('namespace')).toBe(MEMBER_TYPES.accessRequest);
+
+ expect(membersApps[1].props('tabQueryParamValue')).toBe(TAB_QUERY_PARAM_VALUES.group);
+ expect(membersApps[2].props('tabQueryParamValue')).toBe(TAB_QUERY_PARAM_VALUES.invite);
+ expect(membersApps[3].props('tabQueryParamValue')).toBe(TAB_QUERY_PARAM_VALUES.accessRequest);
});
});
@@ -127,56 +148,16 @@ describe('MembersTabs', () => {
expect(findTabByText('Invited')).toBeUndefined();
expect(findTabByText('Access requests')).toBeUndefined();
});
- });
- describe('when url param matches `filteredSearchBar.searchParam`', () => {
- beforeEach(() => {
- window.location.search = '?search_groups=foo+bar';
- });
-
- const expectGroupsTabActive = () => {
- expect(findActiveTab().text()).toContain('Groups');
- };
-
- describe('when tab has a count', () => {
- it('sets tab that corresponds to search param as active tab', async () => {
- await createComponent();
-
- expectGroupsTabActive();
+ describe('when url param matches `filteredSearchBar.searchParam`', () => {
+ beforeEach(() => {
+ window.location.search = '?search_groups=foo+bar';
});
- });
-
- describe('when tab does not have a count', () => {
- it('sets tab that corresponds to search param as active tab', async () => {
- await createComponent({ totalItems: 0 });
-
- expectGroupsTabActive();
- });
- });
- });
-
- describe('when url param matches `pagination.paramName`', () => {
- beforeEach(() => {
- window.location.search = '?invited_page=2';
- });
-
- const expectInvitedTabActive = () => {
- expect(findActiveTab().text()).toContain('Invited');
- };
-
- describe('when tab has a count', () => {
- it('sets tab that corresponds to pagination param as active tab', async () => {
- await createComponent();
-
- expectInvitedTabActive();
- });
- });
- describe('when tab does not have a count', () => {
- it('sets tab that corresponds to pagination param as active tab', async () => {
+ it('shows tab that corresponds to search param', async () => {
await createComponent({ totalItems: 0 });
- expectInvitedTabActive();
+ expect(findTabByText('Groups')).not.toBeUndefined();
});
});
});
diff --git a/spec/frontend/members/components/table/members_table_spec.js b/spec/frontend/members/components/table/members_table_spec.js
index 5308d7651a3..3a17d78bd17 100644
--- a/spec/frontend/members/components/table/members_table_spec.js
+++ b/spec/frontend/members/components/table/members_table_spec.js
@@ -15,7 +15,7 @@ import MemberAvatar from '~/members/components/table/member_avatar.vue';
import MemberSource from '~/members/components/table/member_source.vue';
import MembersTable from '~/members/components/table/members_table.vue';
import RoleDropdown from '~/members/components/table/role_dropdown.vue';
-import { MEMBER_TYPES } from '~/members/constants';
+import { MEMBER_TYPES, TAB_QUERY_PARAM_VALUES } from '~/members/constants';
import * as initUserPopovers from '~/user_popovers';
import {
member as memberMock,
@@ -34,7 +34,7 @@ describe('MembersTable', () => {
const createStore = (state = {}) => {
return new Vuex.Store({
modules: {
- [MEMBER_TYPES.user]: {
+ [MEMBER_TYPES.invite]: {
namespaced: true,
state: {
members: [],
@@ -54,11 +54,14 @@ describe('MembersTable', () => {
const createComponent = (state, provide = {}) => {
wrapper = mount(MembersTable, {
localVue,
+ propsData: {
+ tabQueryParamValue: TAB_QUERY_PARAM_VALUES.invite,
+ },
store: createStore(state),
provide: {
sourceId: 1,
currentUserId: 1,
- namespace: MEMBER_TYPES.user,
+ namespace: MEMBER_TYPES.invite,
...provide,
},
stubs: [
@@ -74,7 +77,7 @@ describe('MembersTable', () => {
});
};
- const url = 'https://localhost/foo-bar/-/project_members';
+ const url = 'https://localhost/foo-bar/-/project_members?tab=invited';
const getByText = (text, options) =>
createWrapper(getByTextHelper(wrapper.element, text, options));
@@ -92,7 +95,7 @@ describe('MembersTable', () => {
const expectCorrectLinkToPage2 = () => {
expect(findPagination().findByText('2', { selector: 'a' }).attributes('href')).toBe(
- `${url}?page=2`,
+ `${url}&invited_members_page=2`,
);
};
@@ -271,7 +274,7 @@ describe('MembersTable', () => {
currentPage: 1,
perPage: 5,
totalItems: 10,
- paramName: 'page',
+ paramName: 'invited_members_page',
},
});
@@ -279,14 +282,14 @@ describe('MembersTable', () => {
});
it('removes any url params defined as `null` in the `params` attribute', () => {
- window.location = new URL(`${url}?search_groups=foo`);
+ window.location = new URL(`${url}&search_groups=foo`);
createComponent({
pagination: {
currentPage: 1,
perPage: 5,
totalItems: 10,
- paramName: 'page',
+ paramName: 'invited_members_page',
params: { search_groups: null },
},
});
diff --git a/spec/frontend/milestones/milestone_utils_spec.js b/spec/frontend/milestones/milestone_utils_spec.js
new file mode 100644
index 00000000000..f863f31e5a9
--- /dev/null
+++ b/spec/frontend/milestones/milestone_utils_spec.js
@@ -0,0 +1,47 @@
+import { useFakeDate } from 'helpers/fake_date';
+import { sortMilestonesByDueDate } from '~/milestones/milestone_utils';
+
+describe('sortMilestonesByDueDate', () => {
+ useFakeDate(2021, 6, 22);
+ const mockMilestones = [
+ {
+ id: 2,
+ },
+ {
+ id: 1,
+ dueDate: '2021-01-01',
+ },
+ {
+ id: 4,
+ dueDate: '2021-02-01',
+ expired: true,
+ },
+ {
+ id: 3,
+ dueDate: `2021-08-01`,
+ },
+ ];
+
+ describe('sorts milestones', () => {
+ it('expired milestones are kept at the bottom of the list', () => {
+ const sortedMilestones = [...mockMilestones].sort(sortMilestonesByDueDate);
+
+ expect(sortedMilestones[2].id).toBe(mockMilestones[1].id); // milestone with id `1` is expired
+ expect(sortedMilestones[3].id).toBe(mockMilestones[2].id); // milestone with id `4` is expired
+ });
+
+ it('milestones with closest due date are kept at the top of the list', () => {
+ const sortedMilestones = [...mockMilestones].sort(sortMilestonesByDueDate);
+
+ // milestone with id `3` & 2021-08-01 is closest to current date i.e. 2021-07-22
+ expect(sortedMilestones[0].id).toBe(mockMilestones[3].id);
+ });
+
+ it('milestones with no due date are kept between milestones with closest due date and expired milestones', () => {
+ const sortedMilestones = [...mockMilestones].sort(sortMilestonesByDueDate);
+
+ // milestone with id `2` has no due date
+ expect(sortedMilestones[1].id).toBe(mockMilestones[0].id);
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index 754ddd96c9b..ea6e4f4a5ed 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -51,6 +51,8 @@ describe('Time series component', () => {
},
stubs: {
GlPopover: true,
+ GlLineChart,
+ GlAreaChart,
},
attachTo: document.body,
});
@@ -202,7 +204,7 @@ describe('Time series component', () => {
describe('when series is of line type', () => {
beforeEach(() => {
- createWrapper();
+ createWrapper({}, mount);
wrapper.vm.formatTooltipText(mockLineSeriesData());
return wrapper.vm.$nextTick();
});
diff --git a/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js b/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
index 6e98ca28071..dbb9fd5f603 100644
--- a/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdownItem } from '@gitlab/ui';
+import { GlDropdownItem, GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import CustomMetricsFormFields from '~/custom_metrics/components/custom_metrics_form_fields.vue';
import { redirectTo } from '~/lib/utils/url_utility';
@@ -43,6 +43,9 @@ describe('Actions menu', () => {
wrapper = shallowMount(ActionsMenu, {
propsData: { ...dashboardActionsMenuProps, ...props },
store,
+ stubs: {
+ GlModal,
+ },
...options,
});
};
@@ -82,7 +85,7 @@ describe('Actions menu', () => {
it('modal for custom metrics form is rendered', () => {
expect(findAddMetricModal().exists()).toBe(true);
- expect(findAddMetricModal().attributes().modalid).toBe('addMetric');
+ expect(findAddMetricModal().props('modalId')).toBe('addMetric');
});
it('add metric modal submit button exists', () => {
diff --git a/spec/frontend/nav/components/top_nav_menu_item_spec.js b/spec/frontend/nav/components/top_nav_menu_item_spec.js
index fd2b4d3b056..71154e18915 100644
--- a/spec/frontend/nav/components/top_nav_menu_item_spec.js
+++ b/spec/frontend/nav/components/top_nav_menu_item_spec.js
@@ -73,7 +73,7 @@ describe('~/nav/components/top_nav_menu_item.vue', () => {
expect(findButtonIcons()).toEqual([
{
name: TEST_MENU_ITEM.icon,
- classes: ['gl-mr-2!'],
+ classes: ['gl-mr-3!'],
},
{
name: 'chevron-right',
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index 537622b7918..bb79b43205b 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -7,7 +7,7 @@ import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import batchComments from '~/batch_comments/stores/modules/batch_comments';
import { refreshUserMergeRequestCounts } from '~/commons/nav/user_merge_requests';
-import { deprecatedCreateFlash as flash } from '~/flash';
+import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import CommentForm from '~/notes/components/comment_form.vue';
import * as constants from '~/notes/constants';
@@ -464,9 +464,9 @@ describe('issue_comment_form component', () => {
await wrapper.vm.$nextTick;
await wrapper.vm.$nextTick;
- expect(flash).toHaveBeenCalledWith(
- `Something went wrong while closing the ${type}. Please try again later.`,
- );
+ expect(createFlash).toHaveBeenCalledWith({
+ message: `Something went wrong while closing the ${type}. Please try again later.`,
+ });
});
});
@@ -500,9 +500,9 @@ describe('issue_comment_form component', () => {
await wrapper.vm.$nextTick;
await wrapper.vm.$nextTick;
- expect(flash).toHaveBeenCalledWith(
- `Something went wrong while reopening the ${type}. Please try again later.`,
- );
+ expect(createFlash).toHaveBeenCalledWith({
+ message: `Something went wrong while reopening the ${type}. Please try again later.`,
+ });
});
});
diff --git a/spec/frontend/notes/components/discussion_notes_spec.js b/spec/frontend/notes/components/discussion_notes_spec.js
index cd24b9afbdf..59ac75f00e6 100644
--- a/spec/frontend/notes/components/discussion_notes_spec.js
+++ b/spec/frontend/notes/components/discussion_notes_spec.js
@@ -1,5 +1,5 @@
import { getByRole } from '@testing-library/dom';
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, mount } from '@vue/test-utils';
import '~/behaviors/markdown/render_gfm';
import DiscussionNotes from '~/notes/components/discussion_notes.vue';
import NoteableNote from '~/notes/components/noteable_note.vue';
@@ -23,8 +23,8 @@ describe('DiscussionNotes', () => {
let wrapper;
const getList = () => getByRole(wrapper.element, 'list');
- const createComponent = (props) => {
- wrapper = shallowMount(DiscussionNotes, {
+ const createComponent = (props, mountingMethod = shallowMount) => {
+ wrapper = mountingMethod(DiscussionNotes, {
store,
propsData: {
discussion: discussionMock,
@@ -33,7 +33,11 @@ describe('DiscussionNotes', () => {
...props,
},
scopedSlots: {
- footer: '<p slot-scope="{ showReplies }">showReplies:{{showReplies}}</p>',
+ footer: `
+ <template #default="{ showReplies }">
+ <p>showReplies:{{ showReplies }}</p>,
+ </template>
+ `,
},
slots: {
'avatar-badge': '<span class="avatar-badge-slot-content" />',
@@ -112,7 +116,7 @@ describe('DiscussionNotes', () => {
});
it('passes down avatar-badge slot content', () => {
- createComponent();
+ createComponent({}, mount);
expect(wrapper.find('.avatar-badge-slot-content').exists()).toBe(true);
});
});
diff --git a/spec/frontend/notes/components/noteable_note_spec.js b/spec/frontend/notes/components/noteable_note_spec.js
index 7444c441e06..f217dfd2e48 100644
--- a/spec/frontend/notes/components/noteable_note_spec.js
+++ b/spec/frontend/notes/components/noteable_note_spec.js
@@ -1,5 +1,4 @@
import { mount } from '@vue/test-utils';
-import { escape } from 'lodash';
import Vue from 'vue';
import Vuex from 'vuex';
@@ -263,7 +262,9 @@ describe('issue_note', () => {
await waitForPromises();
expect(alertSpy).not.toHaveBeenCalled();
- expect(wrapper.vm.note.note_html).toBe(escape(noteBody));
+ expect(wrapper.vm.note.note_html).toBe(
+ '<p><img src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7"></p>\n',
+ );
});
});
@@ -291,7 +292,7 @@ describe('issue_note', () => {
await wrapper.vm.$nextTick();
let noteBodyProps = noteBody.props();
- expect(noteBodyProps.note.note_html).toBe(updatedText);
+ expect(noteBodyProps.note.note_html).toBe(`<p>${updatedText}</p>\n`);
noteBody.vm.$emit('cancelForm');
await wrapper.vm.$nextTick();
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index 7eef2017dfb..2ff65d3f47e 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -2,7 +2,7 @@ import AxiosMockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import { TEST_HOST } from 'spec/test_constants';
import Api from '~/api';
-import { deprecatedCreateFlash as Flash } from '~/flash';
+import createFlash from '~/flash';
import { EVENT_ISSUABLE_VUE_APP_CHANGE } from '~/issuable/constants';
import axios from '~/lib/utils/axios_utils';
import * as notesConstants from '~/notes/constants';
@@ -33,10 +33,7 @@ jest.mock('~/flash', () => {
};
});
- return {
- createFlash: flash,
- deprecatedCreateFlash: flash,
- };
+ return flash;
});
describe('Actions Notes Store', () => {
@@ -348,13 +345,13 @@ describe('Actions Notes Store', () => {
await startPolling();
expect(axiosMock.history.get).toHaveLength(1);
- expect(Flash).not.toHaveBeenCalled();
+ expect(createFlash).not.toHaveBeenCalled();
await advanceXMoreIntervals(1);
expect(axiosMock.history.get).toHaveLength(2);
- expect(Flash).toHaveBeenCalled();
- expect(Flash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalledTimes(1);
});
it('resets the failure counter on success', async () => {
@@ -375,14 +372,14 @@ describe('Actions Notes Store', () => {
await advanceXMoreIntervals(1); // Failure #2
// That was the first failure AFTER a success, so we should NOT see the error displayed
- expect(Flash).not.toHaveBeenCalled();
+ expect(createFlash).not.toHaveBeenCalled();
// Now we'll allow another failure
await advanceXMoreIntervals(1); // Failure #3
// Since this is the second failure in a row, the error should happen
- expect(Flash).toHaveBeenCalled();
- expect(Flash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalledTimes(1);
});
it('hides the error display if it exists on success', async () => {
@@ -393,8 +390,8 @@ describe('Actions Notes Store', () => {
await advanceXMoreIntervals(2);
// After two errors, the error should be displayed
- expect(Flash).toHaveBeenCalled();
- expect(Flash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalledTimes(1);
axiosMock.reset();
successMock();
@@ -906,7 +903,7 @@ describe('Actions Notes Store', () => {
.then(() => done.fail('Expected error to be thrown!'))
.catch((err) => {
expect(err).toBe(error);
- expect(Flash).not.toHaveBeenCalled();
+ expect(createFlash).not.toHaveBeenCalled();
})
.then(done)
.catch(done.fail);
@@ -928,11 +925,10 @@ describe('Actions Notes Store', () => {
)
.then((resp) => {
expect(resp.hasFlash).toBe(true);
- expect(Flash).toHaveBeenCalledWith(
- 'Your comment could not be submitted because something went wrong',
- 'alert',
- flashContainer,
- );
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'Your comment could not be submitted because something went wrong',
+ parent: flashContainer,
+ });
})
.catch(() => done.fail('Expected success response!'))
.then(done)
@@ -954,7 +950,7 @@ describe('Actions Notes Store', () => {
)
.then((data) => {
expect(data).toBe(res);
- expect(Flash).not.toHaveBeenCalled();
+ expect(createFlash).not.toHaveBeenCalled();
})
.then(done)
.catch(done.fail);
@@ -997,7 +993,7 @@ describe('Actions Notes Store', () => {
['resolveDiscussion', { discussionId }],
['restartPolling'],
]);
- expect(Flash).not.toHaveBeenCalled();
+ expect(createFlash).not.toHaveBeenCalled();
});
});
@@ -1012,7 +1008,10 @@ describe('Actions Notes Store', () => {
[mutationTypes.SET_RESOLVING_DISCUSSION, false],
]);
expect(dispatch.mock.calls).toEqual([['stopPolling'], ['restartPolling']]);
- expect(Flash).toHaveBeenCalledWith(TEST_ERROR_MESSAGE, 'alert', flashContainer);
+ expect(createFlash).toHaveBeenCalledWith({
+ message: TEST_ERROR_MESSAGE,
+ parent: flashContainer,
+ });
});
});
@@ -1027,11 +1026,10 @@ describe('Actions Notes Store', () => {
[mutationTypes.SET_RESOLVING_DISCUSSION, false],
]);
expect(dispatch.mock.calls).toEqual([['stopPolling'], ['restartPolling']]);
- expect(Flash).toHaveBeenCalledWith(
- 'Something went wrong while applying the suggestion. Please try again.',
- 'alert',
- flashContainer,
- );
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'Something went wrong while applying the suggestion. Please try again.',
+ parent: flashContainer,
+ });
});
});
@@ -1039,7 +1037,7 @@ describe('Actions Notes Store', () => {
dispatch.mockReturnValue(Promise.reject());
testSubmitSuggestion(done, () => {
- expect(Flash).not.toHaveBeenCalled();
+ expect(createFlash).not.toHaveBeenCalled();
});
});
});
@@ -1083,7 +1081,7 @@ describe('Actions Notes Store', () => {
['restartPolling'],
]);
- expect(Flash).not.toHaveBeenCalled();
+ expect(createFlash).not.toHaveBeenCalled();
});
});
@@ -1101,7 +1099,10 @@ describe('Actions Notes Store', () => {
]);
expect(dispatch.mock.calls).toEqual([['stopPolling'], ['restartPolling']]);
- expect(Flash).toHaveBeenCalledWith(TEST_ERROR_MESSAGE, 'alert', flashContainer);
+ expect(createFlash).toHaveBeenCalledWith({
+ message: TEST_ERROR_MESSAGE,
+ parent: flashContainer,
+ });
});
});
@@ -1119,11 +1120,11 @@ describe('Actions Notes Store', () => {
]);
expect(dispatch.mock.calls).toEqual([['stopPolling'], ['restartPolling']]);
- expect(Flash).toHaveBeenCalledWith(
- 'Something went wrong while applying the batch of suggestions. Please try again.',
- 'alert',
- flashContainer,
- );
+ expect(createFlash).toHaveBeenCalledWith({
+ message:
+ 'Something went wrong while applying the batch of suggestions. Please try again.',
+ parent: flashContainer,
+ });
});
});
@@ -1139,7 +1140,7 @@ describe('Actions Notes Store', () => {
[mutationTypes.SET_RESOLVING_DISCUSSION, false],
]);
- expect(Flash).not.toHaveBeenCalled();
+ expect(createFlash).not.toHaveBeenCalled();
});
});
});
@@ -1283,7 +1284,7 @@ describe('Actions Notes Store', () => {
)
.then(() => done.fail('Expected error to be thrown'))
.catch(() => {
- expect(Flash).toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalled();
done();
});
});
diff --git a/spec/frontend/notifications/components/custom_notifications_modal_spec.js b/spec/frontend/notifications/components/custom_notifications_modal_spec.js
index 5e4114d91f5..0782ec7cdd5 100644
--- a/spec/frontend/notifications/components/custom_notifications_modal_spec.js
+++ b/spec/frontend/notifications/components/custom_notifications_modal_spec.js
@@ -177,11 +177,8 @@ describe('CustomNotificationsModal', () => {
await waitForPromises();
- expect(
- mockToastShow,
- ).toHaveBeenCalledWith(
+ expect(mockToastShow).toHaveBeenCalledWith(
'An error occurred while loading the notification settings. Please try again.',
- { type: 'error' },
);
});
});
@@ -255,11 +252,8 @@ describe('CustomNotificationsModal', () => {
await waitForPromises();
- expect(
- mockToastShow,
- ).toHaveBeenCalledWith(
+ expect(mockToastShow).toHaveBeenCalledWith(
'An error occurred while updating the notification settings. Please try again.',
- { type: 'error' },
);
});
});
diff --git a/spec/frontend/notifications/components/notifications_dropdown_spec.js b/spec/frontend/notifications/components/notifications_dropdown_spec.js
index e90bd68d067..e12251ce6d9 100644
--- a/spec/frontend/notifications/components/notifications_dropdown_spec.js
+++ b/spec/frontend/notifications/components/notifications_dropdown_spec.js
@@ -242,11 +242,8 @@ describe('NotificationsDropdown', () => {
await clickDropdownItemAt(1);
expect(wrapper.vm.selectedNotificationLevel).toBe('global');
- expect(
- mockToastShow,
- ).toHaveBeenCalledWith(
+ expect(mockToastShow).toHaveBeenCalledWith(
'An error occurred while updating the notification settings. Please try again.',
- { type: 'error' },
);
});
diff --git a/spec/frontend/operation_settings/components/metrics_settings_spec.js b/spec/frontend/operation_settings/components/metrics_settings_spec.js
index 5eecfd395e2..258c6eae692 100644
--- a/spec/frontend/operation_settings/components/metrics_settings_spec.js
+++ b/spec/frontend/operation_settings/components/metrics_settings_spec.js
@@ -205,7 +205,6 @@ describe('operation settings external dashboard component', () => {
.then(() =>
expect(createFlash).toHaveBeenCalledWith({
message: `There was an error saving your changes. ${message}`,
- type: 'alert',
}),
);
});
diff --git a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
index 07aba62fef6..dbebdeeb452 100644
--- a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
+++ b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
@@ -21,6 +21,7 @@ exports[`packages_list_app renders 1`] = `
<img
alt=""
class="gl-max-w-full"
+ role="img"
src="helpSvg"
/>
</div>
diff --git a/spec/frontend/packages/shared/utils_spec.js b/spec/frontend/packages/shared/utils_spec.js
index 463e4a4febb..a1076b729f8 100644
--- a/spec/frontend/packages/shared/utils_spec.js
+++ b/spec/frontend/packages/shared/utils_spec.js
@@ -40,6 +40,8 @@ describe('Packages shared utils', () => {
${'pypi'} | ${'PyPI'}
${'rubygems'} | ${'RubyGems'}
${'composer'} | ${'Composer'}
+ ${'debian'} | ${'Debian'}
+ ${'helm'} | ${'Helm'}
${'foo'} | ${null}
`(`package type`, ({ packageType, expectedResult }) => {
it(`${packageType} should show as ${expectedResult}`, () => {
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/app_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/app_spec.js
new file mode 100644
index 00000000000..97444ec108f
--- /dev/null
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/app_spec.js
@@ -0,0 +1,35 @@
+import { GlEmptyState } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+
+import PackagesApp from '~/packages_and_registries/package_registry/components/details/app.vue';
+
+describe('PackagesApp', () => {
+ let wrapper;
+
+ function createComponent() {
+ wrapper = shallowMount(PackagesApp, {
+ provide: {
+ titleComponent: 'titleComponent',
+ projectName: 'projectName',
+ canDelete: 'canDelete',
+ svgPath: 'svgPath',
+ npmPath: 'npmPath',
+ npmHelpPath: 'npmHelpPath',
+ projectListUrl: 'projectListUrl',
+ groupListUrl: 'groupListUrl',
+ },
+ });
+ }
+
+ const emptyState = () => wrapper.findComponent(GlEmptyState);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders an empty state component', () => {
+ createComponent();
+
+ expect(emptyState().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
index 14ee3f3e3b8..f2877a1f2a5 100644
--- a/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
@@ -137,7 +137,7 @@ describe('Group Settings App', () => {
href: PACKAGES_DOCS_PATH,
target: '_blank',
});
- expect(findLink().text()).toBe('More Information');
+ expect(findLink().text()).toBe('Learn more.');
});
it('calls the graphql API with the proper variables', () => {
@@ -244,9 +244,7 @@ describe('Group Settings App', () => {
await waitForPromises();
- expect(show).toHaveBeenCalledWith(SUCCESS_UPDATING_SETTINGS, {
- type: 'success',
- });
+ expect(show).toHaveBeenCalledWith(SUCCESS_UPDATING_SETTINGS);
});
it('has an optimistic response', async () => {
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/__snapshots__/utils_spec.js.snap b/spec/frontend/packages_and_registries/settings/project/settings/__snapshots__/utils_spec.js.snap
index 7062773b46b..cf554717127 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/__snapshots__/utils_spec.js.snap
+++ b/spec/frontend/packages_and_registries/settings/project/settings/__snapshots__/utils_spec.js.snap
@@ -33,6 +33,10 @@ Array [
exports[`Utils formOptionsGenerator returns an object containing keepN 1`] = `
Array [
Object {
+ "key": null,
+ "label": "",
+ },
+ Object {
"default": false,
"key": "ONE_TAG",
"label": "1 tag per image name",
@@ -74,6 +78,10 @@ Array [
exports[`Utils formOptionsGenerator returns an object containing olderThan 1`] = `
Array [
Object {
+ "key": null,
+ "label": "",
+ },
+ Object {
"default": false,
"key": "SEVEN_DAYS",
"label": "7 days",
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap
index 7a52b4a5d0f..1009db46401 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap
@@ -22,7 +22,7 @@ exports[`Settings Form Enable matches snapshot 1`] = `
exports[`Settings Form Keep N matches snapshot 1`] = `
<expiration-dropdown-stub
data-testid="keep-n-dropdown"
- formoptions="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
+ formoptions="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
label="Keep the most recent:"
name="keep-n"
value="TEN_TAGS"
@@ -44,7 +44,7 @@ exports[`Settings Form Keep Regex matches snapshot 1`] = `
exports[`Settings Form OlderThan matches snapshot 1`] = `
<expiration-dropdown-stub
data-testid="older-than-dropdown"
- formoptions="[object Object],[object Object],[object Object],[object Object]"
+ formoptions="[object Object],[object Object],[object Object],[object Object],[object Object]"
label="Remove tags older than:"
name="older-than"
value="FOURTEEN_DAYS"
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js
index 7e5383d7ff1..3a71af94d5a 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js
@@ -132,9 +132,9 @@ describe('Settings Form', () => {
model | finder | fieldName | type | defaultValue
${'enabled'} | ${findEnableToggle} | ${'Enable'} | ${'toggle'} | ${false}
${'cadence'} | ${findCadenceDropdown} | ${'Cadence'} | ${'dropdown'} | ${'EVERY_DAY'}
- ${'keepN'} | ${findKeepNDropdown} | ${'Keep N'} | ${'dropdown'} | ${'TEN_TAGS'}
+ ${'keepN'} | ${findKeepNDropdown} | ${'Keep N'} | ${'dropdown'} | ${''}
${'nameRegexKeep'} | ${findKeepRegexInput} | ${'Keep Regex'} | ${'textarea'} | ${''}
- ${'olderThan'} | ${findOlderThanDropdown} | ${'OlderThan'} | ${'dropdown'} | ${'NINETY_DAYS'}
+ ${'olderThan'} | ${findOlderThanDropdown} | ${'OlderThan'} | ${'dropdown'} | ${''}
${'nameRegex'} | ${findRemoveRegexInput} | ${'Remove regex'} | ${'textarea'} | ${''}
`('$fieldName', ({ model, finder, type, defaultValue }) => {
it('matches snapshot', () => {
@@ -293,10 +293,10 @@ describe('Settings Form', () => {
input: {
cadence: 'EVERY_DAY',
enabled: true,
- keepN: 'TEN_TAGS',
+ keepN: null,
nameRegex: 'asdasdssssdfdf',
nameRegexKeep: 'sss',
- olderThan: 'NINETY_DAYS',
+ olderThan: null,
projectPath: 'path',
},
});
@@ -321,9 +321,7 @@ describe('Settings Form', () => {
await waitForPromises();
await wrapper.vm.$nextTick();
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_SUCCESS_MESSAGE, {
- type: 'success',
- });
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_SUCCESS_MESSAGE);
});
describe('when submit fails', () => {
@@ -339,9 +337,7 @@ describe('Settings Form', () => {
await waitForPromises();
await wrapper.vm.$nextTick();
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('foo', {
- type: 'error',
- });
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('foo');
});
});
@@ -355,9 +351,7 @@ describe('Settings Form', () => {
await waitForPromises();
await wrapper.vm.$nextTick();
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_ERROR_MESSAGE, {
- type: 'error',
- });
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_ERROR_MESSAGE);
});
it('parses the error messages', async () => {
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/utils_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/utils_spec.js
index 4c81671cd46..ed126d87ae3 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/utils_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/utils_spec.js
@@ -12,6 +12,7 @@ describe('Utils', () => {
olderThanTranslationGenerator,
);
expect(result).toEqual([
+ { key: null, label: '' },
{ variable: 1, label: '1 day' },
{ variable: 2, label: '2 days' },
]);
diff --git a/spec/frontend/pager_spec.js b/spec/frontend/pager_spec.js
index 95679a51c6d..ff352303143 100644
--- a/spec/frontend/pager_spec.js
+++ b/spec/frontend/pager_spec.js
@@ -6,6 +6,7 @@ import { removeParams } from '~/lib/utils/url_utility';
import Pager from '~/pager';
jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
removeParams: jest.fn().mockName('removeParams'),
}));
diff --git a/spec/frontend/pages/admin/application_settings/metrics_and_profiling/usage_statistics_spec.js b/spec/frontend/pages/admin/application_settings/metrics_and_profiling/usage_statistics_spec.js
new file mode 100644
index 00000000000..858c7b76ac8
--- /dev/null
+++ b/spec/frontend/pages/admin/application_settings/metrics_and_profiling/usage_statistics_spec.js
@@ -0,0 +1,57 @@
+import initSetHelperText, {
+ HELPER_TEXT_SERVICE_PING_DISABLED,
+ HELPER_TEXT_SERVICE_PING_ENABLED,
+} from '~/pages/admin/application_settings/metrics_and_profiling/usage_statistics';
+
+describe('UsageStatistics', () => {
+ const FIXTURE = 'application_settings/usage.html';
+ let usagePingCheckBox;
+ let usagePingFeaturesCheckBox;
+ let usagePingFeaturesLabel;
+ let usagePingFeaturesHelperText;
+
+ beforeEach(() => {
+ loadFixtures(FIXTURE);
+ initSetHelperText();
+ usagePingCheckBox = document.getElementById('application_setting_usage_ping_enabled');
+ usagePingFeaturesCheckBox = document.getElementById(
+ 'application_setting_usage_ping_features_enabled',
+ );
+ usagePingFeaturesLabel = document.getElementById('service_ping_features_label');
+ usagePingFeaturesHelperText = document.getElementById('service_ping_features_helper_text');
+ });
+
+ const expectEnabledUsagePingFeaturesCheckBox = () => {
+ expect(usagePingFeaturesCheckBox.classList.contains('gl-cursor-not-allowed')).toBe(false);
+ expect(usagePingFeaturesHelperText.textContent).toEqual(HELPER_TEXT_SERVICE_PING_ENABLED);
+ };
+
+ const expectDisabledUsagePingFeaturesCheckBox = () => {
+ expect(usagePingFeaturesLabel.classList.contains('gl-cursor-not-allowed')).toBe(true);
+ expect(usagePingFeaturesHelperText.textContent).toEqual(HELPER_TEXT_SERVICE_PING_DISABLED);
+ };
+
+ describe('Registration Features checkbox', () => {
+ it('is disabled when Usage Ping checkbox is unchecked', () => {
+ expect(usagePingCheckBox.checked).toBe(false);
+ expectDisabledUsagePingFeaturesCheckBox();
+ });
+
+ it('is enabled when Usage Ping checkbox is checked', () => {
+ usagePingCheckBox.click();
+ expect(usagePingCheckBox.checked).toBe(true);
+ expectEnabledUsagePingFeaturesCheckBox();
+ });
+
+ it('is switched to disabled when Usage Ping checkbox is unchecked ', () => {
+ usagePingCheckBox.click();
+ usagePingFeaturesCheckBox.click();
+ expectEnabledUsagePingFeaturesCheckBox();
+
+ usagePingCheckBox.click();
+ expect(usagePingCheckBox.checked).toBe(false);
+ expect(usagePingFeaturesCheckBox.checked).toBe(false);
+ expectDisabledUsagePingFeaturesCheckBox();
+ });
+ });
+});
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
index c80ccfa8256..dd617b1ffc2 100644
--- a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
@@ -29,10 +29,12 @@ describe('ForkForm component', () => {
const MOCK_NAMESPACES_RESPONSE = [
{
name: 'one',
+ full_name: 'one-group/one',
id: 1,
},
{
name: 'two',
+ full_name: 'two-group/two',
id: 2,
},
];
@@ -155,7 +157,7 @@ describe('ForkForm component', () => {
describe('forks namespaces', () => {
beforeEach(() => {
mockGetRequest({ namespaces: MOCK_NAMESPACES_RESPONSE });
- createComponent();
+ createFullComponent();
});
it('make GET request from endpoint', async () => {
@@ -178,8 +180,23 @@ describe('ForkForm component', () => {
const optionsArray = findForkUrlInput().findAll('option');
expect(optionsArray).toHaveLength(MOCK_NAMESPACES_RESPONSE.length + 1);
- expect(optionsArray.at(1).text()).toBe(MOCK_NAMESPACES_RESPONSE[0].name);
- expect(optionsArray.at(2).text()).toBe(MOCK_NAMESPACES_RESPONSE[1].name);
+ expect(optionsArray.at(1).text()).toBe(MOCK_NAMESPACES_RESPONSE[0].full_name);
+ expect(optionsArray.at(2).text()).toBe(MOCK_NAMESPACES_RESPONSE[1].full_name);
+ });
+
+ it('set namespaces in alphabetical order', async () => {
+ const namespace = {
+ name: 'three',
+ full_name: 'aaa/three',
+ id: 3,
+ };
+ mockGetRequest({
+ namespaces: [...MOCK_NAMESPACES_RESPONSE, namespace],
+ });
+ createComponent();
+ await axios.waitForAll();
+
+ expect(wrapper.vm.namespaces).toEqual([namespace, ...MOCK_NAMESPACES_RESPONSE]);
});
});
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js
index b5425fa6f2e..490dafed4ae 100644
--- a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js
@@ -34,10 +34,10 @@ describe('Fork groups list item component', () => {
});
};
- it('renders pending removal badge if applicable', () => {
+ it('renders pending deletion badge if applicable', () => {
createWrapper({ group: { ...DEFAULT_GROUP_DATA, marked_for_deletion: true } });
- expect(wrapper.find(GlBadge).text()).toBe('pending removal');
+ expect(wrapper.find(GlBadge).text()).toBe('pending deletion');
});
it('renders go to fork button if has forked project', () => {
diff --git a/spec/frontend/pages/projects/new/components/app_spec.js b/spec/frontend/pages/projects/new/components/app_spec.js
index b604e636243..ab8c6d529a8 100644
--- a/spec/frontend/pages/projects/new/components/app_spec.js
+++ b/spec/frontend/pages/projects/new/components/app_spec.js
@@ -1,13 +1,10 @@
import { shallowMount } from '@vue/test-utils';
-import { assignGitlabExperiment } from 'helpers/experimentation_helper';
import App from '~/pages/projects/new/components/app.vue';
import NewNamespacePage from '~/vue_shared/new_namespace/new_namespace_page.vue';
describe('Experimental new project creation app', () => {
let wrapper;
- const findNewNamespacePage = () => wrapper.findComponent(NewNamespacePage);
-
const createComponent = (propsData) => {
wrapper = shallowMount(App, { propsData });
};
@@ -16,36 +13,6 @@ describe('Experimental new project creation app', () => {
wrapper.destroy();
});
- describe('new_repo experiment', () => {
- it('passes new_repo experiment', () => {
- createComponent();
-
- expect(findNewNamespacePage().props().experiment).toBe('new_repo');
- });
-
- describe('when in the candidate variant', () => {
- assignGitlabExperiment('new_repo', 'candidate');
-
- it('has "repository" in the panel title', () => {
- createComponent();
-
- expect(findNewNamespacePage().props().panels[0].title).toBe(
- 'Create blank project/repository',
- );
- });
- });
-
- describe('when in the control variant', () => {
- assignGitlabExperiment('new_repo', 'control');
-
- it('has "project" in the panel title', () => {
- createComponent();
-
- expect(findNewNamespacePage().props().panels[0].title).toBe('Create blank project');
- });
- });
- });
-
it('passes custom new project guideline text to underlying component', () => {
const DEMO_GUIDELINES = 'Demo guidelines';
const guidelineSelector = '#new-project-guideline';
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index 878721666ff..4c253f0610b 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -94,6 +94,8 @@ describe('Settings Panel', () => {
const findPackageSettings = () => wrapper.find({ ref: 'package-settings' });
const findPackagesEnabledInput = () => wrapper.find('[name="project[packages_enabled]"]');
const findPagesSettings = () => wrapper.find({ ref: 'pages-settings' });
+ const findPagesAccessLevels = () =>
+ wrapper.find('[name="project[project_feature_attributes][pages_access_level]"]');
const findEmailSettings = () => wrapper.find({ ref: 'email-settings' });
const findShowDefaultAwardEmojis = () =>
wrapper.find('input[name="project[project_setting_attributes][show_default_award_emojis]"]');
@@ -479,6 +481,29 @@ describe('Settings Panel', () => {
describe('Pages', () => {
it.each`
+ visibilityLevel | pagesAccessControlForced | output
+ ${visibilityOptions.PRIVATE} | ${true} | ${[[visibilityOptions.INTERNAL, 'Only Project Members'], [visibilityOptions.PUBLIC, 'Everyone With Access']]}
+ ${visibilityOptions.PRIVATE} | ${false} | ${[[visibilityOptions.INTERNAL, 'Only Project Members'], [visibilityOptions.PUBLIC, 'Everyone With Access'], [30, 'Everyone']]}
+ ${visibilityOptions.INTERNAL} | ${true} | ${[[visibilityOptions.INTERNAL, 'Only Project Members'], [visibilityOptions.PUBLIC, 'Everyone With Access']]}
+ ${visibilityOptions.INTERNAL} | ${false} | ${[[visibilityOptions.INTERNAL, 'Only Project Members'], [visibilityOptions.PUBLIC, 'Everyone With Access'], [30, 'Everyone']]}
+ ${visibilityOptions.PUBLIC} | ${true} | ${[[visibilityOptions.INTERNAL, 'Only Project Members'], [visibilityOptions.PUBLIC, 'Everyone With Access']]}
+ ${visibilityOptions.PUBLIC} | ${false} | ${[[visibilityOptions.INTERNAL, 'Only Project Members'], [visibilityOptions.PUBLIC, 'Everyone With Access'], [30, 'Everyone']]}
+ `(
+ 'renders correct options when pagesAccessControlForced is $pagesAccessControlForced and visibilityLevel is $visibilityLevel',
+ async ({ visibilityLevel, pagesAccessControlForced, output }) => {
+ wrapper = mountComponent({
+ pagesAvailable: true,
+ pagesAccessControlEnabled: true,
+ pagesAccessControlForced,
+ });
+
+ await findProjectVisibilityLevelInput().trigger('change', visibilityLevel);
+
+ expect(findPagesAccessLevels().props('options')).toStrictEqual(output);
+ },
+ );
+
+ it.each`
pagesAvailable | pagesAccessControlEnabled | visibility
${true} | ${true} | ${'show'}
${true} | ${false} | ${'hide'}
diff --git a/spec/frontend/persistent_user_callout_spec.js b/spec/frontend/persistent_user_callout_spec.js
index 403142d7ff7..1e51ddf909a 100644
--- a/spec/frontend/persistent_user_callout_spec.js
+++ b/spec/frontend/persistent_user_callout_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
-import { deprecatedCreateFlash as Flash } from '~/flash';
+import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import PersistentUserCallout from '~/persistent_user_callout';
@@ -96,9 +96,9 @@ describe('PersistentUserCallout', () => {
return waitForPromises().then(() => {
expect(persistentUserCallout.container.remove).not.toHaveBeenCalled();
- expect(Flash).toHaveBeenCalledWith(
- 'An error occurred while dismissing the alert. Refresh the page and try again.',
- );
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'An error occurred while dismissing the alert. Refresh the page and try again.',
+ });
});
});
});
@@ -203,9 +203,10 @@ describe('PersistentUserCallout', () => {
return waitForPromises().then(() => {
expect(window.location.assign).not.toHaveBeenCalled();
- expect(Flash).toHaveBeenCalledWith(
- 'An error occurred while acknowledging the notification. Refresh the page and try again.',
- );
+ expect(createFlash).toHaveBeenCalledWith({
+ message:
+ 'An error occurred while acknowledging the notification. Refresh the page and try again.',
+ });
});
});
});
diff --git a/spec/frontend/pipeline_editor/components/editor/ci_config_merged_preview_spec.js b/spec/frontend/pipeline_editor/components/editor/ci_config_merged_preview_spec.js
index fb191fccb0d..7dd8a77d055 100644
--- a/spec/frontend/pipeline_editor/components/editor/ci_config_merged_preview_spec.js
+++ b/spec/frontend/pipeline_editor/components/editor/ci_config_merged_preview_spec.js
@@ -8,7 +8,7 @@ import { mockLintResponse, mockCiConfigPath } from '../../mock_data';
describe('Text editor component', () => {
let wrapper;
- const MockEditorLite = {
+ const MockSourceEditor = {
template: '<div/>',
props: ['value', 'fileName', 'editorOptions'],
mounted() {
@@ -26,13 +26,13 @@ describe('Text editor component', () => {
ciConfigPath: mockCiConfigPath,
},
stubs: {
- EditorLite: MockEditorLite,
+ SourceEditor: MockSourceEditor,
},
});
};
const findIcon = () => wrapper.findComponent(GlIcon);
- const findEditor = () => wrapper.findComponent(MockEditorLite);
+ const findEditor = () => wrapper.findComponent(MockSourceEditor);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/pipeline_editor/components/editor/ci_editor_header_spec.js b/spec/frontend/pipeline_editor/components/editor/ci_editor_header_spec.js
new file mode 100644
index 00000000000..3ee53d4a055
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/editor/ci_editor_header_spec.js
@@ -0,0 +1,53 @@
+import { GlButton } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import CiEditorHeader from '~/pipeline_editor/components/editor/ci_editor_header.vue';
+import {
+ pipelineEditorTrackingOptions,
+ TEMPLATE_REPOSITORY_URL,
+} from '~/pipeline_editor/constants';
+
+describe('CI Editor Header', () => {
+ let wrapper;
+ let trackingSpy = null;
+
+ const createComponent = () => {
+ wrapper = shallowMount(CiEditorHeader, {});
+ };
+
+ const findLinkBtn = () => wrapper.findComponent(GlButton);
+
+ afterEach(() => {
+ wrapper.destroy();
+ unmockTracking();
+ });
+
+ describe('link button', () => {
+ beforeEach(() => {
+ createComponent();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ it('finds the browse template button', () => {
+ expect(findLinkBtn().exists()).toBe(true);
+ });
+
+ it('contains the link to the template repo', () => {
+ expect(findLinkBtn().attributes('href')).toBe(TEMPLATE_REPOSITORY_URL);
+ });
+
+ it('has the external-link icon', () => {
+ expect(findLinkBtn().props('icon')).toBe('external-link');
+ });
+
+ it('tracks the click on the browse button', async () => {
+ const { label, actions } = pipelineEditorTrackingOptions;
+
+ await findLinkBtn().vm.$emit('click');
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, actions.browse_templates, {
+ label,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js b/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js
index 6f9245e39aa..c6c7f593cc5 100644
--- a/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js
+++ b/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { EDITOR_READY_EVENT } from '~/editor/constants';
-import { EditorLiteExtension } from '~/editor/extensions/editor_lite_extension_base';
+import { SourceEditorExtension } from '~/editor/extensions/source_editor_extension_base';
import TextEditor from '~/pipeline_editor/components/editor/text_editor.vue';
import {
mockCiConfigPath,
@@ -19,7 +19,7 @@ describe('Pipeline Editor | Text editor component', () => {
let mockUse;
let mockRegisterCiSchema;
- const MockEditorLite = {
+ const MockSourceEditor = {
template: '<div/>',
props: ['value', 'fileName'],
mounted() {
@@ -55,15 +55,15 @@ describe('Pipeline Editor | Text editor component', () => {
[EDITOR_READY_EVENT]: editorReadyListener,
},
stubs: {
- EditorLite: MockEditorLite,
+ SourceEditor: MockSourceEditor,
},
});
};
- const findEditor = () => wrapper.findComponent(MockEditorLite);
+ const findEditor = () => wrapper.findComponent(MockSourceEditor);
beforeEach(() => {
- EditorLiteExtension.deferRerender = jest.fn();
+ SourceEditorExtension.deferRerender = jest.fn();
});
afterEach(() => {
diff --git a/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js b/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js
index e731ad8695e..85b51d08f88 100644
--- a/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js
+++ b/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js
@@ -207,7 +207,8 @@ describe('Pipeline editor branch switcher', () => {
it('updates session history when selecting a different branch', async () => {
const branch = findDropdownItems().at(1);
- await branch.vm.$emit('click');
+ branch.vm.$emit('click');
+ await waitForPromises();
expect(window.history.pushState).toHaveBeenCalled();
expect(window.history.pushState.mock.calls[0][2]).toContain(`?branch_name=${branch.text()}`);
@@ -215,7 +216,8 @@ describe('Pipeline editor branch switcher', () => {
it('does not update session history when selecting current branch', async () => {
const branch = findDropdownItems().at(0);
- await branch.vm.$emit('click');
+ branch.vm.$emit('click');
+ await waitForPromises();
expect(branch.text()).toBe(mockDefaultBranch);
expect(window.history.pushState).not.toHaveBeenCalled();
@@ -227,7 +229,8 @@ describe('Pipeline editor branch switcher', () => {
expect(branch.text()).not.toBe(mockDefaultBranch);
expect(wrapper.emitted('refetchContent')).toBeUndefined();
- await branch.vm.$emit('click');
+ branch.vm.$emit('click');
+ await waitForPromises();
expect(wrapper.emitted('refetchContent')).toBeDefined();
expect(wrapper.emitted('refetchContent')).toHaveLength(1);
@@ -239,10 +242,20 @@ describe('Pipeline editor branch switcher', () => {
expect(branch.text()).toBe(mockDefaultBranch);
expect(wrapper.emitted('refetchContent')).toBeUndefined();
- await branch.vm.$emit('click');
+ branch.vm.$emit('click');
+ await waitForPromises();
expect(wrapper.emitted('refetchContent')).toBeUndefined();
});
+
+ it('emits the updateCommitSha event when selecting a different branch', async () => {
+ expect(wrapper.emitted('updateCommitSha')).toBeUndefined();
+
+ const branch = findDropdownItems().at(1);
+ branch.vm.$emit('click');
+
+ expect(wrapper.emitted('updateCommitSha')).toHaveLength(1);
+ });
});
describe('when searching', () => {
diff --git a/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js b/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js
index 8def83d578b..3becf82ed6e 100644
--- a/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js
+++ b/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js
@@ -6,7 +6,7 @@ import EditorTab from '~/pipeline_editor/components/ui/editor_tab.vue';
const mockContent1 = 'MOCK CONTENT 1';
const mockContent2 = 'MOCK CONTENT 2';
-const MockEditorLite = {
+const MockSourceEditor = {
template: '<div>EDITOR</div>',
};
@@ -48,12 +48,12 @@ describe('~/pipeline_editor/components/ui/editor_tab.vue', () => {
wrapper = mount(EditorTab, {
propsData: props,
slots: {
- default: MockEditorLite,
+ default: MockSourceEditor,
},
});
};
- const findSlotComponent = () => wrapper.findComponent(MockEditorLite);
+ const findSlotComponent = () => wrapper.findComponent(MockSourceEditor);
const findAlert = () => wrapper.findComponent(GlAlert);
beforeEach(() => {
diff --git a/spec/frontend/pipeline_editor/graphql/resolvers_spec.js b/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
index d39c0d80296..76ae96c623a 100644
--- a/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
+++ b/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
@@ -1,15 +1,8 @@
import MockAdapter from 'axios-mock-adapter';
-import Api from '~/api';
import axios from '~/lib/utils/axios_utils';
import httpStatus from '~/lib/utils/http_status';
import { resolvers } from '~/pipeline_editor/graphql/resolvers';
-import {
- mockCiConfigPath,
- mockCiYml,
- mockDefaultBranch,
- mockLintResponse,
- mockProjectFullPath,
-} from '../mock_data';
+import { mockLintResponse } from '../mock_data';
jest.mock('~/api', () => {
return {
@@ -18,36 +11,6 @@ jest.mock('~/api', () => {
});
describe('~/pipeline_editor/graphql/resolvers', () => {
- describe('Query', () => {
- describe('blobContent', () => {
- beforeEach(() => {
- Api.getRawFile.mockResolvedValue({
- data: mockCiYml,
- });
- });
-
- afterEach(() => {
- Api.getRawFile.mockReset();
- });
-
- it('resolves lint data with type names', async () => {
- const result = resolvers.Query.blobContent(null, {
- projectPath: mockProjectFullPath,
- path: mockCiConfigPath,
- ref: mockDefaultBranch,
- });
-
- expect(Api.getRawFile).toHaveBeenCalledWith(mockProjectFullPath, mockCiConfigPath, {
- ref: mockDefaultBranch,
- });
-
- // eslint-disable-next-line no-underscore-dangle
- expect(result.__typename).toBe('BlobContent');
- await expect(result.rawData).resolves.toBe(mockCiYml);
- });
- });
- });
-
describe('Mutation', () => {
describe('lintCI', () => {
let mock;
diff --git a/spec/frontend/pipeline_editor/mock_data.js b/spec/frontend/pipeline_editor/mock_data.js
index cadcdf6ae2e..4d4a8c21d78 100644
--- a/spec/frontend/pipeline_editor/mock_data.js
+++ b/spec/frontend/pipeline_editor/mock_data.js
@@ -35,6 +35,23 @@ job_build:
- echo "build"
needs: ["job_test_2"]
`;
+export const mockBlobContentQueryResponse = {
+ data: {
+ project: { repository: { blobs: { nodes: [{ rawBlob: mockCiYml }] } } },
+ },
+};
+
+export const mockBlobContentQueryResponseNoCiFile = {
+ data: {
+ project: { repository: { blobs: { nodes: [] } } },
+ },
+};
+
+export const mockBlobContentQueryResponseEmptyCiFile = {
+ data: {
+ project: { repository: { blobs: { nodes: [{ rawBlob: '' }] } } },
+ },
+};
const mockJobFields = {
beforeScript: [],
@@ -139,6 +156,35 @@ export const mergeUnwrappedCiConfig = (mergedConfig) => {
};
};
+export const mockNewCommitShaResults = {
+ data: {
+ project: {
+ pipelines: {
+ nodes: [
+ {
+ id: 'gid://gitlab/Ci::Pipeline/1',
+ sha: 'd0d56d363d8a3f67a8ab9fc00207d468f30032ca',
+ path: `/${mockProjectFullPath}/-/pipelines/488`,
+ commitPath: `/${mockProjectFullPath}/-/commit/d0d56d363d8a3f67a8ab9fc00207d468f30032ca`,
+ },
+ {
+ id: 'gid://gitlab/Ci::Pipeline/2',
+ sha: 'fcab2ece40b26f428dfa3aa288b12c3c5bdb06aa',
+ path: `/${mockProjectFullPath}/-/pipelines/487`,
+ commitPath: `/${mockProjectFullPath}/-/commit/fcab2ece40b26f428dfa3aa288b12c3c5bdb06aa`,
+ },
+ {
+ id: 'gid://gitlab/Ci::Pipeline/3',
+ sha: '6c16b17c7f94a438ae19a96c285bb49e3c632cf4',
+ path: `/${mockProjectFullPath}/-/pipelines/433`,
+ commitPath: `/${mockProjectFullPath}/-/commit/6c16b17c7f94a438ae19a96c285bb49e3c632cf4`,
+ },
+ ],
+ },
+ },
+ },
+};
+
export const mockProjectBranches = {
data: {
project: {
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
index c88fe159c0d..b0d1a69ee56 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
@@ -3,7 +3,6 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import httpStatusCodes from '~/lib/utils/http_status';
import CommitForm from '~/pipeline_editor/components/commit/commit_form.vue';
import TextEditor from '~/pipeline_editor/components/editor/text_editor.vue';
@@ -11,21 +10,30 @@ import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tab
import PipelineEditorEmptyState from '~/pipeline_editor/components/ui/pipeline_editor_empty_state.vue';
import PipelineEditorMessages from '~/pipeline_editor/components/ui/pipeline_editor_messages.vue';
import { COMMIT_SUCCESS, COMMIT_FAILURE } from '~/pipeline_editor/constants';
+import getBlobContent from '~/pipeline_editor/graphql/queries/blob_content.graphql';
import getCiConfigData from '~/pipeline_editor/graphql/queries/ci_config.graphql';
+import getPipelineQuery from '~/pipeline_editor/graphql/queries/client/pipeline.graphql';
+import getTemplate from '~/pipeline_editor/graphql/queries/get_starter_template.query.graphql';
+import getLatestCommitShaQuery from '~/pipeline_editor/graphql/queries/latest_commit_sha.query.graphql';
import PipelineEditorApp from '~/pipeline_editor/pipeline_editor_app.vue';
import PipelineEditorHome from '~/pipeline_editor/pipeline_editor_home.vue';
import {
mockCiConfigPath,
mockCiConfigQueryResponse,
+ mockBlobContentQueryResponse,
+ mockBlobContentQueryResponseEmptyCiFile,
+ mockBlobContentQueryResponseNoCiFile,
mockCiYml,
+ mockCommitSha,
mockDefaultBranch,
mockProjectFullPath,
+ mockNewCommitShaResults,
} from './mock_data';
const localVue = createLocalVue();
localVue.use(VueApollo);
-const MockEditorLite = {
+const MockSourceEditor = {
template: '<div/>',
};
@@ -44,6 +52,10 @@ describe('Pipeline editor app component', () => {
let mockApollo;
let mockBlobContentData;
let mockCiConfigData;
+ let mockGetTemplate;
+ let mockUpdateCommitSha;
+ let mockLatestCommitShaQuery;
+ let mockPipelineQuery;
const createComponent = ({ blobLoading = false, options = {}, provide = {} } = {}) => {
wrapper = shallowMount(PipelineEditorApp, {
@@ -55,7 +67,7 @@ describe('Pipeline editor app component', () => {
PipelineEditorHome,
PipelineEditorTabs,
PipelineEditorMessages,
- EditorLite: MockEditorLite,
+ SourceEditor: MockSourceEditor,
PipelineEditorEmptyState,
},
mocks: {
@@ -75,16 +87,23 @@ describe('Pipeline editor app component', () => {
};
const createComponentWithApollo = async ({ props = {}, provide = {} } = {}) => {
- const handlers = [[getCiConfigData, mockCiConfigData]];
+ const handlers = [
+ [getBlobContent, mockBlobContentData],
+ [getCiConfigData, mockCiConfigData],
+ [getTemplate, mockGetTemplate],
+ [getLatestCommitShaQuery, mockLatestCommitShaQuery],
+ [getPipelineQuery, mockPipelineQuery],
+ ];
+
const resolvers = {
Query: {
- blobContent() {
- return {
- __typename: 'BlobContent',
- rawData: mockBlobContentData(),
- };
+ commitSha() {
+ return mockCommitSha;
},
},
+ Mutation: {
+ updateCommitSha: mockUpdateCommitSha,
+ },
};
mockApollo = createMockApollo(handlers, resolvers);
@@ -116,6 +135,10 @@ describe('Pipeline editor app component', () => {
beforeEach(() => {
mockBlobContentData = jest.fn();
mockCiConfigData = jest.fn();
+ mockGetTemplate = jest.fn();
+ mockUpdateCommitSha = jest.fn();
+ mockLatestCommitShaQuery = jest.fn();
+ mockPipelineQuery = jest.fn();
});
afterEach(() => {
@@ -133,7 +156,7 @@ describe('Pipeline editor app component', () => {
describe('when queries are called', () => {
beforeEach(() => {
- mockBlobContentData.mockResolvedValue(mockCiYml);
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
mockCiConfigData.mockResolvedValue(mockCiConfigQueryResponse);
});
@@ -154,39 +177,19 @@ describe('Pipeline editor app component', () => {
expect(mockCiConfigData).toHaveBeenCalledWith({
content: mockCiYml,
projectPath: mockProjectFullPath,
+ sha: mockCommitSha,
});
});
});
describe('when no CI config file exists', () => {
- describe('in a project without a repository', () => {
- it('shows an empty state and does not show editor home component', async () => {
- mockBlobContentData.mockRejectedValueOnce({
- response: {
- status: httpStatusCodes.BAD_REQUEST,
- },
- });
- await createComponentWithApollo();
-
- expect(findEmptyState().exists()).toBe(true);
- expect(findAlert().exists()).toBe(false);
- expect(findEditorHome().exists()).toBe(false);
- });
- });
-
- describe('in a project with a repository', () => {
- it('shows an empty state and does not show editor home component', async () => {
- mockBlobContentData.mockRejectedValueOnce({
- response: {
- status: httpStatusCodes.NOT_FOUND,
- },
- });
- await createComponentWithApollo();
+ it('shows an empty state and does not show editor home component', async () => {
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponseNoCiFile);
+ await createComponentWithApollo();
- expect(findEmptyState().exists()).toBe(true);
- expect(findAlert().exists()).toBe(false);
- expect(findEditorHome().exists()).toBe(false);
- });
+ expect(findEmptyState().exists()).toBe(true);
+ expect(findAlert().exists()).toBe(false);
+ expect(findEditorHome().exists()).toBe(false);
});
describe('because of a fetching error', () => {
@@ -204,13 +207,28 @@ describe('Pipeline editor app component', () => {
});
});
+ describe('with an empty CI config file', () => {
+ describe('with empty state feature flag on', () => {
+ it('does not show the empty screen state', async () => {
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponseEmptyCiFile);
+
+ await createComponentWithApollo({
+ provide: {
+ glFeatures: {
+ pipelineEditorEmptyStateAction: true,
+ },
+ },
+ });
+
+ expect(findEmptyState().exists()).toBe(false);
+ expect(findTextEditor().exists()).toBe(true);
+ });
+ });
+ });
+
describe('when landing on the empty state with feature flag on', () => {
it('user can click on CTA button and see an empty editor', async () => {
- mockBlobContentData.mockRejectedValueOnce({
- response: {
- status: httpStatusCodes.NOT_FOUND,
- },
- });
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponseNoCiFile);
await createComponentWithApollo({
provide: {
@@ -315,21 +333,83 @@ describe('Pipeline editor app component', () => {
});
it('hides start screen when refetch fetches CI file', async () => {
- mockBlobContentData.mockRejectedValue({
- response: {
- status: httpStatusCodes.NOT_FOUND,
- },
- });
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponseNoCiFile);
await createComponentWithApollo();
expect(findEmptyState().exists()).toBe(true);
expect(findEditorHome().exists()).toBe(false);
- mockBlobContentData.mockResolvedValue(mockCiYml);
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
await wrapper.vm.$apollo.queries.initialCiFileContent.refetch();
expect(findEmptyState().exists()).toBe(false);
expect(findEditorHome().exists()).toBe(true);
});
});
+
+ describe('when a template parameter is present in the URL', () => {
+ const { location } = window;
+
+ beforeEach(() => {
+ delete window.location;
+ window.location = new URL('https://localhost?template=Android');
+ });
+
+ afterEach(() => {
+ window.location = location;
+ });
+
+ it('renders the given template', async () => {
+ await createComponentWithApollo();
+
+ expect(mockGetTemplate).toHaveBeenCalledWith({
+ projectPath: mockProjectFullPath,
+ templateName: 'Android',
+ });
+
+ expect(findEmptyState().exists()).toBe(false);
+ expect(findTextEditor().exists()).toBe(true);
+ });
+ });
+
+ describe('when updating commit sha', () => {
+ const newCommitSha = mockNewCommitShaResults.data.project.pipelines.nodes[0].sha;
+
+ beforeEach(async () => {
+ mockUpdateCommitSha.mockResolvedValue(newCommitSha);
+ mockLatestCommitShaQuery.mockResolvedValue(mockNewCommitShaResults);
+ await createComponentWithApollo();
+ });
+
+ it('fetches updated commit sha for the new branch', async () => {
+ expect(mockLatestCommitShaQuery).not.toHaveBeenCalled();
+
+ wrapper
+ .findComponent(PipelineEditorHome)
+ .vm.$emit('updateCommitSha', { newBranch: 'new-branch' });
+ await waitForPromises();
+
+ expect(mockLatestCommitShaQuery).toHaveBeenCalledWith({
+ projectPath: mockProjectFullPath,
+ ref: 'new-branch',
+ });
+ });
+
+ it('updates commit sha with the newly fetched commit sha', async () => {
+ expect(mockUpdateCommitSha).not.toHaveBeenCalled();
+
+ wrapper
+ .findComponent(PipelineEditorHome)
+ .vm.$emit('updateCommitSha', { newBranch: 'new-branch' });
+ await waitForPromises();
+
+ expect(mockUpdateCommitSha).toHaveBeenCalled();
+ expect(mockUpdateCommitSha).toHaveBeenCalledWith(
+ expect.any(Object),
+ { commitSha: mockNewCommitShaResults.data.project.pipelines.nodes[0].sha },
+ expect.any(Object),
+ expect.any(Object),
+ );
+ });
+ });
});
diff --git a/spec/frontend/pipelines/empty_state_spec.js b/spec/frontend/pipelines/empty_state_spec.js
index 912bc7a104a..1af3065477d 100644
--- a/spec/frontend/pipelines/empty_state_spec.js
+++ b/spec/frontend/pipelines/empty_state_spec.js
@@ -1,14 +1,21 @@
+import '~/commons';
import { mount } from '@vue/test-utils';
import EmptyState from '~/pipelines/components/pipelines_list/empty_state.vue';
+import PipelinesCiTemplates from '~/pipelines/components/pipelines_list/pipelines_ci_templates.vue';
describe('Pipelines Empty State', () => {
let wrapper;
const findIllustration = () => wrapper.find('img');
const findButton = () => wrapper.find('a');
+ const pipelinesCiTemplates = () => wrapper.findComponent(PipelinesCiTemplates);
const createWrapper = (props = {}) => {
wrapper = mount(EmptyState, {
+ provide: {
+ pipelineEditorPath: '',
+ suggestedCiTemplates: [],
+ },
propsData: {
emptyStateSvgPath: 'foo.svg',
canSetCi: true,
@@ -27,27 +34,8 @@ describe('Pipelines Empty State', () => {
wrapper = null;
});
- it('should render empty state SVG', () => {
- expect(findIllustration().attributes('src')).toBe('foo.svg');
- });
-
- it('should render empty state header', () => {
- expect(wrapper.text()).toContain('Build with confidence');
- });
-
- it('should render empty state information', () => {
- expect(wrapper.text()).toContain(
- 'GitLab CI/CD can automatically build, test, and deploy your code. Let GitLab take care of time',
- 'consuming tasks, so you can spend more time creating',
- );
- });
-
- it('should render button with help path', () => {
- expect(findButton().attributes('href')).toBe('/help/ci/quick_start/index.md');
- });
-
- it('should render button text', () => {
- expect(findButton().text()).toBe('Get started with CI/CD');
+ it('should render the CI/CD templates', () => {
+ expect(pipelinesCiTemplates()).toExist();
});
});
diff --git a/spec/frontend/pipelines/graph/mock_data.js b/spec/frontend/pipelines/graph/mock_data.js
index 28fe3b67e7b..3812483766d 100644
--- a/spec/frontend/pipelines/graph/mock_data.js
+++ b/spec/frontend/pipelines/graph/mock_data.js
@@ -12,6 +12,10 @@ export const mockPipelineResponse = {
usesNeeds: true,
downstream: null,
upstream: null,
+ userPermissions: {
+ __typename: 'PipelinePermissions',
+ updatePipeline: true,
+ },
stages: {
__typename: 'CiStageConnection',
nodes: [
@@ -573,6 +577,10 @@ export const wrappedPipelineReturn = {
iid: '38',
complete: true,
usesNeeds: true,
+ userPermissions: {
+ __typename: 'PipelinePermissions',
+ updatePipeline: true,
+ },
downstream: {
__typename: 'PipelineConnection',
nodes: [],
diff --git a/spec/frontend/pipelines/graph/stage_column_component_spec.js b/spec/frontend/pipelines/graph/stage_column_component_spec.js
index f9f6c96a1a6..99e8ea9d0a4 100644
--- a/spec/frontend/pipelines/graph/stage_column_component_spec.js
+++ b/spec/frontend/pipelines/graph/stage_column_component_spec.js
@@ -31,6 +31,9 @@ const defaultProps = {
name: 'Fish',
groups: mockGroups,
pipelineId: 159,
+ userPermissions: {
+ updatePipeline: true,
+ },
};
describe('stage column component', () => {
@@ -53,7 +56,6 @@ describe('stage column component', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
describe('when mounted', () => {
@@ -152,36 +154,52 @@ describe('stage column component', () => {
});
describe('with action', () => {
- beforeEach(() => {
+ const defaults = {
+ groups: [
+ {
+ id: 4259,
+ name: '<img src=x onerror=alert(document.domain)>',
+ status: {
+ icon: 'status_success',
+ label: 'success',
+ tooltip: '<img src=x onerror=alert(document.domain)>',
+ },
+ jobs: [mockJob],
+ },
+ ],
+ title: 'test',
+ hasTriggeredBy: false,
+ action: {
+ icon: 'play',
+ title: 'Play all',
+ path: 'action',
+ },
+ };
+
+ it('renders action button if permissions are permitted', () => {
createComponent({
method: mount,
props: {
- groups: [
- {
- id: 4259,
- name: '<img src=x onerror=alert(document.domain)>',
- status: {
- icon: 'status_success',
- label: 'success',
- tooltip: '<img src=x onerror=alert(document.domain)>',
- },
- jobs: [mockJob],
- },
- ],
- title: 'test',
- hasTriggeredBy: false,
- action: {
- icon: 'play',
- title: 'Play all',
- path: 'action',
- },
+ ...defaults,
},
});
- });
- it('renders action button', () => {
expect(findActionComponent().exists()).toBe(true);
});
+
+ it('does not render action button if permissions are not permitted', () => {
+ createComponent({
+ method: mount,
+ props: {
+ ...defaults,
+ userPermissions: {
+ updatePipeline: false,
+ },
+ },
+ });
+
+ expect(findActionComponent().exists()).toBe(false);
+ });
});
describe('without action', () => {
diff --git a/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap b/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap
index 16c28791514..82206e907ff 100644
--- a/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap
+++ b/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap
@@ -2,29 +2,29 @@
exports[`Links Inner component with a large number of needs matches snapshot and has expected path 1`] = `
"<div class=\\"gl-display-flex gl-relative\\" totalgroups=\\"10\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
- <path d=\\"M202,118L42,118C72,118,72,138,102,138\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- <path d=\\"M202,118L52,118C82,118,82,148,112,148\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- <path d=\\"M222,138L62,138C92,138,92,158,122,158\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- <path d=\\"M212,128L72,128C102,128,102,168,132,168\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- <path d=\\"M232,148L82,148C112,148,112,178,142,178\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ <path d=\\"M202,118C52,118,52,138,102,138\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ <path d=\\"M202,118C62,118,62,148,112,148\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ <path d=\\"M222,138C72,138,72,158,122,158\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ <path d=\\"M212,128C82,128,82,168,132,168\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ <path d=\\"M232,148C92,148,92,178,142,178\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
</svg> </div>"
`;
exports[`Links Inner component with a parallel need matches snapshot and has expected path 1`] = `
"<div class=\\"gl-display-flex gl-relative\\" totalgroups=\\"10\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
- <path d=\\"M192,108L22,108C52,108,52,118,82,118\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ <path d=\\"M192,108C32,108,32,118,82,118\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
</svg> </div>"
`;
exports[`Links Inner component with one need matches snapshot and has expected path 1`] = `
"<div class=\\"gl-display-flex gl-relative\\" totalgroups=\\"10\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
- <path d=\\"M202,118L42,118C72,118,72,138,102,138\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ <path d=\\"M202,118C52,118,52,138,102,138\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
</svg> </div>"
`;
exports[`Links Inner component with same stage needs matches snapshot and has expected path 1`] = `
"<div class=\\"gl-display-flex gl-relative\\" totalgroups=\\"10\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
- <path d=\\"M192,108L22,108C52,108,52,118,82,118\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- <path d=\\"M202,118L32,118C62,118,62,128,92,128\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ <path d=\\"M192,108C32,108,32,118,82,118\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
+ <path d=\\"M202,118C42,118,42,128,92,128\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
</svg> </div>"
`;
diff --git a/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js b/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
index 7bac7036f46..1b89e322d31 100644
--- a/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
+++ b/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
@@ -6,7 +6,7 @@ import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
import JobPill from '~/pipelines/components/pipeline_graph/job_pill.vue';
import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
-import StagePill from '~/pipelines/components/pipeline_graph/stage_pill.vue';
+import StageName from '~/pipelines/components/pipeline_graph/stage_name.vue';
import { pipelineData, singleStageData } from './mock_data';
describe('pipeline graph component', () => {
@@ -35,11 +35,9 @@ describe('pipeline graph component', () => {
const findAlert = () => wrapper.findComponent(GlAlert);
const findAllJobPills = () => wrapper.findAll(JobPill);
- const findAllStageBackgroundElements = () => wrapper.findAll('[data-testid="stage-background"]');
- const findAllStagePills = () => wrapper.findAllComponents(StagePill);
+ const findAllStageNames = () => wrapper.findAllComponents(StageName);
const findLinksLayer = () => wrapper.findComponent(LinksLayer);
const findPipelineGraph = () => wrapper.find('[data-testid="graph-container"]');
- const findStageBackgroundElementAt = (index) => findAllStageBackgroundElements().at(index);
afterEach(() => {
wrapper.destroy();
@@ -67,10 +65,10 @@ describe('pipeline graph component', () => {
wrapper = createComponent({ pipelineData: singleStageData });
});
- it('renders the right number of stage pills', () => {
+ it('renders the right number of stage titles', () => {
const expectedStagesLength = singleStageData.stages.length;
- expect(findAllStagePills()).toHaveLength(expectedStagesLength);
+ expect(findAllStageNames()).toHaveLength(expectedStagesLength);
});
it('renders the right number of job pills', () => {
@@ -81,20 +79,6 @@ describe('pipeline graph component', () => {
expect(findAllJobPills()).toHaveLength(expectedJobsLength);
});
-
- describe('rounds corner', () => {
- it.each`
- cssClass | expectedState
- ${'gl-rounded-bottom-left-6'} | ${true}
- ${'gl-rounded-top-left-6'} | ${true}
- ${'gl-rounded-top-right-6'} | ${true}
- ${'gl-rounded-bottom-right-6'} | ${true}
- `('$cssClass should be $expectedState on the only element', ({ cssClass, expectedState }) => {
- const classes = findStageBackgroundElementAt(0).classes();
-
- expect(classes.includes(cssClass)).toBe(expectedState);
- });
- });
});
describe('with multiple stages and jobs', () => {
@@ -102,10 +86,10 @@ describe('pipeline graph component', () => {
wrapper = createComponent();
});
- it('renders the right number of stage pills', () => {
+ it('renders the right number of stage titles', () => {
const expectedStagesLength = pipelineData.stages.length;
- expect(findAllStagePills()).toHaveLength(expectedStagesLength);
+ expect(findAllStageNames()).toHaveLength(expectedStagesLength);
});
it('renders the right number of job pills', () => {
@@ -116,34 +100,5 @@ describe('pipeline graph component', () => {
expect(findAllJobPills()).toHaveLength(expectedJobsLength);
});
-
- describe('rounds corner', () => {
- it.each`
- cssClass | expectedState
- ${'gl-rounded-bottom-left-6'} | ${true}
- ${'gl-rounded-top-left-6'} | ${true}
- ${'gl-rounded-top-right-6'} | ${false}
- ${'gl-rounded-bottom-right-6'} | ${false}
- `(
- '$cssClass should be $expectedState on the first element',
- ({ cssClass, expectedState }) => {
- const classes = findStageBackgroundElementAt(0).classes();
-
- expect(classes.includes(cssClass)).toBe(expectedState);
- },
- );
-
- it.each`
- cssClass | expectedState
- ${'gl-rounded-bottom-left-6'} | ${false}
- ${'gl-rounded-top-left-6'} | ${false}
- ${'gl-rounded-top-right-6'} | ${true}
- ${'gl-rounded-bottom-right-6'} | ${true}
- `('$cssClass should be $expectedState on the last element', ({ cssClass, expectedState }) => {
- const classes = findStageBackgroundElementAt(pipelineData.stages.length - 1).classes();
-
- expect(classes.includes(cssClass)).toBe(expectedState);
- });
- });
});
});
diff --git a/spec/frontend/pipelines/pipelines_ci_templates_spec.js b/spec/frontend/pipelines/pipelines_ci_templates_spec.js
index 0c37bf2d84a..db66b675fb9 100644
--- a/spec/frontend/pipelines/pipelines_ci_templates_spec.js
+++ b/spec/frontend/pipelines/pipelines_ci_templates_spec.js
@@ -1,30 +1,25 @@
+import '~/commons';
import { shallowMount } from '@vue/test-utils';
-import ExperimentTracking from '~/experimentation/experiment_tracking';
+import { mockTracking } from 'helpers/tracking_helper';
import PipelinesCiTemplate from '~/pipelines/components/pipelines_list/pipelines_ci_templates.vue';
-const addCiYmlPath = "/-/new/main?commit_message='Add%20.gitlab-ci.yml'";
+const pipelineEditorPath = '/-/ci/editor';
const suggestedCiTemplates = [
{ name: 'Android', logo: '/assets/illustrations/logos/android.svg' },
{ name: 'Bash', logo: '/assets/illustrations/logos/bash.svg' },
{ name: 'C++', logo: '/assets/illustrations/logos/c_plus_plus.svg' },
];
-jest.mock('~/experimentation/experiment_tracking');
-
describe('Pipelines CI Templates', () => {
let wrapper;
-
- const GlEmoji = { template: '<img/>' };
+ let trackingSpy;
const createWrapper = () => {
return shallowMount(PipelinesCiTemplate, {
provide: {
- addCiYmlPath,
+ pipelineEditorPath,
suggestedCiTemplates,
},
- stubs: {
- GlEmoji,
- },
});
};
@@ -44,9 +39,9 @@ describe('Pipelines CI Templates', () => {
wrapper = createWrapper();
});
- it('links to the hello world template', () => {
+ it('links to the getting started template', () => {
expect(findTestTemplateLinks().at(0).attributes('href')).toBe(
- addCiYmlPath.concat('&template=Hello-World'),
+ pipelineEditorPath.concat('?template=Getting-Started'),
);
});
});
@@ -68,7 +63,7 @@ describe('Pipelines CI Templates', () => {
it('links to the correct template', () => {
expect(findTemplateLinks().at(0).attributes('href')).toBe(
- addCiYmlPath.concat('&template=Android'),
+ pipelineEditorPath.concat('?template=Android'),
);
});
@@ -88,24 +83,25 @@ describe('Pipelines CI Templates', () => {
describe('tracking', () => {
beforeEach(() => {
wrapper = createWrapper();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
});
it('sends an event when template is clicked', () => {
findTemplateLinks().at(0).vm.$emit('click');
- expect(ExperimentTracking).toHaveBeenCalledWith('pipeline_empty_state_templates', {
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'template_clicked', {
label: 'Android',
});
- expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith('template_clicked');
});
- it('sends an event when Hello-World template is clicked', () => {
+ it('sends an event when Getting-Started template is clicked', () => {
findTestTemplateLinks().at(0).vm.$emit('click');
- expect(ExperimentTracking).toHaveBeenCalledWith('pipeline_empty_state_templates', {
- label: 'Hello-World',
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'template_clicked', {
+ label: 'Getting-Started',
});
- expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith('template_clicked');
});
});
});
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index 874ecbccf82..2166961cedd 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -12,6 +12,7 @@ import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import NavigationControls from '~/pipelines/components/pipelines_list/nav_controls.vue';
import PipelinesComponent from '~/pipelines/components/pipelines_list/pipelines.vue';
+import PipelinesCiTemplates from '~/pipelines/components/pipelines_list/pipelines_ci_templates.vue';
import PipelinesTableComponent from '~/pipelines/components/pipelines_list/pipelines_table.vue';
import { RAW_TEXT_WARNING } from '~/pipelines/constants';
import Store from '~/pipelines/stores/pipelines_store';
@@ -82,6 +83,10 @@ describe('Pipelines', () => {
const createComponent = (props = defaultProps) => {
wrapper = extendedWrapper(
mount(PipelinesComponent, {
+ provide: {
+ pipelineEditorPath: '',
+ suggestedCiTemplates: [],
+ },
propsData: {
store: new Store(),
projectId: mockProjectId,
@@ -551,52 +556,74 @@ describe('Pipelines', () => {
await waitForPromises();
});
- it('renders empty state', () => {
- expect(findEmptyState().text()).toContain('Build with confidence');
- expect(findEmptyState().text()).toContain(
- 'GitLab CI/CD can automatically build, test, and deploy your code.',
- );
-
- expect(findEmptyState().find(GlButton).text()).toBe('Get started with CI/CD');
- expect(findEmptyState().find(GlButton).attributes('href')).toBe(
- '/help/ci/quick_start/index.md',
- );
+ it('renders the CI/CD templates', () => {
+ expect(wrapper.find(PipelinesCiTemplates)).toExist();
});
describe('when the code_quality_walkthrough experiment is active', () => {
beforeAll(() => {
getExperimentData.mockImplementation((name) => name === 'code_quality_walkthrough');
- getExperimentVariant.mockReturnValue('candidate');
});
- it('renders another CTA button', () => {
- expect(findEmptyState().findComponent(GlButton).text()).toBe('Add a code quality job');
- expect(findEmptyState().findComponent(GlButton).attributes('href')).toBe(
- paths.codeQualityPagePath,
- );
+ describe('the control state', () => {
+ beforeAll(() => {
+ getExperimentVariant.mockReturnValue('control');
+ });
+
+ it('renders the CI/CD templates', () => {
+ expect(wrapper.find(PipelinesCiTemplates)).toExist();
+ });
+ });
+
+ describe('the candidate state', () => {
+ beforeAll(() => {
+ getExperimentVariant.mockReturnValue('candidate');
+ });
+
+ it('renders another CTA button', () => {
+ expect(findEmptyState().findComponent(GlButton).text()).toBe('Add a code quality job');
+ expect(findEmptyState().findComponent(GlButton).attributes('href')).toBe(
+ paths.codeQualityPagePath,
+ );
+ });
});
});
describe('when the ci_runner_templates experiment is active', () => {
beforeAll(() => {
getExperimentData.mockImplementation((name) => name === 'ci_runner_templates');
- getExperimentVariant.mockReturnValue('candidate');
});
- it('renders two buttons', () => {
- expect(findEmptyState().findAllComponents(GlButton).length).toBe(2);
- expect(findEmptyState().findAllComponents(GlButton).at(0).text()).toBe(
- 'Install GitLab Runners',
- );
- expect(findEmptyState().findAllComponents(GlButton).at(0).attributes('href')).toBe(
- paths.ciRunnerSettingsPath,
- );
- expect(findEmptyState().findAllComponents(GlButton).at(1).text()).toBe(
- 'Learn about Runners',
- );
- expect(findEmptyState().findAllComponents(GlButton).at(1).attributes('href')).toBe(
- '/help/ci/quick_start/index.md',
- );
+ describe('the control state', () => {
+ beforeAll(() => {
+ getExperimentVariant.mockReturnValue('control');
+ });
+
+ it('renders the CI/CD templates', () => {
+ expect(wrapper.find(PipelinesCiTemplates)).toExist();
+ });
+ });
+
+ describe('the candidate state', () => {
+ beforeAll(() => {
+ getExperimentVariant.mockReturnValue('candidate');
+ });
+
+ it('renders two buttons', () => {
+ expect(findEmptyState().findAllComponents(GlButton).length).toBe(2);
+ expect(findEmptyState().findAllComponents(GlButton).at(0).text()).toBe(
+ 'Install GitLab Runners',
+ );
+ expect(findEmptyState().findAllComponents(GlButton).at(0).attributes('href')).toBe(
+ paths.ciRunnerSettingsPath,
+ );
+ expect(findEmptyState().findAllComponents(GlButton).at(1).text()).toBe(
+ 'Learn about Runners',
+ );
+ expect(findEmptyState().findAllComponents(GlButton).at(1).attributes('href')).toBe(
+ '/help/ci/quick_start/index.md',
+ );
+ });
});
});
diff --git a/spec/frontend/profile/preferences/components/profile_preferences_spec.js b/spec/frontend/profile/preferences/components/profile_preferences_spec.js
index 9e6f5594d26..f1172a73d36 100644
--- a/spec/frontend/profile/preferences/components/profile_preferences_spec.js
+++ b/spec/frontend/profile/preferences/components/profile_preferences_spec.js
@@ -2,6 +2,7 @@ import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import createFlash from '~/flash';
import IntegrationView from '~/profile/preferences/components/integration_view.vue';
import ProfilePreferences from '~/profile/preferences/components/profile_preferences.vue';
import { i18n } from '~/profile/preferences/constants';
@@ -15,6 +16,7 @@ import {
lightModeThemeId2,
} from '../mock_data';
+jest.mock('~/flash');
const expectedUrl = '/foo';
describe('ProfilePreferences component', () => {
@@ -54,10 +56,6 @@ describe('ProfilePreferences component', () => {
return wrapper.findComponent(GlButton);
}
- function findFlashError() {
- return document.querySelector('.flash-container .flash-text');
- }
-
function createThemeInput(themeId = lightModeThemeId1) {
const input = document.createElement('input');
input.setAttribute('name', 'user[theme_id]');
@@ -82,10 +80,6 @@ describe('ProfilePreferences component', () => {
document.body.classList.add('content-wrapper');
}
- beforeEach(() => {
- setFixtures('<div class="flash-container"></div>');
- });
-
afterEach(() => {
wrapper.destroy();
wrapper = null;
@@ -152,7 +146,7 @@ describe('ProfilePreferences component', () => {
const successEvent = new CustomEvent('ajax:success');
form.dispatchEvent(successEvent);
- expect(findFlashError().innerText.trim()).toEqual(i18n.defaultSuccess);
+ expect(createFlash).toHaveBeenCalledWith({ message: i18n.defaultSuccess, type: 'notice' });
});
it('displays the custom success message', () => {
@@ -160,14 +154,14 @@ describe('ProfilePreferences component', () => {
const successEvent = new CustomEvent('ajax:success', { detail: [{ message }] });
form.dispatchEvent(successEvent);
- expect(findFlashError().innerText.trim()).toEqual(message);
+ expect(createFlash).toHaveBeenCalledWith({ message, type: 'notice' });
});
it('displays the default error message', () => {
const errorEvent = new CustomEvent('ajax:error');
form.dispatchEvent(errorEvent);
- expect(findFlashError().innerText.trim()).toEqual(i18n.defaultError);
+ expect(createFlash).toHaveBeenCalledWith({ message: i18n.defaultError, type: 'alert' });
});
it('displays the custom error message', () => {
@@ -175,7 +169,7 @@ describe('ProfilePreferences component', () => {
const errorEvent = new CustomEvent('ajax:error', { detail: [{ message }] });
form.dispatchEvent(errorEvent);
- expect(findFlashError().innerText.trim()).toEqual(message);
+ expect(createFlash).toHaveBeenCalledWith({ message, type: 'alert' });
});
});
diff --git a/spec/frontend/projects/commit/components/branches_dropdown_spec.js b/spec/frontend/projects/commit/components/branches_dropdown_spec.js
index ab84c3768d0..30556cdeae1 100644
--- a/spec/frontend/projects/commit/components/branches_dropdown_spec.js
+++ b/spec/frontend/projects/commit/components/branches_dropdown_spec.js
@@ -3,6 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import BranchesDropdown from '~/projects/commit/components/branches_dropdown.vue';
Vue.use(Vuex);
@@ -82,7 +83,7 @@ describe('BranchesDropdown', () => {
expect(findSearchBoxByType().exists()).toBe(true);
expect(findSearchBoxByType().vm.$attrs).toMatchObject({
placeholder: 'Search branches',
- debounce: 250,
+ debounce: DEFAULT_DEBOUNCE_AND_THROTTLE_MS,
});
});
});
diff --git a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
index f0d72124379..c255fcce321 100644
--- a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
+++ b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
@@ -57,10 +57,6 @@ exports[`Project remove modal initialized matches the snapshot 1`] = `
/>
</gl-alert-stub>
- <p>
- This action cannot be undone. You will lose this project's repository and all related resources, including issues, merge requests, etc.
- </p>
-
<p
class="gl-mb-1"
>
diff --git a/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap b/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap
index c37f6415898..fc51825f15b 100644
--- a/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap
+++ b/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap
@@ -21,11 +21,7 @@ exports[`CiCdAnalyticsAreaChart matches the snapshot 1`] = `
option="[object Object]"
thresholds=""
width="0"
- >
- <template />
-
- <template />
- </glareachart-stub>
+ />
</div>
</div>
`;
diff --git a/spec/frontend/projects/settings/components/shared_runners_toggle_spec.js b/spec/frontend/projects/settings/components/shared_runners_toggle_spec.js
index 2d6efe7ae83..0c5bbe2a115 100644
--- a/spec/frontend/projects/settings/components/shared_runners_toggle_spec.js
+++ b/spec/frontend/projects/settings/components/shared_runners_toggle_spec.js
@@ -20,6 +20,7 @@ describe('projects/settings/components/shared_runners', () => {
isDisabledAndUnoverridable: false,
isLoading: false,
updatePath: TEST_UPDATE_PATH,
+ isCreditCardValidationRequired: false,
...props,
},
});
diff --git a/spec/frontend/projects/terraform_notification/terraform_notification_spec.js b/spec/frontend/projects/terraform_notification/terraform_notification_spec.js
new file mode 100644
index 00000000000..be34b207c4b
--- /dev/null
+++ b/spec/frontend/projects/terraform_notification/terraform_notification_spec.js
@@ -0,0 +1,62 @@
+import { GlBanner } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { setCookie, parseBoolean } from '~/lib/utils/common_utils';
+import TerraformNotification from '~/projects/terraform_notification/components/terraform_notification.vue';
+
+jest.mock('~/lib/utils/common_utils');
+
+const bannerDissmisedKey = 'terraform_notification_dismissed_for_project_1';
+
+describe('TerraformNotificationBanner', () => {
+ let wrapper;
+
+ const propsData = {
+ projectId: 1,
+ };
+ const findBanner = () => wrapper.findComponent(GlBanner);
+
+ beforeEach(() => {
+ wrapper = shallowMount(TerraformNotification, {
+ propsData,
+ stubs: { GlBanner },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ parseBoolean.mockReturnValue(false);
+ });
+
+ describe('when the dismiss cookie is set', () => {
+ beforeEach(() => {
+ parseBoolean.mockReturnValue(true);
+ wrapper = shallowMount(TerraformNotification, {
+ propsData,
+ });
+ });
+
+ it('should not render the banner', () => {
+ expect(findBanner().exists()).toBe(false);
+ });
+ });
+
+ describe('when the dismiss cookie is not set', () => {
+ it('should render the banner', () => {
+ expect(findBanner().exists()).toBe(true);
+ });
+ });
+
+ describe('when close button is clicked', () => {
+ beforeEach(async () => {
+ await findBanner().vm.$emit('close');
+ });
+
+ it('should set the cookie with the bannerDissmisedKey', () => {
+ expect(setCookie).toHaveBeenCalledWith(bannerDissmisedKey, true);
+ });
+
+ it('should remove the banner', () => {
+ expect(findBanner().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/registry/explorer/components/list_page/cleanup_status_spec.js b/spec/frontend/registry/explorer/components/list_page/cleanup_status_spec.js
new file mode 100644
index 00000000000..c89bb874a7f
--- /dev/null
+++ b/spec/frontend/registry/explorer/components/list_page/cleanup_status_spec.js
@@ -0,0 +1,87 @@
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import CleanupStatus from '~/registry/explorer/components/list_page/cleanup_status.vue';
+import {
+ ASYNC_DELETE_IMAGE_ERROR_MESSAGE,
+ CLEANUP_STATUS_SCHEDULED,
+ CLEANUP_STATUS_ONGOING,
+ CLEANUP_STATUS_UNFINISHED,
+ UNFINISHED_STATUS,
+ UNSCHEDULED_STATUS,
+ SCHEDULED_STATUS,
+ ONGOING_STATUS,
+} from '~/registry/explorer/constants';
+
+describe('cleanup_status', () => {
+ let wrapper;
+
+ const findMainIcon = () => wrapper.findByTestId('main-icon');
+ const findExtraInfoIcon = () => wrapper.findByTestId('extra-info');
+
+ const mountComponent = (propsData = { status: SCHEDULED_STATUS }) => {
+ wrapper = shallowMountExtended(CleanupStatus, {
+ propsData,
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it.each`
+ status | visible | text
+ ${UNFINISHED_STATUS} | ${true} | ${CLEANUP_STATUS_UNFINISHED}
+ ${SCHEDULED_STATUS} | ${true} | ${CLEANUP_STATUS_SCHEDULED}
+ ${ONGOING_STATUS} | ${true} | ${CLEANUP_STATUS_ONGOING}
+ ${UNSCHEDULED_STATUS} | ${false} | ${''}
+ `(
+ 'when the status is $status is $visible that the component is mounted and has the correct text',
+ ({ status, visible, text }) => {
+ mountComponent({ status });
+
+ expect(findMainIcon().exists()).toBe(visible);
+ expect(wrapper.text()).toBe(text);
+ },
+ );
+
+ describe('main icon', () => {
+ it('exists', () => {
+ mountComponent();
+
+ expect(findMainIcon().exists()).toBe(true);
+ });
+
+ it(`has the orange class when the status is ${UNFINISHED_STATUS}`, () => {
+ mountComponent({ status: UNFINISHED_STATUS });
+
+ expect(findMainIcon().classes('gl-text-orange-500')).toBe(true);
+ });
+ });
+
+ describe('extra info icon', () => {
+ it.each`
+ status | visible
+ ${UNFINISHED_STATUS} | ${true}
+ ${SCHEDULED_STATUS} | ${false}
+ ${ONGOING_STATUS} | ${false}
+ `(
+ 'when the status is $status is $visible that the extra icon is visible',
+ ({ status, visible }) => {
+ mountComponent({ status });
+
+ expect(findExtraInfoIcon().exists()).toBe(visible);
+ },
+ );
+
+ it(`has a tooltip`, () => {
+ mountComponent({ status: UNFINISHED_STATUS });
+
+ const tooltip = getBinding(findExtraInfoIcon().element, 'gl-tooltip');
+
+ expect(tooltip.value.title).toBe(ASYNC_DELETE_IMAGE_ERROR_MESSAGE);
+ });
+ });
+});
diff --git a/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js b/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js
index 323d7b177e7..db0f869ab52 100644
--- a/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js
+++ b/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js
@@ -3,15 +3,14 @@ import { shallowMount } from '@vue/test-utils';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import DeleteButton from '~/registry/explorer/components/delete_button.vue';
+import CleanupStatus from '~/registry/explorer/components/list_page/cleanup_status.vue';
import Component from '~/registry/explorer/components/list_page/image_list_row.vue';
import {
ROW_SCHEDULED_FOR_DELETION,
LIST_DELETE_BUTTON_DISABLED,
REMOVE_REPOSITORY_LABEL,
- ASYNC_DELETE_IMAGE_ERROR_MESSAGE,
- CLEANUP_TIMED_OUT_ERROR_MESSAGE,
IMAGE_DELETE_SCHEDULED_STATUS,
- IMAGE_FAILED_DELETED_STATUS,
+ SCHEDULED_STATUS,
ROOT_IMAGE_TEXT,
} from '~/registry/explorer/constants';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
@@ -27,7 +26,7 @@ describe('Image List Row', () => {
const findTagsCount = () => wrapper.find('[data-testid="tags-count"]');
const findDeleteBtn = () => wrapper.findComponent(DeleteButton);
const findClipboardButton = () => wrapper.findComponent(ClipboardButton);
- const findWarningIcon = () => wrapper.find('[data-testid="warning-icon"]');
+ const findCleanupStatus = () => wrapper.findComponent(CleanupStatus);
const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
const findListItemComponent = () => wrapper.findComponent(ListItem);
@@ -106,23 +105,22 @@ describe('Image List Row', () => {
expect(button.props('title')).toBe(item.location);
});
- describe('warning icon', () => {
+ describe('cleanup status component', () => {
it.each`
- status | expirationPolicyStartedAt | shown | title
- ${IMAGE_FAILED_DELETED_STATUS} | ${true} | ${true} | ${ASYNC_DELETE_IMAGE_ERROR_MESSAGE}
- ${''} | ${true} | ${true} | ${CLEANUP_TIMED_OUT_ERROR_MESSAGE}
- ${''} | ${false} | ${false} | ${''}
+ expirationPolicyCleanupStatus | shown
+ ${null} | ${false}
+ ${SCHEDULED_STATUS} | ${true}
`(
- 'when status is $status and expirationPolicyStartedAt is $expirationPolicyStartedAt',
- ({ expirationPolicyStartedAt, status, shown, title }) => {
- mountComponent({ item: { ...item, status, expirationPolicyStartedAt } });
+ 'when expirationPolicyCleanupStatus is $expirationPolicyCleanupStatus it is $shown that the component exists',
+ ({ expirationPolicyCleanupStatus, shown }) => {
+ mountComponent({ item: { ...item, expirationPolicyCleanupStatus } });
- const icon = findWarningIcon();
- expect(icon.exists()).toBe(shown);
+ expect(findCleanupStatus().exists()).toBe(shown);
if (shown) {
- const tooltip = getBinding(icon.element, 'gl-tooltip');
- expect(tooltip.value.title).toBe(title);
+ expect(findCleanupStatus().props()).toMatchObject({
+ status: expirationPolicyCleanupStatus,
+ });
}
},
);
diff --git a/spec/frontend/registry/explorer/mock_data.js b/spec/frontend/registry/explorer/mock_data.js
index fe258dcd4e8..27246cf2364 100644
--- a/spec/frontend/registry/explorer/mock_data.js
+++ b/spec/frontend/registry/explorer/mock_data.js
@@ -9,6 +9,7 @@ export const imagesListResponse = [
canDelete: true,
createdAt: '2020-11-03T13:29:21Z',
expirationPolicyStartedAt: null,
+ expirationPolicyCleanupStatus: 'UNSCHEDULED',
},
{
__typename: 'ContainerRepository',
@@ -20,6 +21,7 @@ export const imagesListResponse = [
canDelete: true,
createdAt: '2020-09-21T06:57:43Z',
expirationPolicyStartedAt: null,
+ expirationPolicyCleanupStatus: 'UNSCHEDULED',
},
];
diff --git a/spec/frontend/releases/__snapshots__/util_spec.js.snap b/spec/frontend/releases/__snapshots__/util_spec.js.snap
index e0a1343c39c..b2580d47549 100644
--- a/spec/frontend/releases/__snapshots__/util_spec.js.snap
+++ b/spec/frontend/releases/__snapshots__/util_spec.js.snap
@@ -5,6 +5,7 @@ Object {
"data": Array [
Object {
"_links": Object {
+ "__typename": "ReleaseLinks",
"closedIssuesUrl": "http://localhost/releases-namespace/releases-project/-/issues?release_tag=v1.2&scope=all&state=closed",
"closedMergeRequestsUrl": "http://localhost/releases-namespace/releases-project/-/merge_requests?release_tag=v1.2&scope=all&state=closed",
"editUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.2/edit",
@@ -19,24 +20,29 @@ Object {
"links": Array [],
"sources": Array [
Object {
+ "__typename": "ReleaseSource",
"format": "zip",
"url": "http://localhost/releases-namespace/releases-project/-/archive/v1.2/releases-project-v1.2.zip",
},
Object {
+ "__typename": "ReleaseSource",
"format": "tar.gz",
"url": "http://localhost/releases-namespace/releases-project/-/archive/v1.2/releases-project-v1.2.tar.gz",
},
Object {
+ "__typename": "ReleaseSource",
"format": "tar.bz2",
"url": "http://localhost/releases-namespace/releases-project/-/archive/v1.2/releases-project-v1.2.tar.bz2",
},
Object {
+ "__typename": "ReleaseSource",
"format": "tar",
"url": "http://localhost/releases-namespace/releases-project/-/archive/v1.2/releases-project-v1.2.tar",
},
],
},
"author": Object {
+ "__typename": "UserCore",
"avatarUrl": "https://www.gravatar.com/avatar/16f8e2050ce10180ca571c2eb19cfce2?s=80&d=identicon",
"username": "administrator",
"webUrl": "http://localhost/administrator",
@@ -57,6 +63,7 @@ Object {
},
Object {
"_links": Object {
+ "__typename": "ReleaseLinks",
"closedIssuesUrl": "http://localhost/releases-namespace/releases-project/-/issues?release_tag=v1.1&scope=all&state=closed",
"closedMergeRequestsUrl": "http://localhost/releases-namespace/releases-project/-/merge_requests?release_tag=v1.1&scope=all&state=closed",
"editUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/edit",
@@ -70,6 +77,7 @@ Object {
"count": 8,
"links": Array [
Object {
+ "__typename": "ReleaseAssetLink",
"directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/downloads/binaries/awesome-app-3",
"external": true,
"id": "gid://gitlab/Releases::Link/13",
@@ -78,6 +86,7 @@ Object {
"url": "https://example.com/image",
},
Object {
+ "__typename": "ReleaseAssetLink",
"directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/downloads/binaries/awesome-app-2",
"external": true,
"id": "gid://gitlab/Releases::Link/12",
@@ -86,6 +95,7 @@ Object {
"url": "https://example.com/package",
},
Object {
+ "__typename": "ReleaseAssetLink",
"directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/downloads/binaries/awesome-app-1",
"external": false,
"id": "gid://gitlab/Releases::Link/11",
@@ -94,6 +104,7 @@ Object {
"url": "http://localhost/releases-namespace/releases-project/runbook",
},
Object {
+ "__typename": "ReleaseAssetLink",
"directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/downloads/binaries/linux-amd64",
"external": true,
"id": "gid://gitlab/Releases::Link/10",
@@ -104,24 +115,29 @@ Object {
],
"sources": Array [
Object {
+ "__typename": "ReleaseSource",
"format": "zip",
"url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.zip",
},
Object {
+ "__typename": "ReleaseSource",
"format": "tar.gz",
"url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar.gz",
},
Object {
+ "__typename": "ReleaseSource",
"format": "tar.bz2",
"url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar.bz2",
},
Object {
+ "__typename": "ReleaseSource",
"format": "tar",
"url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar",
},
],
},
"author": Object {
+ "__typename": "UserCore",
"avatarUrl": "https://www.gravatar.com/avatar/16f8e2050ce10180ca571c2eb19cfce2?s=80&d=identicon",
"username": "administrator",
"webUrl": "http://localhost/administrator",
@@ -134,6 +150,7 @@ Object {
"descriptionHtml": "<p data-sourcepos=\\"1:1-1:33\\" dir=\\"auto\\">Best. Release. <strong>Ever.</strong> <gl-emoji title=\\"rocket\\" data-name=\\"rocket\\" data-unicode-version=\\"6.0\\">🚀</gl-emoji></p>",
"evidences": Array [
Object {
+ "__typename": "ReleaseEvidence",
"collectedAt": "2018-12-03T00:00:00Z",
"filepath": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/evidences/1.json",
"sha": "760d6cdfb0879c3ffedec13af470e0f71cf52c6cde4d",
@@ -141,6 +158,7 @@ Object {
],
"milestones": Array [
Object {
+ "__typename": "Milestone",
"description": "The 12.3 milestone",
"id": "gid://gitlab/Milestone/123",
"issueStats": Object {
@@ -153,6 +171,7 @@ Object {
"webUrl": "/releases-namespace/releases-project/-/milestones/1",
},
Object {
+ "__typename": "Milestone",
"description": "The 12.4 milestone",
"id": "gid://gitlab/Milestone/124",
"issueStats": Object {
@@ -173,6 +192,7 @@ Object {
},
],
"paginationInfo": Object {
+ "__typename": "PageInfo",
"endCursor": "eyJyZWxlYXNlZF9hdCI6IjIwMTgtMTItMTAgMDA6MDA6MDAuMDAwMDAwMDAwIFVUQyIsImlkIjoiMSJ9",
"hasNextPage": false,
"hasPreviousPage": false,
@@ -192,24 +212,28 @@ Object {
"count": undefined,
"links": Array [
Object {
+ "directAssetPath": "/binaries/awesome-app-3",
"id": "gid://gitlab/Releases::Link/13",
"linkType": "image",
"name": "Image",
"url": "https://example.com/image",
},
Object {
+ "directAssetPath": "/binaries/awesome-app-2",
"id": "gid://gitlab/Releases::Link/12",
"linkType": "package",
"name": "Package",
"url": "https://example.com/package",
},
Object {
+ "directAssetPath": "/binaries/awesome-app-1",
"id": "gid://gitlab/Releases::Link/11",
"linkType": "runbook",
"name": "Runbook",
"url": "http://localhost/releases-namespace/releases-project/runbook",
},
Object {
+ "directAssetPath": "/binaries/linux-amd64",
"id": "gid://gitlab/Releases::Link/10",
"linkType": "other",
"name": "linux-amd64 binaries",
@@ -247,6 +271,7 @@ exports[`releases/util.js convertOneReleaseGraphQLResponse matches snapshot 1`]
Object {
"data": Object {
"_links": Object {
+ "__typename": "ReleaseLinks",
"closedIssuesUrl": "http://localhost/releases-namespace/releases-project/-/issues?release_tag=v1.1&scope=all&state=closed",
"closedMergeRequestsUrl": "http://localhost/releases-namespace/releases-project/-/merge_requests?release_tag=v1.1&scope=all&state=closed",
"editUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/edit",
@@ -260,6 +285,7 @@ Object {
"count": 8,
"links": Array [
Object {
+ "__typename": "ReleaseAssetLink",
"directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/downloads/binaries/awesome-app-3",
"external": true,
"id": "gid://gitlab/Releases::Link/13",
@@ -268,6 +294,7 @@ Object {
"url": "https://example.com/image",
},
Object {
+ "__typename": "ReleaseAssetLink",
"directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/downloads/binaries/awesome-app-2",
"external": true,
"id": "gid://gitlab/Releases::Link/12",
@@ -276,6 +303,7 @@ Object {
"url": "https://example.com/package",
},
Object {
+ "__typename": "ReleaseAssetLink",
"directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/downloads/binaries/awesome-app-1",
"external": false,
"id": "gid://gitlab/Releases::Link/11",
@@ -284,6 +312,7 @@ Object {
"url": "http://localhost/releases-namespace/releases-project/runbook",
},
Object {
+ "__typename": "ReleaseAssetLink",
"directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/downloads/binaries/linux-amd64",
"external": true,
"id": "gid://gitlab/Releases::Link/10",
@@ -294,24 +323,29 @@ Object {
],
"sources": Array [
Object {
+ "__typename": "ReleaseSource",
"format": "zip",
"url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.zip",
},
Object {
+ "__typename": "ReleaseSource",
"format": "tar.gz",
"url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar.gz",
},
Object {
+ "__typename": "ReleaseSource",
"format": "tar.bz2",
"url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar.bz2",
},
Object {
+ "__typename": "ReleaseSource",
"format": "tar",
"url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar",
},
],
},
"author": Object {
+ "__typename": "UserCore",
"avatarUrl": "https://www.gravatar.com/avatar/16f8e2050ce10180ca571c2eb19cfce2?s=80&d=identicon",
"username": "administrator",
"webUrl": "http://localhost/administrator",
@@ -324,6 +358,7 @@ Object {
"descriptionHtml": "<p data-sourcepos=\\"1:1-1:33\\" dir=\\"auto\\">Best. Release. <strong>Ever.</strong> <gl-emoji title=\\"rocket\\" data-name=\\"rocket\\" data-unicode-version=\\"6.0\\">🚀</gl-emoji></p>",
"evidences": Array [
Object {
+ "__typename": "ReleaseEvidence",
"collectedAt": "2018-12-03T00:00:00Z",
"filepath": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/evidences/1.json",
"sha": "760d6cdfb0879c3ffedec13af470e0f71cf52c6cde4d",
@@ -331,6 +366,7 @@ Object {
],
"milestones": Array [
Object {
+ "__typename": "Milestone",
"description": "The 12.3 milestone",
"id": "gid://gitlab/Milestone/123",
"issueStats": Object {
@@ -343,6 +379,7 @@ Object {
"webUrl": "/releases-namespace/releases-project/-/milestones/1",
},
Object {
+ "__typename": "Milestone",
"description": "The 12.4 milestone",
"id": "gid://gitlab/Milestone/124",
"issueStats": Object {
diff --git a/spec/frontend/releases/components/app_index_apollo_client_spec.js b/spec/frontend/releases/components/app_index_apollo_client_spec.js
index 002d8939058..096d319c82f 100644
--- a/spec/frontend/releases/components/app_index_apollo_client_spec.js
+++ b/spec/frontend/releases/components/app_index_apollo_client_spec.js
@@ -3,6 +3,7 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import allReleasesQuery from 'shared_queries/releases/all_releases.query.graphql';
import createFlash from '~/flash';
import { historyPushState } from '~/lib/utils/common_utils';
import ReleasesIndexApolloClientApp from '~/releases/components/app_index_apollo_client.vue';
@@ -12,7 +13,6 @@ import ReleasesEmptyState from '~/releases/components/releases_empty_state.vue';
import ReleasesPaginationApolloClient from '~/releases/components/releases_pagination_apollo_client.vue';
import ReleasesSortApolloClient from '~/releases/components/releases_sort_apollo_client.vue';
import { PAGE_SIZE, CREATED_ASC, DEFAULT_SORT } from '~/releases/constants';
-import allReleasesQuery from '~/releases/graphql/queries/all_releases.query.graphql';
Vue.use(VueApollo);
@@ -21,10 +21,14 @@ jest.mock('~/flash');
let mockQueryParams;
jest.mock('~/lib/utils/common_utils', () => ({
...jest.requireActual('~/lib/utils/common_utils'),
+ historyPushState: jest.fn(),
+}));
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
getParameterByName: jest
.fn()
.mockImplementation((parameterName) => mockQueryParams[parameterName]),
- historyPushState: jest.fn(),
}));
describe('app_index_apollo_client.vue', () => {
diff --git a/spec/frontend/releases/components/app_index_spec.js b/spec/frontend/releases/components/app_index_spec.js
index 3a28020c284..43e88650ae3 100644
--- a/spec/frontend/releases/components/app_index_spec.js
+++ b/spec/frontend/releases/components/app_index_spec.js
@@ -2,14 +2,14 @@ import { shallowMount } from '@vue/test-utils';
import { merge } from 'lodash';
import Vue from 'vue';
import Vuex from 'vuex';
-import { getParameterByName } from '~/lib/utils/common_utils';
+import { getParameterByName } from '~/lib/utils/url_utility';
import AppIndex from '~/releases/components/app_index.vue';
import ReleaseSkeletonLoader from '~/releases/components/release_skeleton_loader.vue';
import ReleasesPagination from '~/releases/components/releases_pagination.vue';
import ReleasesSort from '~/releases/components/releases_sort.vue';
-jest.mock('~/lib/utils/common_utils', () => ({
- ...jest.requireActual('~/lib/utils/common_utils'),
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
getParameterByName: jest.fn(),
}));
diff --git a/spec/frontend/reports/components/__snapshots__/grouped_issues_list_spec.js.snap b/spec/frontend/reports/components/__snapshots__/grouped_issues_list_spec.js.snap
index c932379a253..111757e2d30 100644
--- a/spec/frontend/reports/components/__snapshots__/grouped_issues_list_spec.js.snap
+++ b/spec/frontend/reports/components/__snapshots__/grouped_issues_list_spec.js.snap
@@ -14,6 +14,7 @@ Object {
exports[`Grouped Issues List with data renders a report item with the correct props 1`] = `
Object {
"component": "TestIssueBody",
+ "iconComponent": "IssueStatusIcon",
"isNew": false,
"issue": Object {
"name": "foo",
diff --git a/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js b/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js
index d29048d640c..0f7c2559e8b 100644
--- a/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js
+++ b/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js
@@ -114,7 +114,7 @@ describe('Grouped test reports app', () => {
setReports(newFailedTestReports);
});
- it('tracks usage ping metric when enabled', () => {
+ it('tracks service ping metric when enabled', () => {
mountComponent({ glFeatures: { usageDataITestingSummaryWidgetTotal: true } });
findExpandButton().trigger('click');
@@ -132,7 +132,7 @@ describe('Grouped test reports app', () => {
expect(Api.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
});
- it('does not track usage ping metric when disabled', () => {
+ it('does not track service ping metric when disabled', () => {
mountComponent({ glFeatures: { usageDataITestingSummaryWidgetTotal: false } });
findExpandButton().trigger('click');
diff --git a/spec/frontend/repository/components/blob_button_group_spec.js b/spec/frontend/repository/components/blob_button_group_spec.js
new file mode 100644
index 00000000000..a449fd6f06c
--- /dev/null
+++ b/spec/frontend/repository/components/blob_button_group_spec.js
@@ -0,0 +1,117 @@
+import { GlButton } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import BlobButtonGroup from '~/repository/components/blob_button_group.vue';
+import DeleteBlobModal from '~/repository/components/delete_blob_modal.vue';
+import UploadBlobModal from '~/repository/components/upload_blob_modal.vue';
+
+const DEFAULT_PROPS = {
+ name: 'some name',
+ path: 'some/path',
+ canPushCode: true,
+ replacePath: 'some/replace/path',
+ deletePath: 'some/delete/path',
+ emptyRepo: false,
+};
+
+const DEFAULT_INJECT = {
+ targetBranch: 'master',
+ originalBranch: 'master',
+};
+
+describe('BlobButtonGroup component', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(BlobButtonGroup, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...props,
+ },
+ provide: {
+ ...DEFAULT_INJECT,
+ },
+ directives: {
+ GlModal: createMockDirective(),
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findDeleteBlobModal = () => wrapper.findComponent(DeleteBlobModal);
+ const findUploadBlobModal = () => wrapper.findComponent(UploadBlobModal);
+ const findReplaceButton = () => wrapper.findAll(GlButton).at(0);
+
+ it('renders component', () => {
+ createComponent();
+
+ const { name, path } = DEFAULT_PROPS;
+
+ expect(wrapper.props()).toMatchObject({
+ name,
+ path,
+ });
+ });
+
+ describe('buttons', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders both the replace and delete button', () => {
+ expect(wrapper.findAll(GlButton)).toHaveLength(2);
+ });
+
+ it('renders the buttons in the correct order', () => {
+ expect(wrapper.findAll(GlButton).at(0).text()).toBe('Replace');
+ expect(wrapper.findAll(GlButton).at(1).text()).toBe('Delete');
+ });
+
+ it('triggers the UploadBlobModal from the replace button', () => {
+ const { value } = getBinding(findReplaceButton().element, 'gl-modal');
+ const modalId = findUploadBlobModal().props('modalId');
+
+ expect(modalId).toEqual(value);
+ });
+ });
+
+ it('renders UploadBlobModal', () => {
+ createComponent();
+
+ const { targetBranch, originalBranch } = DEFAULT_INJECT;
+ const { name, path, canPushCode, replacePath } = DEFAULT_PROPS;
+ const title = `Replace ${name}`;
+
+ expect(findUploadBlobModal().props()).toMatchObject({
+ modalTitle: title,
+ commitMessage: title,
+ targetBranch,
+ originalBranch,
+ canPushCode,
+ path,
+ replacePath,
+ primaryBtnText: 'Replace file',
+ });
+ });
+
+ it('renders DeleteBlobModel', () => {
+ createComponent();
+
+ const { targetBranch, originalBranch } = DEFAULT_INJECT;
+ const { name, canPushCode, deletePath, emptyRepo } = DEFAULT_PROPS;
+ const title = `Delete ${name}`;
+
+ expect(findDeleteBlobModal().props()).toMatchObject({
+ modalTitle: title,
+ commitMessage: title,
+ targetBranch,
+ originalBranch,
+ canPushCode,
+ deletePath,
+ emptyRepo,
+ });
+ });
+});
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index 495039b4ccb..a83d0a607f2 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -1,11 +1,23 @@
import { GlLoadingIcon } from '@gitlab/ui';
-import { shallowMount, mount } from '@vue/test-utils';
+import { shallowMount, mount, createLocalVue } from '@vue/test-utils';
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import BlobContent from '~/blob/components/blob_content.vue';
import BlobHeader from '~/blob/components/blob_header.vue';
+import BlobButtonGroup from '~/repository/components/blob_button_group.vue';
import BlobContentViewer from '~/repository/components/blob_content_viewer.vue';
-import BlobHeaderEdit from '~/repository/components/blob_header_edit.vue';
-import BlobReplace from '~/repository/components/blob_replace.vue';
+import BlobEdit from '~/repository/components/blob_edit.vue';
+import { loadViewer, viewerProps } from '~/repository/components/blob_viewers';
+import DownloadViewer from '~/repository/components/blob_viewers/download_viewer.vue';
+import EmptyViewer from '~/repository/components/blob_viewers/empty_viewer.vue';
+import TextViewer from '~/repository/components/blob_viewers/text_viewer.vue';
+import blobInfoQuery from '~/repository/queries/blob_info.query.graphql';
+
+jest.mock('~/repository/components/blob_viewers');
let wrapper;
const simpleMockData = {
@@ -17,6 +29,7 @@ const simpleMockData = {
fileType: 'text',
tooLarge: false,
path: 'some_file.js',
+ webPath: 'some_file.js',
editBlobPath: 'some_file.js/edit',
ideEditPath: 'some_file.js/ide/edit',
storedExternally: false,
@@ -27,7 +40,6 @@ const simpleMockData = {
canLock: true,
isLocked: false,
lockLink: 'some_file.js/lock',
- canModifyBlob: true,
forkPath: 'some_file.js/fork',
simpleViewer: {
fileType: 'text',
@@ -47,6 +59,51 @@ const richMockData = {
},
};
+const projectMockData = {
+ userPermissions: {
+ pushCode: true,
+ },
+ repository: {
+ empty: false,
+ },
+};
+
+const localVue = createLocalVue();
+const mockAxios = new MockAdapter(axios);
+
+const createComponentWithApollo = (mockData = {}) => {
+ localVue.use(VueApollo);
+
+ const defaultPushCode = projectMockData.userPermissions.pushCode;
+ const defaultEmptyRepo = projectMockData.repository.empty;
+ const { blobs, emptyRepo = defaultEmptyRepo, canPushCode = defaultPushCode } = mockData;
+
+ const mockResolver = jest.fn().mockResolvedValue({
+ data: {
+ project: {
+ userPermissions: { pushCode: canPushCode },
+ repository: {
+ empty: emptyRepo,
+ blobs: {
+ nodes: [blobs],
+ },
+ },
+ },
+ },
+ });
+
+ const fakeApollo = createMockApollo([[blobInfoQuery, mockResolver]]);
+
+ wrapper = shallowMount(BlobContentViewer, {
+ localVue,
+ apolloProvider: fakeApollo,
+ propsData: {
+ path: 'some_file.js',
+ projectPath: 'some/path',
+ },
+ });
+};
+
const createFactory = (mountFn) => (
{ props = {}, mockData = {}, stubs = {} } = {},
loading = false,
@@ -78,9 +135,9 @@ const fullFactory = createFactory(mount);
describe('Blob content viewer component', () => {
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findBlobHeader = () => wrapper.findComponent(BlobHeader);
- const findBlobHeaderEdit = () => wrapper.findComponent(BlobHeaderEdit);
+ const findBlobEdit = () => wrapper.findComponent(BlobEdit);
const findBlobContent = () => wrapper.findComponent(BlobContent);
- const findBlobReplace = () => wrapper.findComponent(BlobReplace);
+ const findBlobButtonGroup = () => wrapper.findComponent(BlobButtonGroup);
afterEach(() => {
wrapper.destroy();
@@ -163,6 +220,67 @@ describe('Blob content viewer component', () => {
});
});
+ describe('legacy viewers', () => {
+ it('does not load a legacy viewer when a rich viewer is not available', async () => {
+ createComponentWithApollo({ blobs: simpleMockData });
+ await waitForPromises();
+
+ expect(mockAxios.history.get).toHaveLength(0);
+ });
+
+ it('loads a legacy viewer when a rich viewer is available', async () => {
+ createComponentWithApollo({ blobs: richMockData });
+ await waitForPromises();
+
+ expect(mockAxios.history.get).toHaveLength(1);
+ });
+ });
+
+ describe('Blob viewer', () => {
+ afterEach(() => {
+ loadViewer.mockRestore();
+ viewerProps.mockRestore();
+ });
+
+ it('does not render a BlobContent component if a Blob viewer is available', () => {
+ loadViewer.mockReturnValueOnce(() => true);
+ factory({ mockData: { blobInfo: richMockData } });
+
+ expect(findBlobContent().exists()).toBe(false);
+ });
+
+ it.each`
+ viewer | loadViewerReturnValue | viewerPropsReturnValue
+ ${'empty'} | ${EmptyViewer} | ${{}}
+ ${'download'} | ${DownloadViewer} | ${{ filePath: '/some/file/path', fileName: 'test.js', fileSize: 100 }}
+ ${'text'} | ${TextViewer} | ${{ content: 'test', fileName: 'test.js', readOnly: true }}
+ `(
+ 'renders viewer component for $viewer files',
+ async ({ viewer, loadViewerReturnValue, viewerPropsReturnValue }) => {
+ loadViewer.mockReturnValue(loadViewerReturnValue);
+ viewerProps.mockReturnValue(viewerPropsReturnValue);
+
+ factory({
+ mockData: {
+ blobInfo: {
+ ...simpleMockData,
+ fileType: null,
+ simpleViewer: {
+ ...simpleMockData.simpleViewer,
+ fileType: viewer,
+ },
+ },
+ },
+ });
+
+ await nextTick();
+
+ expect(loadViewer).toHaveBeenCalledWith(viewer);
+ expect(wrapper.findComponent(loadViewerReturnValue).exists()).toBe(true);
+ },
+ );
+ });
+
describe('BlobHeader action slot', () => {
const { ideEditPath, editBlobPath } = simpleMockData;
@@ -177,7 +295,7 @@ describe('Blob content viewer component', () => {
await nextTick();
- expect(findBlobHeaderEdit().props()).toMatchObject({
+ expect(findBlobEdit().props()).toMatchObject({
editPath: editBlobPath,
webIdePath: ideEditPath,
});
@@ -194,31 +312,56 @@ describe('Blob content viewer component', () => {
await nextTick();
- expect(findBlobHeaderEdit().props()).toMatchObject({
+ expect(findBlobEdit().props()).toMatchObject({
editPath: editBlobPath,
webIdePath: ideEditPath,
});
});
- describe('BlobReplace', () => {
- const { name, path } = simpleMockData;
+ it('does not render BlobHeaderEdit button when viewing a binary file', async () => {
+ fullFactory({
+ mockData: { blobInfo: richMockData, isBinary: true },
+ stubs: {
+ BlobContent: true,
+ BlobReplace: true,
+ },
+ });
+
+ await nextTick();
+
+ expect(findBlobEdit().exists()).toBe(false);
+ });
+
+ describe('BlobButtonGroup', () => {
+ const { name, path, replacePath, webPath } = simpleMockData;
+ const {
+ userPermissions: { pushCode },
+ repository: { empty },
+ } = projectMockData;
it('renders component', async () => {
window.gon.current_user_id = 1;
fullFactory({
- mockData: { blobInfo: simpleMockData },
+ mockData: {
+ blobInfo: simpleMockData,
+ project: { userPermissions: { pushCode }, repository: { empty } },
+ },
stubs: {
BlobContent: true,
- BlobReplace: true,
+ BlobButtonGroup: true,
},
});
await nextTick();
- expect(findBlobReplace().props()).toMatchObject({
+ expect(findBlobButtonGroup().props()).toMatchObject({
name,
path,
+ replacePath,
+ deletePath: webPath,
+ canPushCode: pushCode,
+ emptyRepo: empty,
});
});
@@ -235,7 +378,7 @@ describe('Blob content viewer component', () => {
await nextTick();
- expect(findBlobReplace().exists()).toBe(false);
+ expect(findBlobButtonGroup().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/repository/components/blob_header_edit_spec.js b/spec/frontend/repository/components/blob_edit_spec.js
index c0eb7c523c4..e6e69cd8549 100644
--- a/spec/frontend/repository/components/blob_header_edit_spec.js
+++ b/spec/frontend/repository/components/blob_edit_spec.js
@@ -1,6 +1,6 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import BlobHeaderEdit from '~/repository/components/blob_header_edit.vue';
+import BlobEdit from '~/repository/components/blob_edit.vue';
import WebIdeLink from '~/vue_shared/components/web_ide_link.vue';
const DEFAULT_PROPS = {
@@ -8,11 +8,11 @@ const DEFAULT_PROPS = {
webIdePath: 'some_file.js/ide/edit',
};
-describe('BlobHeaderEdit component', () => {
+describe('BlobEdit component', () => {
let wrapper;
const createComponent = (consolidatedEditButton = false, props = {}) => {
- wrapper = shallowMount(BlobHeaderEdit, {
+ wrapper = shallowMount(BlobEdit, {
propsData: {
...DEFAULT_PROPS,
...props,
diff --git a/spec/frontend/repository/components/blob_replace_spec.js b/spec/frontend/repository/components/blob_replace_spec.js
deleted file mode 100644
index 4a6f147da22..00000000000
--- a/spec/frontend/repository/components/blob_replace_spec.js
+++ /dev/null
@@ -1,67 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import BlobReplace from '~/repository/components/blob_replace.vue';
-import UploadBlobModal from '~/repository/components/upload_blob_modal.vue';
-
-const DEFAULT_PROPS = {
- name: 'some name',
- path: 'some/path',
- canPushCode: true,
- replacePath: 'some/replace/path',
-};
-
-const DEFAULT_INJECT = {
- targetBranch: 'master',
- originalBranch: 'master',
-};
-
-describe('BlobReplace component', () => {
- let wrapper;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(BlobReplace, {
- propsData: {
- ...DEFAULT_PROPS,
- ...props,
- },
- provide: {
- ...DEFAULT_INJECT,
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- const findUploadBlobModal = () => wrapper.findComponent(UploadBlobModal);
-
- it('renders component', () => {
- createComponent();
-
- const { name, path } = DEFAULT_PROPS;
-
- expect(wrapper.props()).toMatchObject({
- name,
- path,
- });
- });
-
- it('renders UploadBlobModal', () => {
- createComponent();
-
- const { targetBranch, originalBranch } = DEFAULT_INJECT;
- const { name, path, canPushCode, replacePath } = DEFAULT_PROPS;
- const title = `Replace ${name}`;
-
- expect(findUploadBlobModal().props()).toMatchObject({
- modalTitle: title,
- commitMessage: title,
- targetBranch,
- originalBranch,
- canPushCode,
- path,
- replacePath,
- primaryBtnText: 'Replace file',
- });
- });
-});
diff --git a/spec/frontend/repository/components/blob_viewers/__snapshots__/empty_viewer_spec.js.snap b/spec/frontend/repository/components/blob_viewers/__snapshots__/empty_viewer_spec.js.snap
new file mode 100644
index 00000000000..e702ea5fd00
--- /dev/null
+++ b/spec/frontend/repository/components/blob_viewers/__snapshots__/empty_viewer_spec.js.snap
@@ -0,0 +1,9 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Empty Viewer matches the snapshot 1`] = `
+<div
+ class="nothing-here-block"
+>
+ Empty file
+</div>
+`;
diff --git a/spec/frontend/repository/components/blob_viewers/download_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/download_viewer_spec.js
new file mode 100644
index 00000000000..c71b2b3c55c
--- /dev/null
+++ b/spec/frontend/repository/components/blob_viewers/download_viewer_spec.js
@@ -0,0 +1,70 @@
+import { GlLink, GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { numberToHumanSize } from '~/lib/utils/number_utils';
+import DownloadViewer from '~/repository/components/blob_viewers/download_viewer.vue';
+
+describe('Text Viewer', () => {
+ let wrapper;
+
+ const DEFAULT_PROPS = {
+ fileName: 'file_name.js',
+ filePath: '/some/file/path',
+ fileSize: 2269674,
+ };
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(DownloadViewer, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...props,
+ },
+ });
+ };
+
+ it('renders component', () => {
+ createComponent();
+
+ const { fileName, filePath, fileSize } = DEFAULT_PROPS;
+ expect(wrapper.props()).toMatchObject({
+ fileName,
+ filePath,
+ fileSize,
+ });
+ });
+
+ it('renders download human readable file size text', () => {
+ createComponent();
+
+ const downloadText = `Download (${numberToHumanSize(DEFAULT_PROPS.fileSize)})`;
+ expect(wrapper.text()).toBe(downloadText);
+ });
+
+ it('renders download text', () => {
+ createComponent({
+ fileSize: 0,
+ });
+
+ expect(wrapper.text()).toBe('Download');
+ });
+
+ it('renders download link', () => {
+ createComponent();
+ const { filePath, fileName } = DEFAULT_PROPS;
+
+ expect(wrapper.findComponent(GlLink).attributes()).toMatchObject({
+ rel: 'nofollow',
+ target: '_blank',
+ href: filePath,
+ download: fileName,
+ });
+ });
+
+ it('renders download icon', () => {
+ createComponent();
+
+ expect(wrapper.findComponent(GlIcon).props()).toMatchObject({
+ name: 'download',
+ size: 16,
+ });
+ });
+});
diff --git a/spec/frontend/repository/components/blob_viewers/empty_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/empty_viewer_spec.js
new file mode 100644
index 00000000000..e65f20ea0af
--- /dev/null
+++ b/spec/frontend/repository/components/blob_viewers/empty_viewer_spec.js
@@ -0,0 +1,14 @@
+import { shallowMount } from '@vue/test-utils';
+import EmptyViewer from '~/repository/components/blob_viewers/empty_viewer.vue';
+
+describe('Empty Viewer', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = shallowMount(EmptyViewer);
+ });
+
+ it('matches the snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/repository/components/blob_viewers/text_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/text_viewer_spec.js
new file mode 100644
index 00000000000..88c5bee6564
--- /dev/null
+++ b/spec/frontend/repository/components/blob_viewers/text_viewer_spec.js
@@ -0,0 +1,30 @@
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import TextViewer from '~/repository/components/blob_viewers/text_viewer.vue';
+import SourceEditor from '~/vue_shared/components/source_editor.vue';
+
+describe('Text Viewer', () => {
+ let wrapper;
+ const propsData = {
+ content: 'Some content',
+ fileName: 'file_name.js',
+ readOnly: true,
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMount(TextViewer, { propsData });
+ };
+
+ const findEditor = () => wrapper.findComponent(SourceEditor);
+
+ it('renders a Source Editor component', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findEditor().exists()).toBe(true);
+ expect(findEditor().props('value')).toBe(propsData.content);
+ expect(findEditor().props('fileName')).toBe(propsData.fileName);
+ expect(findEditor().props('editorOptions')).toEqual({ readOnly: propsData.readOnly });
+ });
+});
diff --git a/spec/frontend/repository/components/delete_blob_modal_spec.js b/spec/frontend/repository/components/delete_blob_modal_spec.js
new file mode 100644
index 00000000000..a74e3e6d325
--- /dev/null
+++ b/spec/frontend/repository/components/delete_blob_modal_spec.js
@@ -0,0 +1,130 @@
+import { GlFormTextarea, GlModal, GlFormInput, GlToggle } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import DeleteBlobModal from '~/repository/components/delete_blob_modal.vue';
+
+jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
+
+const initialProps = {
+ modalId: 'Delete-blob',
+ modalTitle: 'Delete File',
+ deletePath: 'some/path',
+ commitMessage: 'Delete File',
+ targetBranch: 'some-target-branch',
+ originalBranch: 'main',
+ canPushCode: true,
+ emptyRepo: false,
+};
+
+describe('DeleteBlobModal', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(DeleteBlobModal, {
+ propsData: {
+ ...initialProps,
+ ...props,
+ },
+ });
+ };
+
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findForm = () => wrapper.findComponent({ ref: 'form' });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders Modal component', () => {
+ createComponent();
+
+ const { modalTitle: title } = initialProps;
+
+ expect(findModal().props()).toMatchObject({
+ title,
+ size: 'md',
+ actionPrimary: {
+ text: 'Delete file',
+ },
+ actionCancel: {
+ text: 'Cancel',
+ },
+ });
+ });
+
+ describe('form', () => {
+ it('gets passed the path for action attribute', () => {
+ createComponent();
+ expect(findForm().attributes('action')).toBe(initialProps.deletePath);
+ });
+
+ it('submits the form', async () => {
+ createComponent();
+
+ const submitSpy = jest.spyOn(findForm().element, 'submit');
+ findModal().vm.$emit('primary', { preventDefault: () => {} });
+ await nextTick();
+
+ expect(submitSpy).toHaveBeenCalled();
+ submitSpy.mockRestore();
+ });
+
+ it.each`
+ component | defaultValue | canPushCode | targetBranch | originalBranch | exist
+ ${GlFormTextarea} | ${initialProps.commitMessage} | ${true} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${true}
+ ${GlFormInput} | ${initialProps.targetBranch} | ${true} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${true}
+ ${GlFormInput} | ${undefined} | ${false} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${false}
+ ${GlToggle} | ${'true'} | ${true} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${true}
+ ${GlToggle} | ${undefined} | ${true} | ${'same-branch'} | ${'same-branch'} | ${false}
+ `(
+ 'has the correct form fields ',
+ ({ component, defaultValue, canPushCode, targetBranch, originalBranch, exist }) => {
+ createComponent({
+ canPushCode,
+ targetBranch,
+ originalBranch,
+ });
+ const formField = wrapper.findComponent(component);
+
+ if (!exist) {
+ expect(formField.exists()).toBe(false);
+ return;
+ }
+
+ expect(formField.exists()).toBe(true);
+ expect(formField.attributes('value')).toBe(defaultValue);
+ },
+ );
+
+ it.each`
+ input | value | emptyRepo | canPushCode | exist
+ ${'authenticity_token'} | ${'mock-csrf-token'} | ${false} | ${true} | ${true}
+ ${'authenticity_token'} | ${'mock-csrf-token'} | ${true} | ${false} | ${true}
+ ${'_method'} | ${'delete'} | ${false} | ${true} | ${true}
+ ${'_method'} | ${'delete'} | ${true} | ${false} | ${true}
+ ${'original_branch'} | ${initialProps.originalBranch} | ${false} | ${true} | ${true}
+ ${'original_branch'} | ${undefined} | ${true} | ${true} | ${false}
+ ${'create_merge_request'} | ${'1'} | ${false} | ${false} | ${true}
+ ${'create_merge_request'} | ${'1'} | ${false} | ${true} | ${true}
+ ${'create_merge_request'} | ${undefined} | ${true} | ${false} | ${false}
+ `(
+ 'passes $input as a hidden input with the correct value',
+ ({ input, value, emptyRepo, canPushCode, exist }) => {
+ createComponent({
+ emptyRepo,
+ canPushCode,
+ });
+
+ const inputMethod = findForm().find(`input[name="${input}"]`);
+
+ if (!exist) {
+ expect(inputMethod.exists()).toBe(false);
+ return;
+ }
+
+ expect(inputMethod.attributes('type')).toBe('hidden');
+ expect(inputMethod.attributes('value')).toBe(value);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
index ac60fc4917d..6f461f4c69b 100644
--- a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
+++ b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
@@ -11,6 +11,7 @@ exports[`Repository table row component renders a symlink table row 1`] = `
class="tree-item-link str-truncated"
data-qa-selector="file_name_link"
href="https://test.com"
+ title="test"
>
<file-icon-stub
class="mr-1 position-relative text-secondary"
@@ -64,6 +65,7 @@ exports[`Repository table row component renders table row 1`] = `
class="tree-item-link str-truncated"
data-qa-selector="file_name_link"
href="https://test.com"
+ title="test"
>
<file-icon-stub
class="mr-1 position-relative text-secondary"
@@ -117,6 +119,7 @@ exports[`Repository table row component renders table row for path with special
class="tree-item-link str-truncated"
data-qa-selector="file_name_link"
href="https://test.com"
+ title="test"
>
<file-icon-stub
class="mr-1 position-relative text-secondary"
diff --git a/spec/frontend/repository/components/tree_content_spec.js b/spec/frontend/repository/components/tree_content_spec.js
index d397bc185e2..1d1ec58100f 100644
--- a/spec/frontend/repository/components/tree_content_spec.js
+++ b/spec/frontend/repository/components/tree_content_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
+import filesQuery from 'shared_queries/repository/files.query.graphql';
import FilePreview from '~/repository/components/preview/index.vue';
import FileTable from '~/repository/components/table/index.vue';
import TreeContent from '~/repository/components/tree_content.vue';
-import { TREE_INITIAL_FETCH_COUNT } from '~/repository/constants';
let vm;
let $apollo;
@@ -19,10 +19,17 @@ function factory(path, data = () => ({})) {
mocks: {
$apollo,
},
+ provide: {
+ glFeatures: {
+ increasePageSizeExponentially: true,
+ },
+ },
});
}
describe('Repository table component', () => {
+ const findFileTable = () => vm.find(FileTable);
+
afterEach(() => {
vm.destroy();
});
@@ -85,14 +92,12 @@ describe('Repository table component', () => {
describe('FileTable showMore', () => {
describe('when is present', () => {
- const fileTable = () => vm.find(FileTable);
-
beforeEach(async () => {
factory('/');
});
it('is changes hasShowMore to false when "showMore" event is emitted', async () => {
- fileTable().vm.$emit('showMore');
+ findFileTable().vm.$emit('showMore');
await vm.vm.$nextTick();
@@ -100,7 +105,7 @@ describe('Repository table component', () => {
});
it('changes clickedShowMore when "showMore" event is emitted', async () => {
- fileTable().vm.$emit('showMore');
+ findFileTable().vm.$emit('showMore');
await vm.vm.$nextTick();
@@ -110,7 +115,7 @@ describe('Repository table component', () => {
it('triggers fetchFiles when "showMore" event is emitted', () => {
jest.spyOn(vm.vm, 'fetchFiles');
- fileTable().vm.$emit('showMore');
+ findFileTable().vm.$emit('showMore');
expect(vm.vm.fetchFiles).toHaveBeenCalled();
});
@@ -126,10 +131,52 @@ describe('Repository table component', () => {
expect(vm.vm.hasShowMore).toBe(false);
});
- it('has limit of 1000 files on initial load', () => {
+ it.each`
+ totalBlobs | pagesLoaded | limitReached
+ ${900} | ${1} | ${false}
+ ${1000} | ${1} | ${true}
+ ${1002} | ${1} | ${true}
+ ${1002} | ${2} | ${false}
+ ${1900} | ${2} | ${false}
+ ${2000} | ${2} | ${true}
+ `('has limit of 1000 entries per page', async ({ totalBlobs, pagesLoaded, limitReached }) => {
factory('/');
- expect(TREE_INITIAL_FETCH_COUNT * vm.vm.pageSize).toBe(1000);
+ const blobs = new Array(totalBlobs).fill('fakeBlob');
+ vm.setData({ entries: { blobs }, pagesLoaded });
+
+ await vm.vm.$nextTick();
+
+ expect(findFileTable().props('hasMore')).toBe(limitReached);
+ });
+
+ it.each`
+ fetchCounter | pageSize
+ ${0} | ${10}
+ ${2} | ${30}
+ ${4} | ${50}
+ ${6} | ${70}
+ ${8} | ${90}
+ ${10} | ${100}
+ ${20} | ${100}
+ ${100} | ${100}
+ ${200} | ${100}
+ `('exponentially increases page size, to a maximum of 100', ({ fetchCounter, pageSize }) => {
+ factory('/');
+ vm.setData({ fetchCounter });
+
+ vm.vm.fetchFiles();
+
+ expect($apollo.query).toHaveBeenCalledWith({
+ query: filesQuery,
+ variables: {
+ pageSize,
+ nextPageCursor: '',
+ path: '/',
+ projectPath: '',
+ ref: '',
+ },
+ });
});
});
});
diff --git a/spec/frontend/repository/components/upload_blob_modal_spec.js b/spec/frontend/repository/components/upload_blob_modal_spec.js
index d93b1d7e5f1..08a6583b60c 100644
--- a/spec/frontend/repository/components/upload_blob_modal_spec.js
+++ b/spec/frontend/repository/components/upload_blob_modal_spec.js
@@ -190,7 +190,9 @@ describe('UploadBlobModal', () => {
});
it('creates a flash error', () => {
- expect(createFlash).toHaveBeenCalledWith('Error uploading file. Please try again.');
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'Error uploading file. Please try again.',
+ });
});
afterEach(() => {
diff --git a/spec/frontend/repository/log_tree_spec.js b/spec/frontend/repository/log_tree_spec.js
index 8cabf902a4f..5186c9a8992 100644
--- a/spec/frontend/repository/log_tree_spec.js
+++ b/spec/frontend/repository/log_tree_spec.js
@@ -1,6 +1,10 @@
import MockAdapter from 'axios-mock-adapter';
+import { createMockClient } from 'helpers/mock_apollo_helper';
import axios from '~/lib/utils/axios_utils';
import { resolveCommit, fetchLogsTree } from '~/repository/log_tree';
+import commitsQuery from '~/repository/queries/commits.query.graphql';
+import projectPathQuery from '~/repository/queries/project_path.query.graphql';
+import refQuery from '~/repository/queries/ref.query.graphql';
const mockData = [
{
@@ -10,6 +14,7 @@ const mockData = [
committed_date: '2019-01-01',
},
commit_path: `https://test.com`,
+ commit_title_html: 'commit title',
file_name: 'index.js',
type: 'blob',
},
@@ -50,19 +55,15 @@ describe('fetchLogsTree', () => {
global.gon = { relative_url_root: '' };
- client = {
- readQuery: () => ({
- projectPath: 'gitlab-org/gitlab-foss',
- escapedRef: 'main',
- commits: [],
- }),
- writeQuery: jest.fn(),
- };
-
resolver = {
entry: { name: 'index.js', type: 'blob' },
resolve: jest.fn(),
};
+
+ client = createMockClient();
+ client.writeQuery({ query: projectPathQuery, data: { projectPath: 'gitlab-org/gitlab-foss' } });
+ client.writeQuery({ query: refQuery, data: { ref: 'main', escapedRef: 'main' } });
+ client.writeQuery({ query: commitsQuery, data: { commits: [] } });
});
afterEach(() => {
@@ -125,25 +126,19 @@ describe('fetchLogsTree', () => {
it('writes query to client', async () => {
await fetchLogsTree(client, '', '0', resolver);
- expect(client.writeQuery).toHaveBeenCalledWith({
- query: expect.anything(),
- data: {
- projectPath: 'gitlab-org/gitlab-foss',
- escapedRef: 'main',
- commits: [
- expect.objectContaining({
- __typename: 'LogTreeCommit',
- commitPath: 'https://test.com',
- committedDate: '2019-01-01',
- fileName: 'index.js',
- filePath: '/index.js',
- message: 'testing message',
- sha: '123',
- titleHtml: undefined,
- type: 'blob',
- }),
- ],
- },
+ expect(client.readQuery({ query: commitsQuery })).toEqual({
+ commits: [
+ expect.objectContaining({
+ commitPath: 'https://test.com',
+ committedDate: '2019-01-01',
+ fileName: 'index.js',
+ filePath: '/index.js',
+ message: 'testing message',
+ sha: '123',
+ titleHtml: 'commit title',
+ type: 'blob',
+ }),
+ ],
});
});
});
diff --git a/spec/frontend/right_sidebar_spec.js b/spec/frontend/right_sidebar_spec.js
index 8699e1cf420..d1f861669a0 100644
--- a/spec/frontend/right_sidebar_spec.js
+++ b/spec/frontend/right_sidebar_spec.js
@@ -66,22 +66,6 @@ describe('RightSidebar', () => {
assertSidebarState('collapsed');
});
- it('should broadcast todo:toggle event when add todo clicked', (done) => {
- const todos = getJSONFixture('todos/todos.json');
- mock.onPost(/(.*)\/todos$/).reply(200, todos);
-
- const todoToggleSpy = jest.fn();
- $(document).on('todo:toggle', todoToggleSpy);
-
- $('.issuable-sidebar-header .js-issuable-todo').click();
-
- setImmediate(() => {
- expect(todoToggleSpy.mock.calls.length).toEqual(1);
-
- done();
- });
- });
-
it('should not hide collapsed icons', () => {
[].forEach.call(document.querySelectorAll('.sidebar-collapsed-icon'), (el) => {
expect(el.querySelector('.fa, svg').classList.contains('hidden')).toBeFalsy();
diff --git a/spec/frontend/runner/components/cells/runner_actions_cell_spec.js b/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
index 12651a82a0c..95f7c38cafc 100644
--- a/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
+++ b/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
@@ -1,18 +1,30 @@
-import { shallowMount } from '@vue/test-utils';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
import RunnerActionCell from '~/runner/components/cells/runner_actions_cell.vue';
-import deleteRunnerMutation from '~/runner/graphql/delete_runner.mutation.graphql';
import getRunnersQuery from '~/runner/graphql/get_runners.query.graphql';
+import runnerDeleteMutation from '~/runner/graphql/runner_delete.mutation.graphql';
import runnerUpdateMutation from '~/runner/graphql/runner_update.mutation.graphql';
+import { captureException } from '~/runner/sentry_utils';
+import { runnerData } from '../../mock_data';
-const mockId = '1';
+const mockRunner = runnerData.data.runner;
const getRunnersQueryName = getRunnersQuery.definitions[0].name.value;
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+jest.mock('~/flash');
+jest.mock('~/runner/sentry_utils');
+
describe('RunnerTypeCell', () => {
let wrapper;
- let mutate;
+ const runnerDeleteMutationHandler = jest.fn();
+ const runnerUpdateMutationHandler = jest.fn();
const findEditBtn = () => wrapper.findByTestId('edit-runner');
const findToggleActiveBtn = () => wrapper.findByTestId('toggle-active-runner');
@@ -23,26 +35,43 @@ describe('RunnerTypeCell', () => {
shallowMount(RunnerActionCell, {
propsData: {
runner: {
- id: `gid://gitlab/Ci::Runner/${mockId}`,
+ id: mockRunner.id,
active,
},
},
- mocks: {
- $apollo: {
- mutate,
- },
- },
+ localVue,
+ apolloProvider: createMockApollo([
+ [runnerDeleteMutation, runnerDeleteMutationHandler],
+ [runnerUpdateMutation, runnerUpdateMutationHandler],
+ ]),
...options,
}),
);
};
beforeEach(() => {
- mutate = jest.fn();
+ runnerDeleteMutationHandler.mockResolvedValue({
+ data: {
+ runnerDelete: {
+ errors: [],
+ },
+ },
+ });
+
+ runnerUpdateMutationHandler.mockResolvedValue({
+ data: {
+ runnerUpdate: {
+ runner: runnerData.data.runner,
+ errors: [],
+ },
+ },
+ });
});
afterEach(() => {
- mutate.mockReset();
+ runnerDeleteMutationHandler.mockReset();
+ runnerUpdateMutationHandler.mockReset();
+
wrapper.destroy();
});
@@ -58,17 +87,6 @@ describe('RunnerTypeCell', () => {
${'paused'} | ${'Resume'} | ${'play'} | ${false} | ${true}
`('When the runner is $state', ({ label, icon, isActive, newActiveValue }) => {
beforeEach(() => {
- mutate.mockResolvedValue({
- data: {
- runnerUpdate: {
- runner: {
- id: `gid://gitlab/Ci::Runner/1`,
- __typename: 'CiRunner',
- },
- },
- },
- });
-
createComponent({ active: isActive });
});
@@ -93,46 +111,93 @@ describe('RunnerTypeCell', () => {
});
describe(`When clicking on the ${icon} button`, () => {
- beforeEach(async () => {
+ it(`The apollo mutation to set active to ${newActiveValue} is called`, async () => {
+ expect(runnerUpdateMutationHandler).toHaveBeenCalledTimes(0);
+
await findToggleActiveBtn().vm.$emit('click');
- await waitForPromises();
- });
- it(`The apollo mutation to set active to ${newActiveValue} is called`, () => {
- expect(mutate).toHaveBeenCalledTimes(1);
- expect(mutate).toHaveBeenCalledWith({
- mutation: runnerUpdateMutation,
- variables: {
- input: {
- id: `gid://gitlab/Ci::Runner/${mockId}`,
- active: newActiveValue,
- },
+ expect(runnerUpdateMutationHandler).toHaveBeenCalledTimes(1);
+ expect(runnerUpdateMutationHandler).toHaveBeenCalledWith({
+ input: {
+ id: mockRunner.id,
+ active: newActiveValue,
},
});
});
- it('The button does not have a loading state', () => {
+ it('The button does not have a loading state after the mutation occurs', async () => {
+ await findToggleActiveBtn().vm.$emit('click');
+
+ expect(findToggleActiveBtn().props('loading')).toBe(true);
+
+ await waitForPromises();
+
expect(findToggleActiveBtn().props('loading')).toBe(false);
});
});
- });
- describe('When the user clicks a runner', () => {
- beforeEach(() => {
- createComponent();
+ describe('When update fails', () => {
+ describe('On a network error', () => {
+ const mockErrorMsg = 'Update error!';
+
+ beforeEach(async () => {
+ runnerUpdateMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg));
+
+ await findToggleActiveBtn().vm.$emit('click');
+ });
+
+ it('error is reported to sentry', () => {
+ expect(captureException).toHaveBeenCalledWith({
+ error: new Error(`Network error: ${mockErrorMsg}`),
+ component: 'RunnerActionsCell',
+ });
+ });
- mutate.mockResolvedValue({
- data: {
- runnerDelete: {
- runner: {
- id: `gid://gitlab/Ci::Runner/1`,
- __typename: 'CiRunner',
+ it('error is shown to the user', () => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('On a validation error', () => {
+ const mockErrorMsg = 'Runner not found!';
+ const mockErrorMsg2 = 'User not allowed!';
+
+ beforeEach(async () => {
+ runnerUpdateMutationHandler.mockResolvedValue({
+ data: {
+ runnerUpdate: {
+ runner: runnerData.data.runner,
+ errors: [mockErrorMsg, mockErrorMsg2],
+ },
},
- },
- },
+ });
+
+ await findToggleActiveBtn().vm.$emit('click');
+ });
+
+ it('error is reported to sentry', () => {
+ expect(captureException).toHaveBeenCalledWith({
+ error: new Error(`${mockErrorMsg} ${mockErrorMsg2}`),
+ component: 'RunnerActionsCell',
+ });
+ });
+
+ it('error is shown to the user', () => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ });
});
+ });
+ });
+ describe('When the user clicks a runner', () => {
+ beforeEach(() => {
jest.spyOn(window, 'confirm');
+
+ createComponent();
+ });
+
+ afterEach(() => {
+ window.confirm.mockRestore();
});
describe('When the user confirms deletion', () => {
@@ -141,18 +206,28 @@ describe('RunnerTypeCell', () => {
await findDeleteBtn().vm.$emit('click');
});
- it('The user sees a confirmation alert', async () => {
+ it('The user sees a confirmation alert', () => {
expect(window.confirm).toHaveBeenCalledTimes(1);
expect(window.confirm).toHaveBeenCalledWith(expect.any(String));
});
it('The delete mutation is called correctly', () => {
- expect(mutate).toHaveBeenCalledTimes(1);
- expect(mutate).toHaveBeenCalledWith({
- mutation: deleteRunnerMutation,
+ expect(runnerDeleteMutationHandler).toHaveBeenCalledTimes(1);
+ expect(runnerDeleteMutationHandler).toHaveBeenCalledWith({
+ input: { id: mockRunner.id },
+ });
+ });
+
+ it('When delete mutation is called, current runners are refetched', async () => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate');
+
+ await findDeleteBtn().vm.$emit('click');
+
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
+ mutation: runnerDeleteMutation,
variables: {
input: {
- id: `gid://gitlab/Ci::Runner/${mockId}`,
+ id: mockRunner.id,
},
},
awaitRefetchQueries: true,
@@ -176,6 +251,57 @@ describe('RunnerTypeCell', () => {
expect(findDeleteBtn().attributes('title')).toBe('');
});
+
+ describe('When delete fails', () => {
+ describe('On a network error', () => {
+ const mockErrorMsg = 'Delete error!';
+
+ beforeEach(async () => {
+ runnerDeleteMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg));
+
+ await findDeleteBtn().vm.$emit('click');
+ });
+
+ it('error is reported to sentry', () => {
+ expect(captureException).toHaveBeenCalledWith({
+ error: new Error(`Network error: ${mockErrorMsg}`),
+ component: 'RunnerActionsCell',
+ });
+ });
+
+ it('error is shown to the user', () => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('On a validation error', () => {
+ const mockErrorMsg = 'Runner not found!';
+ const mockErrorMsg2 = 'User not allowed!';
+
+ beforeEach(async () => {
+ runnerDeleteMutationHandler.mockResolvedValue({
+ data: {
+ runnerDelete: {
+ errors: [mockErrorMsg, mockErrorMsg2],
+ },
+ },
+ });
+
+ await findDeleteBtn().vm.$emit('click');
+ });
+
+ it('error is reported to sentry', () => {
+ expect(captureException).toHaveBeenCalledWith({
+ error: new Error(`${mockErrorMsg} ${mockErrorMsg2}`),
+ component: 'RunnerActionsCell',
+ });
+ });
+
+ it('error is shown to the user', () => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ });
+ });
+ });
});
describe('When the user does not confirm deletion', () => {
@@ -189,7 +315,7 @@ describe('RunnerTypeCell', () => {
});
it('The delete mutation is not called', () => {
- expect(mutate).toHaveBeenCalledTimes(0);
+ expect(runnerDeleteMutationHandler).toHaveBeenCalledTimes(0);
});
it('The delete button does not have a loading state', () => {
diff --git a/spec/frontend/runner/components/helpers/masked_value_spec.js b/spec/frontend/runner/components/helpers/masked_value_spec.js
new file mode 100644
index 00000000000..f87315057ec
--- /dev/null
+++ b/spec/frontend/runner/components/helpers/masked_value_spec.js
@@ -0,0 +1,51 @@
+import { GlButton } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import MaskedValue from '~/runner/components/helpers/masked_value.vue';
+
+const mockSecret = '01234567890';
+const mockMasked = '***********';
+
+describe('MaskedValue', () => {
+ let wrapper;
+
+ const findButton = () => wrapper.findComponent(GlButton);
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMount(MaskedValue, {
+ propsData: {
+ value: mockSecret,
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('Displays masked value by default', () => {
+ expect(wrapper.text()).toBe(mockMasked);
+ });
+
+ describe('When the icon is clicked', () => {
+ beforeEach(() => {
+ findButton().vm.$emit('click');
+ });
+
+ it('Displays the actual value', () => {
+ expect(wrapper.text()).toBe(mockSecret);
+ expect(wrapper.text()).not.toBe(mockMasked);
+ });
+
+ it('When user clicks again, displays masked value', async () => {
+ await findButton().vm.$emit('click');
+
+ expect(wrapper.text()).toBe(mockMasked);
+ expect(wrapper.text()).not.toBe(mockSecret);
+ });
+ });
+});
diff --git a/spec/frontend/runner/components/runner_filtered_search_bar_spec.js b/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
index 61a8f821b30..85cf7ea92df 100644
--- a/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
+++ b/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
@@ -2,8 +2,10 @@ import { GlFilteredSearch, GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_bar.vue';
-import { PARAM_KEY_STATUS, PARAM_KEY_RUNNER_TYPE } from '~/runner/constants';
+import TagToken from '~/runner/components/search_tokens/tag_token.vue';
+import { PARAM_KEY_STATUS, PARAM_KEY_RUNNER_TYPE, PARAM_KEY_TAG } from '~/runner/constants';
import FilteredSearch from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
+import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
describe('RunnerList', () => {
let wrapper;
@@ -11,6 +13,7 @@ describe('RunnerList', () => {
const findFilteredSearch = () => wrapper.findComponent(FilteredSearch);
const findGlFilteredSearch = () => wrapper.findComponent(GlFilteredSearch);
const findSortOptions = () => wrapper.findAllComponents(GlDropdownItem);
+ const findActiveRunnersMessage = () => wrapper.findByTestId('active-runners-message');
const mockDefaultSort = 'CREATED_DESC';
const mockOtherSort = 'CONTACTED_DESC';
@@ -18,18 +21,20 @@ describe('RunnerList', () => {
{ type: PARAM_KEY_STATUS, value: { data: 'ACTIVE', operator: '=' } },
{ type: 'filtered-search-term', value: { data: '' } },
];
+ const mockActiveRunnersCount = 2;
const createComponent = ({ props = {}, options = {} } = {}) => {
wrapper = extendedWrapper(
shallowMount(RunnerFilteredSearchBar, {
propsData: {
+ namespace: 'runners',
value: {
filters: [],
sort: mockDefaultSort,
},
+ activeRunnersCount: mockActiveRunnersCount,
...props,
},
- attrs: { namespace: 'runners' },
stubs: {
FilteredSearch,
GlFilteredSearch,
@@ -53,6 +58,18 @@ describe('RunnerList', () => {
expect(findFilteredSearch().props('namespace')).toBe('runners');
});
+ it('Displays an active runner count', () => {
+ expect(findActiveRunnersMessage().text()).toBe(
+ `Runners currently online: ${mockActiveRunnersCount}`,
+ );
+ });
+
+ it('Displays a large active runner count', () => {
+ createComponent({ props: { activeRunnersCount: 2000 } });
+
+ expect(findActiveRunnersMessage().text()).toBe('Runners currently online: 2,000');
+ });
+
it('sets sorting options', () => {
const SORT_OPTIONS_COUNT = 2;
@@ -65,12 +82,18 @@ describe('RunnerList', () => {
expect(findFilteredSearch().props('tokens')).toEqual([
expect.objectContaining({
type: PARAM_KEY_STATUS,
+ token: BaseToken,
options: expect.any(Array),
}),
expect.objectContaining({
type: PARAM_KEY_RUNNER_TYPE,
+ token: BaseToken,
options: expect.any(Array),
}),
+ expect.objectContaining({
+ type: PARAM_KEY_TAG,
+ token: TagToken,
+ }),
]);
});
diff --git a/spec/frontend/runner/components/runner_list_spec.js b/spec/frontend/runner/components/runner_list_spec.js
index d88d7b3fbee..5fff3581e39 100644
--- a/spec/frontend/runner/components/runner_list_spec.js
+++ b/spec/frontend/runner/components/runner_list_spec.js
@@ -1,5 +1,6 @@
import { GlLink, GlTable, GlSkeletonLoader } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
+import { cloneDeep } from 'lodash';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import RunnerList from '~/runner/components/runner_list.vue';
@@ -11,7 +12,6 @@ const mockActiveRunnersCount = mockRunners.length;
describe('RunnerList', () => {
let wrapper;
- const findActiveRunnersMessage = () => wrapper.findByTestId('active-runners-message');
const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
const findTable = () => wrapper.findComponent(GlTable);
const findHeaders = () => wrapper.findAll('th');
@@ -39,18 +39,6 @@ describe('RunnerList', () => {
wrapper.destroy();
});
- it('Displays active runner count', () => {
- expect(findActiveRunnersMessage().text()).toBe(
- `Runners currently online: ${mockActiveRunnersCount}`,
- );
- });
-
- it('Displays a large active runner count', () => {
- createComponent({ props: { activeRunnersCount: 2000 } });
-
- expect(findActiveRunnersMessage().text()).toBe('Runners currently online: 2,000');
- });
-
it('Displays headers', () => {
const headerLabels = findHeaders().wrappers.map((w) => w.text());
@@ -85,12 +73,11 @@ describe('RunnerList', () => {
);
expect(findCell({ fieldKey: 'name' }).text()).toContain(description);
- // Other fields: some cells are empty in the first iteration
- // See https://gitlab.com/gitlab-org/gitlab/-/issues/329658#pending-features
+ // Other fields
expect(findCell({ fieldKey: 'version' }).text()).toBe(version);
expect(findCell({ fieldKey: 'ipAddress' }).text()).toBe(ipAddress);
- expect(findCell({ fieldKey: 'projectCount' }).text()).toBe('');
- expect(findCell({ fieldKey: 'jobCount' }).text()).toBe('');
+ expect(findCell({ fieldKey: 'projectCount' }).text()).toBe('1');
+ expect(findCell({ fieldKey: 'jobCount' }).text()).toBe('0');
expect(findCell({ fieldKey: 'tagList' }).text()).toBe('');
expect(findCell({ fieldKey: 'contactedAt' }).text()).toEqual(expect.any(String));
@@ -101,6 +88,54 @@ describe('RunnerList', () => {
expect(actions.findByTestId('toggle-active-runner').exists()).toBe(true);
});
+ describe('Table data formatting', () => {
+ let mockRunnersCopy;
+
+ beforeEach(() => {
+ mockRunnersCopy = cloneDeep(mockRunners);
+ });
+
+ it('Formats null project counts', () => {
+ mockRunnersCopy[0].projectCount = null;
+
+ createComponent({ props: { runners: mockRunnersCopy } }, mount);
+
+ expect(findCell({ fieldKey: 'projectCount' }).text()).toBe('n/a');
+ });
+
+ it('Formats 0 project counts', () => {
+ mockRunnersCopy[0].projectCount = 0;
+
+ createComponent({ props: { runners: mockRunnersCopy } }, mount);
+
+ expect(findCell({ fieldKey: 'projectCount' }).text()).toBe('0');
+ });
+
+ it('Formats big project counts', () => {
+ mockRunnersCopy[0].projectCount = 1000;
+
+ createComponent({ props: { runners: mockRunnersCopy } }, mount);
+
+ expect(findCell({ fieldKey: 'projectCount' }).text()).toBe('1,000');
+ });
+
+ it('Formats job counts', () => {
+ mockRunnersCopy[0].jobCount = 1000;
+
+ createComponent({ props: { runners: mockRunnersCopy } }, mount);
+
+ expect(findCell({ fieldKey: 'jobCount' }).text()).toBe('1,000');
+ });
+
+ it('Formats big job counts with a plus symbol', () => {
+ mockRunnersCopy[0].jobCount = 1001;
+
+ createComponent({ props: { runners: mockRunnersCopy } }, mount);
+
+ expect(findCell({ fieldKey: 'jobCount' }).text()).toBe('1,000+');
+ });
+ });
+
it('Links to the runner page', () => {
const { id } = mockRunners[0];
diff --git a/spec/frontend/runner/components/runner_manual_setup_help_spec.js b/spec/frontend/runner/components/runner_manual_setup_help_spec.js
index add595d784e..effef0e7ebf 100644
--- a/spec/frontend/runner/components/runner_manual_setup_help_spec.js
+++ b/spec/frontend/runner/components/runner_manual_setup_help_spec.js
@@ -3,6 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { TEST_HOST } from 'helpers/test_constants';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import MaskedValue from '~/runner/components/helpers/masked_value.vue';
import RunnerManualSetupHelp from '~/runner/components/runner_manual_setup_help.vue';
import RunnerRegistrationTokenReset from '~/runner/components/runner_registration_token_reset.vue';
import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/runner/constants';
@@ -37,6 +38,7 @@ describe('RunnerManualSetupHelp', () => {
...props,
},
stubs: {
+ MaskedValue,
GlSprintf,
},
}),
@@ -93,7 +95,11 @@ describe('RunnerManualSetupHelp', () => {
expect(findRunnerInstructions().exists()).toBe(true);
});
- it('Displays the registration token', () => {
+ it('Displays the registration token', async () => {
+ findRegistrationToken().find('[data-testid="toggle-masked"]').vm.$emit('click');
+
+ await nextTick();
+
expect(findRegistrationToken().text()).toBe(mockRegistrationToken);
expect(findClipboardButtons().at(1).props('text')).toBe(mockRegistrationToken);
});
@@ -105,6 +111,7 @@ describe('RunnerManualSetupHelp', () => {
it('Replaces the runner reset button', async () => {
const mockNewRegistrationToken = 'NEW_MOCK_REGISTRATION_TOKEN';
+ findRegistrationToken().find('[data-testid="toggle-masked"]').vm.$emit('click');
findRunnerRegistrationTokenReset().vm.$emit('tokenReset', mockNewRegistrationToken);
await nextTick();
diff --git a/spec/frontend/runner/components/runner_registration_token_reset_spec.js b/spec/frontend/runner/components/runner_registration_token_reset_spec.js
index fa5751b380f..6dc207e369c 100644
--- a/spec/frontend/runner/components/runner_registration_token_reset_spec.js
+++ b/spec/frontend/runner/components/runner_registration_token_reset_spec.js
@@ -7,8 +7,10 @@ import createFlash, { FLASH_TYPES } from '~/flash';
import RunnerRegistrationTokenReset from '~/runner/components/runner_registration_token_reset.vue';
import { INSTANCE_TYPE } from '~/runner/constants';
import runnersRegistrationTokenResetMutation from '~/runner/graphql/runners_registration_token_reset.mutation.graphql';
+import { captureException } from '~/runner/sentry_utils';
jest.mock('~/flash');
+jest.mock('~/runner/sentry_utils');
const localVue = createLocalVue();
localVue.use(VueApollo);
@@ -111,25 +113,32 @@ describe('RunnerRegistrationTokenReset', () => {
describe('On error', () => {
it('On network error, error message is shown', async () => {
- runnersRegistrationTokenResetMutationHandler.mockRejectedValueOnce(
- new Error('Something went wrong'),
- );
+ const mockErrorMsg = 'Token reset failed!';
+
+ runnersRegistrationTokenResetMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg));
window.confirm.mockReturnValueOnce(true);
await findButton().vm.$emit('click');
await waitForPromises();
expect(createFlash).toHaveBeenLastCalledWith({
- message: 'Network error: Something went wrong',
+ message: `Network error: ${mockErrorMsg}`,
+ });
+ expect(captureException).toHaveBeenCalledWith({
+ error: new Error(`Network error: ${mockErrorMsg}`),
+ component: 'RunnerRegistrationTokenReset',
});
});
it('On validation error, error message is shown', async () => {
+ const mockErrorMsg = 'User not allowed!';
+ const mockErrorMsg2 = 'Type is not valid!';
+
runnersRegistrationTokenResetMutationHandler.mockResolvedValue({
data: {
runnersRegistrationTokenReset: {
token: null,
- errors: ['Token reset failed'],
+ errors: [mockErrorMsg, mockErrorMsg2],
},
},
});
@@ -139,7 +148,11 @@ describe('RunnerRegistrationTokenReset', () => {
await waitForPromises();
expect(createFlash).toHaveBeenLastCalledWith({
- message: 'Token reset failed',
+ message: `${mockErrorMsg} ${mockErrorMsg2}`,
+ });
+ expect(captureException).toHaveBeenCalledWith({
+ error: new Error(`${mockErrorMsg} ${mockErrorMsg2}`),
+ component: 'RunnerRegistrationTokenReset',
});
});
});
diff --git a/spec/frontend/runner/components/runner_tag_spec.js b/spec/frontend/runner/components/runner_tag_spec.js
new file mode 100644
index 00000000000..dda318f8153
--- /dev/null
+++ b/spec/frontend/runner/components/runner_tag_spec.js
@@ -0,0 +1,45 @@
+import { GlBadge } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import RunnerTag from '~/runner/components/runner_tag.vue';
+
+describe('RunnerTag', () => {
+ let wrapper;
+
+ const findBadge = () => wrapper.findComponent(GlBadge);
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMount(RunnerTag, {
+ propsData: {
+ tag: 'tag1',
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('Displays tag text', () => {
+ expect(wrapper.text()).toBe('tag1');
+ });
+
+ it('Displays tags with correct style', () => {
+ expect(findBadge().props()).toMatchObject({
+ size: 'md',
+ variant: 'info',
+ });
+ });
+
+ it('Displays tags with small size', () => {
+ createComponent({
+ props: { size: 'sm' },
+ });
+
+ expect(findBadge().props('size')).toBe('sm');
+ });
+});
diff --git a/spec/frontend/runner/components/runner_tags_spec.js b/spec/frontend/runner/components/runner_tags_spec.js
index 7bb3f65e4ba..b6487ade0d6 100644
--- a/spec/frontend/runner/components/runner_tags_spec.js
+++ b/spec/frontend/runner/components/runner_tags_spec.js
@@ -1,5 +1,5 @@
import { GlBadge } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import RunnerTags from '~/runner/components/runner_tags.vue';
describe('RunnerTags', () => {
@@ -9,7 +9,7 @@ describe('RunnerTags', () => {
const findBadgesAt = (i = 0) => wrapper.findAllComponents(GlBadge).at(i);
const createComponent = ({ props = {} } = {}) => {
- wrapper = shallowMount(RunnerTags, {
+ wrapper = mount(RunnerTags, {
propsData: {
tagList: ['tag1', 'tag2'],
...props,
@@ -45,14 +45,6 @@ describe('RunnerTags', () => {
expect(findBadge().props('size')).toBe('sm');
});
- it('Displays tags with a variant', () => {
- createComponent({
- props: { variant: 'warning' },
- });
-
- expect(findBadge().props('variant')).toBe('warning');
- });
-
it('Is empty when there are no tags', () => {
createComponent({
props: { tagList: null },
diff --git a/spec/frontend/runner/components/runner_update_form_spec.js b/spec/frontend/runner/components/runner_update_form_spec.js
index 6333ed7118a..15029d7a911 100644
--- a/spec/frontend/runner/components/runner_update_form_spec.js
+++ b/spec/frontend/runner/components/runner_update_form_spec.js
@@ -15,9 +15,11 @@ import {
ACCESS_LEVEL_NOT_PROTECTED,
} from '~/runner/constants';
import runnerUpdateMutation from '~/runner/graphql/runner_update.mutation.graphql';
+import { captureException } from '~/runner/sentry_utils';
import { runnerData } from '../mock_data';
jest.mock('~/flash');
+jest.mock('~/runner/sentry_utils');
const mockRunner = runnerData.data.runner;
@@ -205,13 +207,11 @@ describe('RunnerUpdateForm', () => {
});
it.each`
- value | submitted
- ${''} | ${{ tagList: [] }}
- ${'tag1, tag2'} | ${{ tagList: ['tag1', 'tag2'] }}
- ${'with spaces'} | ${{ tagList: ['with spaces'] }}
- ${',,,,, commas'} | ${{ tagList: ['commas'] }}
- ${'more ,,,,, commas'} | ${{ tagList: ['more', 'commas'] }}
- ${' trimmed , trimmed2 '} | ${{ tagList: ['trimmed', 'trimmed2'] }}
+ value | submitted
+ ${''} | ${{ tagList: [] }}
+ ${'tag1, tag2'} | ${{ tagList: ['tag1', 'tag2'] }}
+ ${'with spaces'} | ${{ tagList: ['with spaces'] }}
+ ${'more ,,,,, commas'} | ${{ tagList: ['more', 'commas'] }}
`('Field updates runner\'s tags for "$value"', async ({ value, submitted }) => {
const runner = { ...mockRunner, tagList: ['tag1'] };
createComponent({ props: { runner } });
@@ -232,22 +232,30 @@ describe('RunnerUpdateForm', () => {
});
it('On network error, error message is shown', async () => {
- runnerUpdateHandler.mockRejectedValue(new Error('Something went wrong'));
+ const mockErrorMsg = 'Update error!';
+
+ runnerUpdateHandler.mockRejectedValue(new Error(mockErrorMsg));
await submitFormAndWait();
expect(createFlash).toHaveBeenLastCalledWith({
- message: 'Network error: Something went wrong',
+ message: `Network error: ${mockErrorMsg}`,
+ });
+ expect(captureException).toHaveBeenCalledWith({
+ component: 'RunnerUpdateForm',
+ error: new Error(`Network error: ${mockErrorMsg}`),
});
expect(findSubmitDisabledAttr()).toBeUndefined();
});
- it('On validation error, error message is shown', async () => {
+ it('On validation error, error message is shown and it is not sent to sentry', async () => {
+ const mockErrorMsg = 'Invalid value!';
+
runnerUpdateHandler.mockResolvedValue({
data: {
runnerUpdate: {
runner: mockRunner,
- errors: ['A value is invalid'],
+ errors: [mockErrorMsg],
},
},
});
@@ -255,8 +263,9 @@ describe('RunnerUpdateForm', () => {
await submitFormAndWait();
expect(createFlash).toHaveBeenLastCalledWith({
- message: 'A value is invalid',
+ message: mockErrorMsg,
});
+ expect(captureException).not.toHaveBeenCalled();
expect(findSubmitDisabledAttr()).toBeUndefined();
});
});
diff --git a/spec/frontend/runner/components/search_tokens/tag_token_spec.js b/spec/frontend/runner/components/search_tokens/tag_token_spec.js
new file mode 100644
index 00000000000..52b87542243
--- /dev/null
+++ b/spec/frontend/runner/components/search_tokens/tag_token_spec.js
@@ -0,0 +1,188 @@
+import { GlFilteredSearchSuggestion, GlLoadingIcon, GlToken } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import { nextTick } from 'vue';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import axios from '~/lib/utils/axios_utils';
+
+import TagToken, { TAG_SUGGESTIONS_PATH } from '~/runner/components/search_tokens/tag_token.vue';
+import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
+import { getRecentlyUsedSuggestions } from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
+
+jest.mock('~/flash');
+
+jest.mock('~/vue_shared/components/filtered_search_bar/filtered_search_utils', () => ({
+ ...jest.requireActual('~/vue_shared/components/filtered_search_bar/filtered_search_utils'),
+ getRecentlyUsedSuggestions: jest.fn(),
+}));
+
+const mockStorageKey = 'stored-recent-tags';
+
+const mockTags = [
+ { id: 1, name: 'linux' },
+ { id: 2, name: 'windows' },
+ { id: 3, name: 'mac' },
+];
+
+const mockTagsFiltered = [mockTags[0]];
+
+const mockSearchTerm = mockTags[0].name;
+
+const GlFilteredSearchTokenStub = {
+ template: `<div>
+ <slot name="view-token"></slot>
+ <slot name="suggestions"></slot>
+ </div>`,
+};
+
+const mockTagTokenConfig = {
+ icon: 'tag',
+ title: 'Tags',
+ type: 'tag',
+ token: TagToken,
+ recentTokenValuesStorageKey: mockStorageKey,
+ operators: OPERATOR_IS_ONLY,
+};
+
+describe('TagToken', () => {
+ let mock;
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = mount(TagToken, {
+ propsData: {
+ config: mockTagTokenConfig,
+ value: { data: '' },
+ active: false,
+ ...props,
+ },
+ provide: {
+ portalName: 'fake target',
+ alignSuggestions: function fakeAlignSuggestions() {},
+ filteredSearchSuggestionListInstance: {
+ register: jest.fn(),
+ unregister: jest.fn(),
+ },
+ },
+ stubs: {
+ GlFilteredSearchToken: GlFilteredSearchTokenStub,
+ },
+ });
+ };
+
+ const findGlFilteredSearchSuggestions = () =>
+ wrapper.findAllComponents(GlFilteredSearchSuggestion);
+ const findGlFilteredSearchToken = () => wrapper.findComponent(GlFilteredSearchTokenStub);
+ const findToken = () => wrapper.findComponent(GlToken);
+ const findGlLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ beforeEach(async () => {
+ mock = new MockAdapter(axios);
+
+ mock.onGet(TAG_SUGGESTIONS_PATH, { params: { search: '' } }).reply(200, mockTags);
+ mock
+ .onGet(TAG_SUGGESTIONS_PATH, { params: { search: mockSearchTerm } })
+ .reply(200, mockTagsFiltered);
+
+ getRecentlyUsedSuggestions.mockReturnValue([]);
+
+ createComponent();
+ await waitForPromises();
+ });
+
+ afterEach(() => {
+ getRecentlyUsedSuggestions.mockReset();
+ wrapper.destroy();
+ });
+
+ describe('when the tags token is displayed', () => {
+ it('requests tags suggestions', () => {
+ expect(mock.history.get[0].params).toEqual({ search: '' });
+ });
+
+ it('displays tags suggestions', () => {
+ mockTags.forEach(({ name }, i) => {
+ expect(findGlFilteredSearchSuggestions().at(i).text()).toBe(name);
+ });
+ });
+ });
+
+ describe('when suggestions are stored', () => {
+ const storedSuggestions = [{ id: 4, value: 'docker', text: 'docker' }];
+
+ beforeEach(async () => {
+ getRecentlyUsedSuggestions.mockReturnValue(storedSuggestions);
+
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('suggestions are loaded from a correct key', () => {
+ expect(getRecentlyUsedSuggestions).toHaveBeenCalledWith(mockStorageKey);
+ });
+
+ it('displays stored tags suggestions', () => {
+ expect(findGlFilteredSearchSuggestions()).toHaveLength(
+ mockTags.length + storedSuggestions.length,
+ );
+
+ expect(findGlFilteredSearchSuggestions().at(0).text()).toBe(storedSuggestions[0].text);
+ });
+ });
+
+ describe('when the users filters suggestions', () => {
+ beforeEach(async () => {
+ findGlFilteredSearchToken().vm.$emit('input', { data: mockSearchTerm });
+
+ jest.runAllTimers();
+ });
+
+ it('requests filtered tags suggestions', async () => {
+ await waitForPromises();
+
+ expect(mock.history.get[1].params).toEqual({ search: mockSearchTerm });
+ });
+
+ it('shows the loading icon', async () => {
+ await nextTick();
+
+ expect(findGlLoadingIcon().exists()).toBe(true);
+ });
+
+ it('displays filtered tags suggestions', async () => {
+ await waitForPromises();
+
+ expect(findGlFilteredSearchSuggestions()).toHaveLength(mockTagsFiltered.length);
+
+ expect(findGlFilteredSearchSuggestions().at(0).text()).toBe(mockTagsFiltered[0].name);
+ });
+ });
+
+ describe('when suggestions cannot be loaded', () => {
+ beforeEach(async () => {
+ mock.onGet(TAG_SUGGESTIONS_PATH, { params: { search: '' } }).reply(500);
+
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('error is shown', async () => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalledWith({ message: expect.any(String) });
+ });
+ });
+
+ describe('when the user selects a value', () => {
+ beforeEach(async () => {
+ createComponent({ value: { data: mockTags[0].name } });
+ findGlFilteredSearchToken().vm.$emit('select');
+
+ await waitForPromises();
+ });
+
+ it('selected tag is displayed', async () => {
+ expect(findToken().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/runner/runner_detail/runner_details_app_spec.js b/spec/frontend/runner/runner_detail/runner_details_app_spec.js
index d0bd701458d..1a1428e8cb1 100644
--- a/spec/frontend/runner/runner_detail/runner_details_app_spec.js
+++ b/spec/frontend/runner/runner_detail/runner_details_app_spec.js
@@ -2,14 +2,19 @@ import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import RunnerTypeBadge from '~/runner/components/runner_type_badge.vue';
import getRunnerQuery from '~/runner/graphql/get_runner.query.graphql';
import RunnerDetailsApp from '~/runner/runner_details/runner_details_app.vue';
+import { captureException } from '~/runner/sentry_utils';
import { runnerData } from '../mock_data';
+jest.mock('~/flash');
+jest.mock('~/runner/sentry_utils');
+
const mockRunnerGraphqlId = runnerData.data.runner.id;
const mockRunnerId = `${getIdFromGraphQLId(mockRunnerGraphqlId)}`;
@@ -23,11 +28,9 @@ describe('RunnerDetailsApp', () => {
const findRunnerTypeBadge = () => wrapper.findComponent(RunnerTypeBadge);
const createComponentWithApollo = ({ props = {}, mountFn = shallowMount } = {}) => {
- const handlers = [[getRunnerQuery, mockRunnerQuery]];
-
wrapper = mountFn(RunnerDetailsApp, {
localVue,
- apolloProvider: createMockApollo(handlers),
+ apolloProvider: createMockApollo([[getRunnerQuery, mockRunnerQuery]]),
propsData: {
runnerId: mockRunnerId,
...props,
@@ -63,4 +66,22 @@ describe('RunnerDetailsApp', () => {
expect(findRunnerTypeBadge().text()).toBe('shared');
});
+
+ describe('When there is an error', () => {
+ beforeEach(async () => {
+ mockRunnerQuery = jest.fn().mockRejectedValueOnce(new Error('Error!'));
+ await createComponentWithApollo();
+ });
+
+ it('error is reported to sentry', async () => {
+ expect(captureException).toHaveBeenCalledWith({
+ error: new Error('Network error: Error!'),
+ component: 'RunnerDetailsApp',
+ });
+ });
+
+ it('error is shown to the user', async () => {
+ expect(createFlash).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/runner/runner_detail/runner_update_form_utils_spec.js b/spec/frontend/runner/runner_detail/runner_update_form_utils_spec.js
new file mode 100644
index 00000000000..510b4e604ac
--- /dev/null
+++ b/spec/frontend/runner/runner_detail/runner_update_form_utils_spec.js
@@ -0,0 +1,96 @@
+import { ACCESS_LEVEL_NOT_PROTECTED } from '~/runner/constants';
+import {
+ modelToUpdateMutationVariables,
+ runnerToModel,
+} from '~/runner/runner_details/runner_update_form_utils';
+
+const mockId = 'gid://gitlab/Ci::Runner/1';
+const mockDescription = 'Runner Desc.';
+
+const mockRunner = {
+ id: mockId,
+ description: mockDescription,
+ maximumTimeout: 100,
+ accessLevel: ACCESS_LEVEL_NOT_PROTECTED,
+ active: true,
+ locked: true,
+ runUntagged: true,
+ tagList: ['tag-1', 'tag-2'],
+};
+
+const mockModel = {
+ ...mockRunner,
+ tagList: 'tag-1, tag-2',
+};
+
+describe('~/runner/runner_details/runner_update_form_utils', () => {
+ describe('runnerToModel', () => {
+ it('collects all model data', () => {
+ expect(runnerToModel(mockRunner)).toEqual(mockModel);
+ });
+
+ it('does not collect other data', () => {
+ const model = runnerToModel({
+ ...mockRunner,
+ unrelated: 'unrelatedValue',
+ });
+
+ expect(model.unrelated).toEqual(undefined);
+ });
+
+ it('tag list defaults to an empty string', () => {
+ const model = runnerToModel({
+ ...mockRunner,
+ tagList: undefined,
+ });
+
+ expect(model.tagList).toEqual('');
+ });
+ });
+
+ describe('modelToUpdateMutationVariables', () => {
+ it('collects all model data', () => {
+ expect(modelToUpdateMutationVariables(mockModel)).toEqual({
+ input: {
+ ...mockRunner,
+ },
+ });
+ });
+
+ it('collects a nullable timeout from the model', () => {
+ const variables = modelToUpdateMutationVariables({
+ ...mockModel,
+ maximumTimeout: '',
+ });
+
+ expect(variables).toEqual({
+ input: {
+ ...mockRunner,
+ maximumTimeout: null,
+ },
+ });
+ });
+
+ it.each`
+ tagList | tagListInput
+ ${''} | ${[]}
+ ${'tag1, tag2'} | ${['tag1', 'tag2']}
+ ${'with spaces'} | ${['with spaces']}
+ ${',,,,, commas'} | ${['commas']}
+ ${'more ,,,,, commas'} | ${['more', 'commas']}
+ ${' trimmed , trimmed2 '} | ${['trimmed', 'trimmed2']}
+ `('collect tags separated by commas for "$value"', ({ tagList, tagListInput }) => {
+ const variables = modelToUpdateMutationVariables({
+ ...mockModel,
+ tagList,
+ });
+
+ expect(variables).toEqual({
+ input: {
+ ...mockRunner,
+ tagList: tagListInput,
+ },
+ });
+ });
+ });
+});
diff --git a/spec/frontend/runner/runner_list/runner_list_app_spec.js b/spec/frontend/runner/runner_list/runner_list_app_spec.js
index dd913df7143..54b7d1f1bdb 100644
--- a/spec/frontend/runner/runner_list/runner_list_app_spec.js
+++ b/spec/frontend/runner/runner_list/runner_list_app_spec.js
@@ -1,9 +1,9 @@
-import * as Sentry from '@sentry/browser';
import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
import { updateHistory } from '~/lib/utils/url_utility';
import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_bar.vue';
@@ -23,13 +23,15 @@ import {
} from '~/runner/constants';
import getRunnersQuery from '~/runner/graphql/get_runners.query.graphql';
import RunnerListApp from '~/runner/runner_list/runner_list_app.vue';
+import { captureException } from '~/runner/sentry_utils';
import { runnersData, runnersDataPaginated } from '../mock_data';
const mockRegistrationToken = 'MOCK_REGISTRATION_TOKEN';
const mockActiveRunnersCount = 2;
-jest.mock('@sentry/browser');
+jest.mock('~/flash');
+jest.mock('~/runner/sentry_utils');
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
updateHistory: jest.fn(),
@@ -64,7 +66,7 @@ describe('RunnerListApp', () => {
};
const setQuery = (query) => {
- window.location.href = `${TEST_HOST}/admin/runners/${query}`;
+ window.location.href = `${TEST_HOST}/admin/runners?${query}`;
window.location.search = query;
};
@@ -80,11 +82,6 @@ describe('RunnerListApp', () => {
beforeEach(async () => {
setQuery('');
- Sentry.withScope.mockImplementation((fn) => {
- const scope = { setTag: jest.fn() };
- fn(scope);
- });
-
mockRunnersQuery = jest.fn().mockResolvedValue(runnersData);
createComponentWithApollo();
await waitForPromises();
@@ -119,7 +116,7 @@ describe('RunnerListApp', () => {
describe('when a filter is preselected', () => {
beforeEach(async () => {
- window.location.search = `?status[]=${STATUS_ACTIVE}&runner_type[]=${INSTANCE_TYPE}`;
+ setQuery(`?status[]=${STATUS_ACTIVE}&runner_type[]=${INSTANCE_TYPE}&tag[]=tag1`);
createComponentWithApollo();
await waitForPromises();
@@ -130,6 +127,7 @@ describe('RunnerListApp', () => {
filters: [
{ type: 'status', value: { data: STATUS_ACTIVE, operator: '=' } },
{ type: 'runner_type', value: { data: INSTANCE_TYPE, operator: '=' } },
+ { type: 'tag', value: { data: 'tag1', operator: '=' } },
],
sort: 'CREATED_DESC',
pagination: { page: 1 },
@@ -140,6 +138,7 @@ describe('RunnerListApp', () => {
expect(mockRunnersQuery).toHaveBeenLastCalledWith({
status: STATUS_ACTIVE,
type: INSTANCE_TYPE,
+ tagList: ['tag1'],
sort: DEFAULT_SORT,
first: RUNNER_PAGE_SIZE,
});
@@ -157,7 +156,7 @@ describe('RunnerListApp', () => {
it('updates the browser url', () => {
expect(updateHistory).toHaveBeenLastCalledWith({
title: expect.any(String),
- url: 'http://test.host/admin/runners/?status[]=ACTIVE&sort=CREATED_ASC',
+ url: 'http://test.host/admin/runners?status[]=ACTIVE&sort=CREATED_ASC',
});
});
@@ -189,15 +188,21 @@ describe('RunnerListApp', () => {
describe('when runners query fails', () => {
beforeEach(async () => {
- mockRunnersQuery = jest.fn().mockRejectedValue(new Error());
+ mockRunnersQuery = jest.fn().mockRejectedValue(new Error('Error!'));
createComponentWithApollo();
await waitForPromises();
});
it('error is reported to sentry', async () => {
- expect(Sentry.withScope).toHaveBeenCalled();
- expect(Sentry.captureException).toHaveBeenCalled();
+ expect(captureException).toHaveBeenCalledWith({
+ error: new Error('Network error: Error!'),
+ component: 'RunnerListApp',
+ });
+ });
+
+ it('error is shown to the user', async () => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
});
});
diff --git a/spec/frontend/runner/runner_list/runner_search_utils_spec.js b/spec/frontend/runner/runner_list/runner_search_utils_spec.js
index a1f33e9c880..e7969676549 100644
--- a/spec/frontend/runner/runner_list/runner_search_utils_spec.js
+++ b/spec/frontend/runner/runner_list/runner_search_utils_spec.js
@@ -99,6 +99,37 @@ describe('search_params.js', () => {
},
},
{
+ name: 'a tag',
+ urlQuery: '?tag[]=tag-1',
+ search: {
+ filters: [{ type: 'tag', value: { data: 'tag-1', operator: '=' } }],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: {
+ tagList: ['tag-1'],
+ first: 20,
+ sort: 'CREATED_DESC',
+ },
+ },
+ {
+ name: 'two tags',
+ urlQuery: '?tag[]=tag-1&tag[]=tag-2',
+ search: {
+ filters: [
+ { type: 'tag', value: { data: 'tag-1', operator: '=' } },
+ { type: 'tag', value: { data: 'tag-2', operator: '=' } },
+ ],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: {
+ tagList: ['tag-1', 'tag-2'],
+ first: 20,
+ sort: 'CREATED_DESC',
+ },
+ },
+ {
name: 'the next page',
urlQuery: '?page=2&after=AFTER_CURSOR',
search: { filters: [], pagination: { page: 2, after: 'AFTER_CURSOR' }, sort: 'CREATED_DESC' },
@@ -115,14 +146,15 @@ describe('search_params.js', () => {
graphqlVariables: { sort: 'CREATED_DESC', before: 'BEFORE_CURSOR', last: RUNNER_PAGE_SIZE },
},
{
- name:
- 'the next page filtered by multiple status, a single instance type and a non default sort',
+ name: 'the next page filtered by a status, an instance type, tags and a non default sort',
urlQuery:
- '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&sort=CREATED_ASC&page=2&after=AFTER_CURSOR',
+ '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&page=2&after=AFTER_CURSOR',
search: {
filters: [
{ type: 'status', value: { data: 'ACTIVE', operator: '=' } },
{ type: 'runner_type', value: { data: 'INSTANCE_TYPE', operator: '=' } },
+ { type: 'tag', value: { data: 'tag-1', operator: '=' } },
+ { type: 'tag', value: { data: 'tag-2', operator: '=' } },
],
pagination: { page: 2, after: 'AFTER_CURSOR' },
sort: 'CREATED_ASC',
@@ -130,6 +162,7 @@ describe('search_params.js', () => {
graphqlVariables: {
status: 'ACTIVE',
type: 'INSTANCE_TYPE',
+ tagList: ['tag-1', 'tag-2'],
sort: 'CREATED_ASC',
after: 'AFTER_CURSOR',
first: RUNNER_PAGE_SIZE,
diff --git a/spec/frontend/runner/sentry_utils_spec.js b/spec/frontend/runner/sentry_utils_spec.js
new file mode 100644
index 00000000000..b61eb63961e
--- /dev/null
+++ b/spec/frontend/runner/sentry_utils_spec.js
@@ -0,0 +1,39 @@
+import * as Sentry from '@sentry/browser';
+import { captureException } from '~/runner/sentry_utils';
+
+jest.mock('@sentry/browser');
+
+describe('~/runner/sentry_utils', () => {
+ let mockSetTag;
+
+ beforeEach(async () => {
+ mockSetTag = jest.fn();
+
+ Sentry.withScope.mockImplementation((fn) => {
+ const scope = { setTag: mockSetTag };
+ fn(scope);
+ });
+ });
+
+ describe('captureException', () => {
+ const mockError = new Error('Something went wrong!');
+
+ it('error is reported to sentry', () => {
+ captureException({ error: mockError });
+
+ expect(Sentry.withScope).toHaveBeenCalled();
+ expect(Sentry.captureException).toHaveBeenCalledWith(mockError);
+ });
+
+ it('error is reported to sentry with a component name', () => {
+ const mockComponentName = 'MyComponent';
+
+ captureException({ error: mockError, component: mockComponentName });
+
+ expect(Sentry.withScope).toHaveBeenCalled();
+ expect(Sentry.captureException).toHaveBeenCalledWith(mockError);
+
+ expect(mockSetTag).toHaveBeenCalledWith('vue_component', mockComponentName);
+ });
+ });
+});
diff --git a/spec/frontend/search/mock_data.js b/spec/frontend/search/mock_data.js
index fbe01f372b0..24ce45e8a09 100644
--- a/spec/frontend/search/mock_data.js
+++ b/spec/frontend/search/mock_data.js
@@ -1,3 +1,6 @@
+import { GROUPS_LOCAL_STORAGE_KEY, PROJECTS_LOCAL_STORAGE_KEY } from '~/search/store/constants';
+import * as types from '~/search/store/mutation_types';
+
export const MOCK_QUERY = {
scope: 'issues',
state: 'all',
@@ -6,45 +9,45 @@ export const MOCK_QUERY = {
};
export const MOCK_GROUP = {
+ id: 1,
name: 'test group',
full_name: 'full name / test group',
- id: 1,
};
export const MOCK_GROUPS = [
{
+ id: 1,
avatar_url: null,
name: 'test group',
full_name: 'full name / test group',
- id: 1,
},
{
+ id: 2,
avatar_url: 'https://avatar.com',
name: 'test group 2',
full_name: 'full name / test group 2',
- id: 2,
},
];
export const MOCK_PROJECT = {
+ id: 1,
name: 'test project',
namespace: MOCK_GROUP,
nameWithNamespace: 'test group / test project',
- id: 1,
};
export const MOCK_PROJECTS = [
{
+ id: 1,
name: 'test project',
namespace: MOCK_GROUP,
name_with_namespace: 'test group / test project',
- id: 1,
},
{
+ id: 2,
name: 'test project 2',
namespace: MOCK_GROUP,
name_with_namespace: 'test group / test project 2',
- id: 2,
},
];
@@ -63,3 +66,41 @@ export const MOCK_SORT_OPTIONS = [
},
},
];
+
+export const MOCK_LS_KEY = 'mock-ls-key';
+
+export const MOCK_INFLATED_DATA = [
+ { id: 1, name: 'test 1' },
+ { id: 2, name: 'test 2' },
+];
+
+export const FRESH_STORED_DATA = [
+ { id: 1, name: 'test 1', frequency: 1 },
+ { id: 2, name: 'test 2', frequency: 2 },
+];
+
+export const STALE_STORED_DATA = [
+ { id: 1, name: 'blah 1', frequency: 1 },
+ { id: 2, name: 'blah 2', frequency: 2 },
+];
+
+export const MOCK_FRESH_DATA_RES = { name: 'fresh' };
+
+export const PROMISE_ALL_EXPECTED_MUTATIONS = {
+ initGroups: {
+ type: types.LOAD_FREQUENT_ITEMS,
+ payload: { key: GROUPS_LOCAL_STORAGE_KEY, data: FRESH_STORED_DATA },
+ },
+ resGroups: {
+ type: types.LOAD_FREQUENT_ITEMS,
+ payload: { key: GROUPS_LOCAL_STORAGE_KEY, data: [MOCK_FRESH_DATA_RES, MOCK_FRESH_DATA_RES] },
+ },
+ initProjects: {
+ type: types.LOAD_FREQUENT_ITEMS,
+ payload: { key: PROJECTS_LOCAL_STORAGE_KEY, data: FRESH_STORED_DATA },
+ },
+ resProjects: {
+ type: types.LOAD_FREQUENT_ITEMS,
+ payload: { key: PROJECTS_LOCAL_STORAGE_KEY, data: [MOCK_FRESH_DATA_RES, MOCK_FRESH_DATA_RES] },
+ },
+};
diff --git a/spec/frontend/search/store/actions_spec.js b/spec/frontend/search/store/actions_spec.js
index 634661c5843..3755f8ffae7 100644
--- a/spec/frontend/search/store/actions_spec.js
+++ b/spec/frontend/search/store/actions_spec.js
@@ -5,9 +5,20 @@ import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as urlUtils from '~/lib/utils/url_utility';
import * as actions from '~/search/store/actions';
+import { GROUPS_LOCAL_STORAGE_KEY, PROJECTS_LOCAL_STORAGE_KEY } from '~/search/store/constants';
import * as types from '~/search/store/mutation_types';
import createState from '~/search/store/state';
-import { MOCK_QUERY, MOCK_GROUPS, MOCK_PROJECT, MOCK_PROJECTS } from '../mock_data';
+import * as storeUtils from '~/search/store/utils';
+import {
+ MOCK_QUERY,
+ MOCK_GROUPS,
+ MOCK_PROJECT,
+ MOCK_PROJECTS,
+ MOCK_GROUP,
+ FRESH_STORED_DATA,
+ MOCK_FRESH_DATA_RES,
+ PROMISE_ALL_EXPECTED_MUTATIONS,
+} from '../mock_data';
jest.mock('~/flash');
jest.mock('~/lib/utils/url_utility', () => ({
@@ -56,6 +67,46 @@ describe('Global Search Store Actions', () => {
});
});
+ describe.each`
+ action | axiosMock | type | expectedMutations | flashCallCount | lsKey
+ ${actions.loadFrequentGroups} | ${{ method: 'onGet', code: 200 }} | ${'success'} | ${[PROMISE_ALL_EXPECTED_MUTATIONS.initGroups, PROMISE_ALL_EXPECTED_MUTATIONS.resGroups]} | ${0} | ${GROUPS_LOCAL_STORAGE_KEY}
+ ${actions.loadFrequentGroups} | ${{ method: 'onGet', code: 500 }} | ${'error'} | ${[PROMISE_ALL_EXPECTED_MUTATIONS.initGroups]} | ${1} | ${GROUPS_LOCAL_STORAGE_KEY}
+ ${actions.loadFrequentProjects} | ${{ method: 'onGet', code: 200 }} | ${'success'} | ${[PROMISE_ALL_EXPECTED_MUTATIONS.initProjects, PROMISE_ALL_EXPECTED_MUTATIONS.resProjects]} | ${0} | ${PROJECTS_LOCAL_STORAGE_KEY}
+ ${actions.loadFrequentProjects} | ${{ method: 'onGet', code: 500 }} | ${'error'} | ${[PROMISE_ALL_EXPECTED_MUTATIONS.initProjects]} | ${1} | ${PROJECTS_LOCAL_STORAGE_KEY}
+ `(
+ 'Promise.all calls',
+ ({ action, axiosMock, type, expectedMutations, flashCallCount, lsKey }) => {
+ describe(action.name, () => {
+ describe(`on ${type}`, () => {
+ beforeEach(() => {
+ storeUtils.loadDataFromLS = jest.fn().mockReturnValue(FRESH_STORED_DATA);
+ mock[axiosMock.method]().reply(axiosMock.code, MOCK_FRESH_DATA_RES);
+ });
+
+ it(`should dispatch the correct mutations`, () => {
+ return testAction({ action, state, expectedMutations }).then(() => {
+ expect(storeUtils.loadDataFromLS).toHaveBeenCalledWith(lsKey);
+ flashCallback(flashCallCount);
+ });
+ });
+ });
+ });
+ },
+ );
+
+ describe('getGroupsData', () => {
+ const mockCommit = () => {};
+ beforeEach(() => {
+ jest.spyOn(Api, 'groups').mockResolvedValue(MOCK_GROUPS);
+ });
+
+ it('calls Api.groups with order_by set to similarity', () => {
+ actions.fetchGroups({ commit: mockCommit }, 'test');
+
+ expect(Api.groups).toHaveBeenCalledWith('test', { order_by: 'similarity' });
+ });
+ });
+
describe('getProjectsData', () => {
const mockCommit = () => {};
beforeEach(() => {
@@ -64,10 +115,19 @@ describe('Global Search Store Actions', () => {
});
describe('when groupId is set', () => {
- it('calls Api.groupProjects', () => {
+ it('calls Api.groupProjects with expected parameters', () => {
actions.fetchProjects({ commit: mockCommit, state });
- expect(Api.groupProjects).toHaveBeenCalled();
+ expect(Api.groupProjects).toHaveBeenCalledWith(
+ state.query.group_id,
+ state.query.search,
+ {
+ order_by: 'similarity',
+ include_subgroups: true,
+ with_shared: false,
+ },
+ expect.any(Function),
+ );
expect(Api.projects).not.toHaveBeenCalled();
});
});
@@ -121,4 +181,44 @@ describe('Global Search Store Actions', () => {
});
});
});
+
+ describe('setFrequentGroup', () => {
+ beforeEach(() => {
+ storeUtils.setFrequentItemToLS = jest.fn();
+ });
+
+ it(`calls setFrequentItemToLS with ${GROUPS_LOCAL_STORAGE_KEY} and item data`, async () => {
+ await testAction({
+ action: actions.setFrequentGroup,
+ payload: MOCK_GROUP,
+ state,
+ });
+
+ expect(storeUtils.setFrequentItemToLS).toHaveBeenCalledWith(
+ GROUPS_LOCAL_STORAGE_KEY,
+ state.frequentItems,
+ MOCK_GROUP,
+ );
+ });
+ });
+
+ describe('setFrequentProject', () => {
+ beforeEach(() => {
+ storeUtils.setFrequentItemToLS = jest.fn();
+ });
+
+ it(`calls setFrequentItemToLS with ${PROJECTS_LOCAL_STORAGE_KEY} and item data`, async () => {
+ await testAction({
+ action: actions.setFrequentProject,
+ payload: MOCK_PROJECT,
+ state,
+ });
+
+ expect(storeUtils.setFrequentItemToLS).toHaveBeenCalledWith(
+ PROJECTS_LOCAL_STORAGE_KEY,
+ state.frequentItems,
+ MOCK_PROJECT,
+ );
+ });
+ });
});
diff --git a/spec/frontend/search/store/getters_spec.js b/spec/frontend/search/store/getters_spec.js
new file mode 100644
index 00000000000..081e6a986eb
--- /dev/null
+++ b/spec/frontend/search/store/getters_spec.js
@@ -0,0 +1,32 @@
+import { GROUPS_LOCAL_STORAGE_KEY, PROJECTS_LOCAL_STORAGE_KEY } from '~/search/store/constants';
+import * as getters from '~/search/store/getters';
+import createState from '~/search/store/state';
+import { MOCK_QUERY, MOCK_GROUPS, MOCK_PROJECTS } from '../mock_data';
+
+describe('Global Search Store Getters', () => {
+ let state;
+
+ beforeEach(() => {
+ state = createState({ query: MOCK_QUERY });
+ });
+
+ describe('frequentGroups', () => {
+ beforeEach(() => {
+ state.frequentItems[GROUPS_LOCAL_STORAGE_KEY] = MOCK_GROUPS;
+ });
+
+ it('returns the correct data', () => {
+ expect(getters.frequentGroups(state)).toStrictEqual(MOCK_GROUPS);
+ });
+ });
+
+ describe('frequentProjects', () => {
+ beforeEach(() => {
+ state.frequentItems[PROJECTS_LOCAL_STORAGE_KEY] = MOCK_PROJECTS;
+ });
+
+ it('returns the correct data', () => {
+ expect(getters.frequentProjects(state)).toStrictEqual(MOCK_PROJECTS);
+ });
+ });
+});
diff --git a/spec/frontend/search/store/mutations_spec.js b/spec/frontend/search/store/mutations_spec.js
index df94ba40ff2..a60718a972d 100644
--- a/spec/frontend/search/store/mutations_spec.js
+++ b/spec/frontend/search/store/mutations_spec.js
@@ -71,4 +71,13 @@ describe('Global Search Store Mutations', () => {
expect(state.query[payload.key]).toBe(payload.value);
});
});
+
+ describe('LOAD_FREQUENT_ITEMS', () => {
+ it('sets frequentItems[key] to data', () => {
+ const payload = { key: 'test-key', data: [1, 2, 3] };
+ mutations[types.LOAD_FREQUENT_ITEMS](state, payload);
+
+ expect(state.frequentItems[payload.key]).toStrictEqual(payload.data);
+ });
+ });
});
diff --git a/spec/frontend/search/store/utils_spec.js b/spec/frontend/search/store/utils_spec.js
new file mode 100644
index 00000000000..5055fa2cc3d
--- /dev/null
+++ b/spec/frontend/search/store/utils_spec.js
@@ -0,0 +1,197 @@
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+import { MAX_FREQUENCY } from '~/search/store/constants';
+import { loadDataFromLS, setFrequentItemToLS, mergeById } from '~/search/store/utils';
+import {
+ MOCK_LS_KEY,
+ MOCK_GROUPS,
+ MOCK_INFLATED_DATA,
+ FRESH_STORED_DATA,
+ STALE_STORED_DATA,
+} from '../mock_data';
+
+const PREV_TIME = new Date().getTime() - 1;
+const CURRENT_TIME = new Date().getTime();
+
+useLocalStorageSpy();
+jest.mock('~/lib/utils/accessor', () => ({
+ isLocalStorageAccessSafe: jest.fn().mockReturnValue(true),
+}));
+
+describe('Global Search Store Utils', () => {
+ afterEach(() => {
+ localStorage.clear();
+ });
+
+ describe('loadDataFromLS', () => {
+ let res;
+
+ describe('with valid data', () => {
+ beforeEach(() => {
+ localStorage.setItem(MOCK_LS_KEY, JSON.stringify(MOCK_GROUPS));
+ res = loadDataFromLS(MOCK_LS_KEY);
+ });
+
+ it('returns parsed array', () => {
+ expect(res).toStrictEqual(MOCK_GROUPS);
+ });
+ });
+
+ describe('with invalid data', () => {
+ beforeEach(() => {
+ localStorage.setItem(MOCK_LS_KEY, '[}');
+ res = loadDataFromLS(MOCK_LS_KEY);
+ });
+
+ it('wipes local storage and returns an empty array', () => {
+ expect(localStorage.removeItem).toHaveBeenCalledWith(MOCK_LS_KEY);
+ expect(res).toStrictEqual([]);
+ });
+ });
+ });
+
+ describe('setFrequentItemToLS', () => {
+ const frequentItems = {};
+
+ describe('with existing data', () => {
+ describe(`when frequency is less than ${MAX_FREQUENCY}`, () => {
+ beforeEach(() => {
+ frequentItems[MOCK_LS_KEY] = [{ ...MOCK_GROUPS[0], frequency: 1, lastUsed: PREV_TIME }];
+ setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
+ });
+
+ it('adds 1 to the frequency, tracks lastUsed, and calls localStorage.setItem', () => {
+ expect(localStorage.setItem).toHaveBeenCalledWith(
+ MOCK_LS_KEY,
+ JSON.stringify([{ ...MOCK_GROUPS[0], frequency: 2, lastUsed: CURRENT_TIME }]),
+ );
+ });
+ });
+
+ describe(`when frequency is equal to ${MAX_FREQUENCY}`, () => {
+ beforeEach(() => {
+ frequentItems[MOCK_LS_KEY] = [
+ { ...MOCK_GROUPS[0], frequency: MAX_FREQUENCY, lastUsed: PREV_TIME },
+ ];
+ setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
+ });
+
+ it(`does not further increase frequency past ${MAX_FREQUENCY}, tracks lastUsed, and calls localStorage.setItem`, () => {
+ expect(localStorage.setItem).toHaveBeenCalledWith(
+ MOCK_LS_KEY,
+ JSON.stringify([
+ { ...MOCK_GROUPS[0], frequency: MAX_FREQUENCY, lastUsed: CURRENT_TIME },
+ ]),
+ );
+ });
+ });
+ });
+
+ describe('with no existing data', () => {
+ beforeEach(() => {
+ frequentItems[MOCK_LS_KEY] = [];
+ setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
+ });
+
+ it('adds a new entry with frequency 1, tracks lastUsed, and calls localStorage.setItem', () => {
+ expect(localStorage.setItem).toHaveBeenCalledWith(
+ MOCK_LS_KEY,
+ JSON.stringify([{ ...MOCK_GROUPS[0], frequency: 1, lastUsed: CURRENT_TIME }]),
+ );
+ });
+ });
+
+ describe('with multiple entries', () => {
+ beforeEach(() => {
+ frequentItems[MOCK_LS_KEY] = [
+ { id: 1, frequency: 2, lastUsed: PREV_TIME },
+ { id: 2, frequency: 1, lastUsed: PREV_TIME },
+ { id: 3, frequency: 1, lastUsed: PREV_TIME },
+ ];
+ setFrequentItemToLS(MOCK_LS_KEY, frequentItems, { id: 3 });
+ });
+
+ it('sorts the array by most frequent and lastUsed', () => {
+ expect(localStorage.setItem).toHaveBeenCalledWith(
+ MOCK_LS_KEY,
+ JSON.stringify([
+ { id: 3, frequency: 2, lastUsed: CURRENT_TIME },
+ { id: 1, frequency: 2, lastUsed: PREV_TIME },
+ { id: 2, frequency: 1, lastUsed: PREV_TIME },
+ ]),
+ );
+ });
+ });
+
+ describe('with max entries', () => {
+ beforeEach(() => {
+ frequentItems[MOCK_LS_KEY] = [
+ { id: 1, frequency: 5, lastUsed: PREV_TIME },
+ { id: 2, frequency: 4, lastUsed: PREV_TIME },
+ { id: 3, frequency: 3, lastUsed: PREV_TIME },
+ { id: 4, frequency: 2, lastUsed: PREV_TIME },
+ { id: 5, frequency: 1, lastUsed: PREV_TIME },
+ ];
+ setFrequentItemToLS(MOCK_LS_KEY, frequentItems, { id: 6 });
+ });
+
+ it('removes the last item in the array', () => {
+ expect(localStorage.setItem).toHaveBeenCalledWith(
+ MOCK_LS_KEY,
+ JSON.stringify([
+ { id: 1, frequency: 5, lastUsed: PREV_TIME },
+ { id: 2, frequency: 4, lastUsed: PREV_TIME },
+ { id: 3, frequency: 3, lastUsed: PREV_TIME },
+ { id: 4, frequency: 2, lastUsed: PREV_TIME },
+ { id: 6, frequency: 1, lastUsed: CURRENT_TIME },
+ ]),
+ );
+ });
+ });
+
+ describe('with null data loaded in', () => {
+ beforeEach(() => {
+ frequentItems[MOCK_LS_KEY] = null;
+ setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
+ });
+
+ it('wipes local storage', () => {
+ expect(localStorage.removeItem).toHaveBeenCalledWith(MOCK_LS_KEY);
+ });
+ });
+
+ describe('with additional data', () => {
+ beforeEach(() => {
+ const MOCK_ADDITIONAL_DATA_GROUP = { ...MOCK_GROUPS[0], extraData: 'test' };
+ frequentItems[MOCK_LS_KEY] = [];
+ setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_ADDITIONAL_DATA_GROUP);
+ });
+
+ it('parses out extra data for LS', () => {
+ expect(localStorage.setItem).toHaveBeenCalledWith(
+ MOCK_LS_KEY,
+ JSON.stringify([{ ...MOCK_GROUPS[0], frequency: 1, lastUsed: CURRENT_TIME }]),
+ );
+ });
+ });
+ });
+
+ describe.each`
+ description | inflatedData | storedData | response
+ ${'identical'} | ${MOCK_INFLATED_DATA} | ${FRESH_STORED_DATA} | ${FRESH_STORED_DATA}
+ ${'stale'} | ${MOCK_INFLATED_DATA} | ${STALE_STORED_DATA} | ${FRESH_STORED_DATA}
+ ${'empty'} | ${MOCK_INFLATED_DATA} | ${[]} | ${MOCK_INFLATED_DATA}
+ ${'null'} | ${MOCK_INFLATED_DATA} | ${null} | ${MOCK_INFLATED_DATA}
+ `('mergeById', ({ description, inflatedData, storedData, response }) => {
+ describe(`with ${description} storedData`, () => {
+ let res;
+
+ beforeEach(() => {
+ res = mergeById(inflatedData, storedData);
+ });
+
+ it('prioritizes inflatedData and preserves frequency count', () => {
+ expect(response).toStrictEqual(res);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/search/topbar/components/group_filter_spec.js b/spec/frontend/search/topbar/components/group_filter_spec.js
index 15b46f9c058..fbd7ad6bb57 100644
--- a/spec/frontend/search/topbar/components/group_filter_spec.js
+++ b/spec/frontend/search/topbar/components/group_filter_spec.js
@@ -1,13 +1,14 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
import Vuex from 'vuex';
import { MOCK_GROUP, MOCK_QUERY } from 'jest/search/mock_data';
import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
+import { GROUPS_LOCAL_STORAGE_KEY } from '~/search/store/constants';
import GroupFilter from '~/search/topbar/components/group_filter.vue';
import SearchableDropdown from '~/search/topbar/components/searchable_dropdown.vue';
import { ANY_OPTION, GROUP_DATA, PROJECT_DATA } from '~/search/topbar/constants';
-const localVue = createLocalVue();
-localVue.use(Vuex);
+Vue.use(Vuex);
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn(),
@@ -19,6 +20,8 @@ describe('GroupFilter', () => {
const actionSpies = {
fetchGroups: jest.fn(),
+ setFrequentGroup: jest.fn(),
+ loadFrequentGroups: jest.fn(),
};
const defaultProps = {
@@ -32,10 +35,12 @@ describe('GroupFilter', () => {
...initialState,
},
actions: actionSpies,
+ getters: {
+ frequentGroups: () => [],
+ },
});
wrapper = shallowMount(GroupFilter, {
- localVue,
store,
propsData: {
...defaultProps,
@@ -62,12 +67,14 @@ describe('GroupFilter', () => {
});
describe('events', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
describe('when @search is emitted', () => {
const search = 'test';
beforeEach(() => {
- createComponent();
-
findSearchableDropdown().vm.$emit('search', search);
});
@@ -77,14 +84,31 @@ describe('GroupFilter', () => {
});
});
- describe('when @change is emitted', () => {
+ describe('when @change is emitted with Any', () => {
beforeEach(() => {
- createComponent();
+ findSearchableDropdown().vm.$emit('change', ANY_OPTION);
+ });
+
+ it('calls setUrlParams with group null, project id null, and then calls visitUrl', () => {
+ expect(setUrlParams).toHaveBeenCalledWith({
+ [GROUP_DATA.queryParam]: null,
+ [PROJECT_DATA.queryParam]: null,
+ });
+
+ expect(visitUrl).toHaveBeenCalled();
+ });
+
+ it('does not call setFrequentGroup', () => {
+ expect(actionSpies.setFrequentGroup).not.toHaveBeenCalled();
+ });
+ });
+ describe('when @change is emitted with a group', () => {
+ beforeEach(() => {
findSearchableDropdown().vm.$emit('change', MOCK_GROUP);
});
- it('calls calls setUrlParams with group id, project id null, and visitUrl', () => {
+ it('calls setUrlParams with group id, project id null, and then calls visitUrl', () => {
expect(setUrlParams).toHaveBeenCalledWith({
[GROUP_DATA.queryParam]: MOCK_GROUP.id,
[PROJECT_DATA.queryParam]: null,
@@ -92,6 +116,20 @@ describe('GroupFilter', () => {
expect(visitUrl).toHaveBeenCalled();
});
+
+ it(`calls setFrequentGroup with the group and ${GROUPS_LOCAL_STORAGE_KEY}`, () => {
+ expect(actionSpies.setFrequentGroup).toHaveBeenCalledWith(expect.any(Object), MOCK_GROUP);
+ });
+ });
+
+ describe('when @first-open is emitted', () => {
+ beforeEach(() => {
+ findSearchableDropdown().vm.$emit('first-open');
+ });
+
+ it('calls loadFrequentGroups', () => {
+ expect(actionSpies.loadFrequentGroups).toHaveBeenCalledTimes(1);
+ });
});
});
diff --git a/spec/frontend/search/topbar/components/project_filter_spec.js b/spec/frontend/search/topbar/components/project_filter_spec.js
index 3bd0769b34a..63b0f882ca4 100644
--- a/spec/frontend/search/topbar/components/project_filter_spec.js
+++ b/spec/frontend/search/topbar/components/project_filter_spec.js
@@ -1,13 +1,14 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
import Vuex from 'vuex';
import { MOCK_PROJECT, MOCK_QUERY } from 'jest/search/mock_data';
import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
+import { PROJECTS_LOCAL_STORAGE_KEY } from '~/search/store/constants';
import ProjectFilter from '~/search/topbar/components/project_filter.vue';
import SearchableDropdown from '~/search/topbar/components/searchable_dropdown.vue';
import { ANY_OPTION, GROUP_DATA, PROJECT_DATA } from '~/search/topbar/constants';
-const localVue = createLocalVue();
-localVue.use(Vuex);
+Vue.use(Vuex);
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn(),
@@ -19,6 +20,8 @@ describe('ProjectFilter', () => {
const actionSpies = {
fetchProjects: jest.fn(),
+ setFrequentProject: jest.fn(),
+ loadFrequentProjects: jest.fn(),
};
const defaultProps = {
@@ -32,10 +35,12 @@ describe('ProjectFilter', () => {
...initialState,
},
actions: actionSpies,
+ getters: {
+ frequentProjects: () => [],
+ },
});
wrapper = shallowMount(ProjectFilter, {
- localVue,
store,
propsData: {
...defaultProps,
@@ -84,12 +89,16 @@ describe('ProjectFilter', () => {
findSearchableDropdown().vm.$emit('change', ANY_OPTION);
});
- it('calls setUrlParams with project id, not group id, then calls visitUrl', () => {
+ it('calls setUrlParams with null, no group id, then calls visitUrl', () => {
expect(setUrlParams).toHaveBeenCalledWith({
- [PROJECT_DATA.queryParam]: ANY_OPTION.id,
+ [PROJECT_DATA.queryParam]: null,
});
expect(visitUrl).toHaveBeenCalled();
});
+
+ it('does not call setFrequentProject', () => {
+ expect(actionSpies.setFrequentProject).not.toHaveBeenCalled();
+ });
});
describe('with a Project', () => {
@@ -104,6 +113,23 @@ describe('ProjectFilter', () => {
});
expect(visitUrl).toHaveBeenCalled();
});
+
+      it(`calls setFrequentProject with the project and ${PROJECTS_LOCAL_STORAGE_KEY}`, () => {
+ expect(actionSpies.setFrequentProject).toHaveBeenCalledWith(
+ expect.any(Object),
+ MOCK_PROJECT,
+ );
+ });
+ });
+ });
+
+ describe('when @first-open is emitted', () => {
+ beforeEach(() => {
+ findSearchableDropdown().vm.$emit('first-open');
+ });
+
+ it('calls loadFrequentProjects', () => {
+ expect(actionSpies.loadFrequentProjects).toHaveBeenCalledTimes(1);
});
});
});
diff --git a/spec/frontend/search/topbar/components/searchable_dropdown_spec.js b/spec/frontend/search/topbar/components/searchable_dropdown_spec.js
index 10d779f0f90..b21cf5c6b79 100644
--- a/spec/frontend/search/topbar/components/searchable_dropdown_spec.js
+++ b/spec/frontend/search/topbar/components/searchable_dropdown_spec.js
@@ -2,9 +2,9 @@ import { GlDropdown, GlDropdownItem, GlSearchBoxByType, GlSkeletonLoader } from
import { shallowMount, mount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { MOCK_GROUPS, MOCK_GROUP, MOCK_QUERY } from 'jest/search/mock_data';
import SearchableDropdown from '~/search/topbar/components/searchable_dropdown.vue';
-import SearchableDropdownItem from '~/search/topbar/components/searchable_dropdown_item.vue';
import { ANY_OPTION, GROUP_DATA } from '~/search/topbar/constants';
Vue.use(Vuex);
@@ -29,13 +29,15 @@ describe('Global Search Searchable Dropdown', () => {
},
});
- wrapper = mountFn(SearchableDropdown, {
- store,
- propsData: {
- ...defaultProps,
- ...props,
- },
- });
+ wrapper = extendedWrapper(
+ mountFn(SearchableDropdown, {
+ store,
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ }),
+ );
};
afterEach(() => {
@@ -45,10 +47,11 @@ describe('Global Search Searchable Dropdown', () => {
const findGlDropdown = () => wrapper.findComponent(GlDropdown);
const findGlDropdownSearch = () => findGlDropdown().findComponent(GlSearchBoxByType);
const findDropdownText = () => findGlDropdown().find('.dropdown-toggle-text');
- const findSearchableDropdownItems = () =>
- findGlDropdown().findAllComponents(SearchableDropdownItem);
+ const findSearchableDropdownItems = () => wrapper.findAllByTestId('searchable-items');
+ const findFrequentDropdownItems = () => wrapper.findAllByTestId('frequent-items');
const findAnyDropdownItem = () => findGlDropdown().findComponent(GlDropdownItem);
- const findFirstGroupDropdownItem = () => findSearchableDropdownItems().at(0);
+ const findFirstSearchableDropdownItem = () => findSearchableDropdownItems().at(0);
+ const findFirstFrequentDropdownItem = () => findFrequentDropdownItems().at(0);
const findLoader = () => wrapper.findComponent(GlSkeletonLoader);
describe('template', () => {
@@ -82,7 +85,7 @@ describe('Global Search Searchable Dropdown', () => {
});
});
- describe('findDropdownItems', () => {
+ describe('Searchable Dropdown Items', () => {
describe('when loading is false', () => {
beforeEach(() => {
createComponent({}, { items: MOCK_GROUPS });
@@ -96,7 +99,7 @@ describe('Global Search Searchable Dropdown', () => {
expect(findAnyDropdownItem().exists()).toBe(true);
});
- it('renders SearchableDropdownItem for each item', () => {
+ it('renders searchable dropdown item for each item', () => {
expect(findSearchableDropdownItems()).toHaveLength(MOCK_GROUPS.length);
});
});
@@ -114,12 +117,31 @@ describe('Global Search Searchable Dropdown', () => {
expect(findAnyDropdownItem().exists()).toBe(true);
});
- it('does not render SearchableDropdownItem', () => {
+ it('does not render searchable dropdown items', () => {
expect(findSearchableDropdownItems()).toHaveLength(0);
});
});
});
+ describe.each`
+ searchText | frequentItems | length
+ ${''} | ${[]} | ${0}
+ ${''} | ${MOCK_GROUPS} | ${MOCK_GROUPS.length}
+ ${'test'} | ${[]} | ${0}
+ ${'test'} | ${MOCK_GROUPS} | ${0}
+ `('Frequent Dropdown Items', ({ searchText, frequentItems, length }) => {
+ describe(`when search is ${searchText} and frequentItems length is ${frequentItems.length}`, () => {
+ beforeEach(() => {
+ createComponent({}, { frequentItems });
+ wrapper.setData({ searchText });
+ });
+
+ it(`should${length ? '' : ' not'} render frequent dropdown items`, () => {
+ expect(findFrequentDropdownItems()).toHaveLength(length);
+ });
+ });
+ });
+
describe('Dropdown Text', () => {
describe('when selectedItem is any', () => {
beforeEach(() => {
@@ -145,7 +167,7 @@ describe('Global Search Searchable Dropdown', () => {
describe('actions', () => {
beforeEach(() => {
- createComponent({}, { items: MOCK_GROUPS });
+ createComponent({}, { items: MOCK_GROUPS, frequentItems: MOCK_GROUPS });
});
it('clicking "Any" dropdown item $emits @change with ANY_OPTION', () => {
@@ -154,10 +176,41 @@ describe('Global Search Searchable Dropdown', () => {
expect(wrapper.emitted('change')[0]).toEqual([ANY_OPTION]);
});
- it('on SearchableDropdownItem @change, the wrapper $emits change with the item', () => {
- findFirstGroupDropdownItem().vm.$emit('change', MOCK_GROUPS[0]);
+ it('on searchable item @change, the wrapper $emits change with the item', () => {
+ findFirstSearchableDropdownItem().vm.$emit('change', MOCK_GROUPS[0]);
+
+ expect(wrapper.emitted('change')[0]).toEqual([MOCK_GROUPS[0]]);
+ });
+
+ it('on frequent item @change, the wrapper $emits change with the item', () => {
+ findFirstFrequentDropdownItem().vm.$emit('change', MOCK_GROUPS[0]);
expect(wrapper.emitted('change')[0]).toEqual([MOCK_GROUPS[0]]);
});
+
+ describe('opening the dropdown', () => {
+ describe('for the first time', () => {
+ beforeEach(() => {
+ findGlDropdown().vm.$emit('show');
+ });
+
+ it('$emits @search and @first-open', () => {
+ expect(wrapper.emitted('search')[0]).toStrictEqual([wrapper.vm.searchText]);
+ expect(wrapper.emitted('first-open')[0]).toStrictEqual([]);
+ });
+ });
+
+ describe('not for the first time', () => {
+ beforeEach(() => {
+ wrapper.setData({ hasBeenOpened: true });
+ findGlDropdown().vm.$emit('show');
+ });
+
+ it('$emits @search and not @first-open', () => {
+ expect(wrapper.emitted('search')[0]).toStrictEqual([wrapper.vm.searchText]);
+ expect(wrapper.emitted('first-open')).toBeUndefined();
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/search_autocomplete_spec.js b/spec/frontend/search_autocomplete_spec.js
index 5aca07d59e4..c643cf6557d 100644
--- a/spec/frontend/search_autocomplete_spec.js
+++ b/spec/frontend/search_autocomplete_spec.js
@@ -1,4 +1,3 @@
-/* eslint-disable no-unused-expressions, consistent-return, no-param-reassign, default-case, no-return-assign */
import AxiosMockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
@@ -22,31 +21,33 @@ describe('Search autocomplete dropdown', () => {
const groupName = 'Gitlab Org';
const removeBodyAttributes = () => {
- const $body = $('body');
+ const { body } = document;
- $body.removeAttr('data-page');
- $body.removeAttr('data-project');
- $body.removeAttr('data-group');
+ delete body.dataset.page;
+ delete body.dataset.project;
+ delete body.dataset.group;
};
// Add required attributes to body before starting the test.
// section would be dashboard|group|project
- const addBodyAttributes = (section) => {
- if (section == null) {
- section = 'dashboard';
- }
-
- const $body = $('body');
+ const addBodyAttributes = (section = 'dashboard') => {
removeBodyAttributes();
+
+ const { body } = document;
switch (section) {
case 'dashboard':
- return $body.attr('data-page', 'root:index');
+ body.dataset.page = 'root:index';
+ break;
case 'group':
- $body.attr('data-page', 'groups:show');
- return $body.data('group', 'gitlab-org');
+ body.dataset.page = 'groups:show';
+ body.dataset.group = 'gitlab-org';
+ break;
case 'project':
- $body.attr('data-page', 'projects:show');
- return $body.data('project', 'gitlab-ce');
+ body.dataset.page = 'projects:show';
+ body.dataset.project = 'gitlab-ce';
+ break;
+ default:
+ break;
}
};
@@ -56,34 +57,31 @@ describe('Search autocomplete dropdown', () => {
// Mock `gl` object in window for dashboard specific page. App code will need it.
const mockDashboardOptions = () => {
- window.gl || (window.gl = {});
- return (window.gl.dashboardOptions = {
+ window.gl.dashboardOptions = {
issuesPath: dashboardIssuesPath,
mrPath: dashboardMRsPath,
- });
+ };
};
// Mock `gl` object in window for project specific page. App code will need it.
const mockProjectOptions = () => {
- window.gl || (window.gl = {});
- return (window.gl.projectOptions = {
+ window.gl.projectOptions = {
'gitlab-ce': {
issuesPath: projectIssuesPath,
mrPath: projectMRsPath,
projectName,
},
- });
+ };
};
const mockGroupOptions = () => {
- window.gl || (window.gl = {});
- return (window.gl.groupOptions = {
+ window.gl.groupOptions = {
'gitlab-org': {
issuesPath: groupIssuesPath,
mrPath: groupMRsPath,
projectName: groupName,
},
- });
+ };
};
const assertLinks = (list, issuesPath, mrsPath) => {
@@ -113,7 +111,7 @@ describe('Search autocomplete dropdown', () => {
window.gon.current_username = userName;
window.gl = window.gl || (window.gl = {});
- return (widget = initSearchAutocomplete({ autocompletePath }));
+ widget = initSearchAutocomplete({ autocompletePath });
});
afterEach(() => {
diff --git a/spec/frontend/search_autocomplete_utils_spec.js b/spec/frontend/search_autocomplete_utils_spec.js
new file mode 100644
index 00000000000..4fdec717e93
--- /dev/null
+++ b/spec/frontend/search_autocomplete_utils_spec.js
@@ -0,0 +1,114 @@
+import {
+ isInGroupsPage,
+ isInProjectPage,
+ getGroupSlug,
+ getProjectSlug,
+} from '~/search_autocomplete_utils';
+
+describe('search_autocomplete_utils', () => {
+ let originalBody;
+
+ beforeEach(() => {
+ originalBody = document.body;
+ document.body = document.createElement('body');
+ });
+
+ afterEach(() => {
+ document.body = originalBody;
+ });
+
+ describe('isInGroupsPage', () => {
+ it.each`
+ page | result
+ ${'groups:index'} | ${true}
+ ${'groups:show'} | ${true}
+ ${'projects:show'} | ${false}
+    `(`returns $result for page $page`, ({ page, result }) => {
+ document.body.dataset.page = page;
+
+ expect(isInGroupsPage()).toBe(result);
+ });
+ });
+
+ describe('isInProjectPage', () => {
+ it.each`
+ page | result
+ ${'projects:index'} | ${true}
+ ${'projects:show'} | ${true}
+ ${'groups:show'} | ${false}
+    `(`returns $result for page $page`, ({ page, result }) => {
+ document.body.dataset.page = page;
+
+ expect(isInProjectPage()).toBe(result);
+ });
+ });
+
+ describe('getProjectSlug', () => {
+    it('returns null when neither a project nor a project page is present', () => {
+ expect(getProjectSlug()).toBe(null);
+ });
+
+ it('returns null when not on project page', () => {
+ document.body.dataset.project = 'gitlab';
+
+ expect(getProjectSlug()).toBe(null);
+ });
+
+    it('returns undefined when project is missing on a project page', () => {
+ document.body.dataset.page = 'projects';
+
+ expect(getProjectSlug()).toBe(undefined);
+ });
+
+ it('returns project', () => {
+ document.body.dataset.page = 'projects';
+ document.body.dataset.project = 'gitlab';
+
+ expect(getProjectSlug()).toBe('gitlab');
+ });
+
+ it('returns project in edit page', () => {
+ document.body.dataset.page = 'projects:edit';
+ document.body.dataset.project = 'gitlab';
+
+ expect(getProjectSlug()).toBe('gitlab');
+ });
+ });
+
+ describe('getGroupSlug', () => {
+    it('returns null when neither a group nor a group page is present', () => {
+ expect(getGroupSlug()).toBe(null);
+ });
+
+ it('returns null when not on group page', () => {
+ document.body.dataset.group = 'gitlab-org';
+
+ expect(getGroupSlug()).toBe(null);
+ });
+
+    it('returns undefined when group is missing on groups page', () => {
+ document.body.dataset.page = 'groups';
+
+ expect(getGroupSlug()).toBe(undefined);
+ });
+
+ it('returns null when group is missing on project page', () => {
+ document.body.dataset.page = 'project';
+
+ expect(getGroupSlug()).toBe(null);
+ });
+
+ it.each`
+ page
+ ${'groups'}
+ ${'groups:edit'}
+ ${'projects'}
+ ${'projects:edit'}
+ `(`returns group in page $page`, ({ page }) => {
+ document.body.dataset.page = page;
+ document.body.dataset.group = 'gitlab-org';
+
+ expect(getGroupSlug()).toBe('gitlab-org');
+ });
+ });
+});
diff --git a/spec/frontend/security_configuration/components/auto_dev_ops_alert_spec.js b/spec/frontend/security_configuration/components/auto_dev_ops_alert_spec.js
new file mode 100644
index 00000000000..467ae35408c
--- /dev/null
+++ b/spec/frontend/security_configuration/components/auto_dev_ops_alert_spec.js
@@ -0,0 +1,55 @@
+import { GlAlert } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import AutoDevopsAlert from '~/security_configuration/components/auto_dev_ops_alert.vue';
+
+const autoDevopsHelpPagePath = '/autoDevopsHelpPagePath';
+const autoDevopsPath = '/enableAutoDevopsPath';
+
+describe('AutoDevopsAlert component', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = mount(AutoDevopsAlert, {
+ provide: {
+ autoDevopsHelpPagePath,
+ autoDevopsPath,
+ },
+ });
+ };
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('contains correct body text', () => {
+ expect(wrapper.text()).toContain('Quickly enable all');
+ });
+
+ it('renders the link correctly', () => {
+ const link = wrapper.find('a');
+
+ expect(link.attributes('href')).toBe(autoDevopsHelpPagePath);
+ expect(link.text()).toBe('Auto DevOps');
+ });
+
+ it('bubbles up dismiss events from the GlAlert', () => {
+ expect(wrapper.emitted('dismiss')).toBe(undefined);
+
+ findAlert().vm.$emit('dismiss');
+
+ expect(wrapper.emitted('dismiss')).toEqual([[]]);
+ });
+
+ it('has a button pointing to autoDevopsPath', () => {
+ expect(findAlert().props()).toMatchObject({
+ primaryButtonText: 'Enable Auto DevOps',
+ primaryButtonLink: autoDevopsPath,
+ });
+ });
+});
diff --git a/spec/frontend/security_configuration/components/feature_card_spec.js b/spec/frontend/security_configuration/components/feature_card_spec.js
index c69e135012e..3658dbb5ef2 100644
--- a/spec/frontend/security_configuration/components/feature_card_spec.js
+++ b/spec/frontend/security_configuration/components/feature_card_spec.js
@@ -3,6 +3,7 @@ import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import FeatureCard from '~/security_configuration/components/feature_card.vue';
import ManageViaMr from '~/vue_shared/security_configuration/components/manage_via_mr.vue';
+import { REPORT_TYPE_SAST } from '~/vue_shared/security_reports/constants';
import { makeFeature } from './utils';
describe('FeatureCard component', () => {
@@ -126,21 +127,23 @@ describe('FeatureCard component', () => {
describe('actions', () => {
describe.each`
- context | available | configured | configurationPath | canEnableByMergeRequest | action
- ${'unavailable'} | ${false} | ${false} | ${null} | ${false} | ${null}
- ${'available'} | ${true} | ${false} | ${null} | ${false} | ${'guide'}
- ${'configured'} | ${true} | ${true} | ${null} | ${false} | ${'guide'}
- ${'available, can enable by MR'} | ${true} | ${false} | ${null} | ${true} | ${'create-mr'}
- ${'configured, can enable by MR'} | ${true} | ${true} | ${null} | ${true} | ${'guide'}
- ${'available with config path'} | ${true} | ${false} | ${'foo'} | ${false} | ${'enable'}
- ${'available with config path, can enable by MR'} | ${true} | ${false} | ${'foo'} | ${true} | ${'enable'}
- ${'configured with config path'} | ${true} | ${true} | ${'foo'} | ${false} | ${'configure'}
- ${'configured with config path, can enable by MR'} | ${true} | ${true} | ${'foo'} | ${true} | ${'configure'}
+ context | type | available | configured | configurationPath | canEnableByMergeRequest | action
+ ${'unavailable'} | ${REPORT_TYPE_SAST} | ${false} | ${false} | ${null} | ${false} | ${null}
+ ${'available'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${null} | ${false} | ${'guide'}
+ ${'configured'} | ${REPORT_TYPE_SAST} | ${true} | ${true} | ${null} | ${false} | ${'guide'}
+ ${'available, can enable by MR'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${null} | ${true} | ${'create-mr'}
+ ${'available, can enable by MR, unknown type'} | ${'foo'} | ${true} | ${false} | ${null} | ${true} | ${'guide'}
+ ${'configured, can enable by MR'} | ${REPORT_TYPE_SAST} | ${true} | ${true} | ${null} | ${true} | ${'guide'}
+ ${'available with config path'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${'foo'} | ${false} | ${'enable'}
+ ${'available with config path, can enable by MR'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${'foo'} | ${true} | ${'enable'}
+ ${'configured with config path'} | ${REPORT_TYPE_SAST} | ${true} | ${true} | ${'foo'} | ${false} | ${'configure'}
+ ${'configured with config path, can enable by MR'} | ${REPORT_TYPE_SAST} | ${true} | ${true} | ${'foo'} | ${true} | ${'configure'}
`(
'given $context feature',
- ({ available, configured, configurationPath, canEnableByMergeRequest, action }) => {
+ ({ type, available, configured, configurationPath, canEnableByMergeRequest, action }) => {
beforeEach(() => {
feature = makeFeature({
+ type,
available,
configured,
configurationPath,
diff --git a/spec/frontend/security_configuration/components/redesigned_app_spec.js b/spec/frontend/security_configuration/components/redesigned_app_spec.js
index 7e27a3e1108..119a25a77c1 100644
--- a/spec/frontend/security_configuration/components/redesigned_app_spec.js
+++ b/spec/frontend/security_configuration/components/redesigned_app_spec.js
@@ -2,6 +2,7 @@ import { GlTab } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import AutoDevopsAlert from '~/security_configuration/components/auto_dev_ops_alert.vue';
import {
SAST_NAME,
SAST_SHORT_NAME,
@@ -13,6 +14,7 @@ import {
LICENSE_COMPLIANCE_HELP_PATH,
} from '~/security_configuration/components/constants';
import FeatureCard from '~/security_configuration/components/feature_card.vue';
+
import RedesignedSecurityConfigurationApp, {
i18n,
} from '~/security_configuration/components/redesigned_app.vue';
@@ -23,6 +25,9 @@ import {
} from '~/vue_shared/security_reports/constants';
const upgradePath = '/upgrade';
+const autoDevopsHelpPagePath = '/autoDevopsHelpPagePath';
+const autoDevopsPath = '/autoDevopsPath';
+const gitlabCiHistoryPath = 'test/historyPath';
describe('redesigned App component', () => {
let wrapper;
@@ -36,6 +41,8 @@ describe('redesigned App component', () => {
propsData,
provide: {
upgradePath,
+ autoDevopsHelpPagePath,
+ autoDevopsPath,
},
stubs: {
UserCalloutDismisser: makeMockUserCalloutDismisser({
@@ -52,9 +59,30 @@ describe('redesigned App component', () => {
const findTabs = () => wrapper.findAllComponents(GlTab);
const findByTestId = (id) => wrapper.findByTestId(id);
const findFeatureCards = () => wrapper.findAllComponents(FeatureCard);
- const findComplianceViewHistoryLink = () => findByTestId('compliance-view-history-link');
- const findSecurityViewHistoryLink = () => findByTestId('security-view-history-link');
+ const findLink = ({ href, text, container = wrapper }) => {
+ const selector = `a[href="${href}"]`;
+ const link = container.find(selector);
+
+ if (link.exists() && link.text() === text) {
+ return link;
+ }
+
+ return wrapper.find(`${selector} does not exist`);
+ };
+ const findSecurityViewHistoryLink = () =>
+ findLink({
+ href: gitlabCiHistoryPath,
+ text: i18n.configurationHistory,
+ container: findByTestId('security-testing-tab'),
+ });
+ const findComplianceViewHistoryLink = () =>
+ findLink({
+ href: gitlabCiHistoryPath,
+ text: i18n.configurationHistory,
+ container: findByTestId('compliance-testing-tab'),
+ });
const findUpgradeBanner = () => wrapper.findComponent(UpgradeBanner);
+ const findAutoDevopsAlert = () => wrapper.findComponent(AutoDevopsAlert);
const securityFeaturesMock = [
{
@@ -119,6 +147,10 @@ describe('redesigned App component', () => {
expect(cards.at(1).props()).toEqual({ feature: complianceFeaturesMock[0] });
});
+ it('renders a basic description', () => {
+ expect(wrapper.text()).toContain(i18n.description);
+ });
+
it('should not show latest pipeline link when latestPipelinePath is not defined', () => {
expect(findByTestId('latest-pipeline-info').exists()).toBe(false);
});
@@ -129,6 +161,44 @@ describe('redesigned App component', () => {
});
});
+ describe('autoDevOpsAlert', () => {
+ describe('given the right props', () => {
+ beforeEach(() => {
+ createComponent({
+ augmentedSecurityFeatures: securityFeaturesMock,
+ augmentedComplianceFeatures: complianceFeaturesMock,
+ autoDevopsEnabled: false,
+ gitlabCiPresent: false,
+ canEnableAutoDevops: true,
+ });
+ });
+
+ it('should show AutoDevopsAlert', () => {
+ expect(findAutoDevopsAlert().exists()).toBe(true);
+ });
+
+ it('calls the dismiss callback when closing the AutoDevopsAlert', () => {
+ expect(userCalloutDismissSpy).not.toHaveBeenCalled();
+
+ findAutoDevopsAlert().vm.$emit('dismiss');
+
+ expect(userCalloutDismissSpy).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('given the wrong props', () => {
+ beforeEach(() => {
+ createComponent({
+ augmentedSecurityFeatures: securityFeaturesMock,
+ augmentedComplianceFeatures: complianceFeaturesMock,
+ });
+ });
+ it('should not show AutoDevopsAlert', () => {
+ expect(findAutoDevopsAlert().exists()).toBe(false);
+ });
+ });
+ });
+
describe('upgrade banner', () => {
const makeAvailable = (available) => (feature) => ({ ...feature, available });
@@ -193,9 +263,8 @@ describe('redesigned App component', () => {
it('should show latest pipeline info on the security tab with correct link when latestPipelinePath is defined', () => {
const latestPipelineInfoSecurity = findByTestId('latest-pipeline-info-security');
- expect(latestPipelineInfoSecurity.exists()).toBe(true);
expect(latestPipelineInfoSecurity.text()).toMatchInterpolatedText(
- i18n.securityTestingDescription,
+ i18n.latestPipelineDescription,
);
expect(latestPipelineInfoSecurity.find('a').attributes('href')).toBe('test/path');
});
@@ -203,9 +272,8 @@ describe('redesigned App component', () => {
it('should show latest pipeline info on the compliance tab with correct link when latestPipelinePath is defined', () => {
const latestPipelineInfoCompliance = findByTestId('latest-pipeline-info-compliance');
- expect(latestPipelineInfoCompliance.exists()).toBe(true);
expect(latestPipelineInfoCompliance.text()).toMatchInterpolatedText(
- i18n.securityTestingDescription,
+ i18n.latestPipelineDescription,
);
expect(latestPipelineInfoCompliance.find('a').attributes('href')).toBe('test/path');
});
@@ -217,7 +285,7 @@ describe('redesigned App component', () => {
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock,
gitlabCiPresent: true,
- gitlabCiHistoryPath: 'test/historyPath',
+ gitlabCiHistoryPath,
});
});
diff --git a/spec/frontend/security_configuration/utils_spec.js b/spec/frontend/security_configuration/utils_spec.js
index 6ad167cadda..eaed4532baa 100644
--- a/spec/frontend/security_configuration/utils_spec.js
+++ b/spec/frontend/security_configuration/utils_spec.js
@@ -35,7 +35,15 @@ const mockValidCustomFeature = [
{
name: 'SAST',
type: 'SAST',
- customfield: 'customvalue',
+ customField: 'customvalue',
+ },
+];
+
+const mockValidCustomFeatureSnakeCase = [
+ {
+ name: 'SAST',
+ type: 'SAST',
+ custom_field: 'customvalue',
},
];
@@ -79,3 +87,15 @@ describe('returns an object with augmentedSecurityFeatures and augmentedComplian
).toEqual(expectedOutputCustomFeature);
});
});
+
+describe('returns an object with camelcased keys', () => {
+ it('given a customfeature in snakecase', () => {
+ expect(
+ augmentFeatures(
+ mockSecurityFeatures,
+ mockComplianceFeatures,
+ mockValidCustomFeatureSnakeCase,
+ ),
+ ).toEqual(expectedOutputCustomFeature);
+ });
+});
diff --git a/spec/frontend/sentry/index_spec.js b/spec/frontend/sentry/index_spec.js
index 13b9b9e909c..d1f098112e8 100644
--- a/spec/frontend/sentry/index_spec.js
+++ b/spec/frontend/sentry/index_spec.js
@@ -7,6 +7,8 @@ describe('SentryConfig options', () => {
const gitlabUrl = 'gitlabUrl';
const environment = 'test';
const revision = 'revision';
+ const featureCategory = 'my_feature_category';
+
let indexReturnValue;
beforeEach(() => {
@@ -16,6 +18,7 @@ describe('SentryConfig options', () => {
current_user_id: currentUserId,
gitlab_url: gitlabUrl,
revision,
+ feature_category: featureCategory,
};
process.env.HEAD_COMMIT_SHA = revision;
@@ -34,6 +37,7 @@ describe('SentryConfig options', () => {
release: revision,
tags: {
revision,
+ feature_category: featureCategory,
},
});
});
diff --git a/spec/frontend/sentry/sentry_config_spec.js b/spec/frontend/sentry/sentry_config_spec.js
index 1f5097ef2a8..9f67b681b8d 100644
--- a/spec/frontend/sentry/sentry_config_spec.js
+++ b/spec/frontend/sentry/sentry_config_spec.js
@@ -72,11 +72,13 @@ describe('SentryConfig', () => {
release: 'revision',
tags: {
revision: 'revision',
+ feature_category: 'my_feature_category',
},
};
beforeEach(() => {
jest.spyOn(Sentry, 'init').mockImplementation();
+ jest.spyOn(Sentry, 'setTags').mockImplementation();
sentryConfig.options = options;
sentryConfig.IGNORE_ERRORS = 'ignore_errors';
@@ -89,7 +91,6 @@ describe('SentryConfig', () => {
expect(Sentry.init).toHaveBeenCalledWith({
dsn: options.dsn,
release: options.release,
- tags: options.tags,
sampleRate: 0.95,
whitelistUrls: options.whitelistUrls,
environment: 'test',
@@ -98,6 +99,10 @@ describe('SentryConfig', () => {
});
});
+ it('should call Sentry.setTags', () => {
+ expect(Sentry.setTags).toHaveBeenCalledWith(options.tags);
+ });
+
it('should set environment from options', () => {
sentryConfig.options.environment = 'development';
@@ -106,7 +111,6 @@ describe('SentryConfig', () => {
expect(Sentry.init).toHaveBeenCalledWith({
dsn: options.dsn,
release: options.release,
- tags: options.tags,
sampleRate: 0.95,
whitelistUrls: options.whitelistUrls,
environment: 'development',
diff --git a/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
index 36f6746b754..53bef449c2f 100644
--- a/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
+++ b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
@@ -3,7 +3,7 @@
exports[`EmptyStateComponent should render content 1`] = `
"<section class=\\"row empty-state text-center\\">
<div class=\\"col-12\\">
- <div class=\\"svg-250 svg-content\\"><img src=\\"/image.svg\\" alt=\\"\\" class=\\"gl-max-w-full\\"></div>
+ <div class=\\"svg-250 svg-content\\"><img src=\\"/image.svg\\" alt=\\"\\" role=\\"img\\" class=\\"gl-max-w-full\\"></div>
</div>
<div class=\\"col-12\\">
<div class=\\"text-content gl-mx-auto gl-my-0 gl-p-5\\">
diff --git a/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js b/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js
index b49e6255923..2d5a3653631 100644
--- a/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js
+++ b/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js
@@ -1,7 +1,6 @@
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import AssigneeAvatar from '~/sidebar/components/assignees/assignee_avatar.vue';
import CollapsedAssignee from '~/sidebar/components/assignees/collapsed_assignee.vue';
-import UserNameWithStatus from '~/sidebar/components/assignees/user_name_with_status.vue';
import userDataMock from '../../user_data_mock';
const TEST_USER = userDataMock();
@@ -17,11 +16,8 @@ describe('CollapsedAssignee assignee component', () => {
...props,
};
- wrapper = shallowMount(CollapsedAssignee, {
+ wrapper = mount(CollapsedAssignee, {
propsData,
- stubs: {
- UserNameWithStatus,
- },
});
}
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
index 0e052abffeb..8504684d23a 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
@@ -176,7 +176,7 @@ describe('Sidebar assignees widget', () => {
).toBe(true);
});
- it('emits an event with assignees list on successful mutation', async () => {
+ it('emits an event with assignees list and issuable id on successful mutation', async () => {
createComponent();
await waitForPromises();
@@ -193,18 +193,21 @@ describe('Sidebar assignees widget', () => {
expect(wrapper.emitted('assignees-updated')).toEqual([
[
- [
- {
- __typename: 'User',
- avatarUrl:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- id: 'gid://gitlab/User/1',
- name: 'Administrator',
- username: 'root',
- webUrl: '/root',
- status: null,
- },
- ],
+ {
+ assignees: [
+ {
+ __typename: 'User',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ id: 'gid://gitlab/User/1',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: '/root',
+ status: null,
+ },
+ ],
+ id: 1,
+ },
],
]);
});
@@ -285,6 +288,21 @@ describe('Sidebar assignees widget', () => {
expect(updateIssueAssigneesMutationSuccess).not.toHaveBeenCalled();
expect(findUserSelect().isVisible()).toBe(true);
});
+
+ it('calls the mutation old issuable id if `iid` prop was changed', async () => {
+ findUserSelect().vm.$emit('input', [{ username: 'francina.skiles' }]);
+ wrapper.setProps({
+ iid: '2',
+ });
+ await nextTick();
+ findEditableItem().vm.$emit('close');
+
+ expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
+ assigneeUsernames: ['francina.skiles'],
+ fullPath: '/mygroup/myProject',
+ iid: '1',
+ });
+ });
});
it('shows an error if update assignees mutation is rejected', async () => {
diff --git a/spec/frontend/sidebar/components/assignees/user_name_with_status_spec.js b/spec/frontend/sidebar/components/assignees/user_name_with_status_spec.js
index 9483c6624c5..4dbf3d426bb 100644
--- a/spec/frontend/sidebar/components/assignees/user_name_with_status_spec.js
+++ b/spec/frontend/sidebar/components/assignees/user_name_with_status_spec.js
@@ -1,25 +1,21 @@
-import { GlSprintf } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { AVAILABILITY_STATUS } from '~/set_status_modal/utils';
import UserNameWithStatus from '~/sidebar/components/assignees/user_name_with_status.vue';
-const name = 'Goku';
+const name = 'Administrator';
const containerClasses = 'gl-cool-class gl-over-9000';
describe('UserNameWithStatus', () => {
let wrapper;
function createComponent(props = {}) {
- return shallowMount(UserNameWithStatus, {
+ wrapper = mount(UserNameWithStatus, {
propsData: { name, containerClasses, ...props },
- stubs: {
- GlSprintf,
- },
});
}
beforeEach(() => {
- wrapper = createComponent();
+ createComponent();
});
afterEach(() => {
@@ -41,11 +37,39 @@ describe('UserNameWithStatus', () => {
describe(`with availability="${AVAILABILITY_STATUS.BUSY}"`, () => {
beforeEach(() => {
- wrapper = createComponent({ availability: AVAILABILITY_STATUS.BUSY });
+ createComponent({ availability: AVAILABILITY_STATUS.BUSY });
});
it('will render "Busy"', () => {
- expect(wrapper.html()).toContain('Goku (Busy)');
+ expect(wrapper.text()).toContain('(Busy)');
+ });
+ });
+
+ describe('when user has pronouns set', () => {
+ const pronouns = 'they/them';
+
+ beforeEach(() => {
+ createComponent({ pronouns });
+ });
+
+ it("renders user's name with pronouns", () => {
+ expect(wrapper.text()).toMatchInterpolatedText(`${name} (${pronouns})`);
+ });
+ });
+
+ describe('when user does not have pronouns set', () => {
+ describe.each`
+ pronouns
+ ${undefined}
+ ${null}
+ ${''}
+ ${' '}
+ `('when `pronouns` prop is $pronouns', ({ pronouns }) => {
+ it("renders only the user's name", () => {
+ createComponent({ pronouns });
+
+ expect(wrapper.text()).toMatchInterpolatedText(name);
+ });
});
});
});
diff --git a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
index 8d58854b013..f5e5ab4a984 100644
--- a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
+++ b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
@@ -451,8 +451,9 @@ describe('SidebarDropdownWidget', () => {
expect(projectMilestonesSpy).toHaveBeenNthCalledWith(1, {
fullPath: mockIssue.projectPath,
- title: '',
+ sort: null,
state: 'active',
+ title: '',
});
});
@@ -477,8 +478,9 @@ describe('SidebarDropdownWidget', () => {
expect(projectMilestonesSpy).toHaveBeenNthCalledWith(2, {
fullPath: mockIssue.projectPath,
- title: mockSearchTerm,
+ sort: null,
state: 'active',
+ title: mockSearchTerm,
});
});
});
diff --git a/spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js b/spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js
new file mode 100644
index 00000000000..23f1753c4bf
--- /dev/null
+++ b/spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js
@@ -0,0 +1,126 @@
+import { GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import SidebarTodoWidget from '~/sidebar/components/todo_toggle/sidebar_todo_widget.vue';
+import epicTodoQuery from '~/sidebar/queries/epic_todo.query.graphql';
+import TodoButton from '~/vue_shared/components/sidebar/todo_toggle/todo_button.vue';
+import { todosResponse, noTodosResponse } from '../../mock_data';
+
+jest.mock('~/flash');
+
+Vue.use(VueApollo);
+
+describe('Sidebar Todo Widget', () => {
+ let wrapper;
+ let fakeApollo;
+
+ const findTodoButton = () => wrapper.findComponent(TodoButton);
+
+ const createComponent = ({
+ todosQueryHandler = jest.fn().mockResolvedValue(noTodosResponse),
+ } = {}) => {
+ fakeApollo = createMockApollo([[epicTodoQuery, todosQueryHandler]]);
+
+ wrapper = shallowMount(SidebarTodoWidget, {
+ apolloProvider: fakeApollo,
+ provide: {
+ canUpdate: true,
+ isClassicSidebar: true,
+ },
+ propsData: {
+ fullPath: 'group',
+ issuableIid: '1',
+ issuableId: 'gid://gitlab/Epic/4',
+ issuableType: 'epic',
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ fakeApollo = null;
+ });
+
+ describe('when user does not have a todo for the issuable', () => {
+ beforeEach(() => {
+ createComponent();
+ return waitForPromises();
+ });
+
+ it('passes false isTodo prop to Todo button component', () => {
+ expect(findTodoButton().props('isTodo')).toBe(false);
+ });
+
+ it('emits `todoUpdated` event with a `false` payload', () => {
+ expect(wrapper.emitted('todoUpdated')).toEqual([[false]]);
+ });
+ });
+
+ describe('when user has a todo for the issuable', () => {
+ beforeEach(() => {
+ createComponent({
+ todosQueryHandler: jest.fn().mockResolvedValue(todosResponse),
+ });
+ return waitForPromises();
+ });
+
+ it('passes true isTodo prop to Todo button component', () => {
+ expect(findTodoButton().props('isTodo')).toBe(true);
+ });
+
+ it('emits `todoUpdated` event with a `true` payload', () => {
+ expect(wrapper.emitted('todoUpdated')).toEqual([[true]]);
+ });
+ });
+
+ it('displays a flash message when query is rejected', async () => {
+ createComponent({
+ todosQueryHandler: jest.fn().mockRejectedValue('Houston, we have a problem'),
+ });
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalled();
+ });
+
+ describe('collapsed', () => {
+ const event = { stopPropagation: jest.fn(), preventDefault: jest.fn() };
+
+ beforeEach(() => {
+ createComponent({
+ todosQueryHandler: jest.fn().mockResolvedValue(noTodosResponse),
+ });
+ return waitForPromises();
+ });
+
+ it('shows add todo icon', () => {
+ expect(wrapper.find(GlIcon).exists()).toBe(true);
+
+ expect(wrapper.find(GlIcon).props('name')).toBe('todo-add');
+ });
+
+ it('sets default tooltip title', () => {
+ expect(wrapper.find(GlIcon).attributes('title')).toBe('Add a to do');
+ });
+
+ it('when user has a to do', async () => {
+ createComponent({
+ todosQueryHandler: jest.fn().mockResolvedValue(todosResponse),
+ });
+
+ await waitForPromises();
+ expect(wrapper.find(GlIcon).props('name')).toBe('todo-done');
+ expect(wrapper.find(GlIcon).attributes('title')).toBe('Mark as done');
+ });
+
+ it('emits `todoUpdated` event on click on icon', async () => {
+ wrapper.find(GlIcon).vm.$emit('click', event);
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.emitted('todoUpdated')).toEqual([[false]]);
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/lock/edit_form_buttons_spec.js b/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
index 49283ea99cf..1673425947e 100644
--- a/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
+++ b/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import { deprecatedCreateFlash as flash } from '~/flash';
+import createFlash from '~/flash';
import { createStore as createMrStore } from '~/mr_notes/stores';
import createStore from '~/notes/stores';
import EditFormButtons from '~/sidebar/components/lock/edit_form_buttons.vue';
@@ -130,7 +130,7 @@ describe('EditFormButtons', () => {
});
it('does not flash an error message', () => {
- expect(flash).not.toHaveBeenCalled();
+ expect(createFlash).not.toHaveBeenCalled();
});
});
@@ -165,9 +165,9 @@ describe('EditFormButtons', () => {
});
it('calls flash with the correct message', () => {
- expect(flash).toHaveBeenCalledWith(
- `Something went wrong trying to change the locked state of this ${issuableDisplayName}`,
- );
+ expect(createFlash).toHaveBeenCalledWith({
+ message: `Something went wrong trying to change the locked state of this ${issuableDisplayName}`,
+ });
});
});
});
diff --git a/spec/frontend/sidebar/mock_data.js b/spec/frontend/sidebar/mock_data.js
index d6287b93fb9..9fab24d7518 100644
--- a/spec/frontend/sidebar/mock_data.js
+++ b/spec/frontend/sidebar/mock_data.js
@@ -530,6 +530,7 @@ export const mockMilestone1 = {
title: 'Foobar Milestone',
webUrl: 'http://gdk.test:3000/groups/gitlab-org/-/milestones/1',
state: 'active',
+ expired: false,
};
export const mockMilestone2 = {
@@ -538,6 +539,7 @@ export const mockMilestone2 = {
title: 'Awesome Milestone',
webUrl: 'http://gdk.test:3000/groups/gitlab-org/-/milestones/2',
state: 'active',
+ expired: false,
};
export const mockProjectMilestonesResponse = {
@@ -571,6 +573,7 @@ export const mockMilestoneMutationResponse = {
id: 'gid://gitlab/Milestone/2',
title: 'Awesome Milestone',
state: 'active',
+ expired: false,
__typename: 'Milestone',
},
__typename: 'Issue',
@@ -609,4 +612,38 @@ export const issuableTimeTrackingResponse = {
},
};
+export const todosResponse = {
+ data: {
+ workspace: {
+ __typename: 'Group',
+ issuable: {
+ __typename: 'Epic',
+ id: 'gid://gitlab/Epic/4',
+ currentUserTodos: {
+ nodes: [
+ {
+ id: 'gid://gitlab/Todo/433',
+ },
+ ],
+ },
+ },
+ },
+ },
+};
+
+export const noTodosResponse = {
+ data: {
+ workspace: {
+ __typename: 'Group',
+ issuable: {
+ __typename: 'Epic',
+ id: 'gid://gitlab/Epic/4',
+ currentUserTodos: {
+ nodes: [],
+ },
+ },
+ },
+ },
+};
+
export default mockData;
diff --git a/spec/frontend/sidebar/sidebar_move_issue_spec.js b/spec/frontend/sidebar/sidebar_move_issue_spec.js
index 6a7758ace40..d9972ae75c3 100644
--- a/spec/frontend/sidebar/sidebar_move_issue_spec.js
+++ b/spec/frontend/sidebar/sidebar_move_issue_spec.js
@@ -1,5 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
+import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import SidebarMoveIssue from '~/sidebar/lib/sidebar_move_issue';
import SidebarService from '~/sidebar/services/sidebar_service';
@@ -7,6 +8,8 @@ import SidebarMediator from '~/sidebar/sidebar_mediator';
import SidebarStore from '~/sidebar/stores/sidebar_store';
import Mock from './mock_data';
+jest.mock('~/flash');
+
describe('SidebarMoveIssue', () => {
let mock;
const test = {};
@@ -99,7 +102,6 @@ describe('SidebarMoveIssue', () => {
});
it('should remove loading state from confirm button on failure', (done) => {
- jest.spyOn(window, 'Flash').mockImplementation(() => {});
jest.spyOn(test.mediator, 'moveIssue').mockReturnValue(Promise.reject());
test.mediator.setMoveToProjectId(7);
@@ -108,7 +110,7 @@ describe('SidebarMoveIssue', () => {
expect(test.mediator.moveIssue).toHaveBeenCalled();
// Wait for the move issue request to fail
setImmediate(() => {
- expect(window.Flash).toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalled();
expect(test.$confirmButton.prop('disabled')).toBeFalsy();
expect(test.$confirmButton.hasClass('is-loading')).toBe(false);
done();
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
index b0c253bca65..e12255fe825 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
@@ -13,7 +13,7 @@ exports[`Snippet Blob Edit component with loaded blob matches snapshot 1`] = `
value="foo/bar/test.md"
/>
- <editor-lite-stub
+ <source-editor-stub
editoroptions="[object Object]"
fileglobalid="blob_local_7"
filename="foo/bar/test.md"
diff --git a/spec/frontend/snippets/components/edit_spec.js b/spec/frontend/snippets/components/edit_spec.js
index efdb52cfcd9..4e88ab9504e 100644
--- a/spec/frontend/snippets/components/edit_spec.js
+++ b/spec/frontend/snippets/components/edit_spec.js
@@ -7,8 +7,7 @@ import { useFakeDate } from 'helpers/fake_date';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import GetSnippetQuery from 'shared_queries/snippet/snippet.query.graphql';
-import UnsolvedCaptchaError from '~/captcha/unsolved_captcha_error';
-import { deprecatedCreateFlash as Flash } from '~/flash';
+import createFlash from '~/flash';
import * as urlUtils from '~/lib/utils/url_utility';
import SnippetEditApp from '~/snippets/components/edit.vue';
import SnippetBlobActionsEdit from '~/snippets/components/snippet_blob_actions_edit.vue';
@@ -29,7 +28,6 @@ jest.mock('~/flash');
const TEST_UPLOADED_FILES = ['foo/bar.txt', 'alpha/beta.js'];
const TEST_API_ERROR = new Error('TEST_API_ERROR');
-const TEST_CAPTCHA_ERROR = new UnsolvedCaptchaError();
const TEST_MUTATION_ERROR = 'Test mutation error';
const TEST_ACTIONS = {
NO_CONTENT: merge({}, testEntries.created.diff, { content: '' }),
@@ -319,14 +317,16 @@ describe('Snippet Edit app', () => {
});
expect(urlUtils.redirectTo).not.toHaveBeenCalled();
- expect(Flash).toHaveBeenCalledWith(expectMessage);
+ expect(createFlash).toHaveBeenCalledWith({
+ message: expectMessage,
+ });
},
);
- describe.each([TEST_API_ERROR, TEST_CAPTCHA_ERROR])('with apollo network error', (error) => {
+ describe('with apollo network error', () => {
beforeEach(async () => {
jest.spyOn(console, 'error').mockImplementation();
- mutateSpy.mockRejectedValue(error);
+ mutateSpy.mockRejectedValue(TEST_API_ERROR);
await createComponentAndSubmit();
});
@@ -337,9 +337,9 @@ describe('Snippet Edit app', () => {
it('should flash', () => {
// Apollo automatically wraps the resolver's error in a NetworkError
- expect(Flash).toHaveBeenCalledWith(
- `Can't update snippet: Network error: ${error.message}`,
- );
+ expect(createFlash).toHaveBeenCalledWith({
+ message: `Can't update snippet: Network error: ${TEST_API_ERROR.message}`,
+ });
});
it('should console error', () => {
@@ -348,7 +348,7 @@ describe('Snippet Edit app', () => {
// eslint-disable-next-line no-console
expect(console.error).toHaveBeenCalledWith(
'[gitlab] unexpected error while updating snippet',
- expect.objectContaining({ message: `Network error: ${error.message}` }),
+ expect.objectContaining({ message: `Network error: ${TEST_API_ERROR.message}` }),
);
});
});
diff --git a/spec/frontend/snippets/components/snippet_blob_edit_spec.js b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
index 4b3b21c5507..7ea27864519 100644
--- a/spec/frontend/snippets/components/snippet_blob_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
@@ -8,7 +8,7 @@ import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { joinPaths } from '~/lib/utils/url_utility';
import SnippetBlobEdit from '~/snippets/components/snippet_blob_edit.vue';
-import EditorLite from '~/vue_shared/components/editor_lite.vue';
+import SourceEditor from '~/vue_shared/components/source_editor.vue';
jest.mock('~/flash');
@@ -48,7 +48,7 @@ describe('Snippet Blob Edit component', () => {
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
const findHeader = () => wrapper.find(BlobHeaderEdit);
- const findContent = () => wrapper.find(EditorLite);
+ const findContent = () => wrapper.find(SourceEditor);
const getLastUpdatedArgs = () => {
const event = wrapper.emitted()['blob-updated'];
diff --git a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
index d9bceb76a37..757611166d7 100644
--- a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
+++ b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
@@ -8,8 +8,8 @@ import {
SUBMIT_CHANGES_MERGE_REQUEST_ERROR,
TRACKING_ACTION_CREATE_COMMIT,
TRACKING_ACTION_CREATE_MERGE_REQUEST,
- USAGE_PING_TRACKING_ACTION_CREATE_COMMIT,
- USAGE_PING_TRACKING_ACTION_CREATE_MERGE_REQUEST,
+ SERVICE_PING_TRACKING_ACTION_CREATE_COMMIT,
+ SERVICE_PING_TRACKING_ACTION_CREATE_MERGE_REQUEST,
DEFAULT_FORMATTING_CHANGES_COMMIT_MESSAGE,
DEFAULT_FORMATTING_CHANGES_COMMIT_DESCRIPTION,
} from '~/static_site_editor/constants';
@@ -237,7 +237,7 @@ describe('submitContentChanges', () => {
});
});
- describe('sends the correct Usage Ping tracking event', () => {
+ describe('sends the correct Service Ping tracking event', () => {
beforeEach(() => {
jest.spyOn(Api, 'trackRedisCounterEvent').mockResolvedValue({ data: '' });
});
@@ -245,7 +245,7 @@ describe('submitContentChanges', () => {
it('for commiting changes', () => {
return submitContentChanges(buildPayload()).then(() => {
expect(Api.trackRedisCounterEvent).toHaveBeenCalledWith(
- USAGE_PING_TRACKING_ACTION_CREATE_COMMIT,
+ SERVICE_PING_TRACKING_ACTION_CREATE_COMMIT,
);
});
});
@@ -253,7 +253,7 @@ describe('submitContentChanges', () => {
it('for creating a merge request', () => {
return submitContentChanges(buildPayload()).then(() => {
expect(Api.trackRedisCounterEvent).toHaveBeenCalledWith(
- USAGE_PING_TRACKING_ACTION_CREATE_MERGE_REQUEST,
+ SERVICE_PING_TRACKING_ACTION_CREATE_MERGE_REQUEST,
);
});
});
diff --git a/spec/frontend/terraform/components/terraform_list_spec.js b/spec/frontend/terraform/components/terraform_list_spec.js
index 882b7b55b3e..c622f86072d 100644
--- a/spec/frontend/terraform/components/terraform_list_spec.js
+++ b/spec/frontend/terraform/components/terraform_list_spec.js
@@ -47,6 +47,9 @@ describe('TerraformList', () => {
localVue,
apolloProvider,
propsData,
+ stubs: {
+ GlTab,
+ },
});
};
diff --git a/spec/frontend/token_access/mock_data.js b/spec/frontend/token_access/mock_data.js
new file mode 100644
index 00000000000..14d7b00cb6d
--- /dev/null
+++ b/spec/frontend/token_access/mock_data.js
@@ -0,0 +1,84 @@
+export const enabledJobTokenScope = {
+ data: {
+ project: {
+ ciCdSettings: {
+ jobTokenScopeEnabled: true,
+ __typename: 'ProjectCiCdSetting',
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
+export const disabledJobTokenScope = {
+ data: {
+ project: {
+ ciCdSettings: {
+ jobTokenScopeEnabled: false,
+ __typename: 'ProjectCiCdSetting',
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
+export const updateJobTokenScope = {
+ data: {
+ ciCdSettingsUpdate: {
+ ciCdSettings: {
+ jobTokenScopeEnabled: true,
+ __typename: 'ProjectCiCdSetting',
+ },
+ errors: [],
+ __typename: 'CiCdSettingsUpdatePayload',
+ },
+ },
+};
+
+export const projectsWithScope = {
+ data: {
+ project: {
+ __typename: 'Project',
+ ciJobTokenScope: {
+ __typename: 'CiJobTokenScopeType',
+ projects: {
+ __typename: 'ProjectConnection',
+ nodes: [
+ {
+ fullPath: 'root/332268-test',
+ name: 'root/332268-test',
+ },
+ ],
+ },
+ },
+ },
+ },
+};
+
+export const addProjectSuccess = {
+ data: {
+ ciJobTokenScopeAddProject: {
+ errors: [],
+ __typename: 'CiJobTokenScopeAddProjectPayload',
+ },
+ },
+};
+
+export const removeProjectSuccess = {
+ data: {
+ ciJobTokenScopeRemoveProject: {
+ errors: [],
+ __typename: 'CiJobTokenScopeRemoveProjectPayload',
+ },
+ },
+};
+
+export const mockProjects = [
+ {
+ name: 'merge-train-stuff',
+ fullPath: 'root/merge-train-stuff',
+ isLocked: false,
+ __typename: 'Project',
+ },
+ { name: 'ci-project', fullPath: 'root/ci-project', isLocked: true, __typename: 'Project' },
+];
diff --git a/spec/frontend/token_access/token_access_spec.js b/spec/frontend/token_access/token_access_spec.js
new file mode 100644
index 00000000000..c7323eb19fe
--- /dev/null
+++ b/spec/frontend/token_access/token_access_spec.js
@@ -0,0 +1,218 @@
+import { GlToggle, GlLoadingIcon } from '@gitlab/ui';
+import { createLocalVue, shallowMount, mount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import TokenAccess from '~/token_access/components/token_access.vue';
+import addProjectCIJobTokenScopeMutation from '~/token_access/graphql/mutations/add_project_ci_job_token_scope.mutation.graphql';
+import removeProjectCIJobTokenScopeMutation from '~/token_access/graphql/mutations/remove_project_ci_job_token_scope.mutation.graphql';
+import updateCIJobTokenScopeMutation from '~/token_access/graphql/mutations/update_ci_job_token_scope.mutation.graphql';
+import getCIJobTokenScopeQuery from '~/token_access/graphql/queries/get_ci_job_token_scope.query.graphql';
+import getProjectsWithCIJobTokenScopeQuery from '~/token_access/graphql/queries/get_projects_with_ci_job_token_scope.query.graphql';
+import {
+ enabledJobTokenScope,
+ disabledJobTokenScope,
+ updateJobTokenScope,
+ projectsWithScope,
+ addProjectSuccess,
+ removeProjectSuccess,
+} from './mock_data';
+
+const projectPath = 'root/my-repo';
+const error = new Error('Error');
+const localVue = createLocalVue();
+
+localVue.use(VueApollo);
+
+jest.mock('~/flash');
+
+describe('TokenAccess component', () => {
+ let wrapper;
+
+ const enabledJobTokenScopeHandler = jest.fn().mockResolvedValue(enabledJobTokenScope);
+ const disabledJobTokenScopeHandler = jest.fn().mockResolvedValue(disabledJobTokenScope);
+ const updateJobTokenScopeHandler = jest.fn().mockResolvedValue(updateJobTokenScope);
+ const getProjectsWithScope = jest.fn().mockResolvedValue(projectsWithScope);
+ const addProjectSuccessHandler = jest.fn().mockResolvedValue(addProjectSuccess);
+ const addProjectFailureHandler = jest.fn().mockRejectedValue(error);
+ const removeProjectSuccessHandler = jest.fn().mockResolvedValue(removeProjectSuccess);
+ const removeProjectFailureHandler = jest.fn().mockRejectedValue(error);
+
+ const findToggle = () => wrapper.findComponent(GlToggle);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findAddProjectBtn = () => wrapper.find('[data-testid="add-project-button"]');
+ const findRemoveProjectBtn = () => wrapper.find('[data-testid="remove-project-button"]');
+ const findTokenSection = () => wrapper.find('[data-testid="token-section"]');
+
+ const createMockApolloProvider = (requestHandlers) => {
+ return createMockApollo(requestHandlers);
+ };
+
+ const createComponent = (requestHandlers, mountFn = shallowMount) => {
+ wrapper = mountFn(TokenAccess, {
+ localVue,
+ provide: {
+ fullPath: projectPath,
+ },
+ apolloProvider: createMockApolloProvider(requestHandlers),
+ data() {
+ return {
+ targetProjectPath: 'root/test',
+ };
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('loading state', () => {
+ it('shows loading state while waiting on query to resolve', async () => {
+ createComponent([
+ [getCIJobTokenScopeQuery, enabledJobTokenScopeHandler],
+ [getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
+ ]);
+
+ expect(findLoadingIcon().exists()).toBe(true);
+
+ await waitForPromises();
+
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('toggle', () => {
+ it('the toggle should be enabled and the token section should show', async () => {
+ createComponent([
+ [getCIJobTokenScopeQuery, enabledJobTokenScopeHandler],
+ [getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
+ ]);
+
+ await waitForPromises();
+
+ expect(findToggle().props('value')).toBe(true);
+ expect(findTokenSection().exists()).toBe(true);
+ });
+
+ it('the toggle should be disabled and the token section should not show', async () => {
+ createComponent([
+ [getCIJobTokenScopeQuery, disabledJobTokenScopeHandler],
+ [getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
+ ]);
+
+ await waitForPromises();
+
+ expect(findToggle().props('value')).toBe(false);
+ expect(findTokenSection().exists()).toBe(false);
+ });
+
+ it('switching the toggle calls the mutation and fetches the projects again', async () => {
+ createComponent([
+ [getCIJobTokenScopeQuery, disabledJobTokenScopeHandler],
+ [updateCIJobTokenScopeMutation, updateJobTokenScopeHandler],
+ [getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
+ ]);
+
+ await waitForPromises();
+
+ expect(getProjectsWithScope).toHaveBeenCalledTimes(1);
+
+ findToggle().vm.$emit('change', true);
+
+ await waitForPromises();
+
+ expect(updateJobTokenScopeHandler).toHaveBeenCalledWith({
+ input: { fullPath: projectPath, jobTokenScopeEnabled: true },
+ });
+ expect(getProjectsWithScope).toHaveBeenCalledTimes(2);
+ });
+ });
+
+ describe('add project', () => {
+ it('calls add project mutation', async () => {
+ createComponent(
+ [
+ [getCIJobTokenScopeQuery, enabledJobTokenScopeHandler],
+ [getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
+ [addProjectCIJobTokenScopeMutation, addProjectSuccessHandler],
+ ],
+ mount,
+ );
+
+ await waitForPromises();
+
+ findAddProjectBtn().trigger('click');
+
+ expect(addProjectSuccessHandler).toHaveBeenCalledWith({
+ input: {
+ projectPath,
+ targetProjectPath: 'root/test',
+ },
+ });
+ });
+
+ it('add project handles error correctly', async () => {
+ createComponent(
+ [
+ [getCIJobTokenScopeQuery, enabledJobTokenScopeHandler],
+ [getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
+ [addProjectCIJobTokenScopeMutation, addProjectFailureHandler],
+ ],
+ mount,
+ );
+
+ await waitForPromises();
+
+ findAddProjectBtn().trigger('click');
+
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalled();
+ });
+ });
+
+ describe('remove project', () => {
+ it('calls remove project mutation', async () => {
+ createComponent(
+ [
+ [getCIJobTokenScopeQuery, enabledJobTokenScopeHandler],
+ [getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
+ [removeProjectCIJobTokenScopeMutation, removeProjectSuccessHandler],
+ ],
+ mount,
+ );
+
+ await waitForPromises();
+
+ findRemoveProjectBtn().trigger('click');
+
+ expect(removeProjectSuccessHandler).toHaveBeenCalledWith({
+ input: {
+ projectPath,
+ targetProjectPath: 'root/332268-test',
+ },
+ });
+ });
+
+ it('remove project handles error correctly', async () => {
+ createComponent(
+ [
+ [getCIJobTokenScopeQuery, enabledJobTokenScopeHandler],
+ [getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
+ [removeProjectCIJobTokenScopeMutation, removeProjectFailureHandler],
+ ],
+ mount,
+ );
+
+ await waitForPromises();
+
+ findRemoveProjectBtn().trigger('click');
+
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/token_access/token_projects_table_spec.js b/spec/frontend/token_access/token_projects_table_spec.js
new file mode 100644
index 00000000000..3bda0d0b530
--- /dev/null
+++ b/spec/frontend/token_access/token_projects_table_spec.js
@@ -0,0 +1,51 @@
+import { GlTable, GlButton } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import TokenProjectsTable from '~/token_access/components/token_projects_table.vue';
+import { mockProjects } from './mock_data';
+
+describe('Token projects table', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = mount(TokenProjectsTable, {
+ provide: {
+ fullPath: 'root/ci-project',
+ },
+ propsData: {
+ projects: mockProjects,
+ },
+ });
+ };
+
+ const findTable = () => wrapper.findComponent(GlTable);
+ const findAllTableRows = () => wrapper.findAll('[data-testid="projects-token-table-row"]');
+ const findDeleteProjectBtn = () => wrapper.findComponent(GlButton);
+ const findAllDeleteProjectBtn = () => wrapper.findAllComponents(GlButton);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays a table', () => {
+ expect(findTable().exists()).toBe(true);
+ });
+
+ it('displays the correct amount of table rows', () => {
+ expect(findAllTableRows()).toHaveLength(mockProjects.length);
+ });
+
+ it('delete project button emits event with correct project to delete', async () => {
+ await findDeleteProjectBtn().trigger('click');
+
+ expect(wrapper.emitted('removeProject')).toEqual([[mockProjects[0].fullPath]]);
+ });
+
+ it('does not show the remove icon if the project is locked', () => {
+ // currently two mock projects with one being a locked project
+ expect(findAllDeleteProjectBtn()).toHaveLength(1);
+ });
+});
diff --git a/spec/frontend/tracking_spec.js b/spec/frontend/tracking_spec.js
index d8dae2b2dc0..13498cfb823 100644
--- a/spec/frontend/tracking_spec.js
+++ b/spec/frontend/tracking_spec.js
@@ -197,6 +197,52 @@ describe('Tracking', () => {
expectedError,
);
});
+
+ it('does not add empty form whitelist rules', () => {
+ Tracking.enableFormTracking({ fields: { allow: ['input-class1'] } });
+
+ expect(snowplowSpy).toHaveBeenCalledWith(
+ 'enableFormTracking',
+ { fields: { whitelist: ['input-class1'] } },
+ [],
+ );
+ });
+
+ describe('when `document.readyState` does not equal `complete`', () => {
+ const originalReadyState = document.readyState;
+ const setReadyState = (value) => {
+ Object.defineProperty(document, 'readyState', {
+ value,
+ configurable: true,
+ });
+ };
+ const fireReadyStateChangeEvent = () => {
+ document.dispatchEvent(new Event('readystatechange'));
+ };
+
+ beforeEach(() => {
+ setReadyState('interactive');
+ });
+
+ afterEach(() => {
+ setReadyState(originalReadyState);
+ });
+
+ it('does not call `window.snowplow` until `readystatechange` is fired and `document.readyState` equals `complete`', () => {
+ Tracking.enableFormTracking({ fields: { allow: ['input-class1'] } });
+
+ expect(snowplowSpy).not.toHaveBeenCalled();
+
+ fireReadyStateChangeEvent();
+
+ expect(snowplowSpy).not.toHaveBeenCalled();
+
+ setReadyState('complete');
+ fireReadyStateChangeEvent();
+
+ expect(snowplowSpy).toHaveBeenCalled();
+ });
+ });
});
describe('.flushPendingEvents', () => {
diff --git a/spec/frontend/vue_alerts_spec.js b/spec/frontend/vue_alerts_spec.js
index 05b73415544..30be606292f 100644
--- a/spec/frontend/vue_alerts_spec.js
+++ b/spec/frontend/vue_alerts_spec.js
@@ -28,8 +28,8 @@ describe('VueAlerts', () => {
alerts
.map(
(x) => `
- <div class="js-vue-alert"
- data-dismissible="${x.dismissible}"
+ <div class="js-vue-alert"
+ data-dismissible="${x.dismissible}"
data-title="${x.title}"
data-primary-button-text="${x.primaryButtonText}"
data-primary-button-link="${x.primaryButtonLink}"
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
index 115f21d8b35..f44f0b98207 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import Header from '~/vue_merge_request_widget/components/mr_widget_header.vue';
@@ -26,6 +26,15 @@ describe('MRWidgetHeader', () => {
expect(downloadPlainDiffEl.attributes('href')).toBe('/mr/plainDiffPath');
};
+ const commonMrProps = {
+ divergedCommitsCount: 1,
+ sourceBranch: 'mr-widget-refactor',
+ sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>',
+ targetBranch: 'main',
+ targetBranchPath: '/foo/bar/main',
+ statusPath: 'abc',
+ };
+
describe('computed', () => {
describe('shouldShowCommitsBehindText', () => {
it('return true when there are divergedCommitsCount', () => {
@@ -59,36 +68,28 @@ describe('MRWidgetHeader', () => {
describe('commitsBehindText', () => {
it('returns singular when there is one commit', () => {
- createComponent({
- mr: {
- divergedCommitsCount: 1,
- sourceBranch: 'mr-widget-refactor',
- sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>',
- targetBranch: 'main',
- targetBranchPath: '/foo/bar/main',
- statusPath: 'abc',
+ wrapper = mount(Header, {
+ propsData: {
+ mr: commonMrProps,
},
});
- expect(wrapper.vm.commitsBehindText).toBe(
- 'The source branch is <a href="/foo/bar/main">1 commit behind</a> the target branch',
+ expect(wrapper.find('.diverged-commits-count').element.innerHTML).toBe(
+ 'The source branch is <a href="/foo/bar/main" class="gl-link">1 commit behind</a> the target branch',
);
});
it('returns plural when there is more than one commit', () => {
- createComponent({
- mr: {
- divergedCommitsCount: 2,
- sourceBranch: 'mr-widget-refactor',
- sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>',
- targetBranch: 'main',
- targetBranchPath: '/foo/bar/main',
- statusPath: 'abc',
+ wrapper = mount(Header, {
+ propsData: {
+ mr: {
+ ...commonMrProps,
+ divergedCommitsCount: 2,
+ },
},
});
-
- expect(wrapper.vm.commitsBehindText).toBe(
- 'The source branch is <a href="/foo/bar/main">2 commits behind</a> the target branch',
+ expect(wrapper.find('.diverged-commits-count').element.innerHTML).toBe(
+ 'The source branch is <a href="/foo/bar/main" class="gl-link">2 commits behind</a> the target branch',
);
});
});
@@ -273,19 +274,18 @@ describe('MRWidgetHeader', () => {
describe('with diverged commits', () => {
beforeEach(() => {
- createComponent({
- mr: {
- divergedCommitsCount: 12,
- sourceBranch: 'mr-widget-refactor',
- sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>',
- sourceBranchRemoved: false,
- targetBranchPath: 'foo/bar/commits-path',
- targetBranchTreePath: 'foo/bar/tree/path',
- targetBranch: 'main',
- isOpen: true,
- emailPatchesPath: '/mr/email-patches',
- plainDiffPath: '/mr/plainDiffPath',
- statusPath: 'abc',
+ wrapper = mount(Header, {
+ propsData: {
+ mr: {
+ ...commonMrProps,
+ divergedCommitsCount: 12,
+ sourceBranchRemoved: false,
+ targetBranchPath: 'foo/bar/commits-path',
+ targetBranchTreePath: 'foo/bar/tree/path',
+ isOpen: true,
+ emailPatchesPath: '/mr/email-patches',
+ plainDiffPath: '/mr/plainDiffPath',
+ },
},
});
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
index 5081e1e5906..d3221cc2fc7 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
@@ -70,9 +70,9 @@ describe('Merge request widget rebase component', () => {
const text = findRebaseMessageElText();
- expect(text).toContain('Fast-forward merge is not possible.');
+ expect(text).toContain('Merge blocked');
expect(text.replace(/\s\s+/g, ' ')).toContain(
- 'Rebase the source branch onto the target branch.',
+ 'the source branch must be rebased onto the target branch',
);
});
@@ -111,12 +111,10 @@ describe('Merge request widget rebase component', () => {
const text = findRebaseMessageElText();
- expect(text).toContain('Fast-forward merge is not possible.');
- expect(text).toContain('Rebase the source branch onto');
- expect(text).toContain('foo');
- expect(text.replace(/\s\s+/g, ' ')).toContain(
- 'to allow this merge request to be merged.',
+ expect(text).toContain(
+ 'Merge blocked: the source branch must be rebased onto the target branch.',
);
+ expect(text).toContain('the source branch must be rebased');
});
it('should render the correct target branch name', () => {
@@ -136,7 +134,7 @@ describe('Merge request widget rebase component', () => {
const elem = findRebaseMessageEl();
expect(elem.text()).toContain(
- `Fast-forward merge is not possible. Rebase the source branch onto ${targetBranch} to allow this merge request to be merged.`,
+ `Merge blocked: the source branch must be rebased onto the target branch.`,
);
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js b/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js
index 5d09af50420..8214cedc4a1 100644
--- a/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js
@@ -63,7 +63,7 @@ describe('Commits edit component', () => {
beforeEach(() => {
createComponent({
header: `<div class="test-header">${testCommitMessage}</div>`,
- checkbox: `<label slot="checkbox" class="test-checkbox">${testLabel}</label >`,
+ checkbox: `<label class="test-checkbox">${testLabel}</label >`,
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
index fee78d3af94..e1bce7f0474 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
@@ -199,7 +199,7 @@ describe('MRWidgetConflicts', () => {
});
expect(removeBreakLine(wrapper.text()).trim()).toContain(
- 'Fast-forward merge is not possible. To merge this request, first rebase locally.',
+ 'Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally.',
);
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
index 6bb87893c31..9c3a6d581e8 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
@@ -217,7 +217,6 @@ describe('MRWidgetMerged', () => {
vm.mr.sourceBranchRemoved = false;
Vue.nextTick(() => {
- expect(vm.$el.innerText).toContain('You can delete the source branch now');
expect(vm.$el.innerText).not.toContain('The source branch has been deleted');
done();
});
@@ -229,7 +228,6 @@ describe('MRWidgetMerged', () => {
Vue.nextTick(() => {
expect(vm.$el.innerText).toContain('The source branch is being deleted');
- expect(vm.$el.innerText).not.toContain('You can delete the source branch now');
expect(vm.$el.innerText).not.toContain('The source branch has been deleted');
done();
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index 2d00cd8e8d4..cd77d442cbf 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -70,6 +70,9 @@ const createComponent = (customConfig = {}, mergeRequestWidgetGraphql = false) =
mergeRequestWidgetGraphql,
},
},
+ stubs: {
+ CommitEdit,
+ },
});
};
diff --git a/spec/frontend/vue_mr_widget/mock_data.js b/spec/frontend/vue_mr_widget/mock_data.js
index 8e36a9225d6..e6f1e15d718 100644
--- a/spec/frontend/vue_mr_widget/mock_data.js
+++ b/spec/frontend/vue_mr_widget/mock_data.js
@@ -273,9 +273,9 @@ export default {
'http://localhost:3000/root/acets-app/commit/53027d060246c8f47e4a9310fb332aa52f221775',
mr_troubleshooting_docs_path: 'help',
ci_troubleshooting_docs_path: 'help2',
- merge_request_pipelines_docs_path: '/help/ci/merge_request_pipelines/index.md',
+ merge_request_pipelines_docs_path: '/help/ci/pipelines/merge_request_pipelines.md',
merge_train_when_pipeline_succeeds_docs_path:
- '/help/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/#startadd-to-merge-train-when-pipeline-succeeds',
+ '/help/ci/pipelines/merge_trains.md#startadd-to-merge-train-when-pipeline-succeeds',
squash: true,
visual_review_app_available: true,
merge_trains_enabled: true,
diff --git a/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap
index 3f91591f5cd..c14cf0db370 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap
@@ -7,7 +7,7 @@ exports[`vue_shared/components/awards_list default matches snapshot 1`] = `
<button
class="btn gl-mr-3 gl-my-2 btn-default btn-md gl-button"
data-testid="award-button"
- title="Ada, Leonardo, and Marie"
+ title="Ada, Leonardo, and Marie reacted with :thumbsup:"
type="button"
>
<!---->
@@ -37,7 +37,7 @@ exports[`vue_shared/components/awards_list default matches snapshot 1`] = `
<button
class="btn gl-mr-3 gl-my-2 btn-default btn-md gl-button selected"
data-testid="award-button"
- title="You, Ada, and Marie"
+ title="You, Ada, and Marie reacted with :thumbsdown:"
type="button"
>
<!---->
@@ -67,7 +67,7 @@ exports[`vue_shared/components/awards_list default matches snapshot 1`] = `
<button
class="btn gl-mr-3 gl-my-2 btn-default btn-md gl-button"
data-testid="award-button"
- title="Ada and Jane"
+ title="Ada and Jane reacted with :smile:"
type="button"
>
<!---->
@@ -97,7 +97,7 @@ exports[`vue_shared/components/awards_list default matches snapshot 1`] = `
<button
class="btn gl-mr-3 gl-my-2 btn-default btn-md gl-button selected"
data-testid="award-button"
- title="You, Ada, Jane, and Leonardo"
+ title="You, Ada, Jane, and Leonardo reacted with :ok_hand:"
type="button"
>
<!---->
@@ -127,7 +127,7 @@ exports[`vue_shared/components/awards_list default matches snapshot 1`] = `
<button
class="btn gl-mr-3 gl-my-2 btn-default btn-md gl-button selected"
data-testid="award-button"
- title="You"
+ title="You reacted with :cactus:"
type="button"
>
<!---->
@@ -157,7 +157,7 @@ exports[`vue_shared/components/awards_list default matches snapshot 1`] = `
<button
class="btn gl-mr-3 gl-my-2 btn-default btn-md gl-button"
data-testid="award-button"
- title="Marie"
+ title="Marie reacted with :a:"
type="button"
>
<!---->
@@ -187,7 +187,7 @@ exports[`vue_shared/components/awards_list default matches snapshot 1`] = `
<button
class="btn gl-mr-3 gl-my-2 btn-default btn-md gl-button selected"
data-testid="award-button"
- title="You"
+ title="You reacted with :b:"
type="button"
>
<!---->
diff --git a/spec/frontend/vue_shared/components/__snapshots__/editor_lite_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/source_editor_spec.js.snap
index 26785855369..7ce155f6a5d 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/editor_lite_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/source_editor_spec.js.snap
@@ -1,9 +1,9 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`Editor Lite component rendering matches the snapshot 1`] = `
+exports[`Source Editor component rendering matches the snapshot 1`] = `
<div
data-editor-loading=""
- id="editor-lite-snippet_777"
+ id="source-editor-snippet_777"
>
<pre
class="editor-loading-content"
diff --git a/spec/frontend/vue_shared/components/awards_list_spec.js b/spec/frontend/vue_shared/components/awards_list_spec.js
index 55f9eedc169..95e9760c181 100644
--- a/spec/frontend/vue_shared/components/awards_list_spec.js
+++ b/spec/frontend/vue_shared/components/awards_list_spec.js
@@ -98,43 +98,43 @@ describe('vue_shared/components/awards_list', () => {
classes: REACTION_CONTROL_CLASSES,
count: 3,
html: matchingEmojiTag(EMOJI_THUMBSUP),
- title: 'Ada, Leonardo, and Marie',
+ title: `Ada, Leonardo, and Marie reacted with :${EMOJI_THUMBSUP}:`,
},
{
classes: [...REACTION_CONTROL_CLASSES, 'selected'],
count: 3,
html: matchingEmojiTag(EMOJI_THUMBSDOWN),
- title: 'You, Ada, and Marie',
+ title: `You, Ada, and Marie reacted with :${EMOJI_THUMBSDOWN}:`,
},
{
classes: REACTION_CONTROL_CLASSES,
count: 2,
html: matchingEmojiTag(EMOJI_SMILE),
- title: 'Ada and Jane',
+ title: `Ada and Jane reacted with :${EMOJI_SMILE}:`,
},
{
classes: [...REACTION_CONTROL_CLASSES, 'selected'],
count: 4,
html: matchingEmojiTag(EMOJI_OK),
- title: 'You, Ada, Jane, and Leonardo',
+ title: `You, Ada, Jane, and Leonardo reacted with :${EMOJI_OK}:`,
},
{
classes: [...REACTION_CONTROL_CLASSES, 'selected'],
count: 1,
html: matchingEmojiTag(EMOJI_CACTUS),
- title: 'You',
+ title: `You reacted with :${EMOJI_CACTUS}:`,
},
{
classes: REACTION_CONTROL_CLASSES,
count: 1,
html: matchingEmojiTag(EMOJI_A),
- title: 'Marie',
+ title: `Marie reacted with :${EMOJI_A}:`,
},
{
classes: [...REACTION_CONTROL_CLASSES, 'selected'],
count: 1,
html: matchingEmojiTag(EMOJI_B),
- title: 'You',
+ title: `You reacted with :${EMOJI_B}:`,
},
]);
});
@@ -246,13 +246,13 @@ describe('vue_shared/components/awards_list', () => {
classes: REACTION_CONTROL_CLASSES,
count: 1,
html: matchingEmojiTag(EMOJI_100),
- title: 'Marie',
+ title: `Marie reacted with :${EMOJI_100}:`,
},
{
classes: REACTION_CONTROL_CLASSES,
count: 1,
html: matchingEmojiTag(EMOJI_SMILE),
- title: 'Marie',
+ title: `Marie reacted with :${EMOJI_SMILE}:`,
},
]);
});
diff --git a/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
index f592db935ec..d14f3e5559f 100644
--- a/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
@@ -10,9 +10,10 @@ describe('Blob Rich Viewer component', () => {
const content = '<h1 id="markdown">Foo Bar</h1>';
const defaultType = 'markdown';
- function createComponent(type = defaultType) {
+ function createComponent(type = defaultType, richViewer) {
wrapper = shallowMount(RichViewer, {
propsData: {
+ richViewer,
content,
type,
},
@@ -31,6 +32,12 @@ describe('Blob Rich Viewer component', () => {
expect(wrapper.html()).toContain(content);
});
+ it('renders the richViewer if one is present', () => {
+ const richViewer = '<div class="js-pdf-viewer"></div>';
+ createComponent('pdf', richViewer);
+ expect(wrapper.html()).toContain(richViewer);
+ });
+
it('queries for advanced viewer', () => {
expect(handleBlobRichViewer).toHaveBeenCalledWith(expect.anything(), defaultType);
});
diff --git a/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js b/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
index 46d4edad891..c6c351a7f3f 100644
--- a/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
@@ -2,7 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
import { HIGHLIGHT_CLASS_NAME } from '~/vue_shared/components/blob_viewers/constants';
import SimpleViewer from '~/vue_shared/components/blob_viewers/simple_viewer.vue';
-import EditorLite from '~/vue_shared/components/editor_lite.vue';
+import SourceEditor from '~/vue_shared/components/source_editor.vue';
describe('Blob Simple Viewer component', () => {
let wrapper;
@@ -96,7 +96,7 @@ describe('Blob Simple Viewer component', () => {
});
describe('Vue refactoring to use Source Editor', () => {
- const findEditorLite = () => wrapper.find(EditorLite);
+ const findSourceEditor = () => wrapper.find(SourceEditor);
it.each`
doesRender | condition | isRawContent | isRefactorFlagEnabled
@@ -105,19 +105,19 @@ describe('Blob Simple Viewer component', () => {
${'Does not'} | ${'both, the FF and rawContent are not specified'} | ${false} | ${false}
${'Does'} | ${'both, the FF and rawContent are specified'} | ${true} | ${true}
`(
- '$doesRender render Editor Lite component in readonly mode when $condition',
+ '$doesRender render Source Editor component in readonly mode when $condition',
async ({ isRawContent, isRefactorFlagEnabled } = {}) => {
createComponent('raw content', isRawContent, isRefactorFlagEnabled);
await waitForPromises();
if (isRawContent && isRefactorFlagEnabled) {
- expect(findEditorLite().exists()).toBe(true);
+ expect(findSourceEditor().exists()).toBe(true);
- expect(findEditorLite().props('value')).toBe('raw content');
- expect(findEditorLite().props('fileName')).toBe('test.js');
- expect(findEditorLite().props('editorOptions')).toEqual({ readOnly: true });
+ expect(findSourceEditor().props('value')).toBe('raw content');
+ expect(findSourceEditor().props('fileName')).toBe('test.js');
+ expect(findSourceEditor().props('editorOptions')).toEqual({ readOnly: true });
} else {
- expect(findEditorLite().exists()).toBe(false);
+ expect(findSourceEditor().exists()).toBe(false);
}
},
);
diff --git a/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js b/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
index eacc41ccdad..8deb466b33c 100644
--- a/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
@@ -109,9 +109,11 @@ describe('ImageDiffViewer', () => {
components: {
imageDiffViewer,
},
- data: {
- ...allProps,
- diffMode: 'renamed',
+ data() {
+ return {
+ ...allProps,
+ diffMode: 'renamed',
+ };
},
...compileToFunctions(`
<image-diff-viewer
@@ -121,7 +123,9 @@ describe('ImageDiffViewer', () => {
:new-size="newSize"
:old-size="oldSize"
>
- <span slot="image-overlay" class="overlay">test</span>
+ <template #image-overlay>
+ <span class="overlay">test</span>
+ </template>
</image-diff-viewer>
`),
}).$mount();
diff --git a/spec/frontend/vue_shared/components/dismissible_alert_spec.js b/spec/frontend/vue_shared/components/dismissible_alert_spec.js
index cfa6d1064e5..fcd004d35a7 100644
--- a/spec/frontend/vue_shared/components/dismissible_alert_spec.js
+++ b/spec/frontend/vue_shared/components/dismissible_alert_spec.js
@@ -5,18 +5,12 @@ import DismissibleAlert from '~/vue_shared/components/dismissible_alert.vue';
const TEST_HTML = 'Hello World! <strong>Foo</strong>';
describe('vue_shared/components/dismissible_alert', () => {
- const testAlertProps = {
- primaryButtonText: 'Lorem ipsum',
- primaryButtonLink: '/lorem/ipsum',
- };
-
let wrapper;
const createComponent = (props = {}) => {
wrapper = shallowMount(DismissibleAlert, {
propsData: {
html: TEST_HTML,
- ...testAlertProps,
...props,
},
});
@@ -28,16 +22,13 @@ describe('vue_shared/components/dismissible_alert', () => {
const findAlert = () => wrapper.find(GlAlert);
- describe('with default', () => {
+ describe('default', () => {
beforeEach(() => {
createComponent();
});
it('shows alert', () => {
- const alert = findAlert();
-
- expect(alert.exists()).toBe(true);
- expect(alert.props()).toEqual(expect.objectContaining(testAlertProps));
+ expect(findAlert().exists()).toBe(true);
});
it('shows given HTML', () => {
@@ -54,4 +45,32 @@ describe('vue_shared/components/dismissible_alert', () => {
});
});
});
+
+ describe('with additional props', () => {
+ const testAlertProps = {
+ dismissible: true,
+ title: 'Mock Title',
+ primaryButtonText: 'Lorem ipsum',
+ primaryButtonLink: '/lorem/ipsum',
+ variant: 'warning',
+ };
+
+ beforeEach(() => {
+ createComponent(testAlertProps);
+ });
+
+ it('passes other props', () => {
+ expect(findAlert().props()).toEqual(expect.objectContaining(testAlertProps));
+ });
+ });
+
+ describe('with unsafe HTML', () => {
+ beforeEach(() => {
+ createComponent({ html: '<a onclick="alert("XSS")">Link</a>' });
+ });
+
+ it('removes unsafe HTML', () => {
+ expect(findAlert().html()).toContain('<a>Link</a>');
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/file_finder/index_spec.js b/spec/frontend/vue_shared/components/file_finder/index_spec.js
index d757b7fac72..181fc4017a3 100644
--- a/spec/frontend/vue_shared/components/file_finder/index_spec.js
+++ b/spec/frontend/vue_shared/components/file_finder/index_spec.js
@@ -154,6 +154,16 @@ describe('File finder item spec', () => {
});
});
+ describe('DOM Performance', () => {
+ it('renders less DOM nodes if not visible by utilizing v-if', async () => {
+ vm.visible = false;
+
+ await waitForPromises();
+
+ expect(vm.$el).toBeInstanceOf(Comment);
+ });
+ });
+
describe('watches', () => {
describe('searchText', () => {
it('resets focusedIndex when updated', (done) => {
@@ -169,7 +179,7 @@ describe('File finder item spec', () => {
});
describe('visible', () => {
- it('returns searchText when false', (done) => {
+ it('resets searchText when changed to false', (done) => {
vm.searchText = 'test';
vm.visible = true;
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
index 93cddff8421..1b97011bf7f 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
@@ -11,7 +11,7 @@ import {
processFilters,
filterToQueryObject,
urlQueryToFilter,
- getRecentlyUsedTokenValues,
+ getRecentlyUsedSuggestions,
setTokenValueToRecentlyUsed,
} from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
@@ -328,32 +328,32 @@ describe('urlQueryToFilter', () => {
);
});
-describe('getRecentlyUsedTokenValues', () => {
+describe('getRecentlyUsedSuggestions', () => {
useLocalStorageSpy();
beforeEach(() => {
localStorage.removeItem(mockStorageKey);
});
- it('returns array containing recently used token values from provided recentTokenValuesStorageKey', () => {
+ it('returns array containing recently used token values from provided recentSuggestionsStorageKey', () => {
setLocalStorageAvailability(true);
const mockExpectedArray = [{ foo: 'bar' }];
localStorage.setItem(mockStorageKey, JSON.stringify(mockExpectedArray));
- expect(getRecentlyUsedTokenValues(mockStorageKey)).toEqual(mockExpectedArray);
+ expect(getRecentlyUsedSuggestions(mockStorageKey)).toEqual(mockExpectedArray);
});
- it('returns empty array when provided recentTokenValuesStorageKey does not have anything in localStorage', () => {
+ it('returns empty array when provided recentSuggestionsStorageKey does not have anything in localStorage', () => {
setLocalStorageAvailability(true);
- expect(getRecentlyUsedTokenValues(mockStorageKey)).toEqual([]);
+ expect(getRecentlyUsedSuggestions(mockStorageKey)).toEqual([]);
});
it('returns empty array when when access to localStorage is not available', () => {
setLocalStorageAvailability(false);
- expect(getRecentlyUsedTokenValues(mockStorageKey)).toEqual([]);
+ expect(getRecentlyUsedSuggestions(mockStorageKey)).toEqual([]);
});
});
@@ -366,7 +366,7 @@ describe('setTokenValueToRecentlyUsed', () => {
localStorage.removeItem(mockStorageKey);
});
- it('adds provided tokenValue to localStorage for recentTokenValuesStorageKey', () => {
+ it('adds provided tokenValue to localStorage for recentSuggestionsStorageKey', () => {
setLocalStorageAvailability(true);
setTokenValueToRecentlyUsed(mockStorageKey, mockTokenValue1);
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
index 951b050495c..74f579e77ed 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
@@ -94,7 +94,7 @@ describe('AuthorToken', () => {
it('calls `config.fetchAuthors` with provided searchTerm param', () => {
jest.spyOn(wrapper.vm.config, 'fetchAuthors');
- getBaseToken().vm.$emit('fetch-token-values', mockAuthors[0].username);
+ getBaseToken().vm.$emit('fetch-suggestions', mockAuthors[0].username);
expect(wrapper.vm.config.fetchAuthors).toHaveBeenCalledWith(
mockAuthorToken.fetchPath,
@@ -105,17 +105,17 @@ describe('AuthorToken', () => {
it('sets response to `authors` when request is succesful', () => {
jest.spyOn(wrapper.vm.config, 'fetchAuthors').mockResolvedValue(mockAuthors);
- getBaseToken().vm.$emit('fetch-token-values', 'root');
+ getBaseToken().vm.$emit('fetch-suggestions', 'root');
return waitForPromises().then(() => {
- expect(getBaseToken().props('tokenValues')).toEqual(mockAuthors);
+ expect(getBaseToken().props('suggestions')).toEqual(mockAuthors);
});
});
it('calls `createFlash` with flash error message when request fails', () => {
jest.spyOn(wrapper.vm.config, 'fetchAuthors').mockRejectedValue({});
- getBaseToken().vm.$emit('fetch-token-values', 'root');
+ getBaseToken().vm.$emit('fetch-suggestions', 'root');
return waitForPromises().then(() => {
expect(createFlash).toHaveBeenCalledWith({
@@ -127,17 +127,17 @@ describe('AuthorToken', () => {
it('sets `loading` to false when request completes', async () => {
jest.spyOn(wrapper.vm.config, 'fetchAuthors').mockRejectedValue({});
- getBaseToken().vm.$emit('fetch-token-values', 'root');
+ getBaseToken().vm.$emit('fetch-suggestions', 'root');
await waitForPromises();
- expect(getBaseToken().props('tokensListLoading')).toBe(false);
+ expect(getBaseToken().props('suggestionsLoading')).toBe(false);
});
});
});
describe('template', () => {
- const activateTokenValuesList = async () => {
+ const activateSuggestionsList = async () => {
const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
@@ -154,7 +154,7 @@ describe('AuthorToken', () => {
expect(baseTokenEl.exists()).toBe(true);
expect(baseTokenEl.props()).toMatchObject({
- tokenValues: mockAuthors,
+ suggestions: mockAuthors,
fnActiveTokenValue: wrapper.vm.getActiveAuthor,
});
});
@@ -221,7 +221,7 @@ describe('AuthorToken', () => {
stubs: { Portal: true },
});
- await activateTokenValuesList();
+ await activateSuggestionsList();
const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
@@ -252,7 +252,7 @@ describe('AuthorToken', () => {
stubs: { Portal: true },
});
- await activateTokenValuesList();
+ await activateSuggestionsList();
const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
index 89c5cedc9b8..cd6ffd679d0 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
@@ -7,7 +7,7 @@ import {
import { DEFAULT_LABELS } from '~/vue_shared/components/filtered_search_bar/constants';
import {
- getRecentlyUsedTokenValues,
+ getRecentlyUsedSuggestions,
setTokenValueToRecentlyUsed,
} from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
@@ -49,10 +49,10 @@ const mockProps = {
config: mockLabelToken,
value: { data: '' },
active: false,
- tokenValues: [],
- tokensListLoading: false,
- defaultTokenValues: DEFAULT_LABELS,
- recentTokenValuesStorageKey: mockStorageKey,
+ suggestions: [],
+ suggestionsLoading: false,
+ defaultSuggestions: DEFAULT_LABELS,
+ recentSuggestionsStorageKey: mockStorageKey,
fnCurrentTokenValue: jest.fn(),
};
@@ -83,7 +83,7 @@ describe('BaseToken', () => {
props: {
...mockProps,
value: { data: `"${mockRegularLabel.title}"` },
- tokenValues: mockLabels,
+ suggestions: mockLabels,
},
});
});
@@ -93,8 +93,8 @@ describe('BaseToken', () => {
});
describe('data', () => {
- it('calls `getRecentlyUsedTokenValues` to populate `recentTokenValues` when `recentTokenValuesStorageKey` is defined', () => {
- expect(getRecentlyUsedTokenValues).toHaveBeenCalledWith(mockStorageKey);
+ it('calls `getRecentlyUsedSuggestions` to populate `recentSuggestions` when `recentSuggestionsStorageKey` is defined', () => {
+ expect(getRecentlyUsedSuggestions).toHaveBeenCalledWith(mockStorageKey);
});
});
@@ -147,15 +147,15 @@ describe('BaseToken', () => {
wrapperWithTokenActive.destroy();
});
- it('emits `fetch-token-values` event on the component when value of this prop is changed to false and `tokenValues` array is empty', async () => {
+ it('emits `fetch-suggestions` event on the component when value of this prop is changed to false and `suggestions` array is empty', async () => {
wrapperWithTokenActive.setProps({
active: false,
});
await wrapperWithTokenActive.vm.$nextTick();
- expect(wrapperWithTokenActive.emitted('fetch-token-values')).toBeTruthy();
- expect(wrapperWithTokenActive.emitted('fetch-token-values')).toEqual([
+ expect(wrapperWithTokenActive.emitted('fetch-suggestions')).toBeTruthy();
+ expect(wrapperWithTokenActive.emitted('fetch-suggestions')).toEqual([
[`"${mockRegularLabel.title}"`],
]);
});
@@ -164,7 +164,7 @@ describe('BaseToken', () => {
describe('methods', () => {
describe('handleTokenValueSelected', () => {
- it('calls `setTokenValueToRecentlyUsed` when `recentTokenValuesStorageKey` is defined', () => {
+ it('calls `setTokenValueToRecentlyUsed` when `recentSuggestionsStorageKey` is defined', () => {
const mockTokenValue = {
id: 1,
title: 'Foo',
@@ -175,14 +175,14 @@ describe('BaseToken', () => {
expect(setTokenValueToRecentlyUsed).toHaveBeenCalledWith(mockStorageKey, mockTokenValue);
});
- it('does not add token from preloadedTokenValues', async () => {
+ it('does not add token from preloadedSuggestions', async () => {
const mockTokenValue = {
id: 1,
title: 'Foo',
};
wrapper.setProps({
- preloadedTokenValues: [mockTokenValue],
+ preloadedSuggestions: [mockTokenValue],
});
await wrapper.vm.$nextTick();
@@ -228,7 +228,7 @@ describe('BaseToken', () => {
wrapperWithNoStubs.destroy();
});
- it('emits `fetch-token-values` event on component after a delay when component emits `input` event', async () => {
+ it('emits `fetch-suggestions` event on component after a delay when component emits `input` event', async () => {
jest.useFakeTimers();
wrapperWithNoStubs.find(GlFilteredSearchToken).vm.$emit('input', { data: 'foo' });
@@ -236,8 +236,8 @@ describe('BaseToken', () => {
jest.runAllTimers();
- expect(wrapperWithNoStubs.emitted('fetch-token-values')).toBeTruthy();
- expect(wrapperWithNoStubs.emitted('fetch-token-values')[2]).toEqual(['foo']);
+ expect(wrapperWithNoStubs.emitted('fetch-suggestions')).toBeTruthy();
+ expect(wrapperWithNoStubs.emitted('fetch-suggestions')[2]).toEqual(['foo']);
});
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js
index ca5dc984ae0..bd654c5a9cb 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js
@@ -7,7 +7,7 @@ import { mockIterationToken } from '../mock_data';
jest.mock('~/flash');
describe('IterationToken', () => {
- const title = 'gitlab-org: #1';
+ const id = 123;
let wrapper;
const createComponent = ({ config = mockIterationToken, value = { data: '' } } = {}) =>
@@ -28,14 +28,14 @@ describe('IterationToken', () => {
});
it('renders iteration value', async () => {
- wrapper = createComponent({ value: { data: title } });
+ wrapper = createComponent({ value: { data: id } });
await wrapper.vm.$nextTick();
const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
expect(tokenSegments).toHaveLength(3); // `Iteration` `=` `gitlab-org: #1`
- expect(tokenSegments.at(2).text()).toBe(title);
+ expect(tokenSegments.at(2).text()).toBe(id.toString());
});
it('fetches initial values', () => {
@@ -43,10 +43,10 @@ describe('IterationToken', () => {
wrapper = createComponent({
config: { ...mockIterationToken, fetchIterations: fetchIterationsSpy },
- value: { data: title },
+ value: { data: id },
});
- expect(fetchIterationsSpy).toHaveBeenCalledWith(title);
+ expect(fetchIterationsSpy).toHaveBeenCalledWith(id);
});
it('fetches iterations on user input', () => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
index cc40ff96b65..ec9458f64d2 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
@@ -159,7 +159,7 @@ describe('LabelToken', () => {
expect(baseTokenEl.exists()).toBe(true);
expect(baseTokenEl.props()).toMatchObject({
- tokenValues: mockLabels,
+ suggestions: mockLabels,
fnActiveTokenValue: wrapper.vm.getActiveLabel,
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
index 9f550ac9afc..74ceb03bb96 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
@@ -9,6 +9,7 @@ import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
+import { sortMilestonesByDueDate } from '~/milestones/milestone_utils';
import { DEFAULT_MILESTONES } from '~/vue_shared/components/filtered_search_bar/constants';
import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
@@ -21,6 +22,7 @@ import {
} from '../mock_data';
jest.mock('~/flash');
+jest.mock('~/milestones/milestone_utils');
const defaultStubs = {
Portal: true,
@@ -112,6 +114,7 @@ describe('MilestoneToken', () => {
return waitForPromises().then(() => {
expect(wrapper.vm.milestones).toEqual(mockMilestones);
+ expect(sortMilestonesByDueDate).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
index f3ce03796f9..5e956d66b6a 100644
--- a/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
+++ b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
@@ -55,6 +55,8 @@ exports[`Issue placeholder note component matches snapshot 1`] = `
<p>
Foo
</p>
+
+
</div>
</div>
</div>
diff --git a/spec/frontend/vue_shared/components/paginated_list_spec.js b/spec/frontend/vue_shared/components/paginated_list_spec.js
index c0ee49f194f..9f819cc4e94 100644
--- a/spec/frontend/vue_shared/components/paginated_list_spec.js
+++ b/spec/frontend/vue_shared/components/paginated_list_spec.js
@@ -7,9 +7,11 @@ describe('Pagination links component', () => {
let glPaginatedList;
const template = `
- <div class="slot" slot-scope="{ listItem }">
- <span class="item">Item Name: {{listItem.id}}</span>
- </div>
+ <template #default="{ listItem }">
+ <div class="slot">
+ <span class="item">Item Name: {{ listItem.id }}</span>
+ </div>
+ </template>
`;
const props = {
diff --git a/spec/frontend/vue_shared/components/project_avatar/default_spec.js b/spec/frontend/vue_shared/components/project_avatar/default_spec.js
index 0daadeebc20..84dad2374cb 100644
--- a/spec/frontend/vue_shared/components/project_avatar/default_spec.js
+++ b/spec/frontend/vue_shared/components/project_avatar/default_spec.js
@@ -3,7 +3,7 @@ import mountComponent from 'helpers/vue_mount_component_helper';
import { projectData } from 'jest/ide/mock_data';
import { TEST_HOST } from 'spec/test_constants';
import { getFirstCharacterCapitalized } from '~/lib/utils/text_utility';
-import ProjectAvatarDefault from '~/vue_shared/components/project_avatar/default.vue';
+import ProjectAvatarDefault from '~/vue_shared/components/deprecated_project_avatar/default.vue';
describe('ProjectAvatarDefault component', () => {
const Component = Vue.extend(ProjectAvatarDefault);
diff --git a/spec/frontend/vue_shared/components/project_avatar_spec.js b/spec/frontend/vue_shared/components/project_avatar_spec.js
new file mode 100644
index 00000000000..d55f3127a74
--- /dev/null
+++ b/spec/frontend/vue_shared/components/project_avatar_spec.js
@@ -0,0 +1,67 @@
+import { GlAvatar } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import ProjectAvatar from '~/vue_shared/components/project_avatar.vue';
+
+const defaultProps = {
+ projectName: 'GitLab',
+};
+
+describe('ProjectAvatar', () => {
+ let wrapper;
+
+ const findGlAvatar = () => wrapper.findComponent(GlAvatar);
+
+ const createComponent = ({ props, attrs } = {}) => {
+ wrapper = shallowMount(ProjectAvatar, { propsData: { ...defaultProps, ...props }, attrs });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders GlAvatar with correct props', () => {
+ createComponent();
+
+ const avatar = findGlAvatar();
+ expect(avatar.exists()).toBe(true);
+ expect(avatar.props()).toMatchObject({
+ alt: defaultProps.projectName,
+ entityName: defaultProps.projectName,
+ size: 32,
+ src: '',
+ });
+ });
+
+ describe('with `size` prop', () => {
+ it('renders GlAvatar with specified `size` prop', () => {
+ const mockSize = 48;
+ createComponent({ props: { size: mockSize } });
+
+ const avatar = findGlAvatar();
+ expect(avatar.props('size')).toBe(mockSize);
+ });
+ });
+
+ describe('with `projectAvatarUrl` prop', () => {
+ it('renders GlAvatar with specified `src` prop', () => {
+ const mockProjectAvatarUrl = 'https://gitlab.com';
+ createComponent({ props: { projectAvatarUrl: mockProjectAvatarUrl } });
+
+ const avatar = findGlAvatar();
+ expect(avatar.props('src')).toBe(mockProjectAvatarUrl);
+ });
+ });
+
+ describe.each`
+ alt
+ ${''}
+ ${'custom-alt'}
+ `('when `alt` prop is "$alt"', ({ alt }) => {
+ it('renders GlAvatar with specified `alt` attribute', () => {
+ createComponent({ props: { alt } });
+
+ const avatar = findGlAvatar();
+ expect(avatar.props('alt')).toBe(alt);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js b/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js
index 649eb2643f1..ab028ea52b7 100644
--- a/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js
+++ b/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js
@@ -1,5 +1,6 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
+import ProjectAvatar from '~/vue_shared/components/deprecated_project_avatar/default.vue';
import ProjectListItem from '~/vue_shared/components/project_selector/project_list_item.vue';
const localVue = createLocalVue();
@@ -53,7 +54,7 @@ describe('ProjectListItem component', () => {
it(`renders the project avatar`, () => {
wrapper = shallowMount(Component, options);
- expect(wrapper.find('.js-project-avatar').exists()).toBe(true);
+ expect(wrapper.findComponent(ProjectAvatar).exists()).toBe(true);
});
it(`renders a simple namespace name with a trailing slash`, () => {
diff --git a/spec/frontend/vue_shared/components/resizable_chart/__snapshots__/resizable_chart_container_spec.js.snap b/spec/frontend/vue_shared/components/resizable_chart/__snapshots__/resizable_chart_container_spec.js.snap
index add0c36a120..cdfe311acd9 100644
--- a/spec/frontend/vue_shared/components/resizable_chart/__snapshots__/resizable_chart_container_spec.js.snap
+++ b/spec/frontend/vue_shared/components/resizable_chart/__snapshots__/resizable_chart_container_spec.js.snap
@@ -2,20 +2,22 @@
exports[`Resizable Chart Container renders the component 1`] = `
<div>
- <div
- class="slot"
- >
- <span
- class="width"
+ <template>
+ <div
+ class="slot"
>
- 0
- </span>
-
- <span
- class="height"
- >
- 0
- </span>
- </div>
+ <span
+ class="width"
+ >
+ 0
+ </span>
+
+ <span
+ class="height"
+ >
+ 0
+ </span>
+ </div>
+ </template>
</div>
`;
diff --git a/spec/frontend/vue_shared/components/resizable_chart/resizable_chart_container_spec.js b/spec/frontend/vue_shared/components/resizable_chart/resizable_chart_container_spec.js
index 1fce3c5d0b0..40f0c0f29f2 100644
--- a/spec/frontend/vue_shared/components/resizable_chart/resizable_chart_container_spec.js
+++ b/spec/frontend/vue_shared/components/resizable_chart/resizable_chart_container_spec.js
@@ -16,10 +16,12 @@ describe('Resizable Chart Container', () => {
wrapper = mount(ResizableChartContainer, {
scopedSlots: {
default: `
- <div class="slot" slot-scope="{ width, height }">
- <span class="width">{{width}}</span>
- <span class="height">{{height}}</span>
- </div>
+ <template #default="{ width, height }">
+ <div class="slot">
+ <span class="width">{{width}}</span>
+ <span class="height">{{height}}</span>
+ </div>
+ </template>
`,
},
});
diff --git a/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js b/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js
index d58c87d66cb..395c74dcba6 100644
--- a/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js
+++ b/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js
@@ -3,7 +3,7 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import {
- expectedDownloadDropdownProps,
+ expectedDownloadDropdownPropsWithTitle,
securityReportMergeRequestDownloadPathsQueryResponse,
} from 'jest/vue_shared/security_reports/mock_data';
import createFlash from '~/flash';
@@ -80,7 +80,7 @@ describe('Merge request artifact Download', () => {
});
it('renders the download dropdown', () => {
- expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownProps);
+ expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownPropsWithTitle);
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/copyable_field_spec.js b/spec/frontend/vue_shared/components/sidebar/copyable_field_spec.js
index b99b1a66b79..3980033862e 100644
--- a/spec/frontend/vue_shared/components/sidebar/copyable_field_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/copyable_field_spec.js
@@ -1,4 +1,4 @@
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlLoadingIcon, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import CopyableField from '~/vue_shared/components/sidebar/copyable_field.vue';
@@ -14,6 +14,9 @@ describe('SidebarCopyableField', () => {
const createComponent = (propsData = defaultProps) => {
wrapper = shallowMount(CopyableField, {
propsData,
+ stubs: {
+ GlSprintf,
+ },
});
};
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
index 60903933505..06ea88c09a0 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
@@ -54,7 +54,6 @@ describe('DropdownContentsLabelsView', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
const findDropdownContent = () => wrapper.find('[data-testid="dropdown-content"]');
@@ -381,6 +380,15 @@ describe('DropdownContentsLabelsView', () => {
expect(findDropdownFooter().exists()).toBe(false);
});
+ it('does not render footer list items when `allowLabelCreate` is false and `labelsManagePath` is null', () => {
+ createComponent({
+ ...mockConfig,
+ allowLabelCreate: false,
+ labelsManagePath: null,
+ });
+ expect(findDropdownFooter().exists()).toBe(false);
+ });
+
it('renders footer list items when `state.variant` is "embedded"', () => {
expect(findDropdownFooter().exists()).toBe(true);
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
index 3f11095cb04..46ade5d5857 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
@@ -1,11 +1,14 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
+import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as actions from '~/vue_shared/components/sidebar/labels_select_vue/store/actions';
import * as types from '~/vue_shared/components/sidebar/labels_select_vue/store/mutation_types';
import defaultState from '~/vue_shared/components/sidebar/labels_select_vue/store/state';
+jest.mock('~/flash');
+
describe('LabelsSelect Actions', () => {
let state;
const mockInitialState = {
@@ -91,10 +94,6 @@ describe('LabelsSelect Actions', () => {
});
describe('receiveLabelsFailure', () => {
- beforeEach(() => {
- setFixtures('<div class="flash-container"></div>');
- });
-
it('sets value `state.labelsFetchInProgress` to `false`', (done) => {
testAction(
actions.receiveLabelsFailure,
@@ -109,9 +108,7 @@ describe('LabelsSelect Actions', () => {
it('shows flash error', () => {
actions.receiveLabelsFailure({ commit: () => {} });
- expect(document.querySelector('.flash-container .flash-text').innerText.trim()).toBe(
- 'Error fetching labels.',
- );
+ expect(createFlash).toHaveBeenCalledWith({ message: 'Error fetching labels.' });
});
});
@@ -186,10 +183,6 @@ describe('LabelsSelect Actions', () => {
});
describe('receiveCreateLabelFailure', () => {
- beforeEach(() => {
- setFixtures('<div class="flash-container"></div>');
- });
-
it('sets value `state.labelCreateInProgress` to `false`', (done) => {
testAction(
actions.receiveCreateLabelFailure,
@@ -204,9 +197,7 @@ describe('LabelsSelect Actions', () => {
it('shows flash error', () => {
actions.receiveCreateLabelFailure({ commit: () => {} });
- expect(document.querySelector('.flash-container .flash-text').innerText.trim()).toBe(
- 'Error creating label.',
- );
+ expect(createFlash).toHaveBeenCalledWith({ message: 'Error creating label.' });
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
index ab266ac8aed..1d2a9c34599 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
@@ -153,7 +153,16 @@ describe('LabelsSelect Mutations', () => {
});
describe(`${types.UPDATE_SELECTED_LABELS}`, () => {
- const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
+ let labels;
+
+ beforeEach(() => {
+ labels = [
+ { id: 1, title: 'scoped::test', set: true },
+ { id: 2, set: false, title: 'scoped::one' },
+ { id: 3, title: '' },
+ { id: 4, title: '' },
+ ];
+ });
it('updates `state.labels` to include `touched` and `set` props based on provided `labels` param', () => {
const updatedLabelIds = [2];
@@ -169,5 +178,23 @@ describe('LabelsSelect Mutations', () => {
}
});
});
+
+ describe('when label is scoped', () => {
+ it('unsets the currently selected scoped label and sets the current label', () => {
+ const state = {
+ labels,
+ };
+ mutations[types.UPDATE_SELECTED_LABELS](state, {
+ labels: [{ id: 2, title: 'scoped::one' }],
+ });
+
+ expect(state.labels).toEqual([
+ { id: 1, title: 'scoped::test', set: false },
+ { id: 2, set: true, title: 'scoped::one', touched: true },
+ { id: 3, title: '' },
+ { id: 4, title: '' },
+ ]);
+ });
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_value_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_value_spec.js
index 59f3268c000..b3ffee2d020 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_value_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_value_spec.js
@@ -1,88 +1,97 @@
import { GlLabel } from '@gitlab/ui';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
+import { shallowMount } from '@vue/test-utils';
import DropdownValue from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_value.vue';
-import labelsSelectModule from '~/vue_shared/components/sidebar/labels_select_widget/store';
-
-import { mockConfig, mockRegularLabel, mockScopedLabel } from './mock_data';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
+import { mockRegularLabel, mockScopedLabel } from './mock_data';
describe('DropdownValue', () => {
let wrapper;
- const createComponent = (initialState = {}, slots = {}) => {
- const store = new Vuex.Store(labelsSelectModule());
-
- store.dispatch('setInitialState', { ...mockConfig, ...initialState });
+ const findAllLabels = () => wrapper.findAllComponents(GlLabel);
+ const findRegularLabel = () => findAllLabels().at(0);
+ const findScopedLabel = () => findAllLabels().at(1);
+ const findWrapper = () => wrapper.find('[data-testid="value-wrapper"]');
+ const findEmptyPlaceholder = () => wrapper.find('[data-testid="empty-placeholder"]');
+ const createComponent = (props = {}, slots = {}) => {
wrapper = shallowMount(DropdownValue, {
- localVue,
- store,
slots,
+ propsData: {
+ selectedLabels: [mockRegularLabel, mockScopedLabel],
+ allowLabelRemove: true,
+ allowScopedLabels: true,
+ labelsFilterBasePath: '/gitlab-org/my-project/issues',
+ labelsFilterParam: 'label_name',
+ ...props,
+ },
});
};
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
- describe('methods', () => {
- describe('labelFilterUrl', () => {
- it('returns a label filter URL based on provided label param', () => {
- createComponent();
-
- expect(wrapper.vm.labelFilterUrl(mockRegularLabel)).toBe(
- '/gitlab-org/my-project/issues?label_name[]=Foo%20Label',
- );
- });
+ describe('when there are no labels', () => {
+ beforeEach(() => {
+ createComponent(
+ {
+ selectedLabels: [],
+ },
+ {
+ default: 'None',
+ },
+ );
});
- describe('scopedLabel', () => {
- beforeEach(() => {
- createComponent();
- });
+ it('does not apply `has-labels` class to the wrapping container', () => {
+ expect(findWrapper().classes()).not.toContain('has-labels');
+ });
- it('returns `true` when provided label param is a scoped label', () => {
- expect(wrapper.vm.scopedLabel(mockScopedLabel)).toBe(true);
- });
+ it('renders an empty placeholder', () => {
+ expect(findEmptyPlaceholder().exists()).toBe(true);
+ expect(findEmptyPlaceholder().text()).toBe('None');
+ });
- it('returns `false` when provided label param is a regular label', () => {
- expect(wrapper.vm.scopedLabel(mockRegularLabel)).toBe(false);
- });
+ it('does not render any labels', () => {
+ expect(findAllLabels().length).toBe(0);
});
});
- describe('template', () => {
- it('renders class `has-labels` on component container element when `selectedLabels` is not empty', () => {
+ describe('when there are labels', () => {
+ beforeEach(() => {
createComponent();
+ });
- expect(wrapper.attributes('class')).toContain('has-labels');
+ it('applies `has-labels` class to the wrapping container', () => {
+ expect(findWrapper().classes()).toContain('has-labels');
});
- it('renders element containing `None` when `selectedLabels` is empty', () => {
- createComponent(
- {
- selectedLabels: [],
- },
- {
- default: 'None',
- },
- );
- const noneEl = wrapper.find('span.text-secondary');
+ it('does not render an empty placeholder', () => {
+ expect(findEmptyPlaceholder().exists()).toBe(false);
+ });
- expect(noneEl.exists()).toBe(true);
- expect(noneEl.text()).toBe('None');
+ it('renders a list of two labels', () => {
+ expect(findAllLabels().length).toBe(2);
});
- it('renders labels when `selectedLabels` is not empty', () => {
- createComponent();
+ it('passes correct props to the regular label', () => {
+ expect(findRegularLabel().props('target')).toBe(
+ '/gitlab-org/my-project/issues?label_name[]=Foo%20Label',
+ );
+ expect(findRegularLabel().props('scoped')).toBe(false);
+ });
+
+ it('passes correct props to the scoped label', () => {
+ expect(findScopedLabel().props('target')).toBe(
+ '/gitlab-org/my-project/issues?label_name[]=Foo%3A%3ABar',
+ );
+ expect(findScopedLabel().props('scoped')).toBe(true);
+ });
- expect(wrapper.findAll(GlLabel).length).toBe(2);
+ it('emits `onLabelRemove` event with the correct ID', () => {
+ findRegularLabel().vm.$emit('close');
+ expect(wrapper.emitted('onLabelRemove')).toEqual([[mockRegularLabel.id]]);
});
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
index ee1346c362f..66971446f47 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
@@ -34,6 +34,10 @@ describe('LabelsSelectRoot', () => {
stubs: {
'dropdown-contents': DropdownContents,
},
+ provide: {
+ iid: '1',
+ projectPath: 'test',
+ },
});
};
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/actions_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/actions_spec.js
index 7ef4b769b6b..27de7de2411 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/actions_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/actions_spec.js
@@ -1,11 +1,14 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
+import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as actions from '~/vue_shared/components/sidebar/labels_select_widget/store/actions';
import * as types from '~/vue_shared/components/sidebar/labels_select_widget/store/mutation_types';
import defaultState from '~/vue_shared/components/sidebar/labels_select_widget/store/state';
+jest.mock('~/flash');
+
describe('LabelsSelect Actions', () => {
let state;
const mockInitialState = {
@@ -91,10 +94,6 @@ describe('LabelsSelect Actions', () => {
});
describe('receiveLabelsFailure', () => {
- beforeEach(() => {
- setFixtures('<div class="flash-container"></div>');
- });
-
it('sets value `state.labelsFetchInProgress` to `false`', (done) => {
testAction(
actions.receiveLabelsFailure,
@@ -109,9 +108,7 @@ describe('LabelsSelect Actions', () => {
it('shows flash error', () => {
actions.receiveLabelsFailure({ commit: () => {} });
- expect(document.querySelector('.flash-container .flash-text').innerText.trim()).toBe(
- 'Error fetching labels.',
- );
+ expect(createFlash).toHaveBeenCalledWith({ message: 'Error fetching labels.' });
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/mutations_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/mutations_spec.js
index acb275b5d90..9e965cb33e8 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/mutations_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/store/mutations_spec.js
@@ -120,7 +120,16 @@ describe('LabelsSelect Mutations', () => {
});
describe(`${types.UPDATE_SELECTED_LABELS}`, () => {
- const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
+ let labels;
+
+ beforeEach(() => {
+ labels = [
+ { id: 1, title: 'scoped::test', set: true },
+ { id: 2, set: false, title: 'scoped::one' },
+ { id: 3, title: '' },
+ { id: 4, title: '' },
+ ];
+ });
it('updates `state.labels` to include `touched` and `set` props based on provided `labels` param', () => {
const updatedLabelIds = [2];
@@ -136,5 +145,23 @@ describe('LabelsSelect Mutations', () => {
}
});
});
+
+ describe('when label is scoped', () => {
+ it('unsets the currently selected scoped label and sets the current label', () => {
+ const state = {
+ labels,
+ };
+ mutations[types.UPDATE_SELECTED_LABELS](state, {
+ labels: [{ id: 2, title: 'scoped::one' }],
+ });
+
+ expect(state.labels).toEqual([
+ { id: 1, title: 'scoped::test', set: false },
+ { id: 2, set: true, title: 'scoped::one', touched: true },
+ { id: 3, title: '' },
+ { id: 4, title: '' },
+ ]);
+ });
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/todo_button_spec.js b/spec/frontend/vue_shared/components/sidebar/todo_button_spec.js
index 8043bb7785b..de3e1ccfb03 100644
--- a/spec/frontend/vue_shared/components/todo_button_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/todo_button_spec.js
@@ -1,9 +1,10 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
-import TodoButton from '~/vue_shared/components/todo_button.vue';
+import TodoButton from '~/vue_shared/components/sidebar/todo_toggle/todo_button.vue';
describe('Todo Button', () => {
let wrapper;
+ let dispatchEventSpy;
const createComponent = (props = {}, mountFn = shallowMount) => {
wrapper = mountFn(TodoButton, {
@@ -13,8 +14,17 @@ describe('Todo Button', () => {
});
};
+ beforeEach(() => {
+ dispatchEventSpy = jest.spyOn(document, 'dispatchEvent');
+ jest.spyOn(document, 'querySelector').mockReturnValue({
+ innerText: 2,
+ });
+ });
+
afterEach(() => {
wrapper.destroy();
+ dispatchEventSpy = null;
+ jest.clearAllMocks();
});
it('renders GlButton', () => {
@@ -30,6 +40,16 @@ describe('Todo Button', () => {
expect(wrapper.emitted().click).toBeTruthy();
});
+ it('calls dispatchDocumentEvent to update global To-Do counter correctly', () => {
+ createComponent({}, mount);
+ wrapper.find(GlButton).trigger('click');
+ const dispatchedEvent = dispatchEventSpy.mock.calls[0][0];
+
+ expect(dispatchEventSpy).toHaveBeenCalledTimes(1);
+ expect(dispatchedEvent.detail).toEqual({ count: 1 });
+ expect(dispatchedEvent.type).toBe('todo:toggle');
+ });
+
it.each`
label | isTodo
${'Mark as done'} | ${true}
diff --git a/spec/frontend/vue_shared/components/editor_lite_spec.js b/spec/frontend/vue_shared/components/source_editor_spec.js
index badd5aed0e3..dca4d60e23c 100644
--- a/spec/frontend/vue_shared/components/editor_lite_spec.js
+++ b/spec/frontend/vue_shared/components/source_editor_spec.js
@@ -1,12 +1,12 @@
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { EDITOR_READY_EVENT } from '~/editor/constants';
-import Editor from '~/editor/editor_lite';
-import EditorLite from '~/vue_shared/components/editor_lite.vue';
+import Editor from '~/editor/source_editor';
+import SourceEditor from '~/vue_shared/components/source_editor.vue';
-jest.mock('~/editor/editor_lite');
+jest.mock('~/editor/source_editor');
-describe('Editor Lite component', () => {
+describe('Source Editor component', () => {
let wrapper;
let mockInstance;
@@ -30,7 +30,7 @@ describe('Editor Lite component', () => {
};
});
function createComponent(props = {}) {
- wrapper = shallowMount(EditorLite, {
+ wrapper = shallowMount(SourceEditor, {
propsData: {
value,
fileName,
@@ -73,10 +73,10 @@ describe('Editor Lite component', () => {
createComponent({ value: undefined });
expect(spy).not.toHaveBeenCalled();
- expect(wrapper.find('[id^="editor-lite-"]').exists()).toBe(true);
+ expect(wrapper.find('[id^="source-editor-"]').exists()).toBe(true);
});
- it('initialises Editor Lite instance', () => {
+ it('initialises Source Editor instance', () => {
const el = wrapper.find({ ref: 'editor' }).element;
expect(createInstanceMock).toHaveBeenCalledWith({
el,
@@ -111,7 +111,7 @@ describe('Editor Lite component', () => {
expect(wrapper.emitted().input).toEqual([[value]]);
});
- it('emits EDITOR_READY_EVENT event when the Editor Lite is ready', async () => {
+ it('emits EDITOR_READY_EVENT event when the Source Editor is ready', async () => {
const el = wrapper.find({ ref: 'editor' }).element;
expect(wrapper.emitted()[EDITOR_READY_EVENT]).toBeUndefined();
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index 87fe8619f28..538e67ef354 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -1,5 +1,5 @@
-import { GlSkeletonLoader, GlSprintf, GlIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlSkeletonLoader, GlIcon } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import { AVAILABILITY_STATUS } from '~/set_status_modal/utils';
import UserNameWithStatus from '~/sidebar/components/assignees/user_name_with_status.vue';
import UserPopover from '~/vue_shared/components/user_popover/user_popover.vue';
@@ -13,6 +13,7 @@ const DEFAULT_PROPS = {
bio: null,
workInformation: null,
status: null,
+ pronouns: 'they/them',
loaded: true,
},
};
@@ -30,23 +31,18 @@ describe('User Popover Component', () => {
wrapper.destroy();
});
- const findByTestId = (testid) => wrapper.find(`[data-testid="${testid}"]`);
const findUserStatus = () => wrapper.find('.js-user-status');
const findTarget = () => document.querySelector('.js-user-link');
const findUserName = () => wrapper.find(UserNameWithStatus);
- const findSecurityBotDocsLink = () => findByTestId('user-popover-bot-docs-link');
+ const findSecurityBotDocsLink = () => wrapper.findByTestId('user-popover-bot-docs-link');
const createWrapper = (props = {}, options = {}) => {
- wrapper = shallowMount(UserPopover, {
+ wrapper = mountExtended(UserPopover, {
propsData: {
...DEFAULT_PROPS,
target: findTarget(),
...props,
},
- stubs: {
- GlSprintf,
- UserNameWithStatus,
- },
...options,
});
};
@@ -232,6 +228,12 @@ describe('User Popover Component', () => {
expect(wrapper.text()).not.toContain('(Busy)');
});
+
+ it('passes `pronouns` prop to `UserNameWithStatus` component', () => {
+ createWrapper();
+
+ expect(findUserName().props('pronouns')).toBe('they/them');
+ });
});
describe('bot user', () => {
diff --git a/spec/frontend/vue_shared/components/user_select_spec.js b/spec/frontend/vue_shared/components/user_select_spec.js
index 0fabc6525ea..b777ac0a0a4 100644
--- a/spec/frontend/vue_shared/components/user_select_spec.js
+++ b/spec/frontend/vue_shared/components/user_select_spec.js
@@ -275,48 +275,4 @@ describe('User select dropdown', () => {
expect(findEmptySearchResults().exists()).toBe(true);
});
});
-
- // TODO Remove this test after the following issue is resolved in the backend
- // https://gitlab.com/gitlab-org/gitlab/-/issues/329750
- describe('temporary error suppression', () => {
- beforeEach(() => {
- jest.spyOn(console, 'error').mockImplementation();
- });
-
- const nullError = { message: 'Cannot return null for non-nullable field GroupMember.user' };
-
- it.each`
- mockErrors
- ${[nullError]}
- ${[nullError, nullError]}
- `('does not emit errors', async ({ mockErrors }) => {
- createComponent({
- searchQueryHandler: jest.fn().mockResolvedValue({
- errors: mockErrors,
- }),
- });
- await waitForSearch();
-
- expect(wrapper.emitted()).toEqual({});
- // eslint-disable-next-line no-console
- expect(console.error).toHaveBeenCalled();
- });
-
- it.each`
- mockErrors
- ${[{ message: 'serious error' }]}
- ${[nullError, { message: 'serious error' }]}
- `('emits error when non-null related errors are included', async ({ mockErrors }) => {
- createComponent({
- searchQueryHandler: jest.fn().mockResolvedValue({
- errors: mockErrors,
- }),
- });
- await waitForSearch();
-
- expect(wrapper.emitted('error')).toEqual([[]]);
- // eslint-disable-next-line no-console
- expect(console.error).not.toHaveBeenCalled();
- });
- });
});
diff --git a/spec/frontend/vue_shared/components/web_ide_link_spec.js b/spec/frontend/vue_shared/components/web_ide_link_spec.js
index 5a6c91bda9f..0fd4d0dab87 100644
--- a/spec/frontend/vue_shared/components/web_ide_link_spec.js
+++ b/spec/frontend/vue_shared/components/web_ide_link_spec.js
@@ -15,8 +15,8 @@ const ACTION_EDIT = {
tooltip: '',
attrs: {
'data-qa-selector': 'edit_button',
- 'data-track-event': 'click_edit',
- 'data-track-label': 'Edit',
+ 'data-track-action': 'click_consolidated_edit',
+ 'data-track-label': 'edit',
},
};
const ACTION_EDIT_CONFIRM_FORK = {
@@ -32,8 +32,8 @@ const ACTION_WEB_IDE = {
text: 'Web IDE',
attrs: {
'data-qa-selector': 'web_ide_button',
- 'data-track-event': 'click_edit_ide',
- 'data-track-label': 'Web IDE',
+ 'data-track-action': 'click_consolidated_edit_ide',
+ 'data-track-label': 'web_ide',
},
};
const ACTION_WEB_IDE_CONFIRM_FORK = {
diff --git a/spec/frontend/vue_shared/new_namespace/components/welcome_spec.js b/spec/frontend/vue_shared/new_namespace/components/welcome_spec.js
index 602213fca83..2d51f6dbeeb 100644
--- a/spec/frontend/vue_shared/new_namespace/components/welcome_spec.js
+++ b/spec/frontend/vue_shared/new_namespace/components/welcome_spec.js
@@ -1,12 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { mockTracking } from 'helpers/tracking_helper';
-import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
-import { getExperimentData } from '~/experimentation/utils';
import WelcomePage from '~/vue_shared/new_namespace/components/welcome.vue';
-jest.mock('~/experimentation/utils', () => ({ getExperimentData: jest.fn() }));
-
describe('Welcome page', () => {
let wrapper;
let trackingSpy;
@@ -28,7 +24,6 @@ describe('Welcome page', () => {
beforeEach(() => {
trackingSpy = mockTracking('_category_', document, jest.spyOn);
trackingSpy.mockImplementation(() => {});
- getExperimentData.mockReturnValue(undefined);
});
afterEach(() => {
@@ -38,7 +33,7 @@ describe('Welcome page', () => {
});
it('tracks link clicks', async () => {
- createComponent({ propsData: { experiment: 'foo', panels: [{ name: 'test', href: '#' }] } });
+ createComponent({ propsData: { panels: [{ name: 'test', href: '#' }] } });
const link = wrapper.find('a');
link.trigger('click');
await nextTick();
@@ -47,25 +42,6 @@ describe('Welcome page', () => {
});
});
- it('adds experiment data if in experiment', async () => {
- const mockExperimentData = 'data';
- getExperimentData.mockReturnValue(mockExperimentData);
-
- createComponent({ propsData: { experiment: 'foo', panels: [{ name: 'test', href: '#' }] } });
- const link = wrapper.find('a');
- link.trigger('click');
- await nextTick();
- return wrapper.vm.$nextTick().then(() => {
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_tab', {
- label: 'test',
- context: {
- data: mockExperimentData,
- schema: TRACKING_CONTEXT_SCHEMA,
- },
- });
- });
- });
-
it('renders footer slot if provided', () => {
const DUMMY = 'Test message';
createComponent({
diff --git a/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js b/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
index 30937921900..6115dc6e61b 100644
--- a/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
+++ b/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
@@ -37,13 +37,6 @@ describe('Experimental new project creation app', () => {
window.location.hash = '';
});
- it('passes experiment to welcome component if provided', () => {
- const EXPERIMENT = 'foo';
- createComponent({ propsData: { experiment: EXPERIMENT } });
-
- expect(findWelcomePage().props().experiment).toBe(EXPERIMENT);
- });
-
describe('with empty hash', () => {
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/vue_shared/oncall_schedules_list_spec.js b/spec/frontend/vue_shared/oncall_schedules_list_spec.js
index 5c30809c09b..f83a5187b8b 100644
--- a/spec/frontend/vue_shared/oncall_schedules_list_spec.js
+++ b/spec/frontend/vue_shared/oncall_schedules_list_spec.js
@@ -18,7 +18,7 @@ const mockSchedules = [
},
];
-const userName = 'User 1';
+const userName = "O'User";
describe('On-call schedules list', () => {
let wrapper;
diff --git a/spec/frontend/vue_shared/plugins/global_toast_spec.js b/spec/frontend/vue_shared/plugins/global_toast_spec.js
index 89f43a5e556..322586a772c 100644
--- a/spec/frontend/vue_shared/plugins/global_toast_spec.js
+++ b/spec/frontend/vue_shared/plugins/global_toast_spec.js
@@ -1,11 +1,10 @@
-import Vue from 'vue';
-import toast from '~/vue_shared/plugins/global_toast';
+import toast, { instance } from '~/vue_shared/plugins/global_toast';
describe('Global toast', () => {
let spyFunc;
beforeEach(() => {
- spyFunc = jest.spyOn(Vue.prototype.$toast, 'show').mockImplementation(() => {});
+ spyFunc = jest.spyOn(instance.$toast, 'show').mockImplementation(() => {});
});
afterEach(() => {
@@ -18,7 +17,7 @@ describe('Global toast', () => {
toast(arg1, arg2);
- expect(Vue.prototype.$toast.show).toHaveBeenCalledTimes(1);
- expect(Vue.prototype.$toast.show).toHaveBeenCalledWith(arg1, arg2);
+ expect(instance.$toast.show).toHaveBeenCalledTimes(1);
+ expect(instance.$toast.show).toHaveBeenCalledWith(arg1, arg2);
});
});
diff --git a/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js b/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js
index 517eee6a729..facbd51168c 100644
--- a/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js
+++ b/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js
@@ -9,6 +9,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { humanize } from '~/lib/utils/text_utility';
import { redirectTo } from '~/lib/utils/url_utility';
import ManageViaMr from '~/vue_shared/security_configuration/components/manage_via_mr.vue';
+import { REPORT_TYPE_SAST } from '~/vue_shared/security_reports/constants';
import { buildConfigureSecurityFeatureMockFactory } from './apollo_mocks';
jest.mock('~/lib/utils/url_utility');
@@ -169,6 +170,29 @@ describe('ManageViaMr component', () => {
},
);
+ describe('canRender static method', () => {
+ it.each`
+ context | type | available | configured | canEnableByMergeRequest | expectedValue
+ ${'an unconfigured feature'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${true} | ${true}
+ ${'a configured feature'} | ${REPORT_TYPE_SAST} | ${true} | ${true} | ${true} | ${false}
+ ${'an unavailable feature'} | ${REPORT_TYPE_SAST} | ${false} | ${false} | ${true} | ${false}
+ ${'a feature which cannot be enabled via MR'} | ${REPORT_TYPE_SAST} | ${true} | ${false} | ${false} | ${false}
+ ${'an unknown feature'} | ${'foo'} | ${true} | ${false} | ${true} | ${false}
+ `(
+ 'given $context returns $expectedValue',
+ ({ type, available, configured, canEnableByMergeRequest, expectedValue }) => {
+ expect(
+ ManageViaMr.canRender({
+ type,
+ available,
+ configured,
+ canEnableByMergeRequest,
+ }),
+ ).toBe(expectedValue);
+ },
+ );
+ });
+
describe('button props', () => {
it('passes the variant and category props to the GlButton', () => {
const variant = 'danger';
diff --git a/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js b/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js
index 9138d2d3f4c..4b75da0b126 100644
--- a/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js
+++ b/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js
@@ -40,14 +40,13 @@ describe('SecurityReportDownloadDropdown component', () => {
expect(findDropdown().props('loading')).toBe(false);
});
- it('renders a dropdown items for each artifact', () => {
+ it('renders a dropdown item for each artifact', () => {
artifacts.forEach((artifact, i) => {
const item = findDropdownItems().at(i);
expect(item.text()).toContain(artifact.name);
- expect(item.attributes()).toMatchObject({
- href: artifact.path,
- download: expect.any(String),
- });
+
+ expect(item.element.getAttribute('href')).toBe(artifact.path);
+ expect(item.element.getAttribute('download')).toBeDefined();
});
});
});
@@ -61,4 +60,32 @@ describe('SecurityReportDownloadDropdown component', () => {
expect(findDropdown().props('loading')).toBe(true);
});
});
+
+ describe('given title props', () => {
+ beforeEach(() => {
+ createComponent({ artifacts: [], loading: true, title: 'test title' });
+ });
+
+ it('should render title', () => {
+ expect(findDropdown().attributes('title')).toBe('test title');
+ });
+
+ it('should not render text', () => {
+ expect(findDropdown().text().trim()).toBe('');
+ });
+ });
+
+ describe('given text props', () => {
+ beforeEach(() => {
+ createComponent({ artifacts: [], loading: true, text: 'test text' });
+ });
+
+ it('should not render title', () => {
+ expect(findDropdown().props().title).not.toBeDefined();
+ });
+
+ it('should render text', () => {
+ expect(findDropdown().props().text).toContain('test text');
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/security_reports/mock_data.js b/spec/frontend/vue_shared/security_reports/mock_data.js
index bd9ce3b7314..06631710509 100644
--- a/spec/frontend/vue_shared/security_reports/mock_data.js
+++ b/spec/frontend/vue_shared/security_reports/mock_data.js
@@ -581,9 +581,18 @@ export const secretDetectionArtifacts = [
},
];
-export const expectedDownloadDropdownProps = {
+export const expectedDownloadDropdownPropsWithTitle = {
loading: false,
artifacts: [...secretDetectionArtifacts, ...sastArtifacts],
+ text: '',
+ title: 'Download results',
+};
+
+export const expectedDownloadDropdownPropsWithText = {
+ loading: false,
+ artifacts: [...secretDetectionArtifacts, ...sastArtifacts],
+ title: '',
+ text: 'Download results',
};
/**
diff --git a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
index 038d7754776..bef538e1ff1 100644
--- a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
+++ b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
@@ -8,7 +8,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { trimText } from 'helpers/text_helper';
import waitForPromises from 'helpers/wait_for_promises';
import {
- expectedDownloadDropdownProps,
+ expectedDownloadDropdownPropsWithText,
securityReportMergeRequestDownloadPathsQueryNoArtifactsResponse,
securityReportMergeRequestDownloadPathsQueryResponse,
sastDiffSuccessMock,
@@ -99,7 +99,7 @@ describe('Security reports app', () => {
});
it('renders the download dropdown', () => {
- expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownProps);
+ expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownPropsWithText);
});
it('renders the expected message', () => {
@@ -203,7 +203,7 @@ describe('Security reports app', () => {
});
it('renders the download dropdown', () => {
- expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownProps);
+ expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownPropsWithText);
});
});
@@ -225,7 +225,7 @@ describe('Security reports app', () => {
});
it('renders the download dropdown', () => {
- expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownProps);
+ expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownPropsWithText);
});
});
@@ -247,7 +247,7 @@ describe('Security reports app', () => {
});
it('renders the download dropdown', () => {
- expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownProps);
+ expect(findDownloadDropdown().props()).toEqual(expectedDownloadDropdownPropsWithText);
});
});
diff --git a/spec/frontend/vuex_shared/bindings_spec.js b/spec/frontend/vuex_shared/bindings_spec.js
index 0f91a09018f..4e210143c8c 100644
--- a/spec/frontend/vuex_shared/bindings_spec.js
+++ b/spec/frontend/vuex_shared/bindings_spec.js
@@ -3,7 +3,7 @@ import { mapComputed } from '~/vuex_shared/bindings';
describe('Binding utils', () => {
describe('mapComputed', () => {
- const defaultArgs = [['baz'], 'bar', 'foo'];
+ const defaultArgs = [['baz'], 'bar', 'foo', 'qux'];
const createDummy = (mapComputedArgs = defaultArgs) => ({
computed: {
@@ -29,12 +29,18 @@ describe('Binding utils', () => {
},
};
- it('returns an object with keys equal to the first fn parameter ', () => {
+ it('returns an object with keys equal to the first fn parameter', () => {
const keyList = ['foo1', 'foo2'];
const result = mapComputed(keyList, 'foo', 'bar');
expect(Object.keys(result)).toEqual(keyList);
});
+ it('returns an object with keys equal to the first fn parameter when the root is a function', () => {
+ const keyList = ['foo1', 'foo2'];
+ const result = mapComputed(keyList, 'foo', (state) => state.bar);
+ expect(Object.keys(result)).toEqual(keyList);
+ });
+
it('returned object has set and get function', () => {
const result = mapComputed(['baz'], 'foo', 'bar');
expect(result.baz.set).toBeDefined();
diff --git a/spec/frontend_integration/diffs/diffs_interopability_spec.js b/spec/frontend_integration/diffs/diffs_interopability_spec.js
index cb7659e16d3..448641ed834 100644
--- a/spec/frontend_integration/diffs/diffs_interopability_spec.js
+++ b/spec/frontend_integration/diffs/diffs_interopability_spec.js
@@ -8,15 +8,6 @@ import {
getCodeElementFromLineNumber,
} from './diffs_interopability_api';
-jest.mock('~/vue_shared/mixins/gl_feature_flags_mixin', () => () => ({
- inject: {
- glFeatures: {
- from: 'window.gon.features',
- default: () => global.window.gon?.features,
- },
- },
-}));
-
const TEST_PROJECT_PATH = 'gitlab-org/gitlab-test';
const TEST_BASE_URL = `/${TEST_PROJECT_PATH}/-/merge_requests/1/`;
const TEST_DIFF_FILE = 'files/js/commit.coffee';
@@ -114,48 +105,41 @@ describe('diffs third party interoperability', () => {
);
describe.each`
- desc | unifiedDiffComponents | view | rowSelector | codeSelector | expectation
- ${'inline view'} | ${false} | ${'inline'} | ${'tr.line_holder'} | ${'td.line_content'} | ${EXPECT_INLINE}
- ${'parallel view left side'} | ${false} | ${'parallel'} | ${'tr.line_holder'} | ${'td.line_content.left-side'} | ${EXPECT_PARALLEL_LEFT_SIDE}
- ${'parallel view right side'} | ${false} | ${'parallel'} | ${'tr.line_holder'} | ${'td.line_content.right-side'} | ${EXPECT_PARALLEL_RIGHT_SIDE}
- ${'inline view'} | ${true} | ${'inline'} | ${'.diff-tr.line_holder'} | ${'.diff-td.line_content'} | ${EXPECT_INLINE}
- ${'parallel view left side'} | ${true} | ${'parallel'} | ${'.diff-tr.line_holder'} | ${'.diff-td.line_content.left-side'} | ${EXPECT_PARALLEL_LEFT_SIDE}
- ${'parallel view right side'} | ${true} | ${'parallel'} | ${'.diff-tr.line_holder'} | ${'.diff-td.line_content.right-side'} | ${EXPECT_PARALLEL_RIGHT_SIDE}
- `(
- '$desc (unifiedDiffComponents=$unifiedDiffComponents)',
- ({ unifiedDiffComponents, view, rowSelector, codeSelector, expectation }) => {
- beforeEach(async () => {
- global.jsdom.reconfigure({
- url: `${TEST_HOST}/${TEST_BASE_URL}/diffs?view=${view}`,
- });
- window.gon.features = { unifiedDiffComponents };
-
- vm = startDiffsApp();
-
- await waitFor(() => expect(hasLines(rowSelector)).toBe(true));
+ desc | view | rowSelector | codeSelector | expectation
+ ${'inline view'} | ${'inline'} | ${'.diff-tr.line_holder'} | ${'.diff-td.line_content'} | ${EXPECT_INLINE}
+ ${'parallel view left side'} | ${'parallel'} | ${'.diff-tr.line_holder'} | ${'.diff-td.line_content.left-side'} | ${EXPECT_PARALLEL_LEFT_SIDE}
+ ${'parallel view right side'} | ${'parallel'} | ${'.diff-tr.line_holder'} | ${'.diff-td.line_content.right-side'} | ${EXPECT_PARALLEL_RIGHT_SIDE}
+ `('$desc', ({ view, rowSelector, codeSelector, expectation }) => {
+ beforeEach(async () => {
+ global.jsdom.reconfigure({
+ url: `${TEST_HOST}/${TEST_BASE_URL}/diffs?view=${view}`,
});
- it('should match diff model', () => {
- const lines = findLineElements(rowSelector);
- const codes = findCodeElements(lines, codeSelector);
+ vm = startDiffsApp();
- expect(getCodeElementsInteropModel(codes)).toEqual(expectation);
- });
+ await waitFor(() => expect(hasLines(rowSelector)).toBe(true));
+ });
+
+ it('should match diff model', () => {
+ const lines = findLineElements(rowSelector);
+ const codes = findCodeElements(lines, codeSelector);
+
+ expect(getCodeElementsInteropModel(codes)).toEqual(expectation);
+ });
- it.each`
- lineNumber | part | expectedText
- ${4} | ${'base'} | ${'new CommitFile(this)'}
- ${4} | ${'head'} | ${'new CommitFile(@)'}
- ${2} | ${'base'} | ${'constructor: ->'}
- ${2} | ${'head'} | ${'constructor: ->'}
- `(
- 'should find code element lineNumber=$lineNumber part=$part',
- ({ lineNumber, part, expectedText }) => {
- const codeElement = getCodeElementFromLineNumber(findDiffFile(), lineNumber, part);
-
- expect(codeElement.textContent.trim()).toBe(expectedText);
- },
- );
- },
- );
+ it.each`
+ lineNumber | part | expectedText
+ ${4} | ${'base'} | ${'new CommitFile(this)'}
+ ${4} | ${'head'} | ${'new CommitFile(@)'}
+ ${2} | ${'base'} | ${'constructor: ->'}
+ ${2} | ${'head'} | ${'constructor: ->'}
+ `(
+ 'should find code element lineNumber=$lineNumber part=$part',
+ ({ lineNumber, part, expectedText }) => {
+ const codeElement = getCodeElementFromLineNumber(findDiffFile(), lineNumber, part);
+
+ expect(codeElement.textContent.trim()).toBe(expectedText);
+ },
+ );
+ });
});
diff --git a/spec/frontend_integration/ide/helpers/ide_helper.js b/spec/frontend_integration/ide/helpers/ide_helper.js
index 6c09b44d891..56b2e298aa3 100644
--- a/spec/frontend_integration/ide/helpers/ide_helper.js
+++ b/spec/frontend_integration/ide/helpers/ide_helper.js
@@ -7,6 +7,7 @@ import {
screen,
findByText,
} from '@testing-library/dom';
+import { editor as monacoEditor } from 'monaco-editor';
const isFolderRowOpen = (row) => row.matches('.folder.is-open');
@@ -23,7 +24,10 @@ export const switchLeftSidebarTab = (name) => {
export const getStatusBar = () => document.querySelector('.ide-status-bar');
export const waitForMonacoEditor = () =>
- new Promise((resolve) => window.monaco.editor.onDidCreateEditor(resolve));
+ new Promise((resolve) => monacoEditor.onDidCreateEditor(resolve));
+
+export const waitForEditorDispose = (instance) =>
+ new Promise((resolve) => instance.onDidDispose(resolve));
export const waitForEditorModelChange = (instance) =>
new Promise((resolve) => instance.onDidChangeModel(resolve));
@@ -38,14 +42,14 @@ export const findAndSetEditorValue = async (value) => {
const editor = await findMonacoEditor();
const uri = editor.getAttribute('data-uri');
- window.monaco.editor.getModel(uri).setValue(value);
+ monacoEditor.getModel(uri).setValue(value);
};
export const getEditorValue = async () => {
const editor = await findMonacoEditor();
const uri = editor.getAttribute('data-uri');
- return window.monaco.editor.getModel(uri).getValue();
+ return monacoEditor.getModel(uri).getValue();
};
const findTreeBody = () => screen.findByTestId('ide-tree-body');
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/diffs.js b/spec/frontend_integration/test_helpers/mock_server/routes/diffs.js
index 8301627e842..eccdce268f0 100644
--- a/spec/frontend_integration/test_helpers/mock_server/routes/diffs.js
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/diffs.js
@@ -12,10 +12,7 @@ export default (server) => {
return {
...result,
pagination: withValues(pagination, {
- current_page: null,
- next_page: null,
total_pages: 1,
- next_page_href: null,
}),
};
});
diff --git a/spec/frontend_integration/test_helpers/setup/setup_mock_server.js b/spec/frontend_integration/test_helpers/setup/setup_mock_server.js
index 43a21deed25..6f4832992a5 100644
--- a/spec/frontend_integration/test_helpers/setup/setup_mock_server.js
+++ b/spec/frontend_integration/test_helpers/setup/setup_mock_server.js
@@ -7,6 +7,16 @@ beforeEach(() => {
const server = createMockServer();
server.logging = false;
+ server.pretender.handledRequest = (verb, path, { status, responseText }) => {
+ if (status >= 500) {
+ // eslint-disable-next-line no-console
+ console.log(`
+The mock server returned status ${status} with "${verb} ${path}":
+
+${JSON.stringify({ responseText }, null, 2)}
+`);
+ }
+ };
global.mockServer = server;
});
diff --git a/spec/frontend_integration/test_helpers/setup/setup_testing_library.js b/spec/frontend_integration/test_helpers/setup/setup_testing_library.js
index 5081b1c3b62..adc59665306 100644
--- a/spec/frontend_integration/test_helpers/setup/setup_testing_library.js
+++ b/spec/frontend_integration/test_helpers/setup/setup_testing_library.js
@@ -1,3 +1,15 @@
import { configure } from '@testing-library/dom';
-configure({ asyncUtilTimeout: 10000 });
+const CUSTOM_ERROR_TYPE = 'TestingLibraryError';
+
+configure({
+ asyncUtilTimeout: 10000,
+ // Overwrite default error message to reduce noise.
+ getElementError: (messageArg) => {
+ // Add to message because the `name` doesn't look like it's used (although it should).
+ const message = `${CUSTOM_ERROR_TYPE}:\n\n${messageArg}`;
+ const error = new Error(message);
+ error.name = CUSTOM_ERROR_TYPE;
+ return error;
+ },
+});
diff --git a/spec/graphql/features/authorization_spec.rb b/spec/graphql/features/authorization_spec.rb
index 64e423e2bf8..0dc3a9c85e7 100644
--- a/spec/graphql/features/authorization_spec.rb
+++ b/spec/graphql/features/authorization_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
include Graphql::ResolverFactories
let_it_be(:user) { create(:user) }
+
let(:permission_single) { :foo }
let(:permission_collection) { [:foo, :bar] }
let(:test_object) { double(name: 'My name') }
diff --git a/spec/graphql/features/feature_flag_spec.rb b/spec/graphql/features/feature_flag_spec.rb
index 30238cf9cb3..e5560fccf89 100644
--- a/spec/graphql/features/feature_flag_spec.rb
+++ b/spec/graphql/features/feature_flag_spec.rb
@@ -28,14 +28,25 @@ RSpec.describe 'Graphql Field feature flags' do
end
end
- it 'returns the value when feature is enabled' do
- expect(subject['item']).to eq('name' => test_object.name)
+ it 'checks YAML definition for default_enabled' do
+ # Exception is indicative of a check for YAML definition
+ expect { subject }.to raise_error(Feature::InvalidFeatureFlagError, /The feature flag YAML definition for '#{feature_flag}' does not exist/)
end
- it 'returns nil when the feature is disabled' do
- stub_feature_flags(feature_flag => false)
+ context 'skipping YAML check' do
+ before do
+ skip_default_enabled_yaml_check
+ end
+
+ it 'returns the value when feature is enabled' do
+ expect(subject['item']).to eq('name' => test_object.name)
+ end
- expect(subject).to be_nil
+ it 'returns nil when the feature is disabled' do
+ stub_feature_flags(feature_flag => false)
+
+ expect(subject).to be_nil
+ end
end
end
end
diff --git a/spec/graphql/gitlab_schema_spec.rb b/spec/graphql/gitlab_schema_spec.rb
index 1f2c518f83c..06505536b09 100644
--- a/spec/graphql/gitlab_schema_spec.rb
+++ b/spec/graphql/gitlab_schema_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe GitlabSchema do
let_it_be(:connections) { GitlabSchema.connections.all_wrappers }
+
let(:user) { build :user }
it 'uses batch loading' do
diff --git a/spec/graphql/mutations/alert_management/prometheus_integration/create_spec.rb b/spec/graphql/mutations/alert_management/prometheus_integration/create_spec.rb
index 7ab0f43d674..164bd9b1e39 100644
--- a/spec/graphql/mutations/alert_management/prometheus_integration/create_spec.rb
+++ b/spec/graphql/mutations/alert_management/prometheus_integration/create_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Mutations::AlertManagement::PrometheusIntegration::Create do
end
context 'when Prometheus Integration already exists' do
- let_it_be(:existing_integration) { create(:prometheus_service, project: project) }
+ let_it_be(:existing_integration) { create(:prometheus_integration, project: project) }
it 'returns errors' do
expect(resolve).to eq(
@@ -32,7 +32,7 @@ RSpec.describe Mutations::AlertManagement::PrometheusIntegration::Create do
context 'when UpdateService responds with success' do
it 'returns the integration with no errors' do
expect(resolve).to eq(
- integration: ::PrometheusService.last!,
+ integration: ::Integrations::Prometheus.last!,
errors: []
)
end
diff --git a/spec/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb b/spec/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb
index c9e1bf4162c..be07c142f4e 100644
--- a/spec/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb
+++ b/spec/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Mutations::AlertManagement::PrometheusIntegration::ResetToken do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
- let_it_be(:integration) { create(:prometheus_service, project: project) }
+ let_it_be(:integration) { create(:prometheus_integration, project: project) }
let(:args) { { id: GitlabSchema.id_from_object(integration) } }
diff --git a/spec/graphql/mutations/alert_management/prometheus_integration/update_spec.rb b/spec/graphql/mutations/alert_management/prometheus_integration/update_spec.rb
index 19e0d53b75f..81d057c6ae2 100644
--- a/spec/graphql/mutations/alert_management/prometheus_integration/update_spec.rb
+++ b/spec/graphql/mutations/alert_management/prometheus_integration/update_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Mutations::AlertManagement::PrometheusIntegration::Update do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
- let_it_be(:integration) { create(:prometheus_service, project: project) }
+ let_it_be(:integration) { create(:prometheus_integration, project: project) }
let(:args) { { id: GitlabSchema.id_from_object(integration), active: false, api_url: 'http://new-url.com' } }
diff --git a/spec/graphql/mutations/ci/job_token_scope/add_project_spec.rb b/spec/graphql/mutations/ci/job_token_scope/add_project_spec.rb
new file mode 100644
index 00000000000..412be5f16a4
--- /dev/null
+++ b/spec/graphql/mutations/ci/job_token_scope/add_project_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Mutations::Ci::JobTokenScope::AddProject do
+ let(:mutation) do
+ described_class.new(object: nil, context: { current_user: current_user }, field: nil)
+ end
+
+ describe '#resolve' do
+ let_it_be(:project) do
+ create(:project, ci_job_token_scope_enabled: true).tap(&:save!)
+ end
+
+ let_it_be(:target_project) { create(:project) }
+
+ let(:target_project_path) { target_project.full_path }
+
+ subject do
+ mutation.resolve(project_path: project.full_path, target_project_path: target_project_path)
+ end
+
+ context 'when user is not logged in' do
+ let(:current_user) { nil }
+
+ it 'raises error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when user is logged in' do
+ let(:current_user) { create(:user) }
+
+ context 'when user does not have permissions to admin project' do
+ it 'raises error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when user has permissions to admin project and read target project' do
+ before do
+ project.add_maintainer(current_user)
+ target_project.add_guest(current_user)
+ end
+
+ it 'adds target project to the job token scope' do
+ expect do
+ expect(subject).to include(ci_job_token_scope: be_present, errors: be_empty)
+ end.to change { Ci::JobToken::ProjectScopeLink.count }.by(1)
+ end
+
+ context 'when the service returns an error' do
+ let(:service) { double(:service) }
+
+ it 'returns an error response' do
+ expect(::Ci::JobTokenScope::AddProjectService).to receive(:new).with(project, current_user).and_return(service)
+ expect(service).to receive(:execute).with(target_project).and_return(ServiceResponse.error(message: 'The error message'))
+
+ expect(subject.fetch(:ci_job_token_scope)).to be_nil
+ expect(subject.fetch(:errors)).to include("The error message")
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/ci/job_token_scope/remove_project_spec.rb b/spec/graphql/mutations/ci/job_token_scope/remove_project_spec.rb
new file mode 100644
index 00000000000..0e706ea6e0c
--- /dev/null
+++ b/spec/graphql/mutations/ci/job_token_scope/remove_project_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Mutations::Ci::JobTokenScope::RemoveProject do
+ let(:mutation) do
+ described_class.new(object: nil, context: { current_user: current_user }, field: nil)
+ end
+
+ describe '#resolve' do
+ let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:target_project) { create(:project) }
+
+ let_it_be(:link) do
+ create(:ci_job_token_project_scope_link,
+ source_project: project,
+ target_project: target_project)
+ end
+
+ let(:target_project_path) { target_project.full_path }
+
+ subject do
+ mutation.resolve(project_path: project.full_path, target_project_path: target_project_path)
+ end
+
+ context 'when user is not logged in' do
+ let(:current_user) { nil }
+
+ it 'raises error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when user is logged in' do
+ let(:current_user) { create(:user) }
+
+ context 'when user does not have permissions to admin project' do
+ it 'raises error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when user has permissions to admin project and read target project' do
+ before do
+ project.add_maintainer(current_user)
+ target_project.add_guest(current_user)
+ end
+
+ it 'removes target project from the job token scope' do
+ expect do
+ expect(subject).to include(ci_job_token_scope: be_present, errors: be_empty)
+ end.to change { Ci::JobToken::ProjectScopeLink.count }.by(-1)
+ end
+
+ context 'when the service returns an error' do
+ let(:service) { double(:service) }
+
+ it 'returns an error response' do
+ expect(::Ci::JobTokenScope::RemoveProjectService).to receive(:new).with(project, current_user).and_return(service)
+ expect(service).to receive(:execute).with(target_project).and_return(ServiceResponse.error(message: 'The error message'))
+
+ expect(subject.fetch(:ci_job_token_scope)).to be_nil
+ expect(subject.fetch(:errors)).to include("The error message")
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/custom_emoji/create_spec.rb b/spec/graphql/mutations/custom_emoji/create_spec.rb
index 118c5d67188..7c98e53a72c 100644
--- a/spec/graphql/mutations/custom_emoji/create_spec.rb
+++ b/spec/graphql/mutations/custom_emoji/create_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Mutations::CustomEmoji::Create do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
+
let(:args) { { group_path: group.full_path, name: 'tanuki', url: 'https://about.gitlab.com/images/press/logo/png/gitlab-icon-rgb.png' } }
before do
diff --git a/spec/graphql/mutations/discussions/toggle_resolve_spec.rb b/spec/graphql/mutations/discussions/toggle_resolve_spec.rb
index 162b1249ab5..b03c6cb094f 100644
--- a/spec/graphql/mutations/discussions/toggle_resolve_spec.rb
+++ b/spec/graphql/mutations/discussions/toggle_resolve_spec.rb
@@ -140,6 +140,7 @@ RSpec.describe Mutations::Discussions::ToggleResolve do
context 'when discussion is on a merge request' do
let_it_be(:noteable) { create(:merge_request, source_project: project) }
+
let(:discussion) { create(:diff_note_on_merge_request, noteable: noteable, project: project).to_discussion }
it_behaves_like 'a working resolve method'
@@ -147,6 +148,7 @@ RSpec.describe Mutations::Discussions::ToggleResolve do
context 'when discussion is on a design' do
let_it_be(:noteable) { create(:design, :with_file, issue: create(:issue, project: project)) }
+
let(:discussion) { create(:diff_note_on_design, noteable: noteable, project: project).to_discussion }
it_behaves_like 'a working resolve method'
diff --git a/spec/graphql/mutations/environments/canary_ingress/update_spec.rb b/spec/graphql/mutations/environments/canary_ingress/update_spec.rb
index c022828cf09..2715a908f85 100644
--- a/spec/graphql/mutations/environments/canary_ingress/update_spec.rb
+++ b/spec/graphql/mutations/environments/canary_ingress/update_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Mutations::Environments::CanaryIngress::Update do
let_it_be(:environment) { create(:environment, project: project) }
let_it_be(:maintainer) { create(:user) }
let_it_be(:reporter) { create(:user) }
+
let(:user) { maintainer }
subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
diff --git a/spec/graphql/mutations/issues/create_spec.rb b/spec/graphql/mutations/issues/create_spec.rb
index b32f0991959..0e7ef0e55b9 100644
--- a/spec/graphql/mutations/issues/create_spec.rb
+++ b/spec/graphql/mutations/issues/create_spec.rb
@@ -50,6 +50,7 @@ RSpec.describe Mutations::Issues::Create do
stub_licensed_features(multiple_issue_assignees: false, issue_weights: false)
project.add_guest(assignee1)
project.add_guest(assignee2)
+ stub_spam_services
end
subject { mutation.resolve(**mutation_params) }
diff --git a/spec/graphql/mutations/issues/set_confidential_spec.rb b/spec/graphql/mutations/issues/set_confidential_spec.rb
index c3269e5c0c0..495b8442d95 100644
--- a/spec/graphql/mutations/issues/set_confidential_spec.rb
+++ b/spec/graphql/mutations/issues/set_confidential_spec.rb
@@ -17,6 +17,10 @@ RSpec.describe Mutations::Issues::SetConfidential do
subject { mutation.resolve(project_path: project.full_path, iid: issue.iid, confidential: confidential) }
+ before do
+ stub_spam_services
+ end
+
it_behaves_like 'permission level for issue mutation is correctly verified'
context 'when the user can update the issue' do
diff --git a/spec/graphql/mutations/issues/set_severity_spec.rb b/spec/graphql/mutations/issues/set_severity_spec.rb
index 7698118ae3e..7ce9c7f6621 100644
--- a/spec/graphql/mutations/issues/set_severity_spec.rb
+++ b/spec/graphql/mutations/issues/set_severity_spec.rb
@@ -5,12 +5,13 @@ require 'spec_helper'
RSpec.describe Mutations::Issues::SetSeverity do
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:incident) }
+
let(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
specify { expect(described_class).to require_graphql_authorizations(:update_issue) }
describe '#resolve' do
- let(:severity) { 'CRITICAL' }
+ let(:severity) { 'critical' }
let(:mutated_incident) { subject[:issue] }
subject(:resolve) { mutation.resolve(project_path: issue.project.full_path, iid: issue.iid, severity: severity) }
diff --git a/spec/graphql/mutations/issues/update_spec.rb b/spec/graphql/mutations/issues/update_spec.rb
index bd780477658..80f43338bb5 100644
--- a/spec/graphql/mutations/issues/update_spec.rb
+++ b/spec/graphql/mutations/issues/update_spec.rb
@@ -35,6 +35,10 @@ RSpec.describe Mutations::Issues::Update do
subject { mutation.resolve(**mutation_params) }
+ before do
+ stub_spam_services
+ end
+
it_behaves_like 'permission level for issue mutation is correctly verified'
context 'when the user can update the issue' do
diff --git a/spec/graphql/mutations/labels/create_spec.rb b/spec/graphql/mutations/labels/create_spec.rb
index b2dd94f31bb..53a17041125 100644
--- a/spec/graphql/mutations/labels/create_spec.rb
+++ b/spec/graphql/mutations/labels/create_spec.rb
@@ -45,6 +45,7 @@ RSpec.describe Mutations::Labels::Create do
context 'when creating a project label' do
let_it_be(:parent) { create(:project) }
+
let(:extra_params) { { project_path: parent.full_path } }
it_behaves_like 'create labels mutation'
@@ -52,6 +53,7 @@ RSpec.describe Mutations::Labels::Create do
context 'when creating a group label' do
let_it_be(:parent) { create(:group) }
+
let(:extra_params) { { group_path: parent.full_path } }
it_behaves_like 'create labels mutation'
diff --git a/spec/graphql/mutations/notes/reposition_image_diff_note_spec.rb b/spec/graphql/mutations/notes/reposition_image_diff_note_spec.rb
index d88b196cbff..e78f755d5c7 100644
--- a/spec/graphql/mutations/notes/reposition_image_diff_note_spec.rb
+++ b/spec/graphql/mutations/notes/reposition_image_diff_note_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Mutations::Notes::RepositionImageDiffNote do
let_it_be(:noteable) { create(:merge_request) }
let_it_be(:project) { noteable.project }
+
let(:note) { create(:image_diff_note_on_merge_request, noteable: noteable, project: project) }
let(:mutation) do
diff --git a/spec/graphql/mutations/release_asset_links/create_spec.rb b/spec/graphql/mutations/release_asset_links/create_spec.rb
index 089bc3d3276..eb7cbb4b789 100644
--- a/spec/graphql/mutations/release_asset_links/create_spec.rb
+++ b/spec/graphql/mutations/release_asset_links/create_spec.rb
@@ -50,6 +50,24 @@ RSpec.describe Mutations::ReleaseAssetLinks::Create do
end
end
+ context 'with protected tag' do
+ context 'when user has access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
+
+ it 'does not have errors' do
+ expect(subject).to include(errors: [])
+ end
+ end
+
+ context 'when user does not have access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
+
+ it 'has an access error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
+
context "when the user doesn't have access to the project" do
let(:current_user) { reporter }
diff --git a/spec/graphql/mutations/release_asset_links/delete_spec.rb b/spec/graphql/mutations/release_asset_links/delete_spec.rb
index 15d320b58ee..cda292f2ffa 100644
--- a/spec/graphql/mutations/release_asset_links/delete_spec.rb
+++ b/spec/graphql/mutations/release_asset_links/delete_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Mutations::ReleaseAssetLinks::Delete do
let_it_be(:project) { create(:project, :private, :repository) }
let_it_be_with_reload(:release) { create(:release, project: project) }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
let_it_be(:maintainer) { create(:user).tap { |u| project.add_maintainer(u) } }
let_it_be_with_reload(:release_link) { create(:release_link, release: release) }
@@ -22,7 +23,7 @@ RSpec.describe Mutations::ReleaseAssetLinks::Delete do
let(:deleted_link) { subject[:link] }
context 'when the current user has access to delete the link' do
- let(:current_user) { maintainer }
+ let(:current_user) { developer }
it 'deletes the link and returns it', :aggregate_failures do
expect(deleted_link).to eq(release_link)
@@ -30,6 +31,26 @@ RSpec.describe Mutations::ReleaseAssetLinks::Delete do
expect(release.links).to be_empty
end
+ context 'with protected tag' do
+ context 'when user has access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
+
+ it 'does not have errors' do
+ subject
+
+ expect(resolve).to include(errors: [])
+ end
+ end
+
+ context 'when user does not have access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
+
+ it 'raises a resource access error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
+
context "when the link doesn't exist" do
let(:mutation_arguments) { super().merge(id: "gid://gitlab/Releases::Link/#{non_existing_record_id}") }
@@ -48,7 +69,7 @@ RSpec.describe Mutations::ReleaseAssetLinks::Delete do
end
context 'when the current user does not have access to delete the link' do
- let(:current_user) { developer }
+ let(:current_user) { reporter }
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
diff --git a/spec/graphql/mutations/release_asset_links/update_spec.rb b/spec/graphql/mutations/release_asset_links/update_spec.rb
index 20c1c8b581c..64648687336 100644
--- a/spec/graphql/mutations/release_asset_links/update_spec.rb
+++ b/spec/graphql/mutations/release_asset_links/update_spec.rb
@@ -87,6 +87,26 @@ RSpec.describe Mutations::ReleaseAssetLinks::Update do
end
it_behaves_like 'no changes to the link except for the', :name
+
+ context 'with protected tag' do
+ context 'when user has access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
+
+ it 'does not have errors' do
+ subject
+
+ expect(resolve).to include(errors: [])
+ end
+ end
+
+ context 'when user does not have access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
+
+ it 'raises a resource access error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
end
context 'when nil is provided' do
diff --git a/spec/graphql/mutations/releases/create_spec.rb b/spec/graphql/mutations/releases/create_spec.rb
index 7776f968346..1f2c3ed537f 100644
--- a/spec/graphql/mutations/releases/create_spec.rb
+++ b/spec/graphql/mutations/releases/create_spec.rb
@@ -117,6 +117,28 @@ RSpec.describe Mutations::Releases::Create do
expect(new_link.filepath).to eq(expected_link[:filepath])
end
end
+
+ context 'with protected tag' do
+ context 'when user has access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
+
+ it 'does not have errors' do
+ subject
+
+ expect(resolve).to include(errors: [])
+ end
+ end
+
+ context 'when user does not have access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
+
+ it 'has an access error' do
+ subject
+
+ expect(resolve).to include(errors: ['Access Denied'])
+ end
+ end
+ end
end
context "when the current user doesn't have access to create releases" do
diff --git a/spec/graphql/mutations/releases/delete_spec.rb b/spec/graphql/mutations/releases/delete_spec.rb
index bedb72b002c..d97f839ce87 100644
--- a/spec/graphql/mutations/releases/delete_spec.rb
+++ b/spec/graphql/mutations/releases/delete_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Mutations::Releases::Delete do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:non_project_member) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
let_it_be(:developer) { create(:user) }
let_it_be(:maintainer) { create(:user) }
let_it_be(:tag) { 'v1.1.0'}
@@ -20,6 +21,7 @@ RSpec.describe Mutations::Releases::Delete do
end
before do
+ project.add_reporter(reporter)
project.add_developer(developer)
project.add_maintainer(maintainer)
end
@@ -36,7 +38,7 @@ RSpec.describe Mutations::Releases::Delete do
end
context 'when the current user has access to create releases' do
- let(:current_user) { maintainer }
+ let(:current_user) { developer }
it 'deletes the release' do
expect { subject }.to change { Release.count }.by(-1)
@@ -54,6 +56,28 @@ RSpec.describe Mutations::Releases::Delete do
expect(subject[:errors]).to eq([])
end
+ context 'with protected tag' do
+ context 'when user has access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
+
+ it 'does not have errors' do
+ subject
+
+ expect(resolve).to include(errors: [])
+ end
+ end
+
+ context 'when user does not have access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
+
+ it 'has an access error' do
+ subject
+
+ expect(resolve).to include(errors: ['Access Denied'])
+ end
+ end
+ end
+
context 'validation' do
context 'when the release does not exist' do
let(:mutation_arguments) { super().merge(tag: 'not-a-real-release') }
@@ -76,8 +100,8 @@ RSpec.describe Mutations::Releases::Delete do
end
context "when the current user doesn't have access to update releases" do
- context 'when the user is a developer' do
- let(:current_user) { developer }
+ context 'when the user is a reporter' do
+ let(:current_user) { reporter }
it_behaves_like 'unauthorized or not found error'
end
diff --git a/spec/graphql/mutations/releases/update_spec.rb b/spec/graphql/mutations/releases/update_spec.rb
index c541afd53a1..5ee63ac4dc2 100644
--- a/spec/graphql/mutations/releases/update_spec.rb
+++ b/spec/graphql/mutations/releases/update_spec.rb
@@ -107,6 +107,28 @@ RSpec.describe Mutations::Releases::Update do
end
it_behaves_like 'no changes to the release except for the', :name
+
+ context 'with protected tag' do
+ context 'when user has access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
+
+ it 'does not have errors' do
+ subject
+
+ expect(resolve).to include(errors: [])
+ end
+ end
+
+ context 'when user does not have access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
+
+ it 'has an access error' do
+ subject
+
+ expect(resolve).to include(errors: ['Access Denied'])
+ end
+ end
+ end
end
context 'when nil is provided' do
diff --git a/spec/graphql/mutations/security/ci_configuration/base_security_analyzer_spec.rb b/spec/graphql/mutations/security/ci_configuration/base_security_analyzer_spec.rb
new file mode 100644
index 00000000000..818a7d303bd
--- /dev/null
+++ b/spec/graphql/mutations/security/ci_configuration/base_security_analyzer_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Security::CiConfiguration::BaseSecurityAnalyzer do
+ include GraphqlHelpers
+
+ it 'raises a NotImplementedError error if the resolve method is called on the base class' do
+ user = create(:user)
+ project = create(:project, :public, :repository)
+ project.add_developer(user)
+ expect { resolve(described_class, args: { project_path: project.full_path }, ctx: { current_user: user }) }.to raise_error(NotImplementedError)
+ end
+end
diff --git a/spec/graphql/resolvers/alert_management/http_integrations_resolver_spec.rb b/spec/graphql/resolvers/alert_management/http_integrations_resolver_spec.rb
index a4d1101bc4f..0f40565c5d3 100644
--- a/spec/graphql/resolvers/alert_management/http_integrations_resolver_spec.rb
+++ b/spec/graphql/resolvers/alert_management/http_integrations_resolver_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Resolvers::AlertManagement::HttpIntegrationsResolver do
let_it_be(:developer) { create(:user) }
let_it_be(:maintainer) { create(:user) }
let_it_be(:project) { create(:project) }
- let_it_be(:prometheus_integration) { create(:prometheus_service, project: project) }
+ let_it_be(:prometheus_integration) { create(:prometheus_integration, project: project) }
let_it_be(:active_http_integration) { create(:alert_management_http_integration, project: project) }
let_it_be(:inactive_http_integration) { create(:alert_management_http_integration, :inactive, project: project) }
let_it_be(:other_proj_integration) { create(:alert_management_http_integration) }
diff --git a/spec/graphql/resolvers/alert_management/integrations_resolver_spec.rb b/spec/graphql/resolvers/alert_management/integrations_resolver_spec.rb
index fb0fb6729d4..11114d41522 100644
--- a/spec/graphql/resolvers/alert_management/integrations_resolver_spec.rb
+++ b/spec/graphql/resolvers/alert_management/integrations_resolver_spec.rb
@@ -8,11 +8,11 @@ RSpec.describe Resolvers::AlertManagement::IntegrationsResolver do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:project2) { create(:project) }
- let_it_be(:prometheus_integration) { create(:prometheus_service, project: project) }
+ let_it_be(:prometheus_integration) { create(:prometheus_integration, project: project) }
let_it_be(:active_http_integration) { create(:alert_management_http_integration, project: project) }
let_it_be(:inactive_http_integration) { create(:alert_management_http_integration, :inactive, project: project) }
let_it_be(:other_proj_integration) { create(:alert_management_http_integration, project: project2) }
- let_it_be(:other_proj_prometheus_integration) { create(:prometheus_service, project: project2) }
+ let_it_be(:other_proj_prometheus_integration) { create(:prometheus_integration, project: project2) }
let(:params) { {} }
diff --git a/spec/graphql/resolvers/ci/config_resolver_spec.rb b/spec/graphql/resolvers/ci/config_resolver_spec.rb
index 73e9fab9f99..97eee749290 100644
--- a/spec/graphql/resolvers/ci/config_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/config_resolver_spec.rb
@@ -15,14 +15,15 @@ RSpec.describe Resolvers::Ci::ConfigResolver do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, creator: user, namespace: user.namespace) }
+ let_it_be(:sha) { nil }
subject(:response) do
resolve(described_class,
- args: { project_path: project.full_path, content: content },
+ args: { project_path: project.full_path, content: content, sha: sha },
ctx: { current_user: user })
end
- context 'with a valid .gitlab-ci.yml' do
+ shared_examples 'a valid config file' do
let(:fake_result) do
::Gitlab::Ci::Lint::Result.new(
merged_yaml: content,
@@ -37,9 +38,22 @@ RSpec.describe Resolvers::Ci::ConfigResolver do
end
it 'lints the ci config file and returns the merged yaml file' do
- expect(response[:merged_yaml]).to eq(content)
expect(response[:status]).to eq(:valid)
+ expect(response[:merged_yaml]).to eq(content)
expect(response[:errors]).to be_empty
+ expect(::Gitlab::Ci::Lint).to have_received(:new).with(current_user: user, project: project, sha: sha)
+ end
+ end
+
+ context 'with a valid .gitlab-ci.yml' do
+ context 'with a sha' do
+ let(:sha) { '1231231' }
+
+ it_behaves_like 'a valid config file'
+ end
+
+ context 'without a sha' do
+ it_behaves_like 'a valid config file'
end
end
diff --git a/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb b/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb
new file mode 100644
index 00000000000..8522542498d
--- /dev/null
+++ b/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Ci::JobTokenScopeResolver do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+
+ specify do
+ expect(described_class).to have_nullable_graphql_type(::Types::Ci::JobTokenScopeType)
+ end
+
+ subject(:resolve_scope) { resolve(described_class, ctx: { current_user: current_user }, obj: project) }
+
+ describe '#resolve' do
+ context 'with access to scope' do
+ before do
+ project.add_user(current_user, :maintainer)
+ end
+
+ it 'returns nil when scope is not enabled' do
+ allow(project).to receive(:ci_job_token_scope_enabled?).and_return(false)
+
+ expect(resolve_scope).to eq(nil)
+ end
+
+ it 'returns the same project in the allow list of projects for the Ci Job Token' do
+ expect(resolve_scope.all_projects).to contain_exactly(project)
+ end
+
+ context 'when another projects gets added to the allow list' do
+ let!(:link) { create(:ci_job_token_project_scope_link, source_project: project) }
+
+ it 'returns both projects' do
+ expect(resolve_scope.all_projects).to contain_exactly(project, link.target_project)
+ end
+ end
+
+ context 'when job token scope is disabled' do
+ before do
+ project.update!(ci_job_token_scope_enabled: false)
+ end
+
+ it 'returns nil' do
+ expect(resolve_scope).to be_nil
+ end
+ end
+ end
+
+ context 'without access to scope' do
+ before do
+ project.add_user(current_user, :developer)
+ end
+
+ it 'raises error' do
+ expect do
+ resolve_scope
+ end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/group_milestones_resolver_spec.rb b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
index 78d89054efd..acfc8313407 100644
--- a/spec/graphql/resolvers/group_milestones_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe Resolvers::GroupMilestonesResolver do
+ using RSpec::Parameterized::TableSyntax
include GraphqlHelpers
describe '#resolve' do
@@ -79,6 +80,24 @@ RSpec.describe Resolvers::GroupMilestonesResolver do
end
end
+ context 'by sort' do
+ it 'calls MilestonesFinder with correct parameters' do
+ expect(MilestonesFinder).to receive(:new)
+ .with(args(group_ids: group.id, state: 'all', sort: :due_date_desc))
+ .and_call_original
+
+ resolve_group_milestones(sort: :due_date_desc)
+ end
+
+ %i[expired_last_due_date_asc expired_last_due_date_desc].each do |sort_by|
+ it "uses offset-pagination when sorting by #{sort_by}" do
+ resolved = resolve_group_milestones(sort: sort_by)
+
+ expect(resolved).to be_a(::Gitlab::Graphql::Pagination::OffsetActiveRecordRelationConnection)
+ end
+ end
+ end
+
context 'by timeframe' do
context 'when start_date and end_date are present' do
context 'when start date is after end_date' do
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index 7c2ceb50066..9b329e961cc 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -290,6 +290,42 @@ RSpec.describe Resolvers::IssuesResolver do
expect(resolve_issues(sort: :severity_desc).to_a).to eq([issue_high_severity, issue_low_severity, issue_no_severity])
end
end
+
+ context 'when sorting by popularity' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue1) { create(:issue, project: project) } # has one upvote
+ let_it_be(:issue2) { create(:issue, project: project) } # has two upvote
+ let_it_be(:issue3) { create(:issue, project: project) }
+ let_it_be(:issue4) { create(:issue, project: project) } # has one upvote
+
+ before do
+ create(:award_emoji, :upvote, awardable: issue1)
+ create(:award_emoji, :upvote, awardable: issue2)
+ create(:award_emoji, :upvote, awardable: issue2)
+ create(:award_emoji, :upvote, awardable: issue4)
+ end
+
+ it 'sorts issues ascending (ties broken by id in desc order)' do
+ expect(resolve_issues(sort: :popularity_asc).to_a).to eq([issue3, issue4, issue1, issue2])
+ end
+
+ it 'sorts issues descending (ties broken by id in desc order)' do
+ expect(resolve_issues(sort: :popularity_desc).to_a).to eq([issue2, issue4, issue1, issue3])
+ end
+ end
+
+ context 'when sorting with non-stable cursors' do
+ %i[priority_asc priority_desc
+ popularity_asc popularity_desc
+ label_priority_asc label_priority_desc
+ milestone_due_asc milestone_due_desc].each do |sort_by|
+ it "uses offset-pagination when sorting by #{sort_by}" do
+ resolved = resolve_issues(sort: sort_by)
+
+ expect(resolved).to be_a(::Gitlab::Graphql::Pagination::OffsetActiveRecordRelationConnection)
+ end
+ end
+ end
end
it 'returns issues user can see' do
diff --git a/spec/graphql/resolvers/project_milestones_resolver_spec.rb b/spec/graphql/resolvers/project_milestones_resolver_spec.rb
index b641a54393e..e168291c804 100644
--- a/spec/graphql/resolvers/project_milestones_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_milestones_resolver_spec.rb
@@ -71,6 +71,24 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
end
end
+ context 'by sort' do
+ it 'calls MilestonesFinder with correct parameters' do
+ expect(MilestonesFinder).to receive(:new)
+ .with(args(project_ids: project.id, state: 'all', sort: :due_date_desc))
+ .and_call_original
+
+ resolve_project_milestones(sort: :due_date_desc)
+ end
+
+ %i[expired_last_due_date_asc expired_last_due_date_desc].each do |sort_by|
+ it "uses offset-pagination when sorting by #{sort_by}" do
+ resolved = resolve_project_milestones(sort: sort_by)
+
+ expect(resolved).to be_a(::Gitlab::Graphql::Pagination::OffsetActiveRecordRelationConnection)
+ end
+ end
+ end
+
context 'by timeframe' do
context 'when start_date and end_date are present' do
it 'calls MilestonesFinder with correct parameters' do
diff --git a/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb b/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
index c375345250d..8c36153d485 100644
--- a/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Resolvers::Projects::JiraProjectsResolver do
end
context 'when project has no Jira service' do
- let_it_be(:jira_service) { nil }
+ let_it_be(:jira_integration) { nil }
context 'when user is a maintainer' do
before do
@@ -34,7 +34,7 @@ RSpec.describe Resolvers::Projects::JiraProjectsResolver do
end
context 'when project has Jira service' do
- let(:jira_service) { create(:jira_service, project: project) }
+ let(:jira_integration) { create(:jira_integration, project: project) }
context 'when user is a developer' do
before do
@@ -98,6 +98,6 @@ RSpec.describe Resolvers::Projects::JiraProjectsResolver do
end
def resolve_jira_projects(args = {}, context = { current_user: user })
- resolve(described_class, obj: jira_service, args: args, ctx: context)
+ resolve(described_class, obj: jira_integration, args: args, ctx: context)
end
end
diff --git a/spec/graphql/resolvers/projects/services_resolver_spec.rb b/spec/graphql/resolvers/projects/services_resolver_spec.rb
index a1b631113b2..6da99c8448e 100644
--- a/spec/graphql/resolvers/projects/services_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects/services_resolver_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe Resolvers::Projects::ServicesResolver do
context 'when project has services' do
let_it_be(:project) { create(:project, :private) }
- let_it_be(:jira_service) { create(:jira_service, project: project) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project) }
context 'when user cannot access services' do
context 'when anonymous user' do
diff --git a/spec/graphql/resolvers/projects_resolver_spec.rb b/spec/graphql/resolvers/projects_resolver_spec.rb
index 2f2aacb9ad5..2685115d1a2 100644
--- a/spec/graphql/resolvers/projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects_resolver_spec.rb
@@ -27,10 +27,6 @@ RSpec.describe Resolvers::ProjectsResolver do
private_group.add_developer(user)
end
- before do
- stub_feature_flags(project_finder_similarity_sort: false)
- end
-
context 'when user is not logged in' do
let(:current_user) { nil }
@@ -83,6 +79,7 @@ RSpec.describe Resolvers::ProjectsResolver do
context 'when user is logged in' do
let(:current_user) { user }
+ let(:visible_projecs) { [project, other_project, group_project, private_project, private_group_project] }
context 'when no filters are applied' do
it 'returns all visible projects for the user' do
@@ -129,21 +126,24 @@ RSpec.describe Resolvers::ProjectsResolver do
end
end
- context 'when sort is similarity' do
+ context 'when sorting' do
let_it_be(:named_project1) { create(:project, :public, name: 'projAB', path: 'projAB') }
let_it_be(:named_project2) { create(:project, :public, name: 'projABC', path: 'projABC') }
let_it_be(:named_project3) { create(:project, :public, name: 'projA', path: 'projA') }
+ let_it_be(:named_projects) { [named_project1, named_project2, named_project3] }
- let(:filters) { { search: 'projA', sort: 'similarity' } }
-
- it 'returns projects in order of similarity to search' do
- stub_feature_flags(project_finder_similarity_sort: current_user)
+ context 'when sorting by similarity' do
+ let(:filters) { { search: 'projA', sort: 'similarity' } }
- is_expected.to eq([named_project3, named_project1, named_project2])
+ it 'returns projects in order of similarity to search' do
+ is_expected.to eq([named_project3, named_project1, named_project2])
+ end
end
- it 'returns projects in any order if flag is off' do
- is_expected.to match_array([named_project3, named_project1, named_project2])
+ context 'when no sort is provided' do
+ it 'returns projects in descending order by id' do
+ is_expected.to match_array((visible_projecs + named_projects).sort_by { |p| p[:id]}.reverse )
+ end
end
end
diff --git a/spec/graphql/types/alert_management/prometheus_integration_type_spec.rb b/spec/graphql/types/alert_management/prometheus_integration_type_spec.rb
index d057afb331c..31cf94aef44 100644
--- a/spec/graphql/types/alert_management/prometheus_integration_type_spec.rb
+++ b/spec/graphql/types/alert_management/prometheus_integration_type_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe GitlabSchema.types['AlertManagementPrometheusIntegration'] do
end
end
- let_it_be_with_reload(:integration) { create(:prometheus_service) }
+ let_it_be_with_reload(:integration) { create(:prometheus_integration) }
let_it_be(:user) { create(:user, maintainer_projects: [integration.project]) }
it_behaves_like 'has field with value', 'name' do
@@ -50,7 +50,7 @@ RSpec.describe GitlabSchema.types['AlertManagementPrometheusIntegration'] do
describe 'a group integration' do
let_it_be(:group) { create(:group) }
- let_it_be(:integration) { create(:prometheus_service, project: nil, group: group) }
+ let_it_be(:integration) { create(:prometheus_integration, project: nil, group: group) }
# Since it is impossible to authorize the parent here, given that the
# project is nil, all fields should be redacted:
diff --git a/spec/graphql/types/base_field_spec.rb b/spec/graphql/types/base_field_spec.rb
index 54b59317b55..c34fbf42dd8 100644
--- a/spec/graphql/types/base_field_spec.rb
+++ b/spec/graphql/types/base_field_spec.rb
@@ -130,14 +130,25 @@ RSpec.describe Types::BaseField do
skip_feature_flags_yaml_validation
end
- it 'returns false if the feature is not enabled' do
- stub_feature_flags(flag => false)
-
- expect(field.visible?(context)).to eq(false)
+ it 'checks YAML definition for default_enabled' do
+ # Exception is indicative of a check for YAML definition
+ expect { field.visible?(context) }.to raise_error(Feature::InvalidFeatureFlagError, /The feature flag YAML definition for '#{flag}' does not exist/)
end
- it 'returns true if the feature is enabled' do
- expect(field.visible?(context)).to eq(true)
+ context 'skipping YAML check' do
+ before do
+ skip_default_enabled_yaml_check
+ end
+
+ it 'returns false if the feature is not enabled' do
+ stub_feature_flags(flag => false)
+
+ expect(field.visible?(context)).to eq(false)
+ end
+
+ it 'returns true if the feature is enabled' do
+ expect(field.visible?(context)).to eq(true)
+ end
end
end
end
@@ -149,17 +160,17 @@ RSpec.describe Types::BaseField do
let(:flag) { :test_flag }
it 'prepends the description' do
- expect(field.description). to eq 'Test description. Available only when feature flag `test_flag` is enabled.'
+ expect(field.description).to start_with 'Test description. Available only when feature flag `test_flag` is enabled.'
end
context 'falsey feature_flag values' do
using RSpec::Parameterized::TableSyntax
- where(:flag, :feature_value) do
- '' | false
- '' | true
- nil | false
- nil | true
+ where(:flag, :feature_value, :default_enabled) do
+ '' | false | false
+ '' | true | false
+ nil | false | true
+ nil | true | false
end
with_them do
@@ -168,6 +179,33 @@ RSpec.describe Types::BaseField do
end
end
end
+
+ context 'with different default_enabled values' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:feature_value, :default_enabled, :expected_description) do
+ disabled_ff_description = "Test description. Available only when feature flag `test_flag` is enabled. This flag is disabled by default, because the feature is experimental and is subject to change without notice."
+ enabled_ff_description = "Test description. Available only when feature flag `test_flag` is enabled. This flag is enabled by default."
+
+ false | false | disabled_ff_description
+ true | false | disabled_ff_description
+ false | true | enabled_ff_description
+ true | true | enabled_ff_description
+ end
+
+ with_them do
+ before do
+ stub_feature_flags("#{flag}": feature_value)
+
+ allow(Feature::Definition).to receive(:has_definition?).with(flag).and_return(true)
+ allow(Feature::Definition).to receive(:default_enabled?).and_return(default_enabled)
+ end
+
+ it 'returns the correct availability in the description' do
+ expect(field.description). to eq expected_description
+ end
+ end
+ end
end
end
@@ -185,9 +223,8 @@ RSpec.describe Types::BaseField do
feature_flag: 'foo_flag'
)
- expectation = 'Field description. Available only when feature flag `foo_flag` is enabled. Deprecated in 1.10: Deprecation reason.'
-
- expect(field.description).to eq(expectation)
+ expect(field.description).to start_with('Field description. Available only when feature flag `foo_flag` is enabled.')
+ expect(field.description).to end_with('Deprecated in 1.10: Deprecation reason.')
end
end
end
diff --git a/spec/graphql/types/ci/detailed_status_type_spec.rb b/spec/graphql/types/ci/detailed_status_type_spec.rb
index 9fa3280657a..5ed79b73a47 100644
--- a/spec/graphql/types/ci/detailed_status_type_spec.rb
+++ b/spec/graphql/types/ci/detailed_status_type_spec.rb
@@ -8,14 +8,26 @@ RSpec.describe Types::Ci::DetailedStatusType do
specify { expect(described_class.graphql_name).to eq('DetailedStatus') }
it 'has all fields' do
- expect(described_class).to have_graphql_fields(:group, :icon, :favicon,
+ expect(described_class).to have_graphql_fields(:id, :group, :icon, :favicon,
:details_path, :has_details,
:label, :text, :tooltip, :action)
end
+ let_it_be(:stage) { create(:ci_stage_entity, status: :skipped) }
+
+ describe 'id field' do
+ it 'correctly renders the field' do
+ parent_object = double(:parent_object, object: stage)
+ parent = double(:parent, object: parent_object)
+ status = stage.detailed_status(stage.pipeline.user)
+ expected_id = "#{status.id}-#{stage.id}"
+
+ expect(resolve_field('id', status, extras: { parent: parent })).to eq(expected_id)
+ end
+ end
+
describe 'action field' do
it 'correctly renders the field' do
- stage = create(:ci_stage_entity, status: :skipped)
status = stage.detailed_status(stage.pipeline.user)
expected_status = {
diff --git a/spec/graphql/types/ci/group_type_spec.rb b/spec/graphql/types/ci/group_type_spec.rb
index d7ce5602612..f563b31342f 100644
--- a/spec/graphql/types/ci/group_type_spec.rb
+++ b/spec/graphql/types/ci/group_type_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Types::Ci::GroupType do
it 'exposes the expected fields' do
expected_fields = %i[
+ id
name
size
jobs
diff --git a/spec/graphql/types/ci/job_token_scope_type_spec.rb b/spec/graphql/types/ci/job_token_scope_type_spec.rb
new file mode 100644
index 00000000000..19a8cc324f9
--- /dev/null
+++ b/spec/graphql/types/ci/job_token_scope_type_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CiJobTokenScopeType'] do
+ specify { expect(described_class.graphql_name).to eq('CiJobTokenScopeType') }
+
+ it 'has the correct fields' do
+ expected_fields = [:projects]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+
+ describe 'query' do
+ let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:current_user) { create(:user) }
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ ciJobTokenScope {
+ projects {
+ nodes {
+ path
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ subject { GitlabSchema.execute(query, context: { current_user: current_user }).as_json }
+
+ let(:projects_field) { subject.dig('data', 'project', 'ciJobTokenScope', 'projects', 'nodes') }
+ let(:returned_project_paths) { projects_field.map { |project| project['path']} }
+
+ context 'with access to scope' do
+ before do
+ project.add_user(current_user, :maintainer)
+ end
+
+ context 'when multiple projects in the allow list' do
+ let!(:link) { create(:ci_job_token_project_scope_link, source_project: project) }
+
+ context 'when linked projects are readable' do
+ before do
+ link.target_project.add_user(current_user, :developer)
+ end
+
+ it 'returns readable projects in scope' do
+ expect(returned_project_paths).to contain_exactly(project.path, link.target_project.path)
+ end
+ end
+
+ context 'when linked project is not readable' do
+ it 'returns readable projects in scope' do
+ expect(returned_project_paths).to contain_exactly(project.path)
+ end
+ end
+
+ context 'when job token scope is disabled' do
+ before do
+ project.ci_cd_settings.update!(job_token_scope_enabled: false)
+ end
+
+ it 'returns nil' do
+ expect(subject.dig('data', 'project', 'ciJobTokenScope')).to be_nil
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/types/ci/pipeline_type_spec.rb b/spec/graphql/types/ci/pipeline_type_spec.rb
index 35d48229fa4..9ba4252bcd5 100644
--- a/spec/graphql/types/ci/pipeline_type_spec.rb
+++ b/spec/graphql/types/ci/pipeline_type_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Types::Ci::PipelineType do
coverage created_at updated_at started_at finished_at committed_at
stages user retryable cancelable jobs source_job job downstream
upstream path project active user_permissions warnings commit_path uses_needs
- test_report_summary test_suite
+ test_report_summary test_suite ref
]
if Gitlab.ee?
diff --git a/spec/graphql/types/ci/runner_type_spec.rb b/spec/graphql/types/ci/runner_type_spec.rb
index f27216f4d39..cff4c459d79 100644
--- a/spec/graphql/types/ci/runner_type_spec.rb
+++ b/spec/graphql/types/ci/runner_type_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe GitlabSchema.types['CiRunner'] do
expected_fields = %w[
id description contacted_at maximum_timeout access_level active status
version short_sha revision locked run_untagged ip_address runner_type tag_list
+ project_count job_count
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/stage_type_spec.rb b/spec/graphql/types/ci/stage_type_spec.rb
index cb8c1cb02cd..48c569eca16 100644
--- a/spec/graphql/types/ci/stage_type_spec.rb
+++ b/spec/graphql/types/ci/stage_type_spec.rb
@@ -7,9 +7,11 @@ RSpec.describe Types::Ci::StageType do
it 'exposes the expected fields' do
expected_fields = %i[
+ id
name
groups
detailedStatus
+ status
jobs
]
diff --git a/spec/graphql/types/ci/status_action_type_spec.rb b/spec/graphql/types/ci/status_action_type_spec.rb
index 8a99068e44f..ab7dee3dd11 100644
--- a/spec/graphql/types/ci/status_action_type_spec.rb
+++ b/spec/graphql/types/ci/status_action_type_spec.rb
@@ -3,10 +3,13 @@
require 'spec_helper'
RSpec.describe Types::Ci::StatusActionType do
+ include GraphqlHelpers
+
specify { expect(described_class.graphql_name).to eq('StatusAction') }
it 'exposes the expected fields' do
expected_fields = %i[
+ id
buttonTitle
icon
path
@@ -16,4 +19,21 @@ RSpec.describe Types::Ci::StatusActionType do
expect(described_class).to have_graphql_fields(*expected_fields)
end
+
+ describe 'id field' do
+ it 'correctly renders the field' do
+ stage = build(:ci_stage_entity, status: :skipped)
+ status = stage.detailed_status(stage.pipeline.user)
+
+ grandparent_object = double(:grandparent_object, object: stage)
+ parent_object = double(:parent_object, object: status)
+
+ grandparent = double(:parent, object: grandparent_object)
+ parent = double(:parent, object: parent_object, parent: grandparent)
+
+ expected_id = "#{stage.class.name}-#{status.id}"
+
+ expect(resolve_field('id', status, extras: { parent: parent })).to eq(expected_id)
+ end
+ end
end
diff --git a/spec/graphql/types/deployment_tier_enum_spec.rb b/spec/graphql/types/deployment_tier_enum_spec.rb
new file mode 100644
index 00000000000..752bf895d74
--- /dev/null
+++ b/spec/graphql/types/deployment_tier_enum_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::DeploymentTierEnum do
+ it 'includes a value for each supported environment tier' do
+ expect(described_class.values).to match(
+ 'PRODUCTION' => have_attributes(value: :production),
+ 'STAGING' => have_attributes(value: :staging),
+ 'TESTING' => have_attributes(value: :testing),
+ 'DEVELOPMENT' => have_attributes(value: :development),
+ 'OTHER' => have_attributes(value: :other)
+ )
+ end
+end
diff --git a/spec/graphql/types/global_id_type_spec.rb b/spec/graphql/types/global_id_type_spec.rb
index 37f59770817..cdf09dd9cc9 100644
--- a/spec/graphql/types/global_id_type_spec.rb
+++ b/spec/graphql/types/global_id_type_spec.rb
@@ -3,7 +3,6 @@
require 'spec_helper'
RSpec.describe Types::GlobalIDType do
- include ::Gitlab::Graphql::Laziness
include GraphqlHelpers
include GlobalIDDeprecationHelpers
@@ -103,7 +102,7 @@ RSpec.describe Types::GlobalIDType do
end
context 'with a deprecation' do
- around(:all) do |example|
+ around do |example|
# Unset all previously memoized GlobalIDTypes to allow us to define one
# that will use the constants stubbed in the `before` block.
previous_id_types = Types::GlobalIDType.instance_variable_get(:@id_types)
diff --git a/spec/graphql/types/issuable_searchable_field_enum_spec.rb b/spec/graphql/types/issuable_searchable_field_enum_spec.rb
new file mode 100644
index 00000000000..13e1b55ac7b
--- /dev/null
+++ b/spec/graphql/types/issuable_searchable_field_enum_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::IssuableSearchableFieldEnum do
+ specify { expect(described_class.graphql_name).to eq('IssuableSearchableField') }
+
+ it 'exposes all the issuable searchable fields' do
+ expect(described_class.values.keys).to contain_exactly(
+ *Issuable::SEARCHABLE_FIELDS.map(&:upcase)
+ )
+ end
+end
diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb
index 6908a610aae..a117741b3a2 100644
--- a/spec/graphql/types/issue_type_spec.rb
+++ b/spec/graphql/types/issue_type_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GitlabSchema.types['Issue'] do
specify { expect(described_class).to require_graphql_authorizations(:read_issue) }
- specify { expect(described_class.interfaces).to include(Types::Notes::NoteableType) }
+ specify { expect(described_class.interfaces).to include(Types::Notes::NoteableInterface) }
specify { expect(described_class.interfaces).to include(Types::CurrentUserTodos) }
@@ -18,7 +18,7 @@ RSpec.describe GitlabSchema.types['Issue'] do
confidential discussion_locked upvotes downvotes user_notes_count user_discussions_count web_path web_url relative_position
emails_disabled subscribed time_estimate total_time_spent human_time_estimate human_total_time_spent closed_at created_at updated_at task_completion_status
design_collection alert_management_alert severity current_user_todos moved moved_to
- create_note_email timelogs]
+ create_note_email timelogs project_id]
fields.each do |field_name|
expect(described_class).to have_graphql_field(field_name)
diff --git a/spec/graphql/types/merge_request_type_spec.rb b/spec/graphql/types/merge_request_type_spec.rb
index 875a16a79e5..bc3ccb0d9ba 100644
--- a/spec/graphql/types/merge_request_type_spec.rb
+++ b/spec/graphql/types/merge_request_type_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GitlabSchema.types['MergeRequest'] do
specify { expect(described_class).to require_graphql_authorizations(:read_merge_request) }
- specify { expect(described_class.interfaces).to include(Types::Notes::NoteableType) }
+ specify { expect(described_class.interfaces).to include(Types::Notes::NoteableInterface) }
specify { expect(described_class.interfaces).to include(Types::CurrentUserTodos) }
diff --git a/spec/graphql/types/milestone_type_spec.rb b/spec/graphql/types/milestone_type_spec.rb
index 5c2ae5cea3c..f00acb3f7cf 100644
--- a/spec/graphql/types/milestone_type_spec.rb
+++ b/spec/graphql/types/milestone_type_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GitlabSchema.types['Milestone'] do
it 'has the expected fields' do
expected_fields = %w[
- id iid title description state web_path
+ id iid title description state expired web_path
due_date start_date created_at updated_at
project_milestone group_milestone subgroup_milestone
stats
diff --git a/spec/graphql/types/notes/discussion_type_spec.rb b/spec/graphql/types/notes/discussion_type_spec.rb
index 37ed861d069..5290c1e2eb6 100644
--- a/spec/graphql/types/notes/discussion_type_spec.rb
+++ b/spec/graphql/types/notes/discussion_type_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe GitlabSchema.types['Discussion'] do
resolved
resolved_at
resolved_by
+ noteable
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/notes/noteable_type_spec.rb b/spec/graphql/types/notes/noteable_interface_spec.rb
index fad24c6fed4..be2c30aac72 100644
--- a/spec/graphql/types/notes/noteable_type_spec.rb
+++ b/spec/graphql/types/notes/noteable_interface_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::Notes::NoteableType do
+RSpec.describe Types::Notes::NoteableInterface do
it 'exposes the expected fields' do
expected_fields = %i[
discussions
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index 0f7cadbd4a7..a22110e8338 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['Project'] do
include GraphqlHelpers
+ include Ci::TemplateHelpers
specify { expect(described_class).to expose_permissions_using(Types::PermissionTypes::Project) }
@@ -38,6 +39,61 @@ RSpec.describe GitlabSchema.types['Project'] do
expect(described_class).to include_graphql_fields(*expected_fields)
end
+ describe 'container_registry_enabled' do
+ let_it_be(:project, reload: true) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ containerRegistryEnabled
+ }
+ }
+ )
+ end
+
+ subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
+
+ context 'with `enabled` visibility' do
+ before do
+ project.project_feature.update_column(:container_registry_access_level, ProjectFeature::ENABLED)
+ end
+
+ context 'with non member user' do
+ it 'returns true' do
+ expect(subject.dig('data', 'project', 'containerRegistryEnabled')).to eq(true)
+ end
+ end
+ end
+
+ context 'with `private` visibility' do
+ before do
+ project.project_feature.update_column(:container_registry_access_level, ProjectFeature::PRIVATE)
+ end
+
+ context 'with reporter user' do
+ before do
+ project.add_reporter(user)
+ end
+
+ it 'returns true' do
+ expect(subject.dig('data', 'project', 'containerRegistryEnabled')).to eq(true)
+ end
+ end
+
+ context 'with guest user' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'returns false' do
+ expect(subject.dig('data', 'project', 'containerRegistryEnabled')).to eq(false)
+ end
+ end
+ end
+ end
+
describe 'sast_ci_configuration' do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
@@ -103,15 +159,14 @@ RSpec.describe GitlabSchema.types['Project'] do
subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
it "returns the project's sast configuration for global variables" do
- secure_analyzers_prefix = subject.dig('data', 'project', 'sastCiConfiguration', 'global', 'nodes').first
- expect(secure_analyzers_prefix['type']).to eq('string')
- expect(secure_analyzers_prefix['field']).to eq('SECURE_ANALYZERS_PREFIX')
- expect(secure_analyzers_prefix['label']).to eq('Image prefix')
- expect(secure_analyzers_prefix['defaultValue'])
- .to eq('registry.gitlab.com/gitlab-org/security-products/analyzers')
- expect(secure_analyzers_prefix['value']).to eq('registry.gitlab.com/gitlab-org/security-products/analyzers')
- expect(secure_analyzers_prefix['size']).to eq('LARGE')
- expect(secure_analyzers_prefix['options']).to be_nil
+ secure_analyzers = subject.dig('data', 'project', 'sastCiConfiguration', 'global', 'nodes').first
+ expect(secure_analyzers['type']).to eq('string')
+ expect(secure_analyzers['field']).to eq('SECURE_ANALYZERS_PREFIX')
+ expect(secure_analyzers['label']).to eq('Image prefix')
+ expect(secure_analyzers['defaultValue']).to eq(secure_analyzers_prefix)
+ expect(secure_analyzers['value']).to eq(secure_analyzers_prefix)
+ expect(secure_analyzers['size']).to eq('LARGE')
+ expect(secure_analyzers['options']).to be_nil
end
it "returns the project's sast configuration for pipeline variables" do
@@ -387,4 +442,11 @@ RSpec.describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_type(Types::Ci::TemplateType) }
it { is_expected.to have_graphql_arguments(:name) }
end
+
+ describe 'ci_job_token_scope field' do
+ subject { described_class.fields['ciJobTokenScope'] }
+
+ it { is_expected.to have_graphql_type(Types::Ci::JobTokenScopeType) }
+ it { is_expected.to have_graphql_resolver(Resolvers::Ci::JobTokenScopeResolver) }
+ end
end
diff --git a/spec/graphql/types/projects/service_type_spec.rb b/spec/graphql/types/projects/service_type_spec.rb
index 567bdfaec24..cb09f1ca6cc 100644
--- a/spec/graphql/types/projects/service_type_spec.rb
+++ b/spec/graphql/types/projects/service_type_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Types::Projects::ServiceType do
describe ".resolve_type" do
it 'resolves the corresponding type for objects' do
- expect(described_class.resolve_type(build(:jira_service), {})).to eq(Types::Projects::Services::JiraServiceType)
+ expect(described_class.resolve_type(build(:jira_integration), {})).to eq(Types::Projects::Services::JiraServiceType)
expect(described_class.resolve_type(build(:service), {})).to eq(Types::Projects::Services::BaseServiceType)
expect(described_class.resolve_type(build(:drone_ci_integration), {})).to eq(Types::Projects::Services::BaseServiceType)
expect(described_class.resolve_type(build(:custom_issue_tracker_integration), {})).to eq(Types::Projects::Services::BaseServiceType)
diff --git a/spec/graphql/types/projects/services_enum_spec.rb b/spec/graphql/types/projects/services_enum_spec.rb
index 39c2dcd07f6..00427e1d580 100644
--- a/spec/graphql/types/projects/services_enum_spec.rb
+++ b/spec/graphql/types/projects/services_enum_spec.rb
@@ -8,6 +8,6 @@ RSpec.describe GitlabSchema.types['ServiceType'] do
end
def available_services_enum
- ::Integration.available_services_types(include_dev: false).map(&:underscore).map(&:upcase)
+ ::Integration.available_integration_types(include_dev: false).map(&:underscore).map(&:upcase)
end
end
diff --git a/spec/graphql/types/query_complexity_type_spec.rb b/spec/graphql/types/query_complexity_type_spec.rb
new file mode 100644
index 00000000000..6b2330f2b13
--- /dev/null
+++ b/spec/graphql/types/query_complexity_type_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['QueryComplexity'] do
+ include GraphqlHelpers
+
+ specify do
+ expect(described_class).to have_graphql_fields(:limit, :score).only
+ end
+
+ it 'works when executed' do
+ query = <<-GQL
+ query {
+ queryComplexity {
+ score
+ limit
+ }
+
+ currentUser {
+ name
+ }
+ }
+ GQL
+
+ query_result = run_with_clean_state(query).to_h
+
+ data = graphql_dig_at(query_result, :data, :queryComplexity)
+
+ expect(data).to include(
+ 'score' => be > 0,
+ 'limit' => GitlabSchema::DEFAULT_MAX_COMPLEXITY
+ )
+ end
+end
diff --git a/spec/graphql/types/release_asset_link_type_spec.rb b/spec/graphql/types/release_asset_link_type_spec.rb
index 6800d5459c4..0c903b8d27a 100644
--- a/spec/graphql/types/release_asset_link_type_spec.rb
+++ b/spec/graphql/types/release_asset_link_type_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe GitlabSchema.types['ReleaseAssetLink'] do
it 'has the expected fields' do
expected_fields = %w[
- id name url external link_type direct_asset_url
+ id name url external link_type direct_asset_url direct_asset_path
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/snippets/blob_type_spec.rb b/spec/graphql/types/snippets/blob_type_spec.rb
index 60c0db8e551..e20b001ba7f 100644
--- a/spec/graphql/types/snippets/blob_type_spec.rb
+++ b/spec/graphql/types/snippets/blob_type_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe GitlabSchema.types['SnippetBlob'] do
include GraphqlHelpers
it 'has the correct fields' do
- expected_fields = [:rich_data, :plain_data,
+ expected_fields = [:rich_data, :plain_data, :raw_plain_data,
:raw_path, :size, :binary, :name, :path,
:simple_viewer, :rich_viewer, :mode, :external_storage,
:rendered_as_text]
@@ -18,6 +18,7 @@ RSpec.describe GitlabSchema.types['SnippetBlob'] do
{
'richData' => be_nullable,
'plainData' => be_nullable,
+ 'rawPlainData' => be_nullable,
'rawPath' => be_non_null,
'size' => be_non_null,
'binary' => be_non_null,
diff --git a/spec/haml_lint/linter/documentation_links_spec.rb b/spec/haml_lint/linter/documentation_links_spec.rb
index 22c406de57a..75002097d69 100644
--- a/spec/haml_lint/linter/documentation_links_spec.rb
+++ b/spec/haml_lint/linter/documentation_links_spec.rb
@@ -10,30 +10,30 @@ RSpec.describe HamlLint::Linter::DocumentationLinks do
shared_examples 'link validation rules' do |link_pattern|
context 'when link_to points to the existing file path' do
- let(:haml) { "= link_to 'Description', #{link_pattern}('README.md')" }
+ let(:haml) { "= link_to 'Description', #{link_pattern}('index.md')" }
it { is_expected.not_to report_lint }
end
context 'when link_to points to the existing file with valid anchor' do
- let(:haml) { "= link_to 'Description', #{link_pattern}('README.md', anchor: 'overview'), target: '_blank'" }
+ let(:haml) { "= link_to 'Description', #{link_pattern}('index.md', anchor: 'overview'), target: '_blank'" }
it { is_expected.not_to report_lint }
end
context 'when link_to points to the existing file path without .md extension' do
- let(:haml) { "= link_to 'Description', #{link_pattern}('README')" }
+ let(:haml) { "= link_to 'Description', #{link_pattern}('index')" }
it { is_expected.not_to report_lint }
end
context 'when anchor is not correct' do
- let(:haml) { "= link_to 'Description', #{link_pattern}('README.md', anchor: 'wrong')" }
+ let(:haml) { "= link_to 'Description', #{link_pattern}('index.md', anchor: 'wrong')" }
it { is_expected.to report_lint }
context "when #{link_pattern} has multiple options" do
- let(:haml) { "= link_to 'Description', #{link_pattern}('README.md', key: :value, anchor: 'wrong')" }
+ let(:haml) { "= link_to 'Description', #{link_pattern}('index.md', key: :value, anchor: 'wrong')" }
it { is_expected.to report_lint }
end
@@ -58,7 +58,7 @@ RSpec.describe HamlLint::Linter::DocumentationLinks do
end
context 'when anchor belongs to a different element' do
- let(:haml) { "= link_to 'Description', #{link_pattern}('README.md'), target: (anchor: 'blank')" }
+ let(:haml) { "= link_to 'Description', #{link_pattern}('index.md'), target: (anchor: 'blank')" }
it { is_expected.not_to report_lint }
end
@@ -82,7 +82,7 @@ RSpec.describe HamlLint::Linter::DocumentationLinks do
end
context 'when the second link is invalid' do
- let(:haml) { ".data-form{ data: { url: #{link_pattern}('README.md'), wrong_url: #{link_pattern}('wrong.md') } }" }
+ let(:haml) { ".data-form{ data: { url: #{link_pattern}('index.md'), wrong_url: #{link_pattern}('wrong.md') } }" }
it { is_expected.to report_lint }
end
diff --git a/spec/helpers/admin/user_actions_helper_spec.rb b/spec/helpers/admin/user_actions_helper_spec.rb
index 7ccd9a4fe3e..d945b13cad6 100644
--- a/spec/helpers/admin/user_actions_helper_spec.rb
+++ b/spec/helpers/admin/user_actions_helper_spec.rb
@@ -29,13 +29,13 @@ RSpec.describe Admin::UserActionsHelper do
context 'the user is a standard user' do
let_it_be(:user) { create(:user) }
- it { is_expected.to contain_exactly("edit", "block", "deactivate", "delete", "delete_with_contributions") }
+ it { is_expected.to contain_exactly("edit", "block", "ban", "deactivate", "delete", "delete_with_contributions") }
end
context 'the user is an admin user' do
let_it_be(:user) { create(:user, :admin) }
- it { is_expected.to contain_exactly("edit", "block", "deactivate", "delete", "delete_with_contributions") }
+ it { is_expected.to contain_exactly("edit", "block", "ban", "deactivate", "delete", "delete_with_contributions") }
end
context 'the user is blocked by LDAP' do
@@ -59,7 +59,7 @@ RSpec.describe Admin::UserActionsHelper do
context 'the user is deactivated' do
let_it_be(:user) { create(:user, :deactivated) }
- it { is_expected.to contain_exactly("edit", "block", "activate", "delete", "delete_with_contributions") }
+ it { is_expected.to contain_exactly("edit", "block", "ban", "activate", "delete", "delete_with_contributions") }
end
context 'the user is locked' do
@@ -73,6 +73,7 @@ RSpec.describe Admin::UserActionsHelper do
is_expected.to contain_exactly(
"edit",
"block",
+ "ban",
"deactivate",
"unlock",
"delete",
@@ -81,6 +82,12 @@ RSpec.describe Admin::UserActionsHelper do
}
end
+ context 'the user is banned' do
+ let_it_be(:user) { create(:user, :banned) }
+
+ it { is_expected.to contain_exactly("edit", "unban", "delete", "delete_with_contributions") }
+ end
+
context 'the current_user does not have permission to delete the user' do
let_it_be(:user) { build(:user) }
@@ -88,7 +95,7 @@ RSpec.describe Admin::UserActionsHelper do
allow(helper).to receive(:can?).with(current_user, :destroy_user, user).and_return(false)
end
- it { is_expected.to contain_exactly("edit", "block", "deactivate") }
+ it { is_expected.to contain_exactly("edit", "block", "ban", "deactivate") }
end
context 'the user is a sole owner of a group' do
@@ -99,7 +106,31 @@ RSpec.describe Admin::UserActionsHelper do
group.add_owner(user)
end
- it { is_expected.to contain_exactly("edit", "block", "deactivate") }
+ it { is_expected.to contain_exactly("edit", "block", "ban", "deactivate") }
+ end
+
+ context 'the user is a bot' do
+ let_it_be(:user) { create(:user, :bot) }
+
+ it { is_expected.to match_array([]) }
+ end
+
+ context 'when `ban_user_feature_flag` is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ context 'the user is a standard user' do
+ let_it_be(:user) { create(:user) }
+
+ it { is_expected.not_to include("ban") }
+ end
+
+ context 'the user is banned' do
+ let_it_be(:user) { create(:user, :banned) }
+
+ it { is_expected.not_to include("unban") }
+ end
end
end
end
diff --git a/spec/helpers/analytics/unique_visits_helper_spec.rb b/spec/helpers/analytics/unique_visits_helper_spec.rb
deleted file mode 100644
index b4b370c169d..00000000000
--- a/spec/helpers/analytics/unique_visits_helper_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-RSpec.describe Analytics::UniqueVisitsHelper do
- include Devise::Test::ControllerHelpers
-
- describe '#track_visit' do
- let(:target_id) { 'p_analytics_valuestream' }
- let(:current_user) { create(:user) }
-
- it 'does not track visit if user is not logged in' do
- expect_any_instance_of(Gitlab::Analytics::UniqueVisits).not_to receive(:track_visit)
-
- helper.track_visit(target_id)
- end
-
- it 'tracks visit if user is logged in' do
- sign_in(current_user)
-
- expect_any_instance_of(Gitlab::Analytics::UniqueVisits).to receive(:track_visit)
-
- helper.track_visit(target_id)
- end
-
- it 'tracks visit if user is not logged in, but has the cookie already' do
- helper.request.cookies[:visitor_id] = { value: SecureRandom.uuid, expires: 24.months }
-
- expect_any_instance_of(Gitlab::Analytics::UniqueVisits).to receive(:track_visit)
-
- helper.track_visit(target_id)
- end
- end
-end
diff --git a/spec/helpers/application_settings_helper_spec.rb b/spec/helpers/application_settings_helper_spec.rb
index 4c62b3e12c1..90bfb2e72e6 100644
--- a/spec/helpers/application_settings_helper_spec.rb
+++ b/spec/helpers/application_settings_helper_spec.rb
@@ -178,6 +178,26 @@ RSpec.describe ApplicationSettingsHelper do
end
end
+ describe '.valid_runner_registrars' do
+ subject { helper.valid_runner_registrars }
+
+ context 'when only admins are permitted to register runners' do
+ before do
+ stub_application_setting(valid_runner_registrars: [])
+ end
+
+ it { is_expected.to eq [] }
+ end
+
+ context 'when group and project users are permitted to register runners' do
+ before do
+ stub_application_setting(valid_runner_registrars: ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES)
+ end
+
+ it { is_expected.to eq ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES }
+ end
+ end
+
describe '.signup_enabled?' do
subject { helper.signup_enabled? }
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index 885569574a4..c48d609836d 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -67,8 +67,8 @@ RSpec.describe BlobHelper do
it 'passes on primary tracking attributes' do
parsed_link = Capybara.string(link).find_link('Edit')
- expect(parsed_link[:'data-track-event']).to eq("click_edit")
- expect(parsed_link[:'data-track-label']).to eq("Edit")
+ expect(parsed_link[:'data-track-action']).to eq("click_edit")
+ expect(parsed_link[:'data-track-label']).to eq("edit")
expect(parsed_link[:'data-track-property']).to eq(nil)
end
end
@@ -85,8 +85,8 @@ RSpec.describe BlobHelper do
it 'passes on secondary tracking attributes' do
parsed_link = Capybara.string(link).find_link('Edit')
- expect(parsed_link[:'data-track-event']).to eq("click_edit")
- expect(parsed_link[:'data-track-label']).to eq("Edit")
+ expect(parsed_link[:'data-track-action']).to eq("click_edit")
+ expect(parsed_link[:'data-track-label']).to eq("edit")
expect(parsed_link[:'data-track-property']).to eq("secondary")
end
end
@@ -332,8 +332,8 @@ RSpec.describe BlobHelper do
it 'passes on secondary tracking attributes' do
parsed_link = Capybara.string(link).find_link('Web IDE')
- expect(parsed_link[:'data-track-event']).to eq("click_edit_ide")
- expect(parsed_link[:'data-track-label']).to eq("Web IDE")
+ expect(parsed_link[:'data-track-action']).to eq("click_edit_ide")
+ expect(parsed_link[:'data-track-label']).to eq("web_ide")
expect(parsed_link[:'data-track-property']).to eq("secondary")
end
end
@@ -350,8 +350,8 @@ RSpec.describe BlobHelper do
it 'passes on primary tracking attributes' do
parsed_link = Capybara.string(link).find_link('Web IDE')
- expect(parsed_link[:'data-track-event']).to eq("click_edit_ide")
- expect(parsed_link[:'data-track-label']).to eq("Web IDE")
+ expect(parsed_link[:'data-track-action']).to eq("click_edit_ide")
+ expect(parsed_link[:'data-track-label']).to eq("web_ide")
expect(parsed_link[:'data-track-property']).to eq(nil)
end
end
diff --git a/spec/helpers/ci/pipeline_editor_helper_spec.rb b/spec/helpers/ci/pipeline_editor_helper_spec.rb
index 2287718db5a..3ce4657282e 100644
--- a/spec/helpers/ci/pipeline_editor_helper_spec.rb
+++ b/spec/helpers/ci/pipeline_editor_helper_spec.rb
@@ -40,12 +40,12 @@ RSpec.describe Ci::PipelineEditorHelper do
it 'returns pipeline editor data' do
expect(pipeline_editor_data).to eq({
"ci-config-path": project.ci_config_path_or_default,
- "ci-examples-help-page-path" => help_page_path('ci/examples/README'),
- "ci-help-page-path" => help_page_path('ci/README'),
+ "ci-examples-help-page-path" => help_page_path('ci/examples/index'),
+ "ci-help-page-path" => help_page_path('ci/index'),
"commit-sha" => project.commit.sha,
- "default-branch" => project.default_branch,
+ "default-branch" => project.default_branch_or_main,
"empty-state-illustration-path" => 'foo',
- "initial-branch-name": nil,
+ "initial-branch-name" => nil,
"lint-help-page-path" => help_page_path('ci/lint', anchor: 'validate-basic-logic-and-syntax'),
"needs-help-page-path" => help_page_path('ci/yaml/README', anchor: 'needs'),
"new-merge-request-path" => '/mock/project/-/merge_requests/new',
@@ -54,7 +54,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"project-path" => project.path,
"project-full-path" => project.full_path,
"project-namespace" => project.namespace.full_path,
- "runner-help-page-path" => help_page_path('ci/runners/README'),
+ "runner-help-page-path" => help_page_path('ci/runners/index'),
"total-branches" => project.repository.branches.length,
"yml-help-page-path" => help_page_path('ci/yaml/README')
})
@@ -67,12 +67,12 @@ RSpec.describe Ci::PipelineEditorHelper do
it 'returns pipeline editor data' do
expect(pipeline_editor_data).to eq({
"ci-config-path": project.ci_config_path_or_default,
- "ci-examples-help-page-path" => help_page_path('ci/examples/README'),
- "ci-help-page-path" => help_page_path('ci/README'),
+ "ci-examples-help-page-path" => help_page_path('ci/examples/index'),
+ "ci-help-page-path" => help_page_path('ci/index'),
"commit-sha" => '',
- "default-branch" => project.default_branch,
+ "default-branch" => project.default_branch_or_main,
"empty-state-illustration-path" => 'foo',
- "initial-branch-name": nil,
+ "initial-branch-name" => nil,
"lint-help-page-path" => help_page_path('ci/lint', anchor: 'validate-basic-logic-and-syntax'),
"needs-help-page-path" => help_page_path('ci/yaml/README', anchor: 'needs'),
"new-merge-request-path" => '/mock/project/-/merge_requests/new',
@@ -81,11 +81,27 @@ RSpec.describe Ci::PipelineEditorHelper do
"project-path" => project.path,
"project-full-path" => project.full_path,
"project-namespace" => project.namespace.full_path,
- "runner-help-page-path" => help_page_path('ci/runners/README'),
+ "runner-help-page-path" => help_page_path('ci/runners/index'),
"total-branches" => 0,
"yml-help-page-path" => help_page_path('ci/yaml/README')
})
end
end
+
+ context 'with a non-default branch name' do
+ let(:user) { create(:user) }
+
+ before do
+ create_commit('Message', project, user, 'feature')
+ controller.params[:branch_name] = 'feature'
+ end
+
+ it 'returns correct values' do
+ latest_feature_sha = project.repository.commit('feature').sha
+
+ expect(pipeline_editor_data['initial-branch-name']).to eq('feature')
+ expect(pipeline_editor_data['commit-sha']).to eq(latest_feature_sha)
+ end
+ end
end
end
diff --git a/spec/helpers/ci/runners_helper_spec.rb b/spec/helpers/ci/runners_helper_spec.rb
index 94d4d620de9..40927d44e24 100644
--- a/spec/helpers/ci/runners_helper_spec.rb
+++ b/spec/helpers/ci/runners_helper_spec.rb
@@ -3,6 +3,12 @@
require 'spec_helper'
RSpec.describe Ci::RunnersHelper do
+ let_it_be(:user, refind: true) { create(:user) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
describe '#runner_status_icon', :clean_gitlab_redis_cache do
it "returns - not contacted yet" do
runner = create(:ci_runner)
@@ -90,28 +96,28 @@ RSpec.describe Ci::RunnersHelper do
context 'when project has runners' do
it 'returns the correct value for is_enabled' do
- data = toggle_shared_runners_settings_data(project_with_runners)
+ data = helper.toggle_shared_runners_settings_data(project_with_runners)
expect(data[:is_enabled]).to eq("true")
end
end
context 'when project does not have runners' do
it 'returns the correct value for is_enabled' do
- data = toggle_shared_runners_settings_data(project_without_runners)
+ data = helper.toggle_shared_runners_settings_data(project_without_runners)
expect(data[:is_enabled]).to eq("false")
end
end
context 'for all projects' do
it 'returns the update path for toggling the shared runners setting' do
- data = toggle_shared_runners_settings_data(project_with_runners)
+ data = helper.toggle_shared_runners_settings_data(project_with_runners)
expect(data[:update_path]).to eq(toggle_shared_runners_project_runners_path(project_with_runners))
end
it 'returns false for is_disabled_and_unoverridable when project has no group' do
project = create(:project)
- data = toggle_shared_runners_settings_data(project)
+ data = helper.toggle_shared_runners_settings_data(project)
expect(data[:is_disabled_and_unoverridable]).to eq("false")
end
@@ -129,7 +135,7 @@ RSpec.describe Ci::RunnersHelper do
project = create(:project, group: group)
allow(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
- data = toggle_shared_runners_settings_data(project)
+ data = helper.toggle_shared_runners_settings_data(project)
expect(data[:is_disabled_and_unoverridable]).to eq(is_disabled_and_unoverridable)
end
end
diff --git a/spec/helpers/clusters_helper_spec.rb b/spec/helpers/clusters_helper_spec.rb
index 8c738141063..f64afa1ed71 100644
--- a/spec/helpers/clusters_helper_spec.rb
+++ b/spec/helpers/clusters_helper_spec.rb
@@ -75,6 +75,13 @@ RSpec.describe ClustersHelper do
it 'displays project path' do
expect(subject[:project_path]).to eq(project.full_path)
end
+
+ it 'generates docs urls' do
+ expect(subject[:agent_docs_url]).to eq(help_page_path('user/clusters/agent/index'))
+ expect(subject[:install_docs_url]).to eq(help_page_path('administration/clusters/kas'))
+ expect(subject[:get_started_docs_url]).to eq(help_page_path('user/clusters/agent/index', anchor: 'define-a-configuration-repository'))
+ expect(subject[:integration_docs_url]).to eq(help_page_path('user/clusters/agent/index', anchor: 'get-started-with-gitops-and-the-gitlab-agent'))
+ end
end
describe '#js_clusters_list_data' do
diff --git a/spec/helpers/commits_helper_spec.rb b/spec/helpers/commits_helper_spec.rb
index 4e94636ba45..34445d26258 100644
--- a/spec/helpers/commits_helper_spec.rb
+++ b/spec/helpers/commits_helper_spec.rb
@@ -321,4 +321,13 @@ RSpec.describe CommitsHelper do
it { is_expected.to include(pipeline.cache_key) }
end
end
+
+ describe "#commit_path_template" do
+ let(:project) { build(:project) }
+ let(:expected_path) { "/#{project.full_path}/-/commit/$COMMIT_SHA" }
+
+ subject { helper.commit_path_template(project) }
+
+ it { is_expected.to eq(expected_path) }
+ end
end
diff --git a/spec/helpers/diff_helper_spec.rb b/spec/helpers/diff_helper_spec.rb
index dfea1020c52..29708f10de4 100644
--- a/spec/helpers/diff_helper_spec.rb
+++ b/spec/helpers/diff_helper_spec.rb
@@ -293,23 +293,22 @@ RSpec.describe DiffHelper do
describe '#render_overflow_warning?' do
using RSpec::Parameterized::TableSyntax
- let(:diffs_collection) { instance_double(Gitlab::Diff::FileCollection::MergeRequestDiff, raw_diff_files: diff_files) }
+ let(:diffs_collection) { instance_double(Gitlab::Diff::FileCollection::MergeRequestDiff, raw_diff_files: diff_files, overflow?: false) }
let(:diff_files) { Gitlab::Git::DiffCollection.new(files) }
let(:safe_file) { { too_large: false, diff: '' } }
let(:large_file) { { too_large: true, diff: '' } }
let(:files) { [safe_file, safe_file] }
- before do
- allow(diff_files).to receive(:overflow?).and_return(false)
- allow(diff_files).to receive(:overflow_max_bytes?).and_return(false)
- allow(diff_files).to receive(:overflow_max_files?).and_return(false)
- allow(diff_files).to receive(:overflow_max_lines?).and_return(false)
- allow(diff_files).to receive(:collapsed_safe_bytes?).and_return(false)
- allow(diff_files).to receive(:collapsed_safe_files?).and_return(false)
- allow(diff_files).to receive(:collapsed_safe_lines?).and_return(false)
- end
-
context 'when no limits are hit' do
+ before do
+ allow(diff_files).to receive(:overflow_max_bytes?).and_return(false)
+ allow(diff_files).to receive(:overflow_max_files?).and_return(false)
+ allow(diff_files).to receive(:overflow_max_lines?).and_return(false)
+ allow(diff_files).to receive(:collapsed_safe_bytes?).and_return(false)
+ allow(diff_files).to receive(:collapsed_safe_files?).and_return(false)
+ allow(diff_files).to receive(:collapsed_safe_lines?).and_return(false)
+ end
+
it 'returns false and does not log any overflow events' do
expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_collection_limits)
expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_single_file_limits)
@@ -343,7 +342,7 @@ RSpec.describe DiffHelper do
context 'when the file collection has an overflow' do
before do
- allow(diff_files).to receive(:overflow?).and_return(true)
+ allow(diffs_collection).to receive(:overflow?).and_return(true)
end
it 'returns true and only logs all the correct collection overflow event' do
@@ -405,7 +404,7 @@ RSpec.describe DiffHelper do
it "returns a valid URL" do
allow(helper).to receive(:safe_params).and_return(params)
- expect(subject).to match(/foo\/bar\/-\/commit\/#{commit.sha}\/diff_for_path/)
+ expect(subject).to match(%r{foo/bar/-/commit/#{commit.sha}/diff_for_path})
end
end
diff --git a/spec/helpers/emails_helper_spec.rb b/spec/helpers/emails_helper_spec.rb
index 58ed5901d45..956c19f54d1 100644
--- a/spec/helpers/emails_helper_spec.rb
+++ b/spec/helpers/emails_helper_spec.rb
@@ -238,16 +238,16 @@ RSpec.describe EmailsHelper do
it 'returns the default header logo' do
create :appearance, header_logo: nil
- expect(header_logo).to eq(
- %{<img alt="GitLab" src="/images/mailers/gitlab_header_logo.gif" width="55" height="50" />}
+ expect(header_logo).to match(
+ %r{<img alt="GitLab" src="/images/mailers/gitlab_header_logo\.(?:gif|png)" width="\d+" height="\d+" />}
)
end
end
context 'there is no brand item' do
it 'returns the default header logo' do
- expect(header_logo).to eq(
- %{<img alt="GitLab" src="/images/mailers/gitlab_header_logo.gif" width="55" height="50" />}
+ expect(header_logo).to match(
+ %r{<img alt="GitLab" src="/images/mailers/gitlab_header_logo\.(?:gif|png)" width="\d+" height="\d+" />}
)
end
end
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index 96869fcc777..22867a5b652 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -122,7 +122,7 @@ RSpec.describe EnvironmentsHelper do
end
context 'has_managed_prometheus' do
- context 'without prometheus service' do
+ context 'without prometheus integration' do
it "doesn't have managed prometheus" do
expect(metrics_data).to include(
'has_managed_prometheus' => 'false'
@@ -130,12 +130,12 @@ RSpec.describe EnvironmentsHelper do
end
end
- context 'with prometheus service' do
- let_it_be(:prometheus_service) { create(:prometheus_service, project: project) }
+ context 'with prometheus integration' do
+ let_it_be(:prometheus_integration) { create(:prometheus_integration, project: project) }
- context 'when manual prometheus service is active' do
+ context 'when manual prometheus integration is active' do
it "doesn't have managed prometheus" do
- prometheus_service.update!(manual_configuration: true)
+ prometheus_integration.update!(manual_configuration: true)
expect(metrics_data).to include(
'has_managed_prometheus' => 'false'
@@ -143,9 +143,9 @@ RSpec.describe EnvironmentsHelper do
end
end
- context 'when prometheus service is inactive' do
+ context 'when prometheus integration is inactive' do
it "doesn't have managed prometheus" do
- prometheus_service.update!(manual_configuration: false)
+ prometheus_integration.update!(manual_configuration: false)
expect(metrics_data).to include(
'has_managed_prometheus' => 'false'
diff --git a/spec/helpers/gitlab_routing_helper_spec.rb b/spec/helpers/gitlab_routing_helper_spec.rb
index 40faf994ad2..a3f2b8fafa0 100644
--- a/spec/helpers/gitlab_routing_helper_spec.rb
+++ b/spec/helpers/gitlab_routing_helper_spec.rb
@@ -239,8 +239,9 @@ RSpec.describe GitlabRoutingHelper do
let(:blob) { snippet.blobs.first }
let(:ref) { 'snippet-test-ref' }
let(:args) { {} }
+ let(:path) { blob.path }
- subject { gitlab_raw_snippet_blob_url(snippet, blob.path, ref, **args) }
+ subject { gitlab_raw_snippet_blob_url(snippet, path, ref, **args) }
it_behaves_like 'snippet blob raw url'
@@ -248,7 +249,7 @@ RSpec.describe GitlabRoutingHelper do
let(:args) { { inline: true } }
let(:snippet) { personal_snippet }
- it { expect(subject).to eq("http://test.host/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}?inline=true") }
+ it { expect(subject).to eq("http://test.host/-/snippets/#{snippet.id}/raw/#{ref}/#{path}?inline=true") }
end
context 'without a ref' do
@@ -257,7 +258,17 @@ RSpec.describe GitlabRoutingHelper do
let(:expected_ref) { snippet.repository.root_ref }
it 'uses the root ref' do
- expect(subject).to eq("http://test.host/-/snippets/#{snippet.id}/raw/#{expected_ref}/#{blob.path}")
+ expect(subject).to eq("http://test.host/-/snippets/#{snippet.id}/raw/#{expected_ref}/#{path}")
+ end
+
+ context 'when snippet does not have a repository' do
+ let(:snippet) { create(:personal_snippet) }
+ let(:path) { 'example' }
+ let(:expected_ref) { Gitlab::DefaultBranch.value }
+
+ it 'uses the instance deafult branch' do
+ expect(subject).to eq("http://test.host/-/snippets/#{snippet.id}/raw/#{expected_ref}/#{path}")
+ end
end
end
end
diff --git a/spec/helpers/services_helper_spec.rb b/spec/helpers/integrations_helper_spec.rb
index 6dd872225ba..8e652d2f150 100644
--- a/spec/helpers/services_helper_spec.rb
+++ b/spec/helpers/integrations_helper_spec.rb
@@ -2,7 +2,23 @@
require 'spec_helper'
-RSpec.describe ServicesHelper do
+RSpec.describe IntegrationsHelper do
+ describe '#integration_event_description' do
+ subject(:description) { helper.integration_event_description(integration, 'merge_request_events') }
+
+ context 'when integration is Jira' do
+ let(:integration) { Integrations::Jira.new }
+
+ it { is_expected.to include('Jira') }
+ end
+
+ context 'when integration is Team City' do
+ let(:integration) { Integrations::Teamcity.new }
+
+ it { is_expected.to include('TeamCity') }
+ end
+ end
+
describe '#integration_form_data' do
let(:fields) do
[
@@ -36,8 +52,8 @@ RSpec.describe ServicesHelper do
subject { helper.integration_form_data(integration) }
- context 'Slack service' do
- let(:integration) { build(:slack_service) }
+ context 'with Slack integration' do
+ let(:integration) { build(:integrations_slack) }
it { is_expected.to include(*fields) }
it { is_expected.not_to include(*jira_fields) }
@@ -48,14 +64,14 @@ RSpec.describe ServicesHelper do
end
context 'Jira service' do
- let(:integration) { build(:jira_service) }
+ let(:integration) { build(:jira_integration) }
it { is_expected.to include(*fields, *jira_fields) }
end
end
describe '#scoped_reset_integration_path' do
- let(:integration) { build_stubbed(:jira_service) }
+ let(:integration) { build_stubbed(:jira_integration) }
let(:group) { nil }
subject { helper.scoped_reset_integration_path(integration, group: group) }
@@ -75,7 +91,7 @@ RSpec.describe ServicesHelper do
end
context 'when a new integration is not persisted' do
- let_it_be(:integration) { build(:jira_service) }
+ let_it_be(:integration) { build(:jira_integration) }
it 'returns an empty string' do
is_expected.to eq('')
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index a8a227c8ec4..96aba312ba3 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -294,7 +294,6 @@ RSpec.describe IssuesHelper do
expected = {
autocomplete_award_emojis_path: autocomplete_award_emojis_path,
- autocomplete_users_path: autocomplete_users_path(active: true, current_user: true, project_id: project.id, format: :json),
calendar_path: '#',
can_bulk_update: 'true',
can_edit: 'true',
@@ -313,8 +312,6 @@ RSpec.describe IssuesHelper do
max_attachment_size: number_to_human_size(Gitlab::CurrentSettings.max_attachment_size.megabytes),
new_issue_path: new_project_issue_path(project, issue: { milestone_id: finder.milestones.first.id }),
project_import_jira_path: project_import_jira_path(project),
- project_labels_path: project_labels_path(project, include_ancestor_groups: true, format: :json),
- project_milestones_path: project_milestones_path(project, format: :json),
project_path: project.full_path,
quick_actions_help_path: help_page_path('user/project/quick_actions'),
reset_path: new_issuable_address_project_path(project, issuable_type: 'issue'),
diff --git a/spec/helpers/namespaces_helper_spec.rb b/spec/helpers/namespaces_helper_spec.rb
index a8a918cbc74..68bc19cb429 100644
--- a/spec/helpers/namespaces_helper_spec.rb
+++ b/spec/helpers/namespaces_helper_spec.rb
@@ -195,26 +195,6 @@ RSpec.describe NamespacesHelper do
end
end
- describe '#cascading_namespace_settings_enabled?' do
- subject { helper.cascading_namespace_settings_enabled? }
-
- context 'when `cascading_namespace_settings` feature flag is enabled' do
- it 'returns `true`' do
- expect(subject).to be(true)
- end
- end
-
- context 'when `cascading_namespace_settings` feature flag is disabled' do
- before do
- stub_feature_flags(cascading_namespace_settings: false)
- end
-
- it 'returns `false`' do
- expect(subject).to be(false)
- end
- end
- end
-
describe '#cascading_namespace_settings_popover_data' do
attribute = :delayed_project_removal
diff --git a/spec/helpers/nav/new_dropdown_helper_spec.rb b/spec/helpers/nav/new_dropdown_helper_spec.rb
index dd860ce3180..e3d9bc5b174 100644
--- a/spec/helpers/nav/new_dropdown_helper_spec.rb
+++ b/spec/helpers/nav/new_dropdown_helper_spec.rb
@@ -13,7 +13,6 @@ RSpec.describe Nav::NewDropdownHelper do
let(:with_can_create_project) { false }
let(:with_can_create_group) { false }
let(:with_can_create_snippet) { false }
- let(:with_new_repo_experiment) { :control }
let(:with_invite_members_experiment) { false }
let(:with_invite_members_experiment_enabled) { false }
@@ -29,7 +28,6 @@ RSpec.describe Nav::NewDropdownHelper do
end
before do
- stub_experiments(new_repo: with_new_repo_experiment)
allow(::Gitlab::Experimentation).to receive(:active?).with(:invite_members_new_dropdown) { with_invite_members_experiment }
allow(helper).to receive(:experiment_enabled?).with(:invite_members_new_dropdown) { with_invite_members_experiment_enabled }
allow(helper).to receive(:tracking_label) { 'test_tracking_label' }
@@ -43,19 +41,6 @@ RSpec.describe Nav::NewDropdownHelper do
allow(user).to receive(:can?).with(:create_snippet) { with_can_create_snippet }
end
- shared_examples 'new repo experiment shared example' do |title|
- let(:with_new_repo_experiment) { :candidate }
-
- it 'has experiment project title' do
- expect(subject[:menu_sections]).to match(
- expected_menu_section(
- title: title,
- menu_item: a_hash_including(title: 'New project/repository')
- )
- )
- end
- end
-
shared_examples 'invite member link shared example' do
it 'shows invite member link' do
expect(subject[:menu_sections]).to eq(
@@ -117,15 +102,13 @@ RSpec.describe Nav::NewDropdownHelper do
title: 'GitLab',
menu_item: ::Gitlab::Nav::TopNavMenuItem.build(
id: 'general_new_project',
- title: 'New project',
+ title: 'New project/repository',
href: '/projects/new',
- data: { track_experiment: 'new_repo', track_event: 'click_link_new_project', track_label: 'plus_menu_dropdown', qa_selector: 'global_new_project_link' }
+ data: { track_event: 'click_link_new_project', track_label: 'plus_menu_dropdown', qa_selector: 'global_new_project_link' }
)
)
)
end
-
- it_behaves_like 'new repo experiment shared example', 'GitLab'
end
context 'when can create group' do
@@ -193,15 +176,13 @@ RSpec.describe Nav::NewDropdownHelper do
title: 'This group',
menu_item: ::Gitlab::Nav::TopNavMenuItem.build(
id: 'new_project',
- title: 'New project',
+ title: 'New project/repository',
href: "/projects/new?namespace_id=#{group.id}",
- data: { track_experiment: 'new_repo', track_event: 'click_link_new_project_group', track_label: 'plus_menu_dropdown' }
+ data: { track_event: 'click_link_new_project_group', track_label: 'plus_menu_dropdown' }
)
)
)
end
-
- it_behaves_like 'new repo experiment shared example', 'This group'
end
context 'when can create subgroup' do
diff --git a/spec/helpers/nav/top_nav_helper_spec.rb b/spec/helpers/nav/top_nav_helper_spec.rb
index d87c751c62f..4d6da258536 100644
--- a/spec/helpers/nav/top_nav_helper_spec.rb
+++ b/spec/helpers/nav/top_nav_helper_spec.rb
@@ -143,7 +143,6 @@ RSpec.describe Nav::TopNavHelper do
css_class: 'qa-projects-dropdown',
data: {
track_event: 'click_dropdown',
- track_experiment: 'new_repo',
track_label: 'projects_dropdown'
},
icon: 'project',
@@ -539,10 +538,18 @@ RSpec.describe Nav::TopNavHelper do
end
context 'with new' do
- let(:with_new_view_model) { { id: 'test-new-view-model' } }
+ let(:with_new_view_model) { { menu_sections: [{ id: 'test-new-view-model' }] } }
it 'has new subview' do
- expect(subject[:views][:new]).to eq({ id: 'test-new-view-model' })
+ expect(subject[:views][:new]).to eq(with_new_view_model)
+ end
+ end
+
+ context 'with new and no menu_sections' do
+ let(:with_new_view_model) { { menu_sections: [] } }
+
+ it 'has new subview' do
+ expect(subject[:views][:new]).to be_nil
end
end
end
diff --git a/spec/helpers/operations_helper_spec.rb b/spec/helpers/operations_helper_spec.rb
index e1bd477bc75..1864f9fad15 100644
--- a/spec/helpers/operations_helper_spec.rb
+++ b/spec/helpers/operations_helper_spec.rb
@@ -20,19 +20,19 @@ RSpec.describe OperationsHelper do
allow(helper).to receive(:can?).with(user, :admin_operations, project) { true }
end
- context 'initial service configuration' do
- let_it_be(:prometheus_service) { PrometheusService.new(project: project) }
+ context 'initial integration configuration' do
+ let_it_be(:prometheus_integration) { ::Integrations::Prometheus.new(project: project) }
before do
- allow(project).to receive(:find_or_initialize_service).and_call_original
- allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return(prometheus_service)
+ allow(project).to receive(:find_or_initialize_integration).and_call_original
+ allow(project).to receive(:find_or_initialize_integration).with('prometheus').and_return(prometheus_integration)
end
it 'returns the correct values' do
expect(subject).to eq(
'alerts_setup_url' => help_page_path('operations/incident_management/integrations.md', anchor: 'configuration'),
'alerts_usage_url' => project_alert_management_index_path(project),
- 'prometheus_form_path' => project_service_path(project, prometheus_service),
+ 'prometheus_form_path' => project_service_path(project, prometheus_integration),
'prometheus_reset_key_path' => reset_alerting_token_project_settings_operations_path(project),
'prometheus_authorization_key' => nil,
'prometheus_api_url' => nil,
@@ -53,15 +53,15 @@ RSpec.describe OperationsHelper do
end
context 'with external Prometheus configured' do
- let_it_be(:prometheus_service, reload: true) do
- create(:prometheus_service, project: project)
+ let_it_be(:prometheus_integration, reload: true) do
+ create(:prometheus_integration, project: project)
end
context 'with external Prometheus enabled' do
it 'returns the correct values' do
expect(subject).to include(
'prometheus_activated' => 'true',
- 'prometheus_api_url' => prometheus_service.api_url
+ 'prometheus_api_url' => prometheus_integration.api_url
)
end
end
@@ -71,7 +71,7 @@ RSpec.describe OperationsHelper do
it 'returns the correct values' do
expect(subject).to include(
'prometheus_activated' => 'false',
- 'prometheus_api_url' => prometheus_service.api_url
+ 'prometheus_api_url' => prometheus_integration.api_url
)
end
end
@@ -79,11 +79,11 @@ RSpec.describe OperationsHelper do
let(:cluster_managed) { false }
before do
- allow(prometheus_service)
+ allow(prometheus_integration)
.to receive(:prometheus_available?)
.and_return(cluster_managed)
- prometheus_service.update!(manual_configuration: false)
+ prometheus_integration.update!(manual_configuration: false)
end
include_examples 'Prometheus is disabled'
@@ -101,7 +101,7 @@ RSpec.describe OperationsHelper do
it 'returns the correct values' do
expect(subject).to include(
'prometheus_authorization_key' => project_alerting_setting.token,
- 'prometheus_api_url' => prometheus_service.api_url
+ 'prometheus_api_url' => prometheus_integration.api_url
)
end
end
diff --git a/spec/helpers/packages_helper_spec.rb b/spec/helpers/packages_helper_spec.rb
index 93d32cb8418..8b3c8411fbd 100644
--- a/spec/helpers/packages_helper_spec.rb
+++ b/spec/helpers/packages_helper_spec.rb
@@ -66,6 +66,7 @@ RSpec.describe PackagesHelper do
end
describe '#show_cleanup_policy_on_alert' do
+ let_it_be(:user) { create(:user) }
let_it_be_with_reload(:container_repository) { create(:container_repository) }
subject { helper.show_cleanup_policy_on_alert(project.reload) }
@@ -203,9 +204,10 @@ RSpec.describe PackagesHelper do
with_them do
before do
+ allow(helper).to receive(:current_user).and_return(user)
allow(Gitlab).to receive(:com?).and_return(com)
stub_config(registry: { enabled: config_registry })
- allow(project).to receive(:container_registry_enabled).and_return(project_registry)
+ allow(project).to receive(:feature_available?).with(:container_registry, user).and_return(project_registry)
stub_application_setting(container_expiration_policies_enable_historic_entries: historic_entries)
stub_feature_flags(container_expiration_policies_historic_entry: false)
stub_feature_flags(container_expiration_policies_historic_entry: project) if historic_entry
diff --git a/spec/helpers/projects/alert_management_helper_spec.rb b/spec/helpers/projects/alert_management_helper_spec.rb
index 9895d06f93a..2450f7838b3 100644
--- a/spec/helpers/projects/alert_management_helper_spec.rb
+++ b/spec/helpers/projects/alert_management_helper_spec.rb
@@ -41,12 +41,12 @@ RSpec.describe Projects::AlertManagementHelper do
end
end
- context 'with prometheus service' do
- let_it_be(:prometheus_service) { create(:prometheus_service, project: project) }
+ context 'with prometheus integration' do
+ let_it_be(:prometheus_integration) { create(:prometheus_integration, project: project) }
- context 'when manual prometheus service is active' do
+ context 'when manual prometheus integration is active' do
it "enables alert management and doesn't show managed prometheus" do
- prometheus_service.update!(manual_configuration: true)
+ prometheus_integration.update!(manual_configuration: true)
expect(data).to include(
'alert-management-enabled' => 'true'
@@ -69,9 +69,9 @@ RSpec.describe Projects::AlertManagementHelper do
end
end
- context 'when prometheus service is inactive' do
+ context 'when prometheus integration is inactive' do
it 'disables alert management and hides managed prometheus' do
- prometheus_service.update!(manual_configuration: false)
+ prometheus_integration.update!(manual_configuration: false)
expect(data).to include(
'alert-management-enabled' => 'false'
@@ -83,7 +83,7 @@ RSpec.describe Projects::AlertManagementHelper do
end
end
- context 'without prometheus service' do
+ context 'without prometheus integration' do
it "doesn't have managed prometheus" do
expect(data).to include(
'has-managed-prometheus' => 'false'
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 1804a9a99cf..75e80f5edbc 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -138,7 +138,7 @@ RSpec.describe ProjectsHelper do
end
end
- describe "#project_list_cache_key", :clean_gitlab_redis_shared_state do
+ describe "#project_list_cache_key", :clean_gitlab_redis_cache do
let(:project) { project_with_repo }
before do
@@ -876,6 +876,37 @@ RSpec.describe ProjectsHelper do
end
end
+ describe '#show_terraform_banner?' do
+ let_it_be(:ruby) { create(:programming_language, name: 'Ruby') }
+ let_it_be(:hcl) { create(:programming_language, name: 'HCL') }
+
+ subject { helper.show_terraform_banner?(project) }
+
+ before do
+ create(:repository_language, project: project, programming_language: language, share: 1)
+ end
+
+ context 'the project does not contain terraform files' do
+ let(:language) { ruby }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'the project contains terraform files' do
+ let(:language) { hcl }
+
+ it { is_expected.to be_truthy }
+
+ context 'the project already has a terraform state' do
+ before do
+ create(:terraform_state, project: project)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+
describe '#project_title' do
subject { helper.project_title(project) }
diff --git a/spec/helpers/registrations_helper_spec.rb b/spec/helpers/registrations_helper_spec.rb
index fa647548b3c..b2f9a794cb3 100644
--- a/spec/helpers/registrations_helper_spec.rb
+++ b/spec/helpers/registrations_helper_spec.rb
@@ -3,30 +3,6 @@
require 'spec_helper'
RSpec.describe RegistrationsHelper do
- using RSpec::Parameterized::TableSyntax
-
- describe '#social_signin_enabled?' do
- before do
- allow(::Gitlab).to receive(:dev_env_or_com?).and_return(com)
- allow(view).to receive(:omniauth_enabled?).and_return(omniauth_enabled)
- allow(view).to receive(:button_based_providers_enabled?).and_return(button_based_providers_enabled)
- allow(view).to receive(:devise_mapping).and_return(double(omniauthable?: omniauthable))
- end
-
- subject { helper.social_signin_enabled? }
-
- where com: [true, false],
- omniauth_enabled: [true, false],
- omniauthable: [true, false],
- button_based_providers_enabled: [true, false]
-
- with_them do
- let(:result) { com && omniauth_enabled && button_based_providers_enabled && omniauthable }
-
- it { is_expected.to eq(result) }
- end
- end
-
describe '#signup_username_data_attributes' do
it 'has expected attributes' do
expect(helper.signup_username_data_attributes.keys).to include(:min_length, :min_length_message, :max_length, :max_length_message, :qa_selector)
diff --git a/spec/helpers/releases_helper_spec.rb b/spec/helpers/releases_helper_spec.rb
index e6bf91ceef6..69f66dc6488 100644
--- a/spec/helpers/releases_helper_spec.rb
+++ b/spec/helpers/releases_helper_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe ReleasesHelper do
describe '#illustration' do
it 'returns the correct image path' do
- expect(helper.illustration).to match(/illustrations\/releases-(\w+)\.svg/)
+ expect(helper.illustration).to match(%r{illustrations/releases-(\w+)\.svg})
end
end
@@ -48,16 +48,6 @@ RSpec.describe ReleasesHelper do
it 'points new_release_path to the "New Release" page' do
expect(helper.data_for_releases_page[:new_release_path]).to eq(new_project_release_path(project))
end
-
- context 'when the "new_release_page" feature flag is disabled' do
- before do
- stub_feature_flags(new_release_page: false)
- end
-
- it 'points new_release_path to the "New Tag" page' do
- expect(helper.data_for_releases_page[:new_release_path]).to eq(new_project_tag_path(project))
- end
- end
end
end
@@ -107,4 +97,42 @@ RSpec.describe ReleasesHelper do
end
end
end
+
+ describe 'startup queries' do
+ describe 'use_startup_query_for_index_page?' do
+ it 'allows startup queries for non-paginated requests' do
+ allow(helper).to receive(:params).and_return({ unrelated_query_param: 'value' })
+
+ expect(helper.use_startup_query_for_index_page?).to be(true)
+ end
+
+ it 'disallows startup queries for requests paginated with a "before" cursor' do
+ allow(helper).to receive(:params).and_return({ unrelated_query_param: 'value', before: 'cursor' })
+
+ expect(helper.use_startup_query_for_index_page?).to be(false)
+ end
+
+ it 'disallows startup queries for requests paginated with an "after" cursor' do
+ allow(helper).to receive(:params).and_return({ unrelated_query_param: 'value', after: 'cursor' })
+
+ expect(helper.use_startup_query_for_index_page?).to be(false)
+ end
+ end
+
+ describe '#index_page_startup_query_variables' do
+ let_it_be(:project) { build(:project, namespace: create(:group)) }
+
+ before do
+ helper.instance_variable_set(:@project, project)
+ end
+
+ it 'returns the correct GraphQL variables for the startup query' do
+ expect(helper.index_page_startup_query_variables).to eq({
+ fullPath: project.full_path,
+ sort: 'RELEASED_AT_DESC',
+ first: 1
+ })
+ end
+ end
+ end
end
diff --git a/spec/helpers/sessions_helper_spec.rb b/spec/helpers/sessions_helper_spec.rb
index 027943aecee..816e43669bd 100644
--- a/spec/helpers/sessions_helper_spec.rb
+++ b/spec/helpers/sessions_helper_spec.rb
@@ -3,6 +3,42 @@
require 'spec_helper'
RSpec.describe SessionsHelper do
+ describe '#recently_confirmed_com?' do
+ subject { helper.recently_confirmed_com? }
+
+ context 'when on .com' do
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ end
+
+ it 'when flash notice is empty it is false' do
+ flash[:notice] = nil
+ expect(subject).to be false
+ end
+
+ it 'when flash notice is anything it is false' do
+ flash[:notice] = 'hooray!'
+ expect(subject).to be false
+ end
+
+ it 'when flash notice is devise confirmed message it is true' do
+ flash[:notice] = t(:confirmed, scope: [:devise, :confirmations])
+ expect(subject).to be true
+ end
+ end
+
+ context 'when not on .com' do
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(false)
+ end
+
+ it 'when flash notice is devise confirmed message it is false' do
+ flash[:notice] = t(:confirmed, scope: [:devise, :confirmations])
+ expect(subject).to be false
+ end
+ end
+ end
+
describe '#unconfirmed_email?' do
it 'returns true when the flash alert contains a devise failure unconfirmed message' do
flash[:alert] = t(:unconfirmed, scope: [:devise, :failure])
diff --git a/spec/helpers/user_callouts_helper_spec.rb b/spec/helpers/user_callouts_helper_spec.rb
index f68da45bb9a..90333cb0ad5 100644
--- a/spec/helpers/user_callouts_helper_spec.rb
+++ b/spec/helpers/user_callouts_helper_spec.rb
@@ -97,7 +97,17 @@ RSpec.describe UserCalloutsHelper do
allow(helper).to receive(:user_dismissed?).with(described_class::CUSTOMIZE_HOMEPAGE) { false }
end
- it { is_expected.to be true }
+ context 'when user is on the default dashboard' do
+ it { is_expected.to be true }
+ end
+
+ context 'when user is not on the default dashboard' do
+ before do
+ user.dashboard = 'stars'
+ end
+
+ it { is_expected.to be false }
+ end
end
context 'when user dismissed' do
diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb
index 862fd58df04..480b1e2a0de 100644
--- a/spec/helpers/users_helper_spec.rb
+++ b/spec/helpers/users_helper_spec.rb
@@ -364,4 +364,54 @@ RSpec.describe UsersHelper do
expect(data[:paths]).to match_schema('entities/admin_users_data_attributes_paths')
end
end
+
+ describe '#confirm_user_data' do
+ confirm_admin_user_path = '/admin/users/root/confirm'
+
+ before do
+ allow(helper).to receive(:confirm_admin_user_path).with(user).and_return(confirm_admin_user_path)
+ end
+
+ subject(:confirm_user_data) { helper.confirm_user_data(user) }
+
+ it 'sets `path` key correctly' do
+ expect(confirm_user_data[:path]).to eq(confirm_admin_user_path)
+ end
+
+ it 'sets `modal_attributes` key to valid json' do
+ expect(confirm_user_data[:modal_attributes]).to be_valid_json
+ end
+
+ context 'when `user.unconfirmed_email` is set' do
+ let(:user) { create(:user, unconfirmed_email: 'foo@bar.com') }
+
+ it 'sets `modal_attributes.messageHtml` correctly' do
+ expect(Gitlab::Json.parse(confirm_user_data[:modal_attributes])['messageHtml']).to eq('This user has an unconfirmed email address (foo@bar.com). You may force a confirmation.')
+ end
+ end
+
+ context 'when `user.unconfirmed_email` is not set' do
+ it 'sets `modal_attributes.messageHtml` correctly' do
+ expect(Gitlab::Json.parse(confirm_user_data[:modal_attributes])['messageHtml']).to eq('This user has an unconfirmed email address. You may force a confirmation.')
+ end
+ end
+ end
+
+ describe '#admin_user_actions_data_attributes' do
+ subject(:data) { helper.admin_user_actions_data_attributes(user) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(Admin::UserEntity).to receive(:represent).and_call_original
+ end
+
+ it 'user matches the serialized json' do
+ expect(data[:user]).to be_valid_json
+ expect(Admin::UserEntity).to have_received(:represent).with(user, hash_including({ current_user: user }))
+ end
+
+ it 'paths matches the schema' do
+ expect(data[:paths]).to match_schema('entities/admin_users_data_attributes_paths')
+ end
+ end
end
diff --git a/spec/initializers/100_patch_omniauth_saml_spec.rb b/spec/initializers/100_patch_omniauth_saml_spec.rb
index 3496eb4d680..de556cfa1e5 100644
--- a/spec/initializers/100_patch_omniauth_saml_spec.rb
+++ b/spec/initializers/100_patch_omniauth_saml_spec.rb
@@ -7,10 +7,11 @@ RSpec.describe 'OmniAuth::Strategies::SAML', type: :strategy do
let(:strategy) { [OmniAuth::Strategies::SAML, { idp_sso_target_url: idp_sso_target_url }] }
describe 'POST /users/auth/saml' do
- it 'redirects to the provider login page' do
+ it 'redirects to the provider login page', :aggregate_failures do
post '/users/auth/saml'
- expect(last_response).to redirect_to(/\A#{Regexp.quote(idp_sso_target_url)}/)
+ expect(last_response.status).to eq(302)
+ expect(last_response.location).to match(/\A#{Regexp.quote(idp_sso_target_url)}/)
end
it 'stores request ID during request phase' do
diff --git a/spec/initializers/attr_encrypted_no_db_connection_spec.rb b/spec/initializers/attr_encrypted_no_db_connection_spec.rb
index ad3d14ed7d4..34d9e182370 100644
--- a/spec/initializers/attr_encrypted_no_db_connection_spec.rb
+++ b/spec/initializers/attr_encrypted_no_db_connection_spec.rb
@@ -4,18 +4,20 @@ require 'spec_helper'
RSpec.describe 'GitLab monkey-patches to AttrEncrypted' do
describe '#attribute_instance_methods_as_symbols_available?' do
- it 'returns false' do
- expect(ActiveRecord::Base.__send__(:attribute_instance_methods_as_symbols_available?)).to be_falsy
- end
-
- it 'does not define virtual attributes' do
- klass = Class.new(ActiveRecord::Base) do
+ let(:klass) do
+ Class.new(ActiveRecord::Base) do
# We need some sort of table to work on
self.table_name = 'projects'
attr_encrypted :foo
end
+ end
+
+ it 'returns false' do
+ expect(ActiveRecord::Base.__send__(:attribute_instance_methods_as_symbols_available?)).to be_falsy
+ end
+ it 'does not define virtual attributes' do
instance = klass.new
aggregate_failures do
@@ -28,5 +30,11 @@ RSpec.describe 'GitLab monkey-patches to AttrEncrypted' do
end
end
end
+
+ it 'calls attr_changed? method with kwargs' do
+ obj = klass.new
+
+ expect(obj.foo_changed?).to eq(false)
+ end
end
end
diff --git a/spec/initializers/global_id_spec.rb b/spec/initializers/global_id_spec.rb
index 63bfa32d74f..4deb1833999 100644
--- a/spec/initializers/global_id_spec.rb
+++ b/spec/initializers/global_id_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe 'global_id' do
- it 'prepends `Gitlab::Patch::GlobalID`' do
- expect(GlobalID.ancestors).to include(Gitlab::Patch::GlobalID)
+ it 'prepends `Gitlab::Patch::GlobalId`' do
+ expect(GlobalID.ancestors).to include(Gitlab::Patch::GlobalId)
end
it 'patches GlobalID to find aliased models when a deprecation exists' do
diff --git a/spec/initializers/lograge_spec.rb b/spec/initializers/lograge_spec.rb
index 651b0c8a9b8..a1fd9be299b 100644
--- a/spec/initializers/lograge_spec.rb
+++ b/spec/initializers/lograge_spec.rb
@@ -200,6 +200,8 @@ RSpec.describe 'lograge', type: :request do
%w[
db_primary_wal_count
db_replica_wal_count
+ db_primary_wal_cached_count
+ db_replica_wal_cached_count
db_replica_count
db_replica_cached_count
db_primary_count
diff --git a/spec/initializers/mailer_retries_spec.rb b/spec/initializers/mailer_retries_spec.rb
index c1e56784ad9..a220188cc29 100644
--- a/spec/initializers/mailer_retries_spec.rb
+++ b/spec/initializers/mailer_retries_spec.rb
@@ -2,22 +2,8 @@
require 'spec_helper'
-RSpec.describe 'Mailer retries' do
- # We need to ensure that this runs through Sidekiq to take
- # advantage of the middleware. There is a Rails bug that means we
- # have to do some extra steps to make this happen:
- # https://github.com/rails/rails/issues/37270#issuecomment-553927324
- around do |example|
- descendants = ActiveJob::Base.descendants + [ActiveJob::Base]
- descendants.each(&:disable_test_adapter)
- ActiveJob::Base.queue_adapter = :sidekiq
-
- example.run
-
- descendants.each { |a| a.queue_adapter = :test }
- end
-
- it 'sets retries for mailers to 3', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332645' do
+RSpec.describe 'Mailer retries', :sidekiq_mailers do
+ it 'sets retries for mailers to 3' do
DeviseMailer.user_admin_approval(create(:user)).deliver_later
expect(Sidekiq::Queues['mailers'].first).to include('retry' => 3)
diff --git a/spec/javascripts/lib/utils/mock_data.js b/spec/javascripts/lib/utils/mock_data.js
index c2f79a32377..f1358986f2a 100644
--- a/spec/javascripts/lib/utils/mock_data.js
+++ b/spec/javascripts/lib/utils/mock_data.js
@@ -1 +1 @@
-export * from '../../../frontend/lib/utils/mock_data.js';
+export * from '../../../frontend/lib/utils/mock_data';
diff --git a/spec/lib/api/entities/basic_project_details_spec.rb b/spec/lib/api/entities/basic_project_details_spec.rb
new file mode 100644
index 00000000000..dc7c4fdce4e
--- /dev/null
+++ b/spec/lib/api/entities/basic_project_details_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::BasicProjectDetails do
+ let_it_be(:project) { create(:project) }
+
+ let(:current_user) { project.owner }
+
+ subject(:output) { described_class.new(project, current_user: current_user).as_json }
+
+ describe '#default_branch' do
+ it 'delegates to Project#default_branch_or_main' do
+ expect(project).to receive(:default_branch_or_main).twice.and_call_original
+
+ expect(output).to include(default_branch: project.default_branch_or_main)
+ end
+
+ context 'anonymous user' do
+ let(:current_user) { nil }
+
+ it 'is not included' do
+ expect(output.keys).not_to include(:default_branch)
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/entities/bulk_import_spec.rb b/spec/lib/api/entities/bulk_import_spec.rb
new file mode 100644
index 00000000000..2db6862b079
--- /dev/null
+++ b/spec/lib/api/entities/bulk_import_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::BulkImport do
+ let_it_be(:import) { create(:bulk_import) }
+
+ subject { described_class.new(import).as_json }
+
+ it 'has the correct attributes' do
+ expect(subject).to include(
+ :id,
+ :status,
+ :source_type,
+ :created_at,
+ :updated_at
+ )
+ end
+end
diff --git a/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
new file mode 100644
index 00000000000..adc8fdcdd9c
--- /dev/null
+++ b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::BulkImports::EntityFailure do
+ let_it_be(:failure) { create(:bulk_import_failure) }
+
+ subject { described_class.new(failure).as_json }
+
+ it 'has the correct attributes' do
+ expect(subject).to include(
+ :pipeline_class,
+ :pipeline_step,
+ :exception_class,
+ :correlation_id_value,
+ :created_at
+ )
+ end
+end
diff --git a/spec/lib/api/entities/bulk_imports/entity_spec.rb b/spec/lib/api/entities/bulk_imports/entity_spec.rb
new file mode 100644
index 00000000000..f91ae1fc5a1
--- /dev/null
+++ b/spec/lib/api/entities/bulk_imports/entity_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::BulkImports::Entity do
+ let_it_be(:entity) { create(:bulk_import_entity) }
+
+ subject { described_class.new(entity).as_json }
+
+ it 'has the correct attributes' do
+ expect(subject).to include(
+ :id,
+ :bulk_import_id,
+ :status,
+ :source_full_path,
+ :destination_name,
+ :destination_namespace,
+ :parent_id,
+ :namespace_id,
+ :project_id,
+ :created_at,
+ :updated_at,
+ :failures
+ )
+ end
+end
diff --git a/spec/lib/api/entities/job_request/image_spec.rb b/spec/lib/api/entities/ci/job_request/image_spec.rb
index f13eab6a752..55aade03129 100644
--- a/spec/lib/api/entities/job_request/image_spec.rb
+++ b/spec/lib/api/entities/ci/job_request/image_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Entities::JobRequest::Image do
+RSpec.describe API::Entities::Ci::JobRequest::Image do
let(:ports) { [{ number: 80, protocol: 'http', name: 'name' }]}
let(:image) { double(name: 'image_name', entrypoint: ['foo'], ports: ports)}
let(:entity) { described_class.new(image) }
diff --git a/spec/lib/api/entities/job_request/port_spec.rb b/spec/lib/api/entities/ci/job_request/port_spec.rb
index 4820c4a691b..8e0d2cabcfc 100644
--- a/spec/lib/api/entities/job_request/port_spec.rb
+++ b/spec/lib/api/entities/ci/job_request/port_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::API::Entities::JobRequest::Port do
+RSpec.describe ::API::Entities::Ci::JobRequest::Port do
let(:port) { double(number: 80, protocol: 'http', name: 'name')}
let(:entity) { described_class.new(port) }
diff --git a/spec/lib/api/entities/group_detail_spec.rb b/spec/lib/api/entities/group_detail_spec.rb
new file mode 100644
index 00000000000..8fcb120c809
--- /dev/null
+++ b/spec/lib/api/entities/group_detail_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::GroupDetail do
+ describe '#as_json' do
+ it 'includes prevent_sharing_groups_outside_hierarchy for a root group' do
+ group = create(:group)
+
+ expect(described_class.new(group).as_json).to include(prevent_sharing_groups_outside_hierarchy: false)
+ end
+
+ it 'excludes prevent_sharing_groups_outside_hierarchy for a subgroup' do
+ subgroup = build(:group, :nested)
+
+ expect(described_class.new(subgroup).as_json.keys).not_to include(:prevent_sharing_groups_outside_hierarchy)
+ end
+ end
+end
diff --git a/spec/lib/api/entities/plan_limit_spec.rb b/spec/lib/api/entities/plan_limit_spec.rb
index ee42c67f9b6..75e39e4f074 100644
--- a/spec/lib/api/entities/plan_limit_spec.rb
+++ b/spec/lib/api/entities/plan_limit_spec.rb
@@ -14,7 +14,8 @@ RSpec.describe API::Entities::PlanLimit do
:maven_max_file_size,
:npm_max_file_size,
:nuget_max_file_size,
- :pypi_max_file_size
+ :pypi_max_file_size,
+ :terraform_module_max_file_size
)
end
diff --git a/spec/lib/api/entities/user_spec.rb b/spec/lib/api/entities/user_spec.rb
index e35deeb6263..860f007f284 100644
--- a/spec/lib/api/entities/user_spec.rb
+++ b/spec/lib/api/entities/user_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe API::Entities::User do
subject { described_class.new(user, current_user: current_user).as_json }
it 'exposes correct attributes' do
- expect(subject).to include(:bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title, :work_information)
+ expect(subject).to include(:bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title, :work_information, :pronouns)
end
it 'exposes created_at if the current user can read the user profile' do
diff --git a/spec/lib/api/helpers/caching_spec.rb b/spec/lib/api/helpers/caching_spec.rb
index f94c44c7382..38b7b386d5c 100644
--- a/spec/lib/api/helpers/caching_spec.rb
+++ b/spec/lib/api/helpers/caching_spec.rb
@@ -3,7 +3,7 @@
require "spec_helper"
RSpec.describe API::Helpers::Caching, :use_clean_rails_redis_caching do
- subject(:instance) { Class.new.include(described_class).new }
+ subject(:instance) { Class.new.include(described_class, Grape::DSL::Headers).new }
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
@@ -44,108 +44,16 @@ RSpec.describe API::Helpers::Caching, :use_clean_rails_redis_caching do
}
end
- context "single object" do
+ context 'single object' do
let_it_be(:presentable) { create(:todo, project: project) }
- it { is_expected.to be_a(Gitlab::Json::PrecompiledJson) }
-
- it "uses the presenter" do
- expect(presenter).to receive(:represent).with(presentable, project: project)
-
- subject
- end
-
- it "is valid JSON" do
- parsed = Gitlab::Json.parse(subject.to_s)
-
- expect(parsed).to be_a(Hash)
- expect(parsed["id"]).to eq(presentable.id)
- end
-
- it "fetches from the cache" do
- expect(instance.cache).to receive(:fetch).with("#{presentable.cache_key}:#{user.cache_key}", expires_in: described_class::DEFAULT_EXPIRY).once
-
- subject
- end
-
- context "when a cache context is supplied" do
- before do
- kwargs[:cache_context] = -> (todo) { todo.project.cache_key }
- end
-
- it "uses the context to augment the cache key" do
- expect(instance.cache).to receive(:fetch).with("#{presentable.cache_key}:#{project.cache_key}", expires_in: described_class::DEFAULT_EXPIRY).once
-
- subject
- end
- end
-
- context "when expires_in is supplied" do
- it "sets the expiry when accessing the cache" do
- kwargs[:expires_in] = 7.days
-
- expect(instance.cache).to receive(:fetch).with("#{presentable.cache_key}:#{user.cache_key}", expires_in: 7.days).once
-
- subject
- end
- end
+ it_behaves_like 'object cache helper'
end
- context "for a collection of objects" do
+ context 'collection of objects' do
let_it_be(:presentable) { Array.new(5).map { create(:todo, project: project) } }
- it { is_expected.to be_an(Gitlab::Json::PrecompiledJson) }
-
- it "uses the presenter" do
- presentable.each do |todo|
- expect(presenter).to receive(:represent).with(todo, project: project)
- end
-
- subject
- end
-
- it "is valid JSON" do
- parsed = Gitlab::Json.parse(subject.to_s)
-
- expect(parsed).to be_an(Array)
-
- presentable.each_with_index do |todo, i|
- expect(parsed[i]["id"]).to eq(todo.id)
- end
- end
-
- it "fetches from the cache" do
- keys = presentable.map { |todo| "#{todo.cache_key}:#{user.cache_key}" }
-
- expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: described_class::DEFAULT_EXPIRY).once.and_call_original
-
- subject
- end
-
- context "when a cache context is supplied" do
- before do
- kwargs[:cache_context] = -> (todo) { todo.project.cache_key }
- end
-
- it "uses the context to augment the cache key" do
- keys = presentable.map { |todo| "#{todo.cache_key}:#{project.cache_key}" }
-
- expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: described_class::DEFAULT_EXPIRY).once.and_call_original
-
- subject
- end
- end
-
- context "expires_in is supplied" do
- it "sets the expiry when accessing the cache" do
- keys = presentable.map { |todo| "#{todo.cache_key}:#{user.cache_key}" }
- kwargs[:expires_in] = 7.days
-
- expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: 7.days).once.and_call_original
-
- subject
- end
- end
+ it_behaves_like 'collection cache helper'
end
end
@@ -187,6 +95,42 @@ RSpec.describe API::Helpers::Caching, :use_clean_rails_redis_caching do
expect(nested_call.to_s).to eq(subject.to_s)
end
+
+ context 'Cache versioning' do
+ it 'returns cache based on version parameter' do
+ result_1 = instance.cache_action(cache_key, **kwargs.merge(version: 1)) { 'Cache 1' }
+ result_2 = instance.cache_action(cache_key, **kwargs.merge(version: 2)) { 'Cache 2' }
+
+ expect(result_1.to_s).to eq('Cache 1'.to_json)
+ expect(result_2.to_s).to eq('Cache 2'.to_json)
+ end
+ end
+
+ context 'Cache for pagination headers' do
+ described_class::PAGINATION_HEADERS.each do |pagination_header|
+ context pagination_header do
+ before do
+ instance.header(pagination_header, 100)
+ end
+
+ it 'stores and recovers pagination headers from cache' do
+ expect { perform }.not_to change { instance.header[pagination_header] }
+
+ instance.header.delete(pagination_header)
+
+ expect { perform }.to change { instance.header[pagination_header] }.from(nil).to(100)
+ end
+
+ it 'prefers headers from request than from cache' do
+ expect { perform }.not_to change { instance.header[pagination_header] }
+
+ instance.header(pagination_header, 50)
+
+ expect { perform }.not_to change { instance.header[pagination_header] }.from(50)
+ end
+ end
+ end
+ end
end
describe "#cache_action_if" do
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb
index 2bce4cab679..f57037d5652 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/database_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Backup::Database do
context 'when the restore command prints errors' do
let(:visible_error) { "This is a test error\n" }
- let(:noise) { "Table projects does not exist\nmust be owner of extension pg_trgm\n" }
+ let(:noise) { "Table projects does not exist\nmust be owner of extension pg_trgm\nWARNING: no privileges could be revoked for public\n" }
let(:cmd) { %W[#{Gem.ruby} -e $stderr.write("#{noise}#{visible_error}")] }
it 'filters out noise from errors' do
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index 13567ead842..cdb35c0ce01 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Backup::GitalyBackup do
+ let(:parallel) { nil }
+ let(:parallel_storage) { nil }
let(:progress) do
Tempfile.new('progress').tap do |progress|
progress.unlink
@@ -13,7 +15,7 @@ RSpec.describe Backup::GitalyBackup do
progress.close
end
- subject { described_class.new(progress) }
+ subject { described_class.new(progress, parallel: parallel, parallel_storage: parallel_storage) }
context 'unknown' do
it 'fails to start unknown' do
@@ -30,6 +32,8 @@ RSpec.describe Backup::GitalyBackup do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.owner)
+ expect(Process).to receive(:spawn).with(anything, 'create', '-path', anything, { in: anything, out: progress }).and_call_original
+
subject.start(:create)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
@@ -45,6 +49,28 @@ RSpec.describe Backup::GitalyBackup do
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle'))
end
+ context 'parallel option set' do
+ let(:parallel) { 3 }
+
+ it 'passes parallel option through' do
+ expect(Process).to receive(:spawn).with(anything, 'create', '-path', anything, '-parallel', '3', { in: anything, out: progress }).and_call_original
+
+ subject.start(:create)
+ subject.wait
+ end
+ end
+
+ context 'parallel_storage option set' do
+ let(:parallel_storage) { 3 }
+
+ it 'passes parallel_storage option through' do
+ expect(Process).to receive(:spawn).with(anything, 'create', '-path', anything, '-parallel-storage', '3', { in: anything, out: progress }).and_call_original
+
+ subject.start(:create)
+ subject.wait
+ end
+ end
+
it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
@@ -83,6 +109,8 @@ RSpec.describe Backup::GitalyBackup do
copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
+ expect(Process).to receive(:spawn).with(anything, 'restore', '-path', anything, { in: anything, out: progress }).and_call_original
+
subject.start(:restore)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
@@ -100,6 +128,17 @@ RSpec.describe Backup::GitalyBackup do
expect(collect_commit_shas.call(project_snippet.repository)).to eq(['6e44ba56a4748be361a841e759c20e421a1651a1'])
end
+ context 'parallel option set' do
+ let(:parallel) { 3 }
+
+ it 'does not pass parallel option through' do
+ expect(Process).to receive(:spawn).with(anything, 'restore', '-path', anything, { in: anything, out: progress }).and_call_original
+
+ subject.start(:restore)
+ subject.wait
+ end
+ end
+
it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index d77b1e0f276..85818038c9d 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
RSpec.describe Backup::Repositories do
let(:progress) { spy(:stdout) }
- let(:strategy) { spy(:strategy) }
+ let(:parallel_enqueue) { true }
+ let(:strategy) { spy(:strategy, parallel_enqueue?: parallel_enqueue) }
subject { described_class.new(progress, strategy: strategy) }
@@ -80,6 +81,22 @@ RSpec.describe Backup::Repositories do
end
end
+ context 'concurrency with a strategy without parallel enqueueing support' do
+ let(:parallel_enqueue) { false }
+
+ it 'enqueues all projects sequentially' do
+ expect(Thread).not_to receive(:new)
+
+ expect(strategy).to receive(:start).with(:create)
+ projects.each do |project|
+ expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ end
+ expect(strategy).to receive(:wait)
+
+ subject.dump(max_concurrency: 2, max_storage_concurrency: 2)
+ end
+ end
+
[4, 10].each do |max_storage_concurrency|
context "max_storage_concurrency #{max_storage_concurrency}", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/241701' do
let(:storage_keys) { %w[default test_second_storage] }
diff --git a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
index 7557b9a118d..d7bcebbbe34 100644
--- a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
@@ -118,7 +118,7 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter do
end
context "redmine project" do
- let_it_be(:service) { create(:redmine_service, project: project) }
+ let_it_be(:integration) { create(:redmine_integration, project: project) }
before do
project.update!(issues_enabled: false)
@@ -140,7 +140,9 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter do
end
context "youtrack project" do
- let_it_be(:service) { create(:youtrack_service, project: project) }
+ before_all do
+ create(:youtrack_integration, project: project)
+ end
before do
project.update!(issues_enabled: false)
@@ -183,7 +185,7 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter do
end
context "jira project" do
- let_it_be(:service) { create(:jira_service, project: project) }
+ let_it_be(:service) { create(:jira_integration, project: project) }
let(:reference) { issue.to_reference }
@@ -215,8 +217,6 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter do
context "ewm project" do
let_it_be(:integration) { create(:ewm_integration, project: project) }
- let(:service) { integration } # TODO: remove when https://gitlab.com/gitlab-org/gitlab/-/issues/330300 is complete
-
before do
project.update!(issues_enabled: false)
end
diff --git a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
index dafdc71ce64..f8a00716680 100644
--- a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
@@ -327,6 +327,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter do
it_behaves_like 'String-based single-word references'
it_behaves_like 'String-based multi-word references in quotes'
it_behaves_like 'referencing a milestone in a link href'
+ it_behaves_like 'linking to a milestone as the entire link'
it_behaves_like 'cross-project / cross-namespace complete reference'
it_behaves_like 'cross-project / same-namespace complete reference'
it_behaves_like 'cross project shorthand reference'
@@ -460,4 +461,76 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter do
include_context 'group milestones'
end
end
+
+ context 'checking N+1' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group2) { create(:group) }
+ let_it_be(:project) { create(:project, :public, namespace: group) }
+ let_it_be(:project2) { create(:project, :public, namespace: group2) }
+ let_it_be(:project3) { create(:project, :public) }
+ let_it_be(:project_milestone) { create(:milestone, project: project) }
+ let_it_be(:project_milestone2) { create(:milestone, project: project) }
+ let_it_be(:project2_milestone) { create(:milestone, project: project2) }
+ let_it_be(:group2_milestone) { create(:milestone, group: group2) }
+ let_it_be(:project_reference) { "#{project_milestone.to_reference}" }
+ let_it_be(:project_reference2) { "#{project_milestone2.to_reference}" }
+ let_it_be(:project2_reference) { "#{project2_milestone.to_reference(full: true)}" }
+ let_it_be(:group2_reference) { "#{project2.full_path}%\"#{group2_milestone.name}\"" }
+
+ it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
+ markdown = "#{project_reference}"
+ control_count = 4
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(control_count)
+
+ markdown = "#{project_reference} %qwert %werty %ertyu %rtyui #{project_reference2}"
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(control_count)
+ end
+
+ it 'has N+1 for multiple unique project/group references', :use_sql_query_cache do
+ markdown = "#{project_reference}"
+ control_count = 4
+
+ expect do
+ reference_filter(markdown, project: project)
+ end.not_to exceed_all_query_limit(control_count)
+
+ # Since we're not batching milestone queries across projects/groups,
+ # queries increase when a new project/group is added.
+ # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
+ markdown = "#{project_reference} #{group2_reference}"
+ control_count += 5
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(control_count)
+
+ # third reference to already queried project/namespace, nothing extra (no N+1 here)
+ markdown = "#{project_reference} #{group2_reference} #{project_reference2}"
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(control_count)
+
+ # last reference needs additional queries
+ markdown = "#{project_reference} #{group2_reference} #{project2_reference} #{project3.full_path}%test_milestone"
+ control_count += 6
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(control_count)
+
+ # Use an iid instead of title reference
+ markdown = "#{project_reference} #{group2_reference} #{project2.full_path}%#{project2_milestone.iid} #{project3.full_path}%test_milestone"
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(control_count)
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/upload_link_filter_spec.rb b/spec/lib/banzai/filter/upload_link_filter_spec.rb
index 9ca499be665..eb45a8149c3 100644
--- a/spec/lib/banzai/filter/upload_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/upload_link_filter_spec.rb
@@ -42,6 +42,12 @@ RSpec.describe Banzai::Filter::UploadLinkFilter do
let(:upload_path) { '/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg' }
let(:relative_path) { "/#{project.full_path}#{upload_path}" }
+ it 'preserves original url in data-canonical-src attribute' do
+ doc = filter(link(upload_path))
+
+ expect(doc.at_css('a')['data-canonical-src']).to eq(upload_path)
+ end
+
context 'to a project upload' do
context 'with an absolute URL' do
let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
diff --git a/spec/lib/banzai/filter/wiki_link_filter_spec.rb b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
index b5b5349946b..70c7c3c74fb 100644
--- a/spec/lib/banzai/filter/wiki_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
@@ -22,6 +22,24 @@ RSpec.describe Banzai::Filter::WikiLinkFilter do
expect(filtered_link.attribute('href').value).to eq('/uploads/a.test')
end
+ describe 'when links are rewritable' do
+ it "stores original url in the data-canonical-src attribute" do
+ original_path = "#{repository_upload_folder}/a.jpg"
+ filtered_elements = filter("<a href='#{original_path}'><img src='#{original_path}'>example</img></a>", wiki: wiki)
+
+ expect(filtered_elements.search('img').first.attribute('data-canonical-src').value).to eq(original_path)
+ expect(filtered_elements.search('a').first.attribute('data-canonical-src').value).to eq(original_path)
+ end
+ end
+
+ describe 'when links are not rewritable' do
+ it "does not store original url in the data-canonical-src attribute" do
+ filtered_link = filter("<a href='/uploads/a.test'>Link</a>", wiki: wiki).children[0]
+
+ expect(filtered_link.value?('data-canonical-src')).to eq(false)
+ end
+ end
+
describe 'when links point to the relative wiki path' do
it 'does not rewrite links' do
path = "#{wiki.wiki_base_path}/#{repository_upload_folder}/a.jpg"
diff --git a/spec/lib/banzai/reference_parser/base_parser_spec.rb b/spec/lib/banzai/reference_parser/base_parser_spec.rb
index 18d8418ca23..095500cdc53 100644
--- a/spec/lib/banzai/reference_parser/base_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/base_parser_spec.rb
@@ -78,12 +78,31 @@ RSpec.describe Banzai::ReferenceParser::BaseParser do
describe '#referenced_by' do
context 'when references_relation is implemented' do
- it 'returns a collection of objects' do
- links = Nokogiri::HTML.fragment("<a data-foo='#{user.id}'></a>")
- .children
+ context 'and ids_only is set to false' do
+ it 'returns a collection of objects' do
+ links = Nokogiri::HTML.fragment("<a data-foo='#{user.id}'></a>")
+ .children
- expect(subject).to receive(:references_relation).and_return(User)
- expect(subject.referenced_by(links)).to eq([user])
+ expect(subject).to receive(:references_relation).and_return(User)
+ expect(subject.referenced_by(links)).to eq([user])
+ end
+ end
+
+ context 'and ids_only is set to true' do
+ it 'returns a collection of id values without performing a db query' do
+ links = Nokogiri::HTML.fragment("<a data-foo='1'></a><a data-foo='2'></a>").children
+
+ expect(subject).not_to receive(:references_relation)
+ expect(subject.referenced_by(links, ids_only: true)).to eq(%w(1 2))
+ end
+
+ context 'and the html fragment does not contain any attributes' do
+ it 'returns an empty array' do
+ links = Nokogiri::HTML.fragment("no links").children
+
+ expect(subject.referenced_by(links, ids_only: true)).to eq([])
+ end
+ end
end
end
@@ -188,7 +207,7 @@ RSpec.describe Banzai::ReferenceParser::BaseParser do
dummy = Class.new(described_class) do
self.reference_type = :test
- def gather_references(nodes)
+ def gather_references(nodes, ids_only: false)
nodes
end
end
@@ -222,7 +241,7 @@ RSpec.describe Banzai::ReferenceParser::BaseParser do
nodes.select { |n| n.id > 5 }
end
- def referenced_by(nodes)
+ def referenced_by(nodes, ids_only: false)
nodes.map(&:id)
end
end
diff --git a/spec/lib/bulk_imports/clients/graphql_spec.rb b/spec/lib/bulk_imports/clients/graphql_spec.rb
new file mode 100644
index 00000000000..2f212458c4a
--- /dev/null
+++ b/spec/lib/bulk_imports/clients/graphql_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Clients::Graphql do
+ let_it_be(:config) { create(:bulk_import_configuration) }
+
+ subject { described_class.new(url: config.url, token: config.access_token) }
+
+ describe '#execute' do
+ let(:query) { '{ metadata { version } }' }
+ let(:graphql_client_double) { double }
+ let(:response_double) { double }
+
+ before do
+ stub_const('BulkImports::MINIMUM_COMPATIBLE_MAJOR_VERSION', version)
+ allow(graphql_client_double).to receive(:execute)
+ allow(subject).to receive(:client).and_return(graphql_client_double)
+ allow(graphql_client_double).to receive(:execute).with(query).and_return(response_double)
+ allow(response_double).to receive_message_chain(:data, :metadata, :version).and_return(version)
+ end
+
+ context 'when source instance is compatible' do
+ let(:version) { '14.0.0' }
+
+ it 'marks source instance as compatible' do
+ subject.execute('test')
+
+ expect(subject.instance_variable_get(:@compatible_instance_version)).to eq(true)
+ end
+ end
+
+ context 'when source instance is incompatible' do
+ let(:version) { '13.0.0' }
+
+ it 'raises an error' do
+ expect { subject.execute('test') }.to raise_error(::BulkImports::Error, "Unsupported GitLab Version. Minimum Supported Gitlab Version #{BulkImport::MINIMUM_GITLAB_MAJOR_VERSION}.")
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index ac42f12a3d4..c36cb80851a 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -5,12 +5,20 @@ require 'spec_helper'
RSpec.describe BulkImports::Clients::HTTP do
include ImportSpecHelper
- let(:uri) { 'http://gitlab.example' }
+ let(:url) { 'http://gitlab.example' }
let(:token) { 'token' }
let(:resource) { 'resource' }
+ let(:version) { "#{BulkImport::MINIMUM_GITLAB_MAJOR_VERSION}.0.0" }
let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
+ let(:version_response) { double(code: 200, success?: true, parsed_response: { 'version' => version }) }
- subject { described_class.new(uri: uri, token: token) }
+ before do
+ allow(Gitlab::HTTP).to receive(:get)
+ .with('http://gitlab.example/api/v4/version', anything)
+ .and_return(version_response)
+ end
+
+ subject { described_class.new(url: url, token: token) }
shared_examples 'performs network request' do
it 'performs network request' do
@@ -21,20 +29,20 @@ RSpec.describe BulkImports::Clients::HTTP do
context 'error handling' do
context 'when error occurred' do
- it 'raises ConnectionError' do
+ it 'raises BulkImports::Error' do
allow(Gitlab::HTTP).to receive(method).and_raise(Errno::ECONNREFUSED)
- expect { subject.public_send(method, resource) }.to raise_exception(described_class::ConnectionError)
+ expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::Error)
end
end
context 'when response is not success' do
- it 'raises ConnectionError' do
+ it 'raises BulkImports::Error' do
response_double = double(code: 503, success?: false)
allow(Gitlab::HTTP).to receive(method).and_return(response_double)
- expect { subject.public_send(method, resource) }.to raise_exception(described_class::ConnectionError)
+ expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::Error)
end
end
end
@@ -46,7 +54,7 @@ RSpec.describe BulkImports::Clients::HTTP do
include_examples 'performs network request' do
let(:expected_args) do
[
- 'http://gitlab.example:80/api/v4/resource',
+ 'http://gitlab.example/api/v4/resource',
hash_including(
follow_redirects: false,
query: {
@@ -96,7 +104,7 @@ RSpec.describe BulkImports::Clients::HTTP do
private
def stub_http_get(path, query, response)
- uri = "http://gitlab.example:80/api/v4/#{path}"
+ uri = "http://gitlab.example/api/v4/#{path}"
params = {
follow_redirects: false,
headers: {
@@ -116,7 +124,7 @@ RSpec.describe BulkImports::Clients::HTTP do
include_examples 'performs network request' do
let(:expected_args) do
[
- 'http://gitlab.example:80/api/v4/resource',
+ 'http://gitlab.example/api/v4/resource',
hash_including(
body: {},
follow_redirects: false,
@@ -136,7 +144,7 @@ RSpec.describe BulkImports::Clients::HTTP do
include_examples 'performs network request' do
let(:expected_args) do
[
- 'http://gitlab.example:80/api/v4/resource',
+ 'http://gitlab.example/api/v4/resource',
hash_including(
follow_redirects: false,
headers: {
@@ -152,7 +160,7 @@ RSpec.describe BulkImports::Clients::HTTP do
describe '#stream' do
it 'performs network request with stream_body option' do
expected_args = [
- 'http://gitlab.example:80/api/v4/resource',
+ 'http://gitlab.example/api/v4/resource',
hash_including(
stream_body: true,
headers: {
@@ -167,4 +175,28 @@ RSpec.describe BulkImports::Clients::HTTP do
subject.stream(resource)
end
end
+
+ context 'when source instance is incompatible' do
+ let(:version) { '13.0.0' }
+
+ it 'raises an error' do
+ expect { subject.get(resource) }.to raise_error(::BulkImports::Error, "Unsupported GitLab Version. Minimum Supported Gitlab Version #{BulkImport::MINIMUM_GITLAB_MAJOR_VERSION}.")
+ end
+ end
+
+ context 'when url is relative' do
+ let(:url) { 'http://website.example/gitlab' }
+
+ before do
+ allow(Gitlab::HTTP).to receive(:get)
+ .with('http://website.example/gitlab/api/v4/version', anything)
+ .and_return(version_response)
+ end
+
+ it 'performs network request to a relative gitlab url' do
+ expect(Gitlab::HTTP).to receive(:get).with('http://website.example/gitlab/api/v4/resource', anything).and_return(response_double)
+
+ subject.get(resource)
+ end
+ end
end
diff --git a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
index 533955b057c..de0b56045b3 100644
--- a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
+++ b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
let(:service_double) { instance_double(::Groups::CreateService) }
let(:data) { { foo: :bar } }
diff --git a/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb
new file mode 100644
index 00000000000..c68284aa580
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Pipelines::GroupAvatarPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ group: group,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Group',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject { described_class.new(context) }
+
+ describe '#run' do
+ it 'updates the group avatar' do
+ avatar_path = 'spec/fixtures/dk.png'
+ stub_file_download(
+ avatar_path,
+ configuration: context.configuration,
+ relative_url: "/groups/source%2Ffull%2Fpath/avatar",
+ dir: an_instance_of(String),
+ file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
+ allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
+ )
+
+ expect { subject.run }.to change(context.group, :avatar)
+
+ expect(context.group.avatar.filename).to eq(File.basename(avatar_path))
+ end
+
+ it 'raises an error when the avatar upload fails' do
+ avatar_path = 'spec/fixtures/aosp_manifest.xml'
+ stub_file_download(
+ avatar_path,
+ configuration: context.configuration,
+ relative_url: "/groups/source%2Ffull%2Fpath/avatar",
+ dir: an_instance_of(String),
+ file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
+ allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
+ )
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger).to receive(:error)
+ .with(
+ bulk_import_id: context.bulk_import.id,
+ bulk_import_entity_id: context.entity.id,
+ bulk_import_entity_type: context.entity.source_type,
+ context_extra: context.extra,
+ exception_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline::GroupAvatarLoadingError",
+ exception_message: "Avatar file format is not supported. Please try one of the following supported formats: image/png, image/jpeg, image/gif, image/bmp, image/tiff, image/vnd.microsoft.icon",
+ pipeline_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline",
+ pipeline_step: :loader
+ )
+ end
+
+ expect { subject.run }.to change(BulkImports::Failure, :count)
+ end
+ end
+
+ def stub_file_download(filepath = 'file/path.png', **params)
+ expect_next_instance_of(BulkImports::FileDownloadService, params.presence) do |downloader|
+ expect(downloader).to receive(:execute).and_return(filepath)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
index d8a667ec92a..0126acb320b 100644
--- a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
@@ -63,6 +63,14 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do
expect(member.updated_at).to eq('2020-01-01T00:00:00Z')
expect(member.expires_at).to eq(nil)
end
+
+ context 'when user_id is current user id' do
+ it 'does not create new member' do
+ data = { 'user_id' => user.id }
+
+ expect { subject.load(context, data) }.not_to change(GroupMember, :count)
+ end
+ end
end
describe 'pipeline parts' do
diff --git a/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
index f3905a4b6e4..af99428e0c1 100644
--- a/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
@@ -84,9 +84,34 @@ RSpec.describe BulkImports::Groups::Transformers::MemberAttributesTransformer do
expect(subject.transform(context, data)).to be_nil
end
end
+
+ context 'source user id caching' do
+ context 'when user gid is present' do
+ it 'caches source user id' do
+ gid = 'gid://gitlab/User/7'
+ data = member_data(email: user.email, gid: gid)
+
+ expect_next_instance_of(BulkImports::UsersMapper) do |mapper|
+ expect(mapper).to receive(:cache_source_user_id).with('7', user.id)
+ end
+
+ subject.transform(context, data)
+ end
+ end
+
+ context 'when user gid is missing' do
+ it 'does not use caching' do
+ data = member_data(email: user.email)
+
+ expect(BulkImports::UsersMapper).not_to receive(:new)
+
+ subject.transform(context, data)
+ end
+ end
+ end
end
- def member_data(email: '', access_level: 30)
+ def member_data(email: '', gid: nil, access_level: 30)
{
'created_at' => '2020-01-01T00:00:00Z',
'updated_at' => '2020-01-01T00:00:00Z',
@@ -95,6 +120,7 @@ RSpec.describe BulkImports::Groups::Transformers::MemberAttributesTransformer do
'integer_value' => access_level
},
'user' => {
+ 'user_gid' => gid,
'public_email' => email
}
}
diff --git a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
index a5d1a5f7fbb..57a258b0d9f 100644
--- a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
@@ -106,8 +106,11 @@ RSpec.describe BulkImports::NdjsonPipeline do
data = [hash, 1]
user = double
config = double(relation_excluded_keys: nil, top_relation_tree: [])
- context = double(portable: group, current_user: user, import_export_config: config)
+ import_double = instance_double(BulkImport, id: 1)
+ entity_double = instance_double(BulkImports::Entity, id: 2)
+ context = double(portable: group, current_user: user, import_export_config: config, bulk_import: import_double, entity: entity_double)
allow(subject).to receive(:import_export_config).and_return(config)
+ allow(subject).to receive(:context).and_return(context)
expect(Gitlab::ImportExport::Group::RelationFactory)
.to receive(:create)
@@ -116,7 +119,7 @@ RSpec.describe BulkImports::NdjsonPipeline do
relation_sym: :test,
relation_hash: hash,
importable: group,
- members_mapper: instance_of(Gitlab::ImportExport::MembersMapper),
+ members_mapper: instance_of(BulkImports::UsersMapper),
object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
user: user,
excluded_keys: nil
diff --git a/spec/lib/bulk_imports/stage_spec.rb b/spec/lib/bulk_imports/stage_spec.rb
index d082faa90bc..4398b00e7e9 100644
--- a/spec/lib/bulk_imports/stage_spec.rb
+++ b/spec/lib/bulk_imports/stage_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe BulkImports::Stage do
let(:pipelines) do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
+ [1, BulkImports::Groups::Pipelines::GroupAvatarPipeline],
[1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
[1, BulkImports::Groups::Pipelines::MembersPipeline],
[1, BulkImports::Groups::Pipelines::LabelsPipeline],
diff --git a/spec/lib/bulk_imports/users_mapper_spec.rb b/spec/lib/bulk_imports/users_mapper_spec.rb
new file mode 100644
index 00000000000..e6357319d05
--- /dev/null
+++ b/spec/lib/bulk_imports/users_mapper_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::UsersMapper do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) { create(:bulk_import_entity, bulk_import: import) }
+
+ let(:context) do
+ instance_double(
+ BulkImports::Pipeline::Context,
+ bulk_import: import,
+ entity: entity,
+ current_user: user
+ )
+ end
+
+ subject { described_class.new(context: context) }
+
+ describe '#map' do
+ context 'when value for specified key exists' do
+ it 'returns a map of source & destination user ids from redis' do
+ allow(Gitlab::Cache::Import::Caching).to receive(:values_from_hash).and_return({ "1" => "2" })
+
+ expect(subject.map).to eq({ 1 => 2 })
+ end
+ end
+
+ context 'when value for specified key does not exist' do
+ it 'returns default value' do
+ expect(subject.map[:non_existent_key]).to eq(user.id)
+ end
+ end
+ end
+
+ describe '#default_user_id' do
+ it 'returns current user id' do
+ expect(subject.default_user_id).to eq(user.id)
+ end
+ end
+
+ describe '#include?' do
+ context 'when source user id is present in the map' do
+ it 'returns true' do
+ allow(subject).to receive(:map).and_return({ 1 => 2 })
+
+ expect(subject.include?(1)).to eq(true)
+ end
+ end
+
+ context 'when source user id is missing in the map' do
+ it 'returns false' do
+ allow(subject).to receive(:map).and_return({})
+
+ expect(subject.include?(1)).to eq(false)
+ end
+ end
+ end
+
+ describe '#cache_source_user_id' do
+ it 'caches provided source & destination user ids in redis' do
+ expect(Gitlab::Cache::Import::Caching).to receive(:hash_add).with("bulk_imports/#{import.id}/#{entity.id}/source_user_ids", 1, 2)
+
+ subject.cache_source_user_id(1, 2)
+ end
+ end
+end
diff --git a/spec/lib/error_tracking/collector/sentry_request_parser_spec.rb b/spec/lib/error_tracking/collector/sentry_request_parser_spec.rb
new file mode 100644
index 00000000000..6f12c6d25e0
--- /dev/null
+++ b/spec/lib/error_tracking/collector/sentry_request_parser_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ErrorTracking::Collector::SentryRequestParser do
+ describe '.parse' do
+ let_it_be(:raw_event) { fixture_file('error_tracking/event.txt') }
+ let_it_be(:parsed_event) { Gitlab::Json.parse(fixture_file('error_tracking/parsed_event.json')) }
+
+ let(:body) { raw_event }
+ let(:headers) { { 'Content-Encoding' => '' } }
+ let(:request) { double('request', headers: headers, body: StringIO.new(body)) }
+
+ subject { described_class.parse(request) }
+
+ RSpec.shared_examples 'valid parser' do
+ it 'returns a valid hash' do
+ parsed_request = subject
+
+ expect(parsed_request[:request_type]).to eq('event')
+ expect(parsed_request[:event]).to eq(parsed_event)
+ end
+ end
+
+ context 'empty body content' do
+ let(:body) { '' }
+
+ it 'fails with exception' do
+ expect { subject }.to raise_error(StandardError)
+ end
+ end
+
+ context 'plain text sentry request' do
+ it_behaves_like 'valid parser'
+ end
+
+ context 'gzip encoded sentry request' do
+ let(:headers) { { 'Content-Encoding' => 'gzip' } }
+ let(:body) { Zlib.gzip(raw_event) }
+
+ it_behaves_like 'valid parser'
+ end
+ end
+end
diff --git a/spec/lib/extracts_path_spec.rb b/spec/lib/extracts_path_spec.rb
index b69cbbf0ec0..05f3bb2f71a 100644
--- a/spec/lib/extracts_path_spec.rb
+++ b/spec/lib/extracts_path_spec.rb
@@ -7,9 +7,17 @@ RSpec.describe ExtractsPath do
include RepoHelpers
include Gitlab::Routing
+ # Make url_for work
+ def default_url_options
+ { controller: 'projects/blob', action: 'show', namespace_id: @project.namespace.path, project_id: @project.path }
+ end
+
let_it_be(:owner) { create(:user) }
let_it_be(:container) { create(:project, :repository, creator: owner) }
+
let(:request) { double('request') }
+ let(:flash) { {} }
+ let(:redirect_renamed_default_branch?) { true }
before do
@project = container
@@ -17,11 +25,14 @@ RSpec.describe ExtractsPath do
allow(container.repository).to receive(:ref_names).and_return(ref_names)
allow(request).to receive(:format=)
+ allow(request).to receive(:get?)
+ allow(request).to receive(:head?)
end
describe '#assign_ref_vars' do
let(:ref) { sample_commit[:id] }
- let(:params) { { path: sample_commit[:line_code_path], ref: ref } }
+ let(:path) { sample_commit[:line_code_path] }
+ let(:params) { { path: path, ref: ref } }
it_behaves_like 'assigns ref vars'
@@ -125,6 +136,66 @@ RSpec.describe ExtractsPath do
expect(@commit).to be_nil
end
end
+
+ context 'ref points to a previous default branch' do
+ let(:ref) { 'develop' }
+
+ before do
+ @project.update!(previous_default_branch: ref)
+
+ allow(@project).to receive(:default_branch).and_return('foo')
+ end
+
+ it 'redirects to the new default branch for a GET request' do
+ allow(request).to receive(:get?).and_return(true)
+
+ expect(self).to receive(:redirect_to).with("http://localhost/#{@project.full_path}/-/blob/foo/#{path}")
+ expect(self).not_to receive(:render_404)
+
+ assign_ref_vars
+
+ expect(@commit).to be_nil
+ expect(flash[:notice]).to match(/default branch/)
+ end
+
+ it 'redirects to the new default branch for a HEAD request' do
+ allow(request).to receive(:head?).and_return(true)
+
+ expect(self).to receive(:redirect_to).with("http://localhost/#{@project.full_path}/-/blob/foo/#{path}")
+ expect(self).not_to receive(:render_404)
+
+ assign_ref_vars
+
+ expect(@commit).to be_nil
+ expect(flash[:notice]).to match(/default branch/)
+ end
+
+ it 'returns 404 for any other request type' do
+ expect(self).not_to receive(:redirect_to)
+ expect(self).to receive(:render_404)
+
+ assign_ref_vars
+
+ expect(@commit).to be_nil
+ expect(flash).to be_empty
+ end
+
+ context 'redirect behaviour is disabled' do
+ let(:redirect_renamed_default_branch?) { false }
+
+ it 'returns 404 for a GET request' do
+ allow(request).to receive(:get?).and_return(true)
+
+ expect(self).not_to receive(:redirect_to)
+ expect(self).to receive(:render_404)
+
+ assign_ref_vars
+
+ expect(@commit).to be_nil
+ expect(flash).to be_empty
+ end
+ end
+ end
end
it_behaves_like 'extracts refs'
diff --git a/spec/lib/extracts_ref_spec.rb b/spec/lib/extracts_ref_spec.rb
index 5433a512981..3cdce150de9 100644
--- a/spec/lib/extracts_ref_spec.rb
+++ b/spec/lib/extracts_ref_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe ExtractsRef do
let_it_be(:owner) { create(:user) }
let_it_be(:container) { create(:snippet, :repository, author: owner) }
+
let(:ref) { sample_commit[:id] }
let(:params) { { path: sample_commit[:line_code_path], ref: ref } }
diff --git a/spec/lib/gitlab/analytics/unique_visits_spec.rb b/spec/lib/gitlab/analytics/unique_visits_spec.rb
deleted file mode 100644
index f4d5c0b1eca..00000000000
--- a/spec/lib/gitlab/analytics/unique_visits_spec.rb
+++ /dev/null
@@ -1,81 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Analytics::UniqueVisits, :clean_gitlab_redis_shared_state do
- let(:unique_visits) { Gitlab::Analytics::UniqueVisits.new }
- let(:target1_id) { 'g_analytics_contribution' }
- let(:target2_id) { 'g_analytics_insights' }
- let(:target3_id) { 'g_analytics_issues' }
- let(:target4_id) { 'g_compliance_dashboard' }
- let(:target5_id) { 'i_compliance_credential_inventory' }
- let(:visitor1_id) { 'dfb9d2d2-f56c-4c77-8aeb-6cddc4a1f857' }
- let(:visitor2_id) { '1dd9afb2-a3ee-4de1-8ae3-a405579c8584' }
- let(:visitor3_id) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
-
- around do |example|
- # We need to freeze to a reference time
- # because visits are grouped by the week number in the year
- # Without freezing the time, the test may behave inconsistently
- # depending on which day of the week test is run.
- reference_time = Time.utc(2020, 6, 1)
- travel_to(reference_time) { example.run }
- end
-
- describe '#track_visit' do
- it 'tracks the unique weekly visits for targets' do
- unique_visits.track_visit(target1_id, values: visitor1_id, time: 7.days.ago)
- unique_visits.track_visit(target1_id, values: visitor1_id, time: 7.days.ago)
- unique_visits.track_visit(target1_id, values: visitor2_id, time: 7.days.ago)
-
- unique_visits.track_visit(target2_id, values: visitor2_id, time: 7.days.ago)
- unique_visits.track_visit(target2_id, values: visitor1_id, time: 8.days.ago)
- unique_visits.track_visit(target2_id, values: visitor1_id, time: 15.days.ago)
-
- unique_visits.track_visit(target4_id, values: visitor3_id, time: 7.days.ago)
-
- unique_visits.track_visit(target5_id, values: visitor3_id, time: 15.days.ago)
- unique_visits.track_visit(target5_id, values: visitor2_id, time: 15.days.ago)
-
- expect(unique_visits.unique_visits_for(targets: target1_id)).to eq(2)
- expect(unique_visits.unique_visits_for(targets: target2_id)).to eq(1)
- expect(unique_visits.unique_visits_for(targets: target4_id)).to eq(1)
-
- expect(unique_visits.unique_visits_for(targets: target2_id, start_date: 15.days.ago)).to eq(1)
-
- expect(unique_visits.unique_visits_for(targets: target3_id)).to eq(0)
-
- expect(unique_visits.unique_visits_for(targets: target5_id, start_date: 15.days.ago)).to eq(2)
-
- expect(unique_visits.unique_visits_for(targets: :analytics)).to eq(2)
- expect(unique_visits.unique_visits_for(targets: :analytics, start_date: 15.days.ago)).to eq(1)
- expect(unique_visits.unique_visits_for(targets: :analytics, start_date: 30.days.ago)).to eq(0)
-
- expect(unique_visits.unique_visits_for(targets: :analytics, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2)
-
- expect(unique_visits.unique_visits_for(targets: :compliance)).to eq(1)
- expect(unique_visits.unique_visits_for(targets: :compliance, start_date: 15.days.ago)).to eq(2)
- expect(unique_visits.unique_visits_for(targets: :compliance, start_date: 30.days.ago)).to eq(0)
-
- expect(unique_visits.unique_visits_for(targets: :compliance, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2)
- end
-
- it 'sets the keys in Redis to expire automatically after 12 weeks' do
- unique_visits.track_visit(target1_id, values: visitor1_id)
-
- Gitlab::Redis::SharedState.with do |redis|
- redis.scan_each(match: "{#{target1_id}}-*").each do |key|
- expect(redis.ttl(key)).to be_within(5.seconds).of(12.weeks)
- end
- end
- end
-
- it 'raises an error if an invalid target id is given' do
- invalid_target_id = "x_invalid"
-
- expect do
- unique_visits.track_visit(invalid_target_id, values: visitor1_id)
- end.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
- end
- end
-end
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index 7475ed2796f..14200733c19 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -460,7 +460,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
expect { find_user_from_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError)
end
- context 'no feed or API requests' do
+ context 'no feed, API or archive requests' do
it 'returns nil if the request is not RSS' do
expect(find_user_from_web_access_token(:rss)).to be_nil
end
@@ -472,6 +472,10 @@ RSpec.describe Gitlab::Auth::AuthFinders do
it 'returns nil if the request is not API' do
expect(find_user_from_web_access_token(:api)).to be_nil
end
+
+ it 'returns nil if the request is not ARCHIVE' do
+ expect(find_user_from_web_access_token(:archive)).to be_nil
+ end
end
it 'returns the user for RSS requests' do
@@ -486,6 +490,24 @@ RSpec.describe Gitlab::Auth::AuthFinders do
expect(find_user_from_web_access_token(:ics)).to eq(user)
end
+ it 'returns the user for ARCHIVE requests' do
+ set_header('SCRIPT_NAME', '/-/archive/main.zip')
+
+ expect(find_user_from_web_access_token(:archive)).to eq(user)
+ end
+
+ context 'when allow_archive_as_web_access_format feature flag is disabled' do
+ before do
+ stub_feature_flags(allow_archive_as_web_access_format: false)
+ end
+
+ it 'returns nil for ARCHIVE requests' do
+ set_header('SCRIPT_NAME', '/-/archive/main.zip')
+
+ expect(find_user_from_web_access_token(:archive)).to be_nil
+ end
+ end
+
context 'for API requests' do
it 'returns the user' do
set_header('SCRIPT_NAME', '/api/endpoint')
diff --git a/spec/lib/gitlab/auth/ldap/adapter_spec.rb b/spec/lib/gitlab/auth/ldap/adapter_spec.rb
index 8546d63cf77..b7b12e49a8e 100644
--- a/spec/lib/gitlab/auth/ldap/adapter_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/adapter_spec.rb
@@ -95,6 +95,40 @@ RSpec.describe Gitlab::Auth::Ldap::Adapter do
describe '#ldap_search' do
subject { adapter.ldap_search(base: :dn, filter: :filter) }
+ shared_examples 'connection retry' do
+ before do
+ allow(adapter).to receive(:renew_connection_adapter).and_return(ldap)
+ allow(Gitlab::AppLogger).to receive(:warn)
+ end
+
+ context 'retries the operation' do
+ before do
+ stub_const("#{described_class}::MAX_SEARCH_RETRIES", 3)
+ end
+
+ it 'as many times as MAX_SEARCH_RETRIES' do
+ expect(ldap).to receive(:search).exactly(3).times
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
+ end
+
+ context 'when no more retries' do
+ before do
+ stub_const("#{described_class}::MAX_SEARCH_RETRIES", 1)
+ end
+
+ it 'raises the exception' do
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
+ end
+
+ it 'logs the error' do
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
+ expect(Gitlab::AppLogger).to have_received(:warn).with(
+ "LDAP search raised exception Net::LDAP::Error: #{err_message}")
+ end
+ end
+ end
+ end
+
context "when the search is successful" do
context "and the result is non-empty" do
before do
@@ -110,6 +144,22 @@ RSpec.describe Gitlab::Auth::Ldap::Adapter do
end
it { is_expected.to eq [] }
+
+ context 'when returned with expected code' do
+ let(:response_code) { 80 }
+ let(:response_message) { 'Other' }
+ let(:err_message) { "Got empty results with response code: #{response_code}, message: #{response_message}" }
+
+ before do
+ stub_ldap_config(retry_empty_result_with_codes: [response_code])
+ allow(ldap).to receive_messages(
+ search: nil,
+ get_operation_result: double(code: response_code, message: response_message)
+ )
+ end
+
+ it_behaves_like 'connection retry'
+ end
end
end
@@ -132,30 +182,13 @@ RSpec.describe Gitlab::Auth::Ldap::Adapter do
end
context 'retries the operation' do
- before do
- stub_const("#{described_class}::MAX_SEARCH_RETRIES", 3)
- end
+ let(:err_message) { 'some error' }
- it 'as many times as MAX_SEARCH_RETRIES' do
- expect(ldap).to receive(:search).exactly(3).times
- expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
+ before do
+ allow(ldap).to receive(:search) { raise Net::LDAP::Error, err_message }
end
- context 'when no more retries' do
- before do
- stub_const("#{described_class}::MAX_SEARCH_RETRIES", 1)
- end
-
- it 'raises the exception' do
- expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
- end
-
- it 'logs the error' do
- expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
- expect(Gitlab::AppLogger).to have_received(:warn).with(
- "LDAP search raised exception Net::LDAP::Error: some error")
- end
- end
+ it_behaves_like 'connection retry'
end
end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb b/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb
new file mode 100644
index 00000000000..f56cf899410
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:merge_requests) { table(:merge_requests) }
+
+ let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
+ let(:project) { projects.create!(namespace_id: group.id) }
+
+ let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] }
+
+ def create_merge_request(params)
+ common_params = {
+ target_project_id: project.id,
+ target_branch: 'feature1',
+ source_branch: 'master'
+ }
+
+ merge_requests.create!(common_params.merge(params))
+ end
+
+ context "for MRs with #draft? == true titles but draft attribute false" do
+ before do
+ draft_prefixes.each do |prefix|
+ (1..4).each do |n|
+ create_merge_request(
+ title: "#{prefix} This is a title",
+ draft: false,
+ state_id: n
+ )
+ end
+ end
+ end
+
+ it "updates all open draft merge request's draft field to true" do
+ mr_count = merge_requests.all.count
+ mr_ids = merge_requests.all.collect(&:id)
+
+ expect { subject.perform(mr_ids.first, mr_ids.last) }
+ .to change { MergeRequest.where(draft: false).count }
+ .from(mr_count).to(mr_count - draft_prefixes.length)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
index 7fe82420364..58864aac084 100644
--- a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
@@ -3,18 +3,18 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20201028182809 do
- let_it_be(:jira_service_temp) { described_class::JiraServiceTemp }
+ let_it_be(:jira_integration_temp) { described_class::JiraServiceTemp }
let_it_be(:jira_tracker_data_temp) { described_class::JiraTrackerDataTemp }
let_it_be(:atlassian_host) { 'https://api.atlassian.net' }
let_it_be(:mixedcase_host) { 'https://api.AtlassiaN.nEt' }
let_it_be(:server_host) { 'https://my.server.net' }
- let(:jira_service) { jira_service_temp.create!(type: 'JiraService', active: true, category: 'issue_tracker') }
+ let(:jira_integration) { jira_integration_temp.create!(type: 'JiraService', active: true, category: 'issue_tracker') }
subject { described_class.new }
def create_tracker_data(options = {})
- jira_tracker_data_temp.create!({ service_id: jira_service.id }.merge(options))
+ jira_tracker_data_temp.create!({ service_id: jira_integration.id }.merge(options))
end
describe '#perform' do
diff --git a/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb b/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb
new file mode 100644
index 00000000000..b084e3fe885
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillUpvotesCountOnIssues, schema: 20210701111909 do
+ let(:award_emoji) { table(:award_emoji) }
+
+ let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
+ let!(:project1) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:project2) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:issue1) { table(:issues).create!(project_id: project1.id) }
+ let!(:issue2) { table(:issues).create!(project_id: project2.id) }
+ let!(:issue3) { table(:issues).create!(project_id: project2.id) }
+ let!(:issue4) { table(:issues).create!(project_id: project2.id) }
+
+ describe '#perform' do
+ before do
+ add_upvotes(issue1, :thumbsdown, 1)
+ add_upvotes(issue2, :thumbsup, 2)
+ add_upvotes(issue2, :thumbsdown, 1)
+ add_upvotes(issue3, :thumbsup, 3)
+ add_upvotes(issue4, :thumbsup, 4)
+ end
+
+ it 'updates upvotes_count' do
+ subject.perform(issue1.id, issue4.id)
+
+ expect(issue1.reload.upvotes_count).to eq(0)
+ expect(issue2.reload.upvotes_count).to eq(2)
+ expect(issue3.reload.upvotes_count).to eq(3)
+ expect(issue4.reload.upvotes_count).to eq(4)
+ end
+ end
+
+ private
+
+ def add_upvotes(issue, name, count)
+ count.times do
+ award_emoji.create!(
+ name: name.to_s,
+ awardable_type: 'Issue',
+ awardable_id: issue.id
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb b/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb
new file mode 100644
index 00000000000..c4039b85459
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedDeployments, :migration, schema: 20210617161348 do
+ let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let!(:project) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:environment) { table(:environments).create!(name: 'production', slug: 'production', project_id: project.id) }
+ let(:background_migration_jobs) { table(:background_migration_jobs) }
+
+ before do
+ create_deployment!(environment.id, project.id)
+ create_deployment!(non_existing_record_id, project.id)
+ end
+
+ it 'deletes only orphaned deployments' do
+ expect(valid_deployments.pluck(:id)).not_to be_empty
+ expect(orphaned_deployments.pluck(:id)).not_to be_empty
+
+ subject.perform(table(:deployments).minimum(:id), table(:deployments).maximum(:id))
+
+ expect(valid_deployments.pluck(:id)).not_to be_empty
+ expect(orphaned_deployments.pluck(:id)).to be_empty
+ end
+
+ it 'marks jobs as done' do
+ first_job = background_migration_jobs.create!(
+ class_name: 'DeleteOrphanedDeployments',
+ arguments: [table(:deployments).minimum(:id), table(:deployments).minimum(:id)]
+ )
+
+ second_job = background_migration_jobs.create!(
+ class_name: 'DeleteOrphanedDeployments',
+ arguments: [table(:deployments).maximum(:id), table(:deployments).maximum(:id)]
+ )
+
+ subject.perform(table(:deployments).minimum(:id), table(:deployments).minimum(:id))
+
+ expect(first_job.reload.status).to eq(Gitlab::Database::BackgroundMigrationJob.statuses[:succeeded])
+ expect(second_job.reload.status).to eq(Gitlab::Database::BackgroundMigrationJob.statuses[:pending])
+ end
+
+ private
+
+ def valid_deployments
+ table(:deployments).where('EXISTS (SELECT 1 FROM environments WHERE deployments.environment_id = environments.id)')
+ end
+
+ def orphaned_deployments
+ table(:deployments).where('NOT EXISTS (SELECT 1 FROM environments WHERE deployments.environment_id = environments.id)')
+ end
+
+ def create_deployment!(environment_id, project_id)
+ table(:deployments).create!(
+ environment_id: environment_id,
+ project_id: project_id,
+ ref: 'master',
+ tag: false,
+ sha: 'x',
+ status: 1,
+ iid: table(:deployments).count + 1)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
index 80879c8c6d9..f2cd2acd4f3 100644
--- a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
@@ -283,11 +283,11 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, s
end
context 'with Jira service with invalid properties, valid Jira service and valid bugzilla service' do
- let!(:jira_service_invalid) do
+ let!(:jira_integration_invalid) do
services.create!(id: 19, title: 'invalid - title', description: 'invalid - description', type: 'JiraService', properties: 'invalid data', category: 'issue_tracker')
end
- let!(:jira_service_valid) do
+ let!(:jira_integration_valid) do
services.create!(id: 20, type: 'JiraService', properties: jira_properties.to_json, category: 'issue_tracker')
end
@@ -298,21 +298,21 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, s
it 'migrates data for the valid service' do
subject
- jira_service_invalid.reload
- expect(JiraTrackerData.find_by(service_id: jira_service_invalid.id)).to be_nil
- expect(jira_service_invalid.title).to eq('invalid - title')
- expect(jira_service_invalid.description).to eq('invalid - description')
- expect(jira_service_invalid.properties).to eq('invalid data')
+ jira_integration_invalid.reload
+ expect(JiraTrackerData.find_by(service_id: jira_integration_invalid.id)).to be_nil
+ expect(jira_integration_invalid.title).to eq('invalid - title')
+ expect(jira_integration_invalid.description).to eq('invalid - description')
+ expect(jira_integration_invalid.properties).to eq('invalid data')
- jira_service_valid.reload
- data = JiraTrackerData.find_by(service_id: jira_service_valid.id)
+ jira_integration_valid.reload
+ data = JiraTrackerData.find_by(service_id: jira_integration_valid.id)
expect(data.url).to eq(url)
expect(data.api_url).to eq(api_url)
expect(data.username).to eq(username)
expect(data.password).to eq(password)
- expect(jira_service_valid.title).to eq(title)
- expect(jira_service_valid.description).to eq(description)
+ expect(jira_integration_valid.title).to eq(title)
+ expect(jira_integration_valid.description).to eq(description)
bugzilla_integration_valid.reload
data = IssueTrackerData.find_by(service_id: bugzilla_integration_valid.id)
diff --git a/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb b/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb
new file mode 100644
index 00000000000..496ce151032
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb
@@ -0,0 +1,400 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:diffs) { table(:merge_request_diffs) }
+ let(:commits) do
+ table(:merge_request_diff_commits).tap do |t|
+ t.extend(SuppressCompositePrimaryKeyWarning)
+ end
+ end
+
+ let(:commit_users) { described_class::MergeRequestDiffCommitUser }
+
+ let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
+ let(:project) { projects.create!(namespace_id: namespace.id) }
+ let(:merge_request) do
+ merge_requests.create!(
+ source_branch: 'x',
+ target_branch: 'master',
+ target_project_id: project.id
+ )
+ end
+
+ let(:diff) { diffs.create!(merge_request_id: merge_request.id) }
+ let(:migration) { described_class.new }
+
+ describe 'MergeRequestDiffCommit' do
+ describe '.each_row_to_migrate' do
+ it 'yields the rows to migrate for a given range' do
+ commit1 = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
+ author_name: 'bob',
+ author_email: 'bob@example.com',
+ committer_name: 'bob',
+ committer_email: 'bob@example.com'
+ )
+
+ commit2 = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 1,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
+ author_name: 'Alice',
+ author_email: 'alice@example.com',
+ committer_name: 'Alice',
+ committer_email: 'alice@example.com'
+ )
+
+ # We stub this constant to make sure we run at least two pagination
+ # queries for getting the data. This way we can test if the pagination
+ # is actually working properly.
+ stub_const(
+ 'Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers::COMMIT_ROWS_PER_QUERY',
+ 1
+ )
+
+ rows = []
+
+ described_class::MergeRequestDiffCommit.each_row_to_migrate(diff.id, diff.id + 1) do |row|
+ rows << row
+ end
+
+ expect(rows.length).to eq(2)
+
+ expect(rows[0].author_name).to eq(commit1.author_name)
+ expect(rows[1].author_name).to eq(commit2.author_name)
+ end
+ end
+ end
+
+ describe 'MergeRequestDiffCommitUser' do
+ describe '.union' do
+ it 'produces a union of the given queries' do
+ alice = commit_users.create!(name: 'Alice', email: 'alice@example.com')
+ bob = commit_users.create!(name: 'Bob', email: 'bob@example.com')
+ users = commit_users.union([
+ commit_users.where(name: 'Alice').to_sql,
+ commit_users.where(name: 'Bob').to_sql
+ ])
+
+ expect(users).to include(alice)
+ expect(users).to include(bob)
+ end
+ end
+ end
+
+ describe '#perform' do
+ it 'migrates the data in the range' do
+ commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
+ author_name: 'bob',
+ author_email: 'bob@example.com',
+ committer_name: 'bob',
+ committer_email: 'bob@example.com'
+ )
+
+ migration.perform(diff.id, diff.id + 1)
+
+ bob = commit_users.find_by(name: 'bob')
+ commit = commits.first
+
+ expect(commit.commit_author_id).to eq(bob.id)
+ expect(commit.committer_id).to eq(bob.id)
+ end
+
+ it 'treats empty names and Emails the same as NULL values' do
+ commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
+ author_name: 'bob',
+ author_email: 'bob@example.com',
+ committer_name: '',
+ committer_email: ''
+ )
+
+ migration.perform(diff.id, diff.id + 1)
+
+ bob = commit_users.find_by(name: 'bob')
+ commit = commits.first
+
+ expect(commit.commit_author_id).to eq(bob.id)
+ expect(commit.committer_id).to be_nil
+ end
+
+ it 'does not update rows without a committer and author' do
+ commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc')
+ )
+
+ migration.perform(diff.id, diff.id + 1)
+
+ commit = commits.first
+
+ expect(commit_users.count).to eq(0)
+ expect(commit.commit_author_id).to be_nil
+ expect(commit.committer_id).to be_nil
+ end
+
+ it 'marks the background job as done' do
+ Gitlab::Database::BackgroundMigrationJob.create!(
+ class_name: 'MigrateMergeRequestDiffCommitUsers',
+ arguments: [diff.id, diff.id + 1]
+ )
+
+ migration.perform(diff.id, diff.id + 1)
+
+ job = Gitlab::Database::BackgroundMigrationJob.first
+
+ expect(job.status).to eq('succeeded')
+ end
+ end
+
+ describe '#get_data_to_update' do
+ it 'returns the users and commit rows to update' do
+ commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
+ author_name: 'bob' + ('a' * 510),
+ author_email: 'bob@example.com',
+ committer_name: 'bob' + ('a' * 510),
+ committer_email: 'bob@example.com'
+ )
+
+ commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 1,
+ sha: Gitlab::Database::ShaAttribute.serialize('456abc'),
+ author_name: 'alice',
+ author_email: 'alice@example.com',
+ committer_name: 'alice',
+ committer_email: 'alice@example.com'
+ )
+
+ users, to_update = migration.get_data_to_update(diff.id, diff.id + 1)
+
+ bob_name = 'bob' + ('a' * 509)
+
+ expect(users).to include(%w[alice alice@example.com])
+ expect(users).to include([bob_name, 'bob@example.com'])
+
+ expect(to_update[[diff.id, 0]])
+ .to eq([[bob_name, 'bob@example.com'], [bob_name, 'bob@example.com']])
+
+ expect(to_update[[diff.id, 1]])
+ .to eq([%w[alice alice@example.com], %w[alice alice@example.com]])
+ end
+
+ it 'does not include a user if both the name and Email are missing' do
+ commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
+ author_name: nil,
+ author_email: nil,
+ committer_name: 'bob',
+ committer_email: 'bob@example.com'
+ )
+
+ users, _ = migration.get_data_to_update(diff.id, diff.id + 1)
+
+ expect(users).to eq([%w[bob bob@example.com]].to_set)
+ end
+ end
+
+ describe '#get_user_rows_in_batches' do
+ it 'retrieves all existing users' do
+ alice = commit_users.create!(name: 'alice', email: 'alice@example.com')
+ bob = commit_users.create!(name: 'bob', email: 'bob@example.com')
+
+ users = [[alice.name, alice.email], [bob.name, bob.email]]
+ mapping = {}
+
+ migration.get_user_rows_in_batches(users, mapping)
+
+ expect(mapping[%w[alice alice@example.com]]).to eq(alice)
+ expect(mapping[%w[bob bob@example.com]]).to eq(bob)
+ end
+ end
+
+ describe '#create_missing_users' do
+ it 'creates merge request diff commit users that are missing' do
+ alice = commit_users.create!(name: 'alice', email: 'alice@example.com')
+ users = [%w[alice alice@example.com], %w[bob bob@example.com]]
+ mapping = { %w[alice alice@example.com] => alice }
+
+ migration.create_missing_users(users, mapping)
+
+ expect(mapping[%w[alice alice@example.com]]).to eq(alice)
+ expect(mapping[%w[bob bob@example.com]].name).to eq('bob')
+ expect(mapping[%w[bob bob@example.com]].email).to eq('bob@example.com')
+ end
+ end
+
+ describe '#update_commit_rows' do
+ it 'updates the merge request diff commit rows' do
+ to_update = { [42, 0] => [%w[alice alice@example.com], []] }
+ user_mapping = { %w[alice alice@example.com] => double(:user, id: 1) }
+
+ expect(migration)
+ .to receive(:bulk_update_commit_rows)
+ .with({ [42, 0] => [1, nil] })
+
+ migration.update_commit_rows(to_update, user_mapping)
+ end
+ end
+
+ describe '#bulk_update_commit_rows' do
+ context 'when there are no authors and committers' do
+ it 'does not update any rows' do
+ migration.bulk_update_commit_rows({ [1, 0] => [] })
+
+ expect(described_class::MergeRequestDiffCommit.connection)
+ .not_to receive(:execute)
+ end
+ end
+
+ context 'when there are only authors' do
+ it 'only updates the author IDs' do
+ author = commit_users.create!(name: 'Alice', email: 'alice@example.com')
+ commit = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc')
+ )
+
+ mapping = {
+ [commit.merge_request_diff_id, commit.relative_order] =>
+ [author.id, nil]
+ }
+
+ migration.bulk_update_commit_rows(mapping)
+
+ commit = commits.first
+
+ expect(commit.commit_author_id).to eq(author.id)
+ expect(commit.committer_id).to be_nil
+ end
+ end
+
+ context 'when there are only committers' do
+ it 'only updates the committer IDs' do
+ committer =
+ commit_users.create!(name: 'Alice', email: 'alice@example.com')
+
+ commit = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc')
+ )
+
+ mapping = {
+ [commit.merge_request_diff_id, commit.relative_order] =>
+ [nil, committer.id]
+ }
+
+ migration.bulk_update_commit_rows(mapping)
+
+ commit = commits.first
+
+ expect(commit.committer_id).to eq(committer.id)
+ expect(commit.commit_author_id).to be_nil
+ end
+ end
+
+ context 'when there are both authors and committers' do
+ it 'updates both the author and committer IDs' do
+ author = commit_users.create!(name: 'Bob', email: 'bob@example.com')
+ committer =
+ commit_users.create!(name: 'Alice', email: 'alice@example.com')
+
+ commit = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc')
+ )
+
+ mapping = {
+ [commit.merge_request_diff_id, commit.relative_order] =>
+ [author.id, committer.id]
+ }
+
+ migration.bulk_update_commit_rows(mapping)
+
+ commit = commits.first
+
+ expect(commit.commit_author_id).to eq(author.id)
+ expect(commit.committer_id).to eq(committer.id)
+ end
+ end
+
+ context 'when there are multiple commit rows to update' do
+ it 'updates all the rows' do
+ author = commit_users.create!(name: 'Bob', email: 'bob@example.com')
+ committer =
+ commit_users.create!(name: 'Alice', email: 'alice@example.com')
+
+ commit1 = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc')
+ )
+
+ commit2 = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 1,
+ sha: Gitlab::Database::ShaAttribute.serialize('456abc')
+ )
+
+ mapping = {
+ [commit1.merge_request_diff_id, commit1.relative_order] =>
+ [author.id, committer.id],
+
+ [commit2.merge_request_diff_id, commit2.relative_order] =>
+ [author.id, nil]
+ }
+
+ migration.bulk_update_commit_rows(mapping)
+
+ commit1 = commits.find_by(relative_order: 0)
+ commit2 = commits.find_by(relative_order: 1)
+
+ expect(commit1.commit_author_id).to eq(author.id)
+ expect(commit1.committer_id).to eq(committer.id)
+
+ expect(commit2.commit_author_id).to eq(author.id)
+ expect(commit2.committer_id).to be_nil
+ end
+ end
+ end
+
+ describe '#primary_key' do
+ it 'returns the primary key for the commits table' do
+ key = migration.primary_key
+
+ expect(key.to_sql).to eq('("merge_request_diff_commits"."merge_request_diff_id", "merge_request_diff_commits"."relative_order")')
+ end
+ end
+
+ describe '#prepare' do
+ it 'trims a value to at most 512 characters' do
+ expect(migration.prepare('€' * 1_000)).to eq('€' * 512)
+ end
+
+ it 'returns nil if the value is an empty string' do
+ expect(migration.prepare('')).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
index 33498ffa748..9eda51f6ec4 100644
--- a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
@@ -2,6 +2,8 @@
require 'spec_helper'
+require 'webauthn/u2f_migrator'
+
RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20200925125321 do
let(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb b/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb
index f7466a2ddfd..b96d3f7f0b5 100644
--- a/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb
+++ b/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::UpdateJiraTrackerDataDeploymentTypeBasedOnUrl do
+RSpec.describe Gitlab::BackgroundMigration::UpdateJiraTrackerDataDeploymentTypeBasedOnUrl, schema: 20210421163509 do
let(:services_table) { table(:services) }
let(:service_jira_cloud) { services_table.create!(id: 1, type: 'JiraService') }
let(:service_jira_server) { services_table.create!(id: 2, type: 'JiraService') }
diff --git a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
index 8d625cab1d8..c0e4d1b5355 100644
--- a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
+++ b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache do
let_it_be(:project) { create(:project, :repository) }
+
let(:pipeline_status) { described_class.new(project) }
let(:cache_key) { pipeline_status.cache_key }
@@ -83,24 +84,8 @@ RSpec.describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cac
expect(pipeline_status).not_to be_has_cache
end
- context 'ci_pipeline_status_omit_commit_sha_in_cache_key is enabled' do
- before do
- stub_feature_flags(ci_pipeline_status_omit_commit_sha_in_cache_key: project)
- end
-
- it 'makes a Gitaly call' do
- expect { pipeline_status.load_status }.to change { Gitlab::GitalyClient.get_request_count }.by(1)
- end
- end
-
- context 'ci_pipeline_status_omit_commit_sha_in_cache_key is disabled' do
- before do
- stub_feature_flags(ci_pipeline_status_omit_commit_sha_in_cache_key: false)
- end
-
- it 'makes a Gitaly calls' do
- expect { pipeline_status.load_status }.to change { Gitlab::GitalyClient.get_request_count }.by(1)
- end
+ it 'makes a Gitaly call' do
+ expect { pipeline_status.load_status }.to change { Gitlab::GitalyClient.get_request_count }.by(1)
end
end
@@ -111,24 +96,8 @@ RSpec.describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cac
expect(pipeline_status).to be_has_cache
end
- context 'ci_pipeline_status_omit_commit_sha_in_cache_key is enabled' do
- before do
- stub_feature_flags(ci_pipeline_status_omit_commit_sha_in_cache_key: project)
- end
-
- it 'makes no Gitaly calls' do
- expect { pipeline_status.load_status }.to change { Gitlab::GitalyClient.get_request_count }.by(0)
- end
- end
-
- context 'ci_pipeline_status_omit_commit_sha_in_cache_key is disabled' do
- before do
- stub_feature_flags(ci_pipeline_status_omit_commit_sha_in_cache_key: false)
- end
-
- it 'makes a Gitaly calls' do
- expect { pipeline_status.load_status }.to change { Gitlab::GitalyClient.get_request_count }.by(1)
- end
+ it 'makes no Gitaly calls' do
+ expect { pipeline_status.load_status }.to change { Gitlab::GitalyClient.get_request_count }.by(0)
end
end
end
diff --git a/spec/lib/gitlab/cache/helpers_spec.rb b/spec/lib/gitlab/cache/helpers_spec.rb
new file mode 100644
index 00000000000..08e0d7729bd
--- /dev/null
+++ b/spec/lib/gitlab/cache/helpers_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cache::Helpers, :use_clean_rails_redis_caching do
+ subject(:instance) { Class.new.include(described_class).new }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:presenter) { MergeRequestSerializer.new(current_user: user, project: project) }
+
+ before do
+ # We have to stub #render as it's a Rails controller method unavailable in
+ # the module by itself
+ allow(instance).to receive(:render) { |data| data }
+ allow(instance).to receive(:current_user) { user }
+ end
+
+ describe "#render_cached" do
+ subject do
+ instance.render_cached(presentable, **kwargs)
+ end
+
+ let(:kwargs) do
+ {
+ with: presenter,
+ project: project
+ }
+ end
+
+ context 'single object' do
+ let_it_be(:presentable) { create(:merge_request, source_project: project, source_branch: 'wip') }
+
+ it_behaves_like 'object cache helper'
+ end
+
+ context 'collection of objects' do
+ let_it_be(:presentable) do
+ [
+ create(:merge_request, source_project: project, source_branch: 'fix'),
+ create(:merge_request, source_project: project, source_branch: 'master')
+ ]
+ end
+
+ it_behaves_like 'collection cache helper'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cache/import/caching_spec.rb b/spec/lib/gitlab/cache/import/caching_spec.rb
index 8ce12f5d32e..f770960e27a 100644
--- a/spec/lib/gitlab/cache/import/caching_spec.rb
+++ b/spec/lib/gitlab/cache/import/caching_spec.rb
@@ -100,6 +100,30 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
end
end
+ describe '.hash_add' do
+ it 'adds a value to a hash' do
+ described_class.hash_add('foo', 1, 1)
+ described_class.hash_add('foo', 2, 2)
+
+ key = described_class.cache_key_for('foo')
+ values = Gitlab::Redis::Cache.with { |r| r.hgetall(key) }
+
+ expect(values).to eq({ '1' => '1', '2' => '2' })
+ end
+ end
+
+ describe '.values_from_hash' do
+ it 'returns empty hash when the hash is empty' do
+ expect(described_class.values_from_hash('foo')).to eq({})
+ end
+
+ it 'returns the set list of values' do
+ described_class.hash_add('foo', 1, 1)
+
+ expect(described_class.values_from_hash('foo')).to eq({ '1' => '1' })
+ end
+ end
+
describe '.write_multiple' do
it 'sets multiple keys when key_prefix not set' do
mapping = { 'foo' => 10, 'bar' => 20 }
diff --git a/spec/lib/gitlab/changelog/config_spec.rb b/spec/lib/gitlab/changelog/config_spec.rb
index 2809843b832..a464c1e57e5 100644
--- a/spec/lib/gitlab/changelog/config_spec.rb
+++ b/spec/lib/gitlab/changelog/config_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe Gitlab::Changelog::Config do
expect(config.date_format).to eq('foo')
expect(config.template)
- .to be_instance_of(Gitlab::Changelog::AST::Expressions)
+ .to be_instance_of(Gitlab::TemplateParser::AST::Expressions)
expect(config.categories).to eq({ 'foo' => 'bar' })
expect(config.tag_regex).to eq('foo')
@@ -53,6 +53,16 @@ RSpec.describe Gitlab::Changelog::Config do
expect { described_class.from_hash(project, 'categories' => 10) }
.to raise_error(Gitlab::Changelog::Error)
end
+
+ it 'raises a Gitlab::Changelog::Error when the template is invalid' do
+ invalid_template = <<~TPL
+ {% each {{foo}} %}
+ {% end %}
+ TPL
+
+ expect { described_class.from_hash(project, 'template' => invalid_template) }
+ .to raise_error(Gitlab::Changelog::Error)
+ end
end
describe '#contributor?' do
diff --git a/spec/lib/gitlab/checks/project_moved_spec.rb b/spec/lib/gitlab/checks/container_moved_spec.rb
index 469aea8d093..00ef5604e1d 100644
--- a/spec/lib/gitlab/checks/project_moved_spec.rb
+++ b/spec/lib/gitlab/checks/container_moved_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::Checks::ContainerMoved, :clean_gitlab_redis_shared_state do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, :wiki_repo, namespace: user.namespace) }
@@ -14,27 +14,48 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
subject { described_class.new(repository, git_user, protocol, redirect_path) }
describe '.fetch_message' do
+ let(:key) { "redirect_namespace:#{user.id}:#{project.repository.gl_repository}" }
+ let(:legacy_key) { "redirect_namespace:#{user.id}:#{project.id}" }
+
context 'with a redirect message queue' do
before do
subject.add_message
end
it 'returns the redirect message' do
- expect(described_class.fetch_message(user.id, project.id)).to eq(subject.message)
+ expect(described_class.fetch_message(user, project.repository)).to eq(subject.message)
end
it 'deletes the redirect message from redis' do
- expect(Gitlab::Redis::SharedState.with { |redis| redis.get("redirect_namespace:#{user.id}:#{project.id}") }).not_to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).not_to be_nil
- described_class.fetch_message(user.id, project.id)
+ described_class.fetch_message(user, project.repository)
- expect(Gitlab::Redis::SharedState.with { |redis| redis.get("redirect_namespace:#{user.id}:#{project.id}") }).to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).to be_nil
+ end
+
+ context 'with a message in the legacy key' do
+ before do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(legacy_key, 'legacy message')
+ end
+ end
+
+ it 'returns and deletes the legacy message' do
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).not_to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(legacy_key) }).not_to be_nil
+
+ expect(described_class.fetch_message(user, project.repository)).to eq('legacy message')
+
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(legacy_key) }).to be_nil
+ end
end
end
context 'with no redirect message queue' do
it 'returns nil' do
- expect(described_class.fetch_message(1, 2)).to be_nil
+ expect(described_class.fetch_message(user, project.repository)).to be_nil
end
end
end
@@ -58,7 +79,7 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
shared_examples 'returns redirect message' do
it do
message = <<~MSG
- Project '#{redirect_path}' was moved to '#{project.full_path}'.
+ #{container_label} '#{redirect_path}' was moved to '#{repository.container.full_path}'.
Please update your Git remote:
@@ -86,6 +107,7 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
context 'with project' do
it_behaves_like 'errors per protocol' do
+ let(:container_label) { 'Project' }
let(:http_url_to_repo) { project.http_url_to_repo }
let(:ssh_url_to_repo) { project.ssh_url_to_repo }
end
@@ -95,6 +117,7 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
let(:repository) { project.wiki.repository }
it_behaves_like 'errors per protocol' do
+ let(:container_label) { 'Project wiki' }
let(:http_url_to_repo) { project.wiki.http_url_to_repo }
let(:ssh_url_to_repo) { project.wiki.ssh_url_to_repo }
end
@@ -106,6 +129,7 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
let(:repository) { snippet.repository }
it_behaves_like 'errors per protocol' do
+ let(:container_label) { 'Project snippet' }
let(:http_url_to_repo) { snippet.http_url_to_repo }
let(:ssh_url_to_repo) { snippet.ssh_url_to_repo }
end
@@ -116,8 +140,10 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
let(:repository) { snippet.repository }
- it 'returns nil' do
- expect(subject.add_message).to be_nil
+ it_behaves_like 'errors per protocol' do
+ let(:container_label) { 'Personal snippet' }
+ let(:http_url_to_repo) { snippet.http_url_to_repo }
+ let(:ssh_url_to_repo) { snippet.ssh_url_to_repo }
end
end
end
diff --git a/spec/lib/gitlab/checks/project_created_spec.rb b/spec/lib/gitlab/checks/project_created_spec.rb
index 74e43b04b6b..6a2e4201030 100644
--- a/spec/lib/gitlab/checks/project_created_spec.rb
+++ b/spec/lib/gitlab/checks/project_created_spec.rb
@@ -13,27 +13,48 @@ RSpec.describe Gitlab::Checks::ProjectCreated, :clean_gitlab_redis_shared_state
subject { described_class.new(repository, git_user, 'http') }
describe '.fetch_message' do
+ let(:key) { "project_created:#{user.id}:#{project.repository.gl_repository}" }
+ let(:legacy_key) { "project_created:#{user.id}:#{project.id}" }
+
context 'with a project created message queue' do
before do
subject.add_message
end
it 'returns project created message' do
- expect(described_class.fetch_message(user.id, project.id)).to eq(subject.message)
+ expect(described_class.fetch_message(user, project.repository)).to eq(subject.message)
end
it 'deletes the project created message from redis' do
- expect(Gitlab::Redis::SharedState.with { |redis| redis.get("project_created:#{user.id}:#{project.id}") }).not_to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).not_to be_nil
+
+ described_class.fetch_message(user, project.repository)
+
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).to be_nil
+ end
+
+ context 'with a message in the legacy key' do
+ before do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(legacy_key, 'legacy message')
+ end
+ end
+
+ it 'returns and deletes the legacy message' do
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).not_to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(legacy_key) }).not_to be_nil
- described_class.fetch_message(user.id, project.id)
+ expect(described_class.fetch_message(user, project.repository)).to eq('legacy message')
- expect(Gitlab::Redis::SharedState.with { |redis| redis.get("project_created:#{user.id}:#{project.id}") }).to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(legacy_key) }).to be_nil
+ end
end
end
context 'with no project created message queue' do
it 'returns nil' do
- expect(described_class.fetch_message(1, 2)).to be_nil
+ expect(described_class.fetch_message(user, project.repository)).to be_nil
end
end
end
diff --git a/spec/lib/gitlab/ci/ansi2json/line_spec.rb b/spec/lib/gitlab/ci/ansi2json/line_spec.rb
index 909c0f1b3ea..d16750d19f1 100644
--- a/spec/lib/gitlab/ci/ansi2json/line_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2json/line_spec.rb
@@ -76,30 +76,25 @@ RSpec.describe Gitlab::Ci::Ansi2json::Line do
end
describe '#set_section_duration' do
- shared_examples 'set_section_duration' do
- it 'sets and formats the section_duration' do
- subject.set_section_duration(75)
+ using RSpec::Parameterized::TableSyntax
- expect(subject.section_duration).to eq('01:15')
- end
+ where(:duration, :result) do
+ nil | '00:00'
+ 'string' | '00:00'
+ 0.seconds | '00:00'
+ 7.seconds | '00:07'
+ 75 | '01:15'
+ 1.minute + 15.seconds | '01:15'
+ 13.hours + 14.minutes + 15.seconds | '13:14:15'
+ 1.day + 13.hours + 14.minutes + 15.seconds | '37:14:15'
end
- context 'with default timezone' do
- it_behaves_like 'set_section_duration'
- end
+ with_them do
+ it do
+ subject.set_section_duration(duration)
- context 'with a timezone carrying minutes offset' do
- before do
- # The actual call by does use Time.at(...).utc that the following
- # rubocop rule (Rails/TimeZone) suggests, but for this specific
- # test's purposes we needed to mock at the Time.at call point.
-
- # rubocop:disable Rails/TimeZone
- allow(Time).to receive(:at).with(75).and_return(Time.at(75, in: '+05:30'))
- # rubocop:enable Rails/TimeZone
+ expect(subject.section_duration).to eq(result)
end
-
- it_behaves_like 'set_section_duration'
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
index 0e6d5b6c311..7476fc6c25f 100644
--- a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
@@ -143,51 +143,22 @@ RSpec.describe Gitlab::Ci::Config::Entry::Artifacts do
end
describe 'excluded artifacts' do
- context 'when configuration is valid and the feature is enabled' do
- before do
- stub_feature_flags(ci_artifacts_exclude: true)
- end
-
- context 'when configuration is valid' do
- let(:config) { { untracked: true, exclude: ['some/directory/'] } }
-
- it 'correctly parses the configuration' do
- expect(entry).to be_valid
- expect(entry.value).to eq config
- end
- end
+ context 'when configuration is valid' do
+ let(:config) { { untracked: true, exclude: ['some/directory/'] } }
- context 'when configuration is not valid' do
- let(:config) { { untracked: true, exclude: 1234 } }
-
- it 'returns an error' do
- expect(entry).not_to be_valid
- expect(entry.errors)
- .to include 'artifacts exclude should be an array of strings'
- end
+ it 'correctly parses the configuration' do
+ expect(entry).to be_valid
+ expect(entry.value).to eq config
end
end
- context 'when artifacts/exclude feature is disabled' do
- before do
- stub_feature_flags(ci_artifacts_exclude: false)
- end
-
- context 'when configuration has been provided' do
- let(:config) { { untracked: true, exclude: ['some/directory/'] } }
-
- it 'returns an error' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include 'artifacts exclude feature is disabled'
- end
- end
+ context 'when configuration is not valid' do
+ let(:config) { { untracked: true, exclude: 1234 } }
- context 'when configuration is not present' do
- let(:config) { { untracked: true } }
-
- it 'is a valid configuration' do
- expect(entry).to be_valid
- end
+ it 'returns an error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors)
+ .to include 'artifacts exclude should be an array of strings'
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index d8907f7015b..12b8960eb32 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -40,6 +40,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports do
:secret_detection | 'gl-secret-detection-report.json'
:dependency_scanning | 'gl-dependency-scanning-report.json'
:container_scanning | 'gl-container-scanning-report.json'
+ :cluster_image_scanning | 'gl-cluster-image-scanning-report.json'
:dast | 'gl-dast-report.json'
:license_scanning | 'gl-license-scanning-report.json'
:performance | 'performance.json'
diff --git a/spec/lib/gitlab/ci/lint_spec.rb b/spec/lib/gitlab/ci/lint_spec.rb
index aaa3a7a8b9d..77f6608eb85 100644
--- a/spec/lib/gitlab/ci/lint_spec.rb
+++ b/spec/lib/gitlab/ci/lint_spec.rb
@@ -247,7 +247,7 @@ RSpec.describe Gitlab::Ci::Lint do
include_context 'advanced validations' do
it 'runs advanced logical validations' do
expect(subject).not_to be_valid
- expect(subject.errors).to eq(["'test' job needs 'build' job, but it was not added to the pipeline"])
+ expect(subject.errors).to eq(["'test' job needs 'build' job, but 'build' is not in any previous stage"])
end
end
diff --git a/spec/lib/gitlab/ci/matching/runner_matcher_spec.rb b/spec/lib/gitlab/ci/matching/runner_matcher_spec.rb
index d6492caa31a..6b3fef33182 100644
--- a/spec/lib/gitlab/ci/matching/runner_matcher_spec.rb
+++ b/spec/lib/gitlab/ci/matching/runner_matcher_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Matching::RunnerMatcher do
let(:dummy_attributes) do
{
+ runner_ids: [1],
runner_type: 'instance_type',
public_projects_minutes_cost_factor: 0,
private_projects_minutes_cost_factor: 1,
@@ -26,6 +27,8 @@ RSpec.describe Gitlab::Ci::Matching::RunnerMatcher do
context 'with attributes' do
let(:attributes) { dummy_attributes }
+ it { expect(matcher.runner_ids).to eq([1]) }
+
it { expect(matcher.runner_type).to eq('instance_type') }
it { expect(matcher.public_projects_minutes_cost_factor).to eq(0) }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
index 2e537f40692..687bb82a8ef 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
@@ -203,18 +203,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
expect(rspec_variables['VAR1']).to eq('overridden var 1')
end
-
- context 'when the FF ci_workflow_rules_variables is disabled' do
- before do
- stub_feature_flags(ci_workflow_rules_variables: false)
- end
-
- it 'sends root variable' do
- run_chain
-
- expect(rspec_variables['VAR1']).to eq('var 1')
- end
- end
end
context 'N+1 queries' do
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 020f957cf70..58938251ca1 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -11,8 +11,9 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let(:seed_context) { double(pipeline: pipeline, root_variables: root_variables) }
let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage } }
let(:previous_stages) { [] }
+ let(:current_stage) { double(seeds_names: [attributes[:name]]) }
- let(:seed_build) { described_class.new(seed_context, attributes, previous_stages) }
+ let(:seed_build) { described_class.new(seed_context, attributes, previous_stages, current_stage) }
describe '#attributes' do
subject { seed_build.attributes }
@@ -90,6 +91,20 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
+ context 'with job:tags' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
+ job_variables: [{ key: 'VARIABLE', value: 'value', public: true }],
+ tag_list: ['static-tag', '$VARIABLE', '$NO_VARIABLE']
+ }
+ end
+
+ it { is_expected.to include(tag_list: ['static-tag', 'value', '$NO_VARIABLE']) }
+ it { is_expected.to include(yaml_variables: [{ key: 'VARIABLE', value: 'value', public: true }]) }
+ end
+
context 'with cache:key' do
let(:attributes) do
{
@@ -250,19 +265,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
{ key: 'VAR4', value: 'new var pipeline 4', public: true }]
)
end
-
- context 'when FF ci_workflow_rules_variables is disabled' do
- before do
- stub_feature_flags(ci_workflow_rules_variables: false)
- end
-
- it 'returns existing yaml variables' do
- expect(subject[:yaml_variables]).to match_array(
- [{ key: 'VAR2', value: 'var 2', public: true },
- { key: 'VAR3', value: 'var 3', public: true }]
- )
- end
- end
end
context 'when root_variables_inheritance is false' do
@@ -1092,7 +1094,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
it "returns an error" do
expect(subject.errors).to contain_exactly(
- "'rspec' job needs 'build' job, but it was not added to the pipeline")
+ "'rspec' job needs 'build' job, but 'build' is not in any previous stage")
end
context 'when the needed job is optional' do
@@ -1128,6 +1130,28 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
+ context 'when build job is part of the same stage' do
+ let(:current_stage) { double(seeds_names: [attributes[:name], 'build']) }
+
+ it 'is included' do
+ is_expected.to be_included
+ end
+
+ it 'does not have errors' do
+ expect(subject.errors).to be_empty
+ end
+
+ context 'when ci_same_stage_job_needs FF is disabled' do
+ before do
+ stub_feature_flags(ci_same_stage_job_needs: false)
+ end
+
+ it 'has errors' do
+ expect(subject.errors).to contain_exactly("'rspec' job needs 'build' job, but 'build' is not in any previous stage")
+ end
+ end
+ end
+
context 'when using 101 needs' do
let(:needs_count) { 101 }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
index 21be8660def..3424e7d03a3 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
@@ -34,6 +34,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Pipeline do
described_class.new(seed_context, stages_attributes)
end
+ before do
+ stub_feature_flags(ci_same_stage_job_needs: false)
+ end
+
describe '#stages' do
it 'returns the stage resources' do
stages = seed.stages
@@ -65,7 +69,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Pipeline do
}
expect(seed.errors).to contain_exactly(
- "'invalid_job' job needs 'non-existent' job, but it was not added to the pipeline")
+ "'invalid_job' job needs 'non-existent' job, but 'non-existent' is not in any previous stage")
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb b/spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb
index 89602fe79d1..62ff7fcafea 100644
--- a/spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::Ci::PipelineObjectHierarchy do
let_it_be(:cousin_parent) { create(:ci_pipeline, project: project) }
let_it_be(:cousin) { create(:ci_pipeline, project: project) }
let_it_be(:triggered_pipeline) { create(:ci_pipeline) }
+ let_it_be(:triggered_child_pipeline) { create(:ci_pipeline) }
before_all do
create_source_pipeline(ancestor, parent)
@@ -19,19 +20,20 @@ RSpec.describe Gitlab::Ci::PipelineObjectHierarchy do
create_source_pipeline(parent, child)
create_source_pipeline(cousin_parent, cousin)
create_source_pipeline(child, triggered_pipeline)
+ create_source_pipeline(triggered_pipeline, triggered_child_pipeline)
end
describe '#base_and_ancestors' do
it 'includes the base and its ancestors' do
relation = described_class.new(::Ci::Pipeline.where(id: parent.id),
- options: { same_project: true }).base_and_ancestors
+ options: { project_condition: :same }).base_and_ancestors
expect(relation).to contain_exactly(ancestor, parent)
end
it 'can find ancestors upto a certain level' do
relation = described_class.new(::Ci::Pipeline.where(id: child.id),
- options: { same_project: true }).base_and_ancestors(upto: ancestor.id)
+ options: { project_condition: :same }).base_and_ancestors(upto: ancestor.id)
expect(relation).to contain_exactly(parent, child)
end
@@ -39,7 +41,7 @@ RSpec.describe Gitlab::Ci::PipelineObjectHierarchy do
describe 'hierarchy_order option' do
let(:relation) do
described_class.new(::Ci::Pipeline.where(id: child.id),
- options: { same_project: true }).base_and_ancestors(hierarchy_order: hierarchy_order)
+ options: { project_condition: :same }).base_and_ancestors(hierarchy_order: hierarchy_order)
end
context ':asc' do
@@ -63,15 +65,32 @@ RSpec.describe Gitlab::Ci::PipelineObjectHierarchy do
describe '#base_and_descendants' do
it 'includes the base and its descendants' do
relation = described_class.new(::Ci::Pipeline.where(id: parent.id),
- options: { same_project: true }).base_and_descendants
+ options: { project_condition: :same }).base_and_descendants
expect(relation).to contain_exactly(parent, child)
end
+ context 'when project_condition: :different' do
+ it "includes the base and other project pipelines" do
+ relation = described_class.new(::Ci::Pipeline.where(id: child.id),
+ options: { project_condition: :different }).base_and_descendants
+
+ expect(relation).to contain_exactly(child, triggered_pipeline, triggered_child_pipeline)
+ end
+ end
+
+ context 'when project_condition: nil' do
+ it "includes the base and its descendants with other project pipeline" do
+ relation = described_class.new(::Ci::Pipeline.where(id: parent.id)).base_and_descendants
+
+ expect(relation).to contain_exactly(parent, child, triggered_pipeline, triggered_child_pipeline)
+ end
+ end
+
context 'when with_depth is true' do
let(:relation) do
described_class.new(::Ci::Pipeline.where(id: ancestor.id),
- options: { same_project: true }).base_and_descendants(with_depth: true)
+ options: { project_condition: :same }).base_and_descendants(with_depth: true)
end
it 'includes depth in the results' do
@@ -91,21 +110,51 @@ RSpec.describe Gitlab::Ci::PipelineObjectHierarchy do
end
describe '#all_objects' do
- it 'includes its ancestors and descendants' do
- relation = described_class.new(::Ci::Pipeline.where(id: parent.id),
- options: { same_project: true }).all_objects
+ context 'when passing ancestors_base' do
+ let(:options) { { project_condition: project_condition } }
+ let(:ancestors_base) { ::Ci::Pipeline.where(id: child.id) }
+
+ subject(:relation) { described_class.new(ancestors_base, options: options).all_objects }
- expect(relation).to contain_exactly(ancestor, parent, child)
+ context 'when project_condition: :same' do
+ let(:project_condition) { :same }
+
+ it "includes its ancestors and descendants" do
+ expect(relation).to contain_exactly(ancestor, parent, child)
+ end
+ end
+
+ context 'when project_condition: :different' do
+ let(:project_condition) { :different }
+
+ it "includes the base and other project pipelines" do
+ expect(relation).to contain_exactly(child, triggered_pipeline, triggered_child_pipeline)
+ end
+ end
end
- it 'returns all family tree' do
- relation = described_class.new(
- ::Ci::Pipeline.where(id: child.id),
- described_class.new(::Ci::Pipeline.where(id: child.id), options: { same_project: true }).base_and_ancestors,
- options: { same_project: true }
- ).all_objects
+ context 'when passing ancestors_base and descendants_base' do
+ let(:options) { { project_condition: project_condition } }
+ let(:ancestors_base) { ::Ci::Pipeline.where(id: child.id) }
+ let(:descendants_base) { described_class.new(::Ci::Pipeline.where(id: child.id), options: options).base_and_ancestors }
+
+ subject(:relation) { described_class.new(ancestors_base, descendants_base, options: options).all_objects }
+
+ context 'when project_condition: :same' do
+ let(:project_condition) { :same }
- expect(relation).to contain_exactly(ancestor, parent, cousin_parent, child, cousin)
+ it 'returns all family tree' do
+ expect(relation).to contain_exactly(ancestor, parent, cousin_parent, child, cousin)
+ end
+ end
+
+ context 'when project_condition: :different' do
+ let(:project_condition) { :different }
+
+ it "includes the base and other project pipelines" do
+ expect(relation).to contain_exactly(child, triggered_pipeline, triggered_child_pipeline)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/reports/security/identifier_spec.rb b/spec/lib/gitlab/ci/reports/security/identifier_spec.rb
new file mode 100644
index 00000000000..123730b6ee6
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/identifier_spec.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::Identifier do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#initialize' do
+ subject { described_class.new(**params) }
+
+ let(:params) do
+ {
+ external_type: 'brakeman_warning_code',
+ external_id: '107',
+ name: 'Brakeman Warning Code 107',
+ url: 'https://brakemanscanner.org/docs/warning_types/cross_site_scripting/'
+ }
+ end
+
+ context 'when all params are given' do
+ it 'initializes an instance' do
+ expect { subject }.not_to raise_error
+
+ expect(subject).to have_attributes(
+ external_type: 'brakeman_warning_code',
+ external_id: '107',
+ fingerprint: 'aa2254904a69148ad14b6ac5db25b355da9c987f',
+ name: 'Brakeman Warning Code 107',
+ url: 'https://brakemanscanner.org/docs/warning_types/cross_site_scripting/'
+ )
+ end
+ end
+
+ %i[external_type external_id name].each do |attribute|
+ context "when attribute #{attribute} is missing" do
+ before do
+ params.delete(attribute)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+ end
+ end
+
+ describe '#key' do
+ let(:identifier) { create(:ci_reports_security_identifier) }
+
+ subject { identifier.key }
+
+ it 'returns fingerprint' do
+ is_expected.to eq(identifier.fingerprint)
+ end
+ end
+
+ describe '#type_identifier?' do
+ where(:external_type, :expected_result) do
+ 'cve' | false
+ 'foo' | false
+ 'cwe' | true
+ 'wasc' | true
+ end
+
+ with_them do
+ let(:identifier) { create(:ci_reports_security_identifier, external_type: external_type) }
+
+ subject { identifier.type_identifier? }
+
+ it { is_expected.to be(expected_result) }
+ end
+ end
+
+ describe 'external type check methods' do
+ where(:external_type, :is_cve?, :is_cwe?, :is_wasc?) do
+ 'Foo' | false | false | false
+ 'Cve' | true | false | false
+ 'Cwe' | false | true | false
+ 'Wasc' | false | false | true
+ end
+
+ with_them do
+ let(:identifier) { create(:ci_reports_security_identifier, external_type: external_type) }
+
+ it 'returns correct result for the type check method' do
+ expect(identifier.cve?).to be(is_cve?)
+ expect(identifier.cwe?).to be(is_cwe?)
+ expect(identifier.wasc?).to be(is_wasc?)
+ end
+ end
+ end
+
+ describe '#to_hash' do
+ let(:identifier) { create(:ci_reports_security_identifier) }
+
+ subject { identifier.to_hash }
+
+ it 'returns expected hash' do
+ is_expected.to eq({
+ external_type: identifier.external_type,
+ external_id: identifier.external_id,
+ fingerprint: identifier.fingerprint,
+ name: identifier.name,
+ url: identifier.url
+ })
+ end
+ end
+
+ describe '#==' do
+ where(:type_1, :id_1, :type_2, :id_2, :equal, :case_name) do
+ 'CVE' | '2018-1234' | 'CVE' | '2018-1234' | true | 'when external_type and external_id are equal'
+ 'CVE' | '2018-1234' | 'brakeman_code' | '2018-1234' | false | 'when external_type is different'
+ 'CVE' | '2018-1234' | 'CVE' | '2019-6789' | false | 'when external_id is different'
+ end
+
+ with_them do
+ let(:identifier_1) { create(:ci_reports_security_identifier, external_type: type_1, external_id: id_1) }
+ let(:identifier_2) { create(:ci_reports_security_identifier, external_type: type_2, external_id: id_2) }
+
+ it "returns #{params[:equal]}" do
+ expect(identifier_1 == identifier_2).to eq(equal)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/security/link_spec.rb b/spec/lib/gitlab/ci/reports/security/link_spec.rb
new file mode 100644
index 00000000000..7b55af27f4d
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/link_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::Link do
+ subject(:security_link) { described_class.new(name: 'CVE-2020-0202', url: 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-0202') }
+
+ describe '#initialize' do
+ context 'when all params are given' do
+ it 'initializes an instance' do
+ expect { subject }.not_to raise_error
+
+ expect(subject).to have_attributes(
+ name: 'CVE-2020-0202',
+ url: 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-0202'
+ )
+ end
+ end
+
+ describe '#to_hash' do
+ it 'returns expected hash' do
+ expect(security_link.to_hash).to eq(
+ {
+ name: 'CVE-2020-0202',
+ url: 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-0202'
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/security/scan_spec.rb b/spec/lib/gitlab/ci/reports/security/scan_spec.rb
new file mode 100644
index 00000000000..b4968ff3a6e
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/scan_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::Scan do
+ describe '#initialize' do
+ subject { described_class.new(params.with_indifferent_access) }
+
+ let(:params) do
+ {
+ status: 'success',
+ type: 'dependency-scanning',
+ start_time: 'placeholer',
+ end_time: 'placholder'
+ }
+ end
+
+ context 'when all params are given' do
+ it 'initializes an instance' do
+ expect { subject }.not_to raise_error
+
+ expect(subject).to have_attributes(
+ status: 'success',
+ type: 'dependency-scanning',
+ start_time: 'placeholer',
+ end_time: 'placholder'
+ )
+ end
+ end
+
+ describe '#to_hash' do
+ subject { described_class.new(params.with_indifferent_access).to_hash }
+
+ it 'returns expected hash' do
+ is_expected.to eq(
+ {
+ status: 'success',
+ type: 'dependency-scanning',
+ start_time: 'placeholer',
+ end_time: 'placholder'
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/security/scanned_resource_spec.rb b/spec/lib/gitlab/ci/reports/security/scanned_resource_spec.rb
new file mode 100644
index 00000000000..e9daa05e8b9
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/scanned_resource_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::ScannedResource do
+ let(:url) { 'http://example.com:3001/1?foo=bar' }
+ let(:request_method) { 'GET' }
+
+ context 'when the URI is not a URI' do
+ subject { ::Gitlab::Ci::Reports::Security::ScannedResource.new(url, request_method) }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when the URL is valid' do
+ subject { ::Gitlab::Ci::Reports::Security::ScannedResource.new(URI.parse(url), request_method) }
+
+ it 'sets the URL attributes' do
+ expect(subject.request_method).to eq(request_method)
+ expect(subject.request_uri.to_s).to eq(url)
+ expect(subject.url_scheme).to eq('http')
+ expect(subject.url_host).to eq('example.com')
+ expect(subject.url_port).to eq(3001)
+ expect(subject.url_path).to eq('/1')
+ expect(subject.url_query).to eq('foo=bar')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/security/scanner_spec.rb b/spec/lib/gitlab/ci/reports/security/scanner_spec.rb
new file mode 100644
index 00000000000..99f5d4723d3
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/scanner_spec.rb
@@ -0,0 +1,146 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::Scanner do
+ describe '#initialize' do
+ subject { described_class.new(**params) }
+
+ let(:params) do
+ {
+ external_id: 'brakeman',
+ name: 'Brakeman',
+ vendor: 'GitLab',
+ version: '1.0.1'
+ }
+ end
+
+ context 'when all params are given' do
+ it 'initializes an instance' do
+ expect { subject }.not_to raise_error
+
+ expect(subject).to have_attributes(
+ external_id: 'brakeman',
+ name: 'Brakeman',
+ vendor: 'GitLab'
+ )
+ end
+ end
+
+ %i[external_id name].each do |attribute|
+ context "when attribute #{attribute} is missing" do
+ before do
+ params.delete(attribute)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+ end
+ end
+
+ describe '#key' do
+ let(:scanner) { create(:ci_reports_security_scanner) }
+
+ subject { scanner.key }
+
+ it 'returns external_id' do
+ is_expected.to eq(scanner.external_id)
+ end
+ end
+
+ describe '#to_hash' do
+ let(:scanner) { create(:ci_reports_security_scanner) }
+
+ subject { scanner.to_hash }
+
+ it 'returns expected hash' do
+ is_expected.to eq({
+ external_id: scanner.external_id,
+ name: scanner.name,
+ vendor: scanner.vendor
+ })
+ end
+
+ context 'when vendor is not defined' do
+ let(:scanner) { create(:ci_reports_security_scanner, vendor: nil) }
+
+ it 'returns expected hash' do
+ is_expected.to eq({
+ external_id: scanner.external_id,
+ name: scanner.name
+ })
+ end
+ end
+ end
+
+ describe '#==' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:id_1, :id_2, :equal, :case_name) do
+ 'brakeman' | 'brakeman' | true | 'when external_id is equal'
+ 'brakeman' | 'bandit' | false | 'when external_id is different'
+ end
+
+ with_them do
+ let(:scanner_1) { create(:ci_reports_security_scanner, external_id: id_1) }
+ let(:scanner_2) { create(:ci_reports_security_scanner, external_id: id_2) }
+
+ it "returns #{params[:equal]}" do
+ expect(scanner_1 == scanner_2).to eq(equal)
+ end
+ end
+ end
+
+ describe '#<=>' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:scanner_1) { create(:ci_reports_security_scanner, **scanner_1_attributes) }
+ let(:scanner_2) { create(:ci_reports_security_scanner, **scanner_2_attributes) }
+
+ subject { scanner_1 <=> scanner_2 }
+
+ context 'when the `external_id` of the scanners are different' do
+ where(:scanner_1_attributes, :scanner_2_attributes, :expected_comparison_result) do
+ { external_id: 'bundler_audit', name: 'foo', vendor: 'bar' } | { external_id: 'retire.js', name: 'foo', vendor: 'bar' } | -1
+ { external_id: 'retire.js', name: 'foo', vendor: 'bar' } | { external_id: 'gemnasium', name: 'foo', vendor: 'bar' } | -1
+ { external_id: 'gemnasium', name: 'foo', vendor: 'bar' } | { external_id: 'gemnasium-maven', name: 'foo', vendor: 'bar' } | -1
+ { external_id: 'gemnasium-maven', name: 'foo', vendor: 'bar' } | { external_id: 'gemnasium-python', name: 'foo', vendor: 'bar' } | -1
+ { external_id: 'gemnasium-python', name: 'foo', vendor: 'bar' } | { external_id: 'bandit', name: 'foo', vendor: 'bar' } | 1
+ { external_id: 'bandit', name: 'foo', vendor: 'bar' } | { external_id: 'semgrep', name: 'foo', vendor: 'bar' } | -1
+ { external_id: 'semgrep', name: 'foo', vendor: 'bar' } | { external_id: 'unknown', name: 'foo', vendor: 'bar' } | -1
+ { external_id: 'gemnasium', name: 'foo', vendor: 'bar' } | { external_id: 'gemnasium', name: 'foo', vendor: nil } | 1
+ end
+
+ with_them do
+ it { is_expected.to eq(expected_comparison_result) }
+ end
+ end
+
+ context 'when the `external_id` of the scanners are equal' do
+ context 'when the `name` of the scanners are different' do
+ where(:scanner_1_attributes, :scanner_2_attributes, :expected_comparison_result) do
+ { external_id: 'gemnasium', name: 'a', vendor: 'bar' } | { external_id: 'gemnasium', name: 'b', vendor: 'bar' } | -1
+ { external_id: 'gemnasium', name: 'd', vendor: 'bar' } | { external_id: 'gemnasium', name: 'c', vendor: 'bar' } | 1
+ end
+
+ with_them do
+ it { is_expected.to eq(expected_comparison_result) }
+ end
+ end
+
+ context 'when the `name` of the scanners are equal' do
+ where(:scanner_1_attributes, :scanner_2_attributes, :expected_comparison_result) do
+ { external_id: 'gemnasium', name: 'foo', vendor: 'a' } | { external_id: 'gemnasium', name: 'foo', vendor: 'a' } | 0 # rubocop:disable Lint/BinaryOperatorWithIdenticalOperands
+ { external_id: 'gemnasium', name: 'foo', vendor: 'a' } | { external_id: 'gemnasium', name: 'foo', vendor: 'b' } | -1
+ { external_id: 'gemnasium', name: 'foo', vendor: 'b' } | { external_id: 'gemnasium', name: 'foo', vendor: 'a' } | 1
+ end
+
+ with_them do
+ it { is_expected.to eq(expected_comparison_result) }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/test_case_spec.rb b/spec/lib/gitlab/ci/reports/test_case_spec.rb
index 668a475514e..d21359368b8 100644
--- a/spec/lib/gitlab/ci/reports/test_case_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_case_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::Ci::Reports::TestCase, :aggregate_failures do
end
it '#attachment_url' do
- expect(attachment_test_case.attachment_url).to match(/file\/some\/path.png/)
+ expect(attachment_test_case.attachment_url).to match(%r{file/some/path.png})
end
end
diff --git a/spec/lib/gitlab/ci/status/composite_spec.rb b/spec/lib/gitlab/ci/status/composite_spec.rb
index 2b9523bd83d..cceabc35e85 100644
--- a/spec/lib/gitlab/ci/status/composite_spec.rb
+++ b/spec/lib/gitlab/ci/status/composite_spec.rb
@@ -82,25 +82,6 @@ RSpec.describe Gitlab::Ci::Status::Composite do
it_behaves_like 'compares status and warnings'
end
-
- context 'when FF ci_fix_pipeline_status_for_dag_needs_manual is disabled' do
- before do
- stub_feature_flags(ci_fix_pipeline_status_for_dag_needs_manual: false)
- end
-
- where(:build_statuses, :dag, :result, :has_warnings) do
- %i(success manual) | true | 'pending' | false
- %i(success manual) | false | 'success' | false
- end
-
- with_them do
- let(:all_statuses) do
- build_statuses.map { |status| @statuses_with_allow_failure[status] }
- end
-
- it_behaves_like 'compares status and warnings'
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
index 653b3be0b2a..e8aeb93a2ba 100644
--- a/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'Deploy-ECS.gitlab-ci.yml' do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('AWS/Deploy-ECS') }
describe 'the created pipeline' do
- let(:default_branch) { 'master' }
+ let(:default_branch) { project.default_branch_or_main }
let(:pipeline_branch) { default_branch }
let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
@@ -38,7 +38,7 @@ RSpec.describe 'Deploy-ECS.gitlab-ci.yml' do
let(:pipeline_branch) { 'test_branch' }
before do
- project.repository.create_branch(pipeline_branch)
+ project.repository.create_branch(pipeline_branch, default_branch)
end
it_behaves_like 'no pipeline yaml error'
diff --git a/spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb
index 0e458e01a2c..151880e27a3 100644
--- a/spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb
@@ -12,7 +12,8 @@ RSpec.describe 'Managed-Cluster-Applications.gitlab-ci.yml' do
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:build_names) { pipeline.builds.pluck(:name) }
- let(:pipeline_branch) { 'master' }
+ let(:default_branch) { project.default_branch_or_main }
+ let(:pipeline_branch) { default_branch }
before do
stub_ci_pipeline_yaml_file(template.content)
@@ -28,7 +29,7 @@ RSpec.describe 'Managed-Cluster-Applications.gitlab-ci.yml' do
let(:pipeline_branch) { 'a_branch' }
before do
- project.repository.create_branch(pipeline_branch)
+ project.repository.create_branch(pipeline_branch, default_branch)
end
it 'has no jobs' do
diff --git a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
index 4377f155d34..5ab3035486f 100644
--- a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Terraform.latest') }
describe 'the created pipeline' do
- let(:default_branch) { 'master' }
+ let(:default_branch) { project.default_branch_or_main }
let(:pipeline_branch) { default_branch }
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
@@ -34,7 +34,7 @@ RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
let(:pipeline_branch) { 'patch-1' }
before do
- project.repository.create_branch(pipeline_branch)
+ project.repository.create_branch(pipeline_branch, default_branch)
end
it 'does not creates a deploy and a test job' do
diff --git a/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb b/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb
new file mode 100644
index 00000000000..af1b43f6b01
--- /dev/null
+++ b/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::YamlProcessor::Dag do
+ let(:nodes) { {} }
+
+ subject(:result) { described_class.new(nodes).tsort }
+
+ context 'when it is a regular pipeline' do
+ let(:nodes) do
+ { 'job_c' => %w(job_b job_d), 'job_d' => %w(job_a), 'job_b' => %w(job_a), 'job_a' => %w() }
+ end
+
+ it 'returns ordered jobs' do
+ expect(result).to eq(%w(job_a job_b job_d job_c))
+ end
+ end
+
+ context 'when there is a circular dependency' do
+ let(:nodes) do
+ { 'job_a' => %w(job_c), 'job_b' => %w(job_a), 'job_c' => %w(job_b) }
+ end
+
+ it 'raises TSort::Cyclic' do
+ expect { result }.to raise_error(TSort::Cyclic, /topological sort failed/)
+ end
+ end
+
+ context 'when there is a missing job' do
+ let(:nodes) do
+ { 'job_a' => %w(job_d), 'job_b' => %w(job_a) }
+ end
+
+ it 'raises MissingNodeError' do
+ expect { result }.to raise_error(
+ Gitlab::Ci::YamlProcessor::Dag::MissingNodeError, 'node job_d is missing'
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index e8e44f884cf..19c2e34a0f0 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -595,7 +595,15 @@ module Gitlab
EOYML
end
- it_behaves_like 'has warnings and expected error', /build job: need test is not defined in prior stages/
+ it_behaves_like 'has warnings and expected error', /build job: need test is not defined in current or prior stages/
+
+ context 'with ci_same_stage_job_needs FF disabled' do
+ before do
+ stub_feature_flags(ci_same_stage_job_needs: false)
+ end
+
+ it_behaves_like 'has warnings and expected error', /build job: need test is not defined in prior stages/
+ end
end
end
end
@@ -1648,8 +1656,6 @@ module Gitlab
end
it 'populates a build options with complete artifacts configuration' do
- stub_feature_flags(ci_artifacts_exclude: true)
-
config = <<~YAML
test:
script: echo "Hello World"
@@ -1860,7 +1866,7 @@ module Gitlab
build2: { stage: 'build', script: 'test' },
test1: { stage: 'test', script: 'test', dependencies: dependencies },
test2: { stage: 'test', script: 'test' },
- deploy: { stage: 'test', script: 'test' }
+ deploy: { stage: 'deploy', script: 'test' }
}
end
@@ -1893,7 +1899,15 @@ module Gitlab
context 'dependencies to deploy' do
let(:dependencies) { ['deploy'] }
- it_behaves_like 'returns errors', 'test1 job: dependency deploy is not defined in prior stages'
+ it_behaves_like 'returns errors', 'test1 job: dependency deploy is not defined in current or prior stages'
+
+ context 'with ci_same_stage_job_needs FF disabled' do
+ before do
+ stub_feature_flags(ci_same_stage_job_needs: false)
+ end
+
+ it_behaves_like 'returns errors', 'test1 job: dependency deploy is not defined in prior stages'
+ end
end
context 'when a job depends on another job that references a not-yet defined stage' do
@@ -1918,7 +1932,7 @@ module Gitlab
}
end
- it_behaves_like 'returns errors', /is not defined in prior stages/
+ it_behaves_like 'returns errors', /is not defined in current or prior stages/
end
end
@@ -1933,7 +1947,7 @@ module Gitlab
parallel: { stage: 'build', script: 'test', parallel: 2 },
test1: { stage: 'test', script: 'test', needs: needs, dependencies: dependencies },
test2: { stage: 'test', script: 'test' },
- deploy: { stage: 'test', script: 'test' }
+ deploy: { stage: 'deploy', script: 'test' }
}
end
@@ -1943,6 +1957,45 @@ module Gitlab
it { is_expected.to be_valid }
end
+ context 'needs a job from the same stage' do
+ let(:needs) { %w(test2) }
+
+ it 'creates jobs with valid specifications' do
+ expect(subject.builds.size).to eq(7)
+ expect(subject.builds[0]).to eq(
+ stage: 'build',
+ stage_idx: 1,
+ name: 'build1',
+ only: { refs: %w[branches tags] },
+ options: {
+ script: ['test']
+ },
+ when: 'on_success',
+ allow_failure: false,
+ yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ )
+ expect(subject.builds[4]).to eq(
+ stage: 'test',
+ stage_idx: 2,
+ name: 'test1',
+ only: { refs: %w[branches tags] },
+ options: { script: ['test'] },
+ needs_attributes: [
+ { name: 'test2', artifacts: true, optional: false }
+ ],
+ when: 'on_success',
+ allow_failure: false,
+ yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
+ scheduling_type: :dag
+ )
+ end
+ end
+
context 'needs two builds' do
let(:needs) { %w(build1 build2) }
@@ -2098,7 +2151,15 @@ module Gitlab
context 'needs to deploy' do
let(:needs) { ['deploy'] }
- it_behaves_like 'returns errors', 'test1 job: need deploy is not defined in prior stages'
+ it_behaves_like 'returns errors', 'test1 job: need deploy is not defined in current or prior stages'
+
+ context 'with ci_same_stage_job_needs FF disabled' do
+ before do
+ stub_feature_flags(ci_same_stage_job_needs: false)
+ end
+
+ it_behaves_like 'returns errors', 'test1 job: need deploy is not defined in prior stages'
+ end
end
context 'needs and dependencies that are mismatching' do
@@ -2769,6 +2830,29 @@ module Gitlab
it_behaves_like 'returns errors', 'jobs:rspec:parallel should be an integer or a hash'
end
+
+ context 'when the pipeline has a circular dependency' do
+ let(:config) do
+ <<~YAML
+ job_a:
+ stage: test
+ script: build
+ needs: [job_c]
+
+ job_b:
+ stage: test
+ script: test
+ needs: [job_a]
+
+ job_c:
+ stage: test
+ script: deploy
+ needs: [job_b]
+ YAML
+ end
+
+ it_behaves_like 'returns errors', 'The pipeline has circular dependencies.'
+ end
end
describe '#execute' do
diff --git a/spec/lib/gitlab/closing_issue_extractor_spec.rb b/spec/lib/gitlab/closing_issue_extractor_spec.rb
index 37349c30224..279486aa2a1 100644
--- a/spec/lib/gitlab/closing_issue_extractor_spec.rb
+++ b/spec/lib/gitlab/closing_issue_extractor_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::ClosingIssueExtractor do
let_it_be(:project2) { create(:project) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:issue2) { create(:issue, project: project2) }
+
let(:reference) { issue.to_reference }
let(:cross_reference) { issue2.to_reference(project) }
@@ -351,6 +352,7 @@ RSpec.describe Gitlab::ClosingIssueExtractor do
context 'with multiple references' do
let_it_be(:other_issue) { create(:issue, project: project) }
let_it_be(:third_issue) { create(:issue, project: project) }
+
let(:reference2) { other_issue.to_reference }
let(:reference3) { third_issue.to_reference }
diff --git a/spec/lib/gitlab/composer/cache_spec.rb b/spec/lib/gitlab/composer/cache_spec.rb
index 00318ac14f9..071771960c6 100644
--- a/spec/lib/gitlab/composer/cache_spec.rb
+++ b/spec/lib/gitlab/composer/cache_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::Composer::Cache do
let_it_be(:json) { { 'name' => package_name } }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
+
let(:branch) { project.repository.find_branch('master') }
let(:sha_regex) { /^[A-Fa-f0-9]{64}$/ }
diff --git a/spec/lib/gitlab/consul/internal_spec.rb b/spec/lib/gitlab/consul/internal_spec.rb
index 5889dd8b41d..28dcaac9ff2 100644
--- a/spec/lib/gitlab/consul/internal_spec.rb
+++ b/spec/lib/gitlab/consul/internal_spec.rb
@@ -134,6 +134,6 @@ RSpec.describe Gitlab::Consul::Internal do
end
def stub_consul_discover_prometheus
- stub_request(:get, /v1\/catalog\/service\/prometheus/)
+ stub_request(:get, %r{v1/catalog/service/prometheus})
end
end
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index d08057fb10a..8e63e771caa 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -61,6 +61,36 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
expect(directives['font_src']).to eq("'self' https://example.com")
end
end
+
+ context 'when CUSTOMER_PORTAL_URL is set' do
+ before do
+ stub_env('CUSTOMER_PORTAL_URL', 'https://customers.example.com')
+ end
+
+ context 'when in production' do
+ before do
+ allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production'))
+ end
+
+ it 'does not add CUSTOMER_PORTAL_URL to CSP' do
+ directives = settings['directives']
+
+ expect(directives['frame_src']).to eq("'self' https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com")
+ end
+ end
+
+ context 'when in development' do
+ before do
+ allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development'))
+ end
+
+ it 'adds CUSTOMER_PORTAL_URL to CSP' do
+ directives = settings['directives']
+
+ expect(directives['frame_src']).to eq("'self' https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com https://customers.example.com")
+ end
+ end
+ end
end
describe '#load' do
diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
index 2de784d3e16..0182e0f7651 100644
--- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
@@ -124,4 +124,84 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
end
end
+
+ describe '#split_and_retry!' do
+ let!(:job) { create(:batched_background_migration_job, batch_size: 10, min_value: 6, max_value: 15, status: :failed, attempts: 3) }
+
+ context 'when job can be split' do
+ before do
+ allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
+ allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 10])
+ end
+ end
+
+ it 'sets the correct attributes' do
+ expect { job.split_and_retry! }.to change { described_class.count }.by(1)
+
+ expect(job).to have_attributes(
+ min_value: 6,
+ max_value: 10,
+ batch_size: 5,
+ status: 'failed',
+ attempts: 0,
+ started_at: nil,
+ finished_at: nil,
+ metrics: {}
+ )
+
+ new_job = described_class.last
+
+ expect(new_job).to have_attributes(
+ batched_background_migration_id: job.batched_background_migration_id,
+ min_value: 11,
+ max_value: 15,
+ batch_size: 5,
+ status: 'failed',
+ attempts: 0,
+ started_at: nil,
+ finished_at: nil,
+ metrics: {}
+ )
+ expect(new_job.created_at).not_to eq(job.created_at)
+ end
+
+ it 'splits the jobs into retriable jobs' do
+ migration = job.batched_migration
+
+ expect { job.split_and_retry! }.to change { migration.batched_jobs.retriable.count }.from(0).to(2)
+ end
+ end
+
+ context 'when job is not failed' do
+ let!(:job) { create(:batched_background_migration_job, status: :succeeded) }
+
+ it 'raises an exception' do
+ expect { job.split_and_retry! }.to raise_error 'Only failed jobs can be split'
+ end
+ end
+
+ context 'when batch size is already 1' do
+ let!(:job) { create(:batched_background_migration_job, batch_size: 1, status: :failed) }
+
+ it 'raises an exception' do
+ expect { job.split_and_retry! }.to raise_error 'Job cannot be split further'
+ end
+ end
+
+ context 'when computed midpoint is larger than the max value of the batch' do
+ before do
+ allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
+ allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 16])
+ end
+ end
+
+ it 'lowers the batch size and resets the number of attempts' do
+ expect { job.split_and_retry! }.not_to change { described_class.count }
+
+ expect(job.batch_size).to eq(5)
+ expect(job.attempts).to eq(0)
+ expect(job.status).to eq('failed')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
index 9f0493ab0d7..779e8e40c97 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -281,4 +281,152 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
end
end
+
+ describe '#finalize' do
+ let(:migration_wrapper) { Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper.new }
+
+ let(:migration_helpers) { ActiveRecord::Migration.new }
+ let(:table_name) { :_batched_migrations_test_table }
+ let(:column_name) { :some_id }
+ let(:job_arguments) { [:some_id, :some_id_convert_to_bigint] }
+
+ let(:migration_status) { :active }
+
+ let!(:batched_migration) do
+ create(
+ :batched_background_migration,
+ status: migration_status,
+ max_value: 8,
+ batch_size: 2,
+ sub_batch_size: 1,
+ interval: 0,
+ table_name: table_name,
+ column_name: column_name,
+ job_arguments: job_arguments,
+ pause_ms: 0
+ )
+ end
+
+ before do
+ migration_helpers.drop_table table_name, if_exists: true
+ migration_helpers.create_table table_name, id: false do |t|
+ t.integer :some_id, primary_key: true
+ t.integer :some_id_convert_to_bigint
+ end
+
+ migration_helpers.execute("INSERT INTO #{table_name} VALUES (1, 1), (2, 2), (3, NULL), (4, NULL), (5, NULL), (6, NULL), (7, NULL), (8, NULL)")
+ end
+
+ after do
+ migration_helpers.drop_table table_name, if_exists: true
+ end
+
+ context 'when the migration is not yet completed' do
+ before do
+ common_attributes = {
+ batched_migration: batched_migration,
+ batch_size: 2,
+ sub_batch_size: 1,
+ pause_ms: 0
+ }
+
+ create(:batched_background_migration_job, common_attributes.merge(status: :succeeded, min_value: 1, max_value: 2))
+ create(:batched_background_migration_job, common_attributes.merge(status: :pending, min_value: 3, max_value: 4))
+ create(:batched_background_migration_job, common_attributes.merge(status: :failed, min_value: 5, max_value: 6, attempts: 1))
+ end
+
+ it 'completes the migration' do
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
+ .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
+ .and_return(batched_migration)
+
+ expect(batched_migration).to receive(:finalizing!).and_call_original
+
+ expect do
+ runner.finalize(
+ batched_migration.job_class_name,
+ table_name,
+ column_name,
+ job_arguments
+ )
+ end.to change { batched_migration.reload.status }.from('active').to('finished')
+
+ expect(batched_migration.batched_jobs).to all(be_succeeded)
+
+ not_converted = migration_helpers.execute("SELECT * FROM #{table_name} WHERE some_id_convert_to_bigint IS NULL")
+ expect(not_converted.to_a).to be_empty
+ end
+
+ context 'when migration fails to complete' do
+ it 'raises an error' do
+ batched_migration.batched_jobs.failed.update_all(attempts: Gitlab::Database::BackgroundMigration::BatchedJob::MAX_ATTEMPTS)
+
+ expect do
+ runner.finalize(
+ batched_migration.job_class_name,
+ table_name,
+ column_name,
+ job_arguments
+ )
+ end.to raise_error described_class::FailedToFinalize
+ end
+ end
+ end
+
+ context 'when the migration is already finished' do
+ let(:migration_status) { :finished }
+
+ it 'is a no-op' do
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
+ .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
+ .and_return(batched_migration)
+
+ configuration = {
+ job_class_name: batched_migration.job_class_name,
+ table_name: table_name.to_sym,
+ column_name: column_name.to_sym,
+ job_arguments: job_arguments
+ }
+
+ expect(Gitlab::AppLogger).to receive(:warn)
+ .with("Batched background migration for the given configuration is already finished: #{configuration}")
+
+ expect(batched_migration).not_to receive(:finalizing!)
+
+ runner.finalize(
+ batched_migration.job_class_name,
+ table_name,
+ column_name,
+ job_arguments
+ )
+ end
+ end
+
+ context 'when the migration does not exist' do
+ it 'is a no-op' do
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
+ .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, [:some, :other, :arguments])
+ .and_return(nil)
+
+ configuration = {
+ job_class_name: batched_migration.job_class_name,
+ table_name: table_name.to_sym,
+ column_name: column_name.to_sym,
+ job_arguments: [:some, :other, :arguments]
+ }
+
+ expect(Gitlab::AppLogger).to receive(:warn)
+ .with("Could not find batched background migration for the given configuration: #{configuration}")
+
+ expect(batched_migration).not_to receive(:finalizing!)
+
+ runner.finalize(
+ batched_migration.job_class_name,
+ table_name,
+ column_name,
+ [:some, :other, :arguments]
+ )
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index d881390cd52..3207e97a639 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -10,11 +10,11 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
describe '#last_job' do
let!(:batched_migration) { create(:batched_background_migration) }
- let!(:batched_job1) { create(:batched_background_migration_job, batched_migration: batched_migration) }
- let!(:batched_job2) { create(:batched_background_migration_job, batched_migration: batched_migration) }
+ let!(:batched_job1) { create(:batched_background_migration_job, batched_migration: batched_migration, max_value: 1000) }
+ let!(:batched_job2) { create(:batched_background_migration_job, batched_migration: batched_migration, max_value: 500) }
- it 'returns the most recent (in order of id) batched job' do
- expect(batched_migration.last_job).to eq(batched_job2)
+ it 'returns the batched job with highest max_value' do
+ expect(batched_migration.last_job).to eq(batched_job1)
end
end
end
@@ -387,4 +387,22 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
expect(actual).to contain_exactly(migration)
end
end
+
+ describe '.find_for_configuration' do
+ it 'returns nill if such migration does not exists' do
+ expect(described_class.find_for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to be_nil
+ end
+
+ it 'returns the migration when it exists' do
+ migration = create(
+ :batched_background_migration,
+ job_class_name: 'MyJobClass',
+ table_name: :projects,
+ column_name: :id,
+ job_arguments: [[:id], [:id_convert_to_bigint]]
+ )
+
+ expect(described_class.find_for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to eq(migration)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/custom_structure_spec.rb b/spec/lib/gitlab/database/custom_structure_spec.rb
deleted file mode 100644
index 04ce1e4ad9a..00000000000
--- a/spec/lib/gitlab/database/custom_structure_spec.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::CustomStructure do
- let_it_be(:structure) { described_class.new }
- let_it_be(:filepath) { Rails.root.join(described_class::CUSTOM_DUMP_FILE) }
- let_it_be(:file_header) do
- <<~DATA
- -- this file tracks custom GitLab data, such as foreign keys referencing partitioned tables
- -- more details can be found in the issue: https://gitlab.com/gitlab-org/gitlab/-/issues/201872
- DATA
- end
-
- let(:io) { StringIO.new }
-
- before do
- allow(File).to receive(:open).with(filepath, anything).and_yield(io)
- end
-
- context 'when there are no partitioned_foreign_keys' do
- it 'dumps a valid structure file' do
- structure.dump
-
- expect(io.string).to eq("#{file_header}\n")
- end
- end
-
- context 'when there are partitioned_foreign_keys' do
- let!(:first_fk) do
- Gitlab::Database::PartitioningMigrationHelpers::PartitionedForeignKey.create(
- cascade_delete: true, from_table: 'issues', from_column: 'project_id', to_table: 'projects', to_column: 'id')
- end
-
- let!(:second_fk) do
- Gitlab::Database::PartitioningMigrationHelpers::PartitionedForeignKey.create(
- cascade_delete: false, from_table: 'issues', from_column: 'moved_to_id', to_table: 'issues', to_column: 'id')
- end
-
- it 'dumps a file with the command to restore the current keys' do
- structure.dump
-
- expect(io.string).to eq(<<~DATA)
- #{file_header}
- COPY partitioned_foreign_keys (id, cascade_delete, from_table, from_column, to_table, to_column) FROM STDIN;
- #{first_fk.id}\ttrue\tissues\tproject_id\tprojects\tid
- #{second_fk.id}\tfalse\tissues\tmoved_to_id\tissues\tid
- \\.
- DATA
-
- first_fk.destroy
- io.truncate(0)
- io.rewind
-
- structure.dump
-
- expect(io.string).to eq(<<~DATA)
- #{file_header}
- COPY partitioned_foreign_keys (id, cascade_delete, from_table, from_column, to_table, to_column) FROM STDIN;
- #{second_fk.id}\tfalse\tissues\tmoved_to_id\tissues\tid
- \\.
- DATA
- end
- end
-end
diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
index 4705bb23885..b82b8d9a311 100644
--- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
@@ -306,26 +306,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
end
- describe '#all_caught_up?' do
- it 'returns true if all hosts caught up to the write location' do
- expect(lb.host_list.hosts).to all(receive(:caught_up?).with('foo').and_return(true))
-
- expect(lb.all_caught_up?('foo')).to eq(true)
- end
-
- it 'returns false if a host has not yet caught up' do
- expect(lb.host_list.hosts[0]).to receive(:caught_up?)
- .with('foo')
- .and_return(true)
-
- expect(lb.host_list.hosts[1]).to receive(:caught_up?)
- .with('foo')
- .and_return(false)
-
- expect(lb.all_caught_up?('foo')).to eq(false)
- end
- end
-
describe '#retry_with_backoff' do
it 'returns the value returned by the block' do
value = lb.retry_with_backoff { 10 }
@@ -488,7 +468,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
end
- describe '#select_caught_up_hosts' do
+ describe '#select_up_to_date_host' do
let(:location) { 'AB/12345'}
let(:hosts) { lb.host_list.hosts }
let(:set_host) { RequestStore[described_class::CACHE_KEY] }
diff --git a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
index 01367716518..9381ffa59fe 100644
--- a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
@@ -71,6 +71,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
expect(app).to receive(:call).with(env).and_return(10)
+ expect(ActiveSupport::Notifications)
+ .to receive(:instrument)
+ .with('web_transaction_completed.load_balancing')
+ .and_call_original
+
expect(middleware.call(env)).to eq(10)
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
index 90051172fca..54050a87af0 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
@@ -5,12 +5,27 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
let(:middleware) { described_class.new }
+ let(:load_balancer) { double.as_null_object }
+ let(:worker_class) { 'TestDataConsistencyWorker' }
+ let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e" } }
+
+ before do
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer).and_return(load_balancer)
+ end
+
after do
Gitlab::Database::LoadBalancing::Session.clear_session
end
+ def run_middleware
+ middleware.call(worker_class, job, nil, nil) {}
+ end
+
describe '#call' do
shared_context 'data consistency worker class' do |data_consistency, feature_flag|
+ let(:expected_consistency) { data_consistency }
let(:worker_class) do
Class.new do
def self.name
@@ -31,13 +46,23 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
end
+ shared_examples_for 'job data consistency' do
+ it "sets job data consistency" do
+ run_middleware
+
+ expect(job['worker_data_consistency']).to eq(expected_consistency)
+ end
+ end
+
shared_examples_for 'does not pass database locations' do
it 'does not pass database locations', :aggregate_failures do
- middleware.call(worker_class, job, double(:queue), redis_pool) { 10 }
+ run_middleware
expect(job['database_replica_location']).to be_nil
expect(job['database_write_location']).to be_nil
end
+
+ include_examples 'job data consistency'
end
shared_examples_for 'mark data consistency location' do |data_consistency|
@@ -45,7 +70,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
let(:location) { '0/D525E3A8' }
- context 'when feature flag load_balancing_for_sidekiq is disabled' do
+ context 'when feature flag is disabled' do
+ let(:expected_consistency) { :always }
+
before do
stub_feature_flags(load_balancing_for_test_data_consistency_worker: false)
end
@@ -59,12 +86,14 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
it 'passes database_replica_location' do
- expect(middleware).to receive_message_chain(:load_balancer, :host, "database_replica_location").and_return(location)
+ expect(load_balancer).to receive_message_chain(:host, "database_replica_location").and_return(location)
- middleware.call(worker_class, job, double(:queue), redis_pool) { 10 }
+ run_middleware
expect(job['database_replica_location']).to eq(location)
end
+
+ include_examples 'job data consistency'
end
context 'when write was performed' do
@@ -73,12 +102,14 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
it 'passes primary write location', :aggregate_failures do
- expect(middleware).to receive_message_chain(:load_balancer, :primary_write_location).and_return(location)
+ expect(load_balancer).to receive(:primary_write_location).and_return(location)
- middleware.call(worker_class, job, double(:queue), redis_pool) { 10 }
+ run_middleware
expect(job['database_write_location']).to eq(location)
end
+
+ include_examples 'job data consistency'
end
end
@@ -89,7 +120,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
it 'does not set database locations again' do
- middleware.call(worker_class, job, double(:queue), redis_pool) { 10 }
+ run_middleware
expect(job[provided_database_location]).to eq(old_location)
expect(job[other_location]).to be_nil
@@ -101,8 +132,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", provided_database_location => old_location } }
before do
- allow(middleware).to receive_message_chain(:load_balancer, :primary_write_location).and_return(new_location)
- allow(middleware).to receive_message_chain(:load_balancer, :database_replica_location).and_return(new_location)
+ allow(load_balancer).to receive(:primary_write_location).and_return(new_location)
+ allow(load_balancer).to receive(:database_replica_location).and_return(new_location)
end
context "when write was performed" do
@@ -114,24 +145,16 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
end
- let(:queue) { 'default' }
- let(:redis_pool) { Sidekiq.redis_pool }
- let(:worker_class) { 'TestDataConsistencyWorker' }
- let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e" } }
-
- before do
- skip_feature_flags_yaml_validation
- skip_default_enabled_yaml_check
- end
-
context 'when worker cannot be constantized' do
let(:worker_class) { 'ActionMailer::MailDeliveryJob' }
+ let(:expected_consistency) { :always }
include_examples 'does not pass database locations'
end
context 'when worker class does not include ApplicationWorker' do
let(:worker_class) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper }
+ let(:expected_consistency) { :always }
include_examples 'does not pass database locations'
end
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
index b7cd0caa922..14f240cd159 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
@@ -5,6 +5,19 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
let(:middleware) { described_class.new }
+ let(:load_balancer) { double.as_null_object }
+
+ let(:worker) { worker_class.new }
+ let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'database_replica_location' => '0/D525E3A8' } }
+
+ before do
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer).and_return(load_balancer)
+
+ replication_lag!(false)
+ end
+
after do
Gitlab::Database::LoadBalancing::Session.clear_session
end
@@ -31,30 +44,34 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
end
end
- shared_examples_for 'stick to the primary' do
+ shared_examples_for 'load balancing strategy' do |strategy|
+ it "sets load balancing strategy to #{strategy}" do
+ run_middleware do
+ expect(job['load_balancing_strategy']).to eq(strategy)
+ end
+ end
+ end
+
+ shared_examples_for 'stick to the primary' do |expected_strategy|
it 'sticks to the primary' do
- middleware.call(worker, job, double(:queue)) do
+ run_middleware do
expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).to be_truthy
end
end
+
+ include_examples 'load balancing strategy', expected_strategy
end
- shared_examples_for 'replica is up to date' do |location, data_consistency|
+ shared_examples_for 'replica is up to date' do |location, expected_strategy|
it 'does not stick to the primary', :aggregate_failures do
expect(middleware).to receive(:replica_caught_up?).with(location).and_return(true)
- middleware.call(worker, job, double(:queue)) do
+ run_middleware do
expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).not_to be_truthy
end
-
- expect(job[:database_chosen]).to eq('replica')
end
- it "updates job hash with data_consistency :#{data_consistency}" do
- middleware.call(worker, job, double(:queue)) do
- expect(job).to include(data_consistency: data_consistency.to_s)
- end
- end
+ include_examples 'load balancing strategy', expected_strategy
end
shared_examples_for 'sticks based on data consistency' do |data_consistency|
@@ -65,7 +82,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
stub_feature_flags(load_balancing_for_test_data_consistency_worker: false)
end
- include_examples 'stick to the primary'
+ include_examples 'stick to the primary', 'primary'
end
context 'when database replica location is set' do
@@ -75,7 +92,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
allow(middleware).to receive(:replica_caught_up?).and_return(true)
end
- it_behaves_like 'replica is up to date', '0/D525E3A8', data_consistency
+ it_behaves_like 'replica is up to date', '0/D525E3A8', 'replica'
end
context 'when database primary location is set' do
@@ -85,39 +102,26 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
allow(middleware).to receive(:replica_caught_up?).and_return(true)
end
- it_behaves_like 'replica is up to date', '0/D525E3A8', data_consistency
+ it_behaves_like 'replica is up to date', '0/D525E3A8', 'replica'
end
context 'when database location is not set' do
let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e' } }
- it_behaves_like 'stick to the primary', nil
+ it_behaves_like 'stick to the primary', 'primary_no_wal'
end
end
- let(:queue) { 'default' }
- let(:redis_pool) { Sidekiq.redis_pool }
- let(:worker) { worker_class.new }
- let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'database_replica_location' => '0/D525E3A8' } }
- let(:block) { 10 }
-
- before do
- skip_feature_flags_yaml_validation
- skip_default_enabled_yaml_check
- allow(middleware).to receive(:clear)
- allow(Gitlab::Database::LoadBalancing::Session.current).to receive(:performed_write?).and_return(true)
- end
-
context 'when worker class does not include ApplicationWorker' do
let(:worker) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper.new }
- include_examples 'stick to the primary'
+ include_examples 'stick to the primary', 'primary'
end
context 'when worker data consistency is :always' do
include_context 'data consistency worker class', :always, :load_balancing_for_test_data_consistency_worker
- include_examples 'stick to the primary'
+ include_examples 'stick to the primary', 'primary'
end
context 'when worker data consistency is :delayed' do
@@ -125,8 +129,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
context 'when replica is not up to date' do
before do
- allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :release_host)
- allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :select_up_to_date_host).and_return(false)
+ replication_lag!(true)
end
around do |example|
@@ -137,38 +140,34 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
end
context 'when job is executed first' do
- it 'raise an error and retries', :aggregate_failures do
+ it 'raises an error and retries', :aggregate_failures do
expect do
process_job(job)
end.to raise_error(Sidekiq::JobRetry::Skip)
expect(job['error_class']).to eq('Gitlab::Database::LoadBalancing::SidekiqServerMiddleware::JobReplicaNotUpToDate')
- expect(job[:database_chosen]).to eq('retry')
end
+
+ include_examples 'load balancing strategy', 'retry'
end
context 'when job is retried' do
- it 'stick to the primary', :aggregate_failures do
+ before do
expect do
process_job(job)
end.to raise_error(Sidekiq::JobRetry::Skip)
-
- process_job(job)
- expect(job[:database_chosen]).to eq('primary')
end
- end
- context 'replica selection mechanism feature flag rollout' do
- before do
- stub_feature_flags(sidekiq_load_balancing_rotate_up_to_date_replica: false)
+ context 'and replica still lagging behind' do
+ include_examples 'stick to the primary', 'primary'
end
- it 'uses different implmentation' do
- expect(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :host, :caught_up?).and_return(false)
+ context 'and replica is now up-to-date' do
+ before do
+ replication_lag!(false)
+ end
- expect do
- process_job(job)
- end.to raise_error(Sidekiq::JobRetry::Skip)
+ it_behaves_like 'replica is up to date', '0/D525E3A8', 'replica_retried'
end
end
end
@@ -182,20 +181,24 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
allow(middleware).to receive(:replica_caught_up?).and_return(false)
end
- include_examples 'stick to the primary'
-
- it 'updates job hash with primary database chosen', :aggregate_failures do
- expect { |b| middleware.call(worker, job, double(:queue), &b) }.to yield_control
-
- expect(job[:database_chosen]).to eq('primary')
- end
+ include_examples 'stick to the primary', 'primary'
end
end
end
def process_job(job)
- Sidekiq::JobRetry.new.local(worker_class, job, queue) do
+ Sidekiq::JobRetry.new.local(worker_class, job, 'default') do
worker_class.process_job(job)
end
end
+
+ def run_middleware
+ middleware.call(worker, job, double(:queue)) { yield }
+ rescue described_class::JobReplicaNotUpToDate
+ # we silence errors here that cause the job to retry
+ end
+
+ def replication_lag!(exists)
+ allow(load_balancer).to receive(:select_up_to_date_host).and_return(!exists)
+ end
end
diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
index bf4e3756e0e..53445d73756 100644
--- a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
@@ -46,41 +46,68 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
describe '.all_caught_up?' do
let(:lb) { double(:lb) }
+ let(:last_write_location) { 'foo' }
before do
allow(described_class).to receive(:load_balancer).and_return(lb)
- end
- it 'returns true if no write location could be found' do
allow(described_class).to receive(:last_write_location_for)
.with(:user, 42)
- .and_return(nil)
+ .and_return(last_write_location)
+ end
+
+ context 'when no write location could be found' do
+ let(:last_write_location) { nil }
- expect(lb).not_to receive(:all_caught_up?)
+ it 'returns true' do
+ allow(described_class).to receive(:last_write_location_for)
+ .with(:user, 42)
+ .and_return(nil)
+
+ expect(lb).not_to receive(:select_up_to_date_host)
- expect(described_class.all_caught_up?(:user, 42)).to eq(true)
+ expect(described_class.all_caught_up?(:user, 42)).to eq(true)
+ end
end
- it 'returns true, and unsticks if all secondaries have caught up' do
- allow(described_class).to receive(:last_write_location_for)
- .with(:user, 42)
- .and_return('foo')
+ context 'when all secondaries have caught up' do
+ before do
+ allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true)
+ end
- allow(lb).to receive(:all_caught_up?).with('foo').and_return(true)
+ it 'returns true, and unsticks' do
+ expect(described_class).to receive(:unstick).with(:user, 42)
- expect(described_class).to receive(:unstick).with(:user, 42)
+ expect(described_class.all_caught_up?(:user, 42)).to eq(true)
+ end
+
+ it 'notifies with the proper event payload' do
+ expect(ActiveSupport::Notifications)
+ .to receive(:instrument)
+ .with('caught_up_replica_pick.load_balancing', { result: true })
+ .and_call_original
- expect(described_class.all_caught_up?(:user, 42)).to eq(true)
+ described_class.all_caught_up?(:user, 42)
+ end
end
- it 'return false if the secondaries have not yet caught up' do
- allow(described_class).to receive(:last_write_location_for)
- .with(:user, 42)
- .and_return('foo')
+ context 'when the secondaries have not yet caught up' do
+ before do
+ allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false)
+ end
+
+ it 'returns false' do
+ expect(described_class.all_caught_up?(:user, 42)).to eq(false)
+ end
- allow(lb).to receive(:all_caught_up?).with('foo').and_return(false)
+ it 'notifies with the proper event payload' do
+ expect(ActiveSupport::Notifications)
+ .to receive(:instrument)
+ .with('caught_up_replica_pick.load_balancing', { result: false })
+ .and_call_original
- expect(described_class.all_caught_up?(:user, 42)).to eq(false)
+ described_class.all_caught_up?(:user, 42)
+ end
end
end
@@ -96,7 +123,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
.with(:user, 42)
.and_return(nil)
- expect(lb).not_to receive(:all_caught_up?)
+ expect(lb).not_to receive(:select_up_to_date_host)
described_class.unstick_or_continue_sticking(:user, 42)
end
@@ -106,7 +133,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
.with(:user, 42)
.and_return('foo')
- allow(lb).to receive(:all_caught_up?).with('foo').and_return(true)
+ allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true)
expect(described_class).to receive(:unstick).with(:user, 42)
@@ -118,7 +145,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
.with(:user, 42)
.and_return('foo')
- allow(lb).to receive(:all_caught_up?).with('foo').and_return(false)
+ allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false)
expect(Gitlab::Database::LoadBalancing::Session.current)
.to receive(:use_primary!)
@@ -298,10 +325,22 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
end
it 'returns true, selects hosts, and unsticks if any secondary has caught up' do
- expect(lb).to receive(:select_caught_up_hosts).and_return(true)
+ expect(lb).to receive(:select_up_to_date_host).and_return(true)
expect(described_class).to receive(:unstick).with(:project, 42)
expect(described_class.select_caught_up_replicas(:project, 42)).to be true
end
+
+ context 'when :load_balancing_refine_load_balancer_methods FF is disabled' do
+ before do
+ stub_feature_flags(load_balancing_refine_load_balancer_methods: false)
+ end
+
+ it 'returns true, selects hosts, and unsticks if any secondary has caught up' do
+ expect(lb).to receive(:select_caught_up_hosts).and_return(true)
+ expect(described_class).to receive(:unstick).with(:project, 42)
+ expect(described_class.select_caught_up_replicas(:project, 42)).to be true
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb
index e7de7f2b43b..94717a10492 100644
--- a/spec/lib/gitlab/database/load_balancing_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing_spec.rb
@@ -142,10 +142,10 @@ RSpec.describe Gitlab::Database::LoadBalancing do
expect(described_class.enable?).to eq(false)
end
- it 'returns false when Sidekiq is being used' do
+ it 'returns true when Sidekiq is being used' do
allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
- expect(described_class.enable?).to eq(false)
+ expect(described_class.enable?).to eq(true)
end
it 'returns false when running inside a Rake task' do
@@ -170,18 +170,6 @@ RSpec.describe Gitlab::Database::LoadBalancing do
expect(described_class.enable?).to eq(true)
end
-
- context 'when ENABLE_LOAD_BALANCING_FOR_SIDEKIQ environment variable is set' do
- before do
- stub_env('ENABLE_LOAD_BALANCING_FOR_SIDEKIQ', 'true')
- end
-
- it 'returns true when Sidekiq is being used' do
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
-
- expect(described_class.enable?).to eq(true)
- end
- end
end
describe '.configured?' do
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index f0ea07646fb..8e25f9249fe 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -379,6 +379,37 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
allow(model).to receive(:transaction_open?).and_return(false)
end
+ context 'target column' do
+ it 'defaults to (id) when no custom target column is provided' do
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:statement_timeout_disabled?).and_return(false)
+ expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+
+ expect(model).to receive(:execute).with(/REFERENCES users \(id\)/)
+
+ model.add_concurrent_foreign_key(:projects, :users,
+ column: :user_id)
+ end
+
+ it 'references the custom target column when provided' do
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:statement_timeout_disabled?).and_return(false)
+ expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+
+ expect(model).to receive(:execute).with(/REFERENCES users \(id_convert_to_bigint\)/)
+
+ model.add_concurrent_foreign_key(:projects, :users,
+ column: :user_id,
+ target_column: :id_convert_to_bigint)
+ end
+ end
+
context 'ON DELETE statements' do
context 'on_delete: :nullify' do
it 'appends ON DELETE SET NULL statement' do
@@ -450,7 +481,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:foreign_key_exists?).with(:projects, :users,
column: :user_id,
on_delete: :cascade,
- name: name).and_return(true)
+ name: name,
+ primary_key: :id).and_return(true)
expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/)
expect(model).to receive(:execute).with(/VALIDATE CONSTRAINT/)
@@ -479,6 +511,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'does not create a new foreign key' do
expect(model).to receive(:foreign_key_exists?).with(:projects, :users,
name: :foo,
+ primary_key: :id,
on_delete: :cascade,
column: :user_id).and_return(true)
@@ -583,7 +616,15 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
describe '#foreign_key_exists?' do
before do
- key = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(:projects, :users, { column: :non_standard_id, name: :fk_projects_users_non_standard_id, on_delete: :cascade })
+ key = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(
+ :projects, :users,
+ {
+ column: :non_standard_id,
+ name: :fk_projects_users_non_standard_id,
+ on_delete: :cascade,
+ primary_key: :id
+ }
+ )
allow(model).to receive(:foreign_keys).with(:projects).and_return([key])
end
@@ -612,6 +653,11 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model.foreign_key_exists?(:projects, target_table, column: :user_id)).to be_falsey
end
+ it 'compares by target column name if given' do
+ expect(model.foreign_key_exists?(:projects, target_table, primary_key: :user_id)).to be_falsey
+ expect(model.foreign_key_exists?(:projects, target_table, primary_key: :id)).to be_truthy
+ end
+
it 'compares by foreign key name if given' do
expect(model.foreign_key_exists?(:projects, target_table, name: :non_existent_foreign_key_name)).to be_falsey
end
@@ -2007,7 +2053,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
table_name: :events,
column_name: :id,
- job_arguments: [[:id], [:id_convert_to_bigint]]
+ job_arguments: [["id"], ["id_convert_to_bigint"]]
}
end
@@ -2017,7 +2063,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
create(:batched_background_migration, configuration.merge(status: :active))
expect { ensure_batched_background_migration_is_finished }
- .to raise_error "Expected batched background migration for the given configuration to be marked as 'finished', but it is 'active': #{configuration}"
+ .to raise_error "Expected batched background migration for the given configuration to be marked as 'finished', but it is 'active':" \
+ "\t#{configuration}" \
+ "\n\n" \
+ "Finalize it manualy by running" \
+ "\n\n" \
+ "\tsudo gitlab-rake gitlab:background_migrations:finalize[CopyColumnUsingBackgroundMigrationJob,events,id,'[[\"id\"]\\, [\"id_convert_to_bigint\"]]']" \
+ "\n\n" \
+ "For more information, check the documentation" \
+ "\n\n" \
+ "\thttps://docs.gitlab.com/ee/user/admin_area/monitoring/background_migrations.html#database-migrations-failing-because-of-batched-background-migration-not-finished"
end
it 'does not raise error when migration exists and is marked as finished' do
@@ -2153,21 +2208,41 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
buffer.rewind
expect(buffer.read).to include("\"class\":\"#{model.class}\"")
end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(raise_on_exhaustion: [true, false])
+
+ with_them do
+ it 'sets raise_on_exhaustion as requested' do
+ with_lock_retries = double
+ expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
+ expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: raise_on_exhaustion)
+
+ model.with_lock_retries(env: env, logger: in_memory_logger, raise_on_exhaustion: raise_on_exhaustion) { }
+ end
+ end
+
+ it 'does not raise on exhaustion by default' do
+ with_lock_retries = double
+ expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
+ expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
+
+ model.with_lock_retries(env: env, logger: in_memory_logger) { }
+ end
end
describe '#backfill_iids' do
include MigrationsHelpers
- before do
- stub_const('Issue', Class.new(ActiveRecord::Base))
-
- Issue.class_eval do
+ let(:issue_class) do
+ Class.new(ActiveRecord::Base) do
include AtomicInternalId
self.table_name = 'issues'
self.inheritance_column = :_type_disabled
- belongs_to :project, class_name: "::Project"
+ belongs_to :project, class_name: "::Project", inverse_of: nil
has_internal_id :iid,
scope: :project,
@@ -2190,7 +2265,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
- issue = Issue.create!(project_id: project.id)
+ issue = issue_class.create!(project_id: project.id)
expect(issue.iid).to eq(1)
end
@@ -2201,7 +2276,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
- issue_b = Issue.create!(project_id: project.id)
+ issue_b = issue_class.create!(project_id: project.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.iid).to eq(2)
@@ -2216,8 +2291,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
- issue_a = Issue.create!(project_id: project_a.id)
- issue_b = Issue.create!(project_id: project_b.id)
+ issue_a = issue_class.create!(project_id: project_a.id)
+ issue_b = issue_class.create!(project_id: project_b.id)
expect(issue_a.iid).to eq(2)
expect(issue_b.iid).to eq(3)
@@ -2231,7 +2306,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
- issue_b = Issue.create!(project_id: project_b.id)
+ issue_b = issue_class.create!(project_id: project_b.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.reload.iid).to eq(1)
@@ -2951,4 +3026,12 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
end
+
+ describe '#rename_constraint' do
+ it "executes the statement to rename constraint" do
+ expect(model).to receive(:execute).with /ALTER TABLE "test_table"\nRENAME CONSTRAINT "fk_old_name" TO "fk_new_name"/
+
+ model.rename_constraint(:test_table, :fk_old_name, :fk_new_name)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
index 885eef5723e..f9dca371398 100644
--- a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
@@ -71,6 +71,18 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
model.create!(created_at: Date.parse('2020-06-15'))
end
+ context 'when pruning partitions before June 2020' do
+ subject { described_class.new(model, partitioning_key, retain_for: 1.month).missing_partitions }
+
+ it 'does not include the missing partition from May 2020 because it would be dropped' do
+ expect(subject).not_to include(Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01'))
+ end
+
+ it 'detects the missing partition for 1 month ago (July 2020)' do
+ expect(subject).to include(Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-07-01', '2020-08-01'))
+ end
+ end
+
it 'detects the gap and the missing partition in May 2020' do
expect(subject).to include(Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01'))
end
@@ -108,6 +120,19 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
SQL
end
+ context 'when pruning partitions before June 2020' do
+ subject { described_class.new(model, partitioning_key, retain_for: 1.month).missing_partitions }
+
+ it 'detects exactly the set of partitions from June 2020 to March 2021' do
+ months = %w[2020-07-01 2020-08-01 2020-09-01 2020-10-01 2020-11-01 2020-12-01 2021-01-01 2021-02-01 2021-03-01]
+ expected = months[..-2].zip(months.drop(1)).map do |(from, to)|
+ Gitlab::Database::Partitioning::TimePartition.new(model.table_name, from, to)
+ end
+
+ expect(subject).to match_array(expected)
+ end
+ end
+
it 'detects the missing catch-all partition at the beginning' do
expect(subject).to include(Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-08-01'))
end
@@ -150,4 +175,100 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
end
end
end
+
+ describe '#extra_partitions' do
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'partitioned_test'
+ self.primary_key = :id
+ end
+ end
+
+ let(:partitioning_key) { :created_at }
+ let(:table_name) { :partitioned_test }
+
+ around do |example|
+ travel_to(Date.parse('2020-08-22')) { example.run }
+ end
+
+ describe 'with existing partitions' do
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE TABLE #{table_name}
+ (id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
+ PARTITION BY RANGE (created_at);
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_000000
+ PARTITION OF #{table_name}
+ FOR VALUES FROM (MINVALUE) TO ('2020-05-01');
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202005
+ PARTITION OF #{table_name}
+ FOR VALUES FROM ('2020-05-01') TO ('2020-06-01');
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202006
+ PARTITION OF #{table_name}
+ FOR VALUES FROM ('2020-06-01') TO ('2020-07-01')
+ SQL
+ end
+
+ context 'without a time retention policy' do
+ subject { described_class.new(model, partitioning_key).extra_partitions }
+
+ it 'has no extra partitions to prune' do
+ expect(subject).to eq([])
+ end
+ end
+
+ context 'with a time retention policy that excludes no partitions' do
+ subject { described_class.new(model, partitioning_key, retain_for: 4.months).extra_partitions }
+
+ it 'has no extra partitions to prune' do
+ expect(subject).to eq([])
+ end
+ end
+
+ context 'with a time retention policy of 3 months' do
+ subject { described_class.new(model, partitioning_key, retain_for: 3.months).extra_partitions }
+
+ it 'prunes the unbounded partition ending 2020-05-01' do
+ min_value_to_may = Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01',
+ partition_name: 'partitioned_test_000000')
+
+ expect(subject).to contain_exactly(min_value_to_may)
+ end
+
+ context 'when the feature flag is toggled off' do
+ before do
+ stub_feature_flags(partition_pruning_dry_run: false)
+ end
+
+ it 'is empty' do
+ expect(subject).to eq([])
+ end
+ end
+ end
+
+ context 'with a time retention policy of 2 months' do
+ subject { described_class.new(model, partitioning_key, retain_for: 2.months).extra_partitions }
+
+ it 'prunes the unbounded partition and the partition for May-June' do
+ expect(subject).to contain_exactly(
+ Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: 'partitioned_test_000000'),
+ Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01', partition_name: 'partitioned_test_202005')
+ )
+ end
+
+ context 'when the feature flag is toggled off' do
+ before do
+ stub_feature_flags(partition_pruning_dry_run: false)
+ end
+
+ it 'is empty' do
+ expect(subject).to eq([])
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/partitioning/partition_creator_spec.rb b/spec/lib/gitlab/database/partitioning/partition_creator_spec.rb
deleted file mode 100644
index ec89f2ed61c..00000000000
--- a/spec/lib/gitlab/database/partitioning/partition_creator_spec.rb
+++ /dev/null
@@ -1,96 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::Partitioning::PartitionCreator do
- include Database::PartitioningHelpers
- include ExclusiveLeaseHelpers
-
- describe '.register' do
- let(:model) { double(partitioning_strategy: nil) }
-
- it 'remembers registered models' do
- expect { described_class.register(model) }.to change { described_class.models }.to include(model)
- end
- end
-
- describe '#create_partitions (mocked)' do
- subject { described_class.new(models).create_partitions }
-
- let(:models) { [model] }
- let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table) }
- let(:partitioning_strategy) { double(missing_partitions: partitions) }
- let(:table) { "some_table" }
-
- before do
- allow(ActiveRecord::Base.connection).to receive(:table_exists?).and_call_original
- allow(ActiveRecord::Base.connection).to receive(:table_exists?).with(table).and_return(true)
- allow(ActiveRecord::Base.connection).to receive(:execute).and_call_original
-
- stub_exclusive_lease(described_class::LEASE_KEY % table, timeout: described_class::LEASE_TIMEOUT)
- end
-
- let(:partitions) do
- [
- instance_double(Gitlab::Database::Partitioning::TimePartition, table: 'bar', partition_name: 'foo', to_sql: "SELECT 1"),
- instance_double(Gitlab::Database::Partitioning::TimePartition, table: 'bar', partition_name: 'foo2', to_sql: "SELECT 2")
- ]
- end
-
- it 'creates the partition' do
- expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.first.to_sql)
- expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.second.to_sql)
-
- subject
- end
-
- context 'error handling with 2 models' do
- let(:models) do
- [
- double(partitioning_strategy: strategy1, table_name: table),
- double(partitioning_strategy: strategy2, table_name: table)
- ]
- end
-
- let(:strategy1) { double('strategy1', missing_partitions: nil) }
- let(:strategy2) { double('strategy2', missing_partitions: partitions) }
-
- it 'still creates partitions for the second table' do
- expect(strategy1).to receive(:missing_partitions).and_raise('this should never happen (tm)')
- expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.first.to_sql)
- expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.second.to_sql)
-
- subject
- end
- end
- end
-
- describe '#create_partitions' do
- subject { described_class.new([my_model]).create_partitions }
-
- let(:connection) { ActiveRecord::Base.connection }
- let(:my_model) do
- Class.new(ApplicationRecord) do
- include PartitionedTable
-
- self.table_name = 'my_model_example_table'
-
- partitioned_by :created_at, strategy: :monthly
- end
- end
-
- before do
- connection.execute(<<~SQL)
- CREATE TABLE my_model_example_table
- (id serial not null, created_at timestamptz not null, primary key (id, created_at))
- PARTITION BY RANGE (created_at);
- SQL
- end
-
- it 'creates partitions' do
- expect { subject }.to change { find_partitions(my_model.table_name, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA).size }.from(0)
-
- subject
- end
- end
-end
diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
new file mode 100644
index 00000000000..903a41d6dd2
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
+ include Database::PartitioningHelpers
+ include Database::TableSchemaHelpers
+ include ExclusiveLeaseHelpers
+
+ describe '.register' do
+ let(:model) { double(partitioning_strategy: nil) }
+
+ it 'remembers registered models' do
+ expect { described_class.register(model) }.to change { described_class.models }.to include(model)
+ end
+ end
+
+ context 'creating partitions (mocked)' do
+ subject(:sync_partitions) { described_class.new(models).sync_partitions }
+
+ let(:models) { [model] }
+ let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table) }
+ let(:partitioning_strategy) { double(missing_partitions: partitions, extra_partitions: []) }
+ let(:table) { "some_table" }
+
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:table_exists?).and_call_original
+ allow(ActiveRecord::Base.connection).to receive(:table_exists?).with(table).and_return(true)
+ allow(ActiveRecord::Base.connection).to receive(:execute).and_call_original
+
+ stub_exclusive_lease(described_class::MANAGEMENT_LEASE_KEY % table, timeout: described_class::LEASE_TIMEOUT)
+ end
+
+ let(:partitions) do
+ [
+ instance_double(Gitlab::Database::Partitioning::TimePartition, table: 'bar', partition_name: 'foo', to_sql: "SELECT 1"),
+ instance_double(Gitlab::Database::Partitioning::TimePartition, table: 'bar', partition_name: 'foo2', to_sql: "SELECT 2")
+ ]
+ end
+
+ it 'creates the partition' do
+ expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.first.to_sql)
+ expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.second.to_sql)
+
+ sync_partitions
+ end
+
+ context 'error handling with 2 models' do
+ let(:models) do
+ [
+ double(partitioning_strategy: strategy1, table_name: table),
+ double(partitioning_strategy: strategy2, table_name: table)
+ ]
+ end
+
+ let(:strategy1) { double('strategy1', missing_partitions: nil, extra_partitions: []) }
+ let(:strategy2) { double('strategy2', missing_partitions: partitions, extra_partitions: []) }
+
+ it 'still creates partitions for the second table' do
+ expect(strategy1).to receive(:missing_partitions).and_raise('this should never happen (tm)')
+ expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.first.to_sql)
+ expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.second.to_sql)
+
+ sync_partitions
+ end
+ end
+ end
+
+ context 'creating partitions' do
+ subject(:sync_partitions) { described_class.new([my_model]).sync_partitions }
+
+ let(:connection) { ActiveRecord::Base.connection }
+ let(:my_model) do
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
+
+ self.table_name = 'my_model_example_table'
+
+ partitioned_by :created_at, strategy: :monthly
+ end
+ end
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE my_model_example_table
+ (id serial not null, created_at timestamptz not null, primary key (id, created_at))
+ PARTITION BY RANGE (created_at);
+ SQL
+ end
+
+ it 'creates partitions' do
+ expect { sync_partitions }.to change { find_partitions(my_model.table_name, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA).size }.from(0)
+ end
+ end
+
+ context 'detaching partitions (mocked)' do
+ subject(:sync_partitions) { manager.sync_partitions }
+
+ let(:manager) { described_class.new(models) }
+ let(:models) { [model] }
+ let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table)}
+ let(:partitioning_strategy) { double(extra_partitions: extra_partitions, missing_partitions: []) }
+ let(:table) { "foo" }
+
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:table_exists?).and_call_original
+ allow(ActiveRecord::Base.connection).to receive(:table_exists?).with(table).and_return(true)
+
+ stub_exclusive_lease(described_class::MANAGEMENT_LEASE_KEY % table, timeout: described_class::LEASE_TIMEOUT)
+ end
+
+ let(:extra_partitions) do
+ [
+ instance_double(Gitlab::Database::Partitioning::TimePartition, table: table, partition_name: 'foo1'),
+ instance_double(Gitlab::Database::Partitioning::TimePartition, table: table, partition_name: 'foo2')
+ ]
+ end
+
+ context 'with the partition_pruning_dry_run feature flag enabled' do
+ before do
+ stub_feature_flags(partition_pruning_dry_run: true)
+ end
+
+ it 'detaches each extra partition' do
+ extra_partitions.each { |p| expect(manager).to receive(:detach_one_partition).with(p) }
+
+ sync_partitions
+ end
+
+ context 'error handling' do
+ let(:models) do
+ [
+ double(partitioning_strategy: error_strategy, table_name: table),
+ model
+ ]
+ end
+
+ let(:error_strategy) { double(extra_partitions: nil, missing_partitions: []) }
+
+ it 'still drops partitions for the other model' do
+ expect(error_strategy).to receive(:extra_partitions).and_raise('injected error!')
+ extra_partitions.each { |p| expect(manager).to receive(:detach_one_partition).with(p) }
+
+ sync_partitions
+ end
+ end
+ end
+
+ context 'with the partition_pruning_dry_run feature flag disabled' do
+ before do
+ stub_feature_flags(partition_pruning_dry_run: false)
+ end
+
+ it 'returns immediately' do
+ expect(manager).not_to receive(:detach)
+
+ sync_partitions
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
index 83f2436043c..a524fe681e9 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
@@ -3,192 +3,142 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers do
- include Database::TriggerHelpers
+ include Database::TableSchemaHelpers
- let(:model) do
- ActiveRecord::Migration.new.extend(described_class)
+ let(:migration) do
+ ActiveRecord::Migration.new.extend(Gitlab::Database::PartitioningMigrationHelpers)
end
- let_it_be(:connection) { ActiveRecord::Base.connection }
-
- let(:referenced_table) { :issues }
- let(:function_name) { '_test_partitioned_foreign_keys_function' }
- let(:trigger_name) { '_test_partitioned_foreign_keys_trigger' }
+ let(:source_table_name) { '_test_partitioned_table' }
+ let(:target_table_name) { '_test_referenced_table' }
+ let(:column_name) { "#{target_table_name}_id" }
+ let(:foreign_key_name) { '_test_partitioned_fk' }
+ let(:partition_schema) { 'gitlab_partitions_dynamic' }
+ let(:partition1_name) { "#{partition_schema}.#{source_table_name}_202001" }
+ let(:partition2_name) { "#{partition_schema}.#{source_table_name}_202002" }
+ let(:options) do
+ {
+ column: column_name,
+ name: foreign_key_name,
+ on_delete: :cascade,
+ validate: true
+ }
+ end
before do
- allow(model).to receive(:puts)
- allow(model).to receive(:fk_function_name).and_return(function_name)
- allow(model).to receive(:fk_trigger_name).and_return(trigger_name)
+ allow(migration).to receive(:puts)
+
+ connection.execute(<<~SQL)
+ CREATE TABLE #{target_table_name} (
+ id serial NOT NULL,
+ PRIMARY KEY (id)
+ );
+
+ CREATE TABLE #{source_table_name} (
+ id serial NOT NULL,
+ #{column_name} int NOT NULL,
+ created_at timestamptz NOT NULL,
+ PRIMARY KEY (id, created_at)
+ ) PARTITION BY RANGE (created_at);
+
+ CREATE TABLE #{partition1_name} PARTITION OF #{source_table_name}
+ FOR VALUES FROM ('2020-01-01') TO ('2020-02-01');
+
+ CREATE TABLE #{partition2_name} PARTITION OF #{source_table_name}
+ FOR VALUES FROM ('2020-02-01') TO ('2020-03-01');
+ SQL
end
- describe 'adding a foreign key' do
+ describe '#add_concurrent_partitioned_foreign_key' do
before do
- allow(model).to receive(:transaction_open?).and_return(false)
- end
-
- context 'when the table has no foreign keys' do
- it 'creates a trigger function to handle the single cascade' do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table
-
- expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
- end
-
- context 'when the table already has foreign keys' do
- context 'when the foreign key is from a different table' do
- before do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table
- end
-
- it 'creates a trigger function to handle the multiple cascades' do
- model.add_partitioned_foreign_key :epic_issues, referenced_table
-
- expect_function_to_contain(function_name,
- 'delete from issue_assignees where issue_id = old.id',
- 'delete from epic_issues where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
- end
-
- context 'when the foreign key is from the same table' do
- before do
- model.add_partitioned_foreign_key :issues, referenced_table, column: :moved_to_id
- end
-
- context 'when the foreign key is from a different column' do
- it 'creates a trigger function to handle the multiple cascades' do
- model.add_partitioned_foreign_key :issues, referenced_table, column: :duplicated_to_id
-
- expect_function_to_contain(function_name,
- 'delete from issues where moved_to_id = old.id',
- 'delete from issues where duplicated_to_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
- end
-
- context 'when the foreign key is from the same column' do
- it 'ignores the duplicate and properly recreates the trigger function' do
- model.add_partitioned_foreign_key :issues, referenced_table, column: :moved_to_id
-
- expect_function_to_contain(function_name, 'delete from issues where moved_to_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
- end
- end
- end
+ allow(migration).to receive(:foreign_key_exists?)
+ .with(source_table_name, target_table_name, anything)
+ .and_return(false)
- context 'when the foreign key is set to nullify' do
- it 'creates a trigger function that nullifies the foreign key' do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table, on_delete: :nullify
-
- expect_function_to_contain(function_name, 'update issue_assignees set issue_id = null where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
+ allow(migration).to receive(:with_lock_retries).and_yield
end
- context 'when the referencing column is a custom value' do
- it 'creates a trigger function with the correct column name' do
- model.add_partitioned_foreign_key :issues, referenced_table, column: :duplicated_to_id
+ context 'when the foreign key does not exist on the parent table' do
+ it 'creates the foreign key on each partition, and the parent table' do
+ expect(migration).to receive(:foreign_key_exists?)
+ .with(source_table_name, target_table_name, **options)
+ .and_return(false)
- expect_function_to_contain(function_name, 'delete from issues where duplicated_to_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
- end
+ expect(migration).to receive(:concurrent_partitioned_foreign_key_name).and_return(foreign_key_name)
- context 'when the referenced column is a custom value' do
- let(:referenced_table) { :user_details }
+ expect_add_concurrent_fk_and_call_original(partition1_name, target_table_name, **options)
+ expect_add_concurrent_fk_and_call_original(partition2_name, target_table_name, **options)
- it 'creates a trigger function with the correct column name' do
- model.add_partitioned_foreign_key :user_preferences, referenced_table, column: :user_id, primary_key: :user_id
+ expect(migration).to receive(:with_lock_retries).ordered.and_yield
+ expect(migration).to receive(:add_foreign_key)
+ .with(source_table_name, target_table_name, **options)
+ .ordered
+ .and_call_original
- expect_function_to_contain(function_name, 'delete from user_preferences where user_id = old.user_id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
- end
+ migration.add_concurrent_partitioned_foreign_key(source_table_name, target_table_name, column: column_name)
- context 'when the given key definition is invalid' do
- it 'raises an error with the appropriate message' do
- expect do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table, column: :not_a_real_issue_id
- end.to raise_error(/From column must be a valid column/)
+ expect_foreign_key_to_exist(source_table_name, foreign_key_name)
end
- end
-
- context 'when run inside a transaction' do
- it 'raises an error' do
- expect(model).to receive(:transaction_open?).and_return(true)
- expect do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table
- end.to raise_error(/can not be run inside a transaction/)
+ def expect_add_concurrent_fk_and_call_original(source_table_name, target_table_name, options)
+ expect(migration).to receive(:add_concurrent_foreign_key)
+ .ordered
+ .with(source_table_name, target_table_name, options)
+ .and_wrap_original do |_, source_table_name, target_table_name, options|
+ connection.add_foreign_key(source_table_name, target_table_name, **options)
+ end
end
end
- end
- context 'removing a foreign key' do
- before do
- allow(model).to receive(:transaction_open?).and_return(false)
- end
+ context 'when the foreign key exists on the parent table' do
+ it 'does not attempt to create any foreign keys' do
+ expect(migration).to receive(:concurrent_partitioned_foreign_key_name).and_return(foreign_key_name)
- context 'when the table has multiple foreign keys' do
- before do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table
- model.add_partitioned_foreign_key :epic_issues, referenced_table
- end
+ expect(migration).to receive(:foreign_key_exists?)
+ .with(source_table_name, target_table_name, **options)
+ .and_return(true)
- it 'creates a trigger function without the removed cascade' do
- expect_function_to_contain(function_name,
- 'delete from issue_assignees where issue_id = old.id',
- 'delete from epic_issues where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
+ expect(migration).not_to receive(:add_concurrent_foreign_key)
+ expect(migration).not_to receive(:with_lock_retries)
+ expect(migration).not_to receive(:add_foreign_key)
- model.remove_partitioned_foreign_key :issue_assignees, referenced_table
+ migration.add_concurrent_partitioned_foreign_key(source_table_name, target_table_name, column: column_name)
- expect_function_to_contain(function_name, 'delete from epic_issues where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
+ expect_foreign_key_not_to_exist(source_table_name, foreign_key_name)
end
end
- context 'when the table has only one remaining foreign key' do
- before do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table
+ context 'when additional foreign key options are given' do
+ let(:options) do
+ {
+ column: column_name,
+ name: '_my_fk_name',
+ on_delete: :restrict,
+ validate: true
+ }
end
- it 'removes the trigger function altogether' do
- expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
-
- model.remove_partitioned_foreign_key :issue_assignees, referenced_table
-
- expect_function_not_to_exist(function_name)
- expect_trigger_not_to_exist(referenced_table, trigger_name)
- end
- end
+ it 'forwards them to the foreign key helper methods' do
+ expect(migration).to receive(:foreign_key_exists?)
+ .with(source_table_name, target_table_name, **options)
+ .and_return(false)
- context 'when the foreign key does not exist' do
- before do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table
- end
+ expect(migration).not_to receive(:concurrent_partitioned_foreign_key_name)
- it 'ignores the invalid key and properly recreates the trigger function' do
- expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
+ expect_add_concurrent_fk(partition1_name, target_table_name, **options)
+ expect_add_concurrent_fk(partition2_name, target_table_name, **options)
- model.remove_partitioned_foreign_key :issues, referenced_table, column: :moved_to_id
+ expect(migration).to receive(:with_lock_retries).ordered.and_yield
+ expect(migration).to receive(:add_foreign_key).with(source_table_name, target_table_name, **options).ordered
- expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
+ migration.add_concurrent_partitioned_foreign_key(source_table_name, target_table_name,
+ column: column_name, name: '_my_fk_name', on_delete: :restrict)
end
- end
-
- context 'when run outside a transaction' do
- it 'raises an error' do
- expect(model).to receive(:transaction_open?).and_return(true)
- expect do
- model.remove_partitioned_foreign_key :issue_assignees, referenced_table
- end.to raise_error(/can not be run inside a transaction/)
+ def expect_add_concurrent_fk(source_table_name, target_table_name, options)
+ expect(migration).to receive(:add_concurrent_foreign_key)
+ .ordered
+ .with(source_table_name, target_table_name, options)
end
end
end
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/partitioned_foreign_key_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/partitioned_foreign_key_spec.rb
deleted file mode 100644
index a58c37f111d..00000000000
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/partitioned_foreign_key_spec.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::PartitionedForeignKey do
- let(:foreign_key) do
- described_class.new(
- to_table: 'issues',
- from_table: 'issue_assignees',
- from_column: 'issue_id',
- to_column: 'id',
- cascade_delete: true)
- end
-
- describe 'validations' do
- it 'allows keys that reference valid tables and columns' do
- expect(foreign_key).to be_valid
- end
-
- it 'does not allow keys without a valid to_table' do
- foreign_key.to_table = 'this_is_not_a_real_table'
-
- expect(foreign_key).not_to be_valid
- expect(foreign_key.errors[:to_table].first).to eq('must be a valid table')
- end
-
- it 'does not allow keys without a valid from_table' do
- foreign_key.from_table = 'this_is_not_a_real_table'
-
- expect(foreign_key).not_to be_valid
- expect(foreign_key.errors[:from_table].first).to eq('must be a valid table')
- end
-
- it 'does not allow keys without a valid to_column' do
- foreign_key.to_column = 'this_is_not_a_real_fk'
-
- expect(foreign_key).not_to be_valid
- expect(foreign_key.errors[:to_column].first).to eq('must be a valid column')
- end
-
- it 'does not allow keys without a valid from_column' do
- foreign_key.from_column = 'this_is_not_a_real_pk'
-
- expect(foreign_key).not_to be_valid
- expect(foreign_key.errors[:from_column].first).to eq('must be a valid column')
- end
- end
-end
diff --git a/spec/lib/gitlab/database/postgres_index_spec.rb b/spec/lib/gitlab/database/postgres_index_spec.rb
index 2fda9b85c5a..e1832219ebf 100644
--- a/spec/lib/gitlab/database/postgres_index_spec.rb
+++ b/spec/lib/gitlab/database/postgres_index_spec.rb
@@ -22,17 +22,23 @@ RSpec.describe Gitlab::Database::PostgresIndex do
it_behaves_like 'a postgres model'
- describe '.regular' do
- it 'only non-unique indexes' do
- expect(described_class.regular).to all(have_attributes(unique: false))
- end
-
+ describe '.reindexing_support' do
it 'only non partitioned indexes' do
- expect(described_class.regular).to all(have_attributes(partitioned: false))
+ expect(described_class.reindexing_support).to all(have_attributes(partitioned: false))
end
it 'only indexes that dont serve an exclusion constraint' do
- expect(described_class.regular).to all(have_attributes(exclusion: false))
+ expect(described_class.reindexing_support).to all(have_attributes(exclusion: false))
+ end
+
+ it 'only non-expression indexes' do
+ expect(described_class.reindexing_support).to all(have_attributes(expression: false))
+ end
+
+ it 'only btree and gist indexes' do
+ types = described_class.reindexing_support.map(&:type).uniq
+
+ expect(types & %w(btree gist)).to eq(types)
end
end
@@ -67,6 +73,34 @@ RSpec.describe Gitlab::Database::PostgresIndex do
end
end
+ describe '#relative_bloat_level' do
+ subject { build(:postgres_index, bloat_estimate: bloat_estimate, ondisk_size_bytes: 1024) }
+
+ let(:bloat_estimate) { build(:postgres_index_bloat_estimate, bloat_size: 256) }
+
+ it 'calculates the relative bloat level' do
+ expect(subject.relative_bloat_level).to eq(0.25)
+ end
+ end
+
+ describe '#reset' do
+ subject { index.reset }
+
+ let(:index) { described_class.by_identifier(identifier) }
+
+ it 'calls #reload' do
+ expect(index).to receive(:reload).once.and_call_original
+
+ subject
+ end
+
+ it 'resets the bloat estimation' do
+ expect(index).to receive(:clear_memoization).with(:bloat_size).and_call_original
+
+ subject
+ end
+ end
+
describe '#unique?' do
it 'returns true for a unique index' do
expect(find('public.bar_key')).to be_unique
diff --git a/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
index ca9f4af9187..40e36bc02e9 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
@@ -3,33 +3,27 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::PostgresqlAdapter::DumpSchemaVersionsMixin do
- let(:schema_migration) { double('schema_migration', all_versions: versions) }
-
- let(:instance) do
- Object.new.extend(described_class)
- end
-
- before do
- allow(instance).to receive(:schema_migration).and_return(schema_migration)
- end
-
- context 'when version files exist' do
- let(:versions) { %w(5 2 1000 200 4 93 2) }
+ let(:instance_class) do
+ klass = Class.new do
+ def dump_schema_information
+ original_dump_schema_information
+ end
+
+ def original_dump_schema_information
+ end
+ end
- it 'touches version files' do
- expect(Gitlab::Database::SchemaVersionFiles).to receive(:touch_all).with(versions)
+ klass.prepend(described_class)
- instance.dump_schema_information
- end
+ klass
end
- context 'when version files do not exist' do
- let(:versions) { [] }
+ let(:instance) { instance_class.new }
- it 'does not touch version files' do
- expect(Gitlab::Database::SchemaVersionFiles).not_to receive(:touch_all)
+ it 'calls SchemaMigrations touch_all and skips original implementation' do
+ expect(Gitlab::Database::SchemaMigrations).to receive(:touch_all).with(instance)
+ expect(instance).not_to receive(:original_dump_schema_information)
- instance.dump_schema_information
- end
+ instance.dump_schema_information
end
end
diff --git a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
index ea8c9e2cfd7..2a1f91b5b21 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin do
end
end
- let(:config) { Rails.application.config_for(:database).merge(pool: 1) }
+ let(:config) { ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash.merge(pool: 1) }
let(:pool) { model.establish_connection(config) }
it 'calls the force disconnect callback on checkin' do
diff --git a/spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb
index e9c512f94bb..c6542aa2adb 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::PostgresqlAdapter::TypeMapCache do
- let(:db_config) { ActiveRecord::Base.configurations.configs_for(env_name: 'test', name: 'primary').configuration_hash }
+ let(:db_config) { ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash }
let(:adapter_class) { ActiveRecord::ConnectionAdapters::PostgreSQLAdapter }
before do
diff --git a/spec/lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin_spec.rb
new file mode 100644
index 00000000000..3e675a85999
--- /dev/null
+++ b/spec/lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresqlDatabaseTasks::LoadSchemaVersionsMixin do
+ let(:instance_class) do
+ klass = Class.new do
+ def structure_load
+ original_structure_load
+ end
+
+ def original_structure_load
+ end
+ end
+
+ klass.prepend(described_class)
+
+ klass
+ end
+
+ let(:instance) { instance_class.new }
+
+ it 'calls SchemaMigrations load_all' do
+ connection = double('connection')
+ allow(instance).to receive(:connection).and_return(connection)
+
+ expect(instance).to receive(:original_structure_load).ordered
+ expect(Gitlab::Database::SchemaMigrations).to receive(:load_all).with(connection).ordered
+
+ instance.structure_load
+ end
+end
diff --git a/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb b/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
deleted file mode 100644
index d9077969003..00000000000
--- a/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
+++ /dev/null
@@ -1,303 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
- subject { described_class.new(index, logger: logger) }
-
- let(:table_name) { '_test_reindex_table' }
- let(:column_name) { '_test_column' }
- let(:index_name) { '_test_reindex_index' }
- let(:index) { instance_double(Gitlab::Database::PostgresIndex, indexrelid: 42, name: index_name, schema: 'public', tablename: table_name, partitioned?: false, unique?: false, exclusion?: false, expression?: false, definition: 'CREATE INDEX _test_reindex_index ON public._test_reindex_table USING btree (_test_column)') }
- let(:logger) { double('logger', debug: nil, info: nil, error: nil ) }
- let(:connection) { ActiveRecord::Base.connection }
-
- before do
- connection.execute(<<~SQL)
- CREATE TABLE #{table_name} (
- id serial NOT NULL PRIMARY KEY,
- #{column_name} integer NOT NULL);
-
- CREATE INDEX #{index.name} ON #{table_name} (#{column_name});
- SQL
- end
-
- context 'when the index is unique' do
- before do
- allow(index).to receive(:unique?).and_return(true)
- end
-
- it 'raises an error' do
- expect do
- subject.perform
- end.to raise_error(described_class::ReindexError, /UNIQUE indexes are currently not supported/)
- end
- end
-
- context 'when the index is partitioned' do
- before do
- allow(index).to receive(:partitioned?).and_return(true)
- end
-
- it 'raises an error' do
- expect do
- subject.perform
- end.to raise_error(described_class::ReindexError, /partitioned indexes are currently not supported/)
- end
- end
-
- context 'when the index serves an exclusion constraint' do
- before do
- allow(index).to receive(:exclusion?).and_return(true)
- end
-
- it 'raises an error' do
- expect do
- subject.perform
- end.to raise_error(described_class::ReindexError, /indexes serving an exclusion constraint are currently not supported/)
- end
- end
-
- context 'when the index is a lingering temporary index from a previous reindexing run' do
- context 'with the temporary index prefix' do
- let(:index_name) { 'tmp_reindex_something' }
-
- it 'raises an error' do
- expect do
- subject.perform
- end.to raise_error(described_class::ReindexError, /left-over temporary index/)
- end
- end
-
- context 'with the replaced index prefix' do
- let(:index_name) { 'old_reindex_something' }
-
- it 'raises an error' do
- expect do
- subject.perform
- end.to raise_error(described_class::ReindexError, /left-over temporary index/)
- end
- end
- end
-
- context 'replacing the original index with a rebuilt copy' do
- let(:replacement_name) { 'tmp_reindex_42' }
- let(:replaced_name) { 'old_reindex_42' }
-
- let(:create_index) { "CREATE INDEX CONCURRENTLY #{replacement_name} ON public.#{table_name} USING btree (#{column_name})" }
- let(:drop_index) do
- <<~SQL
- DROP INDEX CONCURRENTLY
- IF EXISTS "public"."#{replacement_name}"
- SQL
- end
-
- let!(:original_index) { find_index_create_statement }
-
- it 'integration test: executing full index replacement without mocks' do
- allow(connection).to receive(:execute).and_wrap_original do |method, sql|
- method.call(sql.sub(/CONCURRENTLY/, ''))
- end
-
- subject.perform
-
- check_index_exists
- end
-
- context 'mocked specs' do
- before do
- allow(subject).to receive(:connection).and_return(connection)
- allow(connection).to receive(:execute).and_call_original
- end
-
- it 'replaces the existing index with an identical index' do
- expect(connection).to receive(:execute).with('SET statement_timeout TO \'32400s\'')
-
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_index_rename(index.name, replaced_name)
- expect_index_rename(replacement_name, index.name)
- expect_index_rename(replaced_name, replacement_name)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
- end
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- subject.perform
-
- check_index_exists
- end
-
- context 'for expression indexes' do
- before do
- allow(index).to receive(:expression?).and_return(true)
- end
-
- it 'rebuilds table statistics before dropping the original index' do
- expect(connection).to receive(:execute).with('SET statement_timeout TO \'32400s\'')
-
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_to_execute_concurrently_in_order(<<~SQL)
- ANALYZE "#{index.schema}"."#{index.tablename}"
- SQL
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_index_rename(index.name, replaced_name)
- expect_index_rename(replacement_name, index.name)
- expect_index_rename(replaced_name, replacement_name)
-
- expect_index_drop(drop_index)
-
- subject.perform
-
- check_index_exists
- end
- end
-
- context 'when a dangling index is left from a previous run' do
- before do
- connection.execute("CREATE INDEX #{replacement_name} ON #{table_name} (#{column_name})")
- end
-
- it 'replaces the existing index with an identical index' do
- expect_index_drop(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_index_rename(index.name, replaced_name)
- expect_index_rename(replacement_name, index.name)
- expect_index_rename(replaced_name, replacement_name)
-
- expect_index_drop(drop_index)
-
- subject.perform
-
- check_index_exists
- end
- end
-
- context 'when it fails to create the replacement index' do
- it 'safely cleans up and signals the error' do
- expect(connection).to receive(:execute).with(create_index).ordered
- .and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
- end
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
-
- check_index_exists
- end
- end
-
- context 'when the replacement index is not valid' do
- it 'safely cleans up and signals the error' do
- replacement_index = double('replacement index', valid_index?: false)
- allow(Gitlab::Database::PostgresIndex).to receive(:find_by).with(schema: 'public', name: replacement_name).and_return(nil, replacement_index)
-
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
- end
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.perform }.to raise_error(described_class::ReindexError, /replacement index was created as INVALID/)
-
- check_index_exists
- end
- end
-
- context 'when a database error occurs while swapping the indexes' do
- it 'safely cleans up and signals the error' do
- replacement_index = double('replacement index', valid_index?: true)
- allow(Gitlab::Database::PostgresIndex).to receive(:find_by).with(schema: 'public', name: replacement_name).and_return(nil, replacement_index)
-
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_index_rename(index.name, replaced_name).and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
-
- expect_index_drop(drop_index)
-
- expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
-
- check_index_exists
- end
- end
-
- context 'when with_lock_retries fails to acquire the lock' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true)
- .and_raise(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, 'exhausted')
- end
-
- expect_index_drop(drop_index)
-
- expect { subject.perform }.to raise_error(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, /exhausted/)
-
- check_index_exists
- end
- end
- end
- end
-
- def expect_to_execute_concurrently_in_order(sql)
- # Indexes cannot be created CONCURRENTLY in a transaction. Since the tests are wrapped in transactions,
- # verify the original call but pass through the non-concurrent form.
- expect(connection).to receive(:execute).with(sql).ordered.and_wrap_original do |method, sql|
- method.call(sql.sub(/CONCURRENTLY/, ''))
- end
- end
-
- def expect_index_rename(from, to)
- expect(connection).to receive(:execute).with(<<~SQL).ordered
- ALTER INDEX "public"."#{from}"
- RENAME TO "#{to}"
- SQL
- end
-
- def expect_index_drop(drop_index)
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
- end
-
- expect_to_execute_concurrently_in_order(drop_index)
- end
-
- def find_index_create_statement
- ActiveRecord::Base.connection.select_value(<<~SQL)
- SELECT indexdef
- FROM pg_indexes
- WHERE schemaname = 'public'
- AND indexname = #{ActiveRecord::Base.connection.quote(index.name)}
- SQL
- end
-
- def check_index_exists
- expect(find_index_create_statement).to eq(original_index)
- end
-end
diff --git a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
index ae6362ba812..085fd3061ad 100644
--- a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
@@ -9,16 +9,9 @@ RSpec.describe Gitlab::Database::Reindexing::Coordinator do
describe '.perform' do
subject { described_class.new(index, notifier).perform }
- before do
- swapout_view_for_table(:postgres_indexes)
-
- allow(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).with(index).and_return(reindexer)
- allow(Gitlab::Database::Reindexing::ReindexAction).to receive(:create_for).with(index).and_return(action)
- end
-
let(:index) { create(:postgres_index) }
let(:notifier) { instance_double(Gitlab::Database::Reindexing::GrafanaNotifier, notify_start: nil, notify_end: nil) }
- let(:reindexer) { instance_double(Gitlab::Database::Reindexing::ConcurrentReindex, perform: nil) }
+ let(:reindexer) { instance_double(Gitlab::Database::Reindexing::ReindexConcurrently, perform: nil) }
let(:action) { create(:reindex_action, index: index) }
let!(:lease) { stub_exclusive_lease(lease_key, uuid, timeout: lease_timeout) }
@@ -26,6 +19,13 @@ RSpec.describe Gitlab::Database::Reindexing::Coordinator do
let(:lease_timeout) { 1.day }
let(:uuid) { 'uuid' }
+ before do
+ swapout_view_for_table(:postgres_indexes)
+
+ allow(Gitlab::Database::Reindexing::ReindexConcurrently).to receive(:new).with(index).and_return(reindexer)
+ allow(Gitlab::Database::Reindexing::ReindexAction).to receive(:create_for).with(index).and_return(action)
+ end
+
context 'locking' do
it 'acquires a lock while reindexing' do
expect(lease).to receive(:try_obtain).ordered.and_return(uuid)
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::Database::Reindexing::Coordinator do
it 'does not perform reindexing actions if lease is not granted' do
expect(lease).to receive(:try_obtain).ordered.and_return(false)
- expect(Gitlab::Database::Reindexing::ConcurrentReindex).not_to receive(:new)
+ expect(Gitlab::Database::Reindexing::ReindexConcurrently).not_to receive(:new)
subject
end
diff --git a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
index 4466679a099..ee3f2b1b415 100644
--- a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
@@ -10,20 +10,50 @@ RSpec.describe Gitlab::Database::Reindexing::IndexSelection do
before do
swapout_view_for_table(:postgres_index_bloat_estimates)
swapout_view_for_table(:postgres_indexes)
+
+ create_list(:postgres_index, 10, ondisk_size_bytes: 10.gigabytes).each_with_index do |index, i|
+ create(:postgres_index_bloat_estimate, index: index, bloat_size_bytes: 2.gigabyte * (i + 1))
+ end
end
def execute(sql)
ActiveRecord::Base.connection.execute(sql)
end
- it 'orders by highest bloat first' do
- create_list(:postgres_index, 10).each_with_index do |index, i|
- create(:postgres_index_bloat_estimate, index: index, bloat_size_bytes: 1.megabyte * i)
- end
+ it 'orders by highest relative bloat first' do
+ expected = Gitlab::Database::PostgresIndex.all.sort_by(&:relative_bloat_level).reverse.map(&:name)
+
+ expect(subject.map(&:name)).to eq(expected)
+ end
+
+ it 'excludes indexes with a relative bloat level below 20%' do
+ excluded = create(
+ :postgres_index_bloat_estimate,
+ index: create(:postgres_index, ondisk_size_bytes: 10.gigabytes),
+ bloat_size_bytes: 1.9.gigabyte # 19% relative index bloat
+ )
- expected = Gitlab::Database::PostgresIndexBloatEstimate.order(bloat_size_bytes: :desc).map(&:index)
+ expect(subject).not_to include(excluded.index)
+ end
+
+ it 'excludes indexes smaller than 1 GB ondisk size' do
+ excluded = create(
+ :postgres_index_bloat_estimate,
+ index: create(:postgres_index, ondisk_size_bytes: 0.99.gigabytes),
+ bloat_size_bytes: 0.8.gigabyte
+ )
+
+ expect(subject).not_to include(excluded.index)
+ end
+
+ it 'excludes indexes larger than 100 GB ondisk size' do
+ excluded = create(
+ :postgres_index_bloat_estimate,
+ index: create(:postgres_index, ondisk_size_bytes: 101.gigabytes),
+ bloat_size_bytes: 25.gigabyte
+ )
- expect(subject).to eq(expected)
+ expect(subject).not_to include(excluded.index)
end
context 'with time frozen' do
@@ -31,20 +61,17 @@ RSpec.describe Gitlab::Database::Reindexing::IndexSelection do
freeze_time { example.run }
end
- it 'does not return indexes with reindex action in the last 7 days' do
- not_recently_reindexed = create_list(:postgres_index, 2).each_with_index do |index, i|
- create(:postgres_index_bloat_estimate, index: index, bloat_size_bytes: 1.megabyte * i)
- create(:reindex_action, index: index, action_end: Time.zone.now - 7.days - 1.minute)
+ it 'does not return indexes with reindex action in the last 10 days' do
+ not_recently_reindexed = Gitlab::Database::PostgresIndex.all.each do |index|
+ create(:reindex_action, index: index, action_end: Time.zone.now - 10.days - 1.minute)
end
- create_list(:postgres_index, 2).each_with_index do |index, i|
- create(:postgres_index_bloat_estimate, index: index, bloat_size_bytes: 1.megabyte * i)
+ create_list(:postgres_index, 10, ondisk_size_bytes: 10.gigabytes).each_with_index do |index, i|
+ create(:postgres_index_bloat_estimate, index: index, bloat_size_bytes: 2.gigabyte * (i + 1))
create(:reindex_action, index: index, action_end: Time.zone.now)
end
- expected = Gitlab::Database::PostgresIndexBloatEstimate.where(identifier: not_recently_reindexed.map(&:identifier)).map(&:index).map(&:identifier).sort
-
- expect(subject.map(&:identifier).sort).to eq(expected)
+ expect(subject.map(&:name).sort).to eq(not_recently_reindexed.map(&:name).sort)
end
end
end
diff --git a/spec/lib/gitlab/database/reindexing/reindex_concurrently_spec.rb b/spec/lib/gitlab/database/reindexing/reindex_concurrently_spec.rb
new file mode 100644
index 00000000000..6f87475fc94
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing/reindex_concurrently_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing::ReindexConcurrently, '#perform' do
+ subject { described_class.new(index, logger: logger).perform }
+
+ let(:table_name) { '_test_reindex_table' }
+ let(:column_name) { '_test_column' }
+ let(:index_name) { '_test_reindex_index' }
+ let(:index) { Gitlab::Database::PostgresIndex.by_identifier("public.#{iname(index_name)}") }
+ let(:logger) { double('logger', debug: nil, info: nil, error: nil ) }
+ let(:connection) { ActiveRecord::Base.connection }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ #{column_name} integer NOT NULL);
+
+ CREATE INDEX #{index_name} ON #{table_name} (#{column_name});
+ SQL
+ end
+
+ context 'when the index serves an exclusion constraint' do
+ before do
+ allow(index).to receive(:exclusion?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::ReindexError, /indexes serving an exclusion constraint are currently not supported/)
+ end
+ end
+
+ context 'when attempting to reindex an expression index' do
+ before do
+ allow(index).to receive(:expression?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::ReindexError, /expression indexes are currently not supported/)
+ end
+ end
+
+ context 'when the index is a dangling temporary index from a previous reindexing run' do
+ context 'with the temporary index prefix' do
+ let(:index_name) { '_test_reindex_index_ccnew' }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::ReindexError, /left-over temporary index/)
+ end
+ end
+
+ context 'with the temporary index prefix with a counter' do
+ let(:index_name) { '_test_reindex_index_ccnew1' }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::ReindexError, /left-over temporary index/)
+ end
+ end
+ end
+
+ it 'recreates the index using REINDEX with a long statement timeout' do
+ expect_to_execute_in_order(
+ "SET statement_timeout TO '32400s'",
+ "REINDEX INDEX CONCURRENTLY \"public\".\"#{index.name}\"",
+ "RESET statement_timeout"
+ )
+
+ subject
+ end
+
+ context 'with dangling indexes matching TEMPORARY_INDEX_PATTERN, i.e. /some\_index\_ccnew(\d)*/' do
+ before do
+ # dangling indexes
+ connection.execute("CREATE INDEX #{iname(index_name, '_ccnew')} ON #{table_name} (#{column_name})")
+ connection.execute("CREATE INDEX #{iname(index_name, '_ccnew2')} ON #{table_name} (#{column_name})")
+
+ # Unrelated index - don't drop
+ connection.execute("CREATE INDEX some_other_index_ccnew ON #{table_name} (#{column_name})")
+ end
+
+ shared_examples_for 'dropping the dangling index' do
+ it 'drops the dangling indexes while controlling lock_timeout' do
+ expect_to_execute_in_order(
+ # Regular index rebuild
+ "SET statement_timeout TO '32400s'",
+ "REINDEX INDEX CONCURRENTLY \"public\".\"#{index_name}\"",
+ "RESET statement_timeout",
+ # Drop _ccnew index
+ "SET lock_timeout TO '60000ms'",
+ "DROP INDEX CONCURRENTLY IF EXISTS \"public\".\"#{iname(index_name, '_ccnew')}\"",
+ "RESET idle_in_transaction_session_timeout; RESET lock_timeout",
+ # Drop _ccnew2 index
+ "SET lock_timeout TO '60000ms'",
+ "DROP INDEX CONCURRENTLY IF EXISTS \"public\".\"#{iname(index_name, '_ccnew2')}\"",
+ "RESET idle_in_transaction_session_timeout; RESET lock_timeout"
+ )
+
+ subject
+ end
+ end
+
+ context 'with normal index names' do
+ it_behaves_like 'dropping the dangling index'
+ end
+
+ context 'with index name at 63 character limit' do
+ let(:index_name) { 'a' * 63 }
+
+ before do
+ # Another unrelated index - don't drop
+ extra_index = index_name[0...55]
+ connection.execute("CREATE INDEX #{extra_index}_ccnew ON #{table_name} (#{column_name})")
+ end
+
+ it_behaves_like 'dropping the dangling index'
+ end
+ end
+
+ def iname(name, suffix = '')
+ "#{name[0...63 - suffix.size]}#{suffix}"
+ end
+
+ def expect_to_execute_in_order(*queries)
+ # Indexes cannot be created CONCURRENTLY in a transaction. Since the tests are wrapped in transactions,
+ # verify the original call but pass through the non-concurrent form.
+ queries.each do |query|
+ expect(connection).to receive(:execute).with(query).ordered.and_wrap_original do |method, sql|
+ method.call(sql.sub(/CONCURRENTLY/, ''))
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
index b2f038e8b62..8aff99544ca 100644
--- a/spec/lib/gitlab/database/reindexing_spec.rb
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::Database::Reindexing do
it 'retrieves regular indexes that are no left-overs from previous runs' do
result = double
- expect(Gitlab::Database::PostgresIndex).to receive_message_chain('regular.where.not_match.not_match').with(no_args).with('NOT expression').with('^tmp_reindex_').with('^old_reindex_').and_return(result)
+ expect(Gitlab::Database::PostgresIndex).to receive_message_chain('not_match.reindexing_support').with('\_ccnew[0-9]*$').with(no_args).and_return(result)
expect(subject).to eq(result)
end
diff --git a/spec/lib/gitlab/database/schema_migrations/context_spec.rb b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
new file mode 100644
index 00000000000..f3bed9b40d6
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaMigrations::Context do
+ let(:connection) { ActiveRecord::Base.connection }
+
+ let(:context) { described_class.new(connection) }
+
+ describe '#schema_directory' do
+ it 'returns db/schema_migrations' do
+ expect(context.schema_directory).to eq(File.join(Rails.root, 'db/schema_migrations'))
+ end
+
+ context 'multiple databases' do
+ let(:connection) { Ci::BaseModel.connection }
+
+ it 'returns a directory path that is database specific' do
+ skip_if_multiple_databases_not_setup
+
+ expect(context.schema_directory).to eq(File.join(Rails.root, 'db/ci_schema_migrations'))
+ end
+ end
+ end
+
+ describe '#versions_to_create' do
+ before do
+ allow(connection).to receive_message_chain(:schema_migration, :all_versions).and_return(migrated_versions)
+
+ migrations_struct = Struct.new(:version)
+ migrations = file_versions.map { |version| migrations_struct.new(version) }
+ allow(connection).to receive_message_chain(:migration_context, :migrations).and_return(migrations)
+ end
+
+ let(:version1) { '20200123' }
+ let(:version2) { '20200410' }
+ let(:version3) { '20200602' }
+ let(:version4) { '20200809' }
+
+ let(:migrated_versions) { file_versions }
+ let(:file_versions) { [version1, version2, version3, version4] }
+
+ context 'migrated versions is the same as migration file versions' do
+ it 'returns migrated versions' do
+ expect(context.versions_to_create).to eq(migrated_versions)
+ end
+ end
+
+ context 'migrated versions is subset of migration file versions' do
+ let(:migrated_versions) { [version1, version2] }
+
+ it 'returns migrated versions' do
+ expect(context.versions_to_create).to eq(migrated_versions)
+ end
+ end
+
+ context 'migrated versions is superset of migration file versions' do
+ let(:migrated_versions) { file_versions + ['20210809'] }
+
+ it 'returns file versions' do
+ expect(context.versions_to_create).to eq(file_versions)
+ end
+ end
+
+ context 'migrated versions has slightly different versions to migration file versions' do
+ let(:migrated_versions) { [version1, version2, version3, version4, '20210101'] }
+ let(:file_versions) { [version1, version2, version3, version4, '20210102'] }
+
+ it 'returns the common set' do
+ expect(context.versions_to_create).to eq([version1, version2, version3, version4])
+ end
+ end
+ end
+
+ def skip_if_multiple_databases_not_setup
+ skip 'Skipping because multiple databases not set up' unless Gitlab::Database.has_config?(:ci)
+ end
+end
diff --git a/spec/lib/gitlab/database/schema_version_files_spec.rb b/spec/lib/gitlab/database/schema_migrations/migrations_spec.rb
index c3b3ae0a07f..8be776fdb88 100644
--- a/spec/lib/gitlab/database/schema_version_files_spec.rb
+++ b/spec/lib/gitlab/database/schema_migrations/migrations_spec.rb
@@ -2,43 +2,37 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::SchemaVersionFiles do
- describe '.touch_all' do
+RSpec.describe Gitlab::Database::SchemaMigrations::Migrations do
+ let(:connection) { ApplicationRecord.connection }
+ let(:context) { Gitlab::Database::SchemaMigrations::Context.new(connection) }
+
+ let(:migrations) { described_class.new(context) }
+
+ describe '#touch_all' do
let(:version1) { '20200123' }
let(:version2) { '20200410' }
let(:version3) { '20200602' }
let(:version4) { '20200809' }
+
let(:relative_schema_directory) { 'db/schema_migrations' }
- let(:relative_migrate_directory) { 'db/migrate' }
- let(:relative_post_migrate_directory) { 'db/post_migrate' }
it 'creates a file containing a checksum for each version with a matching migration' do
Dir.mktmpdir do |tmpdir|
schema_directory = Pathname.new(tmpdir).join(relative_schema_directory)
- migrate_directory = Pathname.new(tmpdir).join(relative_migrate_directory)
- post_migrate_directory = Pathname.new(tmpdir).join(relative_post_migrate_directory)
-
- FileUtils.mkdir_p(migrate_directory)
- FileUtils.mkdir_p(post_migrate_directory)
FileUtils.mkdir_p(schema_directory)
- migration1_filepath = migrate_directory.join("#{version1}_migration.rb")
- FileUtils.touch(migration1_filepath)
-
- migration2_filepath = post_migrate_directory.join("#{version2}_post_migration.rb")
- FileUtils.touch(migration2_filepath)
-
old_version_filepath = schema_directory.join('20200101')
FileUtils.touch(old_version_filepath)
expect(File.exist?(old_version_filepath)).to be(true)
- allow(described_class).to receive(:schema_directory).and_return(schema_directory)
- allow(described_class).to receive(:migration_directories).and_return([migrate_directory, post_migrate_directory])
+ allow(context).to receive(:schema_directory).and_return(schema_directory)
+ allow(context).to receive(:versions_to_create).and_return([version1, version2])
- described_class.touch_all([version1, version2, version3, version4])
+ migrations.touch_all
expect(File.exist?(old_version_filepath)).to be(false)
+
[version1, version2].each do |version|
version_filepath = schema_directory.join(version)
expect(File.exist?(version_filepath)).to be(true)
@@ -55,12 +49,9 @@ RSpec.describe Gitlab::Database::SchemaVersionFiles do
end
end
- describe '.load_all' do
- let(:connection) { double('connection') }
-
+ describe '#load_all' do
before do
- allow(described_class).to receive(:connection).and_return(connection)
- allow(described_class).to receive(:find_version_filenames).and_return(filenames)
+ allow(migrations).to receive(:version_filenames).and_return(filenames)
end
context 'when there are no version files' do
@@ -70,7 +61,7 @@ RSpec.describe Gitlab::Database::SchemaVersionFiles do
expect(connection).not_to receive(:quote_string)
expect(connection).not_to receive(:execute)
- described_class.load_all
+ migrations.load_all
end
end
@@ -88,7 +79,7 @@ RSpec.describe Gitlab::Database::SchemaVersionFiles do
ON CONFLICT DO NOTHING
SQL
- described_class.load_all
+ migrations.load_all
end
end
end
diff --git a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
index e93d8ab590d..ff8e76311ae 100644
--- a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
@@ -37,8 +37,10 @@ RSpec.describe Gitlab::Database::WithLockRetriesOutsideTransaction do
context 'when lock retry is enabled' do
let(:lock_fiber) do
Fiber.new do
+ configuration = ActiveRecordSecond.configurations.find_db_config(Rails.env).configuration_hash
+
# Initiating a second DB connection for the lock
- conn = ActiveRecordSecond.establish_connection(Rails.configuration.database_configuration[Rails.env]).connection
+ conn = ActiveRecordSecond.establish_connection(configuration).connection
conn.transaction do
conn.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index df2c506e163..367f793b117 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -37,8 +37,10 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when lock retry is enabled' do
let(:lock_fiber) do
Fiber.new do
+ configuration = ActiveRecordSecond.configurations.find_db_config(Rails.env).configuration_hash
+
# Initiating a second DB connection for the lock
- conn = ActiveRecordSecond.establish_connection(Rails.configuration.database_configuration[Rails.env]).connection
+ conn = ActiveRecordSecond.establish_connection(configuration).connection
conn.transaction do
conn.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
diff --git a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
index e70b34d6557..2740664d200 100644
--- a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
+++ b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
@@ -56,10 +56,10 @@ RSpec.describe Gitlab::DatabaseImporters::InstanceAdministrators::CreateGroup do
it "tracks successful install" do
expect(::Gitlab::Tracking).to receive(:event).with(
- 'instance_administrators_group', 'group_created'
+ 'instance_administrators_group', 'group_created', namespace: group
)
- result
+ subject.execute
end
it 'creates group' do
diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
index 28291508ac0..f5ea660ee1e 100644
--- a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
+++ b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
@@ -63,11 +63,11 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
application_setting.update(allow_local_requests_from_web_hooks_and_services: true)
end
- shared_examples 'has prometheus service' do |server_address|
+ shared_examples 'has prometheus integration' do |server_address|
it do
expect(result[:status]).to eq(:success)
- prometheus = project.prometheus_service
+ prometheus = project.prometheus_integration
expect(prometheus).not_to eq(nil)
expect(prometheus.api_url).to eq(server_address)
expect(prometheus.active).to eq(true)
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
end
end
- it_behaves_like 'has prometheus service', 'http://localhost:9090'
+ it_behaves_like 'has prometheus integration', 'http://localhost:9090'
it 'is idempotent' do
result1 = subject.execute
@@ -86,10 +86,10 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
end
it "tracks successful install" do
- expect(::Gitlab::Tracking).to receive(:event).twice
- expect(::Gitlab::Tracking).to receive(:event).with('self_monitoring', 'project_created')
+ expect(::Gitlab::Tracking).to receive(:event).with("instance_administrators_group", "group_created", namespace: project.namespace)
+ expect(::Gitlab::Tracking).to receive(:event).with('self_monitoring', 'project_created', project: project, namespace: project.namespace)
- result
+ subject.execute
end
it 'creates group' do
@@ -134,13 +134,13 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
expect(application_setting.reload.self_monitoring_project_id).to eq(project.id)
end
- it 'creates a Prometheus service' do
+ it 'creates a Prometheus integration' do
expect(result[:status]).to eq(:success)
integrations = result[:project].reload.integrations
expect(integrations.count).to eq(1)
- # Ensures PrometheusService#self_monitoring_project? is true
+ # Ensures Integrations::Prometheus#self_monitoring_project? is true
expect(integrations.first.allow_local_api_url?).to be_truthy
end
@@ -193,12 +193,12 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
end
end
- context 'when local requests from hooks and services are not allowed' do
+ context 'when local requests from hooks and integrations are not allowed' do
before do
application_setting.update(allow_local_requests_from_web_hooks_and_services: false)
end
- it_behaves_like 'has prometheus service', 'http://localhost:9090'
+ it_behaves_like 'has prometheus integration', 'http://localhost:9090'
end
context 'with non default prometheus address' do
@@ -211,18 +211,18 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
}
end
- it_behaves_like 'has prometheus service', 'https://localhost:9090'
+ it_behaves_like 'has prometheus integration', 'https://localhost:9090'
context 'with :9090 symbol' do
let(:server_address) { :':9090' }
- it_behaves_like 'has prometheus service', 'http://localhost:9090'
+ it_behaves_like 'has prometheus integration', 'http://localhost:9090'
end
context 'with 0.0.0.0:9090' do
let(:server_address) { '0.0.0.0:9090' }
- it_behaves_like 'has prometheus service', 'http://localhost:9090'
+ it_behaves_like 'has prometheus integration', 'http://localhost:9090'
end
end
@@ -233,7 +233,7 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
it 'does not fail' do
expect(result).to include(status: :success)
- expect(project.prometheus_service).to be_nil
+ expect(project.prometheus_integration).to be_nil
end
end
@@ -244,7 +244,7 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
it 'does not fail' do
expect(result).to include(status: :success)
- expect(project.prometheus_service).to be_nil
+ expect(project.prometheus_integration).to be_nil
end
end
@@ -258,7 +258,7 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
it 'does not configure prometheus' do
expect(result).to include(status: :success)
- expect(project.prometheus_service).to be_nil
+ expect(project.prometheus_integration).to be_nil
end
end
@@ -267,7 +267,7 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
it 'does not configure prometheus' do
expect(result).to include(status: :success)
- expect(project.prometheus_service).to be_nil
+ expect(project.prometheus_integration).to be_nil
end
end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 847f7ec2d74..a834e41c019 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -41,6 +41,79 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.has_config?' do
+ context 'two tier database config' do
+ before do
+ allow(Gitlab::Application).to receive_message_chain(:config, :database_configuration, :[]).with(Rails.env)
+ .and_return({ "adapter" => "postgresql", "database" => "gitlabhq_test" })
+ end
+
+ it 'returns false for primary' do
+ expect(described_class.has_config?(:primary)).to eq(false)
+ end
+
+ it 'returns false for ci' do
+ expect(described_class.has_config?(:ci)).to eq(false)
+ end
+ end
+
+ context 'three tier database config' do
+ before do
+ allow(Gitlab::Application).to receive_message_chain(:config, :database_configuration, :[]).with(Rails.env)
+ .and_return({
+ "primary" => { "adapter" => "postgresql", "database" => "gitlabhq_test" },
+ "ci" => { "adapter" => "postgresql", "database" => "gitlabhq_test_ci" }
+ })
+ end
+
+ it 'returns true for primary' do
+ expect(described_class.has_config?(:primary)).to eq(true)
+ end
+
+ it 'returns true for ci' do
+ expect(described_class.has_config?(:ci)).to eq(true)
+ end
+
+ it 'returns false for non-existent' do
+ expect(described_class.has_config?(:nonexistent)).to eq(false)
+ end
+ end
+ end
+
+ describe '.main_database?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:database_name, :result) do
+ :main | true
+ 'main' | true
+ :ci | false
+ 'ci' | false
+ :archive | false
+ 'archive' | false
+ end
+
+ with_them do
+ it { expect(described_class.main_database?(database_name)).to eq(result) }
+ end
+ end
+
+ describe '.ci_database?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:database_name, :result) do
+ :main | false
+ 'main' | false
+ :ci | true
+ 'ci' | true
+ :archive | false
+ 'archive' | false
+ end
+
+ with_them do
+ it { expect(described_class.ci_database?(database_name)).to eq(result) }
+ end
+ end
+
describe '.adapter_name' do
it 'returns the name of the adapter' do
expect(described_class.adapter_name).to be_an_instance_of(String)
@@ -414,6 +487,23 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.dbname' do
+ it 'returns the dbname for the connection' do
+ connection = ActiveRecord::Base.connection
+
+ expect(described_class.dbname(connection)).to be_a(String)
+ expect(described_class.dbname(connection)).to eq(connection.pool.db_config.database)
+ end
+
+ context 'when the pool is a NullPool' do
+ it 'returns unknown' do
+ connection = double(:active_record_connection, pool: ActiveRecord::ConnectionAdapters::NullPool.new)
+
+ expect(described_class.dbname(connection)).to eq('unknown')
+ end
+ end
+ end
+
describe '#true_value' do
it 'returns correct value' do
expect(described_class.true_value).to eq "'t'"
diff --git a/spec/lib/gitlab/deploy_key_access_spec.rb b/spec/lib/gitlab/deploy_key_access_spec.rb
index e186e993d8f..83b97c8ba25 100644
--- a/spec/lib/gitlab/deploy_key_access_spec.rb
+++ b/spec/lib/gitlab/deploy_key_access_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::DeployKeyAccess do
let_it_be(:user) { create(:user) }
let_it_be(:deploy_key) { create(:deploy_key, user: user) }
+
let(:project) { create(:project, :repository) }
let(:protected_branch) { create(:protected_branch, :no_one_can_push, project: project) }
diff --git a/spec/lib/gitlab/diff/file_collection/base_spec.rb b/spec/lib/gitlab/diff/file_collection/base_spec.rb
new file mode 100644
index 00000000000..00d3aa47301
--- /dev/null
+++ b/spec/lib/gitlab/diff/file_collection/base_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Diff::FileCollection::Base do
+ let(:merge_request) { create(:merge_request) }
+ let(:diffable) { merge_request.merge_request_diff }
+ let(:diff_options) { {} }
+
+ describe '#overflow?' do
+ subject(:overflown) { described_class.new(diffable, project: merge_request.project, diff_options: diff_options).overflow? }
+
+ context 'when it is not overflown' do
+ it 'returns false' do
+ expect(overflown).to eq(false)
+ end
+ end
+
+ context 'when it is overflown' do
+ let(:diff_options) { { max_files: 1 } }
+
+ it 'returns true' do
+ expect(overflown).to eq(true)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/file_collection/commit_spec.rb b/spec/lib/gitlab/diff/file_collection/commit_spec.rb
index 3d995b36b6f..cfb5f50edbe 100644
--- a/spec/lib/gitlab/diff/file_collection/commit_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/commit_spec.rb
@@ -75,4 +75,12 @@ RSpec.describe Gitlab::Diff::FileCollection::Commit do
]
end
end
+
+ describe '#cache_key' do
+ subject(:cache_key) { described_class.new(diffable, diff_options: nil).cache_key }
+
+ it 'returns with the commit id' do
+ expect(cache_key).to eq ['commit', diffable.id]
+ end
+ end
end
diff --git a/spec/lib/gitlab/diff/file_collection/compare_spec.rb b/spec/lib/gitlab/diff/file_collection/compare_spec.rb
index f3326f4f03d..ce70903a480 100644
--- a/spec/lib/gitlab/diff/file_collection/compare_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/compare_spec.rb
@@ -15,29 +15,20 @@ RSpec.describe Gitlab::Diff::FileCollection::Compare do
head_commit.id)
end
- it_behaves_like 'diff statistics' do
- let(:collection_default_args) do
- {
- project: diffable.project,
- diff_options: {},
- diff_refs: diffable.diff_refs
- }
- end
+ let(:diffable) { Compare.new(raw_compare, project) }
+ let(:collection_default_args) do
+ {
+ project: diffable.project,
+ diff_options: {},
+ diff_refs: diffable.diff_refs
+ }
+ end
- let(:diffable) { Compare.new(raw_compare, project) }
+ it_behaves_like 'diff statistics' do
let(:stub_path) { '.gitignore' }
end
it_behaves_like 'sortable diff files' do
- let(:diffable) { Compare.new(raw_compare, project) }
- let(:collection_default_args) do
- {
- project: diffable.project,
- diff_options: {},
- diff_refs: diffable.diff_refs
- }
- end
-
let(:unsorted_diff_files_paths) do
[
'.DS_Store',
@@ -66,4 +57,12 @@ RSpec.describe Gitlab::Diff::FileCollection::Compare do
]
end
end
+
+ describe '#cache_key' do
+ subject(:cache_key) { described_class.new(diffable, **collection_default_args).cache_key }
+
+ it 'returns with head and base' do
+ expect(cache_key).to eq ['compare', head_commit.id, start_commit.id]
+ end
+ end
end
diff --git a/spec/lib/gitlab/diff/file_collection/merge_request_diff_base_spec.rb b/spec/lib/gitlab/diff/file_collection/merge_request_diff_base_spec.rb
new file mode 100644
index 00000000000..51bee6d45e4
--- /dev/null
+++ b/spec/lib/gitlab/diff/file_collection/merge_request_diff_base_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBase do
+ let(:merge_request) { create(:merge_request) }
+ let(:diffable) { merge_request.merge_request_diff }
+
+ describe '#overflow?' do
+ subject(:overflown) { described_class.new(diffable, diff_options: nil).overflow? }
+
+ context 'when it is not overflown' do
+ it 'returns false' do
+ expect(overflown).to eq(false)
+ end
+ end
+
+ context 'when it is overflown' do
+ before do
+ diffable.update!(state: :overflow)
+ end
+
+ it 'returns true' do
+ expect(overflown).to eq(true)
+ end
+ end
+ end
+
+ describe '#cache_key' do
+ subject(:cache_key) { described_class.new(diffable, diff_options: nil).cache_key }
+
+ it 'returns cache_key from merge_request_diff' do
+ expect(cache_key).to eq diffable.cache_key
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb b/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
index 670c734ce08..beb85d383a0 100644
--- a/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
let(:merge_request) { create(:merge_request) }
- let(:batch_page) { 1 }
+ let(:batch_page) { 0 }
let(:batch_size) { 10 }
let(:diffable) { merge_request.merge_request_diff }
let(:diff_files_relation) { diffable.merge_request_diff_files }
@@ -18,19 +18,15 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
let(:diff_files) { subject.diff_files }
- before do
- stub_feature_flags(diffs_gradual_load: false)
- end
-
describe 'initialize' do
it 'memoizes pagination_data' do
- expect(subject.pagination_data).to eq(current_page: 1, next_page: 2, total_pages: 2)
+ expect(subject.pagination_data).to eq(total_pages: 20)
end
end
describe '#diff_files' do
let(:batch_size) { 3 }
- let(:paginated_rel) { diff_files_relation.page(batch_page).per(batch_size) }
+ let(:paginated_rel) { diff_files_relation.offset(batch_page).limit(batch_size) }
let(:expected_batch_files) do
paginated_rel.map(&:new_path)
@@ -51,7 +47,7 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
end
context 'another page' do
- let(:batch_page) { 2 }
+ let(:batch_page) { 1 }
it 'returns correct diff files' do
expect(diff_files.map(&:new_path)).to eq(expected_batch_files)
@@ -63,7 +59,7 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
it 'returns correct diff files' do
expected_batch_files =
- diff_files_relation.page(described_class::DEFAULT_BATCH_PAGE).per(batch_size).map(&:new_path)
+ diff_files_relation.offset(described_class::DEFAULT_BATCH_PAGE).limit(batch_size).map(&:new_path)
expect(diff_files.map(&:new_path)).to eq(expected_batch_files)
end
@@ -74,7 +70,7 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
it 'returns correct diff files' do
expected_batch_files =
- diff_files_relation.page(batch_page).per(described_class::DEFAULT_BATCH_SIZE).map(&:new_path)
+ diff_files_relation.offset(batch_page).limit(described_class::DEFAULT_BATCH_SIZE).map(&:new_path)
expect(diff_files.map(&:new_path)).to eq(expected_batch_files)
end
@@ -90,29 +86,17 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
context 'last page' do
it 'returns correct diff files' do
- last_page = paginated_rel.total_pages
+ last_page = diff_files_relation.count - batch_size
collection = described_class.new(diffable,
last_page,
batch_size,
diff_options: nil)
- expected_batch_files = diff_files_relation.page(last_page).per(batch_size).map(&:new_path)
+ expected_batch_files = diff_files_relation.offset(last_page).limit(batch_size).map(&:new_path)
expect(collection.diff_files.map(&:new_path)).to eq(expected_batch_files)
end
end
-
- context 'with diffs gradual load feature flag enabled' do
- let(:batch_page) { 0 }
-
- before do
- stub_feature_flags(diffs_gradual_load: true)
- end
-
- it 'returns correct diff files' do
- expect(subject.diffs.map(&:new_path)).to eq(diff_files_relation.page(1).per(batch_size).map(&:new_path))
- end
- end
end
it_behaves_like 'unfoldable diff' do
@@ -130,7 +114,7 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
end
let(:diffable) { merge_request.merge_request_diff }
- let(:batch_page) { 2 }
+ let(:batch_page) { 10 }
let(:stub_path) { '.gitignore' }
subject do
diff --git a/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb b/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb
index d9f384fb47f..bdeaabec1f1 100644
--- a/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb
+++ b/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Diff::PositionTracer::LineStrategy do
+RSpec.describe Gitlab::Diff::PositionTracer::LineStrategy, :clean_gitlab_redis_cache do
# Douwe's diary New York City, 2016-06-28
# --------------------------------------------------------------------------
#
@@ -288,6 +288,27 @@ RSpec.describe Gitlab::Diff::PositionTracer::LineStrategy do
new_line: old_position.new_line
)
end
+
+ context "when the position is multiline" do
+ let(:old_position) do
+ position(
+ new_path: file_name,
+ new_line: 2,
+ line_range: {
+ "start_line_code" => 1,
+ "end_line_code" => 2
+ }
+ )
+ end
+
+ it "returns the new position along with line_range" do
+ expect_new_position(
+ new_path: old_position.new_path,
+ new_line: old_position.new_line,
+ line_range: old_position.line_range
+ )
+ end
+ end
end
context "when the file's content was changed between the old and the new diff" do
@@ -547,6 +568,29 @@ RSpec.describe Gitlab::Diff::PositionTracer::LineStrategy do
new_line: 2
)
end
+
+ context "when the position is multiline" do
+ let(:old_position) do
+ position(
+ new_path: file_name,
+ new_line: 2,
+ line_range: {
+ "start_line_code" => 1,
+ "end_line_code" => 2
+ }
+ )
+ end
+
+ it "returns the new position but drops line_range information" do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name,
+ old_line: nil,
+ new_line: 2,
+ line_range: nil
+ )
+ end
+ end
end
context "when the file's content was changed between the old and the new diff" do
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 3a60564d8d2..e8470657181 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
let(:email_raw) { email_fixture('emails/service_desk.eml') }
let_it_be(:group) { create(:group, :private, name: "email") }
+
let(:expected_description) do
"Service desk stuff!\n\n```\na = b\n```\n\n`/label ~label1`\n`/assign @user1`\n`/close`\n![image](uploads/image.png)"
end
@@ -50,6 +51,15 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
it 'sends thank you email' do
expect { receiver.execute }.to have_enqueued_job.on_queue('mailers')
end
+
+ it 'adds metric events for incoming and reply emails' do
+ metric_transaction = double('Gitlab::Metrics::WebTransaction', increment: true, observe: true)
+ allow(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
+ expect(metric_transaction).to receive(:add_event).with(:receive_email_service_desk, { handler: 'Gitlab::Email::Handler::ServiceDeskHandler' })
+ expect(metric_transaction).to receive(:add_event).with(:service_desk_thank_you_email)
+
+ receiver.execute
+ end
end
context 'when everything is fine' do
@@ -169,6 +179,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
context 'when using service desk key' do
let_it_be(:service_desk_key) { 'mykey' }
+
let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml') }
let(:receiver) { Gitlab::Email::ServiceDeskReceiver.new(email_raw) }
@@ -200,6 +211,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
context 'when there are multiple projects with same key' do
let_it_be(:project_with_same_key) { create(:project, group: group, service_desk_enabled: true) }
+
let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml', slug: project_with_same_key.full_path_slug.to_s) }
before do
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index 2c1fe529a5d..b1a04f0592a 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -5,106 +5,125 @@ require 'spec_helper'
RSpec.describe Gitlab::Email::Receiver do
include_context :email_shared_context
- shared_examples 'correctly finds the mail key and adds metric event' do
- let(:metric_transaction) { double('Gitlab::Metrics::WebTransaction') }
+ let(:metric_transaction) { instance_double(Gitlab::Metrics::WebTransaction) }
- specify :aggregate_failures do
+ shared_examples 'successful receive' do
+ let_it_be(:project) { create(:project) }
+
+ let(:handler) { double(:handler, project: project, execute: true, metrics_event: nil, metrics_params: nil) }
+
+ it 'correctly finds the mail key' do
expect(Gitlab::Email::Handler).to receive(:for).with(an_instance_of(Mail::Message), 'gitlabhq/gitlabhq+auth_token').and_return(handler)
+
+ receiver.execute
+ end
+
+ it 'adds metric event' do
+ allow(receiver).to receive(:handler).and_return(handler)
+
expect(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
expect(metric_transaction).to receive(:add_event).with(handler.metrics_event, handler.metrics_params)
receiver.execute
end
+
+ it 'returns valid metadata' do
+ allow(receiver).to receive(:handler).and_return(handler)
+
+ metadata = receiver.mail_metadata
+
+ expect(metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to meta))
+ expect(metadata[:meta]).to include(client_id: 'email/jake@example.com', project: project.full_path)
+ expect(metadata[meta_key]).to eq(meta_value)
+ end
end
context 'when the email contains a valid email address in a header' do
- let(:handler) { double(:handler) }
- let(:metadata) { receiver.mail_metadata }
-
before do
- allow(handler).to receive(:execute)
- allow(handler).to receive(:metrics_params)
- allow(handler).to receive(:metrics_event)
-
stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.example.com")
-
- expect(receiver.mail_metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to))
end
context 'when in a Delivered-To header' do
let(:email_raw) { fixture_file('emails/forwarded_new_issue.eml') }
+ let(:meta_key) { :delivered_to }
+ let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com", "support@example.com"] }
- it_behaves_like 'correctly finds the mail key and adds metric event'
-
- it 'parses the metadata' do
- expect(metadata[:delivered_to]). to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com", "support@example.com"])
- end
+ it_behaves_like 'successful receive'
end
context 'when in an Envelope-To header' do
let(:email_raw) { fixture_file('emails/envelope_to_header.eml') }
+ let(:meta_key) { :envelope_to }
+ let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"] }
- it_behaves_like 'correctly finds the mail key and adds metric event'
-
- it 'parses the metadata' do
- expect(metadata[:envelope_to]). to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"])
- end
+ it_behaves_like 'successful receive'
end
context 'when in an X-Envelope-To header' do
let(:email_raw) { fixture_file('emails/x_envelope_to_header.eml') }
+ let(:meta_key) { :x_envelope_to }
+ let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"] }
- it_behaves_like 'correctly finds the mail key and adds metric event'
-
- it 'parses the metadata' do
- expect(metadata[:x_envelope_to]). to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"])
- end
+ it_behaves_like 'successful receive'
end
context 'when enclosed with angle brackets in an Envelope-To header' do
let(:email_raw) { fixture_file('emails/envelope_to_header_with_angle_brackets.eml') }
+ let(:meta_key) { :envelope_to }
+ let(:meta_value) { ["<incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com>"] }
- it_behaves_like 'correctly finds the mail key and adds metric event'
+ it_behaves_like 'successful receive'
end
end
- context "when we cannot find a capable handler" do
- let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub(mail_key, "!!!") }
+ shared_examples 'failed receive' do
+ it 'adds metric event' do
+ expect(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
+ expect(metric_transaction).to receive(:add_event).with('email_receiver_error', { error: expected_error.name })
- it "raises an UnknownIncomingEmail error" do
- expect { receiver.execute }.to raise_error(Gitlab::Email::UnknownIncomingEmail)
+ expect { receiver.execute }.to raise_error(expected_error)
end
end
- context "when the email is blank" do
- let(:email_raw) { "" }
+ context 'when we cannot find a capable handler' do
+ let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub(mail_key, '!!!') }
+ let(:expected_error) { Gitlab::Email::UnknownIncomingEmail }
- it "raises an EmptyEmailError" do
- expect { receiver.execute }.to raise_error(Gitlab::Email::EmptyEmailError)
- end
+ it_behaves_like 'failed receive'
end
- context "when the email was auto generated with Auto-Submitted header" do
- let(:email_raw) { fixture_file("emails/auto_submitted.eml") }
+ context 'when the email is blank' do
+ let(:email_raw) { '' }
+ let(:expected_error) { Gitlab::Email::EmptyEmailError }
- it "raises an AutoGeneratedEmailError" do
- expect { receiver.execute }.to raise_error(Gitlab::Email::AutoGeneratedEmailError)
- end
+ it_behaves_like 'failed receive'
end
- context "when the email was auto generated with X-Autoreply header" do
- let(:email_raw) { fixture_file("emails/auto_reply.eml") }
+ context 'when the email was auto generated with Auto-Submitted header' do
+ let(:email_raw) { fixture_file('emails/auto_submitted.eml') }
+ let(:expected_error) { Gitlab::Email::AutoGeneratedEmailError }
- it "raises an AutoGeneratedEmailError" do
- expect { receiver.execute }.to raise_error(Gitlab::Email::AutoGeneratedEmailError)
- end
+ it_behaves_like 'failed receive'
end
- it "requires all handlers to have a unique metric_event" do
+ context 'when the email was auto generated with X-Autoreply header' do
+ let(:email_raw) { fixture_file('emails/auto_reply.eml') }
+ let(:expected_error) { Gitlab::Email::AutoGeneratedEmailError }
+
+ it_behaves_like 'failed receive'
+ end
+
+ it 'requires all handlers to have a unique metric_event' do
events = Gitlab::Email::Handler.handlers.map do |handler|
handler.new(Mail::Message.new, 'gitlabhq/gitlabhq+auth_token').metrics_event
end
expect(events.uniq.count).to eq events.count
end
+
+ it 'requires all handlers to respond to #project' do
+ Gitlab::Email::Handler.load_handlers.each do |handler|
+ expect { handler.new(nil, nil).project }.not_to raise_error
+ end
+ end
end
diff --git a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
index 6076e525f06..9acc7fd04be 100644
--- a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
@@ -15,6 +15,18 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
let(:event) { Raven::Event.from_exception(exception, required_options.merge(data)) }
let(:result_hash) { described_class.call(event).to_hash }
+ let(:data) do
+ {
+ extra: {
+ caller: 'test'
+ },
+ fingerprint: [
+ 'GRPC::DeadlineExceeded',
+ '4:Deadline Exceeded. debug_error_string:{"created":"@1598938192.005782000","description":"Error received from peer unix:/home/git/gitalypraefect.socket","file":"src/core/lib/surface/call.cc","file_line":1055,"grpc_message":"Deadline Exceeded","grpc_status":4}'
+ ]
+ }
+ end
+
context 'when there is no GRPC exception' do
let(:exception) { RuntimeError.new }
let(:data) { { fingerprint: ['ArgumentError', 'Missing arguments'] } }
@@ -24,19 +36,47 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
end
end
- context 'when there is a GPRC exception with a debug string' do
+ context 'when there is a GRPC exception with a debug string' do
let(:exception) { GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}') }
- let(:data) do
- {
- extra: {
- caller: 'test'
- },
- fingerprint: [
- 'GRPC::DeadlineExceeded',
- '4:Deadline Exceeded. debug_error_string:{"created":"@1598938192.005782000","description":"Error received from peer unix:/home/git/gitalypraefect.socket","file":"src/core/lib/surface/call.cc","file_line":1055,"grpc_message":"Deadline Exceeded","grpc_status":4}'
- ]
- }
+ it 'removes the debug error string and stores it as an extra field' do
+ expect(result_hash[:fingerprint])
+ .to eq(['GRPC::DeadlineExceeded', '4:Deadline Exceeded.'])
+
+ expect(result_hash[:exception][:values].first)
+ .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+
+ expect(result_hash[:extra])
+ .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
+ end
+
+ context 'with no custom fingerprint' do
+ let(:data) do
+ { extra: { caller: 'test' } }
+ end
+
+ it 'removes the debug error string and stores it as an extra field' do
+ expect(result_hash).not_to include(:fingerprint)
+
+ expect(result_hash[:exception][:values].first)
+ .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+
+ expect(result_hash[:extra])
+ .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
+ end
+ end
+ end
+
+ context 'when there is a wrapped GRPC exception with a debug string' do
+ let(:inner_exception) { GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}') }
+ let(:exception) do
+ begin
+ raise inner_exception
+ rescue GRPC::DeadlineExceeded
+ raise StandardError.new, inner_exception.message
+ end
+ rescue StandardError => e
+ e
end
it 'removes the debug error string and stores it as an extra field' do
@@ -46,6 +86,9 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
expect(result_hash[:exception][:values].first)
.to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+ expect(result_hash[:exception][:values].second)
+ .to include(type: 'StandardError', value: '4:Deadline Exceeded.')
+
expect(result_hash[:extra])
.to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
end
@@ -61,6 +104,9 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
expect(result_hash[:exception][:values].first)
.to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+ expect(result_hash[:exception][:values].second)
+ .to include(type: 'StandardError', value: '4:Deadline Exceeded.')
+
expect(result_hash[:extra])
.to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 336bf20d59c..706bcdea291 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -869,6 +869,128 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
end
+ describe '#blobs' do
+ let_it_be(:commit_oid) { '4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6' }
+
+ shared_examples 'a blob enumeration' do
+ it 'enumerates blobs' do
+ blobs = repository.blobs(revisions).to_a
+
+ expect(blobs.size).to eq(expected_blobs)
+ blobs.each do |blob|
+ expect(blob.data).to be_empty
+ expect(blob.id.size).to be(40)
+ end
+ end
+ end
+
+ context 'single revision' do
+ let(:revisions) { [commit_oid] }
+ let(:expected_blobs) { 53 }
+
+ it_behaves_like 'a blob enumeration'
+ end
+
+ context 'multiple revisions' do
+ let(:revisions) { ["^#{commit_oid}~", commit_oid] }
+ let(:expected_blobs) { 1 }
+
+ it_behaves_like 'a blob enumeration'
+ end
+
+ context 'pseudo revisions' do
+ let(:revisions) { ['master', '--not', '--all'] }
+ let(:expected_blobs) { 0 }
+
+ it_behaves_like 'a blob enumeration'
+ end
+
+ context 'blank revisions' do
+ let(:revisions) { [::Gitlab::Git::BLANK_SHA] }
+ let(:expected_blobs) { 0 }
+
+ before do
+ expect_any_instance_of(Gitlab::GitalyClient::BlobService)
+ .not_to receive(:list_blobs)
+ end
+
+ it_behaves_like 'a blob enumeration'
+ end
+
+ context 'partially blank revisions' do
+ let(:revisions) { [::Gitlab::Git::BLANK_SHA, commit_oid] }
+ let(:expected_blobs) { 53 }
+
+ before do
+ expect_next_instance_of(Gitlab::GitalyClient::BlobService) do |service|
+ expect(service)
+ .to receive(:list_blobs)
+ .with([commit_oid], kind_of(Hash))
+ .and_call_original
+ end
+ end
+
+ it_behaves_like 'a blob enumeration'
+ end
+ end
+
+ describe '#new_commits' do
+ let(:repository) { mutable_repository }
+ let(:new_commit) do
+ author = { name: 'Test User', email: 'mail@example.com', time: Time.now }
+
+ Rugged::Commit.create(repository_rugged,
+ author: author,
+ committer: author,
+ message: "Message",
+ parents: [],
+ tree: "4b825dc642cb6eb9a060e54bf8d69288fbee4904")
+ end
+
+ let(:expected_commits) { 1 }
+ let(:revisions) { [new_commit] }
+
+ shared_examples 'an enumeration of new commits' do
+ it 'enumerates commits' do
+ commits = repository.new_commits(revisions).to_a
+
+ expect(commits.size).to eq(expected_commits)
+ commits.each do |commit|
+ expect(commit.id).to eq(new_commit)
+ expect(commit.message).to eq("Message")
+ end
+ end
+ end
+
+ context 'with list_commits disabled' do
+ before do
+ stub_feature_flags(list_commits: false)
+
+ expect_next_instance_of(Gitlab::GitalyClient::RefService) do |service|
+ expect(service)
+ .to receive(:list_new_commits)
+ .with(new_commit)
+ .and_call_original
+ end
+ end
+
+ it_behaves_like 'an enumeration of new commits'
+ end
+
+ context 'with list_commits enabled' do
+ before do
+ expect_next_instance_of(Gitlab::GitalyClient::CommitService) do |service|
+ expect(service)
+ .to receive(:list_commits)
+ .with([new_commit, '--not', '--all'])
+ .and_call_original
+ end
+ end
+
+ it_behaves_like 'an enumeration of new commits'
+ end
+ end
+
describe '#count_commits_between' do
subject { repository.count_commits_between('feature', 'master') }
diff --git a/spec/lib/gitlab/git/user_spec.rb b/spec/lib/gitlab/git/user_spec.rb
index 4414195ebf4..dfa68a7496c 100644
--- a/spec/lib/gitlab/git/user_spec.rb
+++ b/spec/lib/gitlab/git/user_spec.rb
@@ -7,15 +7,16 @@ RSpec.describe Gitlab::Git::User do
let(:name) { 'Jane Doé' }
let(:email) { 'janedoé@example.com' }
let(:gl_id) { 'user-123' }
+ let(:timezone) { 'Asia/Shanghai' }
let(:user) do
- described_class.new(username, name, email, gl_id)
+ described_class.new(username, name, email, gl_id, timezone)
end
- subject { described_class.new(username, name, email, gl_id) }
+ subject { described_class.new(username, name, email, gl_id, timezone) }
describe '.from_gitaly' do
let(:gitaly_user) do
- Gitaly::User.new(gl_username: username, name: name.b, email: email.b, gl_id: gl_id)
+ Gitaly::User.new(gl_username: username, name: name.b, email: email.b, gl_id: gl_id, timezone: timezone)
end
subject { described_class.from_gitaly(gitaly_user) }
@@ -25,34 +26,45 @@ RSpec.describe Gitlab::Git::User do
describe '.from_gitlab' do
context 'when no commit_email has been set' do
- let(:user) { build(:user, email: 'alice@example.com', commit_email: nil) }
+ let(:user) { build(:user, email: 'alice@example.com', commit_email: nil, timezone: timezone) }
subject { described_class.from_gitlab(user) }
- it { expect(subject).to eq(described_class.new(user.username, user.name, user.email, 'user-')) }
+ it { expect(subject).to eq(described_class.new(user.username, user.name, user.email, 'user-', timezone)) }
end
context 'when commit_email has been set' do
- let(:user) { build(:user, email: 'alice@example.com', commit_email: 'bob@example.com') }
+ let(:user) { build(:user, email: 'alice@example.com', commit_email: 'bob@example.com', timezone: timezone) }
subject { described_class.from_gitlab(user) }
- it { expect(subject).to eq(described_class.new(user.username, user.name, user.commit_email, 'user-')) }
+ it { expect(subject).to eq(described_class.new(user.username, user.name, user.commit_email, 'user-', timezone)) }
end
end
describe '#==' do
- def eq_other(username, name, email, gl_id)
- eq(described_class.new(username, name, email, gl_id))
+ def eq_other(username, name, email, gl_id, timezone)
+ eq(described_class.new(username, name, email, gl_id, timezone))
end
- it { expect(subject).to eq_other(username, name, email, gl_id) }
+ it { expect(subject).to eq_other(username, name, email, gl_id, timezone) }
- it { expect(subject).not_to eq_other(nil, nil, nil, nil) }
- it { expect(subject).not_to eq_other(username + 'x', name, email, gl_id) }
- it { expect(subject).not_to eq_other(username, name + 'x', email, gl_id) }
- it { expect(subject).not_to eq_other(username, name, email + 'x', gl_id) }
- it { expect(subject).not_to eq_other(username, name, email, gl_id + 'x') }
+ it { expect(subject).not_to eq_other(nil, nil, nil, nil, timezone) }
+ it { expect(subject).not_to eq_other(username + 'x', name, email, gl_id, timezone) }
+ it { expect(subject).not_to eq_other(username, name + 'x', email, gl_id, timezone) }
+ it { expect(subject).not_to eq_other(username, name, email + 'x', gl_id, timezone) }
+ it { expect(subject).not_to eq_other(username, name, email, gl_id + 'x', timezone) }
+ it { expect(subject).not_to eq_other(username, name, email, gl_id, 'Etc/UTC') }
+
+ context 'when add_timezone_to_web_operations is disabled' do
+ before do
+ stub_feature_flags(add_timezone_to_web_operations: false)
+ end
+
+ it 'ignores timezone arg and sets Etc/UTC by default' do
+ expect(user.timezone).to eq('Etc/UTC')
+ end
+ end
end
describe '#to_gitaly' do
@@ -69,6 +81,7 @@ RSpec.describe Gitlab::Git::User do
expect(subject.email).to be_a_binary_string
expect(subject.gl_id).to eq(gl_id)
+ expect(subject.timezone).to eq(timezone)
end
end
end
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index 777c94035d4..3b85e3ddd1d 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -140,8 +140,6 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
context 'when project is public but snippet feature is private' do
- let(:project) { create(:project, :public) }
-
before do
update_feature_access_level(project, :private)
end
@@ -151,7 +149,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
context 'when project is not accessible' do
- let(:project) { create(:project, :private) }
+ let_it_be(:project) { create(:project, :private) }
[:anonymous, :non_member].each do |membership|
context membership.to_s do
@@ -168,7 +166,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
context 'when project is archived' do
- let(:project) { create(:project, :public, :archived) }
+ let_it_be(:project) { create(:project, :public, :archived) }
[:anonymous, :non_member].each do |membership|
context membership.to_s do
@@ -214,7 +212,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
context 'when snippet feature is disabled' do
- let(:project) { create(:project, :public, :snippets_disabled) }
+ let_it_be(:project) { create(:project, :public, :snippets_disabled) }
[:anonymous, :non_member, :author, :admin].each do |membership|
context membership.to_s do
@@ -306,9 +304,9 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
describe 'repository size restrictions' do
- let(:snippet) { create(:personal_snippet, :public, :repository) }
- let(:actor) { snippet.author }
+ let_it_be(:snippet) { create(:personal_snippet, :public, :repository) }
+ let(:actor) { snippet.author }
let(:oldrev) { TestEnv::BRANCH_SHA["snippet/single-file"] }
let(:newrev) { TestEnv::BRANCH_SHA["snippet/edit-file"] }
let(:ref) { "refs/heads/snippet/edit-file" }
@@ -384,11 +382,12 @@ RSpec.describe Gitlab::GitAccessSnippet do
it_behaves_like 'a push to repository to make it over the limit'
end
- context 'when GIT_OBJECT_DIRECTORY_RELATIVE env var is not set' do
+ shared_examples_for 'a change with GIT_OBJECT_DIRECTORY_RELATIVE env var unset' do
let(:change_size) { 200 }
before do
- allow(snippet.repository).to receive(:new_blobs).and_return(
+ stub_feature_flags(git_access_batched_changes_size: batched)
+ allow(snippet.repository).to receive(expected_call).and_return(
[double(:blob, size: change_size)]
)
end
@@ -397,6 +396,20 @@ RSpec.describe Gitlab::GitAccessSnippet do
it_behaves_like 'a push to repository below the limit'
it_behaves_like 'a push to repository to make it over the limit'
end
+
+ context 'when batched computation is enabled' do
+ let(:batched) { true }
+ let(:expected_call) { :blobs }
+
+ it_behaves_like 'a change with GIT_OBJECT_DIRECTORY_RELATIVE env var unset'
+ end
+
+ context 'when batched computation is disabled' do
+ let(:batched) { false }
+ let(:expected_call) { :new_blobs }
+
+ it_behaves_like 'a change with GIT_OBJECT_DIRECTORY_RELATIVE env var unset'
+ end
end
describe 'HEAD realignment' do
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 3ee0310a9a2..bf682e4e4c6 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -265,7 +265,7 @@ RSpec.describe Gitlab::GitAccess do
it 'enqueues a redirected message for pushing' do
push_access_check
- expect(Gitlab::Checks::ProjectMoved.fetch_message(user.id, project.id)).not_to be_nil
+ expect(Gitlab::Checks::ContainerMoved.fetch_message(user, project.repository)).not_to be_nil
end
it 'allows push and pull access' do
@@ -435,7 +435,7 @@ RSpec.describe Gitlab::GitAccess do
it 'disallows users with expired password to pull' do
project.add_maintainer(user)
- user.update!(password_expires_at: 2.minutes.ago)
+ user.update!(password_expires_at: 2.minutes.ago, password_automatically_set: true)
expect { pull_access_check }.to raise_forbidden("Your password expired. Please access GitLab from a web browser to update your password.")
end
@@ -987,7 +987,7 @@ RSpec.describe Gitlab::GitAccess do
end
it 'disallows users with expired password to push' do
- user.update!(password_expires_at: 2.minutes.ago)
+ user.update!(password_expires_at: 2.minutes.ago, password_automatically_set: true)
expect { push_access_check }.to raise_forbidden("Your password expired. Please access GitLab from a web browser to update your password.")
end
diff --git a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
index f0ec58f3c2d..50078d8c127 100644
--- a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
@@ -88,4 +88,104 @@ RSpec.describe Gitlab::GitalyClient::BlobService do
subject
end
end
+
+ describe '#list_blobs' do
+ let(:limit) { 0 }
+ let(:bytes_limit) { 0 }
+ let(:expected_params) { { revisions: revisions, limit: limit, bytes_limit: bytes_limit } }
+
+ before do
+ ::Gitlab::GitalyClient.clear_stubs!
+ end
+
+ subject { client.list_blobs(revisions, limit: limit, bytes_limit: bytes_limit) }
+
+ context 'with a single revision' do
+ let(:revisions) { ['master'] }
+
+ it 'sends a list_blobs message' do
+ expect_next_instance_of(Gitaly::BlobService::Stub) do |service|
+ expect(service)
+ .to receive(:list_blobs)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return([])
+ end
+
+ subject
+ end
+ end
+
+ context 'with multiple revisions' do
+ let(:revisions) { ['master', '--not', '--all'] }
+
+ it 'sends a list_blobs message' do
+ expect_next_instance_of(Gitaly::BlobService::Stub) do |service|
+ expect(service)
+ .to receive(:list_blobs)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return([])
+ end
+
+ subject
+ end
+ end
+
+ context 'with multiple revisions and limits' do
+ let(:revisions) { ['master', '--not', '--all'] }
+ let(:limit) { 10 }
+ let(:bytes_limit) { 1024 }
+
+ it 'sends a list_blobs message' do
+ expect_next_instance_of(Gitaly::BlobService::Stub) do |service|
+ expect(service)
+ .to receive(:list_blobs)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return([])
+ end
+
+ subject
+ end
+ end
+
+ context 'with split contents' do
+ let(:revisions) { ['master'] }
+
+ it 'sends a list_blobs message', :aggregate_failures do
+ expect_next_instance_of(Gitaly::BlobService::Stub) do |service|
+ expect(service)
+ .to receive(:list_blobs)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return([
+ Gitaly::ListBlobsResponse.new(blobs: [
+ Gitaly::ListBlobsResponse::Blob.new(oid: "012345", size: 8, data: "0x01"),
+ Gitaly::ListBlobsResponse::Blob.new(data: "23")
+ ]),
+ Gitaly::ListBlobsResponse.new(blobs: [
+ Gitaly::ListBlobsResponse::Blob.new(data: "45"),
+ Gitaly::ListBlobsResponse::Blob.new(oid: "56", size: 4, data: "0x5"),
+ Gitaly::ListBlobsResponse::Blob.new(data: "6")
+ ]),
+ Gitaly::ListBlobsResponse.new(blobs: [
+ Gitaly::ListBlobsResponse::Blob.new(oid: "78", size: 4, data: "0x78")
+ ])
+ ])
+ end
+
+ blobs = subject.to_a
+ expect(blobs.size).to be(3)
+
+ expect(blobs[0].id).to eq('012345')
+ expect(blobs[0].size).to eq(8)
+ expect(blobs[0].data).to eq('0x012345')
+
+ expect(blobs[1].id).to eq('56')
+ expect(blobs[1].size).to eq(4)
+ expect(blobs[1].data).to eq('0x56')
+
+ expect(blobs[2].id).to eq('78')
+ expect(blobs[2].size).to eq(4)
+ expect(blobs[2].data).to eq('0x78')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index ac4c42d57ee..22c29403255 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -287,6 +287,39 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
end
end
+ describe '#list_commits' do
+ shared_examples 'a ListCommits request' do
+ before do
+ ::Gitlab::GitalyClient.clear_stubs!
+ end
+
+ it 'sends a list_commits message' do
+ expect_next_instance_of(Gitaly::CommitService::Stub) do |service|
+ expect(service)
+ .to receive(:list_commits)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return([])
+ end
+
+ client.list_commits(revisions)
+ end
+ end
+
+ context 'with a single revision' do
+ let(:revisions) { 'master' }
+ let(:expected_params) { %w[master] }
+
+ it_behaves_like 'a ListCommits request'
+ end
+
+ context 'with multiple revisions' do
+ let(:revisions) { %w[master --not --all] }
+ let(:expected_params) { %w[master --not --all] }
+
+ it_behaves_like 'a ListCommits request'
+ end
+ end
+
describe '#commit_stats' do
let(:request) do
Gitaly::CommitStatsRequest.new(
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 9a17140a1e0..3789bc76a94 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GitalyClient::OperationService do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository.raw }
let(:client) { described_class.new(repository) }
let(:gitaly_user) { Gitlab::Git::User.from_gitlab(user).to_gitaly }
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 56c8fe20eca..53805d67f9f 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -209,19 +209,6 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
end
end
- describe '#rebase_in_progress?' do
- let(:rebase_id) { 1 }
-
- it 'sends a repository_rebase_in_progress message' do
- expect_any_instance_of(Gitaly::RepositoryService::Stub)
- .to receive(:is_rebase_in_progress)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(in_progress: true))
-
- client.rebase_in_progress?(rebase_id)
- end
- end
-
describe '#squash_in_progress?' do
let(:squash_id) { 1 }
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
index 01d9edf0ba1..016f6e5377b 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
@@ -8,13 +8,14 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestMergedByImporter, :cle
let(:project) { merge_request.project }
let(:merged_at) { Time.new(2017, 1, 1, 12, 00).utc }
let(:client_double) { double(user: double(id: 999, login: 'merger', email: 'merger@email.com')) }
+ let(:merger_user) { double(id: 999, login: 'merger') }
let(:pull_request) do
instance_double(
Gitlab::GithubImport::Representation::PullRequest,
iid: merge_request.iid,
merged_at: merged_at,
- merged_by: double(id: 999, login: 'merger')
+ merged_by: merger_user
)
end
@@ -48,4 +49,23 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestMergedByImporter, :cle
expect(last_note.author).to eq(project.creator)
end
end
+
+ context 'when the merger user is not provided' do
+ let(:merger_user) { nil }
+
+ it 'adds a note referencing the merger user' do
+ expect { subject.execute }
+ .to change(Note, :count).by(1)
+ .and not_change(merge_request, :updated_at)
+
+ metrics = merge_request.metrics.reload
+ expect(metrics.merged_by).to be_nil
+ expect(metrics.merged_at).to eq(merged_at)
+
+ last_note = merge_request.notes.last
+ expect(last_note.note).to eq("*Merged by: ghost at 2017-01-01 12:00:00 UTC*")
+ expect(last_note.created_at).to eq(merged_at)
+ expect(last_note.author).to eq(project.creator)
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
index fa8b5e6ccf0..a6da40f47f1 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
@@ -167,6 +167,19 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
end
end
+ context 'when the submitted_at is not provided' do
+ let(:review) { create_review(type: 'APPROVED', note: '', submitted_at: nil) }
+
+ it 'creates a note for the review without the author information' do
+ expect { subject.execute }.to change(Note, :count).by(1)
+
+ last_note = merge_request.notes.last
+
+ expect(last_note.created_at)
+ .to be_within(1.second).of(merge_request.updated_at)
+ end
+ end
+
context 'when the review has a note text' do
context 'when the review is "APPROVED"' do
let(:review) { create_review(type: 'APPROVED') }
@@ -215,13 +228,15 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
end
end
- def create_review(type:, note: 'note', author: { id: 999, login: 'author' })
+ def create_review(type:, **extra)
Gitlab::GithubImport::Representation::PullRequestReview.from_json_hash(
- merge_request_id: merge_request.id,
- review_type: type,
- note: note,
- submitted_at: submitted_at.to_s,
- author: author
+ extra.reverse_merge(
+ author: { id: 999, login: 'author' },
+ merge_request_id: merge_request.id,
+ review_type: type,
+ note: 'note',
+ submitted_at: submitted_at.to_s
+ )
)
end
end
diff --git a/spec/lib/gitlab/github_import/markdown_text_spec.rb b/spec/lib/gitlab/github_import/markdown_text_spec.rb
index 22bf10f36d8..2d159580b5f 100644
--- a/spec/lib/gitlab/github_import/markdown_text_spec.rb
+++ b/spec/lib/gitlab/github_import/markdown_text_spec.rb
@@ -27,6 +27,13 @@ RSpec.describe Gitlab::GithubImport::MarkdownText do
expect(text.to_s).to eq('Hello')
end
+ it 'returns empty text when it receives nil' do
+ author = double(:author, login: nil)
+ text = described_class.new(nil, author, true)
+
+ expect(text.to_s).to eq('')
+ end
+
it 'returns the text with an extra header when the author was not found' do
author = double(:author, login: 'Alice')
text = described_class.new('Hello', author)
diff --git a/spec/lib/gitlab/github_import/object_counter_spec.rb b/spec/lib/gitlab/github_import/object_counter_spec.rb
new file mode 100644
index 00000000000..668c11667b5
--- /dev/null
+++ b/spec/lib/gitlab/github_import/object_counter_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache do
+ let_it_be(:project) { create(:project) }
+
+ it 'validates the operation being incremented' do
+ expect { described_class.increment(project, :issue, :unknown) }
+ .to raise_error(ArgumentError, 'Operation must be fetched or imported')
+ end
+
+ it 'increments the counter and saves the key to be listed in the summary later' do
+ expect(Gitlab::Metrics)
+ .to receive(:counter)
+ .twice
+ .with(:github_importer_fetched_issue, 'The number of fetched Github Issue')
+ .and_return(double(increment: true))
+
+ expect(Gitlab::Metrics)
+ .to receive(:counter)
+ .twice
+ .with(:github_importer_imported_issue, 'The number of imported Github Issue')
+ .and_return(double(increment: true))
+
+ described_class.increment(project, :issue, :fetched)
+ described_class.increment(project, :issue, :fetched)
+ described_class.increment(project, :issue, :imported)
+ described_class.increment(project, :issue, :imported)
+
+ expect(described_class.summary(project)).to eq({
+ 'fetched' => { 'issue' => 2 },
+ 'imported' => { 'issue' => 2 }
+ })
+ end
+end
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index 1e31cd2f007..d56d4708385 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -11,6 +11,10 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
Class
end
+ def object_type
+ :dummy
+ end
+
def collection_method
:issues
end
diff --git a/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb b/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb
index f9763455468..cad9b13774e 100644
--- a/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb
@@ -68,5 +68,11 @@ RSpec.describe Gitlab::GithubImport::Representation::PullRequestReview do
expect(review.author).to be_nil
end
+
+ it 'does not fail when submitted_at is blank' do
+ review = described_class.from_json_hash(hash.except('submitted_at'))
+
+ expect(review.submitted_at).to be_nil
+ end
end
end
diff --git a/spec/lib/gitlab/github_import_spec.rb b/spec/lib/gitlab/github_import_spec.rb
index 3129da64809..662757f66ad 100644
--- a/spec/lib/gitlab/github_import_spec.rb
+++ b/spec/lib/gitlab/github_import_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport do
context 'github.com' do
- let(:project) { double(:project, import_url: 'http://t0ken@github.com/user/repo.git') }
+ let(:project) { double(:project, import_url: 'http://t0ken@github.com/user/repo.git', id: 1) }
it 'returns a new Client with a custom token' do
expect(described_class::Client)
diff --git a/spec/lib/gitlab/gl_repository/repo_type_spec.rb b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
index 629e6c96858..71a4c693f9d 100644
--- a/spec/lib/gitlab/gl_repository/repo_type_spec.rb
+++ b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
@@ -5,6 +5,7 @@ RSpec.describe Gitlab::GlRepository::RepoType do
let_it_be(:project) { create(:project) }
let_it_be(:personal_snippet) { create(:personal_snippet, author: project.owner) }
let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.owner) }
+
let(:project_path) { project.repository.full_path }
let(:wiki_path) { project.wiki.repository.full_path }
let(:design_path) { project.design_repository.full_path }
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
new file mode 100644
index 00000000000..0047d24a215
--- /dev/null
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
@@ -0,0 +1,420 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/334973
+ # The spec will be merged with connection_spec.rb in the future.
+ let(:nodes) { Project.all.order(id: :asc) }
+ let(:arguments) { {} }
+ let(:query_type) { GraphQL::ObjectType.new }
+ let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
+ let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema), values: nil, object: nil) }
+
+ let_it_be(:column_order_id) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].asc) }
+ let_it_be(:column_order_id_desc) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].desc) }
+ let_it_be(:column_order_updated_at) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'updated_at', order_expression: Project.arel_table[:updated_at].asc) }
+ let_it_be(:column_order_created_at) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'created_at', order_expression: Project.arel_table[:created_at].asc) }
+ let_it_be(:column_order_last_repo) do
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'last_repository_check_at',
+ column_expression: Project.arel_table[:last_repository_check_at],
+ order_expression: Gitlab::Database.nulls_last_order('last_repository_check_at', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_last_order('last_repository_check_at', :desc),
+ order_direction: :asc,
+ nullable: :nulls_last,
+ distinct: false)
+ end
+
+ let_it_be(:column_order_last_repo_desc) do
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'last_repository_check_at',
+ column_expression: Project.arel_table[:last_repository_check_at],
+ order_expression: Gitlab::Database.nulls_last_order('last_repository_check_at', :desc),
+ reversed_order_expression: Gitlab::Database.nulls_last_order('last_repository_check_at', :asc),
+ order_direction: :desc,
+ nullable: :nulls_last,
+ distinct: false)
+ end
+
+ subject(:connection) do
+ described_class.new(nodes, **{ context: context, max_page_size: 3 }.merge(arguments))
+ end
+
+ def encoded_cursor(node)
+ described_class.new(nodes, context: context).cursor_for(node)
+ end
+
+ def decoded_cursor(cursor)
+ Gitlab::Json.parse(Base64Bp.urlsafe_decode64(cursor))
+ end
+
+ describe "With generic keyset order support" do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
+
+ it_behaves_like 'a connection with collection methods'
+
+ it_behaves_like 'a redactable connection' do
+ let_it_be(:projects) { create_list(:project, 2) }
+ let(:unwanted) { projects.second }
+ end
+
+ describe '#cursor_for' do
+ let(:project) { create(:project) }
+ let(:cursor) { connection.cursor_for(project) }
+
+ it 'returns an encoded ID' do
+ expect(decoded_cursor(cursor)).to eq('id' => project.id.to_s)
+ end
+
+ context 'when an order is specified' do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
+
+ it 'returns the encoded value of the order' do
+ expect(decoded_cursor(cursor)).to include('id' => project.id.to_s)
+ end
+ end
+
+ context 'when multiple orders are specified' do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_updated_at, column_order_created_at, column_order_id])) }
+
+ it 'returns the encoded value of the order' do
+ expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.strftime('%Y-%m-%d %H:%M:%S.%N %Z'))
+ end
+ end
+ end
+
+ describe '#sliced_nodes' do
+ let(:projects) { create_list(:project, 4) }
+
+ context 'when before is passed' do
+ let(:arguments) { { before: encoded_cursor(projects[1]) } }
+
+ it 'only returns the project before the selected one' do
+ expect(subject.sliced_nodes).to contain_exactly(projects.first)
+ end
+
+ context 'when the sort order is descending' do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id_desc])) }
+
+ it 'returns the correct nodes' do
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ end
+ end
+ end
+
+ context 'when after is passed' do
+ let(:arguments) { { after: encoded_cursor(projects[1]) } }
+
+ it 'only returns the project before the selected one' do
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ end
+
+ context 'when the sort order is descending' do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id_desc])) }
+
+ it 'returns the correct nodes' do
+ expect(subject.sliced_nodes).to contain_exactly(projects.first)
+ end
+ end
+ end
+
+ context 'when both before and after are passed' do
+ let(:arguments) do
+ {
+ after: encoded_cursor(projects[1]),
+ before: encoded_cursor(projects[3])
+ }
+ end
+
+ it 'returns the expected set' do
+ expect(subject.sliced_nodes).to contain_exactly(projects[2])
+ end
+ end
+
+ shared_examples 'nodes are in ascending order' do
+ context 'when no cursor is passed' do
+ let(:arguments) { {} }
+
+ it 'returns projects in ascending order' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes)
+ end
+ end
+
+ context 'when before cursor value is not NULL' do
+ let(:arguments) { { before: encoded_cursor(ascending_nodes[2]) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes.first(2))
+ end
+ end
+
+ context 'when after cursor value is not NULL' do
+ let(:arguments) { { after: encoded_cursor(ascending_nodes[1]) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes.last(3))
+ end
+ end
+
+ context 'when before and after cursor' do
+ let(:arguments) { { before: encoded_cursor(ascending_nodes.last), after: encoded_cursor(ascending_nodes.first) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes[1..3])
+ end
+ end
+ end
+
+ shared_examples 'nodes are in descending order' do
+ context 'when no cursor is passed' do
+ let(:arguments) { {} }
+
+ it 'only returns projects in descending order' do
+ expect(subject.sliced_nodes).to eq(descending_nodes)
+ end
+ end
+
+ context 'when before cursor value is not NULL' do
+ let(:arguments) { { before: encoded_cursor(descending_nodes[2]) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq(descending_nodes.first(2))
+ end
+ end
+
+ context 'when after cursor value is not NULL' do
+ let(:arguments) { { after: encoded_cursor(descending_nodes[1]) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq(descending_nodes.last(3))
+ end
+ end
+
+ context 'when before and after cursor' do
+ let(:arguments) { { before: encoded_cursor(descending_nodes.last), after: encoded_cursor(descending_nodes.first) } }
+
+      it 'returns all projects between the cursors' do
+ expect(subject.sliced_nodes).to eq(descending_nodes[1..3])
+ end
+ end
+ end
+
+ context 'when multiple orders with nil values are defined' do
+ let_it_be(:project1) { create(:project, last_repository_check_at: 10.days.ago) } # Asc: project5 Desc: project3
+ let_it_be(:project2) { create(:project, last_repository_check_at: nil) } # Asc: project1 Desc: project1
+ let_it_be(:project3) { create(:project, last_repository_check_at: 5.days.ago) } # Asc: project3 Desc: project5
+ let_it_be(:project4) { create(:project, last_repository_check_at: nil) } # Asc: project2 Desc: project2
+ let_it_be(:project5) { create(:project, last_repository_check_at: 20.days.ago) } # Asc: project4 Desc: project4
+
+ context 'when ascending' do
+ let_it_be(:order) { Gitlab::Pagination::Keyset::Order.build([column_order_last_repo, column_order_id]) }
+ let_it_be(:nodes) { Project.order(order) }
+ let_it_be(:ascending_nodes) { [project5, project1, project3, project2, project4] }
+
+ it_behaves_like 'nodes are in ascending order'
+
+ context 'when before cursor value is NULL' do
+ let(:arguments) { { before: encoded_cursor(project4) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq([project5, project1, project3, project2])
+ end
+ end
+
+ context 'when after cursor value is NULL' do
+ let(:arguments) { { after: encoded_cursor(project2) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq([project4])
+ end
+ end
+ end
+
+ context 'when descending' do
+ let_it_be(:order) { Gitlab::Pagination::Keyset::Order.build([column_order_last_repo_desc, column_order_id]) }
+ let_it_be(:nodes) { Project.order(order) }
+ let_it_be(:descending_nodes) { [project3, project1, project5, project2, project4] }
+
+ it_behaves_like 'nodes are in descending order'
+
+ context 'when before cursor value is NULL' do
+ let(:arguments) { { before: encoded_cursor(project4) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq([project3, project1, project5, project2])
+ end
+ end
+
+ context 'when after cursor value is NULL' do
+ let(:arguments) { { after: encoded_cursor(project2) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq([project4])
+ end
+ end
+ end
+ end
+
+ # rubocop: disable RSpec/EmptyExampleGroup
+ context 'when ordering uses LOWER' do
+ end
+ # rubocop: enable RSpec/EmptyExampleGroup
+
+ context 'when ordering by similarity' do
+ let_it_be(:project1) { create(:project, name: 'test') }
+ let_it_be(:project2) { create(:project, name: 'testing') }
+ let_it_be(:project3) { create(:project, name: 'tests') }
+ let_it_be(:project4) { create(:project, name: 'testing stuff') }
+ let_it_be(:project5) { create(:project, name: 'test') }
+
+ let_it_be(:nodes) do
+ # Note: sorted_by_similarity_desc scope internally supports the generic keyset order.
+ Project.sorted_by_similarity_desc('test', include_in_select: true)
+ end
+
+ let_it_be(:descending_nodes) { nodes.to_a }
+
+ it_behaves_like 'nodes are in descending order'
+ end
+
+ context 'when an invalid cursor is provided' do
+ let(:arguments) { { before: Base64Bp.urlsafe_encode64('invalidcursor', padding: false) } }
+
+ it 'raises an error' do
+ expect { subject.sliced_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+ end
+ end
+
+ describe '#nodes' do
+ let_it_be(:all_nodes) { create_list(:project, 5) }
+
+ let(:paged_nodes) { subject.nodes }
+
+ it_behaves_like 'connection with paged nodes' do
+ let(:paged_nodes_size) { 3 }
+ end
+
+ context 'when both are passed' do
+ let(:arguments) { { first: 2, last: 2 } }
+
+ it 'raises an error' do
+ expect { paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+ end
+
+ context 'when primary key is not in original order' do
+ let(:nodes) { Project.order(last_repository_check_at: :desc) }
+
+ it 'is added to end' do
+ sliced = subject.sliced_nodes
+
+ order_sql = sliced.order_values.last.to_sql
+
+ expect(order_sql).to end_with(Project.arel_table[:id].desc.to_sql)
+ end
+ end
+
+ context 'when there is no primary key' do
+ before do
+ stub_const('NoPrimaryKey', Class.new(ActiveRecord::Base))
+ NoPrimaryKey.class_eval do
+ self.table_name = 'no_primary_key'
+ self.primary_key = nil
+ end
+ end
+
+ let(:nodes) { NoPrimaryKey.all }
+
+ it 'raises an error' do
+ expect(NoPrimaryKey.primary_key).to be_nil
+ expect { subject.sliced_nodes }.to raise_error(ArgumentError, 'Relation must have a primary key')
+ end
+ end
+ end
+
+ describe '#has_previous_page and #has_next_page' do
+ # using a list of 5 items with a max_page of 3
+ let_it_be(:project_list) { create_list(:project, 5) }
+ let_it_be(:nodes) { Project.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
+
+ context 'when default query' do
+ let(:arguments) { {} }
+
+ it 'has no previous, but a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when before is first item' do
+ let(:arguments) { { before: encoded_cursor(project_list.first) } }
+
+ it 'has no previous, but a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ describe 'using `before`' do
+ context 'when before is the last item' do
+ let(:arguments) { { before: encoded_cursor(project_list.last) } }
+
+ it 'has no previous, but a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when before and last specified' do
+ let(:arguments) { { before: encoded_cursor(project_list.last), last: 2 } }
+
+ it 'has a previous and a next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when before and last does request all remaining nodes' do
+ let(:arguments) { { before: encoded_cursor(project_list[1]), last: 3 } }
+
+        it 'has no previous, but a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ expect(subject.nodes).to eq [project_list[0]]
+ end
+ end
+ end
+
+ describe 'using `after`' do
+ context 'when after is the first item' do
+ let(:arguments) { { after: encoded_cursor(project_list.first) } }
+
+ it 'has a previous, and a next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when after and first specified' do
+ let(:arguments) { { after: encoded_cursor(project_list.first), first: 2 } }
+
+ it 'has a previous and a next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+      context 'when after and last does request all remaining nodes' do
+ let(:arguments) { { after: encoded_cursor(project_list[2]), last: 3 } }
+
+ it 'has a previous but no next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_falsey
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index 03030728834..8ef5f1147c5 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -355,6 +355,10 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
context 'when primary key is not in original order' do
let(:nodes) { Project.order(last_repository_check_at: :desc) }
+ before do
+ stub_feature_flags(new_graphql_keyset_pagination: false)
+ end
+
it 'is added to end' do
sliced = subject.sliced_nodes
diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb
index 009f66d2108..ec96a069b8f 100644
--- a/spec/lib/gitlab/group_search_results_spec.rb
+++ b/spec/lib/gitlab/group_search_results_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Gitlab::GroupSearchResults do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, group: group) }
+
let(:filters) { {} }
let(:limit_projects) { Project.all }
let(:query) { 'gob' }
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index 1f06019c929..ccb3ae1018a 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -50,9 +50,16 @@ RSpec.describe Gitlab::Highlight do
let(:result) { described_class.highlight(file_name, content) } # content is 44 bytes
before do
+ stub_feature_flags(one_megabyte_file_size_limit: false)
stub_config(extra: { 'maximum_text_highlight_size_kilobytes' => 0.0001 } ) # 1.024 bytes
end
+  it 'confirms the file size limit is 1MB when `one_megabyte_file_size_limit` is enabled' do
+ stub_feature_flags(one_megabyte_file_size_limit: true)
+ expect(described_class.too_large?(1024.kilobytes)).to eq(false)
+ expect(described_class.too_large?(1025.kilobytes)).to eq(true)
+ end
+
it 'increments the metric for oversized files' do
expect { result }.to change { over_highlight_size_limit('file size: 0.0001') }.by(1)
end
diff --git a/spec/lib/gitlab/hook_data/issue_builder_spec.rb b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
index 8f976bcf09d..039b4c19522 100644
--- a/spec/lib/gitlab/hook_data/issue_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
@@ -48,6 +48,7 @@ RSpec.describe Gitlab::HookData::IssueBuilder do
expect(data).to include(:human_time_change)
expect(data).to include(:assignee_ids)
expect(data).to include(:state)
+ expect(data).to include(:severity)
expect(data).to include('labels' => [label.hook_attrs])
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 87a10b52b22..78805cea66a 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -54,6 +54,8 @@ issues:
- namespace
- note_authors
- issue_email_participants
+- test_reports
+- requirement
events:
- author
- project
@@ -196,6 +198,8 @@ merge_request_diff:
- merge_request_diff_files
merge_request_diff_commits:
- merge_request_diff
+- commit_author
+- committer
merge_request_diff_detail:
- merge_request_diff
merge_request_diff_files:
@@ -367,34 +371,34 @@ project:
- discord_integration
- drone_ci_integration
- emails_on_push_integration
-- pipelines_email_service
-- mattermost_slash_commands_service
-- slack_slash_commands_service
+- pipelines_email_integration
+- mattermost_slash_commands_integration
+- slack_slash_commands_integration
- irker_integration
-- packagist_service
-- pivotaltracker_service
-- prometheus_service
+- packagist_integration
+- pivotaltracker_integration
+- prometheus_integration
- flowdock_integration
- assembla_integration
- asana_integration
-- slack_service
-- microsoft_teams_service
-- mattermost_service
+- slack_integration
+- microsoft_teams_integration
+- mattermost_integration
- hangouts_chat_integration
-- unify_circuit_service
+- unify_circuit_integration
- buildkite_integration
- bamboo_integration
-- teamcity_service
-- pushover_service
-- jira_service
-- redmine_service
-- youtrack_service
+- teamcity_integration
+- pushover_integration
+- jira_integration
+- redmine_integration
+- youtrack_integration
- custom_issue_tracker_integration
- bugzilla_integration
- ewm_integration
- external_wiki_integration
-- mock_ci_service
-- mock_monitoring_service
+- mock_ci_integration
+- mock_monitoring_integration
- forked_to_members
- forked_from_project
- forks
@@ -480,12 +484,12 @@ project:
- kubernetes_namespaces
- error_tracking_setting
- metrics_setting
-- gitlab_slack_application_service
-- github_service
+- gitlab_slack_application_integration
+- github_integration
- protected_environments
- mirror_user
- push_rule
-- jenkins_service
+- jenkins_integration
- index_status
- feature_usage
- approval_rules
@@ -557,7 +561,7 @@ project:
- alert_management_alerts
- repository_storage_moves
- freeze_periods
-- webex_teams_service
+- webex_teams_integration
- build_report_results
- vulnerability_statistic
- vulnerability_historical_statistics
@@ -574,6 +578,7 @@ project:
- merge_request_metrics
- security_orchestration_policy_configuration
- timelogs
+- error_tracking_errors
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
index 7a9e7d8afba..9c6d2708607 100644
--- a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
+++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
@@ -109,14 +109,14 @@ RSpec.describe 'Test coverage of the Project Import' do
def failure_message(not_tested_relations)
<<~MSG
- These relations seem to be added recenty and
+ These relations seem to be added recently and
they expected to be covered in our Import specs: #{not_tested_relations}.
To do that, expand one of the files listed in `project_json_fixtures`
(or expand the list if you consider adding a new fixture file).
After that, add a new spec into
- `spec/lib/gitlab/import_export/project_tree_restorer_spec.rb`
+ `spec/lib/gitlab/import_export/project/tree_restorer_spec.rb`
to check that the relation is being imported correctly.
In case the spec breaks the master or there is a sense of urgency,
diff --git a/spec/lib/gitlab/import_export/project/object_builder_spec.rb b/spec/lib/gitlab/import_export/project/object_builder_spec.rb
index 20d882c82be..4c9f9f7c690 100644
--- a/spec/lib/gitlab/import_export/project/object_builder_spec.rb
+++ b/spec/lib/gitlab/import_export/project/object_builder_spec.rb
@@ -150,4 +150,30 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
expect(merge_request.persisted?).to be true
end
end
+
+ context 'merge request diff commit users' do
+ it 'finds the existing user' do
+ user = MergeRequest::DiffCommitUser
+ .find_or_create('Alice', 'alice@example.com')
+
+ found = described_class.build(
+ MergeRequest::DiffCommitUser,
+ 'name' => 'Alice',
+ 'email' => 'alice@example.com'
+ )
+
+ expect(found).to eq(user)
+ end
+
+ it 'creates a new user' do
+ found = described_class.build(
+ MergeRequest::DiffCommitUser,
+ 'name' => 'Alice',
+ 'email' => 'alice@example.com'
+ )
+
+ expect(found.name).to eq('Alice')
+ expect(found.email).to eq('alice@example.com')
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 1b5fba85020..82f465c4f9e 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -224,6 +224,27 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
expect(MergeRequestDiffCommit.count).to eq(77)
end
+ it 'assigns committer and author details to all diff commits' do
+ MergeRequestDiffCommit.all.each do |commit|
+ expect(commit.commit_author_id).not_to be_nil
+ expect(commit.committer_id).not_to be_nil
+ end
+ end
+
+ it 'assigns the correct commit users to different diff commits' do
+ commit1 = MergeRequestDiffCommit
+ .find_by(sha: '0b4bc9a49b562e85de7cc9e834518ea6828729b9')
+
+ commit2 = MergeRequestDiffCommit
+ .find_by(sha: 'a4e5dfebf42e34596526acb8611bc7ed80e4eb3f')
+
+ expect(commit1.commit_author.name).to eq('Dmitriy Zaporozhets')
+ expect(commit1.commit_author.email).to eq('dmitriy.zaporozhets@gmail.com')
+
+ expect(commit2.commit_author.name).to eq('James Lopez')
+ expect(commit2.commit_author.email).to eq('james@jameslopez.es')
+ end
+
it 'has the correct data for merge request latest_merge_request_diff' do
MergeRequest.find_each do |merge_request|
expect(merge_request.latest_merge_request_diff_id).to eq(merge_request.merge_request_diffs.maximum(:id))
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 2173bee6b4b..77d126e012e 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -235,6 +235,10 @@ MergeRequestDiffCommit:
- committer_email
- message
- trailers
+MergeRequest::DiffCommitUser:
+- id
+- name
+- email
MergeRequestDiffFile:
- merge_request_diff_id
- relative_order
@@ -645,6 +649,7 @@ Timelog:
- spent_at
- created_at
- updated_at
+- summary
ProjectAutoDevops:
- id
- enabled
diff --git a/spec/lib/gitlab/import_export/shared_spec.rb b/spec/lib/gitlab/import_export/shared_spec.rb
index feeb88397eb..1945156ca59 100644
--- a/spec/lib/gitlab/import_export/shared_spec.rb
+++ b/spec/lib/gitlab/import_export/shared_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::ImportExport::Shared do
describe '#export_path' do
it 'uses a random hash relative to project path' do
- expect(subject.export_path).to match(/#{base_path}\h{32}\/\h{32}/)
+ expect(subject.export_path).to match(%r{#{base_path}\h{32}/\h{32}})
end
it 'memoizes the path' do
@@ -44,7 +44,7 @@ RSpec.describe Gitlab::ImportExport::Shared do
subject = described_class.new(group)
base_path = %(/tmp/gitlab_exports/@groups/)
- expect(subject.base_path).to match(/#{base_path}\h{2}\/\h{2}\/\h{64}/)
+ expect(subject.base_path).to match(%r{#{base_path}\h{2}/\h{2}/\h{64}})
end
end
end
diff --git a/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
index fe934cadedd..c1661cf02b6 100644
--- a/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::ImportExport::SnippetRepoRestorer do
expect(restorer.restore).to be_truthy
end.to change { SnippetRepository.count }.by(1)
- blob = snippet.repository.blob_at('HEAD', snippet.file_name)
+ blob = snippet.repository.blob_at(snippet.default_branch, snippet.file_name)
expect(blob).not_to be_nil
expect(blob.data).to eq(snippet.content)
end
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index 28ae90d4947..48fcc9f93db 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -99,23 +99,6 @@ RSpec.describe Gitlab::InstrumentationHelper do
:mem_mallocs
)
end
-
- context 'when trace_memory_allocations is disabled' do
- before do
- stub_feature_flags(trace_memory_allocations: false)
- Gitlab::Memory::Instrumentation.ensure_feature_flag!
- end
-
- it 'does not log memory usage metrics' do
- subject
-
- expect(payload).not_to include(
- :mem_objects,
- :mem_bytes,
- :mem_mallocs
- )
- end
- end
end
context 'when load balancing is enabled' do
@@ -133,7 +116,37 @@ RSpec.describe Gitlab::InstrumentationHelper do
db_primary_count: 0,
db_primary_cached_count: 0,
db_primary_wal_count: 0,
- db_replica_wal_count: 0)
+ db_replica_wal_count: 0,
+ db_primary_wal_cached_count: 0,
+ db_replica_wal_cached_count: 0)
+ end
+
+ context 'when replica caught up search was made' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 2
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = 1
+ end
+
+ it 'includes related metrics' do
+ subject
+
+ expect(payload).to include(caught_up_replica_pick_ok: 2)
+ expect(payload).to include(caught_up_replica_pick_fail: 1)
+ end
+ end
+
+ context 'when only a single counter was updated' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 1
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = nil
+ end
+
+ it 'includes only that counter into logging' do
+ subject
+
+ expect(payload).to include(caught_up_replica_pick_ok: 1)
+ expect(payload).not_to include(:caught_up_replica_pick_fail)
+ end
end
end
@@ -150,7 +163,9 @@ RSpec.describe Gitlab::InstrumentationHelper do
db_primary_count: 0,
db_primary_cached_count: 0,
db_primary_wal_count: 0,
- db_replica_wal_count: 0)
+ db_replica_wal_count: 0,
+ db_primary_wal_cached_count: 0,
+ db_replica_wal_cached_count: 0)
end
end
end
diff --git a/spec/lib/gitlab/integrations/sti_type_spec.rb b/spec/lib/gitlab/integrations/sti_type_spec.rb
index 3154872ed04..70b93d6a4b5 100644
--- a/spec/lib/gitlab/integrations/sti_type_spec.rb
+++ b/spec/lib/gitlab/integrations/sti_type_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::Integrations::StiType do
context 'SQL SELECT' do
let(:expected_sql) do
<<~SQL.strip
- SELECT "services".* FROM "services" WHERE "services"."type" = 'AsanaService'
+ SELECT "integrations".* FROM "integrations" WHERE "integrations"."type" = 'AsanaService'
SQL
end
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Integrations::StiType do
context 'SQL CREATE' do
let(:expected_sql) do
<<~SQL.strip
- INSERT INTO "services" ("type") VALUES ('AsanaService')
+ INSERT INTO "integrations" ("type") VALUES ('AsanaService')
SQL
end
@@ -42,7 +42,7 @@ RSpec.describe Gitlab::Integrations::StiType do
context 'SQL UPDATE' do
let(:expected_sql) do
<<~SQL.strip
- UPDATE "services" SET "type" = 'AsanaService'
+ UPDATE "integrations" SET "type" = 'AsanaService'
SQL
end
@@ -61,7 +61,7 @@ RSpec.describe Gitlab::Integrations::StiType do
context 'SQL DELETE' do
let(:expected_sql) do
<<~SQL.strip
- DELETE FROM "services" WHERE "services"."type" = 'AsanaService'
+ DELETE FROM "integrations" WHERE "integrations"."type" = 'AsanaService'
SQL
end
diff --git a/spec/lib/gitlab/jira_import/base_importer_spec.rb b/spec/lib/gitlab/jira_import/base_importer_spec.rb
index 9d8143775f9..479551095de 100644
--- a/spec/lib/gitlab/jira_import/base_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/base_importer_spec.rb
@@ -9,10 +9,10 @@ RSpec.describe Gitlab::JiraImport::BaseImporter do
describe 'with any inheriting class' do
context 'when project validation is ok' do
- let!(:jira_service) { create(:jira_service, project: project) }
+ let!(:jira_integration) { create(:jira_integration, project: project) }
before do
- stub_jira_service_test
+ stub_jira_integration_test
allow(Gitlab::JiraImport).to receive(:validate_project_settings!)
end
diff --git a/spec/lib/gitlab/jira_import/issues_importer_spec.rb b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
index 4a32f0fd3a9..aead5405bd1 100644
--- a/spec/lib/gitlab/jira_import/issues_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
@@ -9,12 +9,12 @@ RSpec.describe Gitlab::JiraImport::IssuesImporter do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:jira_import) { create(:jira_import_state, project: project, user: current_user) }
- let_it_be(:jira_service) { create(:jira_service, project: project) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project) }
subject { described_class.new(project) }
before do
- stub_jira_service_test
+ stub_jira_integration_test
end
describe '#imported_items_cache_key' do
diff --git a/spec/lib/gitlab/jira_import/labels_importer_spec.rb b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
index db98a83cb3c..71440590815 100644
--- a/spec/lib/gitlab/jira_import/labels_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::JiraImport::LabelsImporter do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
- let_it_be(:jira_service) { create(:jira_service, project: project) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project) }
let(:importer) { described_class.new(project) }
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::JiraImport::LabelsImporter do
describe '#execute', :clean_gitlab_redis_cache do
before do
- stub_jira_service_test
+ stub_jira_integration_test
end
context 'when label is missing from jira import' do
diff --git a/spec/lib/gitlab/jira_import_spec.rb b/spec/lib/gitlab/jira_import_spec.rb
index 94fdff984d5..a7c73e79641 100644
--- a/spec/lib/gitlab/jira_import_spec.rb
+++ b/spec/lib/gitlab/jira_import_spec.rb
@@ -31,12 +31,12 @@ RSpec.describe Gitlab::JiraImport do
end
end
- context 'when Jira service was not setup' do
+ context 'when Jira integration was not setup' do
it_behaves_like 'raise Jira import error', 'Jira integration not configured.'
end
- context 'when Jira service exists' do
- let!(:jira_service) { create(:jira_service, project: project, active: true) }
+ context 'when Jira integration exists' do
+ let!(:jira_integration) { create(:jira_integration, project: project, active: true) }
context 'when Jira connection is not valid' do
before do
@@ -50,14 +50,14 @@ RSpec.describe Gitlab::JiraImport do
end
before do
- stub_jira_service_test
+ stub_jira_integration_test
end
context 'without user param' do
it_behaves_like 'jira configuration base checks'
context 'when jira connection is valid' do
- let!(:jira_service) { create(:jira_service, project: project, active: true) }
+ let!(:jira_integration) { create(:jira_integration, project: project, active: true) }
it 'does not return any error' do
expect { subject }.not_to raise_error
@@ -77,8 +77,8 @@ RSpec.describe Gitlab::JiraImport do
it_behaves_like 'jira configuration base checks'
- context 'when jira service is configured' do
- let!(:jira_service) { create(:jira_service, project: project, active: true) }
+ context 'when jira integration is configured' do
+ let!(:jira_integration) { create(:jira_integration, project: project, active: true) }
context 'when issues feature is disabled' do
let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
@@ -96,7 +96,7 @@ RSpec.describe Gitlab::JiraImport do
context 'when user does not have permissions to run the import' do
before do
- create(:jira_service, project: project, active: true)
+ create(:jira_integration, project: project, active: true)
project.add_developer(user)
end
diff --git a/spec/lib/gitlab/json_cache_spec.rb b/spec/lib/gitlab/json_cache_spec.rb
index 563b3d35823..8265c3449bb 100644
--- a/spec/lib/gitlab/json_cache_spec.rb
+++ b/spec/lib/gitlab/json_cache_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::JsonCache do
let_it_be(:broadcast_message) { create(:broadcast_message) }
+
let(:backend) { double('backend').as_null_object }
let(:namespace) { 'geo' }
let(:key) { 'foo' }
diff --git a/spec/lib/gitlab/kas/client_spec.rb b/spec/lib/gitlab/kas/client_spec.rb
index 7bf2d30ca48..40e18f58ee4 100644
--- a/spec/lib/gitlab/kas/client_spec.rb
+++ b/spec/lib/gitlab/kas/client_spec.rb
@@ -30,10 +30,11 @@ RSpec.describe Gitlab::Kas::Client do
describe 'gRPC calls' do
let(:token) { instance_double(JSONWebToken::HMACToken, encoded: 'test-token') }
+ let(:kas_url) { 'grpc://example.kas.internal' }
before do
allow(Gitlab::Kas).to receive(:enabled?).and_return(true)
- allow(Gitlab::Kas).to receive(:internal_url).and_return('grpc://example.kas.internal')
+ allow(Gitlab::Kas).to receive(:internal_url).and_return(kas_url)
expect(JSONWebToken::HMACToken).to receive(:new)
.with(Gitlab::Kas.secret)
@@ -80,5 +81,21 @@ RSpec.describe Gitlab::Kas::Client do
it { expect(subject).to eq(agent_configurations) }
end
+
+ describe 'with grpcs' do
+ let(:stub) { instance_double(Gitlab::Agent::ConfigurationProject::Rpc::ConfigurationProject::Stub) }
+ let(:kas_url) { 'grpcs://example.kas.internal' }
+
+ it 'uses a ChannelCredentials object' do
+ expect(Gitlab::Agent::ConfigurationProject::Rpc::ConfigurationProject::Stub).to receive(:new)
+ .with('example.kas.internal', instance_of(GRPC::Core::ChannelCredentials), timeout: described_class::TIMEOUT)
+ .and_return(stub)
+
+ allow(stub).to receive(:list_agent_config_files)
+ .and_return(double(config_files: []))
+
+ described_class.new.list_agent_config_files(project: project)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/kas_spec.rb b/spec/lib/gitlab/kas_spec.rb
index c9d40f785b8..24d2b03fe2a 100644
--- a/spec/lib/gitlab/kas_spec.rb
+++ b/spec/lib/gitlab/kas_spec.rb
@@ -104,48 +104,4 @@ RSpec.describe Gitlab::Kas do
end
end
end
-
- describe '.included_in_gitlab_com_rollout?' do
- let_it_be(:project) { create(:project) }
-
- context 'not GitLab.com' do
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- it 'returns true' do
- expect(described_class.included_in_gitlab_com_rollout?(project)).to be_truthy
- end
- end
-
- context 'GitLab.com' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- context 'kubernetes_agent_on_gitlab_com feature flag disabled' do
- before do
- stub_feature_flags(kubernetes_agent_on_gitlab_com: false)
- end
-
- it 'returns false' do
- expect(described_class.included_in_gitlab_com_rollout?(project)).to be_falsey
- end
- end
-
- context 'kubernetes_agent_on_gitlab_com feature flag enabled' do
- before do
- stub_feature_flags(kubernetes_agent_on_gitlab_com: project)
- end
-
- it 'returns true' do
- expect(described_class.included_in_gitlab_com_rollout?(project)).to be_truthy
- end
-
- it 'returns false for another project' do
- expect(described_class.included_in_gitlab_com_rollout?(create(:project))).to be_falsey
- end
- end
- end
- end
end
diff --git a/spec/lib/gitlab/kroki_spec.rb b/spec/lib/gitlab/kroki_spec.rb
index 31d3edd158b..7d29d018ff1 100644
--- a/spec/lib/gitlab/kroki_spec.rb
+++ b/spec/lib/gitlab/kroki_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Kroki do
describe '.formats' do
def default_formats
- %w[bytefield c4plantuml ditaa erd graphviz nomnoml plantuml svgbob umlet vega vegalite wavedrom].freeze
+ %w[bytefield c4plantuml ditaa erd graphviz nomnoml pikchr plantuml svgbob umlet vega vegalite wavedrom].freeze
end
subject { described_class.formats(Gitlab::CurrentSettings) }
diff --git a/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb b/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
index 0092c69d0bb..ec1f46100a4 100644
--- a/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
+++ b/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
@@ -206,6 +206,14 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
it { is_expected.to be_nil }
end
+
+ context 'with environment_ids' do
+ subject { Gitlab::Kubernetes::CiliumNetworkPolicy.from_resource(resource, [1, 2, 3]) }
+
+ it 'includes environment_ids in as_json result' do
+ expect(subject.as_json).to include(environment_ids: [1, 2, 3])
+ end
+ end
end
describe '#resource' do
diff --git a/spec/lib/gitlab/kubernetes/network_policy_spec.rb b/spec/lib/gitlab/kubernetes/network_policy_spec.rb
index d3640c61d94..2cba37a1302 100644
--- a/spec/lib/gitlab/kubernetes/network_policy_spec.rb
+++ b/spec/lib/gitlab/kubernetes/network_policy_spec.rb
@@ -196,6 +196,14 @@ RSpec.describe Gitlab::Kubernetes::NetworkPolicy do
it { is_expected.to be_nil }
end
+
+ context 'with environment_ids' do
+ subject { Gitlab::Kubernetes::NetworkPolicy.from_resource(resource, [1, 2, 3]) }
+
+ it 'includes environment_ids in as_json result' do
+ expect(subject.as_json).to include(environment_ids: [1, 2, 3])
+ end
+ end
end
describe '#resource' do
diff --git a/spec/lib/gitlab/language_detection_spec.rb b/spec/lib/gitlab/language_detection_spec.rb
index 14523be8ec6..9430ecf7baf 100644
--- a/spec/lib/gitlab/language_detection_spec.rb
+++ b/spec/lib/gitlab/language_detection_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::LanguageDetection do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:ruby) { create(:programming_language, name: 'Ruby') }
let_it_be(:haskell) { create(:programming_language, name: 'Haskell') }
+
let(:repository) { project.repository }
let(:detection) do
[{ value: 66.63, label: "Ruby", color: "#701516", highlight: "#701516" },
diff --git a/spec/lib/gitlab/lfs_token_spec.rb b/spec/lib/gitlab/lfs_token_spec.rb
index 4b40e8960b2..a8472062f03 100644
--- a/spec/lib/gitlab/lfs_token_spec.rb
+++ b/spec/lib/gitlab/lfs_token_spec.rb
@@ -126,7 +126,7 @@ RSpec.describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
end
context 'when the user password is expired' do
- let(:actor) { create(:user, password_expires_at: 1.minute.ago) }
+ let(:actor) { create(:user, password_expires_at: 1.minute.ago, password_automatically_set: true) }
it 'returns false' do
expect(lfs_token.token_valid?(lfs_token.token)).to be false
diff --git a/spec/lib/gitlab/memory/instrumentation_spec.rb b/spec/lib/gitlab/memory/instrumentation_spec.rb
index 0dbe9a8e275..069c45da18a 100644
--- a/spec/lib/gitlab/memory/instrumentation_spec.rb
+++ b/spec/lib/gitlab/memory/instrumentation_spec.rb
@@ -18,24 +18,8 @@ RSpec.describe Gitlab::Memory::Instrumentation do
describe '.start_thread_memory_allocations' do
subject { described_class.start_thread_memory_allocations }
- context 'when feature flag trace_memory_allocations is enabled' do
- before do
- stub_feature_flags(trace_memory_allocations: true)
- end
-
- it 'a hash is returned' do
- is_expected.not_to be_empty
- end
- end
-
- context 'when feature flag trace_memory_allocations is disabled' do
- before do
- stub_feature_flags(trace_memory_allocations: false)
- end
-
- it 'a nil is returned' do
- is_expected.to be_nil
- end
+ it 'a hash is returned' do
+ is_expected.to be_a(Hash)
end
context 'when feature is unavailable' do
@@ -63,30 +47,14 @@ RSpec.describe Gitlab::Memory::Instrumentation do
expect(described_class).to receive(:measure_thread_memory_allocations).and_call_original
end
- context 'when feature flag trace_memory_allocations is enabled' do
- before do
- stub_feature_flags(trace_memory_allocations: true)
- end
-
- it 'a hash is returned' do
- result = subject
- expect(result).to include(
- mem_objects: be > 1000,
- mem_mallocs: be > 1000,
- mem_bytes: be > 100_000, # 100 items * 100 bytes each
- mem_total_bytes: eq(result[:mem_bytes] + 40 * result[:mem_objects])
- )
- end
- end
-
- context 'when feature flag trace_memory_allocations is disabled' do
- before do
- stub_feature_flags(trace_memory_allocations: false)
- end
-
- it 'a nil is returned' do
- is_expected.to be_nil
- end
+ it 'a hash is returned' do
+ result = subject
+ expect(result).to include(
+ mem_objects: be > 1000,
+ mem_mallocs: be > 1000,
+ mem_bytes: be > 100_000, # 100 items * 100 bytes each
+ mem_total_bytes: eq(result[:mem_bytes] + 40 * result[:mem_objects])
+ )
end
context 'when feature is unavailable' do
diff --git a/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb b/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb
index 153cf43be0a..0516091a8ec 100644
--- a/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Metrics::Subscribers::ActionCable, :request_store do
let(:subscriber) { described_class.new }
let(:counter) { double(:counter) }
- let(:data) { { data: { event: 'updated' } } }
+ let(:data) { { 'result' => { 'data' => { 'event' => 'updated' } } } }
let(:channel_class) { 'IssuesChannel' }
let(:event) do
double(
@@ -35,6 +35,17 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActionCable, :request_store do
subscriber.transmit(event)
end
+
+ it 'tracks size of payload as JSON' do
+ allow(::Gitlab::Metrics).to receive(:histogram).with(
+ :action_cable_transmitted_bytes, /transmit/
+ ).and_return(counter)
+ message_size = ::ActiveSupport::JSON.encode(data).bytesize
+
+ expect(counter).to receive(:observe).with({ channel: channel_class, operation: 'event' }, message_size)
+
+ subscriber.transmit(event)
+ end
end
describe '#broadcast' do
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index cffa62c3a52..6fc8f090431 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
let(:env) { {} }
let(:subscriber) { described_class.new }
- let(:connection) { double(:connection) }
+ let(:connection) { ActiveRecord::Base.connection }
describe '#transaction' do
let(:web_transaction) { double('Gitlab::Metrics::WebTransaction') }
@@ -183,6 +183,8 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
'SQL' | 'UPDATE users SET admin = true WHERE id = 10' | true | true | false | false
'SQL' | 'SELECT pg_current_wal_insert_lsn()::text AS location' | true | false | false | true
'SQL' | 'SELECT pg_last_wal_replay_lsn()::text AS location' | true | false | false | true
+ 'CACHE' | 'SELECT pg_current_wal_insert_lsn()::text AS location' | true | false | true | true
+ 'CACHE' | 'SELECT pg_last_wal_replay_lsn()::text AS location' | true | false | true | true
'CACHE' | 'SELECT * FROM users WHERE id = 10' | true | false | true | false
'SCHEMA' | "SELECT attr.attname FROM pg_attribute attr INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey) WHERE cons.contype = 'p' AND cons.conrelid = '\"projects\"'::regclass" | false | false | false | false
nil | 'BEGIN' | false | false | false | false
diff --git a/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb b/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb
new file mode 100644
index 00000000000..21a6573c6fd
--- /dev/null
+++ b/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Subscribers::LoadBalancing, :request_store do
+ let(:subscriber) { described_class.new }
+
+ before do
+ allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
+ end
+
+ describe '#caught_up_replica_pick' do
+ shared_examples 'having payload result value' do |result, counter_name|
+ subject { subscriber.caught_up_replica_pick(event) }
+
+ let(:payload) { { result: result } }
+
+ let(:event) do
+ double(
+ :event,
+ name: 'load_balancing.caught_up_replica_pick',
+ payload: payload
+ )
+ end
+
+ it 'stores per-request caught up replica search result' do
+ subject
+
+ expect(Gitlab::SafeRequestStore[counter_name]).to eq(1)
+ end
+ end
+
+ it_behaves_like 'having payload result value', true, :caught_up_replica_pick_ok
+ it_behaves_like 'having payload result value', false, :caught_up_replica_pick_fail
+ end
+
+ describe "#web_transaction_completed" do
+ subject { subscriber.web_transaction_completed(event) }
+
+ let(:event) do
+ double(
+ :event,
+ name: 'load_balancing.web_transaction_completed',
+ payload: {}
+ )
+ end
+
+ let(:web_transaction) { double('Gitlab::Metrics::WebTransaction') }
+
+ before do
+ allow(::Gitlab::Metrics::WebTransaction).to receive(:current)
+ .and_return(web_transaction)
+ end
+
+ context 'when no data in request store' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick] = nil
+ end
+
+ it 'does not change the counters' do
+ expect(web_transaction).not_to receive(:increment)
+ end
+ end
+
+ context 'when request store was updated' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 2
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = 1
+ end
+
+ it 'increments :caught_up_replica_pick count with proper label' do
+ expect(web_transaction).to receive(:increment).with(:gitlab_transaction_caught_up_replica_pick_count_total, 2, { result: true })
+ expect(web_transaction).to receive(:increment).with(:gitlab_transaction_caught_up_replica_pick_count_total, 1, { result: false })
+
+ subject
+ end
+ end
+ end
+
+ describe '.load_balancing_payload' do
+ subject { described_class.load_balancing_payload }
+
+ context 'when no data in request store' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = nil
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = nil
+ end
+
+ it 'returns empty hash' do
+ expect(subject).to eq({})
+ end
+ end
+
+ context 'when request store was updated for a single counter' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 2
+ end
+
+ it 'returns proper payload with only that counter' do
+ expect(subject).to eq({ caught_up_replica_pick_ok: 2 })
+ end
+ end
+
+ context 'when both counters were updated' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 2
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = 1
+ end
+
+ it 'return proper payload' do
+ expect(subject).to eq({ caught_up_replica_pick_ok: 2, caught_up_replica_pick_fail: 1 })
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/object_hierarchy_spec.rb b/spec/lib/gitlab/object_hierarchy_spec.rb
index 7615b37521a..64161fbafdd 100644
--- a/spec/lib/gitlab/object_hierarchy_spec.rb
+++ b/spec/lib/gitlab/object_hierarchy_spec.rb
@@ -9,265 +9,178 @@ RSpec.describe Gitlab::ObjectHierarchy do
let(:options) { {} }
- shared_context 'Gitlab::ObjectHierarchy test cases' do
- describe '#base_and_ancestors' do
- let(:relation) do
- described_class.new(Group.where(id: child2.id), options: options).base_and_ancestors
- end
-
- it 'includes the base rows' do
- expect(relation).to include(child2)
- end
-
- it 'includes all of the ancestors' do
- expect(relation).to include(parent, child1)
- end
-
- it 'can find ancestors upto a certain level' do
- relation = described_class.new(Group.where(id: child2), options: options).base_and_ancestors(upto: child1)
-
- expect(relation).to contain_exactly(child2)
- end
-
- it 'uses ancestors_base #initialize argument' do
- relation = described_class.new(Group.where(id: child2.id), Group.none, options: options).base_and_ancestors
+ describe '#base_and_ancestors' do
+ let(:relation) do
+ described_class.new(Group.where(id: child2.id), options: options).base_and_ancestors
+ end
- expect(relation).to include(parent, child1, child2)
- end
+ it 'includes the base rows' do
+ expect(relation).to include(child2)
+ end
- it 'does not allow the use of #update_all' do
- expect { relation.update_all(share_with_group_lock: false) }
- .to raise_error(ActiveRecord::ReadOnlyRecord)
- end
+ it 'includes all of the ancestors' do
+ expect(relation).to include(parent, child1)
+ end
- describe 'hierarchy_order option' do
- let(:relation) do
- described_class.new(Group.where(id: child2.id), options: options).base_and_ancestors(hierarchy_order: hierarchy_order)
- end
+ it 'can find ancestors upto a certain level' do
+ relation = described_class.new(Group.where(id: child2), options: options).base_and_ancestors(upto: child1)
- context ':asc' do
- let(:hierarchy_order) { :asc }
+ expect(relation).to contain_exactly(child2)
+ end
- it 'orders by child to parent' do
- expect(relation).to eq([child2, child1, parent])
- end
- end
+ it 'uses ancestors_base #initialize argument' do
+ relation = described_class.new(Group.where(id: child2.id), Group.none, options: options).base_and_ancestors
- context ':desc' do
- let(:hierarchy_order) { :desc }
+ expect(relation).to include(parent, child1, child2)
+ end
- it 'orders by parent to child' do
- expect(relation).to eq([parent, child1, child2])
- end
- end
- end
+ it 'does not allow the use of #update_all' do
+ expect { relation.update_all(share_with_group_lock: false) }
+ .to raise_error(ActiveRecord::ReadOnlyRecord)
end
- describe '#base_and_descendants' do
+ describe 'hierarchy_order option' do
let(:relation) do
- described_class.new(Group.where(id: parent.id), options: options).base_and_descendants
- end
-
- it 'includes the base rows' do
- expect(relation).to include(parent)
- end
-
- it 'includes all the descendants' do
- expect(relation).to include(child1, child2)
+ described_class.new(Group.where(id: child2.id), options: options).base_and_ancestors(hierarchy_order: hierarchy_order)
end
- it 'uses descendants_base #initialize argument' do
- relation = described_class.new(Group.none, Group.where(id: parent.id), options: options).base_and_descendants
+ context ':asc' do
+ let(:hierarchy_order) { :asc }
- expect(relation).to include(parent, child1, child2)
- end
-
- it 'does not allow the use of #update_all' do
- expect { relation.update_all(share_with_group_lock: false) }
- .to raise_error(ActiveRecord::ReadOnlyRecord)
- end
-
- context 'when with_depth is true' do
- let(:relation) do
- described_class.new(Group.where(id: parent.id), options: options).base_and_descendants(with_depth: true)
+ it 'orders by child to parent' do
+ expect(relation).to eq([child2, child1, parent])
end
+ end
- it 'includes depth in the results' do
- object_depths = {
- parent.id => 1,
- child1.id => 2,
- child2.id => 3
- }
+ context ':desc' do
+ let(:hierarchy_order) { :desc }
- relation.each do |object|
- expect(object.depth).to eq(object_depths[object.id])
- end
+ it 'orders by parent to child' do
+ expect(relation).to eq([parent, child1, child2])
end
end
end
+ end
- describe '#descendants' do
- it 'includes only the descendants' do
- relation = described_class.new(Group.where(id: parent), options: options).descendants
-
- expect(relation).to contain_exactly(child1, child2)
- end
+ describe '#base_and_descendants' do
+ let(:relation) do
+ described_class.new(Group.where(id: parent.id), options: options).base_and_descendants
end
- describe '#max_descendants_depth' do
- subject { described_class.new(base_relation, options: options).max_descendants_depth }
-
- context 'when base relation is empty' do
- let(:base_relation) { Group.where(id: nil) }
-
- it { expect(subject).to be_nil }
- end
-
- context 'when base has no children' do
- let(:base_relation) { Group.where(id: child2) }
-
- it { expect(subject).to eq(1) }
- end
-
- context 'when base has grandchildren' do
- let(:base_relation) { Group.where(id: parent) }
-
- it { expect(subject).to eq(3) }
- end
+ it 'includes the base rows' do
+ expect(relation).to include(parent)
end
- describe '#ancestors' do
- it 'includes only the ancestors' do
- relation = described_class.new(Group.where(id: child2), options: options).ancestors
+ it 'includes all the descendants' do
+ expect(relation).to include(child1, child2)
+ end
- expect(relation).to contain_exactly(child1, parent)
- end
+ it 'uses descendants_base #initialize argument' do
+ relation = described_class.new(Group.none, Group.where(id: parent.id), options: options).base_and_descendants
- it 'can find ancestors upto a certain level' do
- relation = described_class.new(Group.where(id: child2), options: options).ancestors(upto: child1)
+ expect(relation).to include(parent, child1, child2)
+ end
- expect(relation).to be_empty
- end
+ it 'does not allow the use of #update_all' do
+ expect { relation.update_all(share_with_group_lock: false) }
+ .to raise_error(ActiveRecord::ReadOnlyRecord)
end
- describe '#all_objects' do
+ context 'when with_depth is true' do
let(:relation) do
- described_class.new(Group.where(id: child1.id), options: options).all_objects
+ described_class.new(Group.where(id: parent.id), options: options).base_and_descendants(with_depth: true)
end
- it 'includes the base rows' do
- expect(relation).to include(child1)
- end
-
- it 'includes the ancestors' do
- expect(relation).to include(parent)
- end
+ it 'includes depth in the results' do
+ object_depths = {
+ parent.id => 1,
+ child1.id => 2,
+ child2.id => 3
+ }
- it 'includes the descendants' do
- expect(relation).to include(child2)
- end
-
- it 'uses ancestors_base #initialize argument for ancestors' do
- relation = described_class.new(Group.where(id: child1.id), Group.where(id: non_existing_record_id), options: options).all_objects
-
- expect(relation).to include(parent)
+ relation.each do |object|
+ expect(object.depth).to eq(object_depths[object.id])
+ end
end
+ end
+ end
- it 'uses descendants_base #initialize argument for descendants' do
- relation = described_class.new(Group.where(id: non_existing_record_id), Group.where(id: child1.id), options: options).all_objects
-
- expect(relation).to include(child2)
- end
+ describe '#descendants' do
+ it 'includes only the descendants' do
+ relation = described_class.new(Group.where(id: parent), options: options).descendants
- it 'does not allow the use of #update_all' do
- expect { relation.update_all(share_with_group_lock: false) }
- .to raise_error(ActiveRecord::ReadOnlyRecord)
- end
+ expect(relation).to contain_exactly(child1, child2)
end
end
- context 'when the use_distinct_in_object_hierarchy feature flag is enabled' do
- before do
- stub_feature_flags(use_distinct_in_object_hierarchy: true)
- stub_feature_flags(use_distinct_for_all_object_hierarchy: false)
- end
+ describe '#max_descendants_depth' do
+ subject { described_class.new(base_relation, options: options).max_descendants_depth }
- it_behaves_like 'Gitlab::ObjectHierarchy test cases'
+ context 'when base relation is empty' do
+ let(:base_relation) { Group.where(id: nil) }
- it 'calls DISTINCT' do
- expect(child2.self_and_ancestors.to_sql).to include("DISTINCT")
+ it { expect(subject).to be_nil }
end
- context 'when use_traversal_ids feature flag is enabled' do
- it 'does not call DISTINCT' do
- expect(parent.self_and_descendants.to_sql).not_to include("DISTINCT")
- end
+ context 'when base has no children' do
+ let(:base_relation) { Group.where(id: child2) }
+
+ it { expect(subject).to eq(1) }
end
- context 'when use_traversal_ids feature flag is disabled' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
+ context 'when base has grandchildren' do
+ let(:base_relation) { Group.where(id: parent) }
- it 'calls DISTINCT' do
- expect(parent.self_and_descendants.to_sql).to include("DISTINCT")
- end
+ it { expect(subject).to eq(3) }
end
end
- context 'when the use_distinct_for_all_object_hierarchy feature flag is enabled' do
- before do
- stub_feature_flags(use_distinct_in_object_hierarchy: false)
- stub_feature_flags(use_distinct_for_all_object_hierarchy: true)
+ describe '#ancestors' do
+ it 'includes only the ancestors' do
+ relation = described_class.new(Group.where(id: child2), options: options).ancestors
+
+ expect(relation).to contain_exactly(child1, parent)
end
- it_behaves_like 'Gitlab::ObjectHierarchy test cases'
+ it 'can find ancestors upto a certain level' do
+ relation = described_class.new(Group.where(id: child2), options: options).ancestors(upto: child1)
- it 'calls DISTINCT' do
- expect(child2.self_and_ancestors.to_sql).to include("DISTINCT")
+ expect(relation).to be_empty
end
+ end
- context 'when use_traversal_ids feature flag is enabled' do
- it 'does not call DISTINCT' do
- expect(parent.self_and_descendants.to_sql).not_to include("DISTINCT")
- end
+ describe '#all_objects' do
+ let(:relation) do
+ described_class.new(Group.where(id: child1.id), options: options).all_objects
end
- context 'when use_traversal_ids feature flag is disabled' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- it 'calls DISTINCT' do
- expect(parent.self_and_descendants.to_sql).to include("DISTINCT")
- end
+ it 'includes the base rows' do
+ expect(relation).to include(child1)
+ end
- context 'when the skip_ordering option is set' do
- let(:options) { { skip_ordering: true } }
+ it 'includes the ancestors' do
+ expect(relation).to include(parent)
+ end
- it_behaves_like 'Gitlab::ObjectHierarchy test cases'
+ it 'includes the descendants' do
+ expect(relation).to include(child2)
+ end
- it 'does not include ROW_NUMBER()' do
- query = described_class.new(Group.where(id: parent.id), options: options).base_and_descendants.to_sql
+ it 'uses ancestors_base #initialize argument for ancestors' do
+ relation = described_class.new(Group.where(id: child1.id), Group.where(id: non_existing_record_id), options: options).all_objects
- expect(query).to include("DISTINCT")
- expect(query).not_to include("ROW_NUMBER()")
- end
- end
+ expect(relation).to include(parent)
end
- end
- context 'when the use_distinct_in_object_hierarchy feature flag is disabled' do
- before do
- stub_feature_flags(use_distinct_in_object_hierarchy: false)
- stub_feature_flags(use_distinct_for_all_object_hierarchy: false)
- end
+ it 'uses descendants_base #initialize argument for descendants' do
+ relation = described_class.new(Group.where(id: non_existing_record_id), Group.where(id: child1.id), options: options).all_objects
- it_behaves_like 'Gitlab::ObjectHierarchy test cases'
+ expect(relation).to include(child2)
+ end
- it 'does not call DISTINCT' do
- expect(parent.self_and_descendants.to_sql).not_to include("DISTINCT")
- expect(child2.self_and_ancestors.to_sql).not_to include("DISTINCT")
+ it 'does not allow the use of #update_all' do
+ expect { relation.update_all(share_with_group_lock: false) }
+ .to raise_error(ActiveRecord::ReadOnlyRecord)
end
end
end
diff --git a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
index 656ae73945e..d8e79287745 100644
--- a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
@@ -18,110 +18,127 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do
Gitlab::Pagination::Keyset::Order.build([
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: column,
- column_expression: klass.arel_table[column],
- order_expression: ::Gitlab::Database.nulls_order(column, direction, nulls_position),
- reversed_order_expression: ::Gitlab::Database.nulls_order(column, reverse_direction, reverse_nulls_position),
- order_direction: direction,
- nullable: nulls_position,
- distinct: false
+ column_expression: klass.arel_table[column],
+ order_expression: ::Gitlab::Database.nulls_order(column, direction, nulls_position),
+ reversed_order_expression: ::Gitlab::Database.nulls_order(column, reverse_direction, reverse_nulls_position),
+ order_direction: direction,
+ nullable: nulls_position,
+ distinct: false
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'id',
- order_expression: klass.arel_table[:id].send(direction),
- add_to_projections: true
+ order_expression: klass.arel_table[:id].send(direction)
)
])
end
let(:scope) { project.issues.reorder(custom_reorder) }
- subject { described_class.new(scope: scope) }
+ shared_examples 'iterator examples' do
+ describe '.each_batch' do
+ it 'yields an ActiveRecord::Relation when a block is given' do
+ iterator.each_batch(of: 1) do |relation|
+ expect(relation).to be_a_kind_of(ActiveRecord::Relation)
+ end
+ end
- describe '.each_batch' do
- it 'yields an ActiveRecord::Relation when a block is given' do
- subject.each_batch(of: 1) do |relation|
- expect(relation).to be_a_kind_of(ActiveRecord::Relation)
+ it 'raises error when ordering configuration cannot be automatically determined' do
+ expect do
+ described_class.new(scope: MergeRequestDiffCommit.order(:merge_request_diff_id, :relative_order))
+ end.to raise_error /The order on the scope does not support keyset pagination/
end
- end
- it 'accepts a custom batch size' do
- count = 0
+ it 'accepts a custom batch size' do
+ count = 0
- subject.each_batch(of: 2) { |relation| count += relation.count(:all) }
+ iterator.each_batch(of: 2) { |relation| count += relation.count(:all) }
- expect(count).to eq(9)
- end
+ expect(count).to eq(9)
+ end
- it 'allows updating of the yielded relations' do
- time = Time.current
+ it 'allows updating of the yielded relations' do
+ time = Time.current
- subject.each_batch(of: 2) do |relation|
- relation.update_all(updated_at: time)
- end
+ iterator.each_batch(of: 2) do |relation|
+ Issue.connection.execute("UPDATE issues SET updated_at = '#{time.to_s(:inspect)}' WHERE id IN (#{relation.reselect(:id).to_sql})")
+ end
- expect(Issue.where(updated_at: time).count).to eq(9)
- end
+ expect(Issue.pluck(:updated_at)).to all(be_within(5.seconds).of(time))
+ end
- context 'with ordering direction' do
- context 'when ordering asc' do
- it 'orders ascending by default, including secondary order column' do
- positions = []
+ context 'with ordering direction' do
+ context 'when ordering asc' do
+ it 'orders ascending by default, including secondary order column' do
+ positions = []
- subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+ iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
- expect(positions).to eq(project.issues.order_relative_position_asc.order(id: :asc).pluck(:relative_position, :id))
+ expect(positions).to eq(project.issues.order_relative_position_asc.order(id: :asc).pluck(:relative_position, :id))
+ end
end
- end
- context 'when reversing asc order' do
- let(:scope) { project.issues.order(custom_reorder.reversed_order) }
+ context 'when reversing asc order' do
+ let(:scope) { project.issues.order(custom_reorder.reversed_order) }
- it 'orders in reverse of ascending' do
- positions = []
+ it 'orders in reverse of ascending' do
+ positions = []
- subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+ iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
- expect(positions).to eq(project.issues.order_relative_position_desc.order(id: :desc).pluck(:relative_position, :id))
+ expect(positions).to eq(project.issues.order_relative_position_desc.order(id: :desc).pluck(:relative_position, :id))
+ end
end
- end
- context 'when asc order, with nulls first' do
- let(:nulls_position) { :nulls_first }
+ context 'when asc order, with nulls first' do
+ let(:nulls_position) { :nulls_first }
- it 'orders ascending with nulls first' do
- positions = []
+ it 'orders ascending with nulls first' do
+ positions = []
- subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+ iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
- expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_first_order('relative_position', 'ASC')).order(id: :asc).pluck(:relative_position, :id))
+ expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_first_order('relative_position', 'ASC')).order(id: :asc).pluck(:relative_position, :id))
+ end
end
- end
- context 'when ordering desc' do
- let(:direction) { :desc }
- let(:nulls_position) { :nulls_last }
+ context 'when ordering desc' do
+ let(:direction) { :desc }
+ let(:nulls_position) { :nulls_last }
- it 'orders descending' do
- positions = []
+ it 'orders descending' do
+ positions = []
- subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+ iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
- expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_last_order('relative_position', 'DESC')).order(id: :desc).pluck(:relative_position, :id))
+ expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_last_order('relative_position', 'DESC')).order(id: :desc).pluck(:relative_position, :id))
+ end
end
- end
- context 'when ordering by columns are repeated twice' do
- let(:direction) { :desc }
- let(:column) { :id }
+ context 'when ordering by columns are repeated twice' do
+ let(:direction) { :desc }
+ let(:column) { :id }
- it 'orders descending' do
- positions = []
+ it 'orders descending' do
+ positions = []
- subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:id)) }
+ iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:id)) }
- expect(positions).to eq(project.issues.reorder(id: :desc).pluck(:id))
+ expect(positions).to eq(project.issues.reorder(id: :desc).pluck(:id))
+ end
end
end
end
end
+
+ context 'when use_union_optimization is used' do
+ subject(:iterator) { described_class.new(scope: scope, use_union_optimization: true) }
+
+ include_examples 'iterator examples'
+ end
+
+ context 'when use_union_optimization is not used' do
+ subject(:iterator) { described_class.new(scope: scope, use_union_optimization: false) }
+
+ include_examples 'iterator examples'
+ end
end
diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb
index 26f52745b54..562a9bf4460 100644
--- a/spec/lib/gitlab/pagination/keyset/order_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb
@@ -171,6 +171,12 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
end
it_behaves_like 'order examples'
+
+ it 'uses the row comparison method' do
+ sql = order.where_values_with_or_query({ year: 2010, month: 5, id: 1 }).to_sql
+
+ expect(sql).to eq('(("my_table"."year", "my_table"."month", "my_table"."id") > (2010, 5, 1))')
+ end
end
context 'when ordering by nullable columns and a distinct column' do
diff --git a/spec/lib/gitlab/pagination/offset_pagination_spec.rb b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
index c9a23170137..f8d50fbc517 100644
--- a/spec/lib/gitlab/pagination/offset_pagination_spec.rb
+++ b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
@@ -130,6 +130,80 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do
end
end
+ context 'when resource already paginated' do
+ let(:resource) { Project.all.page(1).per(1) }
+
+ context 'when per_page param is specified' do
+ let(:query) { base_query.merge(page: 1, per_page: 2) }
+
+ it 'returns appropriate amount of resources based on per_page param' do
+ expect(subject.paginate(resource).count).to eq 2
+ end
+ end
+
+ context 'when page and per page params are strings' do
+ let(:query) { base_query.merge(page: '1', per_page: '1') }
+
+ it 'returns appropriate amount of resources' do
+ expect(subject.paginate(resource).count).to eq 1
+ end
+ end
+
+ context 'when per_page param is blank' do
+ let(:query) { base_query.merge(page: 1) }
+
+ it 'returns appropriate amount of resources' do
+ expect(subject.paginate(resource).count).to eq 1
+ end
+ end
+
+ context 'when page param is blank' do
+ let(:query) { base_query }
+
+ it 'returns appropriate amount of resources based on resource per(N)' do
+ expect(subject.paginate(resource).count).to eq 1
+ end
+ end
+ end
+
+ context 'when resource does not respond to limit_value' do
+ let(:custom_collection) do
+ Class.new do
+ include Enumerable
+
+ def initialize(items)
+ @collection = items
+ end
+
+ def each
+ @collection.each { |item| yield item }
+ end
+
+ def page(number)
+ Kaminari.paginate_array(@collection).page(number)
+ end
+ end
+ end
+
+ let(:resource) { custom_collection.new(Project.all).page(query[:page]) }
+
+ context 'when page param is blank' do
+ let(:query) { base_query }
+
+ it 'returns appropriate amount of resources' do
+ expect(subject.paginate(resource).count).to eq 3
+ end
+ end
+
+ context 'when per_page param is blank' do
+ let(:query) { base_query.merge(page: 1) }
+
+ it 'returns appropriate amount of resources with default per page value' do
+ expect(subject.paginate(resource).count).to eq 3
+ end
+ end
+ end
+
context 'when resource is a paginatable array' do
let(:resource) { Kaminari.paginate_array(Project.all.to_a) }
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index 2f28b8dfce0..a9c0262fdb2 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::ProjectSearchResults do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:query) { 'hello world' }
let(:repository_ref) { nil }
let(:filters) { {} }
@@ -208,11 +209,10 @@ RSpec.describe Gitlab::ProjectSearchResults do
describe 'wiki search' do
let(:project) { create(:project, :public, :wiki_repo) }
- let(:wiki) { build(:project_wiki, project: project) }
before do
- wiki.create_page('Files/Title', 'Content')
- wiki.create_page('CHANGELOG', 'Files example')
+ project.wiki.create_page('Files/Title', 'Content')
+ project.wiki.create_page('CHANGELOG', 'Files example')
end
it_behaves_like 'general blob search', 'wiki', 'wiki_blobs' do
@@ -266,6 +266,7 @@ RSpec.describe Gitlab::ProjectSearchResults do
let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo opened') }
let_it_be(:confidential_result) { create(:issue, :confidential, project: project, title: 'foo confidential') }
+
let(:query) { 'foo' }
before do
diff --git a/spec/lib/gitlab/prometheus/adapter_spec.rb b/spec/lib/gitlab/prometheus/adapter_spec.rb
index 1eaed65c805..5320fbc7c4f 100644
--- a/spec/lib/gitlab/prometheus/adapter_spec.rb
+++ b/spec/lib/gitlab/prometheus/adapter_spec.rb
@@ -9,31 +9,31 @@ RSpec.describe Gitlab::Prometheus::Adapter do
subject { described_class.new(project, cluster) }
describe '#prometheus_adapter' do
- context 'prometheus service can execute queries' do
- let(:prometheus_service) { double(:prometheus_service, can_query?: true) }
+ context 'prometheus integration can execute queries' do
+ let(:prometheus_integration) { double(:prometheus_integration, can_query?: true) }
before do
- allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
+ allow(project).to receive(:find_or_initialize_integration).with('prometheus').and_return prometheus_integration
end
- it 'return prometheus service as prometheus adapter' do
- expect(subject.prometheus_adapter).to eq(prometheus_service)
+ it 'return prometheus integration as prometheus adapter' do
+ expect(subject.prometheus_adapter).to eq(prometheus_integration)
end
context 'with cluster with prometheus available' do
let!(:prometheus) { create(:clusters_integrations_prometheus, cluster: cluster) }
- it 'returns prometheus service' do
- expect(subject.prometheus_adapter).to eq(prometheus_service)
+ it 'returns prometheus integration' do
+ expect(subject.prometheus_adapter).to eq(prometheus_integration)
end
end
end
- context "prometheus service can't execute queries" do
- let(:prometheus_service) { double(:prometheus_service, can_query?: false) }
+ context "prometheus integration can't execute queries" do
+ let(:prometheus_integration) { double(:prometheus_integration, can_query?: false) }
before do
- allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
+ allow(project).to receive(:find_or_initialize_integration).with('prometheus').and_return prometheus_integration
end
context 'with cluster with prometheus disabled' do
diff --git a/spec/lib/gitlab/prometheus/query_variables_spec.rb b/spec/lib/gitlab/prometheus/query_variables_spec.rb
index 1dbdb892a5d..d9cac3e1064 100644
--- a/spec/lib/gitlab/prometheus/query_variables_spec.rb
+++ b/spec/lib/gitlab/prometheus/query_variables_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Prometheus::QueryVariables do
describe '.call' do
let_it_be_with_refind(:environment) { create(:environment) }
+
let(:project) { environment.project }
let(:slug) { environment.slug }
let(:params) { {} }
diff --git a/spec/lib/gitlab/rate_limit_helpers_spec.rb b/spec/lib/gitlab/rate_limit_helpers_spec.rb
index e7d4c69d47b..d583c8e58fb 100644
--- a/spec/lib/gitlab/rate_limit_helpers_spec.rb
+++ b/spec/lib/gitlab/rate_limit_helpers_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::RateLimitHelpers, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::RateLimitHelpers, :clean_gitlab_redis_cache do
let(:limiter_class) do
Class.new do
include ::Gitlab::RateLimitHelpers
diff --git a/spec/lib/gitlab/reactive_cache_set_cache_spec.rb b/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
index 19fb2ada476..f405b2ad86e 100644
--- a/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
+++ b/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::ReactiveCacheSetCache, :clean_gitlab_redis_cache do
let_it_be(:project) { create(:project) }
+
let(:cache_prefix) { 'cache_prefix' }
let(:expires_in) { 10.minutes }
let(:cache) { described_class.new(expires_in: expires_in) }
diff --git a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb
index 229d49868d4..f6e69aa6533 100644
--- a/spec/lib/gitlab/reference_extractor_spec.rb
+++ b/spec/lib/gitlab/reference_extractor_spec.rb
@@ -227,7 +227,7 @@ RSpec.describe Gitlab::ReferenceExtractor do
context 'with an inactive external issue tracker' do
let(:project) { create(:project) }
- let!(:jira_service) { create(:jira_service, project: project, active: false) }
+ let!(:jira_integration) { create(:jira_integration, project: project, active: false) }
let(:issue) { create(:issue, project: project) }
context 'when GitLab issues are enabled' do
@@ -315,6 +315,7 @@ RSpec.describe Gitlab::ReferenceExtractor do
describe '#references' do
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue, project: project) }
+
let(:text) { "Ref. #{issue.to_reference}" }
subject { described_class.new(project, user) }
diff --git a/spec/lib/gitlab/repo_path_spec.rb b/spec/lib/gitlab/repo_path_spec.rb
index 912efa6a5db..6cff0eff7e8 100644
--- a/spec/lib/gitlab/repo_path_spec.rb
+++ b/spec/lib/gitlab/repo_path_spec.rb
@@ -13,11 +13,11 @@ RSpec.describe ::Gitlab::RepoPath do
describe '.parse' do
context 'a repository storage path' do
- it 'parses a full repository project path' do
+ it 'parses a full project repository path' do
expect(described_class.parse(project.repository.full_path)).to eq([project, project, Gitlab::GlRepository::PROJECT, nil])
end
- it 'parses a full wiki project path' do
+ it 'parses a full project wiki repository path' do
expect(described_class.parse(project.wiki.repository.full_path)).to eq([project.wiki, project, Gitlab::GlRepository::WIKI, nil])
end
@@ -49,7 +49,7 @@ RSpec.describe ::Gitlab::RepoPath do
end
it 'parses a relative wiki path' do
- expect(described_class.parse(redirect.path + '.wiki.git')).to eq([project.wiki, project, Gitlab::GlRepository::WIKI, redirect_route])
+ expect(described_class.parse(redirect.path + '.wiki.git')).to eq([project.wiki, project, Gitlab::GlRepository::WIKI, "#{redirect_route}.wiki"])
end
it 'parses a relative path starting with /' do
@@ -57,7 +57,7 @@ RSpec.describe ::Gitlab::RepoPath do
end
it 'parses a redirected project snippet repository path' do
- expect(described_class.parse(redirect.path + "/snippets/#{project_snippet.id}.git")).to eq([project_snippet, project, Gitlab::GlRepository::SNIPPET, redirect_route])
+ expect(described_class.parse(redirect.path + "/snippets/#{project_snippet.id}.git")).to eq([project_snippet, project, Gitlab::GlRepository::SNIPPET, "#{redirect_route}/snippets/#{project_snippet.id}"])
end
end
end
@@ -70,8 +70,8 @@ RSpec.describe ::Gitlab::RepoPath do
describe '.find_project' do
context 'when finding a project by its canonical path' do
context 'when the cases match' do
- it 'returns the project and nil' do
- expect(described_class.find_project(project.full_path)).to eq([project, nil])
+ it 'returns the project' do
+ expect(described_class.find_project(project.full_path)).to eq(project)
end
end
@@ -80,45 +80,45 @@ RSpec.describe ::Gitlab::RepoPath do
# easy and safe to redirect someone to the correctly-cased URL. For git
# requests, we should accept wrongly-cased URLs because it is a pain to
# block people's git operations and force them to update remote URLs.
- it 'returns the project and nil' do
- expect(described_class.find_project(project.full_path.upcase)).to eq([project, nil])
+ it 'returns the project' do
+ expect(described_class.find_project(project.full_path.upcase)).to eq(project)
end
end
end
context 'when finding a project via a redirect' do
- it 'returns the project and nil' do
- expect(described_class.find_project(redirect.path)).to eq([project, redirect.path])
+ it 'returns the project' do
+ expect(described_class.find_project(redirect.path)).to eq(project)
end
end
end
describe '.find_snippet' do
it 'extracts path and id from personal snippet route' do
- expect(described_class.find_snippet("snippets/#{personal_snippet.id}")).to eq([personal_snippet, nil])
+ expect(described_class.find_snippet("snippets/#{personal_snippet.id}")).to eq(personal_snippet)
end
it 'extracts path and id from project snippet route' do
- expect(described_class.find_snippet("#{project.full_path}/snippets/#{project_snippet.id}")).to eq([project_snippet, nil])
+ expect(described_class.find_snippet("#{project.full_path}/snippets/#{project_snippet.id}")).to eq(project_snippet)
end
it 'returns nil for invalid snippet paths' do
aggregate_failures do
- expect(described_class.find_snippet("snippets/#{project_snippet.id}")).to eq([nil, nil])
- expect(described_class.find_snippet("#{project.full_path}/snippets/#{personal_snippet.id}")).to eq([nil, nil])
- expect(described_class.find_snippet('')).to eq([nil, nil])
+ expect(described_class.find_snippet("snippets/#{project_snippet.id}")).to be_nil
+ expect(described_class.find_snippet("#{project.full_path}/snippets/#{personal_snippet.id}")).to be_nil
+ expect(described_class.find_snippet('')).to be_nil
end
end
it 'returns nil for snippets not associated with the project' do
snippet = create(:project_snippet)
- expect(described_class.find_snippet("#{project.full_path}/snippets/#{snippet.id}")).to eq([nil, nil])
+ expect(described_class.find_snippet("#{project.full_path}/snippets/#{snippet.id}")).to be_nil
end
context 'when finding a project snippet via a redirect' do
- it 'returns the project and true' do
- expect(described_class.find_snippet("#{redirect.path}/snippets/#{project_snippet.id}")).to eq([project_snippet, redirect.path])
+ it 'returns the project snippet' do
+ expect(described_class.find_snippet("#{redirect.path}/snippets/#{project_snippet.id}")).to eq(project_snippet)
end
end
end
diff --git a/spec/lib/gitlab/repository_set_cache_spec.rb b/spec/lib/gitlab/repository_set_cache_spec.rb
index 9aeb9f11bac..4dcf9dc2c05 100644
--- a/spec/lib/gitlab/repository_set_cache_spec.rb
+++ b/spec/lib/gitlab/repository_set_cache_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
shared_examples 'cache_key examples' do
it 'includes the namespace' do
- is_expected.to eq("foo:#{namespace}:set")
+ is_expected.to eq("#{gitlab_cache_namespace}:foo:#{namespace}:set")
end
context 'with a given namespace' do
@@ -23,7 +23,7 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
let(:cache) { described_class.new(repository, extra_namespace: extra_namespace) }
it 'includes the full namespace' do
- is_expected.to eq("foo:#{namespace}:#{extra_namespace}:set")
+ is_expected.to eq("#{gitlab_cache_namespace}:foo:#{namespace}:#{extra_namespace}:set")
end
end
end
@@ -60,7 +60,7 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
write_cache
redis_keys = Gitlab::Redis::Cache.with { |redis| redis.scan(0, match: "*") }.last
- expect(redis_keys).to include("branch_names:#{namespace}:set")
+ expect(redis_keys).to include("#{gitlab_cache_namespace}:branch_names:#{namespace}:set")
expect(cache.fetch('branch_names')).to contain_exactly('main')
end
@@ -95,8 +95,8 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
expect(cache.read(:foo)).to be_empty
end
- it 'expires the new key format' do
- expect_any_instance_of(Redis).to receive(:unlink).with(cache.cache_key(:foo), cache.new_cache_key(:foo)) # rubocop:disable RSpec/AnyInstanceOf
+ it 'expires the old key format' do
+ expect_any_instance_of(Redis).to receive(:unlink).with(cache.cache_key(:foo), cache.old_cache_key(:foo)) # rubocop:disable RSpec/AnyInstanceOf
subject
end
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index a1b18172a31..2974893ec4a 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::SearchResults do
let_it_be(:project) { create(:project, name: 'foo') }
let_it_be(:issue) { create(:issue, project: project, title: 'foo') }
let_it_be(:milestone) { create(:milestone, project: project, title: 'foo') }
+
let(:merge_request) { create(:merge_request, source_project: project, title: 'foo') }
let(:query) { 'foo' }
let(:filters) { {} }
@@ -228,10 +229,18 @@ RSpec.describe Gitlab::SearchResults do
let!(:new_updated) { create(:issue, project: project, title: 'updated recent', updated_at: 1.day.ago) }
let!(:very_old_updated) { create(:issue, project: project, title: 'updated very old', updated_at: 1.year.ago) }
+ let!(:less_popular_result) { create(:issue, project: project, title: 'less popular', upvotes_count: 10) }
+ let!(:popular_result) { create(:issue, project: project, title: 'popular', upvotes_count: 100) }
+ let!(:non_popular_result) { create(:issue, project: project, title: 'non popular', upvotes_count: 1) }
+
include_examples 'search results sorted' do
let(:results_created) { described_class.new(user, 'sorted', Project.order(:id), sort: sort, filters: filters) }
let(:results_updated) { described_class.new(user, 'updated', Project.order(:id), sort: sort, filters: filters) }
end
+
+ include_examples 'search results sorted by popularity' do
+ let(:results_popular) { described_class.new(user, 'popular', Project.order(:id), sort: sort, filters: filters) }
+ end
end
end
diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb
index b0dc34e8abf..891b3639709 100644
--- a/spec/lib/gitlab/shell_spec.rb
+++ b/spec/lib/gitlab/shell_spec.rb
@@ -5,6 +5,7 @@ require 'stringio'
RSpec.describe Gitlab::Shell do
let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository }
let(:gitlab_shell) { described_class.new }
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
index d216b9d0c18..d2a53185acd 100644
--- a/spec/lib/gitlab/sidekiq_config_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -122,4 +122,43 @@ RSpec.describe Gitlab::SidekiqConfig do
expect(described_class.sidekiq_queues_yml_outdated?).to be(false)
end
end
+
+ describe '.worker_queue_mappings' do
+ it 'returns the worker class => queue mappings based on the current routing configuration' do
+ test_routes = [
+ ['urgency=high', 'default'],
+ ['*', nil]
+ ]
+
+ allow(::Gitlab::SidekiqConfig::WorkerRouter)
+ .to receive(:global).and_return(::Gitlab::SidekiqConfig::WorkerRouter.new(test_routes))
+
+ expect(described_class.worker_queue_mappings).to include('MergeWorker' => 'default',
+ 'Ci::BuildFinishedWorker' => 'default',
+ 'BackgroundMigrationWorker' => 'background_migration',
+ 'AdminEmailWorker' => 'cronjob:admin_email')
+ end
+ end
+
+ describe '.current_worker_queue_mappings' do
+ it 'returns worker queue mappings that have queues in the current Sidekiq options' do
+ test_routes = [
+ ['urgency=high', 'default'],
+ ['*', nil]
+ ]
+
+ allow(::Gitlab::SidekiqConfig::WorkerRouter)
+ .to receive(:global).and_return(::Gitlab::SidekiqConfig::WorkerRouter.new(test_routes))
+
+ allow(Sidekiq).to receive(:options).and_return(queues: %w[default background_migration])
+
+ mappings = described_class.current_worker_queue_mappings
+
+ expect(mappings).to include('MergeWorker' => 'default',
+ 'Ci::BuildFinishedWorker' => 'default',
+ 'BackgroundMigrationWorker' => 'background_migration')
+
+ expect(mappings).not_to include('AdminEmailWorker' => 'cronjob:admin_email')
+ end
+ end
end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index dfdc1420eac..4406b34e638 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -298,6 +298,8 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
end
+ let(:dbname) { ::Gitlab::Database.dbname(ActiveRecord::Base.connection) }
+
let(:expected_end_payload_with_db) do
expected_end_payload.merge(
'db_duration_s' => a_value >= 0.1,
@@ -311,7 +313,10 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'db_primary_count' => a_value >= 1,
'db_primary_cached_count' => 0,
'db_primary_wal_count' => 0,
- 'db_primary_duration_s' => a_value > 0
+ 'db_primary_duration_s' => a_value > 0,
+ "db_primary_#{dbname}_duration_s" => a_value > 0,
+ 'db_primary_wal_cached_count' => 0,
+ 'db_replica_wal_cached_count' => 0
)
end
@@ -333,6 +338,8 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'db_primary_count' => 0,
'db_primary_cached_count' => 0,
'db_primary_wal_count' => 0,
+ 'db_primary_wal_cached_count' => 0,
+ 'db_replica_wal_cached_count' => 0,
'db_primary_duration_s' => 0
)
end
@@ -342,7 +349,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
context 'when the job uses load balancing capabilities' do
- let(:expected_payload) { { 'database_chosen' => 'retry' } }
+ let(:expected_payload) { { 'load_balancing_strategy' => 'retry' } }
before do
allow(Time).to receive(:now).and_return(timestamp)
@@ -354,7 +361,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect(logger).to receive(:info).with(include(expected_payload)).ordered
call_subject(job, 'test_queue') do
- job[:database_chosen] = 'retry'
+ job['load_balancing_strategy'] = 'retry'
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
index 82ca84f0697..698758a13fd 100644
--- a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::ClientMetrics do
+ let(:enqueued_jobs_metric) { double('enqueued jobs metric', increment: true) }
+
shared_examples "a metrics middleware" do
context "with mocked prometheus" do
- let(:enqueued_jobs_metric) { double('enqueued jobs metric', increment: true) }
-
before do
+ labels[:scheduling] = 'immediate'
allow(Gitlab::Metrics).to receive(:counter).with(described_class::ENQUEUED, anything).and_return(enqueued_jobs_metric)
end
@@ -32,4 +33,35 @@ RSpec.describe Gitlab::SidekiqMiddleware::ClientMetrics do
end
it_behaves_like 'metrics middleware with worker attribution'
+
+ context 'when mounted' do
+ before do
+ stub_const('TestWorker', Class.new)
+ TestWorker.class_eval do
+ include Sidekiq::Worker
+
+ def perform(*args)
+ end
+ end
+
+ allow(Gitlab::Metrics).to receive(:counter).and_return(Gitlab::Metrics::NullMetric.instance)
+ allow(Gitlab::Metrics).to receive(:counter).with(described_class::ENQUEUED, anything).and_return(enqueued_jobs_metric)
+ end
+
+ context 'when scheduling jobs for immediate execution' do
+ it 'increments enqueued jobs metric with scheduling label set to immediate' do
+ expect(enqueued_jobs_metric).to receive(:increment).with(a_hash_including(scheduling: 'immediate'), 1)
+
+ Sidekiq::Testing.inline! { TestWorker.perform_async }
+ end
+ end
+
+ context 'when scheduling jobs for future execution' do
+ it 'increments enqueued jobs metric with scheduling label set to delayed' do
+ expect(enqueued_jobs_metric).to receive(:increment).with(a_hash_including(scheduling: 'delayed'), 1)
+
+ Sidekiq::Testing.inline! { TestWorker.perform_in(1.second) }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index a10a8883591..d67cb95f483 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
let(:queue) { 'authorized_projects' }
let(:idempotency_key) do
- hash = Digest::SHA256.hexdigest("#{job['class']}:#{job['args'].join('-')}")
+ hash = Digest::SHA256.hexdigest("#{job['class']}:#{Sidekiq.dump_json(job['args'])}")
"#{Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE}:duplicate:#{queue}:#{hash}"
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 34b4541f339..3ec8d404bf0 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -8,11 +8,77 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
context "with mocked prometheus" do
include_context 'server metrics with mocked prometheus'
- describe '#initialize' do
+ describe '.initialize_process_metrics' do
it 'sets concurrency metrics' do
expect(concurrency_metric).to receive(:set).with({}, Sidekiq.options[:concurrency].to_i)
- subject
+ described_class.initialize_process_metrics
+ end
+
+ it 'initializes sidekiq_jobs_completion_seconds for the workers in the current Sidekiq process' do
+ allow(Gitlab::SidekiqConfig)
+ .to receive(:current_worker_queue_mappings)
+ .and_return('MergeWorker' => 'merge', 'Ci::BuildFinishedWorker' => 'default')
+
+ expect(completion_seconds_metric)
+ .to receive(:get).with(queue: 'merge',
+ worker: 'MergeWorker',
+ urgency: 'high',
+ external_dependencies: 'no',
+ feature_category: 'source_code_management',
+ boundary: '',
+ job_status: 'done')
+
+ expect(completion_seconds_metric)
+ .to receive(:get).with(queue: 'merge',
+ worker: 'MergeWorker',
+ urgency: 'high',
+ external_dependencies: 'no',
+ feature_category: 'source_code_management',
+ boundary: '',
+ job_status: 'fail')
+
+ expect(completion_seconds_metric)
+ .to receive(:get).with(queue: 'default',
+ worker: 'Ci::BuildFinishedWorker',
+ urgency: 'high',
+ external_dependencies: 'no',
+ feature_category: 'continuous_integration',
+ boundary: 'cpu',
+ job_status: 'done')
+
+ expect(completion_seconds_metric)
+ .to receive(:get).with(queue: 'default',
+ worker: 'Ci::BuildFinishedWorker',
+ urgency: 'high',
+ external_dependencies: 'no',
+ feature_category: 'continuous_integration',
+ boundary: 'cpu',
+ job_status: 'fail')
+
+ described_class.initialize_process_metrics
+ end
+
+ context 'when the sidekiq_job_completion_metric_initialize feature flag is disabled' do
+ before do
+ stub_feature_flags(sidekiq_job_completion_metric_initialize: false)
+ end
+
+ it 'sets the concurrency metric' do
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq.options[:concurrency].to_i)
+
+ described_class.initialize_process_metrics
+ end
+
+ it 'does not initialize sidekiq_jobs_completion_seconds' do
+ allow(Gitlab::SidekiqConfig)
+ .to receive(:current_worker_queue_mappings)
+ .and_return('MergeWorker' => 'merge', 'Ci::BuildFinishedWorker' => 'default')
+
+ expect(completion_seconds_metric).not_to receive(:get)
+
+ described_class.initialize_process_metrics
+ end
end
end
@@ -47,6 +113,26 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
subject.call(worker, job, :test) { nil }
end
+ it 'sets sidekiq_jobs_completion_seconds values that are compatible with those from .initialize_process_metrics' do
+ label_validator = Prometheus::Client::LabelSetValidator.new([:le])
+
+ allow(Gitlab::SidekiqConfig)
+ .to receive(:current_worker_queue_mappings)
+ .and_return('MergeWorker' => 'merge', 'Ci::BuildFinishedWorker' => 'default')
+
+ allow(completion_seconds_metric).to receive(:get) do |labels|
+ expect { label_validator.validate(labels) }.not_to raise_error
+ end
+
+ allow(completion_seconds_metric).to receive(:observe) do |labels, _duration|
+ expect { label_validator.validate(labels) }.not_to raise_error
+ end
+
+ described_class.initialize_process_metrics
+
+ subject.call(worker, job, :test) { nil }
+ end
+
it 'sets the thread name if it was nil' do
allow(Thread.current).to receive(:name).and_return(nil)
expect(Thread.current).to receive(:name=).with(Gitlab::Metrics::Samplers::ThreadsSampler::SIDEKIQ_WORKER_THREAD_NAME)
@@ -109,22 +195,20 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
end
context 'DB load balancing' do
- using RSpec::Parameterized::TableSyntax
-
subject { described_class.new }
let(:queue) { :test }
let(:worker_class) { worker.class }
- let(:job) { {} }
- let(:job_status) { :done }
- let(:labels_with_job_status) { default_labels.merge(job_status: job_status.to_s) }
- let(:default_labels) do
- { queue: queue.to_s,
- worker: worker_class.to_s,
- boundary: "",
- external_dependencies: "no",
- feature_category: "",
- urgency: "low" }
+ let(:worker) { TestWorker.new }
+ let(:client_middleware) { Gitlab::Database::LoadBalancing::SidekiqClientMiddleware.new }
+ let(:load_balancer) { double.as_null_object }
+ let(:load_balancing_metric) { double('load balancing metric') }
+ let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e" } }
+
+ def process_job
+ client_middleware.call(worker_class, job, queue, double) do
+ worker_class.process_job(job)
+ end
end
before do
@@ -132,84 +216,97 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
TestWorker.class_eval do
include Sidekiq::Worker
include WorkerAttributes
+
+ def perform(*args)
+ end
end
+
+ allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer).and_return(load_balancer)
+ allow(load_balancing_metric).to receive(:increment)
+ allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, anything).and_return(load_balancing_metric)
end
- let(:worker) { TestWorker.new }
+ around do |example|
+ with_sidekiq_server_middleware do |chain|
+ chain.add Gitlab::Database::LoadBalancing::SidekiqServerMiddleware
+ chain.add described_class
+ Sidekiq::Testing.inline! { example.run }
+ end
+ end
include_context 'server metrics with mocked prometheus'
+ include_context 'server metrics call'
+ include_context 'clear DB Load Balancing configuration'
- context 'when load_balancing is enabled' do
- let(:load_balancing_metric) { double('load balancing metric') }
-
- include_context 'clear DB Load Balancing configuration'
+ shared_context 'worker declaring data consistency' do
+ let(:worker_class) { LBTestWorker }
before do
- allow(::Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
- allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, anything).and_return(load_balancing_metric)
- end
-
- describe '#initialize' do
- it 'sets load_balancing metrics' do
- expect(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, anything).and_return(load_balancing_metric)
+ stub_const('LBTestWorker', Class.new(TestWorker))
+ LBTestWorker.class_eval do
+ include ApplicationWorker
- subject
+ data_consistency :delayed
end
end
+ end
- describe '#call' do
- include_context 'server metrics call'
-
- context 'when :database_chosen is provided' do
- where(:database_chosen) do
- %w[primary retry replica]
- end
-
- with_them do
- context "when #{params[:database_chosen]} is used" do
- let(:labels_with_load_balancing) do
- labels_with_job_status.merge(database_chosen: database_chosen, data_consistency: 'delayed')
- end
+ context 'when load_balancing is enabled' do
+ before do
+ allow(::Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
+ end
- before do
- job[:database_chosen] = database_chosen
- job[:data_consistency] = 'delayed'
- allow(load_balancing_metric).to receive(:increment)
- end
+ describe '#call' do
+ context 'when worker declares data consistency' do
+ include_context 'worker declaring data consistency'
- it 'increment sidekiq_load_balancing_count' do
- expect(load_balancing_metric).to receive(:increment).with(labels_with_load_balancing, 1)
+ it 'increments load balancing counter with defined data consistency' do
+ process_job
- described_class.new.call(worker, job, :test) { nil }
- end
- end
+ expect(load_balancing_metric).to have_received(:increment).with(
+ a_hash_including(
+ data_consistency: :delayed,
+ load_balancing_strategy: 'replica'
+ ), 1)
end
end
- context 'when :database_chosen is not provided' do
- it 'does not increment sidekiq_load_balancing_count' do
- expect(load_balancing_metric).not_to receive(:increment)
+ context 'when worker does not declare data consistency' do
+ it 'increments load balancing counter with default data consistency' do
+ process_job
- described_class.new.call(worker, job, :test) { nil }
+ expect(load_balancing_metric).to have_received(:increment).with(
+ a_hash_including(
+ data_consistency: :always,
+ load_balancing_strategy: 'primary'
+ ), 1)
end
end
end
end
context 'when load_balancing is disabled' do
- include_context 'clear DB Load Balancing configuration'
+ include_context 'worker declaring data consistency'
before do
allow(::Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false)
end
describe '#initialize' do
- it 'doesnt set load_balancing metrics' do
+ it 'does not set load_balancing metrics' do
expect(Gitlab::Metrics).not_to receive(:counter).with(:sidekiq_load_balancing_count, anything)
subject
end
end
+
+ describe '#call' do
+ it 'does not increment load balancing counter' do
+ process_job
+
+ expect(load_balancing_metric).not_to have_received(:increment)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb b/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb
index 4fbe59c3c27..440eca10a88 100644
--- a/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb
@@ -230,11 +230,11 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
end
context 'in compress mode' do
+ let(:size_limit) { 50 }
+ let(:compression_threshold) { 30 }
let(:mode) { 'compress' }
context 'when job size is less than compression threshold' do
- let(:size_limit) { 50 }
- let(:compression_threshold) { 30 }
let(:job) { job_payload(a: 'a' * 10) }
it 'does not raise an exception' do
@@ -244,8 +244,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
end
context 'when job size is bigger than compression threshold and less than size limit after compressed' do
- let(:size_limit) { 50 }
- let(:compression_threshold) { 30 }
let(:args) { { a: 'a' * 300 } }
let(:job) { job_payload(args) }
@@ -260,9 +258,20 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
end
end
+ context 'when the job was already compressed' do
+ let(:job) do
+ job_payload({ a: 'a' * 10 })
+ .merge(Gitlab::SidekiqMiddleware::SizeLimiter::Compressor::COMPRESSED_KEY => true)
+ end
+
+ it 'does not compress the arguments again' do
+ expect(Gitlab::SidekiqMiddleware::SizeLimiter::Compressor).not_to receive(:compress)
+
+ expect { validate.call(TestSizeLimiterWorker, job) }.not_to raise_error
+ end
+ end
+
context 'when job size is bigger than compression threshold and bigger than size limit after compressed' do
- let(:size_limit) { 50 }
- let(:compression_threshold) { 30 }
let(:args) { { a: 'a' * 3000 } }
let(:job) { job_payload(args) }
diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
index fff925f8532..d6cc787f53d 100644
--- a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
include ApplicationWorker
+ feature_category :issue_tracking
+
def self.job_for_args(args)
jobs.find { |job| job['args'] == args }
end
@@ -41,5 +43,39 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
expect(job1['meta.user']).to eq(user_per_job['job1'].username)
expect(job2['meta.user']).to eq(user_per_job['job2'].username)
end
+
+ context 'when the feature category is set in the context_proc' do
+ it 'takes the feature category from the worker, not the caller' do
+ TestWithContextWorker.bulk_perform_async_with_contexts(
+ %w(job1 job2),
+ arguments_proc: -> (name) { [name, 1, 2, 3] },
+ context_proc: -> (_) { { feature_category: 'code_review' } }
+ )
+
+ job1 = TestWithContextWorker.job_for_args(['job1', 1, 2, 3])
+ job2 = TestWithContextWorker.job_for_args(['job2', 1, 2, 3])
+
+ expect(job1['meta.feature_category']).to eq('issue_tracking')
+ expect(job2['meta.feature_category']).to eq('issue_tracking')
+ end
+ end
+
+ context 'when the feature category is already set in the surrounding block' do
+ it 'takes the feature category from the worker, not the caller' do
+ Gitlab::ApplicationContext.with_context(feature_category: 'authentication_and_authorization') do
+ TestWithContextWorker.bulk_perform_async_with_contexts(
+ %w(job1 job2),
+ arguments_proc: -> (name) { [name, 1, 2, 3] },
+ context_proc: -> (_) { {} }
+ )
+ end
+
+ job1 = TestWithContextWorker.job_for_args(['job1', 1, 2, 3])
+ job2 = TestWithContextWorker.job_for_args(['job2', 1, 2, 3])
+
+ expect(job1['meta.feature_category']).to eq('issue_tracking')
+ expect(job2['meta.feature_category']).to eq('issue_tracking')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/sidekiq_queue_spec.rb b/spec/lib/gitlab/sidekiq_queue_spec.rb
index 44ac89c0816..2ab32657f0e 100644
--- a/spec/lib/gitlab/sidekiq_queue_spec.rb
+++ b/spec/lib/gitlab/sidekiq_queue_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
context 'when the queue is not processed in time' do
before do
- allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(1, 2, 12)
+ allow(sidekiq_queue).to receive(:monotonic_time).and_return(1, 2, 12)
end
it 'returns a non-completion flag, the number of jobs deleted, and the remaining queue size' do
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index dd5b8856ccd..fc2ac29a1f9 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqStatus do
- describe '.set', :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
+ describe '.set' do
it 'stores the job ID' do
described_class.set('123')
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::SidekiqStatus do
end
end
- describe '.unset', :clean_gitlab_redis_shared_state do
+ describe '.unset' do
it 'removes the job ID' do
described_class.set('123')
described_class.unset('123')
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::SidekiqStatus do
end
end
- describe '.all_completed?', :clean_gitlab_redis_shared_state do
+ describe '.all_completed?' do
it 'returns true if all jobs have been completed' do
expect(described_class.all_completed?(%w(123))).to eq(true)
end
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::SidekiqStatus do
end
end
- describe '.running?', :clean_gitlab_redis_shared_state do
+ describe '.running?' do
it 'returns true if job is running' do
described_class.set('123')
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::SidekiqStatus do
end
end
- describe '.num_running', :clean_gitlab_redis_shared_state do
+ describe '.num_running' do
it 'returns 0 if all jobs have been completed' do
expect(described_class.num_running(%w(123))).to eq(0)
end
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::SidekiqStatus do
end
end
- describe '.num_completed', :clean_gitlab_redis_shared_state do
+ describe '.num_completed' do
it 'returns 1 if all jobs have been completed' do
expect(described_class.num_completed(%w(123))).to eq(1)
end
@@ -88,7 +88,7 @@ RSpec.describe Gitlab::SidekiqStatus do
end
end
- describe 'completed', :clean_gitlab_redis_shared_state do
+ describe 'completed' do
it 'returns the completed job' do
expect(described_class.completed_jids(%w(123))).to eq(['123'])
end
diff --git a/spec/lib/gitlab/spamcheck/client_spec.rb b/spec/lib/gitlab/spamcheck/client_spec.rb
index 491e5e9a662..15e963fe423 100644
--- a/spec/lib/gitlab/spamcheck/client_spec.rb
+++ b/spec/lib/gitlab/spamcheck/client_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Spamcheck::Client do
let(:endpoint) { 'grpc://grpc.test.url' }
let_it_be(:user) { create(:user, organization: 'GitLab') }
- let(:verdict_value) { nil }
+ let(:verdict_value) { ::Spamcheck::SpamVerdict::Verdict::ALLOW }
let(:error_value) { "" }
let(:attribs_value) do
@@ -56,6 +56,13 @@ RSpec.describe Gitlab::Spamcheck::Client do
expect(subject).to eq([expected, { "monitorMode" => "false" }, ""])
end
end
+
+ it 'includes interceptors' do
+ expect_next_instance_of(::Gitlab::Spamcheck::Client) do |client|
+ expect(client).to receive(:interceptors).and_call_original
+ end
+ subject
+ end
end
describe "#build_issue_protobuf", :aggregate_failures do
diff --git a/spec/lib/gitlab/changelog/ast_spec.rb b/spec/lib/gitlab/template_parser/ast_spec.rb
index fa15ac979fe..27361ea8632 100644
--- a/spec/lib/gitlab/changelog/ast_spec.rb
+++ b/spec/lib/gitlab/template_parser/ast_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::Changelog::AST::Identifier do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Identifier do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
describe '#evaluate' do
it 'evaluates a selector' do
@@ -26,8 +26,8 @@ RSpec.describe Gitlab::Changelog::AST::Identifier do
end
end
-RSpec.describe Gitlab::Changelog::AST::Integer do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Integer do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
describe '#evaluate' do
it 'evaluates a selector' do
@@ -44,33 +44,33 @@ RSpec.describe Gitlab::Changelog::AST::Integer do
end
end
-RSpec.describe Gitlab::Changelog::AST::Selector do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Selector do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
let(:data) { { 'numbers' => [10] } }
describe '#evaluate' do
it 'evaluates a selector' do
- ident = Gitlab::Changelog::AST::Identifier.new('numbers')
- int = Gitlab::Changelog::AST::Integer.new(0)
+ ident = Gitlab::TemplateParser::AST::Identifier.new('numbers')
+ int = Gitlab::TemplateParser::AST::Integer.new(0)
expect(described_class.new([ident, int]).evaluate(state, data)).to eq(10)
end
it 'evaluates a selector that returns nil' do
- int = Gitlab::Changelog::AST::Integer.new(0)
+ int = Gitlab::TemplateParser::AST::Integer.new(0)
expect(described_class.new([int]).evaluate(state, data)).to be_nil
end
end
end
-RSpec.describe Gitlab::Changelog::AST::Variable do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Variable do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
let(:data) { { 'numbers' => [10] } }
describe '#evaluate' do
it 'evaluates a variable' do
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{{numbers.0}}')
.nodes[0]
@@ -80,26 +80,26 @@ RSpec.describe Gitlab::Changelog::AST::Variable do
it 'evaluates an undefined variable' do
node =
- Gitlab::Changelog::Parser.new.parse_and_transform('{{foobar}}').nodes[0]
+ Gitlab::TemplateParser::Parser.new.parse_and_transform('{{foobar}}').nodes[0]
expect(node.evaluate(state, data)).to eq('')
end
it 'evaluates the special variable "it"' do
node =
- Gitlab::Changelog::Parser.new.parse_and_transform('{{it}}').nodes[0]
+ Gitlab::TemplateParser::Parser.new.parse_and_transform('{{it}}').nodes[0]
expect(node.evaluate(state, data)).to eq(data.to_s)
end
end
end
-RSpec.describe Gitlab::Changelog::AST::Expressions do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Expressions do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
describe '#evaluate' do
it 'evaluates all expressions' do
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{{number}}foo')
@@ -108,8 +108,8 @@ RSpec.describe Gitlab::Changelog::AST::Expressions do
end
end
-RSpec.describe Gitlab::Changelog::AST::Text do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Text do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
describe '#evaluate' do
it 'returns the text' do
@@ -118,12 +118,12 @@ RSpec.describe Gitlab::Changelog::AST::Text do
end
end
-RSpec.describe Gitlab::Changelog::AST::If do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::If do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
describe '#evaluate' do
it 'evaluates a truthy if expression without an else clause' do
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{% if thing %}foo{% end %}')
.nodes[0]
@@ -132,7 +132,7 @@ RSpec.describe Gitlab::Changelog::AST::If do
end
it 'evaluates a falsy if expression without an else clause' do
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{% if thing %}foo{% end %}')
.nodes[0]
@@ -141,7 +141,7 @@ RSpec.describe Gitlab::Changelog::AST::If do
end
it 'evaluates a falsy if expression with an else clause' do
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{% if thing %}foo{% else %}bar{% end %}')
.nodes[0]
@@ -177,13 +177,13 @@ RSpec.describe Gitlab::Changelog::AST::If do
end
end
-RSpec.describe Gitlab::Changelog::AST::Each do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Each do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
describe '#evaluate' do
it 'evaluates the expression' do
data = { 'animals' => [{ 'name' => 'Cat' }, { 'name' => 'Dog' }] }
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{% each animals %}{{name}}{% end %}')
.nodes[0]
@@ -193,7 +193,7 @@ RSpec.describe Gitlab::Changelog::AST::Each do
it 'returns an empty string when the input is not a collection' do
data = { 'animals' => 10 }
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{% each animals %}{{name}}{% end %}')
.nodes[0]
@@ -237,10 +237,10 @@ RSpec.describe Gitlab::Changelog::AST::Each do
TPL
node =
- Gitlab::Changelog::Parser.new.parse_and_transform(template).nodes[0]
+ Gitlab::TemplateParser::Parser.new.parse_and_transform(template).nodes[0]
expect { node.evaluate(state, data) }
- .to raise_error(Gitlab::Changelog::Error)
+ .to raise_error(Gitlab::TemplateParser::Error)
end
end
end
diff --git a/spec/lib/gitlab/changelog/parser_spec.rb b/spec/lib/gitlab/template_parser/parser_spec.rb
index 1d353f5eb35..22247cbb693 100644
--- a/spec/lib/gitlab/changelog/parser_spec.rb
+++ b/spec/lib/gitlab/template_parser/parser_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Changelog::Parser do
+RSpec.describe Gitlab::TemplateParser::Parser do
let(:parser) { described_class.new }
describe '#root' do
@@ -67,12 +67,12 @@ RSpec.describe Gitlab::Changelog::Parser do
it 'parses and transforms a template' do
node = parser.parse_and_transform('foo')
- expect(node).to be_instance_of(Gitlab::Changelog::AST::Expressions)
+ expect(node).to be_instance_of(Gitlab::TemplateParser::AST::Expressions)
end
it 'raises parsing errors using a custom error class' do
expect { parser.parse_and_transform('{% each') }
- .to raise_error(Gitlab::Changelog::Error)
+ .to raise_error(Gitlab::TemplateParser::Error)
end
end
end
diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
index 65597e6568d..f8e73a807c6 100644
--- a/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
+++ b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
@@ -21,7 +21,10 @@ RSpec.describe Gitlab::Tracking::Destinations::Snowplow do
expect(SnowplowTracker::AsyncEmitter)
.to receive(:new)
- .with('gitfoo.com', { protocol: 'https' })
+ .with('gitfoo.com',
+ { protocol: 'https',
+ on_success: subject.method(:increment_successful_events_emissions),
+ on_failure: subject.method(:failure_callback) })
.and_return(emitter)
expect(SnowplowTracker::Tracker)
@@ -40,6 +43,18 @@ RSpec.describe Gitlab::Tracking::Destinations::Snowplow do
.to have_received(:track_struct_event)
.with('category', 'action', 'label', 'property', 1.5, nil, (Time.now.to_f * 1000).to_i)
end
+
+ it 'increase total snowplow events counter' do
+ counter = double
+
+ expect(counter).to receive(:increment)
+ expect(Gitlab::Metrics).to receive(:counter)
+ .with(:gitlab_snowplow_events_total,
+ 'Number of Snowplow events')
+ .and_return(counter)
+
+ subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
+ end
end
end
@@ -52,4 +67,43 @@ RSpec.describe Gitlab::Tracking::Destinations::Snowplow do
end
end
end
+
+ context 'callbacks' do
+ describe 'on success' do
+ it 'increase gitlab_successful_snowplow_events_total counter' do
+ counter = double
+
+ expect(counter).to receive(:increment).with({}, 2)
+ expect(Gitlab::Metrics).to receive(:counter)
+ .with(:gitlab_snowplow_successful_events_total,
+ 'Number of successful Snowplow events emissions')
+ .and_return(counter)
+
+ subject.method(:increment_successful_events_emissions).call(2)
+ end
+ end
+
+ describe 'on failure' do
+ it 'increase gitlab_failed_snowplow_events_total counter and logs failures', :aggregate_failures do
+ counter = double
+ error_message = "Admin::AuditLogsController search_audit_event failed to be reported to collector at gitfoo.com"
+ failures = [{ "e" => "se",
+ "se_ca" => "Admin::AuditLogsController",
+ "se_ac" => "search_audit_event" }]
+ allow(Gitlab::Metrics).to receive(:counter)
+ .with(:gitlab_snowplow_successful_events_total,
+ 'Number of successful Snowplow events emissions')
+ .and_call_original
+
+ expect(Gitlab::AppLogger).to receive(:error).with(error_message)
+ expect(counter).to receive(:increment).with({}, 1)
+ expect(Gitlab::Metrics).to receive(:counter)
+ .with(:gitlab_snowplow_failed_events_total,
+ 'Number of failed Snowplow events emissions')
+ .and_return(counter)
+
+ subject.method(:failure_callback).call(2, failures)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/usage/docs/helper_spec.rb b/spec/lib/gitlab/usage/docs/helper_spec.rb
new file mode 100644
index 00000000000..e2bb1d8d818
--- /dev/null
+++ b/spec/lib/gitlab/usage/docs/helper_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Docs::Helper do
+ subject(:helper) { klass.new }
+
+ let_it_be(:klass) do
+ Class.new do
+ include Gitlab::Usage::Docs::Helper
+ end
+ end
+
+ let(:metric_definition) do
+ {
+ data_category: 'Standard',
+ name: 'test_metric',
+ description: description,
+ product_group: 'group::product intelligence',
+ status: 'data_available',
+ tier: %w(free premium)
+ }
+ end
+
+ let(:description) { 'Metric description' }
+
+ describe '#render_name' do
+ it { expect(helper.render_name(metric_definition[:name])).to eq('### `test_metric`') }
+ end
+
+ describe '#render_description' do
+ context 'without description' do
+ let(:description) { nil }
+
+ it { expect(helper.render_description(metric_definition)).to eq('Missing description') }
+ end
+
+ context 'without description' do
+ it { expect(helper.render_description(metric_definition)).to eq('Metric description') }
+ end
+ end
+
+ describe '#render_yaml_link' do
+ let(:yaml_link) { 'config/metrics/license/test_metric.yml' }
+ let(:expected) { "[YAML definition](#{yaml_link})" }
+
+ it { expect(helper.render_yaml_link(yaml_link)).to eq(expected) }
+ end
+
+ describe '#render_status' do
+ let(:expected) { "Status: `data_available`" }
+
+ it { expect(helper.render_status(metric_definition)).to eq(expected) }
+ end
+
+ describe '#render_owner' do
+ let(:expected) { "Group: `group::product intelligence`" }
+
+ it { expect(helper.render_owner(metric_definition)).to eq(expected) }
+ end
+
+ describe '#render_tiers' do
+ let(:expected) { "Tiers: `free`, `premium`" }
+
+ it { expect(helper.render_tiers(metric_definition)).to eq(expected) }
+ end
+
+ describe '#render_data_category' do
+ let(:expected) { 'Data Category: `Standard`' }
+
+ it { expect(helper.render_data_category(metric_definition)).to eq(expected) }
+ end
+
+ describe '#render_owner' do
+ let(:expected) { "Group: `group::product intelligence`" }
+
+ it { expect(helper.render_owner(metric_definition)).to eq(expected) }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index 1ed639b2f7d..f3c3e5fc550 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -17,7 +17,8 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
data_source: 'database',
distribution: %w(ee ce),
tier: %w(free starter premium ultimate bronze silver gold),
- name: 'count_boards'
+ name: 'uuid',
+ data_category: 'Standard'
}
end
@@ -63,6 +64,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
:value_type | nil
:value_type | 'test'
:status | nil
+ :data_category | nil
:key_path | nil
:product_group | nil
:time_frame | nil
@@ -196,7 +198,8 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
time_frame: 'none',
data_source: 'database',
distribution: %w(ee ce),
- tier: %w(free starter premium ultimate bronze silver gold)
+ tier: %w(free starter premium ultimate bronze silver gold),
+ data_category: 'Optional'
}
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/collected_data_categories_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/collected_data_categories_metric_spec.rb
new file mode 100644
index 00000000000..8f52d550e5c
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/collected_data_categories_metric_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CollectedDataCategoriesMetric do
+ it_behaves_like 'a correct instrumented metric value', {} do
+ let(:expected_value) { %w[Standard Subscription Operational Optional] }
+
+ before do
+ allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance|
+ expect(instance).to receive(:execute).and_return(expected_value)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
new file mode 100644
index 00000000000..5e36820df5e
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
+ subject do
+ described_class.tap do |m|
+ m.relation { Issue }
+ m.operation :count
+ m.start { m.relation.minimum(:id) }
+ m.finish { m.relation.maximum(:id) }
+ end.new(time_frame: 'all')
+ end
+
+ describe '#value' do
+ let_it_be(:issue_1) { create(:issue) }
+ let_it_be(:issue_2) { create(:issue) }
+ let_it_be(:issue_3) { create(:issue) }
+ let_it_be(:issues) { Issue.all }
+
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ end
+
+ it 'calculates a correct result' do
+ expect(subject.value).to eq(3)
+ end
+
+ it 'does not cache the result of start and finish', :request_store, :use_clean_rails_redis_caching do
+ expect(Gitlab::Cache).not_to receive(:fetch_once)
+ expect(subject).to receive(:count).with(any_args, hash_including(start: issues.min_by(&:id).id, finish: issues.max_by(&:id).id)).and_call_original
+
+ subject.value
+
+ expect(Rails.cache.read('metric_instrumentation/special_issue_count_minimum_id')).to eq(nil)
+ expect(Rails.cache.read('metric_instrumentation/special_issue_count_maximum_id')).to eq(nil)
+ end
+
+ context 'with start and finish not called' do
+ subject do
+ described_class.tap do |m|
+ m.relation { Issue }
+ m.operation :count
+ end.new(time_frame: 'all')
+ end
+
+ it 'calculates a correct result' do
+ expect(subject.value).to eq(3)
+ end
+ end
+
+ context 'with cache_start_and_finish_as called' do
+ subject do
+ described_class.tap do |m|
+ m.relation { Issue }
+ m.operation :count
+ m.start { m.relation.minimum(:id) }
+ m.finish { m.relation.maximum(:id) }
+ m.cache_start_and_finish_as :special_issue_count
+ end.new(time_frame: 'all')
+ end
+
+ it 'caches using the key name passed', :request_store, :use_clean_rails_redis_caching do
+ expect(Gitlab::Cache).to receive(:fetch_once).with('metric_instrumentation/special_issue_count_minimum_id', any_args).and_call_original
+ expect(Gitlab::Cache).to receive(:fetch_once).with('metric_instrumentation/special_issue_count_maximum_id', any_args).and_call_original
+ expect(subject).to receive(:count).with(any_args, hash_including(start: issues.min_by(&:id).id, finish: issues.max_by(&:id).id)).and_call_original
+
+ subject.value
+
+ expect(Rails.cache.read('metric_instrumentation/special_issue_count_minimum_id')).to eq(issues.min_by(&:id).id)
+ expect(Rails.cache.read('metric_instrumentation/special_issue_count_maximum_id')).to eq(issues.max_by(&:id).id)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 4efacae0a48..d89202ae7fe 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -46,7 +46,8 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'pipeline_authoring',
'epics_usage',
'epic_boards_usage',
- 'secure'
+ 'secure',
+ 'network_policies'
)
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
index 78cc27c8569..6f201b43390 100644
--- a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::UsageDataCounters::PackageEventCounter, :clean_gitlab_red
end
it 'includes the right events' do
- expect(described_class::KNOWN_EVENTS.size).to eq 52
+ expect(described_class::KNOWN_EVENTS.size).to eq 63
end
described_class::KNOWN_EVENTS.each do |event|
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index ea82de186f5..d84974e562a 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -435,8 +435,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
create(:issue, project: project, author: User.support_bot)
create(:note, project: project, noteable: issue, author: user)
create(:todo, project: project, target: issue, author: user)
- create(:jira_service, :jira_cloud_service, active: true, project: create(:project, :jira_dvcs_cloud, creator: user))
- create(:jira_service, active: true, project: create(:project, :jira_dvcs_server, creator: user))
+ create(:jira_integration, :jira_cloud_service, active: true, project: create(:project, :jira_dvcs_cloud, creator: user))
+ create(:jira_integration, active: true, project: create(:project, :jira_dvcs_server, creator: user))
end
expect(described_class.usage_activity_by_stage_plan({})).to include(
@@ -1078,6 +1078,16 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it 'gathers gitaly apdex', :aggregate_failures do
expect(subject[:settings][:gitaly_apdex]).to be_within(0.001).of(0.95)
end
+
+ it 'reports collected data categories' do
+ expected_value = %w[Standard Subscription Operational Optional]
+
+ allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance|
+ expect(instance).to receive(:execute).and_return(expected_value)
+ end
+
+ expect(subject[:settings][:collected_data_categories]).to eq(expected_value)
+ end
end
end
@@ -1269,7 +1279,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
let(:ineligible_total_categories) do
- %w[source_code ci_secrets_management incident_management_alerts snippets terraform incident_management_oncall secure]
+ %w[source_code ci_secrets_management incident_management_alerts snippets terraform incident_management_oncall secure network_policies]
end
context 'with redis_hll_tracking feature enabled' do
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 11b2a12f228..8f705d6a487 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -377,7 +377,7 @@ RSpec.describe Gitlab::Utils::UsageData do
shared_examples 'try to query Prometheus with given address' do
context 'Prometheus is ready' do
before do
- stub_request(:get, /\/-\/ready/)
+ stub_request(:get, %r{/-/ready})
.to_return(status: 200, body: 'Prometheus is Ready.\n')
end
@@ -387,7 +387,7 @@ RSpec.describe Gitlab::Utils::UsageData do
context 'Prometheus is not reachable through HTTPS' do
before do
- stub_request(:get, /https:\/\/.*/).to_raise(Errno::ECONNREFUSED)
+ stub_request(:get, %r{https://.*}).to_raise(Errno::ECONNREFUSED)
end
context 'Prometheus is reachable through HTTP' do
@@ -396,7 +396,7 @@ RSpec.describe Gitlab::Utils::UsageData do
context 'Prometheus is not reachable through HTTP' do
before do
- stub_request(:get, /http:\/\/.*/).to_raise(Errno::ECONNREFUSED)
+ stub_request(:get, %r{http://.*}).to_raise(Errno::ECONNREFUSED)
end
it_behaves_like 'does not query data from Prometheus'
@@ -406,7 +406,7 @@ RSpec.describe Gitlab::Utils::UsageData do
context 'Prometheus is not ready' do
before do
- stub_request(:get, /\/-\/ready/)
+ stub_request(:get, %r{/-/ready})
.to_return(status: 503, body: 'Service Unavailable')
end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index a7ccce0aaab..f1601294c07 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -351,6 +351,22 @@ RSpec.describe Gitlab::Utils do
end
end
+ describe '.deep_symbolized_access' do
+ let(:hash) do
+ { "variables" => [{ "key" => "VAR1", "value" => "VALUE2" }] }
+ end
+
+ subject { described_class.deep_symbolized_access(hash) }
+
+ it 'allows to access hash keys with symbols' do
+ expect(subject[:variables]).to be_a(Array)
+ end
+
+ it 'allows to access array keys with symbols' do
+ expect(subject[:variables].first[:key]).to eq('VAR1')
+ end
+ end
+
describe '.try_megabytes_to_bytes' do
context 'when the size can be converted to megabytes' do
it 'returns the size in megabytes' do
diff --git a/spec/lib/gitlab/wiki_file_finder_spec.rb b/spec/lib/gitlab/wiki_file_finder_spec.rb
index 7abe92a5a2b..3102f628de9 100644
--- a/spec/lib/gitlab/wiki_file_finder_spec.rb
+++ b/spec/lib/gitlab/wiki_file_finder_spec.rb
@@ -4,12 +4,11 @@ require 'spec_helper'
RSpec.describe Gitlab::WikiFileFinder do
describe '#find' do
- let(:project) { create(:project, :public, :wiki_repo) }
- let(:wiki) { build(:project_wiki, project: project) }
-
- before do
- wiki.create_page('Files/Title', 'Content')
- wiki.create_page('CHANGELOG', 'Files example')
+ let_it_be(:project) do
+ create(:project, :public, :wiki_repo).tap do |project|
+ project.wiki.create_page('Files/Title', 'Content')
+ project.wiki.create_page('CHANGELOG', 'Files example')
+ end
end
it_behaves_like 'file finder' do
diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb
index 040f70236c6..dd57cd7980e 100644
--- a/spec/lib/marginalia_spec.rb
+++ b/spec/lib/marginalia_spec.rb
@@ -89,21 +89,7 @@ RSpec.describe 'Marginalia spec' do
end
end
- describe 'for ActionMailer delivery jobs' do
- # We need to ensure that this runs through Sidekiq to take
- # advantage of the middleware. There is a Rails bug that means we
- # have to do some extra steps to make this happen:
- # https://github.com/rails/rails/issues/37270#issuecomment-553927324
- around do |example|
- descendants = ActiveJob::Base.descendants + [ActiveJob::Base]
- descendants.each(&:disable_test_adapter)
- ActiveJob::Base.queue_adapter = :sidekiq
-
- example.run
-
- descendants.each { |a| a.queue_adapter = :test }
- end
-
+ describe 'for ActionMailer delivery jobs', :sidekiq_mailers do
let(:delivery_job) { MarginaliaTestMailer.first_user.deliver_later }
let(:recorded) do
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
index 4b374452c0a..006f4f603b6 100644
--- a/spec/lib/object_storage/direct_upload_spec.rb
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -136,16 +136,6 @@ RSpec.describe ObjectStorage::DirectUpload do
end
end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(use_workhorse_s3_client: false)
- end
-
- it 'does not enable Workhorse client' do
- expect(subject[:UseWorkhorseClient]).to be false
- end
- end
-
context 'when V2 signatures are used' do
before do
credentials[:aws_signature_version] = 2
diff --git a/spec/lib/security/ci_configuration/sast_build_action_spec.rb b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
index 5337e8d9c39..d93175249f5 100644
--- a/spec/lib/security/ci_configuration/sast_build_action_spec.rb
+++ b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
@@ -323,6 +323,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -342,6 +343,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -358,6 +360,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -380,6 +383,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -415,6 +419,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -439,6 +444,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -461,6 +467,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -484,6 +491,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -507,6 +515,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
diff --git a/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb b/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb
index f6181c6ef7a..146c60ffb6e 100644
--- a/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb
+++ b/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Security::CiConfiguration::SecretDetectionBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -62,6 +63,7 @@ RSpec.describe Security::CiConfiguration::SecretDetectionBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -111,6 +113,7 @@ RSpec.describe Security::CiConfiguration::SecretDetectionBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
include:
@@ -131,6 +134,7 @@ RSpec.describe Security::CiConfiguration::SecretDetectionBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
include:
diff --git a/spec/lib/serializers/symbolized_json_spec.rb b/spec/lib/serializers/symbolized_json_spec.rb
new file mode 100644
index 00000000000..b30fb074ddd
--- /dev/null
+++ b/spec/lib/serializers/symbolized_json_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Serializers::SymbolizedJson do
+ describe '.dump' do
+ let(:obj) { { key: "value" } }
+
+ subject { described_class.dump(obj) }
+
+ it 'returns a hash' do
+ is_expected.to eq(obj)
+ end
+ end
+
+ describe '.load' do
+ let(:data_string) { '{"key":"value","variables":[{"key":"VAR1","value":"VALUE1"}]}' }
+ let(:data_hash) { Gitlab::Json.parse(data_string) }
+
+ context 'when loading a hash' do
+ subject { described_class.load(data_hash) }
+
+ it 'decodes a string' do
+ is_expected.to be_a(Hash)
+ end
+
+ it 'allows to access with symbols' do
+ expect(subject[:key]).to eq('value')
+ expect(subject[:variables].first[:key]).to eq('VAR1')
+ end
+ end
+
+ context 'when loading a nil' do
+ subject { described_class.load(nil) }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb b/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
index 4a60dfde674..3149c316c63 100644
--- a/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
@@ -39,33 +39,22 @@ RSpec.describe Sidebars::Projects::Menus::DeploymentsMenu do
end
end
- shared_examples 'feature flag :sidebar_refactor disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- specify { is_expected.to be_nil }
- end
-
describe 'Feature Flags' do
let(:item_id) { :feature_flags }
it_behaves_like 'access rights checks'
- it_behaves_like 'feature flag :sidebar_refactor disabled'
end
describe 'Environments' do
let(:item_id) { :environments }
it_behaves_like 'access rights checks'
- it_behaves_like 'feature flag :sidebar_refactor disabled'
end
describe 'Releases' do
let(:item_id) { :releases }
it_behaves_like 'access rights checks'
- it_behaves_like 'feature flag :sidebar_refactor disabled'
end
end
end
diff --git a/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb b/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb
new file mode 100644
index 00000000000..2415598da9c
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::InfrastructureMenu do
+ let(:project) { build(:project) }
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project, show_cluster_hint: false) }
+
+ describe '#render?' do
+ subject { described_class.new(context) }
+
+ context 'when menu does not have any menu items' do
+ it 'returns false' do
+ allow(subject).to receive(:has_renderable_items?).and_return(false)
+
+ expect(subject.render?).to be false
+ end
+ end
+
+ context 'when menu has menu items' do
+ it 'returns true' do
+ expect(subject.render?).to be true
+ end
+ end
+ end
+
+ describe '#link' do
+ subject { described_class.new(context) }
+
+ context 'when Kubernetes menu item is visible' do
+ it 'menu link points to Kubernetes page' do
+ expect(subject.link).to eq find_menu_item(:kubernetes).link
+ end
+ end
+
+ context 'when Kubernetes menu item is not visible' do
+ before do
+ subject.renderable_items.delete(find_menu_item(:kubernetes))
+ end
+
+ it 'menu link points to Serverless page' do
+ expect(subject.link).to eq find_menu_item(:serverless).link
+ end
+
+ context 'when Serverless menu is not visible' do
+ before do
+ subject.renderable_items.delete(find_menu_item(:serverless))
+ end
+
+ it 'menu link points to Terraform page' do
+ expect(subject.link).to eq find_menu_item(:terraform).link
+ end
+ end
+ end
+
+ def find_menu_item(menu_item)
+ subject.renderable_items.find { |i| i.item_id == menu_item }
+ end
+ end
+
+ describe 'Menu Items' do
+ subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
+
+ shared_examples 'access rights checks' do
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'Kubernetes' do
+ let(:item_id) { :kubernetes }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Serverless' do
+ let(:item_id) { :serverless }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Terraform' do
+ let(:item_id) { :terraform }
+
+ it_behaves_like 'access rights checks'
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/issues_menu_spec.rb b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
index ac62cd7594a..e5d486bbe8f 100644
--- a/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
@@ -65,22 +65,4 @@ RSpec.describe Sidebars::Projects::Menus::IssuesMenu do
end
end
end
-
- describe 'Menu Items' do
- subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
-
- describe 'Labels' do
- let(:item_id) { :labels }
-
- specify { is_expected.to be_nil }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- specify { is_expected.not_to be_nil }
- end
- end
- end
end
diff --git a/spec/lib/sidebars/projects/menus/labels_menu_spec.rb b/spec/lib/sidebars/projects/menus/labels_menu_spec.rb
deleted file mode 100644
index e1420f9e61b..00000000000
--- a/spec/lib/sidebars/projects/menus/labels_menu_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Projects::Menus::LabelsMenu do
- let(:project) { build(:project) }
- let(:user) { project.owner }
- let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
-
- subject { described_class.new(context) }
-
- it 'does not contain any sub menu' do
- expect(subject.has_items?).to eq false
- end
-
- describe '#render?' do
- let(:issues_enabled) { true }
-
- before do
- allow(project).to receive(:issues_enabled?).and_return(issues_enabled)
- end
-
- context 'when feature flag :sidebar_refactor is enabled' do
- let(:issues_enabled) { false }
-
- it 'returns false' do
- expect(subject.render?).to be_falsey
- end
- end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- context 'when user can read labels' do
- context 'when issues feature is enabled' do
- it 'returns false' do
- expect(subject.render?).to be_falsey
- end
- end
-
- context 'when issues feature is disabled' do
- let(:issues_enabled) { false }
-
- it 'returns true' do
- expect(subject.render?).to be_truthy
- end
- end
- end
-
- context 'when user cannot read labels' do
- let(:user) { nil }
-
- it 'returns false' do
- expect(subject.render?).to be_falsey
- end
- end
- end
- end
-end
diff --git a/spec/lib/sidebars/projects/menus/members_menu_spec.rb b/spec/lib/sidebars/projects/menus/members_menu_spec.rb
deleted file mode 100644
index dcc085c2957..00000000000
--- a/spec/lib/sidebars/projects/menus/members_menu_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Projects::Menus::MembersMenu do
- let(:project) { build(:project) }
- let(:user) { project.owner }
- let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
-
- subject { described_class.new(context) }
-
- describe '#render?' do
- it 'returns false' do
- expect(subject.render?).to eq false
- end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it 'returns true' do
- expect(subject.render?).to eq true
- end
-
- context 'when user cannot access members' do
- let(:user) { nil }
-
- it 'returns false' do
- expect(subject.render?).to eq false
- end
- end
- end
- end
-end
diff --git a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
index 93618fa3321..381842be5ab 100644
--- a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
@@ -41,43 +41,30 @@ RSpec.describe Sidebars::Projects::Menus::MonitorMenu do
it 'returns "Monitor"' do
expect(subject.title).to eq 'Monitor'
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'returns "Operations"' do
- stub_feature_flags(sidebar_refactor: false)
-
- expect(subject.title).to eq 'Operations'
- end
- end
end
describe '#extra_container_html_options' do
it 'returns "shortcuts-monitor"' do
expect(subject.extra_container_html_options).to eq(class: 'shortcuts-monitor')
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'returns "shortcuts-operations"' do
- stub_feature_flags(sidebar_refactor: false)
-
- expect(subject.extra_container_html_options).to eq(class: 'shortcuts-operations')
- end
- end
end
describe '#link' do
- context 'when metrics dashboard is visible' do
- it 'returns link to the metrics dashboard page' do
- expect(subject.link).to include('/-/environments/metrics')
- end
+ let(:foo_path) { '/foo_path'}
+
+ let(:foo_menu) do
+ ::Sidebars::MenuItem.new(
+ title: 'foo',
+ link: foo_path,
+ active_routes: {},
+ item_id: :foo
+ )
end
- context 'when metrics dashboard is not visible' do
- it 'returns link to the feature flags page' do
- project.project_feature.update!(operations_access_level: Featurable::DISABLED)
+ it 'returns first visible item link' do
+ subject.insert_element_before(subject.renderable_items, subject.renderable_items.first.item_id, foo_menu)
- expect(subject.link).to include('/-/feature_flags')
- end
+ expect(subject.link).to eq foo_path
end
end
@@ -130,76 +117,6 @@ RSpec.describe Sidebars::Projects::Menus::MonitorMenu do
it_behaves_like 'access rights checks'
end
- describe 'Serverless' do
- let(:item_id) { :serverless }
-
- specify { is_expected.to be_nil }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it_behaves_like 'access rights checks'
- end
- end
-
- describe 'Terraform' do
- let(:item_id) { :terraform }
-
- specify { is_expected.to be_nil }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it_behaves_like 'access rights checks'
- end
- end
-
- describe 'Kubernetes' do
- let(:item_id) { :kubernetes }
-
- specify { is_expected.to be_nil }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it_behaves_like 'access rights checks'
- end
- end
-
- describe 'Environments' do
- let(:item_id) { :environments }
-
- specify { is_expected.to be_nil }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it_behaves_like 'access rights checks'
- end
- end
-
- describe 'Feature Flags' do
- let(:item_id) { :feature_flags }
-
- specify { is_expected.to be_nil }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it_behaves_like 'access rights checks'
- end
- end
-
describe 'Product Analytics' do
let(:item_id) { :product_analytics }
diff --git a/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb b/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
index 748796bc7ee..7e8d0ab0518 100644
--- a/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Sidebars::Projects::Menus::ProjectInformationMenu do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be_with_reload(:project) { create(:project, :repository) }
let(:user) { project.owner }
let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
@@ -12,59 +12,36 @@ RSpec.describe Sidebars::Projects::Menus::ProjectInformationMenu do
subject { described_class.new(context).container_html_options }
specify { is_expected.to match(hash_including(class: 'shortcuts-project-information has-sub-items')) }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- specify { is_expected.to match(hash_including(class: 'shortcuts-project rspec-project-link has-sub-items')) }
- end
end
describe 'Menu Items' do
subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
- describe 'Releases' do
- let(:item_id) { :releases }
+ describe 'Labels' do
+ let(:item_id) { :labels }
- specify { is_expected.to be_nil }
+ specify { is_expected.not_to be_nil }
- context 'when feature flag :sidebar_refactor is disabled' do
+ context 'when merge requests are disabled' do
before do
- stub_feature_flags(sidebar_refactor: false)
+ project.project_feature.update_attribute(:merge_requests_access_level, Featurable::DISABLED)
end
- context 'when project repository is empty' do
- it 'does not include releases menu item' do
- allow(project).to receive(:empty_repo?).and_return(true)
+ specify { is_expected.not_to be_nil }
+ end
- is_expected.to be_nil
- end
+ context 'when issues are disabled' do
+ before do
+ project.project_feature.update_attribute(:issues_access_level, Featurable::DISABLED)
end
- context 'when project repository is not empty' do
- context 'when user can download code' do
- specify { is_expected.not_to be_nil }
- end
-
- context 'when user cannot download code' do
- let(:user) { nil }
-
- specify { is_expected.to be_nil }
- end
- end
+ specify { is_expected.not_to be_nil }
end
- end
-
- describe 'Labels' do
- let(:item_id) { :labels }
-
- specify { is_expected.not_to be_nil }
- context 'when feature flag :sidebar_refactor is disabled' do
+ context 'when merge requests and issues are disabled' do
before do
- stub_feature_flags(sidebar_refactor: false)
+ project.project_feature.update_attribute(:merge_requests_access_level, Featurable::DISABLED)
+ project.project_feature.update_attribute(:issues_access_level, Featurable::DISABLED)
end
specify { is_expected.to be_nil }
@@ -76,10 +53,8 @@ RSpec.describe Sidebars::Projects::Menus::ProjectInformationMenu do
specify { is_expected.not_to be_nil }
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
+ describe 'when the user does not have access' do
+ let(:user) { nil }
specify { is_expected.to be_nil }
end
diff --git a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
index f84d458a2e1..5040ef9b0ff 100644
--- a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
@@ -11,13 +11,5 @@ RSpec.describe Sidebars::Projects::Menus::ScopeMenu do
subject { described_class.new(context).container_html_options }
specify { is_expected.to match(hash_including(class: 'shortcuts-project rspec-project-link')) }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- specify { is_expected.to eq(aria: { label: project.name }) }
- end
end
end
diff --git a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
index 6817f0e6ed6..9b79614db20 100644
--- a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
@@ -99,14 +99,6 @@ RSpec.describe Sidebars::Projects::Menus::SettingsMenu do
specify { expect(subject.title).to eq 'Monitor' }
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- specify { expect(subject.title).to eq 'Operations' }
- end
-
describe 'when the user does not have access' do
let(:user) { nil }
@@ -159,14 +151,6 @@ RSpec.describe Sidebars::Projects::Menus::SettingsMenu do
specify { is_expected.not_to be_nil }
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- specify { is_expected.to be_nil }
- end
-
describe 'when the user does not have access' do
let(:user) { nil }
diff --git a/spec/mailers/emails/admin_notification_spec.rb b/spec/mailers/emails/admin_notification_spec.rb
new file mode 100644
index 00000000000..90381eb8ffd
--- /dev/null
+++ b/spec/mailers/emails/admin_notification_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Emails::AdminNotification do
+ it 'adds email methods to Notify' do
+ subject.instance_methods.each do |email_method|
+ expect(Notify).to be_respond_to(email_method)
+ end
+ end
+end
diff --git a/spec/mailers/emails/releases_spec.rb b/spec/mailers/emails/releases_spec.rb
index 287971d35a8..d1d7f5e6d6a 100644
--- a/spec/mailers/emails/releases_spec.rb
+++ b/spec/mailers/emails/releases_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe Emails::Releases do
let(:release) { create(:release, project: project, description: "Attachment: [Test file](#{upload_path})") }
it 'renders absolute links' do
- is_expected.to have_body_text(%Q(<a href="#{project.web_url}#{upload_path}" data-link="true" class="gfm">Test file</a>))
+ is_expected.to have_body_text(%Q(<a href="#{project.web_url}#{upload_path}" data-canonical-src="#{upload_path}" data-link="true" class="gfm">Test file</a>))
end
end
end
diff --git a/spec/mailers/emails/service_desk_spec.rb b/spec/mailers/emails/service_desk_spec.rb
index 995e6c006cd..28011456a66 100644
--- a/spec/mailers/emails/service_desk_spec.rb
+++ b/spec/mailers/emails/service_desk_spec.rb
@@ -115,16 +115,6 @@ RSpec.describe Emails::ServiceDesk do
end
end
- shared_examples 'notification with metric event' do |event_type|
- it 'adds metric event' do
- metric_transaction = double('Gitlab::Metrics::WebTransaction', increment: true, observe: true)
- allow(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
- expect(metric_transaction).to receive(:add_event).with(event_type)
-
- subject.content_type
- end
- end
-
describe '.service_desk_thank_you_email' do
let_it_be(:reply_in_subject) { true }
let_it_be(:default_text) do
@@ -134,7 +124,6 @@ RSpec.describe Emails::ServiceDesk do
subject { ServiceEmailClass.service_desk_thank_you_email(issue.id) }
it_behaves_like 'read template from repository', 'thank_you'
- it_behaves_like 'notification with metric event', :service_desk_thank_you_email
context 'handling template markdown' do
context 'with a simple text' do
@@ -175,7 +164,6 @@ RSpec.describe Emails::ServiceDesk do
subject { ServiceEmailClass.service_desk_new_note_email(issue.id, note.id, email) }
it_behaves_like 'read template from repository', 'new_note'
- it_behaves_like 'notification with metric event', :service_desk_new_note_email
context 'handling template markdown' do
context 'with a simple text' do
@@ -211,7 +199,7 @@ RSpec.describe Emails::ServiceDesk do
let_it_be(:note) { create(:note_on_issue, noteable: issue, project: project, note: "a new comment with [file](#{upload_path})") }
let(:template_content) { 'some text %{ NOTE_TEXT }' }
- let(:expected_body) { %Q(some text a new comment with <a href="#{project.web_url}#{upload_path}" data-link="true" class="gfm">file</a>) }
+ let(:expected_body) { %Q(some text a new comment with <a href="#{project.web_url}#{upload_path}" data-canonical-src="#{upload_path}" data-link="true" class="gfm">file</a>) }
it_behaves_like 'handle template content', 'new_note'
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 8ee88776107..ae956adf563 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -1609,6 +1609,32 @@ RSpec.describe Notify do
end
end
end
+
+ describe 'admin notification' do
+ let(:example_site_path) { root_path }
+ let(:user) { create(:user) }
+
+ subject { @email = described_class.send_admin_notification(user.id, 'Admin announcement', 'Text') }
+
+ it 'is sent as the author' do
+ sender = subject.header[:from].addrs[0]
+ expect(sender.display_name).to eq("GitLab")
+ expect(sender.address).to eq(gitlab_sender)
+ end
+
+ it 'is sent to recipient' do
+ is_expected.to deliver_to user.email
+ end
+
+ it 'has the correct subject' do
+ is_expected.to have_subject 'Admin announcement'
+ end
+
+ it 'includes unsubscribe link' do
+ unsubscribe_link = "http://localhost/unsubscribes/#{Base64.urlsafe_encode64(user.email)}"
+ is_expected.to have_body_text(unsubscribe_link)
+ end
+ end
end
describe 'confirmation if email changed' do
@@ -1969,6 +1995,19 @@ RSpec.describe Notify do
end
end
+ describe 'in product marketing', :mailer do
+ let_it_be(:group) { create(:group) }
+
+ let(:mail) { ActionMailer::Base.deliveries.last }
+
+ it 'does not raise error' do
+ described_class.in_product_marketing_email(user.id, group.id, :trial, 0).deliver
+
+ expect(mail.subject).to eq('Go farther with GitLab')
+ expect(mail.body.parts.first.to_s).to include('Start a GitLab Ultimate trial today in less than one minute, no credit card required.')
+ end
+ end
+
def expect_sender(user)
sender = subject.header[:from].addrs[0]
expect(sender.display_name).to eq("#{user.name} (@#{user.username})")
diff --git a/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb b/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
index 2999332509a..dad95760306 100644
--- a/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
+++ b/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
@@ -14,11 +14,11 @@ RSpec.describe MigrateIssueTrackersData do
}
end
- let!(:jira_service) do
+ let!(:jira_integration) do
services.create!(type: 'JiraService', properties: properties, category: 'issue_tracker')
end
- let!(:jira_service_nil) do
+ let!(:jira_integration_nil) do
services.create!(type: 'JiraService', properties: nil, category: 'issue_tracker')
end
@@ -26,11 +26,11 @@ RSpec.describe MigrateIssueTrackersData do
services.create!(type: 'BugzillaService', properties: properties, category: 'issue_tracker')
end
- let!(:youtrack_service) do
+ let!(:youtrack_integration) do
services.create!(type: 'YoutrackService', properties: properties, category: 'issue_tracker')
end
- let!(:youtrack_service_empty) do
+ let!(:youtrack_integration_empty) do
services.create!(type: 'YoutrackService', properties: '', category: 'issue_tracker')
end
@@ -55,8 +55,8 @@ RSpec.describe MigrateIssueTrackersData do
freeze_time do
migrate!
- expect(migration_name).to be_scheduled_delayed_migration(3.minutes, jira_service.id, bugzilla_integration.id)
- expect(migration_name).to be_scheduled_delayed_migration(6.minutes, youtrack_service.id, gitlab_service.id)
+ expect(migration_name).to be_scheduled_delayed_migration(3.minutes, jira_integration.id, bugzilla_integration.id)
+ expect(migration_name).to be_scheduled_delayed_migration(6.minutes, youtrack_integration.id, gitlab_service.id)
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
end
end
diff --git a/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb b/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb
index 5516e2af3f1..cf8bc608483 100644
--- a/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb
+++ b/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb
@@ -14,11 +14,11 @@ RSpec.describe RescheduleMigrateIssueTrackersData do
}
end
- let!(:jira_service) do
+ let!(:jira_integration) do
services.create!(id: 10, type: 'JiraService', properties: properties, category: 'issue_tracker')
end
- let!(:jira_service_nil) do
+ let!(:jira_integration_nil) do
services.create!(id: 11, type: 'JiraService', properties: nil, category: 'issue_tracker')
end
@@ -26,11 +26,11 @@ RSpec.describe RescheduleMigrateIssueTrackersData do
services.create!(id: 12, type: 'BugzillaService', properties: properties, category: 'issue_tracker')
end
- let!(:youtrack_service) do
+ let!(:youtrack_integration) do
services.create!(id: 13, type: 'YoutrackService', properties: properties, category: 'issue_tracker')
end
- let!(:youtrack_service_empty) do
+ let!(:youtrack_integration_empty) do
services.create!(id: 14, type: 'YoutrackService', properties: '', category: 'issue_tracker')
end
@@ -56,8 +56,8 @@ RSpec.describe RescheduleMigrateIssueTrackersData do
freeze_time do
migrate!
- expect(migration_name).to be_scheduled_delayed_migration(3.minutes, jira_service.id, bugzilla_integration.id)
- expect(migration_name).to be_scheduled_delayed_migration(6.minutes, youtrack_service.id, gitlab_service.id)
+ expect(migration_name).to be_scheduled_delayed_migration(3.minutes, jira_integration.id, bugzilla_integration.id)
+ expect(migration_name).to be_scheduled_delayed_migration(6.minutes, youtrack_integration.id, gitlab_service.id)
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
end
end
diff --git a/spec/migrations/20200728080250_replace_unique_index_on_cycle_analytics_stages_spec.rb b/spec/migrations/20200728080250_replace_unique_index_on_cycle_analytics_stages_spec.rb
index 761168ae609..a632065946d 100644
--- a/spec/migrations/20200728080250_replace_unique_index_on_cycle_analytics_stages_spec.rb
+++ b/spec/migrations/20200728080250_replace_unique_index_on_cycle_analytics_stages_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!('replace_unique_index_on_cycle_analytics_stages')
-RSpec.describe ReplaceUniqueIndexOnCycleAnalyticsStages, :migration, schema: 20200728080250 do
+RSpec.describe ReplaceUniqueIndexOnCycleAnalyticsStages, :migration, schema: 20200727142337 do
let(:namespaces) { table(:namespaces) }
let(:group_value_streams) { table(:analytics_cycle_analytics_group_value_streams) }
let(:group_stages) { table(:analytics_cycle_analytics_group_stages) }
diff --git a/spec/migrations/20210610153556_delete_legacy_operations_feature_flags_spec.rb b/spec/migrations/20210610153556_delete_legacy_operations_feature_flags_spec.rb
new file mode 100644
index 00000000000..4f621d0670c
--- /dev/null
+++ b/spec/migrations/20210610153556_delete_legacy_operations_feature_flags_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!('delete_legacy_operations_feature_flags')
+
+RSpec.describe DeleteLegacyOperationsFeatureFlags do
+ let(:namespace) { table(:namespaces).create!(name: 'foo', path: 'bar') }
+ let(:project) { table(:projects).create!(namespace_id: namespace.id) }
+ let(:issue) { table(:issues).create!(id: 123, project_id: project.id) }
+ let(:operations_feature_flags) { table(:operations_feature_flags) }
+ let(:operations_feature_flag_scopes) { table(:operations_feature_flag_scopes) }
+ let(:operations_strategies) { table(:operations_strategies) }
+ let(:operations_scopes) { table(:operations_scopes) }
+ let(:operations_feature_flags_issues) { table(:operations_feature_flags_issues) }
+
+ it 'correctly deletes legacy feature flags' do
+ # Legacy version of a feature flag - dropped support in GitLab 14.0.
+ legacy_flag = operations_feature_flags.create!(project_id: project.id, version: 1, name: 'flag_a', active: true, iid: 1)
+ operations_feature_flag_scopes.create!(feature_flag_id: legacy_flag.id, active: true)
+ operations_feature_flags_issues.create!(feature_flag_id: legacy_flag.id, issue_id: issue.id)
+ # New version of a feature flag.
+ new_flag = operations_feature_flags.create!(project_id: project.id, version: 2, name: 'flag_b', active: true, iid: 2)
+ new_strategy = operations_strategies.create!(feature_flag_id: new_flag.id, name: 'default')
+ operations_scopes.create!(strategy_id: new_strategy.id, environment_scope: '*')
+ operations_feature_flags_issues.create!(feature_flag_id: new_flag.id, issue_id: issue.id)
+
+ expect(operations_feature_flags.all.pluck(:version)).to contain_exactly(1, 2)
+ expect(operations_feature_flag_scopes.count).to eq(1)
+ expect(operations_strategies.count).to eq(1)
+ expect(operations_scopes.count).to eq(1)
+ expect(operations_feature_flags_issues.all.pluck(:feature_flag_id)).to contain_exactly(legacy_flag.id, new_flag.id)
+
+ migrate!
+
+ # Legacy flag is deleted.
+ expect(operations_feature_flags.all.pluck(:version)).to contain_exactly(2)
+ # The associated entries of the legacy flag are deleted too.
+ expect(operations_feature_flag_scopes.count).to eq(0)
+ # The associated entries of the new flag stay instact.
+ expect(operations_strategies.count).to eq(1)
+ expect(operations_scopes.count).to eq(1)
+ expect(operations_feature_flags_issues.all.pluck(:feature_flag_id)).to contain_exactly(new_flag.id)
+ end
+end
diff --git a/spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb b/spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb
new file mode 100644
index 00000000000..fd664d99f06
--- /dev/null
+++ b/spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!('cascade_delete_freeze_periods')
+
+RSpec.describe CascadeDeleteFreezePeriods do
+ let(:namespace) { table(:namespaces).create!(name: 'deploy_freeze', path: 'deploy_freeze') }
+ let(:project) { table(:projects).create!(id: 1, namespace_id: namespace.id) }
+ let(:freeze_periods) { table(:ci_freeze_periods) }
+
+ describe "#up" do
+ it 'allows for a project to be deleted' do
+ freeze_periods.create!(id: 1, project_id: project.id, freeze_start: '5 * * * *', freeze_end: '6 * * * *', cron_timezone: 'UTC')
+ migrate!
+
+ project.delete
+
+ expect(freeze_periods.where(project_id: project.id).count).to be_zero
+ end
+ end
+end
diff --git a/spec/migrations/20210708130419_reschedule_merge_request_diff_users_background_migration_spec.rb b/spec/migrations/20210708130419_reschedule_merge_request_diff_users_background_migration_spec.rb
new file mode 100644
index 00000000000..9cc454662f9
--- /dev/null
+++ b/spec/migrations/20210708130419_reschedule_merge_request_diff_users_background_migration_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration! 'reschedule_merge_request_diff_users_background_migration'
+
+RSpec.describe RescheduleMergeRequestDiffUsersBackgroundMigration, :migration do
+ let(:migration) { described_class.new }
+
+ describe '#up' do
+ before do
+ allow(described_class::MergeRequestDiff)
+ .to receive(:minimum)
+ .with(:id)
+ .and_return(42)
+
+ allow(described_class::MergeRequestDiff)
+ .to receive(:maximum)
+ .with(:id)
+ .and_return(85_123)
+ end
+
+ it 'deletes existing background migration job records' do
+ args = [150_000, 300_000]
+
+ Gitlab::Database::BackgroundMigrationJob
+ .create!(class_name: described_class::MIGRATION_NAME, arguments: args)
+
+ migration.up
+
+ found = Gitlab::Database::BackgroundMigrationJob
+ .where(class_name: described_class::MIGRATION_NAME, arguments: args)
+ .count
+
+ expect(found).to eq(0)
+ end
+
+ it 'schedules the migrations in batches' do
+ expect(migration)
+ .to receive(:migrate_in)
+ .ordered
+ .with(2.minutes.to_i, described_class::MIGRATION_NAME, [42, 40_042])
+
+ expect(migration)
+ .to receive(:migrate_in)
+ .ordered
+ .with(4.minutes.to_i, described_class::MIGRATION_NAME, [40_042, 80_042])
+
+ expect(migration)
+ .to receive(:migrate_in)
+ .ordered
+ .with(6.minutes.to_i, described_class::MIGRATION_NAME, [80_042, 120_042])
+
+ migration.up
+ end
+
+ it 'creates rows to track the background migration jobs' do
+ expect(Gitlab::Database::BackgroundMigrationJob)
+ .to receive(:create!)
+ .ordered
+ .with(class_name: described_class::MIGRATION_NAME, arguments: [42, 40_042])
+
+ expect(Gitlab::Database::BackgroundMigrationJob)
+ .to receive(:create!)
+ .ordered
+ .with(class_name: described_class::MIGRATION_NAME, arguments: [40_042, 80_042])
+
+ expect(Gitlab::Database::BackgroundMigrationJob)
+ .to receive(:create!)
+ .ordered
+ .with(class_name: described_class::MIGRATION_NAME, arguments: [80_042, 120_042])
+
+ migration.up
+ end
+ end
+end
diff --git a/spec/migrations/active_record/schema_spec.rb b/spec/migrations/active_record/schema_spec.rb
index 8199f55f5fc..4a505c51a16 100644
--- a/spec/migrations/active_record/schema_spec.rb
+++ b/spec/migrations/active_record/schema_spec.rb
@@ -7,10 +7,10 @@ require 'spec_helper'
RSpec.describe ActiveRecord::Schema, schema: :latest do
let(:all_migrations) do
- migrations_paths = %w[db/migrate db/post_migrate]
- .map { |path| Rails.root.join(*path, '*') }
+ migrations_directories = %w[db/migrate db/post_migrate].map { |path| Rails.root.join(path).to_s }
+ migrations_paths = migrations_directories.map { |path| File.join(path, '*') }
- migrations = Dir[*migrations_paths]
+ migrations = Dir[*migrations_paths] - migrations_directories
migrations.map { |migration| File.basename(migration).split('_').first.to_i }.sort
end
diff --git a/spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb b/spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb
new file mode 100644
index 00000000000..fb62fc3ca02
--- /dev/null
+++ b/spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe AddPremiumAndUltimatePlanLimits, :migration do
+ shared_examples_for 'a migration that does not alter plans or plan limits' do
+ it do
+ expect { migrate! }.not_to change {
+ [
+ AddPremiumAndUltimatePlanLimits::Plan.count,
+ AddPremiumAndUltimatePlanLimits::PlanLimits.count
+ ]
+ }
+ end
+ end
+
+ describe '#up' do
+ context 'when not .com?' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return false
+ end
+
+ it_behaves_like 'a migration that does not alter plans or plan limits'
+ end
+
+ context 'when .com?' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return true
+ end
+
+ context 'when source plan does not exist' do
+ it_behaves_like 'a migration that does not alter plans or plan limits'
+ end
+
+ context 'when target plan does not exist' do
+ before do
+ table(:plans).create!(name: 'silver', title: 'Silver')
+ table(:plans).create!(name: 'gold', title: 'Gold')
+ end
+
+ it_behaves_like 'a migration that does not alter plans or plan limits'
+ end
+
+ context 'when source and target plans exist' do
+ let!(:silver) { table(:plans).create!(name: 'silver', title: 'Silver') }
+ let!(:gold) { table(:plans).create!(name: 'gold', title: 'Gold') }
+ let!(:premium) { table(:plans).create!(name: 'premium', title: 'Premium') }
+ let!(:ultimate) { table(:plans).create!(name: 'ultimate', title: 'Ultimate') }
+
+ let!(:silver_limits) { table(:plan_limits).create!(plan_id: silver.id, storage_size_limit: 111) }
+ let!(:gold_limits) { table(:plan_limits).create!(plan_id: gold.id, storage_size_limit: 222) }
+
+ context 'when target has plan limits' do
+ before do
+ table(:plan_limits).create!(plan_id: premium.id, storage_size_limit: 999)
+ table(:plan_limits).create!(plan_id: ultimate.id, storage_size_limit: 999)
+ end
+
+ it 'does not overwrite the limits' do
+ expect { migrate! }.not_to change {
+ [
+ AddPremiumAndUltimatePlanLimits::Plan.count,
+ AddPremiumAndUltimatePlanLimits::PlanLimits.pluck(:id, :storage_size_limit).sort
+ ]
+ }
+ end
+ end
+
+ context 'when target has no plan limits' do
+ it 'creates plan limits from the source plan' do
+ migrate!
+
+ expect(AddPremiumAndUltimatePlanLimits::PlanLimits.pluck(:plan_id, :storage_size_limit)).to match_array([
+ [silver.id, silver_limits.storage_size_limit],
+ [gold.id, gold_limits.storage_size_limit],
+ [premium.id, silver_limits.storage_size_limit],
+ [ultimate.id, gold_limits.storage_size_limit]
+ ])
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/add_upvotes_count_index_to_issues_spec.rb b/spec/migrations/add_upvotes_count_index_to_issues_spec.rb
new file mode 100644
index 00000000000..c04cb98a107
--- /dev/null
+++ b/spec/migrations/add_upvotes_count_index_to_issues_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddUpvotesCountIndexToIssues do
+ let(:migration_instance) { described_class.new }
+
+ describe '#up' do
+ it 'adds index' do
+ expect { migrate! }.to change { migration_instance.index_exists?(:issues, [:project_id, :upvotes_count], name: described_class::INDEX_NAME) }.from(false).to(true)
+ end
+ end
+
+ describe '#down' do
+ it 'removes index' do
+ migrate!
+
+ expect { schema_migrate_down! }.to change { migration_instance.index_exists?(:issues, [:project_id, :upvotes_count], name: described_class::INDEX_NAME) }.from(true).to(false)
+ end
+ end
+end
diff --git a/spec/migrations/backfill_issues_upvotes_count_spec.rb b/spec/migrations/backfill_issues_upvotes_count_spec.rb
new file mode 100644
index 00000000000..f2bea0edea0
--- /dev/null
+++ b/spec/migrations/backfill_issues_upvotes_count_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillIssuesUpvotesCount do
+ let(:migration) { described_class.new }
+ let(:issues) { table(:issues) }
+ let(:award_emoji) { table(:award_emoji) }
+
+ let!(:issue1) { issues.create! }
+ let!(:issue2) { issues.create! }
+ let!(:issue3) { issues.create! }
+ let!(:issue4) { issues.create! }
+ let!(:issue4_without_thumbsup) { issues.create! }
+
+ let!(:award_emoji1) { award_emoji.create!( name: 'thumbsup', awardable_type: 'Issue', awardable_id: issue1.id) }
+ let!(:award_emoji2) { award_emoji.create!( name: 'thumbsup', awardable_type: 'Issue', awardable_id: issue2.id) }
+ let!(:award_emoji3) { award_emoji.create!( name: 'thumbsup', awardable_type: 'Issue', awardable_id: issue3.id) }
+ let!(:award_emoji4) { award_emoji.create!( name: 'thumbsup', awardable_type: 'Issue', awardable_id: issue4.id) }
+
+ it 'correctly schedules background migrations' do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(described_class::MIGRATION).to be_scheduled_migration(issue1.id, issue2.id)
+ expect(described_class::MIGRATION).to be_scheduled_migration(issue3.id, issue4.id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/delete_template_services_duplicated_by_type_spec.rb b/spec/migrations/delete_template_services_duplicated_by_type_spec.rb
index b5a29436159..577fea984da 100644
--- a/spec/migrations/delete_template_services_duplicated_by_type_spec.rb
+++ b/spec/migrations/delete_template_services_duplicated_by_type_spec.rb
@@ -14,11 +14,11 @@ RSpec.describe DeleteTemplateServicesDuplicatedByType do
end
it 'deletes service templates duplicated by type except the one with the lowest ID' do
- jenkins_service_id = services.where(type: 'JenkinsService').order(:id).pluck(:id).first
- jira_service_id = services.where(type: 'JiraService').pluck(:id).first
+ jenkins_integration_id = services.where(type: 'JenkinsService').order(:id).pluck(:id).first
+ jira_integration_id = services.where(type: 'JiraService').pluck(:id).first
migrate!
- expect(services.pluck(:id)).to contain_exactly(jenkins_service_id, jira_service_id)
+ expect(services.pluck(:id)).to contain_exactly(jenkins_integration_id, jira_integration_id)
end
end
diff --git a/spec/migrations/fix_batched_migrations_old_format_job_arguments_spec.rb b/spec/migrations/fix_batched_migrations_old_format_job_arguments_spec.rb
new file mode 100644
index 00000000000..e15011d0dab
--- /dev/null
+++ b/spec/migrations/fix_batched_migrations_old_format_job_arguments_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+# rubocop:disable Style/WordArray
+RSpec.describe FixBatchedMigrationsOldFormatJobArguments do
+ let(:batched_background_migrations) { table(:batched_background_migrations) }
+
+ context 'when migrations with legacy job arguments exists' do
+ it 'updates job arguments to current format' do
+ legacy_events_migration = create_batched_migration('events', 'id', ['id', 'id_convert_to_bigint'])
+ legacy_push_event_payloads_migration = create_batched_migration('push_event_payloads', 'event_id', ['event_id', 'event_id_convert_to_bigint'])
+
+ migrate!
+
+ expect(legacy_events_migration.reload.job_arguments).to eq([['id'], ['id_convert_to_bigint']])
+ expect(legacy_push_event_payloads_migration.reload.job_arguments).to eq([['event_id'], ['event_id_convert_to_bigint']])
+ end
+ end
+
+ context 'when only migrations with current job arguments exists' do
+ it 'updates nothing' do
+ events_migration = create_batched_migration('events', 'id', [['id'], ['id_convert_to_bigint']])
+ push_event_payloads_migration = create_batched_migration('push_event_payloads', 'event_id', [['event_id'], ['event_id_convert_to_bigint']])
+
+ migrate!
+
+ expect(events_migration.reload.job_arguments).to eq([['id'], ['id_convert_to_bigint']])
+ expect(push_event_payloads_migration.reload.job_arguments).to eq([['event_id'], ['event_id_convert_to_bigint']])
+ end
+ end
+
+ context 'when migrations with both legacy and current job arguments exist' do
+ it 'updates nothing' do
+ legacy_events_migration = create_batched_migration('events', 'id', ['id', 'id_convert_to_bigint'])
+ events_migration = create_batched_migration('events', 'id', [['id'], ['id_convert_to_bigint']])
+ legacy_push_event_payloads_migration = create_batched_migration('push_event_payloads', 'event_id', ['event_id', 'event_id_convert_to_bigint'])
+ push_event_payloads_migration = create_batched_migration('push_event_payloads', 'event_id', [['event_id'], ['event_id_convert_to_bigint']])
+
+ migrate!
+
+ expect(legacy_events_migration.reload.job_arguments).to eq(['id', 'id_convert_to_bigint'])
+ expect(events_migration.reload.job_arguments).to eq([['id'], ['id_convert_to_bigint']])
+ expect(legacy_push_event_payloads_migration.reload.job_arguments).to eq(['event_id', 'event_id_convert_to_bigint'])
+ expect(push_event_payloads_migration.reload.job_arguments).to eq([['event_id'], ['event_id_convert_to_bigint']])
+ end
+ end
+
+ def create_batched_migration(table_name, column_name, job_arguments)
+ batched_background_migrations.create!(
+ max_value: 10,
+ batch_size: 10,
+ sub_batch_size: 10,
+ interval: 1,
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: table_name,
+ column_name: column_name,
+ job_arguments: job_arguments
+ )
+ end
+end
+# rubocop:enable Style/WordArray
diff --git a/spec/migrations/re_schedule_latest_pipeline_id_population_spec.rb b/spec/migrations/re_schedule_latest_pipeline_id_population_spec.rb
new file mode 100644
index 00000000000..354a0896ac9
--- /dev/null
+++ b/spec/migrations/re_schedule_latest_pipeline_id_population_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ReScheduleLatestPipelineIdPopulation do
+ let(:namespaces) { table(:namespaces) }
+ let(:pipelines) { table(:ci_pipelines) }
+ let(:projects) { table(:projects) }
+ let(:project_settings) { table(:project_settings) }
+ let(:vulnerability_statistics) { table(:vulnerability_statistics) }
+
+ let(:letter_grade_a) { 0 }
+
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project_1) { projects.create!(namespace_id: namespace.id, name: 'Foo 1') }
+ let(:project_2) { projects.create!(namespace_id: namespace.id, name: 'Foo 2') }
+ let(:project_3) { projects.create!(namespace_id: namespace.id, name: 'Foo 3') }
+ let(:project_4) { projects.create!(namespace_id: namespace.id, name: 'Foo 4') }
+
+ before do
+ project_settings.create!(project_id: project_1.id, has_vulnerabilities: true)
+ project_settings.create!(project_id: project_2.id, has_vulnerabilities: true)
+ project_settings.create!(project_id: project_3.id)
+ project_settings.create!(project_id: project_4.id, has_vulnerabilities: true)
+
+ pipeline = pipelines.create!(project_id: project_2.id, ref: 'master', sha: 'adf43c3a')
+
+ vulnerability_statistics.create!(project_id: project_2.id, letter_grade: letter_grade_a, latest_pipeline_id: pipeline.id)
+ vulnerability_statistics.create!(project_id: project_4.id, letter_grade: letter_grade_a)
+
+ allow(Gitlab).to receive(:ee?).and_return(is_ee?)
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+ end
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ context 'when the installation is FOSS' do
+ let(:is_ee?) { false }
+
+ it 'does not schedule any background job' do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to be(0)
+ end
+ end
+
+ context 'when the installation is EE' do
+ let(:is_ee?) { true }
+
+ it 'schedules the background jobs' do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to be(2)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(described_class::DELAY_INTERVAL, project_1.id, project_1.id)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2 * described_class::DELAY_INTERVAL, project_4.id, project_4.id)
+ end
+ end
+end
diff --git a/spec/migrations/rename_services_to_integrations_spec.rb b/spec/migrations/rename_services_to_integrations_spec.rb
new file mode 100644
index 00000000000..812dd5efecb
--- /dev/null
+++ b/spec/migrations/rename_services_to_integrations_spec.rb
@@ -0,0 +1,255 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe RenameServicesToIntegrations do
+ let(:migration) { described_class.new }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:integrations) { table(:integrations) }
+ let(:services) { table(:services) }
+
+ before do
+ @namespace = namespaces.create!(name: 'foo', path: 'foo')
+ @project = projects.create!(namespace_id: @namespace.id)
+ end
+
+ RSpec.shared_examples 'a table (or view) with triggers' do
+ describe 'INSERT tracker trigger' do
+ it 'sets `has_external_issue_tracker` to true when active `issue_tracker` is inserted' do
+ expect do
+ subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+ end.to change { @project.reload.has_external_issue_tracker }.to(true)
+ end
+
+ it 'does not set `has_external_issue_tracker` to true when integration is for a different project' do
+ different_project = projects.create!(namespace_id: @namespace.id)
+
+ expect do
+ subject.create!(category: 'issue_tracker', active: true, project_id: different_project.id)
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+
+ it 'does not set `has_external_issue_tracker` to true when inactive `issue_tracker` is inserted' do
+ expect do
+ subject.create!(category: 'issue_tracker', active: false, project_id: @project.id)
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+
+ it 'does not set `has_external_issue_tracker` to true when a non-`issue tracker` active integration is inserted' do
+ expect do
+ subject.create!(category: 'my_type', active: true, project_id: @project.id)
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+ end
+
+ describe 'UPDATE tracker trigger' do
+ it 'sets `has_external_issue_tracker` to true when `issue_tracker` is made active' do
+ integration = subject.create!(category: 'issue_tracker', active: false, project_id: @project.id)
+
+ expect do
+ integration.update!(active: true)
+ end.to change { @project.reload.has_external_issue_tracker }.to(true)
+ end
+
+ it 'sets `has_external_issue_tracker` to false when `issue_tracker` is made inactive' do
+ integration = subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+
+ expect do
+ integration.update!(active: false)
+ end.to change { @project.reload.has_external_issue_tracker }.to(false)
+ end
+
+ it 'sets `has_external_issue_tracker` to false when `issue_tracker` is made inactive, and an inactive `issue_tracker` exists' do
+ subject.create!(category: 'issue_tracker', active: false, project_id: @project.id)
+ integration = subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+
+ expect do
+ integration.update!(active: false)
+ end.to change { @project.reload.has_external_issue_tracker }.to(false)
+ end
+
+ it 'does not change `has_external_issue_tracker` when `issue_tracker` is made inactive, if an active `issue_tracker` exists' do
+ subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+ integration = subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+
+ expect do
+ integration.update!(active: false)
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+
+ it 'does not change `has_external_issue_tracker` when integration is for a different project' do
+ different_project = projects.create!(namespace_id: @namespace.id)
+ integration = subject.create!(category: 'issue_tracker', active: false, project_id: different_project.id)
+
+ expect do
+ integration.update!(active: true)
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+ end
+
+ describe 'DELETE tracker trigger' do
+ it 'sets `has_external_issue_tracker` to false when `issue_tracker` is deleted' do
+ integration = subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+
+ expect do
+ integration.delete
+ end.to change { @project.reload.has_external_issue_tracker }.to(false)
+ end
+
+ it 'sets `has_external_issue_tracker` to false when `issue_tracker` is deleted, if an inactive `issue_tracker` still exists' do
+ subject.create!(category: 'issue_tracker', active: false, project_id: @project.id)
+ integration = subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+
+ expect do
+ integration.delete
+ end.to change { @project.reload.has_external_issue_tracker }.to(false)
+ end
+
+ it 'does not change `has_external_issue_tracker` when `issue_tracker` is deleted, if an active `issue_tracker` still exists' do
+ subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+ integration = subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
+
+ expect do
+ integration.delete
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+
+ it 'does not change `has_external_issue_tracker` when integration is for a different project' do
+ different_project = projects.create!(namespace_id: @namespace.id)
+ integration = subject.create!(category: 'issue_tracker', active: true, project_id: different_project.id)
+
+ expect do
+ integration.delete
+ end.not_to change { @project.reload.has_external_issue_tracker }
+ end
+ end
+
+ describe 'INSERT wiki trigger' do
+ it 'sets `has_external_wiki` to true when active `ExternalWikiService` is inserted' do
+ expect do
+ subject.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
+ end.to change { @project.reload.has_external_wiki }.to(true)
+ end
+
+ it 'does not set `has_external_wiki` to true when integration is for a different project' do
+ different_project = projects.create!(namespace_id: @namespace.id)
+
+ expect do
+ subject.create!(type: 'ExternalWikiService', active: true, project_id: different_project.id)
+ end.not_to change { @project.reload.has_external_wiki }
+ end
+
+ it 'does not set `has_external_wiki` to true when inactive `ExternalWikiService` is inserted' do
+ expect do
+ subject.create!(type: 'ExternalWikiService', active: false, project_id: @project.id)
+ end.not_to change { @project.reload.has_external_wiki }
+ end
+
+ it 'does not set `has_external_wiki` to true when active other integration is inserted' do
+ expect do
+ subject.create!(type: 'MyService', active: true, project_id: @project.id)
+ end.not_to change { @project.reload.has_external_wiki }
+ end
+ end
+
+ describe 'UPDATE wiki trigger' do
+ it 'sets `has_external_wiki` to true when `ExternalWikiService` is made active' do
+ integration = subject.create!(type: 'ExternalWikiService', active: false, project_id: @project.id)
+
+ expect do
+ integration.update!(active: true)
+ end.to change { @project.reload.has_external_wiki }.to(true)
+ end
+
+ it 'sets `has_external_wiki` to false when `ExternalWikiService` is made inactive' do
+ integration = subject.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
+
+ expect do
+ integration.update!(active: false)
+ end.to change { @project.reload.has_external_wiki }.to(false)
+ end
+
+ it 'does not change `has_external_wiki` when integration is for a different project' do
+ different_project = projects.create!(namespace_id: @namespace.id)
+ integration = subject.create!(type: 'ExternalWikiService', active: false, project_id: different_project.id)
+
+ expect do
+ integration.update!(active: true)
+ end.not_to change { @project.reload.has_external_wiki }
+ end
+ end
+
+ describe 'DELETE wiki trigger' do
+ it 'sets `has_external_wiki` to false when `ExternalWikiService` is deleted' do
+ integration = subject.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
+
+ expect do
+ integration.delete
+ end.to change { @project.reload.has_external_wiki }.to(false)
+ end
+
+ it 'does not change `has_external_wiki` when integration is for a different project' do
+ different_project = projects.create!(namespace_id: @namespace.id)
+ integration = subject.create!(type: 'ExternalWikiService', active: true, project_id: different_project.id)
+
+ expect do
+ integration.delete
+ end.not_to change { @project.reload.has_external_wiki }
+ end
+ end
+ end
+
+ RSpec.shared_examples 'a table (or view) without triggers' do
+ specify do
+ number_of_triggers = ActiveRecord::Base.connection
+ .execute("SELECT count(*) FROM information_schema.triggers WHERE event_object_table = '#{subject.table_name}'")
+ .first['count']
+
+ expect(number_of_triggers).to eq(0)
+ end
+ end
+
+ describe '#up' do
+ before do
+ # LOCK TABLE statements must be in a transaction
+ ActiveRecord::Base.transaction { migrate! }
+ end
+
+ context 'the integrations table' do
+ subject { integrations }
+
+ it_behaves_like 'a table (or view) with triggers'
+ end
+
+ context 'the services table' do
+ subject { services }
+
+ it_behaves_like 'a table (or view) without triggers'
+ end
+ end
+
+ describe '#down' do
+ before do
+ # LOCK TABLE statements must be in a transaction
+ ActiveRecord::Base.transaction do
+ migration.up
+ migration.down
+ end
+ end
+
+ context 'the services table' do
+ subject { services }
+
+ it_behaves_like 'a table (or view) with triggers'
+ end
+
+ context 'the integrations table' do
+ subject { integrations }
+
+ it_behaves_like 'a table (or view) without triggers'
+ end
+ end
+end
diff --git a/spec/migrations/reset_job_token_scope_enabled_spec.rb b/spec/migrations/reset_job_token_scope_enabled_spec.rb
new file mode 100644
index 00000000000..40dfe4de34b
--- /dev/null
+++ b/spec/migrations/reset_job_token_scope_enabled_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe ResetJobTokenScopeEnabled do
+ let(:settings) { table(:project_ci_cd_settings) }
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project_1) { projects.create!(name: 'proj-1', path: 'gitlab-org', namespace_id: namespace.id)}
+ let(:project_2) { projects.create!(name: 'proj-2', path: 'gitlab-org', namespace_id: namespace.id)}
+
+ before do
+ settings.create!(id: 1, project_id: project_1.id, job_token_scope_enabled: true)
+ settings.create!(id: 2, project_id: project_2.id, job_token_scope_enabled: false)
+ end
+
+ it 'migrates job_token_scope_enabled to be always false' do
+ expect { migrate! }
+ .to change { settings.where(job_token_scope_enabled: false).count }
+ .from(1).to(2)
+ end
+end
diff --git a/spec/migrations/schedule_backfill_draft_status_on_merge_requests_spec.rb b/spec/migrations/schedule_backfill_draft_status_on_merge_requests_spec.rb
new file mode 100644
index 00000000000..5a1c07d810f
--- /dev/null
+++ b/spec/migrations/schedule_backfill_draft_status_on_merge_requests_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe ScheduleBackfillDraftStatusOnMergeRequests, :sidekiq do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:merge_requests) { table(:merge_requests) }
+
+ let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
+ let(:project) { projects.create!(namespace_id: group.id) }
+
+ let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] }
+
+ def create_merge_request(params)
+ common_params = {
+ target_project_id: project.id,
+ target_branch: 'feature1',
+ source_branch: 'master'
+ }
+
+ merge_requests.create!(common_params.merge(params))
+ end
+
+ before do
+ draft_prefixes.each do |prefix|
+ (1..4).each do |n|
+ create_merge_request(
+ title: "#{prefix} This is a title",
+ draft: false,
+ state_id: n
+ )
+ end
+ end
+
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ it 'schedules BackfillDraftStatusOnMergeRequests background jobs' do
+ Sidekiq::Testing.fake! do
+ draft_mrs = Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests::MergeRequest.eligible
+
+ first_mr_id = draft_mrs.first.id
+ second_mr_id = draft_mrs.second.id
+
+ freeze_time do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(7)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(2.minutes, first_mr_id, first_mr_id)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(4.minutes, second_mr_id, second_mr_id)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/schedule_delete_orphaned_deployments_spec.rb b/spec/migrations/schedule_delete_orphaned_deployments_spec.rb
new file mode 100644
index 00000000000..618958a3d90
--- /dev/null
+++ b/spec/migrations/schedule_delete_orphaned_deployments_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe ScheduleDeleteOrphanedDeployments, :sidekiq, schema: 20210617161348 do
+ let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let!(:project) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:environment) { table(:environments).create!(name: 'production', slug: 'production', project_id: project.id) }
+ let(:background_migration_jobs) { table(:background_migration_jobs) }
+
+ before do
+ create_deployment!(environment.id, project.id)
+ create_deployment!(environment.id, project.id)
+ create_deployment!(environment.id, project.id)
+ create_deployment!(non_existing_record_id, project.id)
+ create_deployment!(non_existing_record_id, project.id)
+ create_deployment!(non_existing_record_id, project.id)
+ create_deployment!(non_existing_record_id, project.id)
+
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ it 'schedules DeleteOrphanedDeployments background jobs' do
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(7)
+ table(:deployments).find_each do |deployment|
+ expect(described_class::MIGRATION).to be_scheduled_migration(deployment.id, deployment.id)
+ end
+ end
+ end
+ end
+
+ def create_deployment!(environment_id, project_id)
+ table(:deployments).create!(
+ environment_id: environment_id,
+ project_id: project_id,
+ ref: 'master',
+ tag: false,
+ sha: 'x',
+ status: 1,
+ iid: table(:deployments).count + 1)
+ end
+end
diff --git a/spec/models/ability_spec.rb b/spec/models/ability_spec.rb
index 4bfa953df40..e131661602e 100644
--- a/spec/models/ability_spec.rb
+++ b/spec/models/ability_spec.rb
@@ -328,6 +328,69 @@ RSpec.describe Ability do
end
end
+ describe '.feature_flags_readable_by_user' do
+ context 'without a user' do
+ it 'returns no feature flags' do
+ feature_flag_1 = build(:operations_feature_flag)
+ feature_flag_2 = build(:operations_feature_flag, project: build(:project, :public))
+
+ feature_flags = described_class
+ .feature_flags_readable_by_user([feature_flag_1, feature_flag_2])
+
+ expect(feature_flags).to eq([])
+ end
+ end
+
+ context 'with a user' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:feature_flag) { create(:operations_feature_flag, project: project) }
+ let(:cross_project) { create(:project) }
+ let(:cross_project_feature_flag) { create(:operations_feature_flag, project: cross_project) }
+
+ let(:other_feature_flag) { create(:operations_feature_flag) }
+ let(:all_feature_flags) do
+ [feature_flag, cross_project_feature_flag, other_feature_flag]
+ end
+
+ subject(:readable_feature_flags) do
+ described_class.feature_flags_readable_by_user(all_feature_flags, user)
+ end
+
+ before do
+ project.add_developer(user)
+ cross_project.add_developer(user)
+ end
+
+ it 'returns feature flags visible to the user' do
+ expect(readable_feature_flags).to contain_exactly(feature_flag, cross_project_feature_flag)
+ end
+
+ context 'when a user cannot read cross project and a filter is passed' do
+ before do
+ allow(described_class).to receive(:allowed?).and_call_original
+ expect(described_class).to receive(:allowed?).with(user, :read_cross_project) { false }
+ end
+
+ subject(:readable_feature_flags) do
+ read_cross_project_filter = -> (feature_flags) do
+ feature_flags.select { |flag| flag.project == project }
+ end
+ described_class.feature_flags_readable_by_user(
+ all_feature_flags, user,
+ filters: { read_cross_project: read_cross_project_filter }
+ )
+ end
+
+ it 'returns only feature flags of the specified project without checking access on others' do
+ expect(described_class).not_to receive(:allowed?).with(user, :read_feature_flag, cross_project_feature_flag)
+
+ expect(readable_feature_flags).to contain_exactly(feature_flag)
+ end
+ end
+ end
+ end
+
describe '.project_disabled_features_rules' do
let(:project) { create(:project, :wiki_disabled) }
diff --git a/spec/models/abuse_report_spec.rb b/spec/models/abuse_report_spec.rb
index a97574fa524..e87996fc1f0 100644
--- a/spec/models/abuse_report_spec.rb
+++ b/spec/models/abuse_report_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe AbuseReport do
let_it_be(:report, reload: true) { create(:abuse_report) }
let_it_be(:user, reload: true) { create(:admin) }
+
subject { report }
it { expect(subject).to be_valid }
diff --git a/spec/models/alert_management/alert_spec.rb b/spec/models/alert_management/alert_spec.rb
index 80a45b1c1be..18d486740b8 100644
--- a/spec/models/alert_management/alert_spec.rb
+++ b/spec/models/alert_management/alert_spec.rb
@@ -100,6 +100,7 @@ RSpec.describe AlertManagement::Alert do
describe 'fingerprint' do
let_it_be(:fingerprint) { 'fingerprint' }
let_it_be(:project3, refind: true) { create(:project) }
+
let(:new_alert) { build(:alert_management_alert, fingerprint: fingerprint, project: project3) }
subject { new_alert }
diff --git a/spec/models/application_setting/term_spec.rb b/spec/models/application_setting/term_spec.rb
index 51a6027698f..d9efa597352 100644
--- a/spec/models/application_setting/term_spec.rb
+++ b/spec/models/application_setting/term_spec.rb
@@ -3,9 +3,7 @@
require 'spec_helper'
RSpec.describe ApplicationSetting::Term do
- describe 'validations' do
- it { is_expected.to validate_presence_of(:terms) }
- end
+ it { is_expected.to nullify_if_blank(:terms) }
describe '.latest' do
it 'finds the latest terms' do
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 4e72d558b52..80471a09bbd 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -134,6 +134,14 @@ RSpec.describe ApplicationSetting do
it { is_expected.to allow_value('disabled').for(:whats_new_variant) }
it { is_expected.not_to allow_value(nil).for(:whats_new_variant) }
+ it { is_expected.not_to allow_value(['']).for(:valid_runner_registrars) }
+ it { is_expected.not_to allow_value(['OBVIOUSLY_WRONG']).for(:valid_runner_registrars) }
+ it { is_expected.not_to allow_value(%w(project project)).for(:valid_runner_registrars) }
+ it { is_expected.not_to allow_value([nil]).for(:valid_runner_registrars) }
+ it { is_expected.not_to allow_value(nil).for(:valid_runner_registrars) }
+ it { is_expected.to allow_value([]).for(:valid_runner_registrars) }
+ it { is_expected.to allow_value(%w(project group)).for(:valid_runner_registrars) }
+
context 'help_page_documentation_base_url validations' do
it { is_expected.to allow_value(nil).for(:help_page_documentation_base_url) }
it { is_expected.to allow_value('https://docs.gitlab.com').for(:help_page_documentation_base_url) }
@@ -250,6 +258,19 @@ RSpec.describe ApplicationSetting do
it { is_expected.to allow_value(nil).for(:snowplow_collector_hostname) }
end
+ context 'when mailgun_events_enabled is enabled' do
+ before do
+ setting.mailgun_events_enabled = true
+ end
+
+ it { is_expected.to validate_presence_of(:mailgun_signing_key) }
+ it { is_expected.to validate_length_of(:mailgun_signing_key).is_at_most(255) }
+ end
+
+ context 'when mailgun_events_enabled is not enabled' do
+ it { is_expected.not_to validate_presence_of(:mailgun_signing_key) }
+ end
+
context "when user accepted let's encrypt terms of service" do
before do
expect do
diff --git a/spec/models/audit_event_spec.rb b/spec/models/audit_event_spec.rb
index bc603bc5ab6..4fba5fddc92 100644
--- a/spec/models/audit_event_spec.rb
+++ b/spec/models/audit_event_spec.rb
@@ -10,6 +10,71 @@ RSpec.describe AuditEvent do
end
end
+ describe 'callbacks' do
+ describe '#parallel_persist' do
+ shared_examples 'a parallel persisted field' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:column, :details, :expected_value) do
+ :value | nil | :value
+ nil | :value | :value
+ :value | :another_value | :value
+ nil | nil | nil
+ end
+
+ with_them do
+ let(:values) { { value: value, another_value: "#{value}88" } }
+
+ let(:audit_event) do
+ build(:audit_event, name => values[column], details: { name => values[details] })
+ end
+
+ it 'sets both values to be the same', :aggregate_failures do
+ audit_event.validate
+
+ expect(audit_event[name]).to eq(values[expected_value])
+ expect(audit_event.details[name]).to eq(values[expected_value])
+ end
+ end
+ end
+
+ context 'with author_name' do
+ let(:name) { :author_name }
+ let(:value) { 'Mary Poppins' }
+
+ it_behaves_like 'a parallel persisted field'
+ end
+
+ context 'with entity_path' do
+ let(:name) { :entity_path }
+ let(:value) { 'gitlab-org' }
+
+ it_behaves_like 'a parallel persisted field'
+ end
+
+ context 'with target_details' do
+ let(:name) { :target_details }
+ let(:value) { 'gitlab-org/gitlab' }
+
+ it_behaves_like 'a parallel persisted field'
+ end
+
+ context 'with target_type' do
+ let(:name) { :target_type }
+ let(:value) { 'Project' }
+
+ it_behaves_like 'a parallel persisted field'
+ end
+
+ context 'with target_id' do
+ let(:name) { :target_id }
+ let(:value) { 8 }
+
+ it_behaves_like 'a parallel persisted field'
+ end
+ end
+ end
+
it 'sanitizes custom_message in the details hash' do
audit_event = create(:project_audit_event, details: { target_id: 678, custom_message: '<strong>Arnold</strong>' })
diff --git a/spec/models/award_emoji_spec.rb b/spec/models/award_emoji_spec.rb
index f268408c095..ebd1441f901 100644
--- a/spec/models/award_emoji_spec.rb
+++ b/spec/models/award_emoji_spec.rb
@@ -119,6 +119,36 @@ RSpec.describe AwardEmoji do
end
end
+ describe 'bumping updated at' do
+ let(:note) { create(:note_on_issue) }
+ let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: note) }
+
+ it 'calls bump_updated_at on the note when saved' do
+ expect(note).to receive(:bump_updated_at)
+
+ award_emoji.save!
+ end
+
+ it 'calls bump_updated_at on the note when destroyed' do
+ expect(note).to receive(:bump_updated_at)
+
+ award_emoji.destroy!
+ end
+
+ context 'on another awardable' do
+ let(:issue) { create(:issue) }
+ let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: issue) }
+
+ it 'does not error out when saved' do
+ expect { award_emoji.save! }.not_to raise_error
+ end
+
+ it 'does not error out when destroy' do
+ expect { award_emoji.destroy! }.not_to raise_error
+ end
+ end
+ end
+
describe '.award_counts_for_user' do
let(:user) { create(:user) }
@@ -141,4 +171,43 @@ RSpec.describe AwardEmoji do
expect(awards).to eq('thumbsup' => 2)
end
end
+
+ describe 'updating upvotes_count' do
+ context 'on an issue' do
+ let(:issue) { create(:issue) }
+ let(:upvote) { build(:award_emoji, :upvote, user: build(:user), awardable: issue) }
+ let(:downvote) { build(:award_emoji, :downvote, user: build(:user), awardable: issue) }
+
+ it 'updates upvotes_count on the issue when saved' do
+ expect(issue).to receive(:update_column).with(:upvotes_count, 1).once
+
+ upvote.save!
+ downvote.save!
+ end
+
+ it 'updates upvotes_count on the issue when destroyed' do
+ expect(issue).to receive(:update_column).with(:upvotes_count, 0).once
+
+ upvote.destroy!
+ downvote.destroy!
+ end
+ end
+
+ context 'on another awardable' do
+ let(:merge_request) { create(:merge_request) }
+ let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: merge_request) }
+
+ it 'does not update upvotes_count on the merge_request when saved' do
+ expect(merge_request).not_to receive(:update_column)
+
+ award_emoji.save!
+ end
+
+ it 'does not update upvotes_count on the merge_request when destroyed' do
+ expect(merge_request).not_to receive(:update_column)
+
+ award_emoji.destroy!
+ end
+ end
+ end
end
diff --git a/spec/models/blob_viewer/markup_spec.rb b/spec/models/blob_viewer/markup_spec.rb
index 13b040d62d0..dae1b79dda2 100644
--- a/spec/models/blob_viewer/markup_spec.rb
+++ b/spec/models/blob_viewer/markup_spec.rb
@@ -24,15 +24,5 @@ RSpec.describe BlobViewer::Markup do
expect(subject.banzai_render_context.keys).to include(:rendered)
end
end
-
- context 'when cached_markdown_blob feature flag is disabled' do
- before do
- stub_feature_flags(cached_markdown_blob: false)
- end
-
- it 'does not set cache_key key' do
- expect(subject.banzai_render_context.keys).not_to include(:cache_key)
- end
- end
end
end
diff --git a/spec/models/bulk_import_spec.rb b/spec/models/bulk_import_spec.rb
index 1a7e1ed8119..4cfec6b20b7 100644
--- a/spec/models/bulk_import_spec.rb
+++ b/spec/models/bulk_import_spec.rb
@@ -15,4 +15,10 @@ RSpec.describe BulkImport, type: :model do
it { is_expected.to define_enum_for(:source_type).with_values(%i[gitlab]) }
end
+
+ describe '.all_human_statuses' do
+ it 'returns all human readable entity statuses' do
+ expect(described_class.all_human_statuses).to contain_exactly('created', 'started', 'finished', 'failed')
+ end
+ end
end
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index d1b7125a6e6..11a3e53dd16 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -134,4 +134,24 @@ RSpec.describe BulkImports::Entity, type: :model do
expect(entity.encoded_source_full_path).to eq(expected)
end
end
+
+ describe 'scopes' do
+ describe '.by_user_id' do
+ it 'returns entities associated with specified user' do
+ user = create(:user)
+ import = create(:bulk_import, user: user)
+ entity_1 = create(:bulk_import_entity, bulk_import: import)
+ entity_2 = create(:bulk_import_entity, bulk_import: import)
+ create(:bulk_import_entity)
+
+ expect(described_class.by_user_id(user.id)).to contain_exactly(entity_1, entity_2)
+ end
+ end
+ end
+
+ describe '.all_human_statuses' do
+ it 'returns all human readable entity statuses' do
+ expect(described_class.all_human_statuses).to contain_exactly('created', 'started', 'finished', 'failed')
+ end
+ end
end
diff --git a/spec/models/bulk_imports/file_transfer/group_config_spec.rb b/spec/models/bulk_imports/file_transfer/group_config_spec.rb
index 4611a00b0cc..1e566a7b042 100644
--- a/spec/models/bulk_imports/file_transfer/group_config_spec.rb
+++ b/spec/models/bulk_imports/file_transfer/group_config_spec.rb
@@ -34,6 +34,10 @@ RSpec.describe BulkImports::FileTransfer::GroupConfig do
it 'returns a list of top level exportable relations' do
expect(subject.portable_relations).to include('milestones', 'badges', 'boards', 'labels')
end
+
+ it 'does not include skipped relations' do
+ expect(subject.portable_relations).not_to include('members')
+ end
end
describe '#top_relation_tree' do
diff --git a/spec/models/bulk_imports/file_transfer/project_config_spec.rb b/spec/models/bulk_imports/file_transfer/project_config_spec.rb
index 2995556a58d..db037528ec1 100644
--- a/spec/models/bulk_imports/file_transfer/project_config_spec.rb
+++ b/spec/models/bulk_imports/file_transfer/project_config_spec.rb
@@ -34,6 +34,10 @@ RSpec.describe BulkImports::FileTransfer::ProjectConfig do
it 'returns a list of top level exportable relations' do
expect(subject.portable_relations).to include('issues', 'labels', 'milestones', 'merge_requests')
end
+
+ it 'does not include skipped relations' do
+ expect(subject.portable_relations).not_to include('project_members', 'group_members')
+ end
end
describe '#top_relation_tree' do
diff --git a/spec/models/chat_name_spec.rb b/spec/models/chat_name_spec.rb
index 4d77bd53158..9ed00003ac1 100644
--- a/spec/models/chat_name_spec.rb
+++ b/spec/models/chat_name_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ChatName do
let_it_be(:chat_name) { create(:chat_name) }
+
subject { chat_name }
it { is_expected.to belong_to(:integration) }
diff --git a/spec/models/chat_team_spec.rb b/spec/models/chat_team_spec.rb
index 08fd05324aa..2e8cdb7a316 100644
--- a/spec/models/chat_team_spec.rb
+++ b/spec/models/chat_team_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ChatTeam do
let_it_be(:chat_team) { create(:chat_team) }
+
subject { chat_team }
# Associations
diff --git a/spec/models/ci/build_dependencies_spec.rb b/spec/models/ci/build_dependencies_spec.rb
index 331ba9953ca..cd330324840 100644
--- a/spec/models/ci/build_dependencies_spec.rb
+++ b/spec/models/ci/build_dependencies_spec.rb
@@ -55,6 +55,24 @@ RSpec.describe Ci::BuildDependencies do
end
end
end
+
+ context 'when needs refer to jobs from the same stage' do
+ let(:job) do
+ create(:ci_build,
+ pipeline: pipeline,
+ name: 'dag_job',
+ scheduling_type: :dag,
+ stage_idx: 2,
+ stage: 'deploy'
+ )
+ end
+
+ before do
+ create(:ci_build_need, build: job, name: 'staging', artifacts: true)
+ end
+
+ it { is_expected.to contain_exactly(staging) }
+ end
end
describe 'jobs from specified dependencies' do
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 62dec522161..0c344270e0b 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -39,6 +39,34 @@ RSpec.describe Ci::Build do
it { is_expected.to delegate_method(:merge_request_ref?).to(:pipeline) }
it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }
+ shared_examples 'calling proper BuildFinishedWorker' do
+ context 'when ci_build_finished_worker_namespace_changed feature flag enabled' do
+ before do
+ stub_feature_flags(ci_build_finished_worker_namespace_changed: build.project)
+ end
+
+ it 'calls Ci::BuildFinishedWorker' do
+ expect(Ci::BuildFinishedWorker).to receive(:perform_async)
+ expect(::BuildFinishedWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ context 'when ci_build_finished_worker_namespace_changed feature flag disabled' do
+ before do
+ stub_feature_flags(ci_build_finished_worker_namespace_changed: false)
+ end
+
+ it 'calls ::BuildFinishedWorker' do
+ expect(::BuildFinishedWorker).to receive(:perform_async)
+ expect(Ci::BuildFinishedWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+ end
+
describe 'associations' do
it 'has a bidirectional relationship with projects' do
expect(described_class.reflect_on_association(:project).has_inverse?).to eq(:builds)
@@ -384,7 +412,7 @@ RSpec.describe Ci::Build do
context 'when there is a queuing entry already present' do
before do
- ::Ci::PendingBuild.create!(build: build, project: build.project)
+ create(:ci_pending_build, build: build, project: build.project)
end
it 'does not raise an error' do
@@ -396,7 +424,7 @@ RSpec.describe Ci::Build do
context 'when both failure scenario happen at the same time' do
before do
::Ci::Build.find(build.id).update_column(:lock_version, 100)
- ::Ci::PendingBuild.create!(build: build, project: build.project)
+ create(:ci_pending_build, build: build, project: build.project)
end
it 'raises stale object error exception' do
@@ -478,7 +506,7 @@ RSpec.describe Ci::Build do
let(:build) { create(:ci_build, :pending) }
before do
- ::Ci::PendingBuild.create!(build: build, project: build.project)
+ create(:ci_pending_build, build: build, project: build.project)
::Ci::Build.find(build.id).update_column(:lock_version, 100)
end
@@ -1323,6 +1351,7 @@ RSpec.describe Ci::Build do
end
it_behaves_like 'avoid deadlock'
+ it_behaves_like 'calling proper BuildFinishedWorker'
it 'transits deployment status to success' do
subject
@@ -1335,6 +1364,7 @@ RSpec.describe Ci::Build do
let(:event) { :drop! }
it_behaves_like 'avoid deadlock'
+ it_behaves_like 'calling proper BuildFinishedWorker'
it 'transits deployment status to failed' do
subject
@@ -1359,6 +1389,7 @@ RSpec.describe Ci::Build do
let(:event) { :cancel! }
it_behaves_like 'avoid deadlock'
+ it_behaves_like 'calling proper BuildFinishedWorker'
it 'transits deployment status to canceled' do
subject
@@ -1966,6 +1997,23 @@ RSpec.describe Ci::Build do
end
end
+ describe '#tag_list' do
+ let_it_be(:build) { create(:ci_build, tag_list: ['tag']) }
+
+ context 'when tags are preloaded' do
+ it 'does not trigger queries' do
+ build_with_tags = described_class.eager_load_tags.id_in([build]).to_a.first
+
+ expect { build_with_tags.tag_list }.not_to exceed_all_query_limit(0)
+ expect(build_with_tags.tag_list).to eq(['tag'])
+ end
+ end
+
+ context 'when tags are not preloaded' do
+ it { expect(described_class.find(build.id).tag_list).to eq(['tag']) }
+ end
+ end
+
describe '#has_tags?' do
context 'when build has tags' do
subject { create(:ci_build, tag_list: ['tag']) }
@@ -2155,15 +2203,15 @@ RSpec.describe Ci::Build do
end
it 'contains options' do
- expect(build.options).to eq(options.stringify_keys)
+ expect(build.options).to eq(options.symbolize_keys)
end
- it 'allows to access with keys' do
+ it 'allows to access with symbolized keys' do
expect(build.options[:image]).to eq('ruby:2.7')
end
- it 'allows to access with strings' do
- expect(build.options['image']).to eq('ruby:2.7')
+ it 'rejects access with string keys' do
+ expect(build.options['image']).to be_nil
end
context 'when ci_build_metadata_config is set' do
@@ -2172,7 +2220,7 @@ RSpec.describe Ci::Build do
end
it 'persist data in build metadata' do
- expect(build.metadata.read_attribute(:config_options)).to eq(options.stringify_keys)
+ expect(build.metadata.read_attribute(:config_options)).to eq(options.symbolize_keys)
end
it 'does not persist data in build' do
@@ -4476,26 +4524,12 @@ RSpec.describe Ci::Build do
it { is_expected.to include(:upload_multiple_artifacts) }
end
- context 'when artifacts exclude is defined and the is feature enabled' do
+ context 'when artifacts exclude is defined' do
let(:options) do
{ artifacts: { exclude: %w[something] } }
end
- context 'when a feature flag is enabled' do
- before do
- stub_feature_flags(ci_artifacts_exclude: true)
- end
-
- it { is_expected.to include(:artifacts_exclude) }
- end
-
- context 'when a feature flag is disabled' do
- before do
- stub_feature_flags(ci_artifacts_exclude: false)
- end
-
- it { is_expected.not_to include(:artifacts_exclude) }
- end
+ it { is_expected.to include(:artifacts_exclude) }
end
end
@@ -4712,9 +4746,9 @@ RSpec.describe Ci::Build do
describe '#read_metadata_attribute' do
let(:build) { create(:ci_build, :degenerated) }
- let(:build_options) { { "key" => "build" } }
- let(:metadata_options) { { "key" => "metadata" } }
- let(:default_options) { { "key" => "default" } }
+ let(:build_options) { { key: "build" } }
+ let(:metadata_options) { { key: "metadata" } }
+ let(:default_options) { { key: "default" } }
subject { build.send(:read_metadata_attribute, :options, :config_options, default_options) }
@@ -4749,8 +4783,8 @@ RSpec.describe Ci::Build do
describe '#write_metadata_attribute' do
let(:build) { create(:ci_build, :degenerated) }
- let(:options) { { "key" => "new options" } }
- let(:existing_options) { { "key" => "existing options" } }
+ let(:options) { { key: "new options" } }
+ let(:existing_options) { { key: "existing options" } }
subject { build.send(:write_metadata_attribute, :options, :config_options, options) }
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index a16453f3d01..b6e128c317c 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -152,14 +152,6 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state, :clean_git
context 'default value' do
it { expect(subject).to eq('redis_trace_chunks') }
-
- context 'when dedicated_redis_trace_chunks is disabled' do
- before do
- stub_feature_flags(dedicated_redis_trace_chunks: false)
- end
-
- it { expect(subject).to eq('redis') }
- end
end
end
diff --git a/spec/models/ci/build_trace_chunks/fog_spec.rb b/spec/models/ci/build_trace_chunks/fog_spec.rb
index d9e9533fb26..21dab6fad60 100644
--- a/spec/models/ci/build_trace_chunks/fog_spec.rb
+++ b/spec/models/ci/build_trace_chunks/fog_spec.rb
@@ -102,6 +102,57 @@ RSpec.describe Ci::BuildTraceChunks::Fog do
end
end
+ describe '#append_data' do
+ let(:initial_data) { (+'😺').force_encoding(Encoding::ASCII_8BIT) }
+ let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: initial_data) }
+ let(:data) { data_store.data(model) }
+
+ context 'when ci_job_trace_force_encode is enabled' do
+ it 'appends ASCII data' do
+ data_store.append_data(model, +'hello world', 4)
+
+ expect(data.encoding).to eq(Encoding::ASCII_8BIT)
+ expect(data.force_encoding(Encoding::UTF_8)).to eq('😺hello world')
+ end
+
+ it 'appends UTF-8 data' do
+ data_store.append_data(model, +'Résumé', 4)
+
+ expect(data.encoding).to eq(Encoding::ASCII_8BIT)
+ expect(data.force_encoding(Encoding::UTF_8)).to eq("😺Résumé")
+ end
+
+ context 'when initial data is UTF-8' do
+ let(:initial_data) { +'😺' }
+
+ it 'appends ASCII data' do
+ data_store.append_data(model, +'hello world', 4)
+
+ expect(data.encoding).to eq(Encoding::ASCII_8BIT)
+ expect(data.force_encoding(Encoding::UTF_8)).to eq('😺hello world')
+ end
+ end
+ end
+
+ context 'when ci_job_trace_force_encode is disabled' do
+ before do
+ stub_feature_flags(ci_job_trace_force_encode: false)
+ end
+
+ it 'appends ASCII data' do
+ data_store.append_data(model, +'hello world', 4)
+
+ expect(data.encoding).to eq(Encoding::ASCII_8BIT)
+ expect(data.force_encoding(Encoding::UTF_8)).to eq('😺hello world')
+ end
+
+ it 'throws an exception when appending UTF-8 data' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_exception).and_call_original
+ expect { data_store.append_data(model, +'Résumé', 4) }.to raise_exception(Encoding::CompatibilityError)
+ end
+ end
+ end
+
describe '#delete_data' do
subject { data_store.delete_data(model) }
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 582639b105e..a94a1dd284a 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -268,6 +268,29 @@ RSpec.describe Ci::JobArtifact do
end
end
+ describe '.for_project' do
+ it 'returns artifacts only for given project(s)', :aggregate_failures do
+ artifact1 = create(:ci_job_artifact)
+ artifact2 = create(:ci_job_artifact)
+ create(:ci_job_artifact)
+
+ expect(described_class.for_project(artifact1.project)).to match_array([artifact1])
+ expect(described_class.for_project([artifact1.project, artifact2.project])).to match_array([artifact1, artifact2])
+ end
+ end
+
+ describe 'created_in_time_range' do
+ it 'returns artifacts created in given time range', :aggregate_failures do
+ artifact1 = create(:ci_job_artifact, created_at: 1.day.ago)
+ artifact2 = create(:ci_job_artifact, created_at: 1.month.ago)
+ artifact3 = create(:ci_job_artifact, created_at: 1.year.ago)
+
+ expect(described_class.created_in_time_range(from: 1.week.ago)).to match_array([artifact1])
+ expect(described_class.created_in_time_range(to: 1.week.ago)).to match_array([artifact2, artifact3])
+ expect(described_class.created_in_time_range(from: 2.months.ago, to: 1.week.ago)).to match_array([artifact2])
+ end
+ end
+
describe 'callbacks' do
describe '#schedule_background_upload' do
subject { create(:ci_job_artifact, :archive) }
diff --git a/spec/models/ci/job_token/project_scope_link_spec.rb b/spec/models/ci/job_token/project_scope_link_spec.rb
index d18495b9312..dd6a75dfd89 100644
--- a/spec/models/ci/job_token/project_scope_link_spec.rb
+++ b/spec/models/ci/job_token/project_scope_link_spec.rb
@@ -65,4 +65,22 @@ RSpec.describe Ci::JobToken::ProjectScopeLink do
expect(subject).to contain_exactly(target_link)
end
end
+
+ describe '.for_source_and_target' do
+ let_it_be(:link) { create(:ci_job_token_project_scope_link, source_project: project) }
+
+ subject { described_class.for_source_and_target(project, target_project) }
+
+ context 'when link is found' do
+ let(:target_project) { link.target_project }
+
+ it { is_expected.to eq(link) }
+ end
+
+ context 'when link is not found' do
+ let(:target_project) { create(:project) }
+
+ it { is_expected.to be_nil }
+ end
+ end
end
diff --git a/spec/models/ci/job_token/scope_spec.rb b/spec/models/ci/job_token/scope_spec.rb
index c731a2634f5..4b95adf8476 100644
--- a/spec/models/ci/job_token/scope_spec.rb
+++ b/spec/models/ci/job_token/scope_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Ci::JobToken::Scope do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
let(:scope) { described_class.new(project) }
@@ -29,7 +29,7 @@ RSpec.describe Ci::JobToken::Scope do
end
end
- describe 'includes?' do
+ describe '#includes?' do
subject { scope.includes?(target_project) }
context 'when param is the project defining the scope' do
diff --git a/spec/models/ci/pending_build_spec.rb b/spec/models/ci/pending_build_spec.rb
index c1d4f4b0a5e..b64f3999232 100644
--- a/spec/models/ci/pending_build_spec.rb
+++ b/spec/models/ci/pending_build_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Ci::PendingBuild do
context 'when another queuing entry exists for given build' do
before do
- described_class.create!(build: build, project: project, protected: false)
+ create(:ci_pending_build, build: build, project: project)
end
it 'returns a build id as a result' do
@@ -29,5 +29,61 @@ RSpec.describe Ci::PendingBuild do
expect(result.rows.dig(0, 0)).to eq build.id
end
end
+
+ context 'when project does not have shared runner' do
+ it 'sets instance_runners_enabled to false' do
+ described_class.upsert_from_build!(build)
+
+ expect(described_class.last.instance_runners_enabled).to be_falsey
+ end
+ end
+
+ context 'when project has shared runner' do
+ let_it_be(:runner) { create(:ci_runner, :instance) }
+
+ context 'when ci_pending_builds_maintain_shared_runners_data is enabled' do
+ it 'sets instance_runners_enabled to true' do
+ described_class.upsert_from_build!(build)
+
+ expect(described_class.last.instance_runners_enabled).to be_truthy
+ end
+
+ context 'when project is about to be deleted' do
+ before do
+ build.project.update!(pending_delete: true)
+ end
+
+ it 'sets instance_runners_enabled to false' do
+ described_class.upsert_from_build!(build)
+
+ expect(described_class.last.instance_runners_enabled).to be_falsey
+ end
+ end
+
+ context 'when builds are disabled' do
+ before do
+ build.project.project_feature.update!(builds_access_level: false)
+ end
+
+ it 'sets instance_runners_enabled to false' do
+ described_class.upsert_from_build!(build)
+
+ expect(described_class.last.instance_runners_enabled).to be_falsey
+ end
+ end
+ end
+
+ context 'when ci_pending_builds_maintain_shared_runners_data is disabled' do
+ before do
+ stub_feature_flags(ci_pending_builds_maintain_shared_runners_data: false)
+ end
+
+ it 'sets instance_runners_enabled to false' do
+ described_class.upsert_from_build!(build)
+
+ expect(described_class.last.instance_runners_enabled).to be_falsey
+ end
+ end
+ end
end
end
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index cf73460bf1e..8de3ebb18b9 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -123,8 +123,15 @@ RSpec.describe Ci::PipelineSchedule do
'*/5 * * * *' | '0 * * * *' | (1.day.in_minutes / 1.hour.in_minutes).to_i | true | Time.zone.local(2021, 5, 27, 11, 0) | Time.zone.local(2021, 5, 27, 12, 0)
'*/5 * * * *' | '0 * * * *' | (1.day.in_minutes / 2.hours.in_minutes).to_i | true | Time.zone.local(2021, 5, 27, 11, 0) | Time.zone.local(2021, 5, 27, 12, 5)
'*/5 * * * *' | '0 1 * * *' | (1.day.in_minutes / 1.hour.in_minutes).to_i | true | Time.zone.local(2021, 5, 27, 1, 0) | Time.zone.local(2021, 5, 28, 1, 0)
- '*/5 * * * *' | '0 1 * * *' | (1.day.in_minutes / 1.hour.in_minutes).to_i | true | Time.zone.local(2021, 5, 27, 1, 0) | Time.zone.local(2021, 5, 28, 1, 0)
+ '*/5 * * * *' | '0 1 * * *' | (1.day.in_minutes / 10).to_i | true | Time.zone.local(2021, 5, 27, 1, 0) | Time.zone.local(2021, 5, 28, 1, 0)
+ '*/5 * * * *' | '0 1 * * *' | (1.day.in_minutes / 8).to_i | true | Time.zone.local(2021, 5, 27, 1, 0) | Time.zone.local(2021, 5, 28, 1, 0)
'*/5 * * * *' | '0 1 1 * *' | (1.day.in_minutes / 1.hour.in_minutes).to_i | true | Time.zone.local(2021, 5, 1, 1, 0) | Time.zone.local(2021, 6, 1, 1, 0)
+ '*/9 * * * *' | '0 1 1 * *' | (1.day.in_minutes / 1.hour.in_minutes).to_i | true | Time.zone.local(2021, 5, 1, 1, 9) | Time.zone.local(2021, 6, 1, 1, 0)
+ '*/9 * * * *' | '0 1 1 * *' | (1.day.in_minutes / 1.hour.in_minutes).to_i | false | Time.zone.local(2021, 5, 1, 1, 9) | Time.zone.local(2021, 6, 1, 1, 9)
+ '*/5 * * * *' | '59 14 * * *' | (1.day.in_minutes / 1.hour.in_minutes).to_i | true | Time.zone.local(2021, 5, 1, 15, 0) | Time.zone.local(2021, 5, 2, 15, 0)
+ '*/5 * * * *' | '59 14 * * *' | (1.day.in_minutes / 1.hour.in_minutes).to_i | false | Time.zone.local(2021, 5, 1, 15, 0) | Time.zone.local(2021, 5, 2, 15, 0)
+ '*/5 * * * *' | '45 21 1 2 *' | (1.day.in_minutes / 5).to_i | true | Time.zone.local(2021, 2, 1, 21, 45) | Time.zone.local(2022, 2, 1, 21, 45)
+ '*/5 * * * *' | '45 21 1 2 *' | (1.day.in_minutes / 5).to_i | false | Time.zone.local(2021, 2, 1, 21, 45) | Time.zone.local(2022, 2, 1, 21, 50)
end
with_them do
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 26fc4b140c1..74a476a6422 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -11,6 +11,10 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
let_it_be(:namespace) { create_default(:namespace).freeze }
let_it_be(:project) { create_default(:project, :repository).freeze }
+ it 'paginates 15 pipelines per page' do
+ expect(described_class.default_per_page).to eq(15)
+ end
+
it_behaves_like 'having unique enum values'
it { is_expected.to belong_to(:project) }
@@ -2768,6 +2772,41 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
expect(control2.count).to eq(control1.count + extra_update_queries + extra_generic_commit_status_validation_queries)
end
end
+
+ context 'when the first try cannot get an exclusive lock' do
+ let(:retries) { 1 }
+
+ subject(:cancel_running) { pipeline.cancel_running(retries: retries) }
+
+ before do
+ build = create(:ci_build, :running, pipeline: pipeline)
+
+ allow(pipeline.cancelable_statuses).to receive(:find_in_batches).and_yield([build])
+
+ call_count = 0
+ allow(build).to receive(:cancel).and_wrap_original do |original, *args|
+ call_count >= retries ? raise(ActiveRecord::StaleObjectError) : original.call(*args)
+
+ call_count += 1
+ end
+ end
+
+ it 'retries again and cancels the build' do
+ cancel_running
+
+ expect(latest_status).to contain_exactly('canceled')
+ end
+
+ context 'when the retries parameter is 0' do
+ let(:retries) { 0 }
+
+ it 'raises error' do
+ expect do
+ cancel_running
+ end.to raise_error(ActiveRecord::StaleObjectError)
+ end
+ end
+ end
end
describe '#retry_failed' do
@@ -2854,7 +2893,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
it 'builds hook data once' do
- create(:pipelines_email_service)
+ create(:pipelines_email_integration)
expect(Gitlab::DataBuilder::Pipeline).to receive(:build).once.and_call_original
@@ -3772,16 +3811,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it 'can generate a codequality report' do
expect(subject).to be_truthy
end
-
- context 'when feature is disabled' do
- before do
- stub_feature_flags(codequality_mr_diff: false)
- end
-
- it 'can not generate a codequality report' do
- expect(subject).to be_falsey
- end
- end
end
end
@@ -4355,16 +4384,14 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- describe '#base_and_ancestors' do
- subject { pipeline.base_and_ancestors(same_project: same_project) }
+ describe '#self_and_upstreams' do
+ subject(:self_and_upstreams) { pipeline.self_and_upstreams }
let_it_be(:pipeline) { create(:ci_pipeline, :created) }
- let(:same_project) { false }
-
context 'when pipeline is not child nor parent' do
it 'returns just the pipeline itself' do
- expect(subject).to contain_exactly(pipeline)
+ expect(self_and_upstreams).to contain_exactly(pipeline)
end
end
@@ -4378,7 +4405,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
it 'returns parent and self' do
- expect(subject).to contain_exactly(parent, pipeline)
+ expect(self_and_upstreams).to contain_exactly(parent, pipeline)
end
end
@@ -4390,7 +4417,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
it 'returns self' do
- expect(subject).to contain_exactly(pipeline)
+ expect(self_and_upstreams).to contain_exactly(pipeline)
end
end
@@ -4406,11 +4433,11 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
it 'returns self, parent and ancestor' do
- expect(subject).to contain_exactly(ancestor, parent, pipeline)
+ expect(self_and_upstreams).to contain_exactly(ancestor, parent, pipeline)
end
end
- context 'when pipeline is a triggered pipeline' do
+ context 'when pipeline is a triggered pipeline from a different project' do
let_it_be(:pipeline) { create(:ci_pipeline, :created) }
let(:upstream) { create(:ci_pipeline, project: create(:project)) }
@@ -4419,18 +4446,41 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
create_source_pipeline(upstream, pipeline)
end
- context 'same_project: false' do
- it 'returns upstream and self' do
- expect(subject).to contain_exactly(pipeline, upstream)
- end
+ it 'returns upstream and self' do
+ expect(self_and_upstreams).to contain_exactly(pipeline, upstream)
end
+ end
+ end
- context 'same_project: true' do
- let(:same_project) { true }
+ describe '#self_and_ancestors' do
+ subject(:self_and_ancestors) { pipeline.self_and_ancestors }
- it 'returns self' do
- expect(subject).to contain_exactly(pipeline)
- end
+ context 'when pipeline is child' do
+ let(:pipeline) { create(:ci_pipeline, :created) }
+ let(:parent) { create(:ci_pipeline) }
+ let(:sibling) { create(:ci_pipeline) }
+
+ before do
+ create_source_pipeline(parent, pipeline)
+ create_source_pipeline(parent, sibling)
+ end
+
+ it 'returns parent and self' do
+ expect(self_and_ancestors).to contain_exactly(parent, pipeline)
+ end
+ end
+
+ context 'when pipeline is a triggered pipeline from a different project' do
+ let_it_be(:pipeline) { create(:ci_pipeline, :created) }
+
+ let(:upstream) { create(:ci_pipeline, project: create(:project)) }
+
+ before do
+ create_source_pipeline(upstream, pipeline)
+ end
+
+ it 'returns only self' do
+ expect(self_and_ancestors).to contain_exactly(pipeline)
end
end
end
@@ -4468,15 +4518,18 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
context 'when the parent pipeline has a dependent upstream pipeline' do
- let!(:upstream_bridge) do
- create_bridge(create(:ci_pipeline, project: create(:project)), parent_pipeline, true)
- end
+ let(:upstream_pipeline) { create(:ci_pipeline, project: create(:project)) }
+ let!(:upstream_bridge) { create_bridge(upstream_pipeline, parent_pipeline, true) }
+
+ let(:upstream_upstream_pipeline) { create(:ci_pipeline, project: create(:project)) }
+ let!(:upstream_upstream_bridge) { create_bridge(upstream_upstream_pipeline, upstream_pipeline, true) }
it 'marks all source bridges as pending' do
reset_bridge
expect(bridge.reload).to be_pending
expect(upstream_bridge.reload).to be_pending
+ expect(upstream_upstream_bridge.reload).to be_pending
end
end
end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 61f80bd43b1..ffc8ab4cf8b 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -1086,6 +1086,18 @@ RSpec.describe Ci::Runner do
expect(matchers.map(&:tag_list)).to match_array([%w[tag1 tag2], %w[tag3 tag4]])
end
end
+
+ context 'with runner_ids' do
+ before do
+ create_list(:ci_runner, 2)
+ end
+
+ it 'includes runner_ids' do
+ expect(matchers.size).to eq(1)
+
+ expect(matchers.first.runner_ids).to match_array(described_class.all.pluck(:id))
+ end
+ end
end
describe '#runner_matcher' do
@@ -1095,6 +1107,8 @@ RSpec.describe Ci::Runner do
subject(:matcher) { runner.runner_matcher }
+ it { expect(matcher.runner_ids).to eq([runner.id]) }
+
it { expect(matcher.runner_type).to eq(runner.runner_type) }
it { expect(matcher.public_projects_minutes_cost_factor).to eq(runner.public_projects_minutes_cost_factor) }
diff --git a/spec/models/ci/running_build_spec.rb b/spec/models/ci/running_build_spec.rb
index 589e5a86f4d..629861e35b8 100644
--- a/spec/models/ci/running_build_spec.rb
+++ b/spec/models/ci/running_build_spec.rb
@@ -21,10 +21,7 @@ RSpec.describe Ci::RunningBuild do
context 'when another queuing entry exists for given build' do
before do
- described_class.create!(build: build,
- project: project,
- runner: runner,
- runner_type: runner.runner_type)
+ create(:ci_running_build, build: build, project: project, runner: runner)
end
it 'returns a build id as a result' do
diff --git a/spec/models/clusters/integrations/prometheus_spec.rb b/spec/models/clusters/integrations/prometheus_spec.rb
index 680786189ad..e529c751889 100644
--- a/spec/models/clusters/integrations/prometheus_spec.rb
+++ b/spec/models/clusters/integrations/prometheus_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Clusters::Integrations::Prometheus do
let(:cluster) { create(:cluster, :with_installed_helm) }
- it 'deactivates prometheus_service' do
+ it 'deactivates prometheus_integration' do
expect(Clusters::Applications::DeactivateServiceWorker)
.to receive(:perform_async).with(cluster.id, 'prometheus')
@@ -35,7 +35,7 @@ RSpec.describe Clusters::Integrations::Prometheus do
let(:enabled) { true }
context 'when no change to enabled status' do
- it 'does not touch project services' do
+ it 'does not touch project integrations' do
integration # ensure integration exists before we set the expectations
expect(Clusters::Applications::DeactivateServiceWorker)
@@ -51,7 +51,7 @@ RSpec.describe Clusters::Integrations::Prometheus do
context 'when enabling' do
let(:enabled) { false }
- it 'deactivates prometheus_service' do
+ it 'deactivates prometheus_integration' do
expect(Clusters::Applications::ActivateServiceWorker)
.to receive(:perform_async).with(cluster.id, 'prometheus')
@@ -62,7 +62,7 @@ RSpec.describe Clusters::Integrations::Prometheus do
context 'when disabling' do
let(:enabled) { true }
- it 'activates prometheus_service' do
+ it 'activates prometheus_integration' do
expect(Clusters::Applications::DeactivateServiceWorker)
.to receive(:perform_async).with(cluster.id, 'prometheus')
diff --git a/spec/models/clusters/kubernetes_namespace_spec.rb b/spec/models/clusters/kubernetes_namespace_spec.rb
index 3b903fe34f9..e70cd15baca 100644
--- a/spec/models/clusters/kubernetes_namespace_spec.rb
+++ b/spec/models/clusters/kubernetes_namespace_spec.rb
@@ -62,6 +62,7 @@ RSpec.describe Clusters::KubernetesNamespace, type: :model do
describe 'namespace uniqueness validation' do
let_it_be(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+
let(:kubernetes_namespace) { build(:cluster_kubernetes_namespace, cluster: cluster, namespace: 'my-namespace') }
subject { kubernetes_namespace }
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index 8ffc198fc4d..63fe6923630 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Commit do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:personal_snippet) { create(:personal_snippet, :repository) }
let_it_be(:project_snippet) { create(:project_snippet, :repository) }
+
let(:commit) { project.commit }
describe 'modules' do
diff --git a/spec/models/compare_spec.rb b/spec/models/compare_spec.rb
index d395aa359e5..86bab569ab0 100644
--- a/spec/models/compare_spec.rb
+++ b/spec/models/compare_spec.rb
@@ -13,7 +13,15 @@ RSpec.describe Compare do
let(:raw_compare) { Gitlab::Git::Compare.new(project.repository.raw_repository, start_commit.id, head_commit.id) }
- subject { described_class.new(raw_compare, project) }
+ subject(:compare) { described_class.new(raw_compare, project) }
+
+ describe '#cache_key' do
+ subject { compare.cache_key }
+
+ it { is_expected.to include(project) }
+ it { is_expected.to include(:compare) }
+ it { is_expected.to include(compare.diff_refs.hash) }
+ end
describe '#start_commit' do
it 'returns raw compare base commit' do
diff --git a/spec/models/concerns/approvable_base_spec.rb b/spec/models/concerns/approvable_base_spec.rb
index a9e944cf220..c7ea2631a24 100644
--- a/spec/models/concerns/approvable_base_spec.rb
+++ b/spec/models/concerns/approvable_base_spec.rb
@@ -59,4 +59,25 @@ RSpec.describe ApprovableBase do
end
end
end
+
+ describe '.not_approved_by_users_with_usernames' do
+ subject { MergeRequest.not_approved_by_users_with_usernames([user.username, user2.username]) }
+
+ let!(:merge_request2) { create(:merge_request) }
+ let!(:merge_request3) { create(:merge_request) }
+ let!(:merge_request4) { create(:merge_request) }
+ let(:user2) { create(:user) }
+ let(:user3) { create(:user) }
+
+ before do
+ create(:approval, merge_request: merge_request, user: user)
+ create(:approval, merge_request: merge_request2, user: user2)
+ create(:approval, merge_request: merge_request2, user: user3)
+ create(:approval, merge_request: merge_request4, user: user3)
+ end
+
+ it 'has the merge request that is not approved at all and not approved by either user' do
+ expect(subject).to contain_exactly(merge_request3, merge_request4)
+ end
+ end
end
diff --git a/spec/models/concerns/atomic_internal_id_spec.rb b/spec/models/concerns/atomic_internal_id_spec.rb
index 35b0f107676..b803e699b25 100644
--- a/spec/models/concerns/atomic_internal_id_spec.rb
+++ b/spec/models/concerns/atomic_internal_id_spec.rb
@@ -240,18 +240,12 @@ RSpec.describe AtomicInternalId do
end
describe '.with_project_iid_supply' do
- let(:iid) { 100 }
-
- it 'wraps generate and track_greatest in a concurrency-safe lock' do
- expect_next_instance_of(InternalId::InternalIdGenerator) do |g|
- expect(g).to receive(:with_lock).and_call_original
- expect(g.record).to receive(:last_value).and_return(iid)
- expect(g).to receive(:track_greatest).with(iid + 4)
- end
-
- ::Milestone.with_project_iid_supply(milestone.project) do |supply|
- 4.times { supply.next_value }
- end
+ it 'supplies a stream of iid values' do
+ expect do
+ ::Milestone.with_project_iid_supply(milestone.project) do |supply|
+ 4.times { supply.next_value }
+ end
+ end.to change { InternalId.find_by(project: milestone.project, usage: :milestones)&.last_value.to_i }.by(4)
end
end
end
diff --git a/spec/models/concerns/awardable_spec.rb b/spec/models/concerns/awardable_spec.rb
index b80b6ec95e2..fcd0d0c05f4 100644
--- a/spec/models/concerns/awardable_spec.rb
+++ b/spec/models/concerns/awardable_spec.rb
@@ -3,64 +3,64 @@
require 'spec_helper'
RSpec.describe Awardable do
- let!(:issue) { create(:issue) }
- let!(:award_emoji) { create(:award_emoji, :downvote, awardable: issue) }
+ let!(:note) { create(:note) }
+ let!(:award_emoji) { create(:award_emoji, :downvote, awardable: note) }
describe "Associations" do
- subject { build(:issue) }
+ subject { build(:note) }
it { is_expected.to have_many(:award_emoji).dependent(:destroy) }
end
describe "ClassMethods" do
- let!(:issue2) { create(:issue) }
- let!(:award_emoji2) { create(:award_emoji, awardable: issue2) }
+ let!(:note2) { create(:note) }
+ let!(:award_emoji2) { create(:award_emoji, awardable: note2) }
describe "orders" do
it "orders on upvotes" do
- expect(Issue.order_upvotes_desc.to_a).to eq [issue2, issue]
+ expect(Note.order_upvotes_desc.to_a).to eq [note2, note]
end
it "orders on downvotes" do
- expect(Issue.order_downvotes_desc.to_a).to eq [issue, issue2]
+ expect(Note.order_downvotes_desc.to_a).to eq [note, note2]
end
end
describe "#awarded" do
it "filters by user and emoji name" do
- expect(Issue.awarded(award_emoji.user, "thumbsup")).to be_empty
- expect(Issue.awarded(award_emoji.user, "thumbsdown")).to eq [issue]
- expect(Issue.awarded(award_emoji2.user, "thumbsup")).to eq [issue2]
- expect(Issue.awarded(award_emoji2.user, "thumbsdown")).to be_empty
+ expect(Note.awarded(award_emoji.user, "thumbsup")).to be_empty
+ expect(Note.awarded(award_emoji.user, "thumbsdown")).to eq [note]
+ expect(Note.awarded(award_emoji2.user, "thumbsup")).to eq [note2]
+ expect(Note.awarded(award_emoji2.user, "thumbsdown")).to be_empty
end
it "filters by user and any emoji" do
- issue3 = create(:issue)
- create(:award_emoji, awardable: issue3, name: "star", user: award_emoji.user)
- create(:award_emoji, awardable: issue3, name: "star", user: award_emoji2.user)
+ note3 = create(:note)
+ create(:award_emoji, awardable: note3, name: "star", user: award_emoji.user)
+ create(:award_emoji, awardable: note3, name: "star", user: award_emoji2.user)
- expect(Issue.awarded(award_emoji.user)).to contain_exactly(issue, issue3)
- expect(Issue.awarded(award_emoji2.user)).to contain_exactly(issue2, issue3)
+ expect(Note.awarded(award_emoji.user)).to contain_exactly(note, note3)
+ expect(Note.awarded(award_emoji2.user)).to contain_exactly(note2, note3)
end
end
describe "#not_awarded" do
- it "returns issues not awarded by user" do
- expect(Issue.not_awarded(award_emoji.user)).to eq [issue2]
- expect(Issue.not_awarded(award_emoji2.user)).to eq [issue]
+ it "returns notes not awarded by user" do
+ expect(Note.not_awarded(award_emoji.user)).to eq [note2]
+ expect(Note.not_awarded(award_emoji2.user)).to eq [note]
end
end
end
describe "#upvotes" do
it "counts the number of upvotes" do
- expect(issue.upvotes).to be 0
+ expect(note.upvotes).to be 0
end
end
describe "#downvotes" do
it "counts the number of downvotes" do
- expect(issue.downvotes).to be 1
+ expect(note.downvotes).to be 1
end
end
@@ -68,67 +68,67 @@ RSpec.describe Awardable do
let(:user) { create(:user) }
before do
- issue.project.add_guest(user)
+ note.project.add_guest(user)
end
it 'is truthy when the user is allowed to award emoji' do
- expect(issue.user_can_award?(user)).to be_truthy
+ expect(note.user_can_award?(user)).to be_truthy
end
it 'is falsy when the project is archived' do
- issue.project.update!(archived: true)
+ note.project.update!(archived: true)
- expect(issue.user_can_award?(user)).to be_falsy
+ expect(note.user_can_award?(user)).to be_falsy
end
end
describe 'querying award_emoji on an Awardable' do
- let(:issue) { create(:issue) }
+ let(:note) { create(:note) }
it 'sorts in ascending fashion' do
- create_list(:award_emoji, 3, awardable: issue)
+ create_list(:award_emoji, 3, awardable: note)
- expect(issue.award_emoji).to eq issue.award_emoji.sort_by(&:id)
+ expect(note.award_emoji).to eq note.award_emoji.sort_by(&:id)
end
end
describe "#grouped_awards" do
context 'default award emojis' do
- let(:issue_without_downvote) { create(:issue) }
- let(:issue_with_downvote) do
- issue_with_downvote = create(:issue)
- create(:award_emoji, :downvote, awardable: issue_with_downvote)
- issue_with_downvote
+ let(:note_without_downvote) { create(:note) }
+ let(:note_with_downvote) do
+ note_with_downvote = create(:note)
+ create(:award_emoji, :downvote, awardable: note_with_downvote)
+ note_with_downvote
end
it "includes unused thumbs buttons by default" do
- expect(issue_without_downvote.grouped_awards.keys.sort).to eq %w(thumbsdown thumbsup)
+ expect(note_without_downvote.grouped_awards.keys.sort).to eq %w(thumbsdown thumbsup)
end
it "doesn't include unused thumbs buttons when disabled in project" do
- issue_without_downvote.project.show_default_award_emojis = false
+ note_without_downvote.project.show_default_award_emojis = false
- expect(issue_without_downvote.grouped_awards.keys.sort).to be_empty
+ expect(note_without_downvote.grouped_awards.keys.sort).to be_empty
end
it "includes unused thumbs buttons when enabled in project" do
- issue_without_downvote.project.show_default_award_emojis = true
+ note_without_downvote.project.show_default_award_emojis = true
- expect(issue_without_downvote.grouped_awards.keys.sort).to eq %w(thumbsdown thumbsup)
+ expect(note_without_downvote.grouped_awards.keys.sort).to eq %w(thumbsdown thumbsup)
end
it "doesn't include unused thumbs buttons in summary" do
- expect(issue_without_downvote.grouped_awards(with_thumbs: false).keys).to be_empty
+ expect(note_without_downvote.grouped_awards(with_thumbs: false).keys).to be_empty
end
it "includes used thumbs buttons when disabled in project" do
- issue_with_downvote.project.show_default_award_emojis = false
+ note_with_downvote.project.show_default_award_emojis = false
- expect(issue_with_downvote.grouped_awards.keys).to eq %w(thumbsdown)
+ expect(note_with_downvote.grouped_awards.keys).to eq %w(thumbsdown)
end
it "includes used thumbs buttons in summary" do
- expect(issue_with_downvote.grouped_awards(with_thumbs: false).keys).to eq %w(thumbsdown)
+ expect(note_with_downvote.grouped_awards(with_thumbs: false).keys).to eq %w(thumbsdown)
end
end
end
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index 33a4c8eac41..1c1efab2889 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
end
it 'returns false when the local version was bumped' do
- allow(Gitlab::CurrentSettings.current_application_settings).to receive(:local_markdown_version).and_return(2)
+ stub_application_setting(local_markdown_version: 2)
thing.cached_markdown_version = cache_version
is_expected.to be_falsy
@@ -88,7 +88,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
end
it 'returns true when the cached version is just right' do
- allow(Gitlab::CurrentSettings.current_application_settings).to receive(:local_markdown_version).and_return(2)
+ stub_application_setting(local_markdown_version: 2)
thing.cached_markdown_version = cache_version + 2
is_expected.to be_truthy
diff --git a/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb b/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb
index 02cd8557231..e8f2b18e662 100644
--- a/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb
+++ b/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb
@@ -17,18 +17,6 @@ RSpec.describe NamespaceSetting, 'CascadingNamespaceSettingAttribute' do
describe '#delayed_project_removal' do
subject(:delayed_project_removal) { subgroup_settings.delayed_project_removal }
- context 'when the feature is disabled' do
- before do
- stub_feature_flags(cascading_namespace_settings: false)
-
- group_settings.update!(delayed_project_removal: true)
- end
-
- it 'does not cascade' do
- expect(delayed_project_removal).to eq(nil)
- end
- end
-
context 'when there is no parent' do
context 'and the value is not nil' do
before do
@@ -192,16 +180,6 @@ RSpec.describe NamespaceSetting, 'CascadingNamespaceSettingAttribute' do
end
end
- context 'when the feature is disabled' do
- before do
- stub_feature_flags(cascading_namespace_settings: false)
-
- group_settings.update!(delayed_project_removal: true)
- end
-
- it_behaves_like 'not locked'
- end
-
context 'when attribute is locked by self' do
before do
subgroup_settings.update!(lock_delayed_project_removal: true)
diff --git a/spec/models/concerns/has_integrations_spec.rb b/spec/models/concerns/has_integrations_spec.rb
index 6e55a1c8b01..6b3f75bfcfd 100644
--- a/spec/models/concerns/has_integrations_spec.rb
+++ b/spec/models/concerns/has_integrations_spec.rb
@@ -7,14 +7,14 @@ RSpec.describe HasIntegrations do
let_it_be(:project_2) { create(:project) }
let_it_be(:project_3) { create(:project) }
let_it_be(:project_4) { create(:project) }
- let_it_be(:instance_integration) { create(:jira_service, :instance) }
+ let_it_be(:instance_integration) { create(:jira_integration, :instance) }
before do
- create(:jira_service, project: project_1, inherit_from_id: instance_integration.id)
- create(:jira_service, project: project_2, inherit_from_id: nil)
- create(:jira_service, group: create(:group), project: nil, inherit_from_id: nil)
- create(:jira_service, project: project_3, inherit_from_id: nil)
- create(:slack_service, project: project_4, inherit_from_id: nil)
+ create(:jira_integration, project: project_1, inherit_from_id: instance_integration.id)
+ create(:jira_integration, project: project_2, inherit_from_id: nil)
+ create(:jira_integration, group: create(:group), project: nil, inherit_from_id: nil)
+ create(:jira_integration, project: project_3, inherit_from_id: nil)
+ create(:integrations_slack, project: project_4, inherit_from_id: nil)
end
describe '.with_custom_integration_for' do
diff --git a/spec/models/concerns/integrations/has_data_fields_spec.rb b/spec/models/concerns/integrations/has_data_fields_spec.rb
index 54e0ac9c5a5..b28fef571c6 100644
--- a/spec/models/concerns/integrations/has_data_fields_spec.rb
+++ b/spec/models/concerns/integrations/has_data_fields_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe Integrations::HasDataFields do
context 'when data are stored in data_fields' do
let(:service) do
- create(:jira_service, url: url, username: username)
+ create(:jira_integration, url: url, username: username)
end
it_behaves_like 'data fields'
@@ -111,45 +111,52 @@ RSpec.describe Integrations::HasDataFields do
end
context 'when data are stored in properties' do
- let(:service) { create(:jira_service, :without_properties_callback, properties: properties) }
+ let(:integration) { create(:jira_integration, :without_properties_callback, properties: properties) }
- it_behaves_like 'data fields'
+ it_behaves_like 'data fields' do
+ let(:service) { integration }
+ end
describe '{arg}_was?' do
it 'returns nil when the property has not been assigned a new value' do
- service.username = 'new_username'
- service.validate
- expect(service.url_was).to be_nil
+ integration.username = 'new_username'
+ integration.validate
+
+ expect(integration.url_was).to be_nil
end
it 'returns initial value when the property has been assigned a different value' do
- service.url = 'http://example.com'
- service.validate
- expect(service.url_was).to eq('http://url.com')
+ integration.url = 'http://example.com'
+ integration.validate
+
+ expect(integration.url_was).to eq('http://url.com')
end
it 'returns initial value when the property has been re-assigned the same value' do
- service.url = 'http://url.com'
- service.validate
- expect(service.url_was).to eq('http://url.com')
+ integration.url = 'http://url.com'
+ integration.validate
+
+ expect(integration.url_was).to eq('http://url.com')
end
end
end
context 'when data are stored in both properties and data_fields' do
- let(:service) do
- create(:jira_service, :without_properties_callback, active: false, properties: properties).tap do |integration|
+ let(:integration) do
+ create(:jira_integration, :without_properties_callback, active: false, properties: properties).tap do |integration|
create(:jira_tracker_data, properties.merge(integration: integration))
end
end
- it_behaves_like 'data fields'
+ it_behaves_like 'data fields' do
+ let(:service) { integration }
+ end
describe '{arg}_was?' do
it 'returns nil' do
- service.url = 'http://example.com'
- service.validate
- expect(service.url_was).to be_nil
+ integration.url = 'http://example.com'
+ integration.validate
+ expect(integration.url_was).to be_nil
end
end
end
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 7b100b7a6f3..071e0dcba44 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -535,6 +535,26 @@ RSpec.describe Issuable do
merge_request.to_hook_data(user, old_associations: { assignees: [user] })
end
end
+
+ context 'incident severity is updated' do
+ let(:issue) { create(:incident) }
+
+ before do
+ issue.update!(issuable_severity_attributes: { severity: 'low' })
+ expect(Gitlab::HookData::IssuableBuilder)
+ .to receive(:new).with(issue).and_return(builder)
+ end
+
+ it 'delegates to Gitlab::HookData::IssuableBuilder#build' do
+ expect(builder).to receive(:build).with(
+ user: user,
+ changes: hash_including(
+ 'severity' => %w(unknown low)
+ ))
+
+ issue.to_hook_data(user, old_associations: { severity: 'unknown' })
+ end
+ end
end
describe '#labels_array' do
diff --git a/spec/models/concerns/partitioned_table_spec.rb b/spec/models/concerns/partitioned_table_spec.rb
index 3343b273ba2..c37fb81a1cf 100644
--- a/spec/models/concerns/partitioned_table_spec.rb
+++ b/spec/models/concerns/partitioned_table_spec.rb
@@ -14,6 +14,16 @@ RSpec.describe PartitionedTable do
end
end
+ context 'with keyword arguments passed to the strategy' do
+ subject { my_class.partitioned_by(key, strategy: :monthly, retain_for: 3.months) }
+
+ it 'passes the keyword arguments to the strategy' do
+ expect(Gitlab::Database::Partitioning::MonthlyStrategy).to receive(:new).with(my_class, key, retain_for: 3.months).and_call_original
+
+ subject
+ end
+ end
+
it 'assigns the MonthlyStrategy as the partitioning strategy' do
subject
@@ -27,7 +37,7 @@ RSpec.describe PartitionedTable do
end
it 'registers itself with the PartitionCreator' do
- expect(Gitlab::Database::Partitioning::PartitionCreator).to receive(:register).with(my_class)
+ expect(Gitlab::Database::Partitioning::PartitionManager).to receive(:register).with(my_class)
subject
end
diff --git a/spec/models/concerns/prometheus_adapter_spec.rb b/spec/models/concerns/prometheus_adapter_spec.rb
index 235e505c6e9..01c987a1d92 100644
--- a/spec/models/concerns/prometheus_adapter_spec.rb
+++ b/spec/models/concerns/prometheus_adapter_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
include ReactiveCachingHelpers
let(:project) { create(:prometheus_project) }
- let(:service) { project.prometheus_service }
+ let(:integration) { project.prometheus_integration }
let(:described_class) do
Class.new do
@@ -29,10 +29,10 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
end
context 'with valid data' do
- subject { service.query(:validate, query) }
+ subject { integration.query(:validate, query) }
before do
- stub_reactive_cache(service, validation_respone, validation_query, query)
+ stub_reactive_cache(integration, validation_respone, validation_query, query)
end
it 'returns query data' do
@@ -49,10 +49,10 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
end
context 'with valid data' do
- subject { service.query(:environment, environment) }
+ subject { integration.query(:environment, environment) }
before do
- stub_reactive_cache(service, prometheus_data, environment_query, environment.id)
+ stub_reactive_cache(integration, prometheus_data, environment_query, environment.id)
end
it 'returns reactive data' do
@@ -66,11 +66,11 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
let(:prometheus_client) { double(:prometheus_client, label_values: nil) }
context 'with valid data' do
- subject { service.query(:matched_metrics) }
+ subject { integration.query(:matched_metrics) }
before do
- allow(service).to receive(:prometheus_client).and_return(prometheus_client)
- synchronous_reactive_cache(service)
+ allow(integration).to receive(:prometheus_client).and_return(prometheus_client)
+ synchronous_reactive_cache(integration)
end
it 'returns reactive data' do
@@ -89,10 +89,10 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
end
context 'with valid data' do
- subject { service.query(:deployment, deployment) }
+ subject { integration.query(:deployment, deployment) }
before do
- stub_reactive_cache(service, prometheus_data, deployment_query, deployment.id)
+ stub_reactive_cache(integration, prometheus_data, deployment_query, deployment.id)
end
it 'returns reactive data' do
@@ -111,10 +111,10 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
end
context 'with valid data' do
- subject { service.query(:additional_metrics_environment, environment, *time_window) }
+ subject { integration.query(:additional_metrics_environment, environment, *time_window) }
before do
- stub_reactive_cache(service, prometheus_data, additional_metrics_environment_query, environment.id, *time_window)
+ stub_reactive_cache(integration, prometheus_data, additional_metrics_environment_query, environment.id, *time_window)
end
it 'returns reactive data' do
@@ -128,21 +128,21 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
let(:environment) { create(:environment, slug: 'env-slug') }
before do
- service.manual_configuration = true
- service.active = true
+ integration.manual_configuration = true
+ integration.active = true
end
subject do
- service.calculate_reactive_cache(environment_query.name, environment.id)
+ integration.calculate_reactive_cache(environment_query.name, environment.id)
end
around do |example|
freeze_time { example.run }
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
- service.active = false
+ integration.active = false
end
it { is_expected.to be_nil }
@@ -168,7 +168,7 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
end
describe '#build_query_args' do
- subject { service.build_query_args(*args) }
+ subject { integration.build_query_args(*args) }
context 'when active record models are included' do
let(:args) { [double(:environment, id: 12)] }
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index 3232a559d0b..a53db07cc59 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -281,6 +281,16 @@ RSpec.describe ContainerRepository do
expect(repository.name).to be_empty
end
end
+
+ context 'when repository already exists' do
+ let(:path) { project.full_path + '/some/image' }
+
+ it 'returns the existing repository' do
+ container_repository = create(:container_repository, project: project, name: 'some/image')
+
+ expect(repository.id).to eq(container_repository.id)
+ end
+ end
end
describe '.build_root_repository' do
@@ -311,13 +321,18 @@ RSpec.describe ContainerRepository do
end
context 'with a subgroup' do
- let(:test_group) { create(:group) }
- let(:another_project) { create(:project, path: 'test', group: test_group) }
+ let_it_be(:test_group) { create(:group) }
+ let_it_be(:another_project) { create(:project, path: 'test', group: test_group) }
+ let_it_be(:project3) { create(:project, path: 'test3', group: test_group, container_registry_enabled: false) }
- let(:another_repository) do
+ let_it_be(:another_repository) do
create(:container_repository, name: 'my_image', project: another_project)
end
+ let_it_be(:repository3) do
+ create(:container_repository, name: 'my_image3', project: project3)
+ end
+
before do
group.parent = test_group
group.save!
@@ -331,40 +346,6 @@ RSpec.describe ContainerRepository do
it { is_expected.to eq([]) }
end
-
- context 'with read_container_registry_access_level disabled' do
- before do
- stub_feature_flags(read_container_registry_access_level: false)
- end
-
- context 'in a group' do
- let(:test_group) { group }
-
- it { is_expected.to contain_exactly(repository) }
- end
-
- context 'with a subgroup' do
- let(:test_group) { create(:group) }
- let(:another_project) { create(:project, path: 'test', group: test_group) }
-
- let(:another_repository) do
- create(:container_repository, name: 'my_image', project: another_project)
- end
-
- before do
- group.parent = test_group
- group.save!
- end
-
- it { is_expected.to contain_exactly(repository, another_repository) }
- end
-
- context 'group without container_repositories' do
- let(:test_group) { create(:group) }
-
- it { is_expected.to eq([]) }
- end
- end
end
describe '.search_by_name' do
diff --git a/spec/models/deploy_token_spec.rb b/spec/models/deploy_token_spec.rb
index dfc37f9e661..c9f7895a616 100644
--- a/spec/models/deploy_token_spec.rb
+++ b/spec/models/deploy_token_spec.rb
@@ -242,6 +242,7 @@ RSpec.describe DeployToken do
context 'and when the token is of group type' do
let_it_be(:group) { create(:group) }
+
let(:deploy_token) { create(:deploy_token, :group) }
before do
diff --git a/spec/models/deployment_metrics_spec.rb b/spec/models/deployment_metrics_spec.rb
index fadfc1b63ac..c804e20d66d 100644
--- a/spec/models/deployment_metrics_spec.rb
+++ b/spec/models/deployment_metrics_spec.rb
@@ -15,35 +15,35 @@ RSpec.describe DeploymentMetrics do
context 'when deployment is success' do
let(:deployment) { create(:deployment, :success) }
- context 'without a monitoring service' do
+ context 'without a monitoring integration' do
it { is_expected.to be_falsy }
end
- context 'with a Prometheus Service' do
- let(:prometheus_service) { instance_double(PrometheusService, can_query?: true, configured?: true) }
+ context 'with a Prometheus integration' do
+ let(:prometheus_integration) { instance_double(::Integrations::Prometheus, can_query?: true, configured?: true) }
before do
- allow(deployment.project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
+ allow(deployment.project).to receive(:find_or_initialize_integration).with('prometheus').and_return prometheus_integration
end
it { is_expected.to be_truthy }
end
- context 'with a Prometheus Service that cannot query' do
- let(:prometheus_service) { instance_double(PrometheusService, configured?: true, can_query?: false) }
+ context 'with a Prometheus integration that cannot query' do
+ let(:prometheus_integration) { instance_double(::Integrations::Prometheus, configured?: true, can_query?: false) }
before do
- allow(deployment.project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
+ allow(deployment.project).to receive(:find_or_initialize_integration).with('prometheus').and_return prometheus_integration
end
it { is_expected.to be_falsy }
end
- context 'with a Prometheus Service that is not configured' do
- let(:prometheus_service) { instance_double(PrometheusService, configured?: false, can_query?: false) }
+ context 'with a Prometheus integration that is not configured' do
+ let(:prometheus_integration) { instance_double(::Integrations::Prometheus, configured?: false, can_query?: false) }
before do
- allow(deployment.project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
+ allow(deployment.project).to receive(:find_or_initialize_integration).with('prometheus').and_return prometheus_integration
end
it { is_expected.to be_falsy }
@@ -64,7 +64,7 @@ RSpec.describe DeploymentMetrics do
describe '#metrics' do
let(:deployment) { create(:deployment, :success) }
- let(:prometheus_adapter) { instance_double(PrometheusService, can_query?: true, configured?: true) }
+ let(:prometheus_adapter) { instance_double(::Integrations::Prometheus, can_query?: true, configured?: true) }
let(:deployment_metrics) { described_class.new(deployment.project, deployment) }
subject { deployment_metrics.metrics }
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index bcd237cbd38..a0e5e9cbfe4 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -80,6 +80,7 @@ RSpec.describe Deployment do
let_it_be(:staging) { create(:environment, :staging, project: project) }
let_it_be(:other_project) { create(:project, :repository) }
let_it_be(:other_production) { create(:environment, :production, project: other_project) }
+
let(:environment_name) { production.name }
context 'when deployment belongs to the environment' do
@@ -488,6 +489,7 @@ RSpec.describe Deployment do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:commits) { project.repository.commits('master', limit: 2) }
let_it_be(:deployments) { commits.reverse.map { |commit| create(:deployment, project: project, sha: commit.id) } }
+
let(:sha) { commits.map(&:id) }
it 'finds the latest deployment with sha' do
@@ -823,6 +825,7 @@ RSpec.describe Deployment do
describe '#update_merge_request_metrics!' do
let_it_be(:project) { create(:project, :repository) }
+
let(:environment) { build(:environment, environment_tier, project: project) }
let!(:deployment) { create(:deployment, :success, project: project, environment: environment) }
let!(:merge_request) { create(:merge_request, :simple, :merged_last_month, project: project) }
diff --git a/spec/models/diff_discussion_spec.rb b/spec/models/diff_discussion_spec.rb
index 2a2663149d0..998204626d3 100644
--- a/spec/models/diff_discussion_spec.rb
+++ b/spec/models/diff_discussion_spec.rb
@@ -126,4 +126,13 @@ RSpec.describe DiffDiscussion do
end
end
end
+
+ describe '#cache_key' do
+ it 'returns the cache key with the position sha' do
+ notes_sha = Digest::SHA1.hexdigest("#{diff_note.id}")
+ position_sha = Digest::SHA1.hexdigest(diff_note.position.to_json)
+
+ expect(subject.cache_key).to eq("#{described_class::CACHE_VERSION}:#{diff_note.latest_cached_markdown_version}:#{subject.id}:#{notes_sha}:#{diff_note.updated_at}::#{position_sha}")
+ end
+ end
end
diff --git a/spec/models/diff_viewer/server_side_spec.rb b/spec/models/diff_viewer/server_side_spec.rb
index 686dd1249be..28660b0d4b9 100644
--- a/spec/models/diff_viewer/server_side_spec.rb
+++ b/spec/models/diff_viewer/server_side_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe DiffViewer::ServerSide do
let_it_be(:project) { create(:project, :repository) }
+
let(:commit) { project.commit_by(oid: '570e7b2abdd848b95f2f578043fc23bd6f6fd24d') }
let!(:diff_file) { commit.diffs.diff_file_with_new_path('files/ruby/popen.rb') }
diff --git a/spec/models/discussion_spec.rb b/spec/models/discussion_spec.rb
index 021940be0c2..2b33de96e04 100644
--- a/spec/models/discussion_spec.rb
+++ b/spec/models/discussion_spec.rb
@@ -51,4 +51,22 @@ RSpec.describe Discussion do
expect(policy).to be_a(NotePolicy)
end
end
+
+ describe '#cache_key' do
+ let(:notes_sha) { Digest::SHA1.hexdigest("#{first_note.id}:#{second_note.id}:#{third_note.id}") }
+
+ it 'returns the cache key with ID and latest updated note updated at' do
+ expect(subject.cache_key).to eq("#{described_class::CACHE_VERSION}:#{third_note.latest_cached_markdown_version}:#{subject.id}:#{notes_sha}:#{third_note.updated_at}:")
+ end
+
+ context 'when discussion is resolved' do
+ before do
+ subject.resolve!(first_note.author)
+ end
+
+ it 'returns the cache key with resolved at' do
+ expect(subject.cache_key).to eq("#{described_class::CACHE_VERSION}:#{third_note.latest_cached_markdown_version}:#{subject.id}:#{notes_sha}:#{third_note.updated_at}:#{subject.resolved_at}")
+ end
+ end
+ end
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index ff4c8ae950d..18a172b72d7 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -219,6 +219,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:environments) { Environment.all }
before_all do
@@ -760,6 +761,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
describe '#last_visible_pipeline' do
let(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
+
let(:environment) { create(:environment, project: project) }
let(:commit) { project.commit }
@@ -1462,6 +1464,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:environment, reload: true) { create(:environment, project: project) }
+
let!(:deployment) { create(:deployment, project: project, environment: environment, deployable: build) }
let!(:build) { create(:ci_build, :running, project: project, environment: environment) }
diff --git a/spec/models/error_tracking/error_event_spec.rb b/spec/models/error_tracking/error_event_spec.rb
new file mode 100644
index 00000000000..331661f88cc
--- /dev/null
+++ b/spec/models/error_tracking/error_event_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ErrorTracking::ErrorEvent, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:error) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:description) }
+ it { is_expected.to validate_presence_of(:occurred_at) }
+ end
+end
diff --git a/spec/models/error_tracking/error_spec.rb b/spec/models/error_tracking/error_spec.rb
new file mode 100644
index 00000000000..8591802d15c
--- /dev/null
+++ b/spec/models/error_tracking/error_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ErrorTracking::Error, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_many(:events) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_presence_of(:description) }
+ it { is_expected.to validate_presence_of(:actor) }
+ end
+end
diff --git a/spec/models/event_collection_spec.rb b/spec/models/event_collection_spec.rb
index aca2a8c3a2f..107447c9630 100644
--- a/spec/models/event_collection_spec.rb
+++ b/spec/models/event_collection_spec.rb
@@ -28,6 +28,7 @@ RSpec.describe EventCollection do
let_it_be(:closed_issue_event) { create(:closed_issue_event, project: project, author: user) }
let_it_be(:wiki_page_event) { create(:wiki_page_event, project: project) }
let_it_be(:design_event) { create(:design_event, project: project) }
+
let(:push_events) { push_event_payloads.map(&:event) }
it 'returns an Array of events', :aggregate_failures do
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index 949e8ec0a72..fc229dcaa22 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -268,6 +268,7 @@ RSpec.describe Event do
let(:design) { create(:design, issue: issue, project: project) }
let(:note_on_commit) { create(:note_on_commit, project: project) }
let(:note_on_issue) { create(:note_on_issue, noteable: issue, project: project) }
+ let(:confidential_note) { create(:note, noteable: issue, project: project, confidential: true) }
let(:note_on_confidential_issue) { create(:note_on_issue, noteable: confidential_issue, project: project) }
let(:note_on_project_snippet) { create(:note_on_project_snippet, author: author, noteable: project_snippet, project: project) }
let(:note_on_personal_snippet) { create(:note_on_personal_snippet, author: author, noteable: personal_snippet, project: nil) }
@@ -399,6 +400,16 @@ RSpec.describe Event do
include_examples 'visible to assignee and author', true
end
+ context 'confidential note' do
+ let(:target) { confidential_note }
+
+ include_examples 'visibility examples' do
+ let(:visibility) { visible_to_none_except(:member) }
+ end
+
+ include_examples 'visible to author', true
+ end
+
context 'private project' do
let(:project) { private_project }
let(:target) { note_on_issue }
@@ -967,14 +978,13 @@ RSpec.describe Event do
describe '#action_name' do
it 'handles all valid design events' do
- created, updated, destroyed, archived = %i[created updated destroyed archived].map do |trait|
+ created, updated, destroyed = %i[created updated destroyed].map do |trait|
build(:design_event, trait).action_name
end
expect(created).to eq('uploaded')
expect(updated).to eq('revised')
expect(destroyed).to eq('deleted')
- expect(archived).to eq('archived')
end
it 'handles correct push_action' do
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 8f4bc43c38a..0a08b15a1eb 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -565,11 +565,11 @@ RSpec.describe Group do
describe '.without_integration' do
let(:another_group) { create(:group) }
- let(:instance_integration) { build(:jira_service, :instance) }
+ let(:instance_integration) { build(:jira_integration, :instance) }
before do
- create(:jira_service, group: group, project: nil)
- create(:slack_service, group: another_group, project: nil)
+ create(:jira_integration, group: group, project: nil)
+ create(:integrations_slack, group: another_group, project: nil)
end
it 'returns groups without integration' do
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index d4ea3e5d08a..ab4027170b2 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -9,11 +9,12 @@ RSpec.describe Integration do
let_it_be(:project) { create(:project, group: group) }
describe "Associations" do
- it { is_expected.to belong_to :project }
- it { is_expected.to belong_to :group }
- it { is_expected.to have_one :service_hook }
- it { is_expected.to have_one :jira_tracker_data }
- it { is_expected.to have_one :issue_tracker_data }
+ it { is_expected.to belong_to(:project).inverse_of(:integrations) }
+ it { is_expected.to belong_to(:group).inverse_of(:integrations) }
+ it { is_expected.to have_one(:service_hook).inverse_of(:integration).with_foreign_key(:service_id) }
+ it { is_expected.to have_one(:issue_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:service_id).class_name('Integrations::IssueTrackerData') }
+ it { is_expected.to have_one(:jira_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:service_id).class_name('Integrations::JiraTrackerData') }
+ it { is_expected.to have_one(:open_project_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:service_id).class_name('Integrations::OpenProjectTrackerData') }
end
describe 'validations' do
@@ -68,9 +69,9 @@ RSpec.describe Integration do
describe 'Scopes' do
describe '.by_type' do
- let!(:service1) { create(:jira_service) }
- let!(:service2) { create(:jira_service) }
- let!(:service3) { create(:redmine_service) }
+ let!(:service1) { create(:jira_integration) }
+ let!(:service2) { create(:jira_integration) }
+ let!(:service3) { create(:redmine_integration) }
subject { described_class.by_type(type) }
@@ -88,8 +89,8 @@ RSpec.describe Integration do
end
describe '.for_group' do
- let!(:service1) { create(:jira_service, project_id: nil, group_id: group.id) }
- let!(:service2) { create(:jira_service) }
+ let!(:service1) { create(:jira_integration, project_id: nil, group_id: group.id) }
+ let!(:service2) { create(:jira_integration) }
it 'returns the right group service' do
expect(described_class.for_group(group)).to match_array([service1])
@@ -139,67 +140,38 @@ RSpec.describe Integration do
end
end
- describe "Test Button" do
- let(:service) { build(:service, project: project) }
-
- describe '#can_test?' do
- subject { service.can_test? }
-
- context 'when repository is not empty' do
- let(:project) { build(:project, :repository) }
-
- it { is_expected.to be true }
- end
-
- context 'when repository is empty' do
- let(:project) { build(:project) }
-
- it { is_expected.to be true }
- end
-
- context 'when instance-level service' do
- Integration.available_services_types.each do |service_type|
- let(:service) do
- described_class.send(:integration_type_to_model, service_type).new(instance: true)
- end
-
- it { is_expected.to be_falsey }
- end
- end
-
- context 'when group-level service' do
- Integration.available_services_types.each do |service_type|
- let(:service) do
- described_class.send(:integration_type_to_model, service_type).new(group_id: group.id)
- end
+ describe '#testable?' do
+ context 'when integration is project-level' do
+ subject { build(:service, project: project) }
- it { is_expected.to be_falsey }
- end
- end
+ it { is_expected.to be_testable }
end
- describe '#test' do
- let(:data) { 'test' }
+ context 'when integration is not project-level' do
+ subject { build(:service, project: nil) }
- context 'when repository is not empty' do
- let(:project) { build(:project, :repository) }
+ it { is_expected.not_to be_testable }
+ end
+ end
- it 'test runs execute' do
- expect(service).to receive(:execute).with(data)
+ describe '#test' do
+ let(:integration) { build(:service, project: project) }
+ let(:data) { 'test' }
- service.test(data)
- end
- end
+ it 'calls #execute' do
+ expect(integration).to receive(:execute).with(data)
- context 'when repository is empty' do
- let(:project) { build(:project) }
+ integration.test(data)
+ end
- it 'test runs execute' do
- expect(service).to receive(:execute).with(data)
+ it 'returns a result' do
+ result = 'foo'
+ allow(integration).to receive(:execute).with(data).and_return(result)
- service.test(data)
- end
- end
+ expect(integration.test(data)).to eq(
+ success: true,
+ result: result
+ )
end
end
@@ -234,26 +206,30 @@ RSpec.describe Integration do
end
describe '.find_or_initialize_non_project_specific_integration' do
- let!(:service1) { create(:jira_service, project_id: nil, group_id: group.id) }
- let!(:service2) { create(:jira_service) }
+ let!(:integration_1) { create(:jira_integration, project_id: nil, group_id: group.id) }
+ let!(:integration_2) { create(:jira_integration) }
- it 'returns the right service' do
- expect(Integration.find_or_initialize_non_project_specific_integration('jira', group_id: group)).to eq(service1)
+ it 'returns the right integration' do
+ expect(Integration.find_or_initialize_non_project_specific_integration('jira', group_id: group))
+ .to eq(integration_1)
end
- it 'does not create a new service' do
- expect { Integration.find_or_initialize_non_project_specific_integration('redmine', group_id: group) }.not_to change { Integration.count }
+ it 'does not create a new integration' do
+ expect { Integration.find_or_initialize_non_project_specific_integration('redmine', group_id: group) }
+ .not_to change(Integration, :count)
end
end
describe '.find_or_initialize_all_non_project_specific' do
shared_examples 'service instances' do
it 'returns the available service instances' do
- expect(Integration.find_or_initialize_all_non_project_specific(Integration.for_instance).map(&:to_param)).to match_array(Integration.available_services_names(include_project_specific: false))
+ expect(Integration.find_or_initialize_all_non_project_specific(Integration.for_instance).map(&:to_param))
+ .to match_array(Integration.available_integration_names(include_project_specific: false))
end
it 'does not create service instances' do
- expect { Integration.find_or_initialize_all_non_project_specific(Integration.for_instance) }.not_to change { Integration.count }
+ expect { Integration.find_or_initialize_all_non_project_specific(Integration.for_instance) }
+ .not_to change(Integration, :count)
end
end
@@ -262,7 +238,7 @@ RSpec.describe Integration do
context 'with all existing instances' do
before do
Integration.insert_all(
- Integration.available_services_types(include_project_specific: false).map { |type| { instance: true, type: type } }
+ Integration.available_integration_types(include_project_specific: false).map { |type| { instance: true, type: type } }
)
end
@@ -280,7 +256,7 @@ RSpec.describe Integration do
context 'with a few existing instances' do
before do
- create(:jira_service, :instance)
+ create(:jira_integration, :instance)
end
it_behaves_like 'service instances'
@@ -290,13 +266,15 @@ RSpec.describe Integration do
describe 'template' do
shared_examples 'retrieves service templates' do
it 'returns the available service templates' do
- expect(Integration.find_or_create_templates.pluck(:type)).to match_array(Integration.available_services_types(include_project_specific: false))
+ expect(Integration.find_or_create_templates.pluck(:type)).to match_array(Integration.available_integration_types(include_project_specific: false))
end
end
describe '.find_or_create_templates' do
it 'creates service templates' do
- expect { Integration.find_or_create_templates }.to change { Integration.count }.from(0).to(Integration.available_services_names(include_project_specific: false).size)
+ total = Integration.available_integration_names(include_project_specific: false).size
+
+ expect { Integration.find_or_create_templates }.to change(Integration, :count).from(0).to(total)
end
it_behaves_like 'retrieves service templates'
@@ -304,7 +282,7 @@ RSpec.describe Integration do
context 'with all existing templates' do
before do
Integration.insert_all(
- Integration.available_services_types(include_project_specific: false).map { |type| { template: true, type: type } }
+ Integration.available_integration_types(include_project_specific: false).map { |type| { template: true, type: type } }
)
end
@@ -326,11 +304,13 @@ RSpec.describe Integration do
context 'with a few existing templates' do
before do
- create(:jira_service, :template)
+ create(:jira_integration, :template)
end
it 'creates the rest of the service templates' do
- expect { Integration.find_or_create_templates }.to change { Integration.count }.from(1).to(Integration.available_services_names(include_project_specific: false).size)
+ total = Integration.available_integration_names(include_project_specific: false).size
+
+ expect { Integration.find_or_create_templates }.to change(Integration, :count).from(1).to(total)
end
it_behaves_like 'retrieves service templates'
@@ -339,36 +319,36 @@ RSpec.describe Integration do
describe '.build_from_integration' do
context 'when integration is invalid' do
- let(:integration) do
- build(:prometheus_service, :template, active: true, properties: {})
+ let(:template_integration) do
+ build(:prometheus_integration, :template, active: true, properties: {})
.tap { |integration| integration.save!(validate: false) }
end
- it 'sets service to inactive' do
- service = described_class.build_from_integration(integration, project_id: project.id)
+ it 'sets integration to inactive' do
+ integration = described_class.build_from_integration(template_integration, project_id: project.id)
- expect(service).to be_valid
- expect(service.active).to be false
+ expect(integration).to be_valid
+ expect(integration.active).to be false
end
end
context 'when integration is an instance-level integration' do
- let(:integration) { create(:jira_service, :instance) }
+ let(:instance_integration) { create(:jira_integration, :instance) }
it 'sets inherit_from_id from integration' do
- service = described_class.build_from_integration(integration, project_id: project.id)
+ integration = described_class.build_from_integration(instance_integration, project_id: project.id)
- expect(service.inherit_from_id).to eq(integration.id)
+ expect(integration.inherit_from_id).to eq(instance_integration.id)
end
end
context 'when integration is a group-level integration' do
- let(:integration) { create(:jira_service, group: group, project: nil) }
+ let(:group_integration) { create(:jira_integration, group: group, project: nil) }
it 'sets inherit_from_id from integration' do
- service = described_class.build_from_integration(integration, project_id: project.id)
+ integration = described_class.build_from_integration(group_integration, project_id: project.id)
- expect(service.inherit_from_id).to eq(integration.id)
+ expect(integration.inherit_from_id).to eq(group_integration.id)
end
end
@@ -418,7 +398,7 @@ RSpec.describe Integration do
context 'when data are stored in properties' do
let(:properties) { data_params }
let!(:integration) do
- create(:jira_service, :without_properties_callback, template: true, properties: properties.merge(additional: 'something'))
+ create(:jira_integration, :without_properties_callback, template: true, properties: properties.merge(additional: 'something'))
end
it_behaves_like 'service creation from an integration'
@@ -426,7 +406,7 @@ RSpec.describe Integration do
context 'when data are stored in separated fields' do
let(:integration) do
- create(:jira_service, :template, data_params.merge(properties: {}))
+ create(:jira_integration, :template, data_params.merge(properties: {}))
end
it_behaves_like 'service creation from an integration'
@@ -435,7 +415,7 @@ RSpec.describe Integration do
context 'when data are stored in both properties and separated fields' do
let(:properties) { data_params }
let(:integration) do
- create(:jira_service, :without_properties_callback, active: true, template: true, properties: properties).tap do |integration|
+ create(:jira_integration, :without_properties_callback, active: true, template: true, properties: properties).tap do |integration|
create(:jira_tracker_data, data_params.merge(integration: integration))
end
end
@@ -459,39 +439,41 @@ RSpec.describe Integration do
describe 'is prefilled for projects pushover service' do
it "has all fields prefilled" do
- service = project.find_or_initialize_service('pushover')
-
- expect(service.template).to eq(false)
- expect(service.device).to eq('MyDevice')
- expect(service.sound).to eq('mic')
- expect(service.priority).to eq(4)
- expect(service.api_key).to eq('123456789')
+ integration = project.find_or_initialize_integration('pushover')
+
+ expect(integration).to have_attributes(
+ template: eq(false),
+ device: eq('MyDevice'),
+ sound: eq('mic'),
+ priority: eq(4),
+ api_key: eq('123456789')
+ )
end
end
end
end
describe '.default_integration' do
- context 'with an instance-level service' do
- let_it_be(:instance_service) { create(:jira_service, :instance) }
+ context 'with an instance-level integration' do
+ let_it_be(:instance_integration) { create(:jira_integration, :instance) }
- it 'returns the instance service' do
- expect(described_class.default_integration('JiraService', project)).to eq(instance_service)
+ it 'returns the instance integration' do
+ expect(described_class.default_integration('JiraService', project)).to eq(instance_integration)
end
- it 'returns nil for nonexistent service type' do
+ it 'returns nil for nonexistent integration type' do
expect(described_class.default_integration('HipchatService', project)).to eq(nil)
end
- context 'with a group service' do
- let_it_be(:group_service) { create(:jira_service, group_id: group.id, project_id: nil) }
+ context 'with a group integration' do
+ let_it_be(:group_integration) { create(:jira_integration, group_id: group.id, project_id: nil) }
- it 'returns the group service for a project' do
- expect(described_class.default_integration('JiraService', project)).to eq(group_service)
+ it 'returns the group integration for a project' do
+ expect(described_class.default_integration('JiraService', project)).to eq(group_integration)
end
- it 'returns the instance service for a group' do
- expect(described_class.default_integration('JiraService', group)).to eq(instance_service)
+ it 'returns the instance integration for a group' do
+ expect(described_class.default_integration('JiraService', group)).to eq(instance_integration)
end
context 'with a subgroup' do
@@ -499,27 +481,27 @@ RSpec.describe Integration do
let!(:project) { create(:project, group: subgroup) }
- it 'returns the closest group service for a project' do
- expect(described_class.default_integration('JiraService', project)).to eq(group_service)
+ it 'returns the closest group integration for a project' do
+ expect(described_class.default_integration('JiraService', project)).to eq(group_integration)
end
- it 'returns the closest group service for a subgroup' do
- expect(described_class.default_integration('JiraService', subgroup)).to eq(group_service)
+ it 'returns the closest group integration for a subgroup' do
+ expect(described_class.default_integration('JiraService', subgroup)).to eq(group_integration)
end
- context 'having a service with custom settings' do
- let!(:subgroup_service) { create(:jira_service, group_id: subgroup.id, project_id: nil) }
+ context 'having a integration with custom settings' do
+ let!(:subgroup_integration) { create(:jira_integration, group_id: subgroup.id, project_id: nil) }
- it 'returns the closest group service for a project' do
- expect(described_class.default_integration('JiraService', project)).to eq(subgroup_service)
+ it 'returns the closest group integration for a project' do
+ expect(described_class.default_integration('JiraService', project)).to eq(subgroup_integration)
end
end
- context 'having a service inheriting settings' do
- let!(:subgroup_service) { create(:jira_service, group_id: subgroup.id, project_id: nil, inherit_from_id: group_service.id) }
+ context 'having a integration inheriting settings' do
+ let!(:subgroup_integration) { create(:jira_integration, group_id: subgroup.id, project_id: nil, inherit_from_id: group_integration.id) }
- it 'returns the closest group service which does not inherit from its parent for a project' do
- expect(described_class.default_integration('JiraService', project)).to eq(group_service)
+ it 'returns the closest group integration which does not inherit from its parent for a project' do
+ expect(described_class.default_integration('JiraService', project)).to eq(group_integration)
end
end
end
@@ -528,10 +510,10 @@ RSpec.describe Integration do
end
describe '.create_from_active_default_integrations' do
- context 'with an active service template' do
- let_it_be(:template_integration) { create(:prometheus_service, :template, api_url: 'https://prometheus.template.com/') }
+ context 'with an active integration template' do
+ let_it_be(:template_integration) { create(:prometheus_integration, :template, api_url: 'https://prometheus.template.com/') }
- it 'creates a service from the template' do
+ it 'creates an integration from the template' do
described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
expect(project.reload.integrations.size).to eq(1)
@@ -540,9 +522,9 @@ RSpec.describe Integration do
end
context 'with an active instance-level integration' do
- let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
+ let!(:instance_integration) { create(:prometheus_integration, :instance, api_url: 'https://prometheus.instance.com/') }
- it 'creates a service from the instance-level integration' do
+ it 'creates an integration from the instance-level integration' do
described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
expect(project.reload.integrations.size).to eq(1)
@@ -551,7 +533,7 @@ RSpec.describe Integration do
end
context 'passing a group' do
- it 'creates a service from the instance-level integration' do
+ it 'creates an integration from the instance-level integration' do
described_class.create_from_active_default_integrations(group, :group_id)
expect(group.reload.integrations.size).to eq(1)
@@ -561,9 +543,9 @@ RSpec.describe Integration do
end
context 'with an active group-level integration' do
- let!(:group_integration) { create(:prometheus_service, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+ let!(:group_integration) { create(:prometheus_integration, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
- it 'creates a service from the group-level integration' do
+ it 'creates an integration from the group-level integration' do
described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
expect(project.reload.integrations.size).to eq(1)
@@ -574,7 +556,7 @@ RSpec.describe Integration do
context 'passing a group' do
let!(:subgroup) { create(:group, parent: group) }
- it 'creates a service from the group-level integration' do
+ it 'creates an integration from the group-level integration' do
described_class.create_from_active_default_integrations(subgroup, :group_id)
expect(subgroup.reload.integrations.size).to eq(1)
@@ -584,11 +566,11 @@ RSpec.describe Integration do
end
context 'with an active subgroup' do
- let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
+ let!(:subgroup_integration) { create(:prometheus_integration, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
let!(:subgroup) { create(:group, parent: group) }
let(:project) { create(:project, group: subgroup) }
- it 'creates a service from the subgroup-level integration' do
+ it 'creates an integration from the subgroup-level integration' do
described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
expect(project.reload.integrations.size).to eq(1)
@@ -601,7 +583,7 @@ RSpec.describe Integration do
context 'traversal queries' do
shared_examples 'correct ancestor order' do
- it 'creates a service from the subgroup-level integration' do
+ it 'creates an integration from the subgroup-level integration' do
described_class.create_from_active_default_integrations(sub_subgroup, :group_id)
sub_subgroup.reload
@@ -611,10 +593,10 @@ RSpec.describe Integration do
expect(sub_subgroup.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
end
- context 'having a service inheriting settings' do
- let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, inherit_from_id: group_integration.id, api_url: 'https://prometheus.subgroup.com/') }
+ context 'having an integration inheriting settings' do
+ let!(:subgroup_integration) { create(:prometheus_integration, group: subgroup, project: nil, inherit_from_id: group_integration.id, api_url: 'https://prometheus.subgroup.com/') }
- it 'creates a service from the group-level integration' do
+ it 'creates an integration from the group-level integration' do
described_class.create_from_active_default_integrations(sub_subgroup, :group_id)
sub_subgroup.reload
@@ -656,11 +638,11 @@ RSpec.describe Integration do
let_it_be(:subgroup2) { create(:group, parent: group) }
let_it_be(:project1) { create(:project, group: subgroup1) }
let_it_be(:project2) { create(:project, group: subgroup2) }
- let_it_be(:group_integration) { create(:prometheus_service, group: group, project: nil) }
- let_it_be(:subgroup_integration1) { create(:prometheus_service, group: subgroup1, project: nil, inherit_from_id: group_integration.id) }
- let_it_be(:subgroup_integration2) { create(:prometheus_service, group: subgroup2, project: nil) }
- let_it_be(:project_integration1) { create(:prometheus_service, group: nil, project: project1, inherit_from_id: group_integration.id) }
- let_it_be(:project_integration2) { create(:prometheus_service, group: nil, project: project2, inherit_from_id: subgroup_integration2.id) }
+ let_it_be(:group_integration) { create(:prometheus_integration, group: group, project: nil) }
+ let_it_be(:subgroup_integration1) { create(:prometheus_integration, group: subgroup1, project: nil, inherit_from_id: group_integration.id) }
+ let_it_be(:subgroup_integration2) { create(:prometheus_integration, group: subgroup2, project: nil) }
+ let_it_be(:project_integration1) { create(:prometheus_integration, group: nil, project: project1, inherit_from_id: group_integration.id) }
+ let_it_be(:project_integration2) { create(:prometheus_integration, group: nil, project: project2, inherit_from_id: subgroup_integration2.id) }
it 'returns the groups and projects inheriting from integration ancestors', :aggregate_failures do
expect(described_class.inherited_descendants_from_self_or_ancestors_from(group_integration)).to eq([subgroup_integration1, project_integration1])
@@ -669,11 +651,8 @@ RSpec.describe Integration do
end
describe '.integration_name_to_model' do
- it 'returns the model for the given service name', :aggregate_failures do
+ it 'returns the model for the given service name' do
expect(described_class.integration_name_to_model('asana')).to eq(Integrations::Asana)
- # TODO We can remove this test when all models have been namespaced:
- # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/60968#note_570994955
- expect(described_class.integration_name_to_model('prometheus')).to eq(PrometheusService)
end
it 'raises an error if service name is invalid' do
@@ -897,37 +876,37 @@ RSpec.describe Integration do
end
end
- describe '.available_services_names' do
+ describe '.available_integration_names' do
it 'calls the right methods' do
- expect(described_class).to receive(:services_names).and_call_original
- expect(described_class).to receive(:dev_services_names).and_call_original
- expect(described_class).to receive(:project_specific_services_names).and_call_original
+ expect(described_class).to receive(:integration_names).and_call_original
+ expect(described_class).to receive(:dev_integration_names).and_call_original
+ expect(described_class).to receive(:project_specific_integration_names).and_call_original
- described_class.available_services_names
+ described_class.available_integration_names
end
- it 'does not call project_specific_services_names with include_project_specific false' do
- expect(described_class).to receive(:services_names).and_call_original
- expect(described_class).to receive(:dev_services_names).and_call_original
- expect(described_class).not_to receive(:project_specific_services_names)
+ it 'does not call project_specific_integration_names with include_project_specific false' do
+ expect(described_class).to receive(:integration_names).and_call_original
+ expect(described_class).to receive(:dev_integration_names).and_call_original
+ expect(described_class).not_to receive(:project_specific_integration_names)
- described_class.available_services_names(include_project_specific: false)
+ described_class.available_integration_names(include_project_specific: false)
end
- it 'does not call dev_services_names with include_dev false' do
- expect(described_class).to receive(:services_names).and_call_original
- expect(described_class).not_to receive(:dev_services_names)
- expect(described_class).to receive(:project_specific_services_names).and_call_original
+ it 'does not call dev_integration_names with include_dev false' do
+ expect(described_class).to receive(:integration_names).and_call_original
+ expect(described_class).not_to receive(:dev_integration_names)
+ expect(described_class).to receive(:project_specific_integration_names).and_call_original
- described_class.available_services_names(include_dev: false)
+ described_class.available_integration_names(include_dev: false)
end
- it { expect(described_class.available_services_names).to include('jenkins') }
+ it { expect(described_class.available_integration_names).to include('jenkins') }
end
- describe '.project_specific_services_names' do
+ describe '.project_specific_integration_names' do
it do
- expect(described_class.project_specific_services_names)
+ expect(described_class.project_specific_integration_names)
.to include(*described_class::PROJECT_SPECIFIC_INTEGRATION_NAMES)
end
end
diff --git a/spec/models/integrations/asana_spec.rb b/spec/models/integrations/asana_spec.rb
index 4473478910a..f7e7eb1b0ae 100644
--- a/spec/models/integrations/asana_spec.rb
+++ b/spec/models/integrations/asana_spec.rb
@@ -3,11 +3,6 @@
require 'spec_helper'
RSpec.describe Integrations::Asana do
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
context 'active' do
before do
@@ -42,13 +37,12 @@ RSpec.describe Integrations::Asana do
allow(@asana).to receive_messages(
project: project,
project_id: project.id,
- service_hook: true,
api_key: 'verySecret',
restrict_to_branch: 'master'
)
end
- it 'calls Asana service to create a story' do
+ it 'calls Asana integration to create a story' do
data = create_data_for_commits("Message from commit. related to ##{gid}")
expected_message = "#{data[:user_name]} pushed to branch #{data[:ref]} of #{project.full_name} ( #{data[:commits][0][:url]} ): #{data[:commits][0][:message]}"
@@ -59,7 +53,7 @@ RSpec.describe Integrations::Asana do
@asana.execute(data)
end
- it 'calls Asana service to create a story and close a task' do
+ it 'calls Asana integration to create a story and close a task' do
data = create_data_for_commits('fix #456789')
d1 = double('Asana::Resources::Task')
expect(d1).to receive(:add_comment)
diff --git a/spec/models/integrations/assembla_spec.rb b/spec/models/integrations/assembla_spec.rb
index e5972bce95d..960dfea3dc4 100644
--- a/spec/models/integrations/assembla_spec.rb
+++ b/spec/models/integrations/assembla_spec.rb
@@ -5,11 +5,6 @@ require 'spec_helper'
RSpec.describe Integrations::Assembla do
include StubRequests
- describe "Associations" do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe "Execute" do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
@@ -19,7 +14,6 @@ RSpec.describe Integrations::Assembla do
allow(@assembla_integration).to receive_messages(
project_id: project.id,
project: project,
- service_hook: true,
token: 'verySecret',
subdomain: 'project_name'
)
diff --git a/spec/models/integrations/bamboo_spec.rb b/spec/models/integrations/bamboo_spec.rb
index 39966f7978d..73ebf404828 100644
--- a/spec/models/integrations/bamboo_spec.rb
+++ b/spec/models/integrations/bamboo_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching do
let_it_be(:project) { create(:project) }
- subject(:service) do
+ subject(:integration) do
described_class.create!(
project: project,
properties: {
@@ -22,53 +22,48 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching do
)
end
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
- context 'when service is active' do
+ context 'when active' do
before do
- subject.active = true
+ integration.active = true
end
it { is_expected.to validate_presence_of(:build_key) }
it { is_expected.to validate_presence_of(:bamboo_url) }
- it_behaves_like 'issue tracker service URL attribute', :bamboo_url
+ it_behaves_like 'issue tracker integration URL attribute', :bamboo_url
describe '#username' do
it 'does not validate the presence of username if password is nil' do
- subject.password = nil
+ integration.password = nil
- expect(subject).not_to validate_presence_of(:username)
+ expect(integration).not_to validate_presence_of(:username)
end
it 'validates the presence of username if password is present' do
- subject.password = 'secret'
+ integration.password = 'secret'
- expect(subject).to validate_presence_of(:username)
+ expect(integration).to validate_presence_of(:username)
end
end
describe '#password' do
it 'does not validate the presence of password if username is nil' do
- subject.username = nil
+ integration.username = nil
- expect(subject).not_to validate_presence_of(:password)
+ expect(integration).not_to validate_presence_of(:password)
end
it 'validates the presence of password if username is present' do
- subject.username = 'john'
+ integration.username = 'john'
- expect(subject).to validate_presence_of(:password)
+ expect(integration).to validate_presence_of(:password)
end
end
end
- context 'when service is inactive' do
+ context 'when inactive' do
before do
- subject.active = false
+ integration.active = false
end
it { is_expected.not_to validate_presence_of(:build_key) }
@@ -82,45 +77,38 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching do
describe 'before_update :reset_password' do
context 'when a password was previously set' do
it 'resets password if url changed' do
- bamboo_integration = service
-
- bamboo_integration.bamboo_url = 'http://gitlab1.com'
- bamboo_integration.save!
+ integration.bamboo_url = 'http://gitlab1.com'
+ integration.save!
- expect(bamboo_integration.password).to be_nil
+ expect(integration.password).to be_nil
end
it 'does not reset password if username changed' do
- bamboo_integration = service
+ integration.username = 'some_name'
+ integration.save!
- bamboo_integration.username = 'some_name'
- bamboo_integration.save!
-
- expect(bamboo_integration.password).to eq('password')
+ expect(integration.password).to eq('password')
end
it "does not reset password if new url is set together with password, even if it's the same password" do
- bamboo_integration = service
-
- bamboo_integration.bamboo_url = 'http://gitlab_edited.com'
- bamboo_integration.password = 'password'
- bamboo_integration.save!
+ integration.bamboo_url = 'http://gitlab_edited.com'
+ integration.password = 'password'
+ integration.save!
- expect(bamboo_integration.password).to eq('password')
- expect(bamboo_integration.bamboo_url).to eq('http://gitlab_edited.com')
+ expect(integration.password).to eq('password')
+ expect(integration.bamboo_url).to eq('http://gitlab_edited.com')
end
end
it 'saves password if new url is set together with password when no password was previously set' do
- bamboo_integration = service
- bamboo_integration.password = nil
+ integration.password = nil
- bamboo_integration.bamboo_url = 'http://gitlab_edited.com'
- bamboo_integration.password = 'password'
- bamboo_integration.save!
+ integration.bamboo_url = 'http://gitlab_edited.com'
+ integration.password = 'password'
+ integration.save!
- expect(bamboo_integration.password).to eq('password')
- expect(bamboo_integration.bamboo_url).to eq('http://gitlab_edited.com')
+ expect(integration.password).to eq('password')
+ expect(integration.bamboo_url).to eq('http://gitlab_edited.com')
end
end
end
@@ -129,29 +117,29 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching do
it 'runs update and build action' do
stub_update_and_build_request
- subject.execute(Gitlab::DataBuilder::Push::SAMPLE_DATA)
+ integration.execute(Gitlab::DataBuilder::Push::SAMPLE_DATA)
end
end
describe '#build_page' do
it 'returns the contents of the reactive cache' do
- stub_reactive_cache(service, { build_page: 'foo' }, 'sha', 'ref')
+ stub_reactive_cache(integration, { build_page: 'foo' }, 'sha', 'ref')
- expect(service.build_page('sha', 'ref')).to eq('foo')
+ expect(integration.build_page('sha', 'ref')).to eq('foo')
end
end
describe '#commit_status' do
it 'returns the contents of the reactive cache' do
- stub_reactive_cache(service, { commit_status: 'foo' }, 'sha', 'ref')
+ stub_reactive_cache(integration, { commit_status: 'foo' }, 'sha', 'ref')
- expect(service.commit_status('sha', 'ref')).to eq('foo')
+ expect(integration.commit_status('sha', 'ref')).to eq('foo')
end
end
shared_examples 'reactive cache calculation' do
describe '#build_page' do
- subject { service.calculate_reactive_cache('123', 'unused')[:build_page] }
+ subject { integration.calculate_reactive_cache('123', 'unused')[:build_page] }
it 'returns a specific URL when status is 500' do
stub_request(status: 500)
@@ -183,7 +171,7 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching do
end
describe '#commit_status' do
- subject { service.calculate_reactive_cache('123', 'unused')[:commit_status] }
+ subject { integration.calculate_reactive_cache('123', 'unused')[:commit_status] }
it 'sets commit status to :error when status is 500' do
stub_request(status: 500)
diff --git a/spec/models/integrations/base_chat_notification_spec.rb b/spec/models/integrations/base_chat_notification_spec.rb
index 656eaa3bbdd..ac4031a9b7d 100644
--- a/spec/models/integrations/base_chat_notification_spec.rb
+++ b/spec/models/integrations/base_chat_notification_spec.rb
@@ -15,26 +15,8 @@ RSpec.describe Integrations::BaseChatNotification do
it { is_expected.to validate_inclusion_of(:labels_to_be_notified_behavior).in_array(%w[match_any match_all]).allow_blank }
end
- describe '#can_test?' do
- context 'with empty repository' do
- it 'returns true' do
- subject.project = create(:project, :empty_repo)
-
- expect(subject.can_test?).to be true
- end
- end
-
- context 'with repository' do
- it 'returns true' do
- subject.project = create(:project, :repository)
-
- expect(subject.can_test?).to be true
- end
- end
- end
-
describe '#execute' do
- subject(:chat_service) { described_class.new }
+ subject(:chat_integration) { described_class.new }
let_it_be(:project) { create(:project, :repository) }
@@ -43,10 +25,9 @@ RSpec.describe Integrations::BaseChatNotification do
let(:data) { Gitlab::DataBuilder::Push.build_sample(subject.project, user) }
before do
- allow(chat_service).to receive_messages(
+ allow(chat_integration).to receive_messages(
project: project,
project_id: project.id,
- service_hook: true,
webhook: webhook_url
)
@@ -57,8 +38,8 @@ RSpec.describe Integrations::BaseChatNotification do
context 'with a repository' do
it 'returns true' do
- expect(chat_service).to receive(:notify).and_return(true)
- expect(chat_service.execute(data)).to be true
+ expect(chat_integration).to receive(:notify).and_return(true)
+ expect(chat_integration.execute(data)).to be true
end
end
@@ -66,8 +47,8 @@ RSpec.describe Integrations::BaseChatNotification do
it 'returns true' do
subject.project = create(:project, :empty_repo)
- expect(chat_service).to receive(:notify).and_return(true)
- expect(chat_service.execute(data)).to be true
+ expect(chat_integration).to receive(:notify).and_return(true)
+ expect(chat_integration.execute(data)).to be true
end
end
@@ -75,8 +56,8 @@ RSpec.describe Integrations::BaseChatNotification do
it 'does not remove spaces' do
allow(project).to receive(:full_name).and_return('Project Name')
- expect(chat_service).to receive(:get_message).with(any_args, hash_including(project_name: 'Project Name'))
- chat_service.execute(data)
+ expect(chat_integration).to receive(:get_message).with(any_args, hash_including(project_name: 'Project Name'))
+ chat_integration.execute(data)
end
end
@@ -89,76 +70,76 @@ RSpec.describe Integrations::BaseChatNotification do
let(:data) { Gitlab::DataBuilder::Note.build(note, user) }
- shared_examples 'notifies the chat service' do
+ shared_examples 'notifies the chat integration' do
specify do
- expect(chat_service).to receive(:notify).with(any_args)
+ expect(chat_integration).to receive(:notify).with(any_args)
- chat_service.execute(data)
+ chat_integration.execute(data)
end
end
- shared_examples 'does not notify the chat service' do
+ shared_examples 'does not notify the chat integration' do
specify do
- expect(chat_service).not_to receive(:notify).with(any_args)
+ expect(chat_integration).not_to receive(:notify).with(any_args)
- chat_service.execute(data)
+ chat_integration.execute(data)
end
end
- it_behaves_like 'notifies the chat service'
+ it_behaves_like 'notifies the chat integration'
context 'with label filter' do
- subject(:chat_service) { described_class.new(labels_to_be_notified: '~Bug') }
+ subject(:chat_integration) { described_class.new(labels_to_be_notified: '~Bug') }
- it_behaves_like 'notifies the chat service'
+ it_behaves_like 'notifies the chat integration'
context 'MergeRequest events' do
let(:data) { create(:merge_request, labels: [label]).to_hook_data(user) }
- it_behaves_like 'notifies the chat service'
+ it_behaves_like 'notifies the chat integration'
end
context 'Issue events' do
let(:data) { issue.to_hook_data(user) }
- it_behaves_like 'notifies the chat service'
+ it_behaves_like 'notifies the chat integration'
end
end
context 'when labels_to_be_notified_behavior is not defined' do
- subject(:chat_service) { described_class.new(labels_to_be_notified: label_filter) }
+ subject(:chat_integration) { described_class.new(labels_to_be_notified: label_filter) }
context 'no matching labels' do
let(:label_filter) { '~some random label' }
- it_behaves_like 'does not notify the chat service'
+ it_behaves_like 'does not notify the chat integration'
end
context 'only one label matches' do
let(:label_filter) { '~some random label, ~Bug' }
- it_behaves_like 'notifies the chat service'
+ it_behaves_like 'notifies the chat integration'
end
end
context 'when labels_to_be_notified_behavior is blank' do
- subject(:chat_service) { described_class.new(labels_to_be_notified: label_filter, labels_to_be_notified_behavior: '') }
+ subject(:chat_integration) { described_class.new(labels_to_be_notified: label_filter, labels_to_be_notified_behavior: '') }
context 'no matching labels' do
let(:label_filter) { '~some random label' }
- it_behaves_like 'does not notify the chat service'
+ it_behaves_like 'does not notify the chat integration'
end
context 'only one label matches' do
let(:label_filter) { '~some random label, ~Bug' }
- it_behaves_like 'notifies the chat service'
+ it_behaves_like 'notifies the chat integration'
end
end
context 'when labels_to_be_notified_behavior is match_any' do
- subject(:chat_service) do
+ subject(:chat_integration) do
described_class.new(
labels_to_be_notified: label_filter,
labels_to_be_notified_behavior: 'match_any'
@@ -168,24 +149,24 @@ RSpec.describe Integrations::BaseChatNotification do
context 'no label filter' do
let(:label_filter) { nil }
- it_behaves_like 'notifies the chat service'
+ it_behaves_like 'notifies the chat integration'
end
context 'no matching labels' do
let(:label_filter) { '~some random label' }
- it_behaves_like 'does not notify the chat service'
+ it_behaves_like 'does not notify the chat integration'
end
context 'only one label matches' do
let(:label_filter) { '~some random label, ~Bug' }
- it_behaves_like 'notifies the chat service'
+ it_behaves_like 'notifies the chat integration'
end
end
context 'when labels_to_be_notified_behavior is match_all' do
- subject(:chat_service) do
+ subject(:chat_integration) do
described_class.new(
labels_to_be_notified: label_filter,
labels_to_be_notified_behavior: 'match_all'
@@ -195,31 +176,31 @@ RSpec.describe Integrations::BaseChatNotification do
context 'no label filter' do
let(:label_filter) { nil }
- it_behaves_like 'notifies the chat service'
+ it_behaves_like 'notifies the chat integration'
end
context 'no matching labels' do
let(:label_filter) { '~some random label' }
- it_behaves_like 'does not notify the chat service'
+ it_behaves_like 'does not notify the chat integration'
end
context 'only one label matches' do
let(:label_filter) { '~some random label, ~Bug' }
- it_behaves_like 'does not notify the chat service'
+ it_behaves_like 'does not notify the chat integration'
end
context 'labels matches exactly' do
let(:label_filter) { '~Bug, ~Backend, ~Community contribution' }
- it_behaves_like 'notifies the chat service'
+ it_behaves_like 'notifies the chat integration'
end
context 'labels matches but object has more' do
let(:label_filter) { '~Bug, ~Backend' }
- it_behaves_like 'notifies the chat service'
+ it_behaves_like 'notifies the chat integration'
end
context 'labels are distributed on multiple objects' do
@@ -241,22 +222,22 @@ RSpec.describe Integrations::BaseChatNotification do
})
end
- it_behaves_like 'does not notify the chat service'
+ it_behaves_like 'does not notify the chat integration'
end
end
end
context 'with "channel" property' do
before do
- allow(chat_service).to receive(:channel).and_return(channel)
+ allow(chat_integration).to receive(:channel).and_return(channel)
end
context 'empty string' do
let(:channel) { '' }
it 'does not include the channel' do
- expect(chat_service).to receive(:notify).with(any_args, hash_excluding(:channel)).and_return(true)
- expect(chat_service.execute(data)).to be(true)
+ expect(chat_integration).to receive(:notify).with(any_args, hash_excluding(:channel)).and_return(true)
+ expect(chat_integration.execute(data)).to be(true)
end
end
@@ -264,20 +245,20 @@ RSpec.describe Integrations::BaseChatNotification do
let(:channel) { ' ' }
it 'does not include the channel' do
- expect(chat_service).to receive(:notify).with(any_args, hash_excluding(:channel)).and_return(true)
- expect(chat_service.execute(data)).to be(true)
+ expect(chat_integration).to receive(:notify).with(any_args, hash_excluding(:channel)).and_return(true)
+ expect(chat_integration.execute(data)).to be(true)
end
end
end
shared_examples 'with channel specified' do |channel, expected_channels|
before do
- allow(chat_service).to receive(:push_channel).and_return(channel)
+ allow(chat_integration).to receive(:push_channel).and_return(channel)
end
it 'notifies all channels' do
- expect(chat_service).to receive(:notify).with(any_args, hash_including(channel: expected_channels)).and_return(true)
- expect(chat_service.execute(data)).to be(true)
+ expect(chat_integration).to receive(:notify).with(any_args, hash_including(channel: expected_channels)).and_return(true)
+ expect(chat_integration.execute(data)).to be(true)
end
end
diff --git a/spec/models/integrations/base_issue_tracker_spec.rb b/spec/models/integrations/base_issue_tracker_spec.rb
index 0f1bc39929a..25e27e96a84 100644
--- a/spec/models/integrations/base_issue_tracker_spec.rb
+++ b/spec/models/integrations/base_issue_tracker_spec.rb
@@ -7,26 +7,26 @@ RSpec.describe Integrations::BaseIssueTracker do
let(:project) { create :project }
describe 'only one issue tracker per project' do
- let(:service) { Integrations::Redmine.new(project: project, active: true, issue_tracker_data: build(:issue_tracker_data)) }
+ let(:integration) { Integrations::Redmine.new(project: project, active: true, issue_tracker_data: build(:issue_tracker_data)) }
before do
create(:custom_issue_tracker_integration, project: project)
end
- context 'when service is changed manually by user' do
+ context 'when integration is changed manually by user' do
it 'executes the validation' do
- valid = service.valid?(:manual_change)
+ valid = integration.valid?(:manual_change)
expect(valid).to be_falsey
- expect(service.errors[:base]).to include(
+ expect(integration.errors[:base]).to include(
'Another issue tracker is already in use. Only one issue tracker service can be active at a time'
)
end
end
- context 'when service is changed internally' do
+ context 'when integration is changed internally' do
it 'does not execute the validation' do
- expect(service.valid?).to be_truthy
+ expect(integration.valid?).to be_truthy
end
end
end
diff --git a/spec/models/integrations/bugzilla_spec.rb b/spec/models/integrations/bugzilla_spec.rb
index e75fa8dd4d4..432306c8fa8 100644
--- a/spec/models/integrations/bugzilla_spec.rb
+++ b/spec/models/integrations/bugzilla_spec.rb
@@ -3,13 +3,8 @@
require 'spec_helper'
RSpec.describe Integrations::Bugzilla do
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
@@ -17,12 +12,12 @@ RSpec.describe Integrations::Bugzilla do
it { is_expected.to validate_presence_of(:project_url) }
it { is_expected.to validate_presence_of(:issues_url) }
it { is_expected.to validate_presence_of(:new_issue_url) }
- it_behaves_like 'issue tracker service URL attribute', :project_url
- it_behaves_like 'issue tracker service URL attribute', :issues_url
- it_behaves_like 'issue tracker service URL attribute', :new_issue_url
+ it_behaves_like 'issue tracker integration URL attribute', :project_url
+ it_behaves_like 'issue tracker integration URL attribute', :issues_url
+ it_behaves_like 'issue tracker integration URL attribute', :new_issue_url
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
diff --git a/spec/models/integrations/buildkite_spec.rb b/spec/models/integrations/buildkite_spec.rb
index 7dc81da7003..4207ae0d555 100644
--- a/spec/models/integrations/buildkite_spec.rb
+++ b/spec/models/integrations/buildkite_spec.rb
@@ -8,34 +8,32 @@ RSpec.describe Integrations::Buildkite, :use_clean_rails_memory_store_caching do
let(:project) { create(:project) }
- subject(:service) do
+ subject(:integration) do
described_class.create!(
project: project,
properties: {
- service_hook: true,
project_url: 'https://buildkite.com/organization-name/example-pipeline',
token: 'secret-sauce-webhook-token:secret-sauce-status-token'
}
)
end
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
+ it_behaves_like Integrations::HasWebHook do
+ let(:hook_url) { 'https://webhook.buildkite.com/deliver/secret-sauce-webhook-token' }
end
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
it { is_expected.to validate_presence_of(:project_url) }
it { is_expected.to validate_presence_of(:token) }
- it_behaves_like 'issue tracker service URL attribute', :project_url
+ it_behaves_like 'issue tracker integration URL attribute', :project_url
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
@@ -47,7 +45,7 @@ RSpec.describe Integrations::Buildkite, :use_clean_rails_memory_store_caching do
describe '.supported_events' do
it 'supports push, merge_request, and tag_push events' do
- expect(service.supported_events).to eq %w(push merge_request tag_push)
+ expect(integration.supported_events).to eq %w(push merge_request tag_push)
end
end
@@ -57,18 +55,18 @@ RSpec.describe Integrations::Buildkite, :use_clean_rails_memory_store_caching do
end
it 'always activates SSL verification after saved' do
- service.create_service_hook(enable_ssl_verification: false)
+ integration.create_service_hook(enable_ssl_verification: false)
- service.enable_ssl_verification = false
- service.active = true
+ integration.enable_ssl_verification = false
+ integration.active = true
- expect { service.save! }
- .to change { service.service_hook.enable_ssl_verification }.from(false).to(true)
+ expect { integration.save! }
+ .to change { integration.service_hook.enable_ssl_verification }.from(false).to(true)
end
- describe '#webhook_url' do
+ describe '#hook_url' do
it 'returns the webhook url' do
- expect(service.webhook_url).to eq(
+ expect(integration.hook_url).to eq(
'https://webhook.buildkite.com/deliver/secret-sauce-webhook-token'
)
end
@@ -76,7 +74,7 @@ RSpec.describe Integrations::Buildkite, :use_clean_rails_memory_store_caching do
describe '#commit_status_path' do
it 'returns the correct status page' do
- expect(service.commit_status_path('2ab7834c')).to eq(
+ expect(integration.commit_status_path('2ab7834c')).to eq(
'https://gitlab.buildkite.com/status/secret-sauce-status-token.json?commit=2ab7834c'
)
end
@@ -84,7 +82,7 @@ RSpec.describe Integrations::Buildkite, :use_clean_rails_memory_store_caching do
describe '#build_page' do
it 'returns the correct build page' do
- expect(service.build_page('2ab7834c', nil)).to eq(
+ expect(integration.build_page('2ab7834c', nil)).to eq(
'https://buildkite.com/organization-name/example-pipeline/builds?commit=2ab7834c'
)
end
@@ -92,9 +90,9 @@ RSpec.describe Integrations::Buildkite, :use_clean_rails_memory_store_caching do
describe '#commit_status' do
it 'returns the contents of the reactive cache' do
- stub_reactive_cache(service, { commit_status: 'foo' }, 'sha', 'ref')
+ stub_reactive_cache(integration, { commit_status: 'foo' }, 'sha', 'ref')
- expect(service.commit_status('sha', 'ref')).to eq('foo')
+ expect(integration.commit_status('sha', 'ref')).to eq('foo')
end
end
@@ -104,7 +102,7 @@ RSpec.describe Integrations::Buildkite, :use_clean_rails_memory_store_caching do
'https://gitlab.buildkite.com/status/secret-sauce-status-token.json?commit=123'
end
- subject { service.calculate_reactive_cache('123', 'unused')[:commit_status] }
+ subject { integration.calculate_reactive_cache('123', 'unused')[:commit_status] }
it 'sets commit status to :error when status is 500' do
stub_request(status: 500)
diff --git a/spec/models/integrations/campfire_spec.rb b/spec/models/integrations/campfire_spec.rb
index d68f8e0bd4e..0044e6fae21 100644
--- a/spec/models/integrations/campfire_spec.rb
+++ b/spec/models/integrations/campfire_spec.rb
@@ -5,13 +5,8 @@ require 'spec_helper'
RSpec.describe Integrations::Campfire do
include StubRequests
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
@@ -19,7 +14,7 @@ RSpec.describe Integrations::Campfire do
it { is_expected.to validate_presence_of(:token) }
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
@@ -37,7 +32,6 @@ RSpec.describe Integrations::Campfire do
allow(@campfire_integration).to receive_messages(
project_id: project.id,
project: project,
- service_hook: true,
token: 'verySecret',
subdomain: 'project-name',
room: 'test-room'
diff --git a/spec/models/integrations/confluence_spec.rb b/spec/models/integrations/confluence_spec.rb
index 08e18c99376..e2f9316bc95 100644
--- a/spec/models/integrations/confluence_spec.rb
+++ b/spec/models/integrations/confluence_spec.rb
@@ -3,17 +3,12 @@
require 'spec_helper'
RSpec.describe Integrations::Confluence do
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
before do
subject.active = active
end
- context 'when service is active' do
+ context 'when integration is active' do
let(:active) { true }
it { is_expected.not_to allow_value('https://example.com').for(:confluence_url) }
@@ -35,7 +30,7 @@ RSpec.describe Integrations::Confluence do
it { is_expected.to validate_presence_of(:confluence_url) }
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
let(:active) { false }
it { is_expected.not_to validate_presence_of(:confluence_url) }
@@ -71,13 +66,13 @@ RSpec.describe Integrations::Confluence do
subject { project.project_setting.has_confluence? }
- it 'sets the property to true when service is active' do
+ it 'sets the property to true when integration is active' do
create(:confluence_integration, project: project, active: true)
is_expected.to be(true)
end
- it 'sets the property to false when service is not active' do
+ it 'sets the property to false when integration is not active' do
create(:confluence_integration, project: project, active: false)
is_expected.to be(false)
diff --git a/spec/models/integrations/custom_issue_tracker_spec.rb b/spec/models/integrations/custom_issue_tracker_spec.rb
index 25f2648e738..e1ffe7a74f0 100644
--- a/spec/models/integrations/custom_issue_tracker_spec.rb
+++ b/spec/models/integrations/custom_issue_tracker_spec.rb
@@ -3,13 +3,8 @@
require 'spec_helper'
RSpec.describe Integrations::CustomIssueTracker do
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
@@ -17,12 +12,12 @@ RSpec.describe Integrations::CustomIssueTracker do
it { is_expected.to validate_presence_of(:project_url) }
it { is_expected.to validate_presence_of(:issues_url) }
it { is_expected.to validate_presence_of(:new_issue_url) }
- it_behaves_like 'issue tracker service URL attribute', :project_url
- it_behaves_like 'issue tracker service URL attribute', :issues_url
- it_behaves_like 'issue tracker service URL attribute', :new_issue_url
+ it_behaves_like 'issue tracker integration URL attribute', :project_url
+ it_behaves_like 'issue tracker integration URL attribute', :issues_url
+ it_behaves_like 'issue tracker integration URL attribute', :new_issue_url
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
diff --git a/spec/models/integrations/datadog_spec.rb b/spec/models/integrations/datadog_spec.rb
index 165b21840e0..e2749ab1bc1 100644
--- a/spec/models/integrations/datadog_spec.rb
+++ b/spec/models/integrations/datadog_spec.rb
@@ -10,13 +10,13 @@ RSpec.describe Integrations::Datadog do
let(:active) { true }
let(:dd_site) { 'datadoghq.com' }
- let(:default_url) { 'https://webhooks-http-intake.logs.datadoghq.com/v1/input/' }
+ let(:default_url) { 'https://webhooks-http-intake.logs.datadoghq.com/api/v2/webhook' }
let(:api_url) { '' }
let(:api_key) { SecureRandom.hex(32) }
let(:dd_env) { 'ci' }
let(:dd_service) { 'awesome-gitlab' }
- let(:expected_hook_url) { default_url + api_key + "?env=#{dd_env}&service=#{dd_service}" }
+ let(:expected_hook_url) { default_url + "?dd-api-key=#{api_key}&env=#{dd_env}&service=#{dd_service}" }
let(:instance) do
described_class.new(
@@ -38,9 +38,9 @@ RSpec.describe Integrations::Datadog do
let(:pipeline_data) { Gitlab::DataBuilder::Pipeline.build(pipeline) }
let(:build_data) { Gitlab::DataBuilder::Build.build(build) }
- describe 'associations' do
- it { is_expected.to belong_to(:project) }
- it { is_expected.to have_one(:service_hook) }
+ it_behaves_like Integrations::HasWebHook do
+ let(:integration) { instance }
+ let(:hook_url) { "#{described_class::URL_TEMPLATE % { datadog_domain: dd_site }}?dd-api-key=#{api_key}&env=#{dd_env}&service=#{dd_service}" }
end
describe 'validations' do
@@ -65,7 +65,7 @@ RSpec.describe Integrations::Datadog do
context 'with custom api_url' do
let(:dd_site) { '' }
- let(:api_url) { 'https://webhooks-http-intake.logs.datad0g.com/v1/input/' }
+ let(:api_url) { 'https://webhooks-http-intake.logs.datad0g.com/api/v2/webhook' }
it { is_expected.not_to validate_presence_of(:datadog_site) }
it { is_expected.to validate_presence_of(:api_url) }
@@ -91,7 +91,7 @@ RSpec.describe Integrations::Datadog do
end
end
- context 'when service is not active' do
+ context 'when integration is not active' do
let(:active) { false }
it { is_expected.to be_valid }
@@ -107,9 +107,9 @@ RSpec.describe Integrations::Datadog do
end
context 'with custom URL' do
- let(:api_url) { 'https://webhooks-http-intake.logs.datad0g.com/v1/input/' }
+ let(:api_url) { 'https://webhooks-http-intake.logs.datad0g.com/api/v2/webhook' }
- it { is_expected.to eq(api_url + api_key + "?env=#{dd_env}&service=#{dd_service}") }
+ it { is_expected.to eq(api_url + "?dd-api-key=#{api_key}&env=#{dd_env}&service=#{dd_service}") }
context 'blank' do
let(:api_url) { '' }
@@ -122,7 +122,7 @@ RSpec.describe Integrations::Datadog do
let(:dd_service) { '' }
let(:dd_env) { '' }
- it { is_expected.to eq(default_url + api_key) }
+ it { is_expected.to eq(default_url + "?dd-api-key=#{api_key}") }
end
end
diff --git a/spec/models/integrations/discord_spec.rb b/spec/models/integrations/discord_spec.rb
index bff6a8ee5b2..b85620782c1 100644
--- a/spec/models/integrations/discord_spec.rb
+++ b/spec/models/integrations/discord_spec.rb
@@ -11,7 +11,9 @@ RSpec.describe Integrations::Discord do
embeds: [
include(
author: include(name: be_present),
- description: be_present
+ description: be_present,
+ color: be_present,
+ timestamp: be_present
)
]
}
@@ -33,7 +35,6 @@ RSpec.describe Integrations::Discord do
allow(subject).to receive_messages(
project: project,
project_id: project.id,
- service_hook: true,
webhook: webhook_url
)
@@ -47,15 +48,19 @@ RSpec.describe Integrations::Discord do
allow(client).to receive(:execute).and_yield(builder)
end
- subject.execute(sample_data)
+ freeze_time do
+ subject.execute(sample_data)
- expect(builder.to_json_hash[:embeds].first).to include(
- description: start_with("#{user.name} pushed to branch [master](http://localhost/#{project.namespace.path}/#{project.path}/commits/master) of"),
- author: hash_including(
- icon_url: start_with('https://www.gravatar.com/avatar/'),
- name: user.name
+ expect(builder.to_json_hash[:embeds].first).to include(
+ description: start_with("#{user.name} pushed to branch [master](http://localhost/#{project.namespace.path}/#{project.path}/commits/master) of"),
+ author: hash_including(
+ icon_url: start_with('https://www.gravatar.com/avatar/'),
+ name: user.name
+ ),
+ color: 16543014,
+ timestamp: Time.now.utc.iso8601
)
- )
+ end
end
context 'DNS rebind to local address' do
diff --git a/spec/models/integrations/drone_ci_spec.rb b/spec/models/integrations/drone_ci_spec.rb
index 137f078edca..062e23d628e 100644
--- a/spec/models/integrations/drone_ci_spec.rb
+++ b/spec/models/integrations/drone_ci_spec.rb
@@ -5,11 +5,6 @@ require 'spec_helper'
RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
include ReactiveCachingHelpers
- describe 'associations' do
- it { is_expected.to belong_to(:project) }
- it { is_expected.to have_one(:service_hook) }
- end
-
describe 'validations' do
context 'active' do
before do
@@ -18,7 +13,7 @@ RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
it { is_expected.to validate_presence_of(:token) }
it { is_expected.to validate_presence_of(:drone_url) }
- it_behaves_like 'issue tracker service URL attribute', :drone_url
+ it_behaves_like 'issue tracker integration URL attribute', :drone_url
end
context 'inactive' do
@@ -32,7 +27,15 @@ RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
end
shared_context :drone_ci_integration do
- let(:drone) { described_class.new }
+ subject(:drone) do
+ described_class.new(
+ project: project,
+ active: true,
+ drone_url: drone_url,
+ token: token
+ )
+ end
+
let(:project) { create(:project, :repository, name: 'project') }
let(:path) { project.full_path }
let(:drone_url) { 'http://drone.example.com' }
@@ -45,16 +48,6 @@ RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
let(:build_page) { "#{drone_url}/gitlab/#{path}/redirect/commits/#{sha}?branch=#{branch}" }
let(:commit_status_path) { "#{drone_url}/gitlab/#{path}/commits/#{sha}?branch=#{branch}&access_token=#{token}" }
- before do
- allow(drone).to receive_messages(
- project_id: project.id,
- project: project,
- active: true,
- drone_url: drone_url,
- token: token
- )
- end
-
def stub_request(status: 200, body: nil)
body ||= %q({"status":"success"})
@@ -66,7 +59,21 @@ RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
end
end
- describe "service page/path methods" do
+ it_behaves_like Integrations::HasWebHook do
+ include_context :drone_ci_integration
+
+ let(:integration) { drone }
+ let(:hook_url) { "#{drone_url}/hook?owner=#{project.namespace.full_path}&name=#{project.path}&access_token=#{token}" }
+
+ it 'does not create a hook if project is not present' do
+ integration.project = nil
+ integration.instance = true
+
+ expect { integration.save! }.not_to change(ServiceHook, :count)
+ end
+ end
+
+ describe "integration page/path methods" do
include_context :drone_ci_integration
it { expect(drone.build_page(sha, branch)).to eq(build_page) }
@@ -137,10 +144,17 @@ RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
Gitlab::DataBuilder::Push.build_sample(project, user)
end
- it do
- service_hook = double
- expect(service_hook).to receive(:execute)
- expect(drone).to receive(:service_hook).and_return(service_hook)
+ it 'executes the webhook' do
+ expect(drone).to receive(:execute_web_hook!).with(push_sample_data)
+
+ drone.execute(push_sample_data)
+ end
+
+ it 'does not try to execute the webhook if the integration is not in a project' do
+ drone.project = nil
+ drone.instance = true
+
+ expect(drone).not_to receive(:execute_web_hook!)
drone.execute(push_sample_data)
end
diff --git a/spec/models/integrations/emails_on_push_spec.rb b/spec/models/integrations/emails_on_push_spec.rb
index c82d4bdff9b..bdca267f6cb 100644
--- a/spec/models/integrations/emails_on_push_spec.rb
+++ b/spec/models/integrations/emails_on_push_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Integrations::EmailsOnPush do
let_it_be(:project) { create_default(:project).freeze }
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
@@ -14,7 +14,7 @@ RSpec.describe Integrations::EmailsOnPush do
it { is_expected.to validate_presence_of(:recipients) }
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
@@ -27,7 +27,7 @@ RSpec.describe Integrations::EmailsOnPush do
stub_const("#{described_class}::RECIPIENTS_LIMIT", 2)
end
- subject(:service) { described_class.new(project: project, recipients: recipients, active: true) }
+ subject(:integration) { described_class.new(project: project, recipients: recipients, active: true) }
context 'valid number of recipients' do
let(:recipients) { 'foo@bar.com duplicate@example.com Duplicate@example.com invalid-email' }
@@ -43,14 +43,14 @@ RSpec.describe Integrations::EmailsOnPush do
it { is_expected.not_to be_valid }
it 'adds an error message' do
- service.valid?
+ integration.valid?
- expect(service.errors).to contain_exactly('Recipients can\'t exceed 2')
+ expect(integration.errors).to contain_exactly('Recipients can\'t exceed 2')
end
- context 'when service is not active' do
+ context 'when integration is not active' do
before do
- service.active = false
+ integration.active = false
end
it { is_expected.to be_valid }
diff --git a/spec/models/integrations/ewm_spec.rb b/spec/models/integrations/ewm_spec.rb
index 38897adb447..49681fefe55 100644
--- a/spec/models/integrations/ewm_spec.rb
+++ b/spec/models/integrations/ewm_spec.rb
@@ -3,13 +3,8 @@
require 'spec_helper'
RSpec.describe Integrations::Ewm do
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
@@ -17,12 +12,12 @@ RSpec.describe Integrations::Ewm do
it { is_expected.to validate_presence_of(:project_url) }
it { is_expected.to validate_presence_of(:issues_url) }
it { is_expected.to validate_presence_of(:new_issue_url) }
- it_behaves_like 'issue tracker service URL attribute', :project_url
- it_behaves_like 'issue tracker service URL attribute', :issues_url
- it_behaves_like 'issue tracker service URL attribute', :new_issue_url
+ it_behaves_like 'issue tracker integration URL attribute', :project_url
+ it_behaves_like 'issue tracker integration URL attribute', :issues_url
+ it_behaves_like 'issue tracker integration URL attribute', :new_issue_url
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
diff --git a/spec/models/integrations/external_wiki_spec.rb b/spec/models/integrations/external_wiki_spec.rb
index 8c20b810301..e4d6a1c7c84 100644
--- a/spec/models/integrations/external_wiki_spec.rb
+++ b/spec/models/integrations/external_wiki_spec.rb
@@ -3,22 +3,17 @@
require 'spec_helper'
RSpec.describe Integrations::ExternalWiki do
- describe "Associations" do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
it { is_expected.to validate_presence_of(:external_wiki_url) }
- it_behaves_like 'issue tracker service URL attribute', :external_wiki_url
+ it_behaves_like 'issue tracker integration URL attribute', :external_wiki_url
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
diff --git a/spec/models/integrations/flowdock_spec.rb b/spec/models/integrations/flowdock_spec.rb
index 189831fa32d..daafb1b3958 100644
--- a/spec/models/integrations/flowdock_spec.rb
+++ b/spec/models/integrations/flowdock_spec.rb
@@ -3,13 +3,8 @@
require 'spec_helper'
RSpec.describe Integrations::Flowdock do
- describe "Associations" do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
@@ -17,7 +12,7 @@ RSpec.describe Integrations::Flowdock do
it { is_expected.to validate_presence_of(:token) }
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
@@ -38,7 +33,6 @@ RSpec.describe Integrations::Flowdock do
allow(flowdock_integration).to receive_messages(
project_id: project.id,
project: project,
- service_hook: true,
token: 'verySecret'
)
WebMock.stub_request(:post, api_url)
diff --git a/spec/models/integrations/irker_spec.rb b/spec/models/integrations/irker_spec.rb
index a69be1292e0..8b207e8b43e 100644
--- a/spec/models/integrations/irker_spec.rb
+++ b/spec/models/integrations/irker_spec.rb
@@ -5,13 +5,8 @@ require 'socket'
require 'json'
RSpec.describe Integrations::Irker do
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
@@ -19,7 +14,7 @@ RSpec.describe Integrations::Irker do
it { is_expected.to validate_presence_of(:recipients) }
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
@@ -46,7 +41,6 @@ RSpec.describe Integrations::Irker do
active: true,
project: project,
project_id: project.id,
- service_hook: true,
server_host: @irker_server.addr[2],
server_port: @irker_server.addr[1],
default_irc_uri: 'irc://chat.freenode.net/',
diff --git a/spec/models/integrations/jenkins_spec.rb b/spec/models/integrations/jenkins_spec.rb
index 2374dfe4480..9eb2a7fc098 100644
--- a/spec/models/integrations/jenkins_spec.rb
+++ b/spec/models/integrations/jenkins_spec.rb
@@ -24,14 +24,14 @@ RSpec.describe Integrations::Jenkins do
let(:jenkins_authorization) { "Basic " + ::Base64.strict_encode64(jenkins_username + ':' + jenkins_password) }
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
+ it_behaves_like Integrations::HasWebHook do
+ let(:integration) { described_class.new(jenkins_params) }
+ let(:hook_url) { "http://#{ERB::Util.url_encode jenkins_username}:#{ERB::Util.url_encode jenkins_password}@jenkins.example.com/project/my_project" }
end
describe 'username validation' do
- before do
- @jenkins_service = described_class.create!(
+ let(:jenkins_integration) do
+ described_class.create!(
active: active,
project: project,
properties: {
@@ -43,9 +43,9 @@ RSpec.describe Integrations::Jenkins do
)
end
- subject { @jenkins_service }
+ subject { jenkins_integration }
- context 'when the service is active' do
+ context 'when the integration is active' do
let(:active) { true }
context 'when password was not touched' do
@@ -74,7 +74,7 @@ RSpec.describe Integrations::Jenkins do
end
end
- context 'when the service is inactive' do
+ context 'when the integration is inactive' do
let(:active) { false }
it { is_expected.not_to validate_presence_of :username }
@@ -84,7 +84,7 @@ RSpec.describe Integrations::Jenkins do
describe '#hook_url' do
let(:username) { nil }
let(:password) { nil }
- let(:jenkins_service) do
+ let(:jenkins_integration) do
described_class.new(
project: project,
properties: {
@@ -96,7 +96,7 @@ RSpec.describe Integrations::Jenkins do
)
end
- subject { jenkins_service.hook_url }
+ subject { jenkins_integration.hook_url }
context 'when the jenkins_url has no relative path' do
let(:jenkins_url) { 'http://jenkins.example.com/' }
@@ -138,10 +138,10 @@ RSpec.describe Integrations::Jenkins do
user = create(:user, username: 'username')
project = create(:project, name: 'project')
push_sample_data = Gitlab::DataBuilder::Push.build_sample(project, user)
- jenkins_service = described_class.create!(jenkins_params)
+ jenkins_integration = described_class.create!(jenkins_params)
stub_request(:post, jenkins_hook_url).with(headers: { 'Authorization' => jenkins_authorization })
- result = jenkins_service.test(push_sample_data)
+ result = jenkins_integration.test(push_sample_data)
expect(result).to eq({ success: true, result: '' })
end
@@ -152,20 +152,20 @@ RSpec.describe Integrations::Jenkins do
let(:namespace) { create(:group, :private) }
let(:project) { create(:project, :private, name: 'project', namespace: namespace) }
let(:push_sample_data) { Gitlab::DataBuilder::Push.build_sample(project, user) }
- let(:jenkins_service) { described_class.create!(jenkins_params) }
+ let(:jenkins_integration) { described_class.create!(jenkins_params) }
before do
stub_request(:post, jenkins_hook_url)
end
it 'invokes the Jenkins API' do
- jenkins_service.execute(push_sample_data)
+ jenkins_integration.execute(push_sample_data)
expect(a_request(:post, jenkins_hook_url)).to have_been_made.once
end
it 'adds default web hook headers to the request' do
- jenkins_service.execute(push_sample_data)
+ jenkins_integration.execute(push_sample_data)
expect(
a_request(:post, jenkins_hook_url)
@@ -174,7 +174,7 @@ RSpec.describe Integrations::Jenkins do
end
it 'request url contains properly serialized username and password' do
- jenkins_service.execute(push_sample_data)
+ jenkins_integration.execute(push_sample_data)
expect(
a_request(:post, 'http://jenkins.example.com/project/my_project')
@@ -187,8 +187,8 @@ RSpec.describe Integrations::Jenkins do
let(:project) { create(:project) }
context 'when a password was previously set' do
- before do
- @jenkins_service = described_class.create!(
+ let(:jenkins_integration) do
+ described_class.create!(
project: project,
properties: {
jenkins_url: 'http://jenkins.example.com/',
@@ -199,42 +199,47 @@ RSpec.describe Integrations::Jenkins do
end
it 'resets password if url changed' do
- @jenkins_service.jenkins_url = 'http://jenkins-edited.example.com/'
- @jenkins_service.save!
- expect(@jenkins_service.password).to be_nil
+ jenkins_integration.jenkins_url = 'http://jenkins-edited.example.com/'
+ jenkins_integration.save!
+
+ expect(jenkins_integration.password).to be_nil
end
it 'resets password if username is blank' do
- @jenkins_service.username = ''
- @jenkins_service.save!
- expect(@jenkins_service.password).to be_nil
+ jenkins_integration.username = ''
+ jenkins_integration.save!
+
+ expect(jenkins_integration.password).to be_nil
end
it 'does not reset password if username changed' do
- @jenkins_service.username = 'some_name'
- @jenkins_service.save!
- expect(@jenkins_service.password).to eq('password')
+ jenkins_integration.username = 'some_name'
+ jenkins_integration.save!
+
+ expect(jenkins_integration.password).to eq('password')
end
it 'does not reset password if new url is set together with password, even if it\'s the same password' do
- @jenkins_service.jenkins_url = 'http://jenkins_edited.example.com/'
- @jenkins_service.password = 'password'
- @jenkins_service.save!
- expect(@jenkins_service.password).to eq('password')
- expect(@jenkins_service.jenkins_url).to eq('http://jenkins_edited.example.com/')
+ jenkins_integration.jenkins_url = 'http://jenkins_edited.example.com/'
+ jenkins_integration.password = 'password'
+ jenkins_integration.save!
+
+ expect(jenkins_integration.password).to eq('password')
+ expect(jenkins_integration.jenkins_url).to eq('http://jenkins_edited.example.com/')
end
it 'resets password if url changed, even if setter called multiple times' do
- @jenkins_service.jenkins_url = 'http://jenkins1.example.com/'
- @jenkins_service.jenkins_url = 'http://jenkins1.example.com/'
- @jenkins_service.save!
- expect(@jenkins_service.password).to be_nil
+ jenkins_integration.jenkins_url = 'http://jenkins1.example.com/'
+ jenkins_integration.jenkins_url = 'http://jenkins1.example.com/'
+ jenkins_integration.save!
+
+ expect(jenkins_integration.password).to be_nil
end
end
context 'when no password was previously set' do
- before do
- @jenkins_service = described_class.create!(
+ let(:jenkins_integration) do
+ described_class.create!(
project: create(:project),
properties: {
jenkins_url: 'http://jenkins.example.com/',
@@ -244,11 +249,12 @@ RSpec.describe Integrations::Jenkins do
end
it 'saves password if new url is set together with password' do
- @jenkins_service.jenkins_url = 'http://jenkins_edited.example.com/'
- @jenkins_service.password = 'password'
- @jenkins_service.save!
- expect(@jenkins_service.password).to eq('password')
- expect(@jenkins_service.jenkins_url).to eq('http://jenkins_edited.example.com/')
+ jenkins_integration.jenkins_url = 'http://jenkins_edited.example.com/'
+ jenkins_integration.password = 'password'
+ jenkins_integration.save!
+
+ expect(jenkins_integration.password).to eq('password')
+ expect(jenkins_integration.jenkins_url).to eq('http://jenkins_edited.example.com/')
end
end
end
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index 23ade570383..6ca72d68bbb 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Integrations::Jira do
let(:password) { 'jira-password' }
let(:transition_id) { 'test27' }
let(:server_info_results) { { 'deploymentType' => 'Cloud' } }
- let(:jira_service) do
+ let(:jira_integration) do
described_class.new(
project: project,
url: url,
@@ -100,20 +100,15 @@ RSpec.describe Integrations::Jira do
end
describe '#fields' do
- let(:service) { create(:jira_service) }
+ let(:integration) { create(:jira_integration) }
- subject(:fields) { service.fields }
+ subject(:fields) { integration.fields }
it 'returns custom fields' do
expect(fields.pluck(:name)).to eq(%w[url api_url username password])
end
end
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe '.reference_pattern' do
using RSpec::Parameterized::TableSyntax
@@ -146,39 +141,35 @@ RSpec.describe Integrations::Jira do
}
end
- subject { described_class.create!(params) }
+ subject(:integration) { described_class.create!(params) }
it 'does not store data into properties' do
- expect(subject.properties).to be_nil
+ expect(integration.properties).to be_nil
end
it 'stores data in data_fields correctly' do
- service = subject
-
- expect(service.jira_tracker_data.url).to eq(url)
- expect(service.jira_tracker_data.api_url).to eq(api_url)
- expect(service.jira_tracker_data.username).to eq(username)
- expect(service.jira_tracker_data.password).to eq(password)
- expect(service.jira_tracker_data.jira_issue_transition_id).to eq(transition_id)
- expect(service.jira_tracker_data.deployment_cloud?).to be_truthy
+ expect(integration.jira_tracker_data.url).to eq(url)
+ expect(integration.jira_tracker_data.api_url).to eq(api_url)
+ expect(integration.jira_tracker_data.username).to eq(username)
+ expect(integration.jira_tracker_data.password).to eq(password)
+ expect(integration.jira_tracker_data.jira_issue_transition_id).to eq(transition_id)
+ expect(integration.jira_tracker_data.deployment_cloud?).to be_truthy
end
context 'when loading serverInfo' do
- let(:jira_service) { subject }
-
- context 'from a Cloud instance' do
+ context 'with a Cloud instance' do
let(:server_info_results) { { 'deploymentType' => 'Cloud' } }
it 'is detected' do
- expect(jira_service.jira_tracker_data.deployment_cloud?).to be_truthy
+ expect(integration.jira_tracker_data).to be_deployment_cloud
end
end
- context 'from a Server instance' do
+ context 'with a Server instance' do
let(:server_info_results) { { 'deploymentType' => 'Server' } }
it 'is detected' do
- expect(jira_service.jira_tracker_data.deployment_server?).to be_truthy
+ expect(integration.jira_tracker_data).to be_deployment_server
end
end
@@ -189,7 +180,7 @@ RSpec.describe Integrations::Jira do
let(:api_url) { 'http://example-api.atlassian.net' }
it 'deployment_type is set to cloud' do
- expect(jira_service.jira_tracker_data.deployment_cloud?).to be_truthy
+ expect(integration.jira_tracker_data).to be_deployment_cloud
end
end
@@ -197,7 +188,7 @@ RSpec.describe Integrations::Jira do
let(:api_url) { 'http://my-jira-api.someserver.com' }
it 'deployment_type is set to server' do
- expect(jira_service.jira_tracker_data.deployment_server?).to be_truthy
+ expect(integration.jira_tracker_data).to be_deployment_server
end
end
end
@@ -210,7 +201,7 @@ RSpec.describe Integrations::Jira do
it 'deployment_type is set to cloud' do
expect(Gitlab::AppLogger).to receive(:warn).with(message: "Jira API returned no ServerInfo, setting deployment_type from URL", server_info: server_info_results, url: api_url)
- expect(jira_service.jira_tracker_data.deployment_cloud?).to be_truthy
+ expect(integration.jira_tracker_data).to be_deployment_cloud
end
end
@@ -219,7 +210,7 @@ RSpec.describe Integrations::Jira do
it 'deployment_type is set to server' do
expect(Gitlab::AppLogger).to receive(:warn).with(message: "Jira API returned no ServerInfo, setting deployment_type from URL", server_info: server_info_results, url: api_url)
- expect(jira_service.jira_tracker_data.deployment_server?).to be_truthy
+ expect(integration.jira_tracker_data).to be_deployment_server
end
end
end
@@ -253,11 +244,11 @@ RSpec.describe Integrations::Jira do
context 'reading data' do
it 'reads data correctly' do
- expect(service.url).to eq(url)
- expect(service.api_url).to eq(api_url)
- expect(service.username).to eq(username)
- expect(service.password).to eq(password)
- expect(service.jira_issue_transition_id).to eq(transition_id)
+ expect(integration.url).to eq(url)
+ expect(integration.api_url).to eq(api_url)
+ expect(integration.username).to eq(username)
+ expect(integration.password).to eq(password)
+ expect(integration.jira_issue_transition_id).to eq(transition_id)
end
end
@@ -267,15 +258,11 @@ RSpec.describe Integrations::Jira do
let_it_be(:new_url) { 'http://jira-new.example.com' }
before do
- service.update!(username: new_username, url: new_url)
- end
-
- it 'leaves properties field emtpy' do
- # expect(service.reload.properties).to be_empty
+ integration.update!(username: new_username, url: new_url)
end
it 'stores updated data in jira_tracker_data table' do
- data = service.jira_tracker_data.reload
+ data = integration.jira_tracker_data.reload
expect(data.url).to eq(new_url)
expect(data.api_url).to eq(api_url)
@@ -288,15 +275,15 @@ RSpec.describe Integrations::Jira do
context 'when updating the url, api_url, username, or password' do
context 'when updating the integration' do
it 'updates deployment type' do
- service.update!(url: 'http://first.url')
- service.jira_tracker_data.update!(deployment_type: 'server')
+ integration.update!(url: 'http://first.url')
+ integration.jira_tracker_data.update!(deployment_type: 'server')
- expect(service.jira_tracker_data.deployment_server?).to be_truthy
+ expect(integration.jira_tracker_data.deployment_server?).to be_truthy
- service.update!(api_url: 'http://another.url')
- service.jira_tracker_data.reload
+ integration.update!(api_url: 'http://another.url')
+ integration.jira_tracker_data.reload
- expect(service.jira_tracker_data.deployment_cloud?).to be_truthy
+ expect(integration.jira_tracker_data.deployment_cloud?).to be_truthy
expect(WebMock).to have_requested(:get, /serverInfo/).twice
end
end
@@ -305,34 +292,34 @@ RSpec.describe Integrations::Jira do
let(:server_info_results) { {} }
it 'updates deployment type' do
- service.update!(url: nil, api_url: nil, active: false)
+ integration.update!(url: nil, api_url: nil, active: false)
- service.jira_tracker_data.reload
+ integration.jira_tracker_data.reload
- expect(service.jira_tracker_data.deployment_unknown?).to be_truthy
+ expect(integration.jira_tracker_data.deployment_unknown?).to be_truthy
end
end
it 'calls serverInfo for url' do
- service.update!(url: 'http://first.url')
+ integration.update!(url: 'http://first.url')
expect(WebMock).to have_requested(:get, /serverInfo/)
end
it 'calls serverInfo for api_url' do
- service.update!(api_url: 'http://another.url')
+ integration.update!(api_url: 'http://another.url')
expect(WebMock).to have_requested(:get, /serverInfo/)
end
it 'calls serverInfo for username' do
- service.update!(username: 'test-user')
+ integration.update!(username: 'test-user')
expect(WebMock).to have_requested(:get, /serverInfo/)
end
it 'calls serverInfo for password' do
- service.update!(password: 'test-password')
+ integration.update!(password: 'test-password')
expect(WebMock).to have_requested(:get, /serverInfo/)
end
@@ -340,7 +327,8 @@ RSpec.describe Integrations::Jira do
context 'when not updating the url, api_url, username, or password' do
it 'does not update deployment type' do
- expect {service.update!(jira_issue_transition_id: 'jira_issue_transition_id')}.to raise_error(ActiveRecord::RecordInvalid)
+ expect { integration.update!(jira_issue_transition_id: 'jira_issue_transition_id') }
+ .to raise_error(ActiveRecord::RecordInvalid)
expect(WebMock).not_to have_requested(:get, /serverInfo/)
end
@@ -348,9 +336,9 @@ RSpec.describe Integrations::Jira do
context 'when not allowed to test an instance or group' do
it 'does not update deployment type' do
- allow(service).to receive(:can_test?).and_return(false)
+ allow(integration).to receive(:testable?).and_return(false)
- service.update!(url: 'http://first.url')
+ integration.update!(url: 'http://first.url')
expect(WebMock).not_to have_requested(:get, /serverInfo/)
end
@@ -368,68 +356,68 @@ RSpec.describe Integrations::Jira do
end
it 'resets password if url changed' do
- service
- service.url = 'http://jira_edited.example.com'
- service.save!
+ integration
+ integration.url = 'http://jira_edited.example.com'
+ integration.save!
- expect(service.reload.url).to eq('http://jira_edited.example.com')
- expect(service.password).to be_nil
+ expect(integration.reload.url).to eq('http://jira_edited.example.com')
+ expect(integration.password).to be_nil
end
it 'does not reset password if url "changed" to the same url as before' do
- service.url = 'http://jira.example.com'
- service.save!
+ integration.url = 'http://jira.example.com'
+ integration.save!
- expect(service.reload.url).to eq('http://jira.example.com')
- expect(service.password).not_to be_nil
+ expect(integration.reload.url).to eq('http://jira.example.com')
+ expect(integration.password).not_to be_nil
end
it 'resets password if url not changed but api url added' do
- service.api_url = 'http://jira_edited.example.com/rest/api/2'
- service.save!
+ integration.api_url = 'http://jira_edited.example.com/rest/api/2'
+ integration.save!
- expect(service.reload.api_url).to eq('http://jira_edited.example.com/rest/api/2')
- expect(service.password).to be_nil
+ expect(integration.reload.api_url).to eq('http://jira_edited.example.com/rest/api/2')
+ expect(integration.password).to be_nil
end
it 'does not reset password if new url is set together with password, even if it\'s the same password' do
- service.url = 'http://jira_edited.example.com'
- service.password = password
- service.save!
+ integration.url = 'http://jira_edited.example.com'
+ integration.password = password
+ integration.save!
- expect(service.password).to eq(password)
- expect(service.url).to eq('http://jira_edited.example.com')
+ expect(integration.password).to eq(password)
+ expect(integration.url).to eq('http://jira_edited.example.com')
end
it 'resets password if url changed, even if setter called multiple times' do
- service.url = 'http://jira1.example.com/rest/api/2'
- service.url = 'http://jira1.example.com/rest/api/2'
- service.save!
+ integration.url = 'http://jira1.example.com/rest/api/2'
+ integration.url = 'http://jira1.example.com/rest/api/2'
+ integration.save!
- expect(service.password).to be_nil
+ expect(integration.password).to be_nil
end
it 'does not reset password if username changed' do
- service.username = 'some_name'
- service.save!
+ integration.username = 'some_name'
+ integration.save!
- expect(service.reload.password).to eq(password)
+ expect(integration.reload.password).to eq(password)
end
it 'does not reset password if password changed' do
- service.url = 'http://jira_edited.example.com'
- service.password = 'new_password'
- service.save!
+ integration.url = 'http://jira_edited.example.com'
+ integration.password = 'new_password'
+ integration.save!
- expect(service.reload.password).to eq('new_password')
+ expect(integration.reload.password).to eq('new_password')
end
it 'does not reset password if the password is touched and same as before' do
- service.url = 'http://jira_edited.example.com'
- service.password = password
- service.save!
+ integration.url = 'http://jira_edited.example.com'
+ integration.password = password
+ integration.save!
- expect(service.reload.password).to eq(password)
+ expect(integration.reload.password).to eq(password)
end
end
@@ -443,23 +431,23 @@ RSpec.describe Integrations::Jira do
end
it 'resets password if api url changed' do
- service.api_url = 'http://jira_edited.example.com/rest/api/2'
- service.save!
+ integration.api_url = 'http://jira_edited.example.com/rest/api/2'
+ integration.save!
- expect(service.password).to be_nil
+ expect(integration.password).to be_nil
end
it 'does not reset password if url changed' do
- service.url = 'http://jira_edited.example.com'
- service.save!
+ integration.url = 'http://jira_edited.example.com'
+ integration.save!
- expect(service.password).to eq(password)
+ expect(integration.password).to eq(password)
end
it 'resets password if api url set to empty' do
- service.update!(api_url: '')
+ integration.update!(api_url: '')
- expect(service.reload.password).to be_nil
+ expect(integration.reload.password).to be_nil
end
end
end
@@ -472,11 +460,11 @@ RSpec.describe Integrations::Jira do
end
it 'saves password if new url is set together with password' do
- service.url = 'http://jira_edited.example.com/rest/api/2'
- service.password = 'password'
- service.save!
- expect(service.reload.password).to eq('password')
- expect(service.reload.url).to eq('http://jira_edited.example.com/rest/api/2')
+ integration.url = 'http://jira_edited.example.com/rest/api/2'
+ integration.password = 'password'
+ integration.save!
+ expect(integration.reload.password).to eq('password')
+ expect(integration.reload.url).to eq('http://jira_edited.example.com/rest/api/2')
end
end
end
@@ -486,16 +474,16 @@ RSpec.describe Integrations::Jira do
# this will be removed as part of https://gitlab.com/gitlab-org/gitlab/issues/29404
context 'when data are stored in properties' do
let(:properties) { data_params }
- let!(:service) do
- create(:jira_service, :without_properties_callback, properties: properties.merge(additional: 'something'))
+ let!(:integration) do
+ create(:jira_integration, :without_properties_callback, properties: properties.merge(additional: 'something'))
end
it_behaves_like 'handles jira fields'
end
context 'when data are stored in separated fields' do
- let(:service) do
- create(:jira_service, data_params.merge(properties: {}))
+ let(:integration) do
+ create(:jira_integration, data_params.merge(properties: {}))
end
it_behaves_like 'handles jira fields'
@@ -503,8 +491,8 @@ RSpec.describe Integrations::Jira do
context 'when data are stored in both properties and separated fields' do
let(:properties) { data_params }
- let(:service) do
- create(:jira_service, :without_properties_callback, active: false, properties: properties).tap do |integration|
+ let(:integration) do
+ create(:jira_integration, :without_properties_callback, active: false, properties: properties).tap do |integration|
create(:jira_tracker_data, data_params.merge(integration: integration))
end
end
@@ -522,7 +510,7 @@ RSpec.describe Integrations::Jira do
end
it 'call the Jira API to get the issue' do
- jira_service.find_issue(issue_key)
+ jira_integration.find_issue(issue_key)
expect(WebMock).to have_requested(:get, issue_url)
end
@@ -531,7 +519,7 @@ RSpec.describe Integrations::Jira do
let(:issue_url) { "#{url}/rest/api/2/issue/#{issue_key}?expand=renderedFields,transitions" }
it 'calls the Jira API with the options to get the issue' do
- jira_service.find_issue(issue_key, rendered_fields: true, transitions: true)
+ jira_integration.find_issue(issue_key, rendered_fields: true, transitions: true)
expect(WebMock).to have_requested(:get, issue_url)
end
@@ -558,16 +546,16 @@ RSpec.describe Integrations::Jira do
end
subject(:close_issue) do
- jira_service.close_issue(resource, ExternalIssue.new(issue_key, project))
+ jira_integration.close_issue(resource, ExternalIssue.new(issue_key, project))
end
before do
- jira_service.jira_issue_transition_id = '999'
+ jira_integration.jira_issue_transition_id = '999'
# These stubs are needed to test Integrations::Jira#close_issue.
# We close the issue then do another request to API to check if it got closed.
# Here is stubbed the API return with a closed and an opened issues.
- open_issue = JIRA::Resource::Issue.new(jira_service.client, attrs: issue_fields.deep_stringify_keys)
+ open_issue = JIRA::Resource::Issue.new(jira_integration.client, attrs: issue_fields.deep_stringify_keys)
closed_issue = open_issue.dup
allow(open_issue).to receive(:resolution).and_return(false)
allow(closed_issue).to receive(:resolution).and_return(true)
@@ -585,7 +573,7 @@ RSpec.describe Integrations::Jira do
let(:external_issue) { ExternalIssue.new('JIRA-123', project) }
def close_issue
- jira_service.close_issue(resource, external_issue, current_user)
+ jira_integration.close_issue(resource, external_issue, current_user)
end
it 'calls Jira API' do
@@ -636,7 +624,7 @@ RSpec.describe Integrations::Jira do
context 'when "comment_on_event_enabled" is set to false' do
it 'creates Remote Link reference but does not create comment' do
- allow(jira_service).to receive_messages(comment_on_event_enabled: false)
+ allow(jira_integration).to receive_messages(comment_on_event_enabled: false)
close_issue
expect(WebMock).not_to have_requested(:post, comment_url)
@@ -709,12 +697,12 @@ RSpec.describe Integrations::Jira do
end
it 'logs exception when transition id is not valid' do
- allow(jira_service).to receive(:log_error)
+ allow(jira_integration).to receive(:log_error)
WebMock.stub_request(:post, transitions_url).with(basic_auth: %w(jira-username jira-password)).and_raise("Bad Request")
close_issue
- expect(jira_service).to have_received(:log_error).with(
+ expect(jira_integration).to have_received(:log_error).with(
"Issue transition failed",
error: hash_including(
exception_class: 'StandardError',
@@ -734,7 +722,7 @@ RSpec.describe Integrations::Jira do
context 'when custom transition IDs are blank' do
before do
- jira_service.jira_issue_transition_id = ''
+ jira_integration.jira_issue_transition_id = ''
end
it 'does not transition the issue' do
@@ -755,7 +743,7 @@ RSpec.describe Integrations::Jira do
end
before do
- jira_service.jira_issue_transition_automatic = true
+ jira_integration.jira_issue_transition_automatic = true
close_issue
end
@@ -789,7 +777,7 @@ RSpec.describe Integrations::Jira do
context 'when using multiple transition ids' do
before do
- allow(jira_service).to receive_messages(jira_issue_transition_id: '1,2,3')
+ allow(jira_integration).to receive_messages(jira_issue_transition_id: '1,2,3')
end
it 'calls the api with transition ids separated by comma' do
@@ -805,7 +793,7 @@ RSpec.describe Integrations::Jira do
end
it 'calls the api with transition ids separated by semicolon' do
- allow(jira_service).to receive_messages(jira_issue_transition_id: '1;2;3')
+ allow(jira_integration).to receive_messages(jira_issue_transition_id: '1;2;3')
close_issue
@@ -864,7 +852,7 @@ RSpec.describe Integrations::Jira do
let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
- subject { jira_service.create_cross_reference_note(jira_issue, resource, user) }
+ subject { jira_integration.create_cross_reference_note(jira_issue, resource, user) }
shared_examples 'creates a comment on Jira' do
let(:issue_url) { "#{url}/rest/api/2/issue/JIRA-123" }
@@ -936,7 +924,7 @@ RSpec.describe Integrations::Jira do
let(:server_info_results) { { 'url' => 'http://url', 'deploymentType' => 'Cloud' } }
def server_info
- jira_service.test(nil)
+ jira_integration.test(nil)
end
context 'when the test succeeds' do
@@ -946,7 +934,7 @@ RSpec.describe Integrations::Jira do
end
it 'gets Jira project with API URL if set' do
- jira_service.update!(api_url: 'http://jira.api.com')
+ jira_integration.update!(api_url: 'http://jira.api.com')
expect(server_info).to eq(success: true, result: server_info_results)
expect(WebMock).to have_requested(:get, /jira.api.com/)
@@ -961,13 +949,13 @@ RSpec.describe Integrations::Jira do
WebMock.stub_request(:get, test_url).with(basic_auth: [username, password])
.to_raise(JIRA::HTTPError.new(double(message: error_message)))
- expect(jira_service).to receive(:log_error).with(
+ expect(jira_integration).to receive(:log_error).with(
'Error sending message',
client_url: 'http://jira.example.com',
error: error_message
)
- expect(jira_service.test(nil)).to eq(success: false, result: error_message)
+ expect(jira_integration.test(nil)).to eq(success: false, result: error_message)
end
end
end
@@ -983,17 +971,17 @@ RSpec.describe Integrations::Jira do
}
allow(Gitlab.config).to receive(:issues_tracker).and_return(settings)
- service = project.create_jira_service(active: true)
+ integration = project.create_jira_integration(active: true)
- expect(service.url).to eq('http://jira.sample/projects/project_a')
- expect(service.api_url).to eq('http://jira.sample/api')
+ expect(integration.url).to eq('http://jira.sample/projects/project_a')
+ expect(integration.api_url).to eq('http://jira.sample/api')
end
end
it 'removes trailing slashes from url' do
- service = described_class.new(url: 'http://jira.test.com/path/')
+ integration = described_class.new(url: 'http://jira.test.com/path/')
- expect(service.url).to eq('http://jira.test.com/path')
+ expect(integration.url).to eq('http://jira.test.com/path')
end
end
@@ -1093,19 +1081,65 @@ RSpec.describe Integrations::Jira do
describe '#issue_transition_enabled?' do
it 'returns true if automatic transitions are enabled' do
- jira_service.jira_issue_transition_automatic = true
+ jira_integration.jira_issue_transition_automatic = true
- expect(jira_service.issue_transition_enabled?).to be(true)
+ expect(jira_integration.issue_transition_enabled?).to be(true)
end
it 'returns true if custom transitions are set' do
- jira_service.jira_issue_transition_id = '1, 2, 3'
+ jira_integration.jira_issue_transition_id = '1, 2, 3'
- expect(jira_service.issue_transition_enabled?).to be(true)
+ expect(jira_integration.issue_transition_enabled?).to be(true)
end
it 'returns false if automatic and custom transitions are disabled' do
- expect(jira_service.issue_transition_enabled?).to be(false)
+ expect(jira_integration.issue_transition_enabled?).to be(false)
+ end
+ end
+
+ describe 'valid_connection? and configured?' do
+ before do
+ allow(jira_integration).to receive(:test).with(nil).and_return(test_result)
+ end
+
+ context 'when the test fails' do
+ let(:test_result) { { success: false } }
+
+ it 'is falsey' do
+ expect(jira_integration).not_to be_valid_connection
+ end
+
+ it 'implies that configured? is also falsey' do
+ expect(jira_integration).not_to be_configured
+ end
+ end
+
+ context 'when the test succeeds' do
+ let(:test_result) { { success: true } }
+
+ it 'is truthy' do
+ expect(jira_integration).to be_valid_connection
+ end
+
+ context 'when the integration is active' do
+ before do
+ jira_integration.active = true
+ end
+
+ it 'implies that configured? is also truthy' do
+ expect(jira_integration).to be_configured
+ end
+ end
+
+ context 'when the integration is inactive' do
+ before do
+ jira_integration.active = false
+ end
+
+ it 'implies that configured? is falsey' do
+ expect(jira_integration).not_to be_configured
+ end
+ end
end
end
end
diff --git a/spec/models/integrations/mattermost_slash_commands_spec.rb b/spec/models/integrations/mattermost_slash_commands_spec.rb
index c8a6584591c..b6abe00469b 100644
--- a/spec/models/integrations/mattermost_slash_commands_spec.rb
+++ b/spec/models/integrations/mattermost_slash_commands_spec.rb
@@ -5,27 +5,29 @@ require 'spec_helper'
RSpec.describe Integrations::MattermostSlashCommands do
it_behaves_like Integrations::BaseSlashCommands
- context 'Mattermost API' do
+ describe 'Mattermost API' do
let(:project) { create(:project) }
- let(:service) { project.build_mattermost_slash_commands_service }
+ let(:integration) { project.build_mattermost_slash_commands_integration }
let(:user) { create(:user) }
before do
session = ::Mattermost::Session.new(nil)
session.base_uri = 'http://mattermost.example.com'
- allow_any_instance_of(::Mattermost::Client).to receive(:with_session)
- .and_yield(session)
+ allow(session).to receive(:with_session).and_yield(session)
+ allow(::Mattermost::Session).to receive(:new).and_return(session)
end
describe '#configure' do
subject do
- service.configure(user, team_id: 'abc',
- trigger: 'gitlab', url: 'http://trigger.url',
- icon_url: 'http://icon.url/icon.png')
+ integration.configure(user,
+ team_id: 'abc',
+ trigger: 'gitlab',
+ url: 'http://trigger.url',
+ icon_url: 'http://icon.url/icon.png')
end
- context 'the requests succeeds' do
+ context 'when the request succeeds' do
before do
stub_request(:post, 'http://mattermost.example.com/api/v4/commands')
.with(body: {
@@ -48,18 +50,18 @@ RSpec.describe Integrations::MattermostSlashCommands do
)
end
- it 'saves the service' do
+ it 'saves the integration' do
expect { subject }.to change { project.integrations.count }.by(1)
end
it 'saves the token' do
subject
- expect(service.reload.token).to eq('token')
+ expect(integration.reload.token).to eq('token')
end
end
- context 'an error is received' do
+ context 'when an error is received' do
before do
stub_request(:post, 'http://mattermost.example.com/api/v4/commands')
.to_return(
@@ -86,10 +88,10 @@ RSpec.describe Integrations::MattermostSlashCommands do
describe '#list_teams' do
subject do
- service.list_teams(user)
+ integration.list_teams(user)
end
- context 'the requests succeeds' do
+ context 'when the request succeeds' do
before do
stub_request(:get, 'http://mattermost.example.com/api/v4/users/me/teams')
.to_return(
@@ -104,7 +106,7 @@ RSpec.describe Integrations::MattermostSlashCommands do
end
end
- context 'an error is received' do
+ context 'when an error is received' do
before do
stub_request(:get, 'http://mattermost.example.com/api/v4/users/me/teams')
.to_return(
diff --git a/spec/models/integrations/microsoft_teams_spec.rb b/spec/models/integrations/microsoft_teams_spec.rb
index 2f1be233eb2..21b9a005746 100644
--- a/spec/models/integrations/microsoft_teams_spec.rb
+++ b/spec/models/integrations/microsoft_teams_spec.rb
@@ -3,25 +3,20 @@
require 'spec_helper'
RSpec.describe Integrations::MicrosoftTeams do
- let(:chat_service) { described_class.new }
+ let(:chat_integration) { described_class.new }
let(:webhook_url) { 'https://example.gitlab.com/' }
- describe "Associations" do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
it { is_expected.to validate_presence_of(:webhook) }
- it_behaves_like 'issue tracker service URL attribute', :webhook
+ it_behaves_like 'issue tracker integration URL attribute', :webhook
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
@@ -42,10 +37,9 @@ RSpec.describe Integrations::MicrosoftTeams do
let_it_be(:project) { create(:project, :repository, :wiki_repo) }
before do
- allow(chat_service).to receive_messages(
+ allow(chat_integration).to receive_messages(
project: project,
project_id: project.id,
- service_hook: true,
webhook: webhook_url
)
@@ -58,28 +52,29 @@ RSpec.describe Integrations::MicrosoftTeams do
end
it "calls Microsoft Teams API for push events" do
- chat_service.execute(push_sample_data)
+ chat_integration.execute(push_sample_data)
expect(WebMock).to have_requested(:post, webhook_url).once
end
it 'specifies the webhook when it is configured' do
- expect(::MicrosoftTeams::Notifier).to receive(:new).with(webhook_url).and_return(double(:microsoft_teams_service).as_null_object)
+ integration = double(:microsoft_teams_integration).as_null_object
+ expect(::MicrosoftTeams::Notifier).to receive(:new).with(webhook_url).and_return(integration)
- chat_service.execute(push_sample_data)
+ chat_integration.execute(push_sample_data)
end
end
context 'with issue events' do
let(:opts) { { title: 'Awesome issue', description: 'please fix' } }
let(:issues_sample_data) do
- service = Issues::CreateService.new(project: project, current_user: user, params: opts)
+ service = Issues::CreateService.new(project: project, current_user: user, params: opts, spam_params: nil)
issue = service.execute
service.hook_data(issue, 'open')
end
it "calls Microsoft Teams API" do
- chat_service.execute(issues_sample_data)
+ chat_integration.execute(issues_sample_data)
expect(WebMock).to have_requested(:post, webhook_url).once
end
@@ -106,7 +101,7 @@ RSpec.describe Integrations::MicrosoftTeams do
end
it "calls Microsoft Teams API" do
- chat_service.execute(merge_sample_data)
+ chat_integration.execute(merge_sample_data)
expect(WebMock).to have_requested(:post, webhook_url).once
end
@@ -126,7 +121,7 @@ RSpec.describe Integrations::MicrosoftTeams do
let(:wiki_page_sample_data) { Gitlab::DataBuilder::WikiPage.build(wiki_page, user, 'create') }
it "calls Microsoft Teams API" do
- chat_service.execute(wiki_page_sample_data)
+ chat_integration.execute(wiki_page_sample_data)
expect(WebMock).to have_requested(:post, webhook_url).once
end
@@ -138,10 +133,9 @@ RSpec.describe Integrations::MicrosoftTeams do
let(:project) { create(:project, :repository, creator: user) }
before do
- allow(chat_service).to receive_messages(
+ allow(chat_integration).to receive_messages(
project: project,
project_id: project.id,
- service_hook: true,
webhook: webhook_url
)
@@ -159,7 +153,7 @@ RSpec.describe Integrations::MicrosoftTeams do
it "calls Microsoft Teams API for commit comment events" do
data = Gitlab::DataBuilder::Note.build(commit_note, user)
- chat_service.execute(data)
+ chat_integration.execute(data)
expect(WebMock).to have_requested(:post, webhook_url).once
end
@@ -174,7 +168,7 @@ RSpec.describe Integrations::MicrosoftTeams do
it "calls Microsoft Teams API for merge request comment events" do
data = Gitlab::DataBuilder::Note.build(merge_request_note, user)
- chat_service.execute(data)
+ chat_integration.execute(data)
expect(WebMock).to have_requested(:post, webhook_url).once
end
@@ -188,7 +182,7 @@ RSpec.describe Integrations::MicrosoftTeams do
it "calls Microsoft Teams API for issue comment events" do
data = Gitlab::DataBuilder::Note.build(issue_note, user)
- chat_service.execute(data)
+ chat_integration.execute(data)
expect(WebMock).to have_requested(:post, webhook_url).once
end
@@ -203,7 +197,7 @@ RSpec.describe Integrations::MicrosoftTeams do
it "calls Microsoft Teams API for snippet comment events" do
data = Gitlab::DataBuilder::Note.build(snippet_note, user)
- chat_service.execute(data)
+ chat_integration.execute(data)
expect(WebMock).to have_requested(:post, webhook_url).once
end
@@ -221,9 +215,8 @@ RSpec.describe Integrations::MicrosoftTeams do
end
before do
- allow(chat_service).to receive_messages(
+ allow(chat_integration).to receive_messages(
project: project,
- service_hook: true,
webhook: webhook_url
)
end
@@ -231,14 +224,14 @@ RSpec.describe Integrations::MicrosoftTeams do
shared_examples 'call Microsoft Teams API' do |branches_to_be_notified: nil|
before do
WebMock.stub_request(:post, webhook_url)
- chat_service.branches_to_be_notified = branches_to_be_notified if branches_to_be_notified
+ chat_integration.branches_to_be_notified = branches_to_be_notified if branches_to_be_notified
end
it 'calls Microsoft Teams API for pipeline events' do
data = Gitlab::DataBuilder::Pipeline.build(pipeline)
data[:markdown] = true
- chat_service.execute(data)
+ chat_integration.execute(data)
message = Integrations::ChatMessage::PipelineMessage.new(data)
@@ -250,11 +243,11 @@ RSpec.describe Integrations::MicrosoftTeams do
shared_examples 'does not call Microsoft Teams API' do |branches_to_be_notified: nil|
before do
- chat_service.branches_to_be_notified = branches_to_be_notified if branches_to_be_notified
+ chat_integration.branches_to_be_notified = branches_to_be_notified if branches_to_be_notified
end
it 'does not call Microsoft Teams API for pipeline events' do
data = Gitlab::DataBuilder::Pipeline.build(pipeline)
- result = chat_service.execute(data)
+ result = chat_integration.execute(data)
expect(result).to be_falsy
end
@@ -272,7 +265,7 @@ RSpec.describe Integrations::MicrosoftTeams do
context 'with default to notify_only_broken_pipelines' do
it 'does not call Microsoft Teams API for pipeline events' do
data = Gitlab::DataBuilder::Pipeline.build(pipeline)
- result = chat_service.execute(data)
+ result = chat_integration.execute(data)
expect(result).to be_falsy
end
@@ -280,7 +273,7 @@ RSpec.describe Integrations::MicrosoftTeams do
context 'with setting notify_only_broken_pipelines to false' do
before do
- chat_service.notify_only_broken_pipelines = false
+ chat_integration.notify_only_broken_pipelines = false
end
it_behaves_like 'call Microsoft Teams API'
diff --git a/spec/models/integrations/open_project_spec.rb b/spec/models/integrations/open_project_spec.rb
index e5b976dc91d..789911acae8 100644
--- a/spec/models/integrations/open_project_spec.rb
+++ b/spec/models/integrations/open_project_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Integrations::OpenProject do
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
@@ -13,11 +13,11 @@ RSpec.describe Integrations::OpenProject do
it { is_expected.to validate_presence_of(:token) }
it { is_expected.to validate_presence_of(:project_identifier_code) }
- it_behaves_like 'issue tracker service URL attribute', :url
- it_behaves_like 'issue tracker service URL attribute', :api_url
+ it_behaves_like 'issue tracker integration URL attribute', :url
+ it_behaves_like 'issue tracker integration URL attribute', :api_url
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
@@ -27,9 +27,4 @@ RSpec.describe Integrations::OpenProject do
it { is_expected.not_to validate_presence_of(:project_identifier_code) }
end
end
-
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
end
diff --git a/spec/models/integrations/packagist_spec.rb b/spec/models/integrations/packagist_spec.rb
index 48f7e81adca..dce96890522 100644
--- a/spec/models/integrations/packagist_spec.rb
+++ b/spec/models/integrations/packagist_spec.rb
@@ -24,23 +24,23 @@ RSpec.describe Integrations::Packagist do
let(:packagist_server) { 'https://packagist.example.com' }
let(:project) { create(:project) }
- describe "Associations" do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
+ it_behaves_like Integrations::HasWebHook do
+ let(:integration) { described_class.new(packagist_params) }
+ let(:hook_url) { "#{packagist_server}/api/update-package?username=#{packagist_username}&apiToken=#{packagist_token}" }
end
describe '#execute' do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:push_sample_data) { Gitlab::DataBuilder::Push.build_sample(project, user) }
- let(:packagist_service) { described_class.create!(packagist_params) }
+ let(:packagist_integration) { described_class.create!(packagist_params) }
before do
stub_request(:post, packagist_hook_url)
end
it 'calls Packagist API' do
- packagist_service.execute(push_sample_data)
+ packagist_integration.execute(push_sample_data)
expect(a_request(:post, packagist_hook_url)).to have_been_made.once
end
diff --git a/spec/models/integrations/pipelines_email_spec.rb b/spec/models/integrations/pipelines_email_spec.rb
index 90055b04bb8..761049f25fe 100644
--- a/spec/models/integrations/pipelines_email_spec.rb
+++ b/spec/models/integrations/pipelines_email_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Integrations::PipelinesEmail, :mailer do
end
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
@@ -28,7 +28,7 @@ RSpec.describe Integrations::PipelinesEmail, :mailer do
it { is_expected.to validate_presence_of(:recipients) }
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
diff --git a/spec/models/integrations/pivotaltracker_spec.rb b/spec/models/integrations/pivotaltracker_spec.rb
index 2ce90b6f739..bf8458a376c 100644
--- a/spec/models/integrations/pivotaltracker_spec.rb
+++ b/spec/models/integrations/pivotaltracker_spec.rb
@@ -5,13 +5,8 @@ require 'spec_helper'
RSpec.describe Integrations::Pivotaltracker do
include StubRequests
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
@@ -19,7 +14,7 @@ RSpec.describe Integrations::Pivotaltracker do
it { is_expected.to validate_presence_of(:token) }
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
@@ -29,9 +24,9 @@ RSpec.describe Integrations::Pivotaltracker do
end
describe 'Execute' do
- let(:service) do
- described_class.new.tap do |service|
- service.token = 'secret_api_token'
+ let(:integration) do
+ described_class.new.tap do |integration|
+ integration.token = 'secret_api_token'
end
end
@@ -59,7 +54,7 @@ RSpec.describe Integrations::Pivotaltracker do
end
it 'posts correct message' do
- service.execute(push_data)
+ integration.execute(push_data)
expect(WebMock).to have_requested(:post, stubbed_hostname(url)).with(
body: {
'source_commit' => {
@@ -77,22 +72,22 @@ RSpec.describe Integrations::Pivotaltracker do
end
context 'when allowed branches is specified' do
- let(:service) do
- super().tap do |service|
- service.restrict_to_branch = 'master,v10'
+ let(:integration) do
+ super().tap do |integration|
+ integration.restrict_to_branch = 'master,v10'
end
end
it 'posts message if branch is in the list' do
- service.execute(push_data(branch: 'master'))
- service.execute(push_data(branch: 'v10'))
+ integration.execute(push_data(branch: 'master'))
+ integration.execute(push_data(branch: 'v10'))
expect(WebMock).to have_requested(:post, stubbed_hostname(url)).twice
end
it 'does not post message if branch is not in the list' do
- service.execute(push_data(branch: 'mas'))
- service.execute(push_data(branch: 'v11'))
+ integration.execute(push_data(branch: 'mas'))
+ integration.execute(push_data(branch: 'v11'))
expect(WebMock).not_to have_requested(:post, stubbed_hostname(url))
end
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/integrations/prometheus_spec.rb
index a2025388fab..f6f242bf58e 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/integrations/prometheus_spec.rb
@@ -4,17 +4,13 @@ require 'spec_helper'
require 'googleauth'
-RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowplow do
+RSpec.describe Integrations::Prometheus, :use_clean_rails_memory_store_caching, :snowplow do
include PrometheusHelpers
include ReactiveCachingHelpers
let_it_be_with_reload(:project) { create(:prometheus_project) }
- let(:service) { project.prometheus_service }
-
- describe "Associations" do
- it { is_expected.to belong_to :project }
- end
+ let(:integration) { project.prometheus_integration }
context 'redirects' do
it 'does not follow redirects' do
@@ -22,7 +18,7 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
redirect_req_stub = stub_prometheus_request(prometheus_query_url('1'), status: 302, headers: { location: redirect_to })
redirected_req_stub = stub_prometheus_request(redirect_to, body: { 'status': 'success' })
- result = service.test
+ result = integration.test
# result = { success: false, result: error }
expect(result[:success]).to be_falsy
@@ -36,22 +32,22 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
describe 'Validations' do
context 'when manual_configuration is enabled' do
before do
- service.manual_configuration = true
+ integration.manual_configuration = true
end
it 'validates presence of api_url' do
- expect(service).to validate_presence_of(:api_url)
+ expect(integration).to validate_presence_of(:api_url)
end
end
context 'when manual configuration is disabled' do
before do
- service.manual_configuration = false
+ integration.manual_configuration = false
end
it 'does not validate presence of api_url' do
- expect(service).not_to validate_presence_of(:api_url)
- expect(service.valid?).to eq(true)
+ expect(integration).not_to validate_presence_of(:api_url)
+ expect(integration.valid?).to eq(true)
end
context 'local connections allowed' do
@@ -60,23 +56,23 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
end
it 'does not validate presence of api_url' do
- expect(service).not_to validate_presence_of(:api_url)
- expect(service.valid?).to eq(true)
+ expect(integration).not_to validate_presence_of(:api_url)
+ expect(integration.valid?).to eq(true)
end
end
end
context 'when the api_url domain points to localhost or local network' do
- let(:domain) { Addressable::URI.parse(service.api_url).hostname }
+ let(:domain) { Addressable::URI.parse(integration.api_url).hostname }
it 'cannot query' do
- expect(service.can_query?).to be true
+ expect(integration.can_query?).to be true
aggregate_failures do
['127.0.0.1', '192.168.2.3'].each do |url|
allow(Addrinfo).to receive(:getaddrinfo).with(domain, any_args).and_return([Addrinfo.tcp(url, 80)])
- expect(service.can_query?).to be false
+ expect(integration.can_query?).to be false
end
end
end
@@ -88,14 +84,14 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
['127.0.0.1', '192.168.2.3'].each do |url|
allow(Addrinfo).to receive(:getaddrinfo).with(domain, any_args).and_return([Addrinfo.tcp(url, 80)])
- expect(service.can_query?).to be true
+ expect(integration.can_query?).to be true
end
end
end
context 'with self-monitoring project and internal Prometheus' do
before do
- service.api_url = 'http://localhost:9090'
+ integration.api_url = 'http://localhost:9090'
stub_application_setting(self_monitoring_project_id: project.id)
stub_config(prometheus: { enable: true, server_address: 'localhost:9090' })
@@ -106,19 +102,19 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
['127.0.0.1', '192.168.2.3'].each do |url|
allow(Addrinfo).to receive(:getaddrinfo).with(domain, any_args).and_return([Addrinfo.tcp(url, 80)])
- expect(service.can_query?).to be true
+ expect(integration.can_query?).to be true
end
end
end
it 'does not allow self-monitoring project to connect to other local URLs' do
- service.api_url = 'http://localhost:8000'
+ integration.api_url = 'http://localhost:8000'
aggregate_failures do
['127.0.0.1', '192.168.2.3'].each do |url|
allow(Addrinfo).to receive(:getaddrinfo).with(domain, any_args).and_return([Addrinfo.tcp(url, 80)])
- expect(service.can_query?).to be false
+ expect(integration.can_query?).to be false
end
end
end
@@ -129,26 +125,26 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
describe 'callbacks' do
context 'after_create' do
let(:project) { create(:project) }
- let(:service) { build(:prometheus_service, project: project) }
+ let(:integration) { build(:prometheus_integration, project: project) }
- subject(:create_service) { service.save! }
+ subject(:create_integration) { integration.save! }
it 'creates default alerts' do
expect(Prometheus::CreateDefaultAlertsWorker)
.to receive(:perform_async)
.with(project.id)
- create_service
+ create_integration
end
context 'no project exists' do
- let(:service) { build(:prometheus_service, :instance) }
+ let(:integration) { build(:prometheus_integration, :instance) }
it 'does not create default alerts' do
expect(Prometheus::CreateDefaultAlertsWorker)
.not_to receive(:perform_async)
- create_service
+ create_integration
end
end
end
@@ -156,15 +152,15 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
describe '#test' do
before do
- service.manual_configuration = true
+ integration.manual_configuration = true
end
let!(:req_stub) { stub_prometheus_request(prometheus_query_url('1'), body: prometheus_value_body('vector')) }
context 'success' do
it 'reads the discovery endpoint' do
- expect(service.test[:result]).to eq('Checked API endpoint')
- expect(service.test[:success]).to be_truthy
+ expect(integration.test[:result]).to eq('Checked API endpoint')
+ expect(integration.test[:success]).to be_truthy
expect(req_stub).to have_been_requested.twice
end
end
@@ -173,7 +169,7 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
let!(:req_stub) { stub_prometheus_request(prometheus_query_url('1'), status: 404) }
it 'fails to read the discovery endpoint' do
- expect(service.test[:success]).to be_falsy
+ expect(integration.test[:success]).to be_falsy
expect(req_stub).to have_been_requested
end
end
@@ -183,20 +179,20 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
let(:api_url) { 'http://some_url' }
before do
- service.active = true
- service.api_url = api_url
- service.manual_configuration = manual_configuration
+ integration.active = true
+ integration.api_url = api_url
+ integration.manual_configuration = manual_configuration
end
context 'manual configuration is enabled' do
let(:manual_configuration) { true }
it 'calls valid?' do
- allow(service).to receive(:valid?).and_call_original
+ allow(integration).to receive(:valid?).and_call_original
- expect(service.prometheus_client).not_to be_nil
+ expect(integration.prometheus_client).not_to be_nil
- expect(service).to have_received(:valid?)
+ expect(integration).to have_received(:valid?)
end
end
@@ -204,7 +200,7 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
let(:manual_configuration) { false }
it 'no client provided' do
- expect(service.prometheus_client).to be_nil
+ expect(integration.prometheus_client).to be_nil
end
end
@@ -219,8 +215,8 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
end
it 'allows local requests' do
- expect(service.prometheus_client).not_to be_nil
- expect { service.prometheus_client.ping }.not_to raise_error
+ expect(integration.prometheus_client).not_to be_nil
+ expect { integration.prometheus_client.ping }.not_to raise_error
end
end
@@ -235,7 +231,7 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
end
it 'blocks local requests' do
- expect(service.prometheus_client).to be_nil
+ expect(integration.prometheus_client).to be_nil
end
context 'with self monitoring project and internal Prometheus URL' do
@@ -250,8 +246,8 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
end
it 'allows local requests' do
- expect(service.prometheus_client).not_to be_nil
- expect { service.prometheus_client.ping }.not_to raise_error
+ expect(integration.prometheus_client).not_to be_nil
+ expect { integration.prometheus_client.ping }.not_to raise_error
end
end
end
@@ -278,8 +274,8 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
end
def stub_iap_request
- service.google_iap_service_account_json = Gitlab::Json.generate(google_iap_service_account)
- service.google_iap_audience_client_id = 'IAP_CLIENT_ID.apps.googleusercontent.com'
+ integration.google_iap_service_account_json = Gitlab::Json.generate(google_iap_service_account)
+ integration.google_iap_audience_client_id = 'IAP_CLIENT_ID.apps.googleusercontent.com'
stub_request(:post, 'https://oauth2.googleapis.com/token')
.to_return(
@@ -292,9 +288,9 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
it 'includes the authorization header' do
stub_iap_request
- expect(service.prometheus_client).not_to be_nil
- expect(service.prometheus_client.send(:options)).to have_key(:headers)
- expect(service.prometheus_client.send(:options)[:headers]).to eq(authorization: "Bearer FOO")
+ expect(integration.prometheus_client).not_to be_nil
+ expect(integration.prometheus_client.send(:options)).to have_key(:headers)
+ expect(integration.prometheus_client.send(:options)[:headers]).to eq(authorization: "Bearer FOO")
end
context 'when passed with token_credential_uri', issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/284819' do
@@ -315,7 +311,7 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
stub_iap_request
stub_request(:any, malicious_host).to_raise('Making additional HTTP requests is forbidden!')
- expect(service.prometheus_client).not_to be_nil
+ expect(integration.prometheus_client).not_to be_nil
end
end
end
@@ -332,7 +328,7 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
let(:cluster) { create(:cluster, projects: [project]) }
it 'returns true' do
- expect(service.prometheus_available?).to be(true)
+ expect(integration.prometheus_available?).to be(true)
end
end
@@ -343,16 +339,16 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
let(:cluster) { create(:cluster_for_group, groups: [group]) }
it 'returns true' do
- expect(service.prometheus_available?).to be(true)
+ expect(integration.prometheus_available?).to be(true)
end
it 'avoids N+1 queries' do
- service
+ integration
5.times do |i|
other_cluster = create(:cluster_for_group, groups: [group], environment_scope: i)
create(:clusters_integrations_prometheus, cluster: other_cluster)
end
- expect { service.prometheus_available? }.not_to exceed_query_limit(1)
+ expect { integration.prometheus_available? }.not_to exceed_query_limit(1)
end
end
@@ -360,7 +356,7 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
let(:cluster) { create(:cluster, :instance) }
it 'returns true' do
- expect(service.prometheus_available?).to be(true)
+ expect(integration.prometheus_available?).to be(true)
end
end
end
@@ -370,7 +366,7 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
let!(:prometheus) { create(:clusters_integrations_prometheus, :disabled, cluster: cluster) }
it 'returns false' do
- expect(service.prometheus_available?).to be(false)
+ expect(integration.prometheus_available?).to be(false)
end
end
@@ -378,78 +374,78 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
let(:cluster) { create(:cluster, projects: [project]) }
it 'returns false' do
- expect(service.prometheus_available?).to be(false)
+ expect(integration.prometheus_available?).to be(false)
end
end
context 'no clusters' do
it 'returns false' do
- expect(service.prometheus_available?).to be(false)
+ expect(integration.prometheus_available?).to be(false)
end
end
end
describe '#synchronize_service_state before_save callback' do
context 'no clusters with prometheus are installed' do
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
- service.active = false
+ integration.active = false
end
- it 'activates service when manual_configuration is enabled' do
- expect { service.update!(manual_configuration: true) }.to change { service.active }.from(false).to(true)
+ it 'activates integration when manual_configuration is enabled' do
+ expect { integration.update!(manual_configuration: true) }.to change { integration.active }.from(false).to(true)
end
- it 'keeps service inactive when manual_configuration is disabled' do
- expect { service.update!(manual_configuration: false) }.not_to change { service.active }.from(false)
+ it 'keeps integration inactive when manual_configuration is disabled' do
+ expect { integration.update!(manual_configuration: false) }.not_to change { integration.active }.from(false)
end
end
- context 'when service is active' do
+ context 'when integration is active' do
before do
- service.active = true
+ integration.active = true
end
- it 'keeps the service active when manual_configuration is enabled' do
- expect { service.update!(manual_configuration: true) }.not_to change { service.active }.from(true)
+ it 'keeps the integration active when manual_configuration is enabled' do
+ expect { integration.update!(manual_configuration: true) }.not_to change { integration.active }.from(true)
end
- it 'inactivates the service when manual_configuration is disabled' do
- expect { service.update!(manual_configuration: false) }.to change { service.active }.from(true).to(false)
+ it 'inactivates the integration when manual_configuration is disabled' do
+ expect { integration.update!(manual_configuration: false) }.to change { integration.active }.from(true).to(false)
end
end
end
context 'with prometheus installed in the cluster' do
before do
- allow(service).to receive(:prometheus_available?).and_return(true)
+ allow(integration).to receive(:prometheus_available?).and_return(true)
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
- service.active = false
+ integration.active = false
end
- it 'activates service when manual_configuration is enabled' do
- expect { service.update!(manual_configuration: true) }.to change { service.active }.from(false).to(true)
+ it 'activates integration when manual_configuration is enabled' do
+ expect { integration.update!(manual_configuration: true) }.to change { integration.active }.from(false).to(true)
end
- it 'activates service when manual_configuration is disabled' do
- expect { service.update!(manual_configuration: false) }.to change { service.active }.from(false).to(true)
+ it 'activates integration when manual_configuration is disabled' do
+ expect { integration.update!(manual_configuration: false) }.to change { integration.active }.from(false).to(true)
end
end
- context 'when service is active' do
+ context 'when integration is active' do
before do
- service.active = true
+ integration.active = true
end
- it 'keeps service active when manual_configuration is enabled' do
- expect { service.update!(manual_configuration: true) }.not_to change { service.active }.from(true)
+ it 'keeps integration active when manual_configuration is enabled' do
+ expect { integration.update!(manual_configuration: true) }.not_to change { integration.active }.from(true)
end
- it 'keeps service active when manual_configuration is disabled' do
- expect { service.update!(manual_configuration: false) }.not_to change { service.active }.from(true)
+ it 'keeps integration active when manual_configuration is disabled' do
+ expect { integration.update!(manual_configuration: false) }.not_to change { integration.active }.from(true)
end
end
end
@@ -457,20 +453,20 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
describe '#track_events after_commit callback' do
before do
- allow(service).to receive(:prometheus_available?).and_return(true)
+ allow(integration).to receive(:prometheus_available?).and_return(true)
end
context "enabling manual_configuration" do
it "tracks enable event" do
- service.update!(manual_configuration: false)
- service.update!(manual_configuration: true)
+ integration.update!(manual_configuration: false)
+ integration.update!(manual_configuration: true)
expect_snowplow_event(category: 'cluster:services:prometheus', action: 'enabled_manual_prometheus')
end
it "tracks disable event" do
- service.update!(manual_configuration: true)
- service.update!(manual_configuration: false)
+ integration.update!(manual_configuration: true)
+ integration.update!(manual_configuration: false)
expect_snowplow_event(category: 'cluster:services:prometheus', action: 'disabled_manual_prometheus')
end
@@ -479,20 +475,20 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
describe '#editable?' do
it 'is editable' do
- expect(service.editable?).to be(true)
+ expect(integration.editable?).to be(true)
end
context 'when cluster exists with prometheus enabled' do
let(:cluster) { create(:cluster, projects: [project]) }
before do
- service.update!(manual_configuration: false)
+ integration.update!(manual_configuration: false)
create(:clusters_integrations_prometheus, cluster: cluster)
end
it 'remains editable' do
- expect(service.editable?).to be(true)
+ expect(integration.editable?).to be(true)
end
end
end
@@ -536,7 +532,7 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
end
it 'returns fields' do
- expect(service.fields).to eq(expected_fields)
+ expect(integration.fields).to eq(expected_fields)
end
end
end
diff --git a/spec/models/integrations/pushover_spec.rb b/spec/models/integrations/pushover_spec.rb
index be8dc5634a0..716a00c5bcf 100644
--- a/spec/models/integrations/pushover_spec.rb
+++ b/spec/models/integrations/pushover_spec.rb
@@ -5,13 +5,8 @@ require 'spec_helper'
RSpec.describe Integrations::Pushover do
include StubRequests
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
@@ -21,7 +16,7 @@ RSpec.describe Integrations::Pushover do
it { is_expected.to validate_presence_of(:priority) }
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
@@ -51,7 +46,6 @@ RSpec.describe Integrations::Pushover do
allow(pushover).to receive_messages(
project: project,
project_id: project.id,
- service_hook: true,
api_key: api_key,
user_key: user_key,
device: device,
diff --git a/spec/models/integrations/redmine_spec.rb b/spec/models/integrations/redmine_spec.rb
index 083585d4fed..59997d2b6f6 100644
--- a/spec/models/integrations/redmine_spec.rb
+++ b/spec/models/integrations/redmine_spec.rb
@@ -3,11 +3,6 @@
require 'spec_helper'
RSpec.describe Integrations::Redmine do
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
# if redmine is set in setting the urls are set to defaults
# therefore the validation passes as the values are not nil
@@ -18,7 +13,7 @@ RSpec.describe Integrations::Redmine do
allow(Gitlab.config).to receive(:issues_tracker).and_return(settings)
end
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
@@ -27,12 +22,12 @@ RSpec.describe Integrations::Redmine do
it { is_expected.to validate_presence_of(:issues_url) }
it { is_expected.to validate_presence_of(:new_issue_url) }
- it_behaves_like 'issue tracker service URL attribute', :project_url
- it_behaves_like 'issue tracker service URL attribute', :issues_url
- it_behaves_like 'issue tracker service URL attribute', :new_issue_url
+ it_behaves_like 'issue tracker integration URL attribute', :project_url
+ it_behaves_like 'issue tracker integration URL attribute', :issues_url
+ it_behaves_like 'issue tracker integration URL attribute', :new_issue_url
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
diff --git a/spec/models/integrations/slack_slash_commands_spec.rb b/spec/models/integrations/slack_slash_commands_spec.rb
index a9d3c820a3c..ff89d2c6a40 100644
--- a/spec/models/integrations/slack_slash_commands_spec.rb
+++ b/spec/models/integrations/slack_slash_commands_spec.rb
@@ -18,8 +18,8 @@ RSpec.describe Integrations::SlackSlashCommands do
}
end
- let(:service) do
- project.create_slack_slash_commands_service(
+ let(:integration) do
+ project.create_slack_slash_commands_integration(
properties: { token: 'token' },
active: true
)
@@ -30,11 +30,11 @@ RSpec.describe Integrations::SlackSlashCommands do
end
before do
- allow(service).to receive(:authorize_chat_name_url).and_return(authorize_url)
+ allow(integration).to receive(:authorize_chat_name_url).and_return(authorize_url)
end
it 'uses slack compatible links' do
- response = service.trigger(params)
+ response = integration.trigger(params)
expect(response[:text]).to include("<#{authorize_url}|connect your GitLab account>")
end
diff --git a/spec/models/integrations/slack_spec.rb b/spec/models/integrations/slack_spec.rb
index e598c528967..4661d9c8291 100644
--- a/spec/models/integrations/slack_spec.rb
+++ b/spec/models/integrations/slack_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Integrations::Slack do
stub_request(:post, "https://slack.service.url/")
end
- let_it_be(:slack_service) { create(:slack_service, branches_to_be_notified: 'all') }
+ let_it_be(:slack_integration) { create(:integrations_slack, branches_to_be_notified: 'all') }
it 'uses only known events', :aggregate_failures do
described_class::SUPPORTED_EVENTS_FOR_USAGE_LOG.each do |action|
@@ -26,7 +26,7 @@ RSpec.describe Integrations::Slack do
it 'increases the usage data counter' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(event_name, values: user.id).and_call_original
- slack_service.execute(data)
+ slack_integration.execute(data)
end
end
@@ -38,7 +38,7 @@ RSpec.describe Integrations::Slack do
it 'does not increase the usage data counter' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event).with('i_ecosystem_slack_service_pipeline_notification', values: user.id)
- slack_service.execute(data)
+ slack_integration.execute(data)
end
end
@@ -126,7 +126,7 @@ RSpec.describe Integrations::Slack do
it 'does not increase the usage data counter' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
- slack_service.execute(data)
+ slack_integration.execute(data)
end
end
end
diff --git a/spec/models/integrations/teamcity_spec.rb b/spec/models/integrations/teamcity_spec.rb
index b88a4722ad4..d425357aef0 100644
--- a/spec/models/integrations/teamcity_spec.rb
+++ b/spec/models/integrations/teamcity_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
let(:teamcity_full_url) { 'http://gitlab.com/teamcity/httpAuth/app/rest/builds/branch:unspecified:any,revision:123' }
let(:project) { create(:project) }
- subject(:service) do
+ subject(:integration) do
described_class.create!(
project: project,
properties: {
@@ -22,20 +22,15 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
)
end
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
it { is_expected.to validate_presence_of(:build_type) }
it { is_expected.to validate_presence_of(:teamcity_url) }
- it_behaves_like 'issue tracker service URL attribute', :teamcity_url
+ it_behaves_like 'issue tracker integration URL attribute', :teamcity_url
describe '#username' do
it 'does not validate the presence of username if password is nil' do
@@ -66,7 +61,7 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
end
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
@@ -79,71 +74,66 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
end
describe 'Callbacks' do
+ let(:teamcity_integration) { integration }
+
describe 'before_update :reset_password' do
context 'when a password was previously set' do
it 'resets password if url changed' do
- teamcity_service = service
+ teamcity_integration.teamcity_url = 'http://gitlab1.com'
+ teamcity_integration.save!
- teamcity_service.teamcity_url = 'http://gitlab1.com'
- teamcity_service.save!
-
- expect(teamcity_service.password).to be_nil
+ expect(teamcity_integration.password).to be_nil
end
it 'does not reset password if username changed' do
- teamcity_service = service
-
- teamcity_service.username = 'some_name'
- teamcity_service.save!
+ teamcity_integration.username = 'some_name'
+ teamcity_integration.save!
- expect(teamcity_service.password).to eq('password')
+ expect(teamcity_integration.password).to eq('password')
end
it "does not reset password if new url is set together with password, even if it's the same password" do
- teamcity_service = service
-
- teamcity_service.teamcity_url = 'http://gitlab_edited.com'
- teamcity_service.password = 'password'
- teamcity_service.save!
+ teamcity_integration.teamcity_url = 'http://gitlab_edited.com'
+ teamcity_integration.password = 'password'
+ teamcity_integration.save!
- expect(teamcity_service.password).to eq('password')
- expect(teamcity_service.teamcity_url).to eq('http://gitlab_edited.com')
+ expect(teamcity_integration.password).to eq('password')
+ expect(teamcity_integration.teamcity_url).to eq('http://gitlab_edited.com')
end
end
it 'saves password if new url is set together with password when no password was previously set' do
- teamcity_service = service
- teamcity_service.password = nil
+ teamcity_integration.password = nil
- teamcity_service.teamcity_url = 'http://gitlab_edited.com'
- teamcity_service.password = 'password'
- teamcity_service.save!
+ teamcity_integration.teamcity_url = 'http://gitlab_edited.com'
+ teamcity_integration.password = 'password'
+ teamcity_integration.save!
- expect(teamcity_service.password).to eq('password')
- expect(teamcity_service.teamcity_url).to eq('http://gitlab_edited.com')
+ expect(teamcity_integration.password).to eq('password')
+ expect(teamcity_integration.teamcity_url).to eq('http://gitlab_edited.com')
end
end
end
describe '#build_page' do
it 'returns the contents of the reactive cache' do
- stub_reactive_cache(service, { build_page: 'foo' }, 'sha', 'ref')
+ stub_reactive_cache(integration, { build_page: 'foo' }, 'sha', 'ref')
- expect(service.build_page('sha', 'ref')).to eq('foo')
+ expect(integration.build_page('sha', 'ref')).to eq('foo')
end
end
describe '#commit_status' do
it 'returns the contents of the reactive cache' do
- stub_reactive_cache(service, { commit_status: 'foo' }, 'sha', 'ref')
+ stub_reactive_cache(integration, { commit_status: 'foo' }, 'sha', 'ref')
- expect(service.commit_status('sha', 'ref')).to eq('foo')
+ expect(integration.commit_status('sha', 'ref')).to eq('foo')
end
end
describe '#calculate_reactive_cache' do
context 'build_page' do
- subject { service.calculate_reactive_cache('123', 'unused')[:build_page] }
+ subject { integration.calculate_reactive_cache('123', 'unused')[:build_page] }
it 'returns a specific URL when status is 500' do
stub_request(status: 500)
@@ -179,7 +169,7 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
end
context 'commit_status' do
- subject { service.calculate_reactive_cache('123', 'unused')[:commit_status] }
+ subject { integration.calculate_reactive_cache('123', 'unused')[:commit_status] }
it 'sets commit status to :error when status is 500' do
stub_request(status: 500)
@@ -243,25 +233,25 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
it 'handles push request correctly' do
stub_post_to_build_queue(branch: 'dev-123_branch')
- expect(service.execute(data)).to include('Ok')
+ expect(integration.execute(data)).to include('Ok')
end
it 'returns nil when ref is blank' do
data[:after] = Gitlab::Git::BLANK_SHA
- expect(service.execute(data)).to be_nil
+ expect(integration.execute(data)).to be_nil
end
it 'returns nil when there is no content' do
data[:total_commits_count] = 0
- expect(service.execute(data)).to be_nil
+ expect(integration.execute(data)).to be_nil
end
it 'returns nil when a merge request is opened for the same ref' do
create(:merge_request, source_project: project, source_branch: 'dev-123_branch')
- expect(service.execute(data)).to be_nil
+ expect(integration.execute(data)).to be_nil
end
end
@@ -283,26 +273,26 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
it 'handles merge request correctly' do
stub_post_to_build_queue(branch: 'dev-123_branch')
- expect(service.execute(data)).to include('Ok')
+ expect(integration.execute(data)).to include('Ok')
end
it 'returns nil when merge request is not opened' do
data[:object_attributes][:state] = 'closed'
- expect(service.execute(data)).to be_nil
+ expect(integration.execute(data)).to be_nil
end
it 'returns nil unless merge request is marked as unchecked' do
data[:object_attributes][:merge_status] = 'can_be_merged'
- expect(service.execute(data)).to be_nil
+ expect(integration.execute(data)).to be_nil
end
end
it 'returns nil when event is not supported' do
data = { object_kind: 'foo' }
- expect(service.execute(data)).to be_nil
+ expect(integration.execute(data)).to be_nil
end
end
diff --git a/spec/models/integrations/youtrack_spec.rb b/spec/models/integrations/youtrack_spec.rb
index 314204f6fb4..f6a9dd8ef37 100644
--- a/spec/models/integrations/youtrack_spec.rb
+++ b/spec/models/integrations/youtrack_spec.rb
@@ -3,13 +3,8 @@
require 'spec_helper'
RSpec.describe Integrations::Youtrack do
- describe 'Associations' do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
describe 'Validations' do
- context 'when service is active' do
+ context 'when integration is active' do
before do
subject.active = true
end
@@ -17,11 +12,11 @@ RSpec.describe Integrations::Youtrack do
it { is_expected.to validate_presence_of(:project_url) }
it { is_expected.to validate_presence_of(:issues_url) }
- it_behaves_like 'issue tracker service URL attribute', :project_url
- it_behaves_like 'issue tracker service URL attribute', :issues_url
+ it_behaves_like 'issue tracker integration URL attribute', :project_url
+ it_behaves_like 'issue tracker integration URL attribute', :issues_url
end
- context 'when service is inactive' do
+ context 'when integration is inactive' do
before do
subject.active = false
end
diff --git a/spec/models/internal_id_spec.rb b/spec/models/internal_id_spec.rb
index 390d1552c16..696b5b48cbf 100644
--- a/spec/models/internal_id_spec.rb
+++ b/spec/models/internal_id_spec.rb
@@ -39,216 +39,217 @@ RSpec.describe InternalId do
end
end
- describe '.generate_next' do
- subject { described_class.generate_next(id_subject, scope, usage, init) }
+ shared_examples_for 'a monotonically increasing id generator' do
+ describe '.generate_next' do
+ subject { described_class.generate_next(id_subject, scope, usage, init) }
- context 'in the absence of a record' do
- it 'creates a record if not yet present' do
- expect { subject }.to change { described_class.count }.from(0).to(1)
- end
+ context 'in the absence of a record' do
+ it 'creates a record if not yet present' do
+ expect { subject }.to change { described_class.count }.from(0).to(1)
+ end
- it 'stores record attributes' do
- subject
+ it 'stores record attributes' do
+ subject
- described_class.first.tap do |record|
- expect(record.project).to eq(project)
- expect(record.usage).to eq(usage.to_s)
+ described_class.first.tap do |record|
+ expect(record.project).to eq(project)
+ expect(record.usage).to eq(usage.to_s)
+ end
end
- end
- context 'with existing issues' do
- before do
- create_list(:issue, 2, project: project)
- described_class.delete_all
- end
+ context 'with existing issues' do
+ before do
+ create_list(:issue, 2, project: project)
+ described_class.delete_all
+ end
- it 'calculates last_value values automatically' do
- expect(subject).to eq(project.issues.size + 1)
+ it 'calculates last_value values automatically' do
+ expect(subject).to eq(project.issues.size + 1)
+ end
end
end
- context 'with concurrent inserts on table' do
- it 'looks up the record if it was created concurrently' do
- args = { **scope, usage: described_class.usages[usage.to_s] }
- record = double
- expect(described_class).to receive(:find_by).with(args).and_return(nil) # first call, record not present
- expect(described_class).to receive(:find_by).with(args).and_return(record) # second call, record was created by another process
- expect(described_class).to receive(:create!).and_raise(ActiveRecord::RecordNotUnique, 'record not unique')
- expect(record).to receive(:increment_and_save!)
-
- subject
+ it 'generates a strictly monotone, gapless sequence' do
+ seq = Array.new(10).map do
+ described_class.generate_next(issue, scope, usage, init)
end
- end
- end
+ normalized = seq.map { |i| i - seq.min }
- it 'generates a strictly monotone, gapless sequence' do
- seq = Array.new(10).map do
- described_class.generate_next(issue, scope, usage, init)
+ expect(normalized).to eq((0..seq.size - 1).to_a)
end
- normalized = seq.map { |i| i - seq.min }
-
- expect(normalized).to eq((0..seq.size - 1).to_a)
- end
- context 'there are no instances to pass in' do
- let(:id_subject) { Issue }
+ context 'there are no instances to pass in' do
+ let(:id_subject) { Issue }
- it 'accepts classes instead' do
- expect(subject).to eq(1)
+ it 'accepts classes instead' do
+ expect(subject).to eq(1)
+ end
end
- end
- context 'when executed outside of transaction' do
- it 'increments counter with in_transaction: "false"' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+ context 'when executed outside of transaction' do
+ it 'increments counter with in_transaction: "false"' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
- expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
- .with(operation: :generate, usage: 'issues', in_transaction: 'false').and_call_original
+ expect(InternalId.internal_id_transactions_total).to receive(:increment)
+ .with(operation: :generate, usage: 'issues', in_transaction: 'false').and_call_original
- subject
+ subject
+ end
end
- end
- context 'when executed within transaction' do
- it 'increments counter with in_transaction: "true"' do
- expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
- .with(operation: :generate, usage: 'issues', in_transaction: 'true').and_call_original
+ context 'when executed within transaction' do
+ it 'increments counter with in_transaction: "true"' do
+ expect(InternalId.internal_id_transactions_total).to receive(:increment)
+ .with(operation: :generate, usage: 'issues', in_transaction: 'true').and_call_original
- InternalId.transaction { subject }
+ InternalId.transaction { subject }
+ end
end
end
- end
- describe '.reset' do
- subject { described_class.reset(issue, scope, usage, value) }
+ describe '.reset' do
+ subject { described_class.reset(issue, scope, usage, value) }
- context 'in the absence of a record' do
- let(:value) { 2 }
+ context 'in the absence of a record' do
+ let(:value) { 2 }
- it 'does not revert back the value' do
- expect { subject }.not_to change { described_class.count }
- expect(subject).to be_falsey
+ it 'does not revert back the value' do
+ expect { subject }.not_to change { described_class.count }
+ expect(subject).to be_falsey
+ end
end
- end
- context 'when valid iid is used to reset' do
- let!(:value) { generate_next }
+ context 'when valid iid is used to reset' do
+ let!(:value) { generate_next }
- context 'and iid is a latest one' do
- it 'does rewind and next generated value is the same' do
- expect(subject).to be_truthy
- expect(generate_next).to eq(value)
+ context 'and iid is a latest one' do
+ it 'does rewind and next generated value is the same' do
+ expect(subject).to be_truthy
+ expect(generate_next).to eq(value)
+ end
end
- end
- context 'and iid is not a latest one' do
- it 'does not rewind' do
- generate_next
+ context 'and iid is not a latest one' do
+ it 'does not rewind' do
+ generate_next
- expect(subject).to be_falsey
- expect(generate_next).to be > value
+ expect(subject).to be_falsey
+ expect(generate_next).to be > value
+ end
end
- end
- def generate_next
- described_class.generate_next(issue, scope, usage, init)
+ def generate_next
+ described_class.generate_next(issue, scope, usage, init)
+ end
end
- end
- context 'when executed outside of transaction' do
- let(:value) { 2 }
+ context 'when executed outside of transaction' do
+ let(:value) { 2 }
- it 'increments counter with in_transaction: "false"' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+ it 'increments counter with in_transaction: "false"' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
- expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
- .with(operation: :reset, usage: 'issues', in_transaction: 'false').and_call_original
+ expect(InternalId.internal_id_transactions_total).to receive(:increment)
+ .with(operation: :reset, usage: 'issues', in_transaction: 'false').and_call_original
- subject
+ subject
+ end
end
- end
- context 'when executed within transaction' do
- let(:value) { 2 }
+ context 'when executed within transaction' do
+ let(:value) { 2 }
- it 'increments counter with in_transaction: "true"' do
- expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
- .with(operation: :reset, usage: 'issues', in_transaction: 'true').and_call_original
+ it 'increments counter with in_transaction: "true"' do
+ expect(InternalId.internal_id_transactions_total).to receive(:increment)
+ .with(operation: :reset, usage: 'issues', in_transaction: 'true').and_call_original
- InternalId.transaction { subject }
+ InternalId.transaction { subject }
+ end
end
end
- end
- describe '.track_greatest' do
- let(:value) { 9001 }
+ describe '.track_greatest' do
+ let(:value) { 9001 }
- subject { described_class.track_greatest(id_subject, scope, usage, value, init) }
+ subject { described_class.track_greatest(id_subject, scope, usage, value, init) }
- context 'in the absence of a record' do
- it 'creates a record if not yet present' do
- expect { subject }.to change { described_class.count }.from(0).to(1)
+ context 'in the absence of a record' do
+ it 'creates a record if not yet present' do
+ expect { subject }.to change { described_class.count }.from(0).to(1)
+ end
end
- end
- it 'stores record attributes' do
- subject
+ it 'stores record attributes' do
+ subject
- described_class.first.tap do |record|
- expect(record.project).to eq(project)
- expect(record.usage).to eq(usage.to_s)
- expect(record.last_value).to eq(value)
+ described_class.first.tap do |record|
+ expect(record.project).to eq(project)
+ expect(record.usage).to eq(usage.to_s)
+ expect(record.last_value).to eq(value)
+ end
end
- end
- context 'with existing issues' do
- before do
- create(:issue, project: project)
- described_class.delete_all
- end
+ context 'with existing issues' do
+ before do
+ create(:issue, project: project)
+ described_class.delete_all
+ end
- it 'still returns the last value to that of the given value' do
- expect(subject).to eq(value)
+ it 'still returns the last value to that of the given value' do
+ expect(subject).to eq(value)
+ end
end
- end
- context 'when value is less than the current last_value' do
- it 'returns the current last_value' do
- described_class.create!(**scope, usage: usage, last_value: 10_001)
+ context 'when value is less than the current last_value' do
+ it 'returns the current last_value' do
+ described_class.create!(**scope, usage: usage, last_value: 10_001)
- expect(subject).to eq 10_001
+ expect(subject).to eq 10_001
+ end
end
- end
- context 'there are no instances to pass in' do
- let(:id_subject) { Issue }
+ context 'there are no instances to pass in' do
+ let(:id_subject) { Issue }
- it 'accepts classes instead' do
- expect(subject).to eq(value)
+ it 'accepts classes instead' do
+ expect(subject).to eq(value)
+ end
end
- end
- context 'when executed outside of transaction' do
- it 'increments counter with in_transaction: "false"' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+ context 'when executed outside of transaction' do
+ it 'increments counter with in_transaction: "false"' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
- expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
- .with(operation: :track_greatest, usage: 'issues', in_transaction: 'false').and_call_original
+ expect(InternalId.internal_id_transactions_total).to receive(:increment)
+ .with(operation: :track_greatest, usage: 'issues', in_transaction: 'false').and_call_original
- subject
+ subject
+ end
end
- end
- context 'when executed within transaction' do
- it 'increments counter with in_transaction: "true"' do
- expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
- .with(operation: :track_greatest, usage: 'issues', in_transaction: 'true').and_call_original
+ context 'when executed within transaction' do
+ it 'increments counter with in_transaction: "true"' do
+ expect(InternalId.internal_id_transactions_total).to receive(:increment)
+ .with(operation: :track_greatest, usage: 'issues', in_transaction: 'true').and_call_original
- InternalId.transaction { subject }
+ InternalId.transaction { subject }
+ end
end
end
end
+ context 'when the feature flag is disabled' do
+ stub_feature_flags(generate_iids_without_explicit_locking: false)
+
+ it_behaves_like 'a monotonically increasing id generator'
+ end
+
+ context 'when the feature flag is enabled' do
+ stub_feature_flags(generate_iids_without_explicit_locking: true)
+
+ it_behaves_like 'a monotonically increasing id generator'
+ end
+
describe '#increment_and_save!' do
let(:id) { create(:internal_id) }
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index edb93ecf4b6..441446bae60 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -128,6 +128,24 @@ RSpec.describe Issue do
end
end
+ context 'order by upvotes' do
+ let!(:issue) { create(:issue) }
+ let!(:issue2) { create(:issue) }
+ let!(:award_emoji) { create(:award_emoji, :upvote, awardable: issue2) }
+
+ describe '.order_upvotes_desc' do
+ it 'orders on upvotes' do
+ expect(described_class.order_upvotes_desc.to_a).to eq [issue2, issue]
+ end
+ end
+
+ describe '.order_upvotes_asc' do
+ it 'orders on upvotes' do
+ expect(described_class.order_upvotes_asc.to_a).to eq [issue, issue2]
+ end
+ end
+ end
+
describe '.with_alert_management_alerts' do
subject { described_class.with_alert_management_alerts }
@@ -1051,23 +1069,53 @@ RSpec.describe Issue do
describe '#check_for_spam?' do
using RSpec::Parameterized::TableSyntax
-
- where(:visibility_level, :confidential, :new_attributes, :check_for_spam?) do
- Gitlab::VisibilityLevel::PUBLIC | false | { description: 'woo' } | true
- Gitlab::VisibilityLevel::PUBLIC | false | { title: 'woo' } | true
- Gitlab::VisibilityLevel::PUBLIC | true | { confidential: false } | true
- Gitlab::VisibilityLevel::PUBLIC | true | { description: 'woo' } | false
- Gitlab::VisibilityLevel::PUBLIC | false | { title: 'woo', confidential: true } | false
- Gitlab::VisibilityLevel::PUBLIC | false | { description: 'original description' } | false
- Gitlab::VisibilityLevel::INTERNAL | false | { description: 'woo' } | false
- Gitlab::VisibilityLevel::PRIVATE | false | { description: 'woo' } | false
+ let_it_be(:support_bot) { ::User.support_bot }
+
+ where(:support_bot?, :visibility_level, :confidential, :new_attributes, :check_for_spam?) do
+ ### non-support-bot cases
+ # spammable attributes changing
+ false | Gitlab::VisibilityLevel::PUBLIC | false | { description: 'new' } | true
+ false | Gitlab::VisibilityLevel::PUBLIC | false | { title: 'new' } | true
+ # confidential to non-confidential
+ false | Gitlab::VisibilityLevel::PUBLIC | true | { confidential: false } | true
+ # non-confidential to confidential
+ false | Gitlab::VisibilityLevel::PUBLIC | false | { confidential: true } | false
+ # spammable attributes changing on confidential
+ false | Gitlab::VisibilityLevel::PUBLIC | true | { description: 'new' } | false
+ # spammable attributes changing while changing to confidential
+ false | Gitlab::VisibilityLevel::PUBLIC | false | { title: 'new', confidential: true } | false
+ # spammable attribute not changing
+ false | Gitlab::VisibilityLevel::PUBLIC | false | { description: 'original description' } | false
+ # non-spammable attribute changing
+ false | Gitlab::VisibilityLevel::PUBLIC | false | { weight: 3 } | false
+ # spammable attributes changing on non-public
+ false | Gitlab::VisibilityLevel::INTERNAL | false | { description: 'new' } | false
+ false | Gitlab::VisibilityLevel::PRIVATE | false | { description: 'new' } | false
+
+ ### support-bot cases
+ # confidential to non-confidential
+ true | Gitlab::VisibilityLevel::PUBLIC | true | { confidential: false } | true
+ # non-confidential to confidential
+ true | Gitlab::VisibilityLevel::PUBLIC | false | { confidential: true } | false
+ # spammable attributes changing on confidential
+ true | Gitlab::VisibilityLevel::PUBLIC | true | { description: 'new' } | true
+ # spammable attributes changing while changing to confidential
+ true | Gitlab::VisibilityLevel::PUBLIC | false | { title: 'new', confidential: true } | true
+ # spammable attributes changing on non-public
+ true | Gitlab::VisibilityLevel::INTERNAL | false | { description: 'new' } | true
+ true | Gitlab::VisibilityLevel::PRIVATE | false | { title: 'new' } | true
+ # spammable attribute not changing
+ true | Gitlab::VisibilityLevel::PUBLIC | false | { description: 'original description' } | false
+ # non-spammable attribute changing
+ true | Gitlab::VisibilityLevel::PRIVATE | true | { weight: 3 } | false
end
with_them do
- it 'checks for spam on issues that can be seen anonymously' do
+ it 'checks for spam when necessary' do
+ author = support_bot? ? support_bot : user
project = reusable_project
project.update!(visibility_level: visibility_level)
- issue = create(:issue, project: project, confidential: confidential, description: 'original description')
+ issue = create(:issue, project: project, confidential: confidential, description: 'original description', author: author)
issue.assign_attributes(new_attributes)
diff --git a/spec/models/label_note_spec.rb b/spec/models/label_note_spec.rb
index 0bf202ce2b1..ee4822c653d 100644
--- a/spec/models/label_note_spec.rb
+++ b/spec/models/label_note_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe LabelNote do
let_it_be(:user) { create(:user) }
let_it_be(:label) { create(:label, project: project) }
let_it_be(:label2) { create(:label, project: project) }
+
let(:resource_parent) { project }
context 'when resource is issue' do
diff --git a/spec/models/lfs_file_lock_spec.rb b/spec/models/lfs_file_lock_spec.rb
index d3f79c7c7cf..5afad6c184f 100644
--- a/spec/models/lfs_file_lock_spec.rb
+++ b/spec/models/lfs_file_lock_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe LfsFileLock do
let_it_be(:lfs_file_lock, reload: true) { create(:lfs_file_lock) }
+
subject { lfs_file_lock }
it { is_expected.to belong_to(:project) }
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 372fc40afcc..5824c2085ce 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -30,6 +30,7 @@ RSpec.describe Member do
context "when an invite email is provided" do
let_it_be(:project) { create(:project) }
+
let(:member) { build(:project_member, source: project, invite_email: "user@example.com", user: nil) }
it "doesn't require a user" do
@@ -98,6 +99,7 @@ RSpec.describe Member do
context 'project bots' do
let_it_be(:project_bot) { create(:user, :project_bot) }
+
let(:new_member) { build(:project_member, user_id: project_bot.id) }
context 'not a member of any group or project' do
@@ -476,6 +478,20 @@ RSpec.describe Member do
it { is_expected.to include @blocked_maintainer }
it { is_expected.to include @blocked_developer }
it { is_expected.to include @member_with_minimal_access }
+
+ context 'with where conditions' do
+ let_it_be(:example_member) { create(:group_member, invite_email: 'user@example.com') }
+
+ subject do
+ described_class
+ .default_scoped
+ .where(invite_email: 'user@example.com')
+ .distinct_on_user_with_max_access_level
+ .to_a
+ end
+
+ it { is_expected.to eq [example_member] }
+ end
end
end
@@ -494,282 +510,6 @@ RSpec.describe Member do
end
end
- describe '.add_user' do
- %w[project group].each do |source_type|
- context "when source is a #{source_type}" do
- let_it_be(:source, reload: true) { create(source_type, :public) }
- let_it_be(:user) { create(:user) }
- let_it_be(:admin) { create(:admin) }
-
- it 'returns a <Source>Member object' do
- member = described_class.add_user(source, user, :maintainer)
-
- expect(member).to be_a "#{source_type.classify}Member".constantize
- expect(member).to be_persisted
- end
-
- context 'when admin mode is enabled', :enable_admin_mode do
- it 'sets members.created_by to the given admin current_user' do
- member = described_class.add_user(source, user, :maintainer, current_user: admin)
-
- expect(member.created_by).to eq(admin)
- end
- end
-
- context 'when admin mode is disabled' do
- it 'rejects setting members.created_by to the given admin current_user' do
- member = described_class.add_user(source, user, :maintainer, current_user: admin)
-
- expect(member.created_by).to be_nil
- end
- end
-
- it 'sets members.expires_at to the given expires_at' do
- member = described_class.add_user(source, user, :maintainer, expires_at: Date.new(2016, 9, 22))
-
- expect(member.expires_at).to eq(Date.new(2016, 9, 22))
- end
-
- described_class.access_levels.each do |sym_key, int_access_level|
- it "accepts the :#{sym_key} symbol as access level" do
- expect(source.users).not_to include(user)
-
- member = described_class.add_user(source, user.id, sym_key)
-
- expect(member.access_level).to eq(int_access_level)
- expect(source.users.reload).to include(user)
- end
-
- it "accepts the #{int_access_level} integer as access level" do
- expect(source.users).not_to include(user)
-
- member = described_class.add_user(source, user.id, int_access_level)
-
- expect(member.access_level).to eq(int_access_level)
- expect(source.users.reload).to include(user)
- end
- end
-
- context 'with no current_user' do
- context 'when called with a known user id' do
- it 'adds the user as a member' do
- expect(source.users).not_to include(user)
-
- described_class.add_user(source, user.id, :maintainer)
-
- expect(source.users.reload).to include(user)
- end
- end
-
- context 'when called with an unknown user id' do
- it 'adds the user as a member' do
- expect(source.users).not_to include(user)
-
- described_class.add_user(source, non_existing_record_id, :maintainer)
-
- expect(source.users.reload).not_to include(user)
- end
- end
-
- context 'when called with a user object' do
- it 'adds the user as a member' do
- expect(source.users).not_to include(user)
-
- described_class.add_user(source, user, :maintainer)
-
- expect(source.users.reload).to include(user)
- end
- end
-
- context 'when called with a requester user object' do
- before do
- source.request_access(user)
- end
-
- it 'adds the requester as a member' do
- expect(source.users).not_to include(user)
- expect(source.requesters.exists?(user_id: user)).to be_truthy
-
- expect { described_class.add_user(source, user, :maintainer) }
- .to raise_error(Gitlab::Access::AccessDeniedError)
-
- expect(source.users.reload).not_to include(user)
- expect(source.requesters.reload.exists?(user_id: user)).to be_truthy
- end
- end
-
- context 'when called with a known user email' do
- it 'adds the user as a member' do
- expect(source.users).not_to include(user)
-
- described_class.add_user(source, user.email, :maintainer)
-
- expect(source.users.reload).to include(user)
- end
- end
-
- context 'when called with a known user secondary email' do
- let(:secondary_email) { create(:email, email: 'secondary@example.com', user: user) }
-
- it 'adds the user as a member' do
- expect(source.users).not_to include(user)
-
- described_class.add_user(source, secondary_email.email, :maintainer)
-
- expect(source.users.reload).to include(user)
- end
- end
-
- context 'when called with an unknown user email' do
- it 'creates an invited member' do
- expect(source.users).not_to include(user)
-
- described_class.add_user(source, 'user@example.com', :maintainer)
-
- expect(source.members.invite.pluck(:invite_email)).to include('user@example.com')
- end
- end
-
- context 'when called with an unknown user email starting with a number' do
- it 'creates an invited member', :aggregate_failures do
- email_starting_with_number = "#{user.id}_email@example.com"
-
- described_class.add_user(source, email_starting_with_number, :maintainer)
-
- expect(source.members.invite.pluck(:invite_email)).to include(email_starting_with_number)
- expect(source.users.reload).not_to include(user)
- end
- end
- end
-
- context 'when current_user can update member', :enable_admin_mode do
- it 'creates the member' do
- expect(source.users).not_to include(user)
-
- described_class.add_user(source, user, :maintainer, current_user: admin)
-
- expect(source.users.reload).to include(user)
- end
-
- context 'when called with a requester user object' do
- before do
- source.request_access(user)
- end
-
- it 'adds the requester as a member' do
- expect(source.users).not_to include(user)
- expect(source.requesters.exists?(user_id: user)).to be_truthy
-
- described_class.add_user(source, user, :maintainer, current_user: admin)
-
- expect(source.users.reload).to include(user)
- expect(source.requesters.reload.exists?(user_id: user)).to be_falsy
- end
- end
- end
-
- context 'when current_user cannot update member' do
- it 'does not create the member' do
- expect(source.users).not_to include(user)
-
- member = described_class.add_user(source, user, :maintainer, current_user: user)
-
- expect(source.users.reload).not_to include(user)
- expect(member).not_to be_persisted
- end
-
- context 'when called with a requester user object' do
- before do
- source.request_access(user)
- end
-
- it 'does not destroy the requester' do
- expect(source.users).not_to include(user)
- expect(source.requesters.exists?(user_id: user)).to be_truthy
-
- described_class.add_user(source, user, :maintainer, current_user: user)
-
- expect(source.users.reload).not_to include(user)
- expect(source.requesters.exists?(user_id: user)).to be_truthy
- end
- end
- end
-
- context 'when member already exists' do
- before do
- source.add_user(user, :developer)
- end
-
- context 'with no current_user' do
- it 'updates the member' do
- expect(source.users).to include(user)
-
- described_class.add_user(source, user, :maintainer)
-
- expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::MAINTAINER)
- end
- end
-
- context 'when current_user can update member', :enable_admin_mode do
- it 'updates the member' do
- expect(source.users).to include(user)
-
- described_class.add_user(source, user, :maintainer, current_user: admin)
-
- expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::MAINTAINER)
- end
- end
-
- context 'when current_user cannot update member' do
- it 'does not update the member' do
- expect(source.users).to include(user)
-
- described_class.add_user(source, user, :maintainer, current_user: user)
-
- expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::DEVELOPER)
- end
- end
- end
- end
- end
- end
-
- describe '.add_users' do
- %w[project group].each do |source_type|
- context "when source is a #{source_type}" do
- let_it_be(:source) { create(source_type, :public) }
- let_it_be(:admin) { create(:admin) }
- let_it_be(:user1) { create(:user) }
- let_it_be(:user2) { create(:user) }
-
- it 'returns a <Source>Member objects' do
- members = described_class.add_users(source, [user1, user2], :maintainer)
-
- expect(members).to be_a Array
- expect(members.size).to eq(2)
- expect(members.first).to be_a "#{source_type.classify}Member".constantize
- expect(members.first).to be_persisted
- end
-
- it 'returns an empty array' do
- members = described_class.add_users(source, [], :maintainer)
-
- expect(members).to be_a Array
- expect(members).to be_empty
- end
-
- it 'supports differents formats' do
- list = ['joe@local.test', admin, user1.id, user2.id.to_s]
-
- members = described_class.add_users(source, list, :maintainer)
-
- expect(members.size).to eq(4)
- expect(members.first).to be_invite
- end
- end
- end
- end
-
describe '#accept_request' do
let(:member) { create(:project_member, requested_at: Time.current.utc) }
@@ -966,7 +706,8 @@ RSpec.describe Member do
end
context 'when after_commit :update_highest_role' do
- let!(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+
let(:user_id) { user.id }
where(:member_type, :source_type) do
@@ -1001,7 +742,7 @@ RSpec.describe Member do
end
describe 'destroy member' do
- subject { member.destroy! }
+ subject { member.reload.destroy! }
include_examples 'update highest role with exclusive lease'
end
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index 8c942228059..472f4280d26 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -47,27 +47,6 @@ RSpec.describe GroupMember do
end
end
- describe '.access_levels' do
- it 'returns Gitlab::Access.options_with_owner' do
- expect(described_class.access_levels).to eq(Gitlab::Access.sym_options_with_owner)
- end
- end
-
- describe '.add_users' do
- it 'adds the given users to the given group' do
- group = create(:group)
- users = create_list(:user, 2)
-
- described_class.add_users(
- group,
- [users.first.id, users.second],
- described_class::MAINTAINER
- )
-
- expect(group.users).to include(users.first, users.second)
- end
- end
-
it_behaves_like 'members notifications', :group
describe '#namespace_id' do
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index b84b408cb4b..4c59bda856f 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -23,19 +23,6 @@ RSpec.describe ProjectMember do
end
end
- describe '.add_user' do
- it 'adds the user as a member' do
- user = create(:user)
- project = create(:project)
-
- expect(project.users).not_to include(user)
-
- described_class.add_user(project, user, :maintainer, current_user: project.owner)
-
- expect(project.users.reload).to include(user)
- end
- end
-
describe '#real_source_type' do
subject { create(:project_member).real_source_type }
diff --git a/spec/models/merge_request/cleanup_schedule_spec.rb b/spec/models/merge_request/cleanup_schedule_spec.rb
index 925d287088b..85208f901fd 100644
--- a/spec/models/merge_request/cleanup_schedule_spec.rb
+++ b/spec/models/merge_request/cleanup_schedule_spec.rb
@@ -11,22 +11,125 @@ RSpec.describe MergeRequest::CleanupSchedule do
it { is_expected.to validate_presence_of(:scheduled_at) }
end
- describe '.scheduled_merge_request_ids' do
- let_it_be(:mr_cleanup_schedule_1) { create(:merge_request_cleanup_schedule, scheduled_at: 2.days.ago) }
- let_it_be(:mr_cleanup_schedule_2) { create(:merge_request_cleanup_schedule, scheduled_at: 1.day.ago) }
- let_it_be(:mr_cleanup_schedule_3) { create(:merge_request_cleanup_schedule, scheduled_at: 1.day.ago, completed_at: Time.current) }
- let_it_be(:mr_cleanup_schedule_4) { create(:merge_request_cleanup_schedule, scheduled_at: 4.days.ago) }
- let_it_be(:mr_cleanup_schedule_5) { create(:merge_request_cleanup_schedule, scheduled_at: 3.days.ago) }
- let_it_be(:mr_cleanup_schedule_6) { create(:merge_request_cleanup_schedule, scheduled_at: 1.day.from_now) }
- let_it_be(:mr_cleanup_schedule_7) { create(:merge_request_cleanup_schedule, scheduled_at: 5.days.ago) }
-
- it 'only includes incomplete schedule within the specified limit' do
- expect(described_class.scheduled_merge_request_ids(4)).to eq([
- mr_cleanup_schedule_2.merge_request_id,
- mr_cleanup_schedule_1.merge_request_id,
- mr_cleanup_schedule_5.merge_request_id,
- mr_cleanup_schedule_4.merge_request_id
+ describe 'state machine transitions' do
+ let(:cleanup_schedule) { create(:merge_request_cleanup_schedule) }
+
+ it 'sets status to unstarted by default' do
+ expect(cleanup_schedule).to be_unstarted
+ end
+
+ describe '#run' do
+ it 'sets the status to running' do
+ cleanup_schedule.run
+
+ expect(cleanup_schedule.reload).to be_running
+ end
+
+ context 'when previous status is not unstarted' do
+ let(:cleanup_schedule) { create(:merge_request_cleanup_schedule, :running) }
+
+ it 'does not change status' do
+ expect { cleanup_schedule.run }.not_to change(cleanup_schedule, :status)
+ end
+ end
+ end
+
+ describe '#retry' do
+ let(:cleanup_schedule) { create(:merge_request_cleanup_schedule, :running) }
+
+ it 'sets the status to unstarted' do
+ cleanup_schedule.retry
+
+ expect(cleanup_schedule.reload).to be_unstarted
+ end
+
+ it 'increments failed_count' do
+ expect { cleanup_schedule.retry }.to change(cleanup_schedule, :failed_count).by(1)
+ end
+
+ context 'when previous status is not running' do
+ let(:cleanup_schedule) { create(:merge_request_cleanup_schedule) }
+
+ it 'does not change status' do
+ expect { cleanup_schedule.retry }.not_to change(cleanup_schedule, :status)
+ end
+ end
+ end
+
+ describe '#complete' do
+ let(:cleanup_schedule) { create(:merge_request_cleanup_schedule, :running) }
+
+ it 'sets the status to completed' do
+ cleanup_schedule.complete
+
+ expect(cleanup_schedule.reload).to be_completed
+ end
+
+ it 'sets the completed_at' do
+ expect { cleanup_schedule.complete }.to change(cleanup_schedule, :completed_at)
+ end
+
+ context 'when previous status is not running' do
+ let(:cleanup_schedule) { create(:merge_request_cleanup_schedule, :completed) }
+
+ it 'does not change status' do
+ expect { cleanup_schedule.complete }.not_to change(cleanup_schedule, :status)
+ end
+ end
+ end
+
+ describe '#mark_as_failed' do
+ let(:cleanup_schedule) { create(:merge_request_cleanup_schedule, :running) }
+
+ it 'sets the status to failed' do
+ cleanup_schedule.mark_as_failed
+
+ expect(cleanup_schedule.reload).to be_failed
+ end
+
+ it 'increments failed_count' do
+ expect { cleanup_schedule.mark_as_failed }.to change(cleanup_schedule, :failed_count).by(1)
+ end
+
+ context 'when previous status is not running' do
+ let(:cleanup_schedule) { create(:merge_request_cleanup_schedule, :failed) }
+
+ it 'does not change status' do
+ expect { cleanup_schedule.mark_as_failed }.not_to change(cleanup_schedule, :status)
+ end
+ end
+ end
+ end
+
+ describe '.scheduled_and_unstarted' do
+ let!(:cleanup_schedule_1) { create(:merge_request_cleanup_schedule, scheduled_at: 2.days.ago) }
+ let!(:cleanup_schedule_2) { create(:merge_request_cleanup_schedule, scheduled_at: 1.day.ago) }
+ let!(:cleanup_schedule_3) { create(:merge_request_cleanup_schedule, :completed, scheduled_at: 1.day.ago) }
+ let!(:cleanup_schedule_4) { create(:merge_request_cleanup_schedule, scheduled_at: 4.days.ago) }
+ let!(:cleanup_schedule_5) { create(:merge_request_cleanup_schedule, scheduled_at: 3.days.ago) }
+ let!(:cleanup_schedule_6) { create(:merge_request_cleanup_schedule, scheduled_at: 1.day.from_now) }
+ let!(:cleanup_schedule_7) { create(:merge_request_cleanup_schedule, :failed, scheduled_at: 5.days.ago) }
+
+ it 'returns records that are scheduled before or on current time and unstarted (ordered by scheduled first)' do
+ expect(described_class.scheduled_and_unstarted).to eq([
+ cleanup_schedule_2,
+ cleanup_schedule_1,
+ cleanup_schedule_5,
+ cleanup_schedule_4
])
end
end
+
+ describe '.start_next' do
+ let!(:cleanup_schedule_1) { create(:merge_request_cleanup_schedule, :completed, scheduled_at: 1.day.ago) }
+ let!(:cleanup_schedule_2) { create(:merge_request_cleanup_schedule, scheduled_at: 2.days.ago) }
+ let!(:cleanup_schedule_3) { create(:merge_request_cleanup_schedule, :running, scheduled_at: 1.day.ago) }
+ let!(:cleanup_schedule_4) { create(:merge_request_cleanup_schedule, scheduled_at: 3.days.ago) }
+ let!(:cleanup_schedule_5) { create(:merge_request_cleanup_schedule, :failed, scheduled_at: 3.days.ago) }
+
+ it 'finds the next scheduled and unstarted then marks it as running' do
+ expect(described_class.start_next).to eq(cleanup_schedule_2)
+ expect(cleanup_schedule_2.reload).to be_running
+ end
+ end
end
diff --git a/spec/models/merge_request/diff_commit_user_spec.rb b/spec/models/merge_request/diff_commit_user_spec.rb
new file mode 100644
index 00000000000..08e073568f9
--- /dev/null
+++ b/spec/models/merge_request/diff_commit_user_spec.rb
@@ -0,0 +1,127 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequest::DiffCommitUser do
+ describe 'validations' do
+ it 'requires that names are less than 512 characters long' do
+ expect(described_class.new(name: 'a' * 1000)).not_to be_valid
+ end
+
+ it 'requires that Emails are less than 512 characters long' do
+ expect(described_class.new(email: 'a' * 1000)).not_to be_valid
+ end
+
+ it 'requires either a name or Email' do
+ expect(described_class.new).not_to be_valid
+ end
+
+ it 'allows setting of just a name' do
+ expect(described_class.new(name: 'Alice')).to be_valid
+ end
+
+ it 'allows setting of just an Email' do
+ expect(described_class.new(email: 'alice@example.com')).to be_valid
+ end
+
+ it 'allows setting of both a name and Email' do
+ expect(described_class.new(name: 'Alice', email: 'alice@example.com'))
+ .to be_valid
+ end
+ end
+
+ describe '.prepare' do
+ it 'trims a value to at most 512 characters' do
+ expect(described_class.prepare('€' * 1_000)).to eq('€' * 512)
+ end
+
+ it 'returns nil if the value is an empty string' do
+ expect(described_class.prepare('')).to be_nil
+ end
+ end
+
+ describe '.find_or_create' do
+ it 'creates a new row if none exist' do
+ alice = described_class.find_or_create('Alice', 'alice@example.com')
+
+ expect(alice.name).to eq('Alice')
+ expect(alice.email).to eq('alice@example.com')
+ end
+
+ it 'returns an existing row if one exists' do
+ user1 = create(:merge_request_diff_commit_user)
+ user2 = described_class.find_or_create(user1.name, user1.email)
+
+ expect(user1).to eq(user2)
+ end
+
+ it 'handles concurrent inserts' do
+ user = create(:merge_request_diff_commit_user)
+
+ expect(described_class)
+ .to receive(:find_or_create_by!)
+ .ordered
+ .with(name: user.name, email: user.email)
+ .and_raise(ActiveRecord::RecordNotUnique)
+
+ expect(described_class)
+ .to receive(:find_or_create_by!)
+ .ordered
+ .with(name: user.name, email: user.email)
+ .and_return(user)
+
+ expect(described_class.find_or_create(user.name, user.email)).to eq(user)
+ end
+ end
+
+ describe '.bulk_find_or_create' do
+ it 'bulk creates missing rows and reuses existing rows' do
+ bob = create(
+ :merge_request_diff_commit_user,
+ name: 'Bob',
+ email: 'bob@example.com'
+ )
+
+ users = described_class.bulk_find_or_create(
+ [%w[Alice alice@example.com], %w[Bob bob@example.com]]
+ )
+ alice = described_class.find_by(name: 'Alice')
+
+ expect(users[%w[Alice alice@example.com]]).to eq(alice)
+ expect(users[%w[Bob bob@example.com]]).to eq(bob)
+ end
+
+ it 'does not insert any data when all users exist' do
+ bob = create(
+ :merge_request_diff_commit_user,
+ name: 'Bob',
+ email: 'bob@example.com'
+ )
+
+ users = described_class.bulk_find_or_create([%w[Bob bob@example.com]])
+
+ expect(described_class).not_to receive(:insert_all)
+ expect(users[%w[Bob bob@example.com]]).to eq(bob)
+ end
+
+ it 'handles concurrently inserted rows' do
+ bob = create(
+ :merge_request_diff_commit_user,
+ name: 'Bob',
+ email: 'bob@example.com'
+ )
+
+ input = [%w[Bob bob@example.com]]
+
+ expect(described_class)
+ .to receive(:bulk_find)
+ .twice
+ .with(input)
+ .and_return([], [bob])
+
+ users = described_class.bulk_find_or_create(input)
+
+ expect(users[%w[Bob bob@example.com]]).to eq(bob)
+ end
+ end
+end
diff --git a/spec/models/merge_request_diff_commit_spec.rb b/spec/models/merge_request_diff_commit_spec.rb
index a24628b0f9d..6290468d4a7 100644
--- a/spec/models/merge_request_diff_commit_spec.rb
+++ b/spec/models/merge_request_diff_commit_spec.rb
@@ -16,6 +16,11 @@ RSpec.describe MergeRequestDiffCommit do
let(:invalid_items_for_bulk_insertion) { [] } # class does not have any validations defined
end
+ describe 'associations' do
+ it { is_expected.to belong_to(:commit_author) }
+ it { is_expected.to belong_to(:committer) }
+ end
+
describe '#to_hash' do
subject { merge_request.commits.first }
@@ -46,6 +51,8 @@ RSpec.describe MergeRequestDiffCommit do
"committed_date": "2014-02-27T10:01:38.000+01:00".to_time,
"committer_name": "Dmitriy Zaporozhets",
"committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author_id": an_instance_of(Integer),
+ "committer_id": an_instance_of(Integer),
"merge_request_diff_id": merge_request_diff_id,
"relative_order": 0,
"sha": Gitlab::Database::ShaAttribute.serialize("5937ac0a7beb003549fc5fd26fc247adbce4a52e"),
@@ -59,6 +66,8 @@ RSpec.describe MergeRequestDiffCommit do
"committed_date": "2014-02-27T09:57:31.000+01:00".to_time,
"committer_name": "Dmitriy Zaporozhets",
"committer_email": "dmitriy.zaporozhets@gmail.com",
+ "commit_author_id": an_instance_of(Integer),
+ "committer_id": an_instance_of(Integer),
"merge_request_diff_id": merge_request_diff_id,
"relative_order": 1,
"sha": Gitlab::Database::ShaAttribute.serialize("570e7b2abdd848b95f2f578043fc23bd6f6fd24d"),
@@ -76,6 +85,21 @@ RSpec.describe MergeRequestDiffCommit do
subject
end
+ it 'creates diff commit users' do
+ diff = create(:merge_request_diff, merge_request: merge_request)
+
+ described_class.create_bulk(diff.id, [commits.first])
+
+ commit_row = MergeRequestDiffCommit
+ .find_by(merge_request_diff_id: diff.id, relative_order: 0)
+
+ commit_user_row =
+ MergeRequest::DiffCommitUser.find_by(name: 'Dmitriy Zaporozhets')
+
+ expect(commit_row.commit_author).to eq(commit_user_row)
+ expect(commit_row.committer).to eq(commit_user_row)
+ end
+
context 'with dates larger than the DB limit' do
let(:commits) do
# This commit's date is "Sun Aug 17 07:12:55 292278994 +0000"
@@ -92,6 +116,8 @@ RSpec.describe MergeRequestDiffCommit do
"committed_date": timestamp,
"committer_name": "Alejandro Rodríguez",
"committer_email": "alejorro70@gmail.com",
+ "commit_author_id": an_instance_of(Integer),
+ "committer_id": an_instance_of(Integer),
"merge_request_diff_id": merge_request_diff_id,
"relative_order": 0,
"sha": Gitlab::Database::ShaAttribute.serialize("ba3343bc4fa403a8dfbfcab7fc1a8c29ee34bd69"),
@@ -107,4 +133,28 @@ RSpec.describe MergeRequestDiffCommit do
end
end
end
+
+ describe '.prepare_commits_for_bulk_insert' do
+ it 'returns the commit hashes and unique user tuples' do
+ commit = double(:commit, to_hash: {
+ parent_ids: %w[foo bar],
+ author_name: 'a' * 1000,
+ author_email: 'a' * 1000,
+ committer_name: 'Alice',
+ committer_email: 'alice@example.com'
+ })
+
+ hashes, tuples = described_class.prepare_commits_for_bulk_insert([commit])
+
+ expect(hashes).to eq([{
+ author_name: 'a' * 512,
+ author_email: 'a' * 512,
+ committer_name: 'Alice',
+ committer_email: 'alice@example.com'
+ }])
+
+ expect(tuples)
+ .to include(['a' * 512, 'a' * 512], %w[Alice alice@example.com])
+ end
+ end
end
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 3741e01e99a..e0e25031589 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -9,10 +9,6 @@ RSpec.describe MergeRequestDiff do
let(:diff_with_commits) { create(:merge_request).merge_request_diff }
- before do
- stub_feature_flags(diffs_gradual_load: false)
- end
-
describe 'validations' do
subject { diff_with_commits }
@@ -115,6 +111,7 @@ RSpec.describe MergeRequestDiff do
let(:closed_recently) { recently_closed_mr.merge_request_diff }
let_it_be(:recently_merged_mr) { create(:merge_request, :merged) }
+
let(:merged_recently) { recently_merged_mr.merge_request_diff }
before do
@@ -436,9 +433,7 @@ RSpec.describe MergeRequestDiff do
it 'returns empty pagination data' do
diffs = diff_with_commits.diffs_in_batch(1, 10, diff_options: diff_options)
- expect(diffs.pagination_data).to eq(current_page: nil,
- next_page: nil,
- total_pages: nil)
+ expect(diffs.pagination_data).to eq(total_pages: nil)
end
end
@@ -460,19 +455,17 @@ RSpec.describe MergeRequestDiff do
context 'when persisted files available' do
it 'returns paginated diffs' do
- diffs = diff_with_commits.diffs_in_batch(1, 10, diff_options: diff_options)
+ diffs = diff_with_commits.diffs_in_batch(0, 10, diff_options: diff_options)
expect(diffs).to be_a(Gitlab::Diff::FileCollection::MergeRequestDiffBatch)
expect(diffs.diff_files.size).to eq(10)
- expect(diffs.pagination_data).to eq(current_page: 1,
- next_page: 2,
- total_pages: 2)
+ expect(diffs.pagination_data).to eq(total_pages: 20)
end
it 'sorts diff files directory first' do
diff_with_commits.update!(sorted: false) # Mark as unsorted so it'll re-order
- expect(diff_with_commits.diffs_in_batch(1, 10, diff_options: diff_options).diff_file_paths).to eq([
+ expect(diff_with_commits.diffs_in_batch(0, 10, diff_options: diff_options).diff_file_paths).to eq([
'bar/branch-test.txt',
'custom-highlighting/test.gitlab-custom',
'encoding/iso8859.txt',
@@ -491,43 +484,21 @@ RSpec.describe MergeRequestDiff do
{ ignore_whitespace_change: true }
end
- it 'returns a Gitlab::Diff::FileCollection::Compare with paginated diffs' do
+ it 'returns pagination data from MergeRequestDiffBatch' do
diffs = diff_with_commits.diffs_in_batch(1, 10, diff_options: diff_options)
+ file_count = diff_with_commits.merge_request_diff_files.count
expect(diffs).to be_a(Gitlab::Diff::FileCollection::Compare)
expect(diffs.diff_files.size).to eq 10
- expect(diffs.pagination_data).to eq(current_page: 1, next_page: 2, total_pages: 2)
+ expect(diffs.pagination_data).to eq(total_pages: file_count)
end
it 'returns an empty MergeRequestBatch with empty pagination data when the batch is empty' do
- diffs = diff_with_commits.diffs_in_batch(3, 10, diff_options: diff_options)
+ diffs = diff_with_commits.diffs_in_batch(30, 10, diff_options: diff_options)
expect(diffs).to be_a(Gitlab::Diff::FileCollection::MergeRequestDiffBatch)
expect(diffs.diff_files.size).to eq 0
- expect(diffs.pagination_data).to eq(current_page: nil, next_page: nil, total_pages: nil)
- end
-
- context 'with gradual load enabled' do
- before do
- stub_feature_flags(diffs_gradual_load: true)
- end
-
- it 'returns pagination data from MergeRequestDiffBatch' do
- diffs = diff_with_commits.diffs_in_batch(1, 10, diff_options: diff_options)
- file_count = diff_with_commits.merge_request_diff_files.count
-
- expect(diffs).to be_a(Gitlab::Diff::FileCollection::Compare)
- expect(diffs.diff_files.size).to eq 10
- expect(diffs.pagination_data).to eq(current_page: nil, next_page: nil, total_pages: file_count)
- end
-
- it 'returns an empty MergeRequestBatch with empty pagination data when the batch is empty' do
- diffs = diff_with_commits.diffs_in_batch(30, 10, diff_options: diff_options)
-
- expect(diffs).to be_a(Gitlab::Diff::FileCollection::MergeRequestDiffBatch)
- expect(diffs.diff_files.size).to eq 0
- expect(diffs.pagination_data).to eq(current_page: nil, next_page: nil, total_pages: nil)
- end
+ expect(diffs.pagination_data).to eq(total_pages: nil)
end
end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 73b1cb13f19..edd543854cb 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -441,6 +441,22 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '.join_metrics' do
+ let_it_be(:join_condition) { '"merge_request_metrics"."target_project_id" = 1' }
+
+ context 'when no target_project_id is available' do
+ it 'moves target_project_id condition to the merge request metrics' do
+ expect(described_class.join_metrics(1).to_sql).to include(join_condition)
+ end
+ end
+
+ context 'when a target_project_id is present in the where conditions' do
+ it 'moves target_project_id condition to the merge request metrics' do
+ expect(described_class.where(target_project_id: 1).join_metrics.to_sql).to include(join_condition)
+ end
+ end
+ end
+
describe '.by_related_commit_sha' do
subject { described_class.by_related_commit_sha(sha) }
@@ -779,7 +795,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'when both internal and external issue trackers are enabled' do
before do
- create(:jira_service, project: subject.project)
+ create(:jira_integration, project: subject.project)
subject.project.reload
end
@@ -1310,7 +1326,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
subject.project.add_developer(subject.author)
commit = double(:commit, safe_message: 'Fixes TEST-3')
- create(:jira_service, project: subject.project)
+ create(:jira_integration, project: subject.project)
subject.project.reload
allow(subject).to receive(:commits).and_return([commit])
@@ -1898,7 +1914,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'has ci' do
it 'returns true if MR has head_pipeline_id and commits' do
- allow(merge_request).to receive_message_chain(:source_project, :ci_service) { nil }
+ allow(merge_request).to receive_message_chain(:source_project, :ci_integration) { nil }
allow(merge_request).to receive(:head_pipeline_id) { double }
allow(merge_request).to receive(:has_no_commits?) { false }
@@ -1906,7 +1922,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
it 'returns true if MR has any pipeline and commits' do
- allow(merge_request).to receive_message_chain(:source_project, :ci_service) { nil }
+ allow(merge_request).to receive_message_chain(:source_project, :ci_integration) { nil }
allow(merge_request).to receive(:head_pipeline_id) { nil }
allow(merge_request).to receive(:has_no_commits?) { false }
allow(merge_request).to receive(:all_pipelines) { [double] }
@@ -1914,8 +1930,8 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(merge_request.has_ci?).to be(true)
end
- it 'returns true if MR has CI service and commits' do
- allow(merge_request).to receive_message_chain(:source_project, :ci_service) { double }
+ it 'returns true if MR has CI integration and commits' do
+ allow(merge_request).to receive_message_chain(:source_project, :ci_integration) { double }
allow(merge_request).to receive(:head_pipeline_id) { nil }
allow(merge_request).to receive(:has_no_commits?) { false }
allow(merge_request).to receive(:all_pipelines) { [] }
@@ -1925,8 +1941,8 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
context 'has no ci' do
- it 'returns false if MR has no CI service nor pipeline, and no commits' do
- allow(merge_request).to receive_message_chain(:source_project, :ci_service) { nil }
+ it 'returns false if MR has no CI integration nor pipeline, and no commits' do
+ allow(merge_request).to receive_message_chain(:source_project, :ci_integration) { nil }
allow(merge_request).to receive(:head_pipeline_id) { nil }
allow(merge_request).to receive(:all_pipelines) { [] }
allow(merge_request).to receive(:has_no_commits?) { true }
@@ -2067,14 +2083,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
let(:merge_request) { create(:merge_request, :with_codequality_mr_diff_reports) }
it { is_expected.to be_truthy }
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(codequality_mr_diff: false)
- end
-
- it { is_expected.to be_falsey }
- end
end
context 'when head pipeline does not have codequality mr diff report' do
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index 7cf7c360dff..bc592acc80f 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -3,10 +3,9 @@
require 'spec_helper'
RSpec.describe Milestone do
- let(:user) { create(:user) }
- let(:issue) { create(:issue, project: project) }
- let(:milestone) { create(:milestone, project: project) }
- let(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
it_behaves_like 'a timebox', :milestone do
describe "#uniqueness_of_title" do
@@ -92,6 +91,8 @@ RSpec.describe Milestone do
end
describe '.predefined_id?' do
+ let_it_be(:milestone) { create(:milestone, project: project) }
+
it 'returns true for a predefined Milestone ID' do
expect(Milestone.predefined_id?(described_class::Upcoming.id)).to be true
end
@@ -129,6 +130,8 @@ RSpec.describe Milestone do
end
describe "#percent_complete" do
+ let(:milestone) { create(:milestone, project: project) }
+
it "does not count open issues" do
milestone.issues << issue
expect(milestone.percent_complete).to eq(0)
@@ -145,24 +148,22 @@ RSpec.describe Milestone do
end
end
- describe '#expired?' do
+ describe '#expired? and #expired' do
context "expired" do
- before do
- allow(milestone).to receive(:due_date).and_return(Date.today.prev_year)
- end
+ let(:milestone) { build(:milestone, project: project, due_date: Date.today.prev_year) }
- it 'returns true when due_date is in the past' do
+ it 'returns true when due_date is in the past', :aggregate_failures do
expect(milestone.expired?).to be_truthy
+ expect(milestone.expired).to eq true
end
end
context "not expired" do
- before do
- allow(milestone).to receive(:due_date).and_return(Date.today.next_year)
- end
+ let(:milestone) { build(:milestone, project: project, due_date: Date.today.next_year) }
- it 'returns false when due_date is in the future' do
+ it 'returns false when due_date is in the future', :aggregate_failures do
expect(milestone.expired?).to be_falsey
+ expect(milestone.expired).to eq false
end
end
end
@@ -180,10 +181,8 @@ RSpec.describe Milestone do
end
describe '#can_be_closed?' do
- it { expect(milestone.can_be_closed?).to be_truthy }
- end
+ let_it_be(:milestone) { build(:milestone, project: project) }
- describe '#can_be_closed?' do
before do
milestone = create :milestone, project: project
create :closed_issue, milestone: milestone, project: project
@@ -335,10 +334,10 @@ RSpec.describe Milestone do
it_behaves_like '#for_projects_and_groups'
describe '.upcoming_ids' do
- let(:group_1) { create(:group) }
- let(:group_2) { create(:group) }
- let(:group_3) { create(:group) }
- let(:groups) { [group_1, group_2, group_3] }
+ let_it_be(:group_1) { create(:group) }
+ let_it_be(:group_2) { create(:group) }
+ let_it_be(:group_3) { create(:group) }
+ let_it_be(:groups) { [group_1, group_2, group_3] }
let!(:past_milestone_group_1) { create(:milestone, group: group_1, due_date: Time.current - 1.day) }
let!(:current_milestone_group_1) { create(:milestone, group: group_1, due_date: Time.current + 1.day) }
@@ -350,10 +349,10 @@ RSpec.describe Milestone do
let!(:past_milestone_group_3) { create(:milestone, group: group_3, due_date: Time.current - 1.day) }
- let(:project_1) { create(:project) }
- let(:project_2) { create(:project) }
- let(:project_3) { create(:project) }
- let(:projects) { [project_1, project_2, project_3] }
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+ let_it_be(:project_3) { create(:project) }
+ let_it_be(:projects) { [project_1, project_2, project_3] }
let!(:past_milestone_project_1) { create(:milestone, project: project_1, due_date: Time.current - 1.day) }
let!(:current_milestone_project_1) { create(:milestone, project: project_1, due_date: Time.current + 1.day) }
@@ -451,6 +450,32 @@ RSpec.describe Milestone do
end
end
+ describe '.sort_with_expired_last' do
+ let_it_be(:milestone) { create(:milestone, title: 'Due today', due_date: Date.current) }
+ let_it_be(:milestone_1) { create(:milestone, title: 'Current 1', due_date: Date.current + 1.day) }
+ let_it_be(:milestone_2) { create(:milestone, title: 'Current 2', due_date: Date.current + 2.days) }
+ let_it_be(:milestone_3) { create(:milestone, title: 'Without due date') }
+ let_it_be(:milestone_4) { create(:milestone, title: 'Expired 1', due_date: Date.current - 2.days) }
+ let_it_be(:milestone_5) { create(:milestone, title: 'Expired 2', due_date: Date.current - 1.day) }
+ let_it_be(:milestone_6) { create(:milestone, title: 'Without due date2') }
+
+ context 'ordering by due_date ascending' do
+ it 'sorts by due date in ascending order (ties broken by id in desc order)', :aggregate_failures do
+ expect(milestone_3.id).to be < (milestone_6.id)
+ expect(described_class.sort_with_expired_last(:expired_last_due_date_asc))
+ .to eq([milestone, milestone_1, milestone_2, milestone_6, milestone_3, milestone_4, milestone_5])
+ end
+ end
+
+ context 'ordering by due_date descending' do
+ it 'sorts by due date in descending order (ties broken by id in desc order)', :aggregate_failures do
+ expect(milestone_3.id).to be < (milestone_6.id)
+ expect(described_class.sort_with_expired_last(:expired_last_due_date_desc))
+ .to eq([milestone_2, milestone_1, milestone, milestone_6, milestone_3, milestone_5, milestone_4])
+ end
+ end
+ end
+
describe '.sort_by_attribute' do
let_it_be(:milestone_1) { create(:milestone, title: 'Foo') }
let_it_be(:milestone_2) { create(:milestone, title: 'Bar') }
diff --git a/spec/models/namespace/root_storage_statistics_spec.rb b/spec/models/namespace/root_storage_statistics_spec.rb
index b725d2366a1..51c191069ec 100644
--- a/spec/models/namespace/root_storage_statistics_spec.rb
+++ b/spec/models/namespace/root_storage_statistics_spec.rb
@@ -99,6 +99,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
context 'with a personal namespace' do
let_it_be(:user) { create(:user) }
+
let(:namespace) { user.namespace }
it_behaves_like 'data refresh'
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 373f3a89e14..ea1ce067e4d 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -156,7 +156,7 @@ RSpec.describe Namespace do
end
end
- describe 'scopes' do
+ describe 'scopes', :aggregate_failures do
let_it_be(:namespace1) { create(:group, name: 'Namespace 1', path: 'namespace-1') }
let_it_be(:namespace2) { create(:group, name: 'Namespace 2', path: 'namespace-2') }
let_it_be(:namespace1sub) { create(:group, name: 'Sub Namespace', path: 'sub-namespace', parent: namespace1) }
@@ -181,6 +181,15 @@ RSpec.describe Namespace do
expect(described_class.filter_by_path(namespace1.path.upcase)).to eq([namespace1])
end
end
+
+ describe '.sorted_by_similarity_and_parent_id_desc' do
+ it 'returns exact matches and top level groups first' do
+ expect(described_class.sorted_by_similarity_and_parent_id_desc(namespace1.path)).to eq([namespace1, namespace2, namespace2sub, namespace1sub, namespace])
+ expect(described_class.sorted_by_similarity_and_parent_id_desc(namespace2.path)).to eq([namespace2, namespace1, namespace2sub, namespace1sub, namespace])
+ expect(described_class.sorted_by_similarity_and_parent_id_desc(namespace2sub.name)).to eq([namespace2sub, namespace1sub, namespace2, namespace1, namespace])
+ expect(described_class.sorted_by_similarity_and_parent_id_desc('Namespace')).to eq([namespace2, namespace1, namespace2sub, namespace1sub, namespace])
+ end
+ end
end
describe 'delegate' do
@@ -965,6 +974,14 @@ RSpec.describe Namespace do
end
end
+ shared_examples 'disabled feature flag when traversal_ids is blank' do
+ before do
+ namespace.traversal_ids = []
+ end
+
+ it { is_expected.to eq false }
+ end
+
describe '#use_traversal_ids?' do
let_it_be(:namespace, reload: true) { create(:namespace) }
@@ -976,6 +993,8 @@ RSpec.describe Namespace do
end
it { is_expected.to eq true }
+
+ it_behaves_like 'disabled feature flag when traversal_ids is blank'
end
context 'when use_traversal_ids feature flag is false' do
@@ -987,6 +1006,62 @@ RSpec.describe Namespace do
end
end
+ describe '#use_traversal_ids_for_root_ancestor?' do
+ let_it_be(:namespace, reload: true) { create(:namespace) }
+
+ subject { namespace.use_traversal_ids_for_root_ancestor? }
+
+ context 'when use_traversal_ids_for_root_ancestor feature flag is true' do
+ before do
+ stub_feature_flags(use_traversal_ids_for_root_ancestor: true)
+ end
+
+ it { is_expected.to eq true }
+
+ it_behaves_like 'disabled feature flag when traversal_ids is blank'
+ end
+
+ context 'when use_traversal_ids_for_root_ancestor feature flag is false' do
+ before do
+ stub_feature_flags(use_traversal_ids_for_root_ancestor: false)
+ end
+
+ it { is_expected.to eq false }
+ end
+ end
+
+ describe '#use_traversal_ids_for_ancestors?' do
+ let_it_be(:namespace, reload: true) { create(:namespace) }
+
+ subject { namespace.use_traversal_ids_for_ancestors? }
+
+ context 'when use_traversal_ids_for_ancestors? feature flag is true' do
+ before do
+ stub_feature_flags(use_traversal_ids_for_ancestors: true)
+ end
+
+ it { is_expected.to eq true }
+
+ it_behaves_like 'disabled feature flag when traversal_ids is blank'
+ end
+
+ context 'when use_traversal_ids_for_ancestors? feature flag is false' do
+ before do
+ stub_feature_flags(use_traversal_ids_for_ancestors: false)
+ end
+
+ it { is_expected.to eq false }
+ end
+
+ context 'when use_traversal_ids? feature flag is false' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
+
+ it { is_expected.to eq false }
+ end
+ end
+
describe '#users_with_descendants' do
let(:user_a) { create(:user) }
let(:user_b) { create(:user) }
@@ -1058,6 +1133,14 @@ RSpec.describe Namespace do
end
include_examples '#all_projects'
+
+ # Using #self_and_descendant instead of #self_and_descendant_ids can produce
+ # very slow queries.
+ it 'calls self_and_descendant_ids' do
+ namespace = create(:group)
+ expect(namespace).to receive(:self_and_descendant_ids)
+ namespace.all_projects
+ end
end
context 'with use_traversal_ids feature flag disabled' do
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index d9f566f9383..2afe9a0f29b 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -455,6 +455,7 @@ RSpec.describe Note do
describe "#system_note_viewable_by?(user)" do
let_it_be(:note) { create(:note) }
let_it_be(:user) { create(:user) }
+
let!(:metadata) { create(:system_note_metadata, note: note, action: "branch") }
context "when system_note_metadata is not present" do
@@ -536,6 +537,7 @@ RSpec.describe Note do
context "when there is a reference to a label" do
let_it_be(:private_label) { create(:label, project: private_project) }
+
let(:note) do
create :note,
noteable: ext_issue, project: ext_proj,
@@ -550,6 +552,7 @@ RSpec.describe Note do
context "when there are two references in note" do
let_it_be(:ext_issue2) { create(:issue, project: ext_proj) }
+
let(:note) do
create :note,
noteable: ext_issue2, project: ext_proj,
@@ -1239,6 +1242,7 @@ RSpec.describe Note do
describe 'expiring ETag cache' do
let_it_be(:issue) { create(:issue) }
+
let(:note) { build(:note, project: issue.project, noteable: issue) }
def expect_expiration(noteable)
diff --git a/spec/models/notification_setting_spec.rb b/spec/models/notification_setting_spec.rb
index 010b7455f85..3f1684327e7 100644
--- a/spec/models/notification_setting_spec.rb
+++ b/spec/models/notification_setting_spec.rb
@@ -51,6 +51,7 @@ RSpec.describe NotificationSetting do
context 'notification_email' do
let_it_be(:user) { create(:user) }
+
subject { described_class.new(source_id: 1, source_type: 'Project', user_id: user.id) }
it 'allows to change email to verified one' do
diff --git a/spec/models/operations/feature_flag_spec.rb b/spec/models/operations/feature_flag_spec.rb
index 55682e12642..cb9da2aea34 100644
--- a/spec/models/operations/feature_flag_spec.rb
+++ b/spec/models/operations/feature_flag_spec.rb
@@ -251,6 +251,7 @@ RSpec.describe Operations::FeatureFlag do
describe '.for_unleash_client' do
let_it_be(:project) { create(:project) }
+
let!(:feature_flag) do
create(:operations_feature_flag, project: project,
name: 'feature1', active: true, version: 2)
diff --git a/spec/models/packages/package_file_spec.rb b/spec/models/packages/package_file_spec.rb
index 7f2f22c815c..ee0aeb26d50 100644
--- a/spec/models/packages/package_file_spec.rb
+++ b/spec/models/packages/package_file_spec.rb
@@ -5,6 +5,7 @@ RSpec.describe Packages::PackageFile, type: :model do
let_it_be(:project) { create(:project) }
let_it_be(:package_file1) { create(:package_file, :xml, file_name: 'FooBar') }
let_it_be(:package_file2) { create(:package_file, :xml, file_name: 'ThisIsATest') }
+ let_it_be(:package_file3) { create(:package_file, :xml, file_name: 'formatted.zip') }
let_it_be(:debian_package) { create(:debian_package, project: project) }
describe 'relationships' do
@@ -36,6 +37,12 @@ RSpec.describe Packages::PackageFile, type: :model do
it { is_expected.to match_array([package_file1]) }
end
+
+ describe '.with_format' do
+ subject { described_class.with_format('zip') }
+
+ it { is_expected.to contain_exactly(package_file3) }
+ end
end
context 'updating project statistics' do
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index b2c1d51e4af..449e30f9fb7 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -1006,37 +1006,4 @@ RSpec.describe Packages::Package, type: :model do
it_behaves_like 'not enqueuing a sync worker job'
end
end
-
- context 'destroying a composer package' do
- let_it_be(:package_name) { 'composer-package-name' }
- let_it_be(:json) { { 'name' => package_name } }
- let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json } ) }
-
- let!(:package) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
-
- before do
- Gitlab::Composer::Cache.new(project: project, name: package_name).execute
- package.composer_metadatum.reload
- end
-
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(disable_composer_callback: false)
- end
-
- it 'schedule the update job' do
- expect(::Packages::Composer::CacheUpdateWorker).to receive(:perform_async).with(project.id, package_name, package.composer_metadatum.version_cache_sha)
-
- package.destroy!
- end
- end
-
- context 'with feature flag enabled' do
- it 'does nothing' do
- expect(::Packages::Composer::CacheUpdateWorker).not_to receive(:perform_async)
-
- package.destroy!
- end
- end
- end
end
diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb
index cf8e30023eb..72fda2280e5 100644
--- a/spec/models/plan_limits_spec.rb
+++ b/spec/models/plan_limits_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe PlanLimits do
let_it_be(:project) { create(:project) }
let_it_be(:plan_limits) { create(:plan_limits) }
+
let(:project_hooks_count) { 2 }
before do
@@ -184,6 +185,7 @@ RSpec.describe PlanLimits do
ci_max_artifact_size_junit
ci_max_artifact_size_sast
ci_max_artifact_size_dast
+ ci_max_artifact_size_cluster_image_scanning
ci_max_artifact_size_codequality
ci_max_artifact_size_license_management
ci_max_artifact_size_performance
diff --git a/spec/models/plan_spec.rb b/spec/models/plan_spec.rb
index 490c6b1bbf7..73e88a17e24 100644
--- a/spec/models/plan_spec.rb
+++ b/spec/models/plan_spec.rb
@@ -15,6 +15,29 @@ RSpec.describe Plan do
end
end
+ describe '#default' do
+ context 'when default plan exists' do
+ let!(:default_plan) { create(:default_plan) }
+
+ it 'returns default plan' do
+ expect(described_class.default).to eq(default_plan)
+ end
+ end
+
+ context 'when default plan does not exist' do
+ it 'creates default plan' do
+ expect { described_class.default }.to change { Plan.count }.by(1)
+ end
+
+ it 'creates plan with correct attributes' do
+ plan = described_class.default
+
+ expect(plan.name).to eq(Plan::DEFAULT)
+ expect(plan.title).to eq(Plan::DEFAULT.titleize)
+ end
+ end
+ end
+
context 'when updating plan limits' do
let(:plan) { described_class.default }
diff --git a/spec/models/project_ci_cd_setting_spec.rb b/spec/models/project_ci_cd_setting_spec.rb
index c206ba27ec1..caab182cda8 100644
--- a/spec/models/project_ci_cd_setting_spec.rb
+++ b/spec/models/project_ci_cd_setting_spec.rb
@@ -22,8 +22,8 @@ RSpec.describe ProjectCiCdSetting do
end
describe '#job_token_scope_enabled' do
- it 'is true by default' do
- expect(described_class.new.job_token_scope_enabled).to be_truthy
+ it 'is false by default' do
+ expect(described_class.new.job_token_scope_enabled).to be_falsey
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 144b00e1d2e..efa269cdb5c 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -35,14 +35,14 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:hooks) }
it { is_expected.to have_many(:protected_branches) }
it { is_expected.to have_many(:exported_protected_branches) }
- it { is_expected.to have_one(:slack_service) }
- it { is_expected.to have_one(:microsoft_teams_service) }
- it { is_expected.to have_one(:mattermost_service) }
+ it { is_expected.to have_one(:slack_integration) }
+ it { is_expected.to have_one(:microsoft_teams_integration) }
+ it { is_expected.to have_one(:mattermost_integration) }
it { is_expected.to have_one(:hangouts_chat_integration) }
- it { is_expected.to have_one(:unify_circuit_service) }
- it { is_expected.to have_one(:webex_teams_service) }
- it { is_expected.to have_one(:packagist_service) }
- it { is_expected.to have_one(:pushover_service) }
+ it { is_expected.to have_one(:unify_circuit_integration) }
+ it { is_expected.to have_one(:webex_teams_integration) }
+ it { is_expected.to have_one(:packagist_integration) }
+ it { is_expected.to have_one(:pushover_integration) }
it { is_expected.to have_one(:asana_integration) }
it { is_expected.to have_many(:boards) }
it { is_expected.to have_one(:campfire_integration) }
@@ -50,19 +50,19 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_one(:discord_integration) }
it { is_expected.to have_one(:drone_ci_integration) }
it { is_expected.to have_one(:emails_on_push_integration) }
- it { is_expected.to have_one(:pipelines_email_service) }
+ it { is_expected.to have_one(:pipelines_email_integration) }
it { is_expected.to have_one(:irker_integration) }
- it { is_expected.to have_one(:pivotaltracker_service) }
+ it { is_expected.to have_one(:pivotaltracker_integration) }
it { is_expected.to have_one(:flowdock_integration) }
it { is_expected.to have_one(:assembla_integration) }
- it { is_expected.to have_one(:slack_slash_commands_service) }
- it { is_expected.to have_one(:mattermost_slash_commands_service) }
+ it { is_expected.to have_one(:slack_slash_commands_integration) }
+ it { is_expected.to have_one(:mattermost_slash_commands_integration) }
it { is_expected.to have_one(:buildkite_integration) }
it { is_expected.to have_one(:bamboo_integration) }
- it { is_expected.to have_one(:teamcity_service) }
- it { is_expected.to have_one(:jira_service) }
- it { is_expected.to have_one(:redmine_service) }
- it { is_expected.to have_one(:youtrack_service) }
+ it { is_expected.to have_one(:teamcity_integration) }
+ it { is_expected.to have_one(:jira_integration) }
+ it { is_expected.to have_one(:redmine_integration) }
+ it { is_expected.to have_one(:youtrack_integration) }
it { is_expected.to have_one(:custom_issue_tracker_integration) }
it { is_expected.to have_one(:bugzilla_integration) }
it { is_expected.to have_one(:ewm_integration) }
@@ -80,6 +80,8 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_one(:error_tracking_setting).class_name('ErrorTracking::ProjectErrorTrackingSetting') }
it { is_expected.to have_one(:project_setting) }
it { is_expected.to have_one(:alerting_setting).class_name('Alerting::ProjectAlertingSetting') }
+ it { is_expected.to have_one(:mock_ci_integration) }
+ it { is_expected.to have_one(:mock_monitoring_integration) }
it { is_expected.to have_many(:commit_statuses) }
it { is_expected.to have_many(:ci_pipelines) }
it { is_expected.to have_many(:ci_refs) }
@@ -656,12 +658,51 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to delegate_method(:container_registry_enabled?).to(:project_feature) }
it { is_expected.to delegate_method(:container_registry_access_level).to(:project_feature) }
- context 'when read_container_registry_access_level is disabled' do
- before do
- stub_feature_flags(read_container_registry_access_level: false)
+ include_examples 'ci_cd_settings delegation' do
+ # Skip attributes defined in EE code
+ let(:exclude_attributes) do
+ %w(
+ merge_pipelines_enabled
+ merge_trains_enabled
+ auto_rollback_enabled
+ )
+ end
+ end
+
+ describe '#ci_forward_deployment_enabled?' do
+ it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_' do
+ let(:delegated_method) { :forward_deployment_enabled? }
+ end
+ end
+
+ describe '#ci_job_token_scope_enabled?' do
+ it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_' do
+ let(:delegated_method) { :job_token_scope_enabled? }
+ end
+ end
+
+ describe '#restrict_user_defined_variables?' do
+ it_behaves_like 'a ci_cd_settings predicate method' do
+ let(:delegated_method) { :restrict_user_defined_variables? }
+ end
+ end
+
+ describe '#keep_latest_artifacts_available?' do
+ it_behaves_like 'a ci_cd_settings predicate method' do
+ let(:delegated_method) { :keep_latest_artifacts_available? }
+ end
+ end
+
+ describe '#keep_latest_artifact?' do
+ it_behaves_like 'a ci_cd_settings predicate method' do
+ let(:delegated_method) { :keep_latest_artifact? }
end
+ end
- it { is_expected.not_to delegate_method(:container_registry_enabled?).to(:project_feature) }
+ describe '#group_runners_enabled?' do
+ it_behaves_like 'a ci_cd_settings predicate method' do
+ let(:delegated_method) { :group_runners_enabled? }
+ end
end
end
@@ -1444,13 +1485,13 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '.with_active_jira_services' do
- it 'returns the correct project' do
- active_jira_service = create(:jira_service)
+ describe '.with_active_jira_integrations' do
+ it 'returns the correct integrations' do
+ active_jira_integration = create(:jira_integration)
active_service = create(:service, active: true)
- expect(described_class.with_active_jira_services).to include(active_jira_service.project)
- expect(described_class.with_active_jira_services).not_to include(active_service.project)
+ expect(described_class.with_active_jira_integrations).to include(active_jira_integration.project)
+ expect(described_class.with_active_jira_integrations).not_to include(active_service.project)
end
end
@@ -1555,13 +1596,16 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '.with_service' do
+ describe '.with_integration' do
before do
create_list(:prometheus_project, 2)
end
- it 'avoid n + 1' do
- expect { described_class.with_service(:prometheus_service).map(&:prometheus_service) }.not_to exceed_query_limit(1)
+ let(:integration) { :prometheus_integration }
+
+ it 'avoids n + 1' do
+ expect { described_class.with_integration(integration).map(&integration) }
+ .not_to exceed_query_limit(1)
end
end
@@ -2403,20 +2447,6 @@ RSpec.describe Project, factory_default: :keep do
expect(project.container_registry_enabled).to eq(false)
expect(project.container_registry_enabled?).to eq(false)
end
-
- context 'with read_container_registry_access_level disabled' do
- before do
- stub_feature_flags(read_container_registry_access_level: false)
- end
-
- it 'reads project.container_registry_enabled' do
- project.update_column(:container_registry_enabled, true)
- project.project_feature.update_column(:container_registry_access_level, ProjectFeature::DISABLED)
-
- expect(project.container_registry_enabled).to eq(true)
- expect(project.container_registry_enabled?).to eq(true)
- end
- end
end
describe '#has_container_registry_tags?' do
@@ -3083,8 +3113,8 @@ RSpec.describe Project, factory_default: :keep do
context 'LFS disabled in group' do
before do
+ stub_lfs_setting(enabled: true)
project.namespace.update_attribute(:lfs_enabled, false)
- enable_lfs
end
it_behaves_like 'project overrides group'
@@ -3092,14 +3122,18 @@ RSpec.describe Project, factory_default: :keep do
context 'LFS enabled in group' do
before do
+ stub_lfs_setting(enabled: true)
project.namespace.update_attribute(:lfs_enabled, true)
- enable_lfs
end
it_behaves_like 'project overrides group'
end
describe 'LFS disabled globally' do
+ before do
+ stub_lfs_setting(enabled: false)
+ end
+
shared_examples 'it always returns false' do
it do
expect(project.lfs_enabled?).to be_falsey
@@ -3896,10 +3930,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
- def enable_lfs
- allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
- end
-
describe '#pages_url' do
let(:group) { create(:group, name: 'Group') }
let(:nested_group) { create(:group, parent: group) }
@@ -5350,27 +5380,27 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#execute_services' do
- let(:service) { create(:slack_service, push_events: true, merge_requests_events: false, active: true) }
+ describe '#execute_integrations' do
+ let(:integration) { create(:integrations_slack, push_events: true, merge_requests_events: false, active: true) }
- it 'executes services with the specified scope' do
+ it 'executes integrations with the specified scope' do
data = 'any data'
expect_next_found_instance_of(Integrations::Slack) do |instance|
expect(instance).to receive(:async_execute).with(data).once
end
- service.project.execute_services(data, :push_hooks)
+ integration.project.execute_integrations(data, :push_hooks)
end
- it 'does not execute services that don\'t match the specified scope' do
+ it 'does not execute integration that don\'t match the specified scope' do
expect(Integrations::Slack).not_to receive(:allocate).and_wrap_original do |method|
method.call.tap do |instance|
expect(instance).not_to receive(:async_execute)
end
end
- service.project.execute_services(anything, :merge_request_hooks)
+ integration.project.execute_integrations(anything, :merge_request_hooks)
end
end
@@ -5401,16 +5431,16 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#has_active_services?' do
+ describe '#has_active_integrations?' do
let_it_be(:project) { create(:project) }
- it { expect(project.has_active_services?).to be_falsey }
+ it { expect(project.has_active_integrations?).to be_falsey }
it 'returns true when a matching service exists' do
create(:custom_issue_tracker_integration, push_events: true, merge_requests_events: false, project: project)
- expect(project.has_active_services?(:merge_request_hooks)).to be_falsey
- expect(project.has_active_services?).to be_truthy
+ expect(project.has_active_integrations?(:merge_request_hooks)).to be_falsey
+ expect(project.has_active_integrations?).to be_truthy
end
end
@@ -5820,112 +5850,92 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#find_or_initialize_services' do
+ describe '#find_or_initialize_integrations' do
let_it_be(:subject) { create(:project) }
it 'avoids N+1 database queries' do
- control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_services }.count
+ control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integrations }.count
expect(control_count).to be <= 4
end
- it 'avoids N+1 database queries with more available services' do
- allow(Integration).to receive(:available_services_names).and_return(%w[pushover])
- control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_services }
-
- allow(Integration).to receive(:available_services_names).and_call_original
- expect { subject.find_or_initialize_services }.not_to exceed_query_limit(control_count)
- end
-
- context 'with disabled services' do
- before do
- allow(Integration).to receive(:available_services_names).and_return(%w[prometheus pushover teamcity])
- allow(subject).to receive(:disabled_services).and_return(%w[prometheus])
- end
-
- it 'returns only enabled services sorted' do
- services = subject.find_or_initialize_services
+ it 'avoids N+1 database queries with more available integrations' do
+ allow(Integration).to receive(:available_integration_names).and_return(%w[pushover])
+ control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integrations }
- expect(services.size).to eq(2)
- expect(services.map(&:title)).to eq(['JetBrains TeamCity', 'Pushover'])
- end
+ allow(Integration).to receive(:available_integration_names).and_call_original
+ expect { subject.find_or_initialize_integrations }.not_to exceed_query_limit(control_count)
end
- end
-
- describe '#disabled_services' do
- subject { build(:project).disabled_services }
- context 'without datadog_ci_integration' do
+ context 'with disabled integrations' do
before do
- stub_feature_flags(datadog_ci_integration: false)
+ allow(Integration).to receive(:available_integration_names).and_return(%w[prometheus pushover teamcity])
+ allow(subject).to receive(:disabled_integrations).and_return(%w[prometheus])
end
- it { is_expected.to include('datadog') }
- end
-
- context 'with datadog_ci_integration' do
- before do
- stub_feature_flags(datadog_ci_integration: true)
+ it 'returns only enabled integrations sorted' do
+ expect(subject.find_or_initialize_integrations).to match [
+ have_attributes(title: 'JetBrains TeamCity'),
+ have_attributes(title: 'Pushover')
+ ]
end
-
- it { is_expected.not_to include('datadog') }
end
end
- describe '#find_or_initialize_service' do
+ describe '#find_or_initialize_integration' do
it 'avoids N+1 database queries' do
- allow(Integration).to receive(:available_services_names).and_return(%w[prometheus pushover])
+ allow(Integration).to receive(:available_integration_names).and_return(%w[prometheus pushover])
- control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_service('prometheus') }.count
+ control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integration('prometheus') }.count
- allow(Integration).to receive(:available_services_names).and_call_original
+ allow(Integration).to receive(:available_integration_names).and_call_original
- expect { subject.find_or_initialize_service('prometheus') }.not_to exceed_query_limit(control_count)
+ expect { subject.find_or_initialize_integration('prometheus') }.not_to exceed_query_limit(control_count)
end
it 'returns nil if integration is disabled' do
- allow(subject).to receive(:disabled_services).and_return(%w[prometheus])
+ allow(subject).to receive(:disabled_integrations).and_return(%w[prometheus])
- expect(subject.find_or_initialize_service('prometheus')).to be_nil
+ expect(subject.find_or_initialize_integration('prometheus')).to be_nil
end
context 'with an existing integration' do
subject { create(:project) }
before do
- create(:prometheus_service, project: subject, api_url: 'https://prometheus.project.com/')
+ create(:prometheus_integration, project: subject, api_url: 'https://prometheus.project.com/')
end
it 'retrieves the integration' do
- expect(subject.find_or_initialize_service('prometheus').api_url).to eq('https://prometheus.project.com/')
+ expect(subject.find_or_initialize_integration('prometheus').api_url).to eq('https://prometheus.project.com/')
end
end
context 'with an instance-level and template integrations' do
before do
- create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/')
- create(:prometheus_service, :template, api_url: 'https://prometheus.template.com/')
+ create(:prometheus_integration, :instance, api_url: 'https://prometheus.instance.com/')
+ create(:prometheus_integration, :template, api_url: 'https://prometheus.template.com/')
end
- it 'builds the service from the instance if exists' do
- expect(subject.find_or_initialize_service('prometheus').api_url).to eq('https://prometheus.instance.com/')
+ it 'builds the integration from the instance integration' do
+ expect(subject.find_or_initialize_integration('prometheus').api_url).to eq('https://prometheus.instance.com/')
end
end
- context 'with an instance-level and template integrations' do
+ context 'with a template integration and no instance-level' do
before do
- create(:prometheus_service, :template, api_url: 'https://prometheus.template.com/')
+ create(:prometheus_integration, :template, api_url: 'https://prometheus.template.com/')
end
- it 'builds the service from the template if instance does not exists' do
- expect(subject.find_or_initialize_service('prometheus').api_url).to eq('https://prometheus.template.com/')
+ it 'builds the integration from the template' do
+ expect(subject.find_or_initialize_integration('prometheus').api_url).to eq('https://prometheus.template.com/')
end
end
- context 'without an exisiting integration, nor instance-level or template' do
- it 'builds the service if instance or template does not exists' do
- expect(subject.find_or_initialize_service('prometheus')).to be_a(PrometheusService)
- expect(subject.find_or_initialize_service('prometheus').api_url).to be_nil
+ context 'without an exisiting integration, or instance-level or template' do
+ it 'builds the integration' do
+ expect(subject.find_or_initialize_integration('prometheus')).to be_a(::Integrations::Prometheus)
+ expect(subject.find_or_initialize_integration('prometheus').api_url).to be_nil
end
end
end
@@ -6605,25 +6615,25 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#prometheus_service_active?' do
+ describe '#prometheus_integration_active?' do
let(:project) { create(:project) }
- subject { project.prometheus_service_active? }
+ subject { project.prometheus_integration_active? }
before do
- create(:prometheus_service, project: project, manual_configuration: manual_configuration)
+ create(:prometheus_integration, project: project, manual_configuration: manual_configuration)
end
- context 'when project has an activated prometheus service' do
+ context 'when project has an activated prometheus integration' do
let(:manual_configuration) { true }
it { is_expected.to be_truthy }
end
- context 'when project has an inactive prometheus service' do
+ context 'when project has an inactive prometheus integration' do
let(:manual_configuration) { false }
- it 'the service is marked as inactive' do
+ it 'the integration is marked as inactive' do
expect(subject).to be_falsey
end
end
diff --git a/spec/models/prometheus_alert_spec.rb b/spec/models/prometheus_alert_spec.rb
index 8e517e1764e..bfe2c7cc2a4 100644
--- a/spec/models/prometheus_alert_spec.rb
+++ b/spec/models/prometheus_alert_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe PrometheusAlert do
let_it_be(:project) { build(:project) }
+
let(:metric) { build(:prometheus_metric) }
describe '.distinct_projects' do
diff --git a/spec/models/protected_branch/push_access_level_spec.rb b/spec/models/protected_branch/push_access_level_spec.rb
index fa84cd660cb..13d33b95b16 100644
--- a/spec/models/protected_branch/push_access_level_spec.rb
+++ b/spec/models/protected_branch/push_access_level_spec.rb
@@ -40,6 +40,7 @@ RSpec.describe ProtectedBranch::PushAccessLevel do
let_it_be(:protected_branch) { create(:protected_branch, :no_one_can_push, project: project) }
let_it_be(:user) { create(:user) }
let_it_be(:deploy_key) { create(:deploy_key, user: user) }
+
let!(:deploy_keys_project) { create(:deploy_keys_project, project: project, deploy_key: deploy_key, can_push: can_push) }
let(:can_push) { true }
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index c896b6c0c6c..452eafe733f 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -41,6 +41,7 @@ RSpec.describe Repository do
describe '#branch_names_contains' do
let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository }
subject { repository.branch_names_contains(sample_commit.id) }
@@ -398,6 +399,7 @@ RSpec.describe Repository do
describe '#new_commits' do
let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository }
subject { repository.new_commits(rev) }
@@ -426,6 +428,7 @@ RSpec.describe Repository do
describe '#commits_by' do
let_it_be(:project) { create(:project, :repository) }
+
let(:oids) { TestEnv::BRANCH_SHA.values }
subject { project.repository.commits_by(oids: oids) }
@@ -2990,6 +2993,7 @@ RSpec.describe Repository do
describe '#merge_base' do
let_it_be(:project) { create(:project, :repository) }
+
subject(:repository) { project.repository }
it 'only makes one gitaly call' do
@@ -3088,6 +3092,7 @@ RSpec.describe Repository do
describe "#blobs_metadata" do
let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository }
def expect_metadata_blob(thing)
diff --git a/spec/models/service_desk_setting_spec.rb b/spec/models/service_desk_setting_spec.rb
index 8ccbd983ba1..f99ac84175c 100644
--- a/spec/models/service_desk_setting_spec.rb
+++ b/spec/models/service_desk_setting_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe ServiceDeskSetting do
it { is_expected.to validate_length_of(:outgoing_name).is_at_most(255) }
it { is_expected.to validate_length_of(:project_key).is_at_most(255) }
it { is_expected.to allow_value('abc123_').for(:project_key) }
- it { is_expected.not_to allow_value('abc 12').for(:project_key) }
+ it { is_expected.not_to allow_value('abc 12').for(:project_key).with_message("can contain only lowercase letters, digits, and '_'.") }
it { is_expected.not_to allow_value('Big val').for(:project_key) }
describe '.valid_issue_template' do
diff --git a/spec/models/snippet_repository_spec.rb b/spec/models/snippet_repository_spec.rb
index 11196f06529..40a28b9e0cc 100644
--- a/spec/models/snippet_repository_spec.rb
+++ b/spec/models/snippet_repository_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe SnippetRepository do
let_it_be(:user) { create(:user) }
+
let(:snippet) { create(:personal_snippet, :repository, author: user) }
let(:snippet_repository) { snippet.snippet_repository }
let(:commit_opts) { { branch_name: 'master', message: 'whatever' } }
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index 06e9899c0bd..19d3895177f 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -722,6 +722,7 @@ RSpec.describe Snippet do
describe '#list_files' do
let_it_be(:snippet) { create(:snippet, :repository) }
+
let(:ref) { 'test-ref' }
subject { snippet.list_files(ref) }
@@ -827,14 +828,10 @@ RSpec.describe Snippet do
end
context 'when default branch in settings is different from "master"' do
- let(:default_branch) { 'main' }
+ let(:default_branch) { 'custom-branch' }
it 'changes the HEAD reference to the default branch' do
- expect(File.read(head_path).squish).to eq 'ref: refs/heads/master'
-
- subject
-
- expect(File.read(head_path).squish).to eq "ref: refs/heads/#{default_branch}"
+ expect { subject }.to change { File.read(head_path).squish }.to("ref: refs/heads/#{default_branch}")
end
end
end
diff --git a/spec/models/terraform/state_spec.rb b/spec/models/terraform/state_spec.rb
index 1319e2adb03..a113ae37203 100644
--- a/spec/models/terraform/state_spec.rb
+++ b/spec/models/terraform/state_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Terraform::State do
describe 'scopes' do
describe '.ordered_by_name' do
let_it_be(:project) { create(:project) }
+
let(:names) { %w(state_d state_b state_a state_c) }
subject { described_class.ordered_by_name }
diff --git a/spec/models/timelog_spec.rb b/spec/models/timelog_spec.rb
index bc042f7a639..9d6fda1d2a9 100644
--- a/spec/models/timelog_spec.rb
+++ b/spec/models/timelog_spec.rb
@@ -17,6 +17,8 @@ RSpec.describe Timelog do
it { is_expected.to validate_presence_of(:time_spent) }
it { is_expected.to validate_presence_of(:user) }
+ it { is_expected.to validate_length_of(:summary).is_at_most(255) }
+
it { expect(subject.project_id).not_to be_nil }
describe 'Issuable validation' do
diff --git a/spec/models/u2f_registration_spec.rb b/spec/models/u2f_registration_spec.rb
index 1f2e4d1e447..aba2f27d104 100644
--- a/spec/models/u2f_registration_spec.rb
+++ b/spec/models/u2f_registration_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe U2fRegistration do
let_it_be(:user) { create(:user) }
+
let(:u2f_registration) do
device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5))
create(:u2f_registration, name: 'u2f_device',
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index e86a9c262d8..0eb769c65cd 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -1005,6 +1005,7 @@ RSpec.describe User do
let_it_be(:valid_token_and_notified) { create(:personal_access_token, user: user2, expires_at: 2.days.from_now, expire_notification_delivered: true) }
let_it_be(:valid_token1) { create(:personal_access_token, user: user2, expires_at: 2.days.from_now) }
let_it_be(:valid_token2) { create(:personal_access_token, user: user2, expires_at: 2.days.from_now) }
+
let(:users) { described_class.with_expiring_and_not_notified_personal_access_tokens(from) }
context 'in one day' do
@@ -1898,6 +1899,14 @@ RSpec.describe User do
expect(user.deactivated?).to be_truthy
end
+
+ it 'sends deactivated user an email' do
+ expect_next_instance_of(NotificationService) do |notification|
+ allow(notification).to receive(:user_deactivated).with(user.name, user.notification_email)
+ end
+
+ user.deactivate
+ end
end
context "a user who is blocked" do
@@ -2826,6 +2835,14 @@ RSpec.describe User do
end
end
+ describe '#matches_identity?' do
+ it 'finds the identity when the DN is formatted differently' do
+ user = create(:omniauth_user, provider: 'ldapmain', extern_uid: 'uid=john smith,ou=people,dc=example,dc=com')
+
+ expect(user.matches_identity?('ldapmain', 'uid=John Smith, ou=People, dc=example, dc=com')).to eq(true)
+ end
+ end
+
describe '#ldap_block' do
let(:user) { create(:omniauth_user, provider: 'ldapmain', name: 'John Smith') }
@@ -4241,6 +4258,7 @@ RSpec.describe User do
describe '#source_groups_of_two_factor_authentication_requirement' do
let_it_be(:group_not_requiring_2FA) { create :group }
+
let(:user) { create :user }
before do
@@ -5258,11 +5276,43 @@ RSpec.describe User do
end
describe '#password_expired_if_applicable?' do
- let(:user) { build(:user, password_expires_at: password_expires_at) }
+ let(:user) { build(:user, password_expires_at: password_expires_at, password_automatically_set: set_automatically?) }
subject { user.password_expired_if_applicable? }
context 'when user is not ldap user' do
+ context 'when user has password set automatically' do
+ let(:set_automatically?) { true }
+
+ context 'when password_expires_at is not set' do
+ let(:password_expires_at) {}
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
+
+ context 'when password_expires_at is in the past' do
+ let(:password_expires_at) { 1.minute.ago }
+
+ it 'returns true' do
+ is_expected.to be_truthy
+ end
+ end
+
+ context 'when password_expires_at is in the future' do
+ let(:password_expires_at) { 1.minute.from_now }
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
+ end
+ end
+
+ context 'when user has password not set automatically' do
+ let(:set_automatically?) { false }
+
context 'when password_expires_at is not set' do
let(:password_expires_at) {}
@@ -5274,8 +5324,8 @@ RSpec.describe User do
context 'when password_expires_at is in the past' do
let(:password_expires_at) { 1.minute.ago }
- it 'returns true' do
- is_expected.to be_truthy
+ it 'returns false' do
+ is_expected.to be_falsey
end
end
@@ -5319,6 +5369,34 @@ RSpec.describe User do
end
end
end
+
+ context 'when user is a project bot' do
+ let(:user) { build(:user, :project_bot, password_expires_at: password_expires_at) }
+
+ context 'when password_expires_at is not set' do
+ let(:password_expires_at) {}
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
+
+ context 'when password_expires_at is in the past' do
+ let(:password_expires_at) { 1.minute.ago }
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
+
+ context 'when password_expires_at is in the future' do
+ let(:password_expires_at) { 1.minute.from_now }
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
+ end
end
describe '#read_only_attribute?' do
@@ -5787,6 +5865,20 @@ RSpec.describe User do
end
end
+ describe '#default_dashboard?' do
+ it 'is the default dashboard' do
+ user = build(:user)
+
+ expect(user.default_dashboard?).to be true
+ end
+
+ it 'is not the default dashboard' do
+ user = build(:user, dashboard: 'stars')
+
+ expect(user.default_dashboard?).to be false
+ end
+ end
+
describe '.dormant' do
it 'returns dormant users' do
freeze_time do
@@ -5829,4 +5921,17 @@ RSpec.describe User do
end
end
end
+
+ describe '.by_provider_and_extern_uid' do
+ it 'calls Identity model scope to ensure case-insensitive query', :aggregate_failures do
+ expected_user = create(:user)
+ create(:identity, extern_uid: 'some-other-name-id', provider: :github)
+ create(:identity, extern_uid: 'my_github_id', provider: :gitlab)
+ create(:identity)
+ create(:identity, user: expected_user, extern_uid: 'my_github_id', provider: :github)
+
+ expect(Identity).to receive(:with_extern_uid).and_call_original
+ expect(described_class.by_provider_and_extern_uid(:github, 'my_github_id')).to match_array([expected_user])
+ end
+ end
end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index 579a9e664cf..699dd35196f 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -201,11 +201,10 @@ RSpec.describe WikiPage do
expect(subject.errors.messages).to eq(title: ["can't be blank"])
end
- it "validates presence of content" do
+ it "does not validate presence of content" do
subject.attributes.delete(:content)
- expect(subject).not_to be_valid
- expect(subject.errors.messages).to eq(content: ["can't be blank"])
+ expect(subject).to be_valid
end
describe '#validate_content_size_limit' do
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index 85026ced466..122612df355 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -249,7 +249,7 @@ RSpec.describe GlobalPolicy do
context 'user with expired password' do
before do
- current_user.update!(password_expires_at: 2.minutes.ago)
+ current_user.update!(password_expires_at: 2.minutes.ago, password_automatically_set: true)
end
it { is_expected.not_to be_allowed(:access_api) }
@@ -445,7 +445,7 @@ RSpec.describe GlobalPolicy do
context 'user with expired password' do
before do
- current_user.update!(password_expires_at: 2.minutes.ago)
+ current_user.update!(password_expires_at: 2.minutes.ago, password_automatically_set: true)
end
it { is_expected.not_to be_allowed(:access_git) }
@@ -537,7 +537,7 @@ RSpec.describe GlobalPolicy do
context 'user with expired password' do
before do
- current_user.update!(password_expires_at: 2.minutes.ago)
+ current_user.update!(password_expires_at: 2.minutes.ago, password_automatically_set: true)
end
it { is_expected.not_to be_allowed(:use_slash_commands) }
diff --git a/spec/policies/integration_policy_spec.rb b/spec/policies/integration_policy_spec.rb
index d490045c1e1..ef628c1c4b1 100644
--- a/spec/policies/integration_policy_spec.rb
+++ b/spec/policies/integration_policy_spec.rb
@@ -9,8 +9,8 @@ RSpec.describe IntegrationPolicy, :models do
subject(:policy) { Ability.policy_for(user, integration) }
- context 'when the integration is a prometheus_service' do
- let(:integration) { create(:prometheus_service) }
+ context 'when the integration is a prometheus_integration' do
+ let(:integration) { create(:prometheus_integration) }
describe 'rules' do
it { is_expected.to be_disallowed :admin_project }
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index f3c92751d06..59123c3695a 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ProjectPolicy do
include ExternalAuthorizationServiceHelpers
+ include AdminModeHelper
include_context 'ProjectPolicy context'
let(:project) { public_project }
@@ -70,7 +71,7 @@ RSpec.describe ProjectPolicy do
context 'when external tracker configured' do
it 'does not include the issues permissions' do
- create(:jira_service, project: project)
+ create(:jira_integration, project: project)
expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue, :create_incident
end
@@ -1423,6 +1424,7 @@ RSpec.describe ProjectPolicy do
before do
current_user.set_ci_job_token_scope!(job)
+ scope_project.update!(ci_job_token_scope_enabled: true)
end
context 'when accessing a private project' do
@@ -1442,6 +1444,14 @@ RSpec.describe ProjectPolicy do
end
it { is_expected.to be_disallowed(:guest_access) }
+
+ context 'when job token scope is disabled' do
+ before do
+ scope_project.update!(ci_job_token_scope_enabled: false)
+ end
+
+ it { is_expected.to be_allowed(:guest_access) }
+ end
end
end
@@ -1462,6 +1472,14 @@ RSpec.describe ProjectPolicy do
end
it { is_expected.to be_disallowed(:public_access) }
+
+ context 'when job token scope is disabled' do
+ before do
+ scope_project.update!(ci_job_token_scope_enabled: false)
+ end
+
+ it { is_expected.to be_allowed(:public_access) }
+ end
end
end
end
@@ -1469,7 +1487,12 @@ RSpec.describe ProjectPolicy do
describe 'container_image policies' do
using RSpec::Parameterized::TableSyntax
- let(:guest_operations_permissions) { [:read_container_image] }
+ # These are permissions that admins should not have when the project is private
+ # or the container registry is private.
+ let(:admin_excluded_permissions) { [:build_read_container_image] }
+
+ let(:anonymous_operations_permissions) { [:read_container_image] }
+ let(:guest_operations_permissions) { anonymous_operations_permissions + [:build_read_container_image] }
let(:developer_operations_permissions) do
guest_operations_permissions + [
@@ -1483,47 +1506,67 @@ RSpec.describe ProjectPolicy do
]
end
+ let(:all_permissions) { maintainer_operations_permissions }
+
where(:project_visibility, :access_level, :role, :allowed) do
+ :public | ProjectFeature::ENABLED | :admin | true
+ :public | ProjectFeature::ENABLED | :owner | true
:public | ProjectFeature::ENABLED | :maintainer | true
:public | ProjectFeature::ENABLED | :developer | true
:public | ProjectFeature::ENABLED | :reporter | true
:public | ProjectFeature::ENABLED | :guest | true
:public | ProjectFeature::ENABLED | :anonymous | true
+ :public | ProjectFeature::PRIVATE | :admin | true
+ :public | ProjectFeature::PRIVATE | :owner | true
:public | ProjectFeature::PRIVATE | :maintainer | true
:public | ProjectFeature::PRIVATE | :developer | true
:public | ProjectFeature::PRIVATE | :reporter | true
:public | ProjectFeature::PRIVATE | :guest | false
:public | ProjectFeature::PRIVATE | :anonymous | false
+ :public | ProjectFeature::DISABLED | :admin | false
+ :public | ProjectFeature::DISABLED | :owner | false
:public | ProjectFeature::DISABLED | :maintainer | false
:public | ProjectFeature::DISABLED | :developer | false
:public | ProjectFeature::DISABLED | :reporter | false
:public | ProjectFeature::DISABLED | :guest | false
:public | ProjectFeature::DISABLED | :anonymous | false
+ :internal | ProjectFeature::ENABLED | :admin | true
+ :internal | ProjectFeature::ENABLED | :owner | true
:internal | ProjectFeature::ENABLED | :maintainer | true
:internal | ProjectFeature::ENABLED | :developer | true
:internal | ProjectFeature::ENABLED | :reporter | true
:internal | ProjectFeature::ENABLED | :guest | true
:internal | ProjectFeature::ENABLED | :anonymous | false
+ :internal | ProjectFeature::PRIVATE | :admin | true
+ :internal | ProjectFeature::PRIVATE | :owner | true
:internal | ProjectFeature::PRIVATE | :maintainer | true
:internal | ProjectFeature::PRIVATE | :developer | true
:internal | ProjectFeature::PRIVATE | :reporter | true
:internal | ProjectFeature::PRIVATE | :guest | false
:internal | ProjectFeature::PRIVATE | :anonymous | false
+ :internal | ProjectFeature::DISABLED | :admin | false
+ :internal | ProjectFeature::DISABLED | :owner | false
:internal | ProjectFeature::DISABLED | :maintainer | false
:internal | ProjectFeature::DISABLED | :developer | false
:internal | ProjectFeature::DISABLED | :reporter | false
:internal | ProjectFeature::DISABLED | :guest | false
:internal | ProjectFeature::DISABLED | :anonymous | false
+ :private | ProjectFeature::ENABLED | :admin | true
+ :private | ProjectFeature::ENABLED | :owner | true
:private | ProjectFeature::ENABLED | :maintainer | true
:private | ProjectFeature::ENABLED | :developer | true
:private | ProjectFeature::ENABLED | :reporter | true
:private | ProjectFeature::ENABLED | :guest | false
:private | ProjectFeature::ENABLED | :anonymous | false
+ :private | ProjectFeature::PRIVATE | :admin | true
+ :private | ProjectFeature::PRIVATE | :owner | true
:private | ProjectFeature::PRIVATE | :maintainer | true
:private | ProjectFeature::PRIVATE | :developer | true
:private | ProjectFeature::PRIVATE | :reporter | true
:private | ProjectFeature::PRIVATE | :guest | false
:private | ProjectFeature::PRIVATE | :anonymous | false
+ :private | ProjectFeature::DISABLED | :admin | false
+ :private | ProjectFeature::DISABLED | :owner | false
:private | ProjectFeature::DISABLED | :maintainer | false
:private | ProjectFeature::DISABLED | :developer | false
:private | ProjectFeature::DISABLED | :reporter | false
@@ -1535,96 +1578,49 @@ RSpec.describe ProjectPolicy do
let(:current_user) { send(role) }
let(:project) { send("#{project_visibility}_project") }
- it 'allows/disallows the abilities based on the container_registry feature access level' do
+ before do
+ enable_admin_mode!(admin) if role == :admin
project.project_feature.update!(container_registry_access_level: access_level)
+ end
+ it 'allows/disallows the abilities based on the container_registry feature access level' do
if allowed
expect_allowed(*permissions_abilities(role))
+ expect_disallowed(*(all_permissions - permissions_abilities(role)))
else
- expect_disallowed(*permissions_abilities(role))
+ expect_disallowed(*all_permissions)
+ end
+ end
+
+ it 'allows build_read_container_image to admins who are also team members' do
+ if allowed && role == :admin
+ project.add_reporter(current_user)
+
+ expect_allowed(:build_read_container_image)
end
end
def permissions_abilities(role)
case role
- when :maintainer
+ when :admin
+ if project_visibility == :private || access_level == ProjectFeature::PRIVATE
+ maintainer_operations_permissions - admin_excluded_permissions
+ else
+ maintainer_operations_permissions
+ end
+ when :maintainer, :owner
maintainer_operations_permissions
when :developer
developer_operations_permissions
- when :reporter, :guest, :anonymous
+ when :reporter, :guest
guest_operations_permissions
+ when :anonymous
+ anonymous_operations_permissions
else
raise "Unknown role #{role}"
end
end
end
-
- context 'with read_container_registry_access_level disabled' do
- before do
- stub_feature_flags(read_container_registry_access_level: false)
- end
-
- where(:project_visibility, :container_registry_enabled, :role, :allowed) do
- :public | true | :maintainer | true
- :public | true | :developer | true
- :public | true | :reporter | true
- :public | true | :guest | true
- :public | true | :anonymous | true
- :public | false | :maintainer | false
- :public | false | :developer | false
- :public | false | :reporter | false
- :public | false | :guest | false
- :public | false | :anonymous | false
- :internal | true | :maintainer | true
- :internal | true | :developer | true
- :internal | true | :reporter | true
- :internal | true | :guest | true
- :internal | true | :anonymous | false
- :internal | false | :maintainer | false
- :internal | false | :developer | false
- :internal | false | :reporter | false
- :internal | false | :guest | false
- :internal | false | :anonymous | false
- :private | true | :maintainer | true
- :private | true | :developer | true
- :private | true | :reporter | true
- :private | true | :guest | false
- :private | true | :anonymous | false
- :private | false | :maintainer | false
- :private | false | :developer | false
- :private | false | :reporter | false
- :private | false | :guest | false
- :private | false | :anonymous | false
- end
-
- with_them do
- let(:current_user) { send(role) }
- let(:project) { send("#{project_visibility}_project") }
-
- it 'allows/disallows the abilities based on container_registry_enabled' do
- project.update_column(:container_registry_enabled, container_registry_enabled)
-
- if allowed
- expect_allowed(*permissions_abilities(role))
- else
- expect_disallowed(*permissions_abilities(role))
- end
- end
-
- def permissions_abilities(role)
- case role
- when :maintainer
- maintainer_operations_permissions
- when :developer
- developer_operations_permissions
- when :reporter, :guest, :anonymous
- guest_operations_permissions
- else
- raise "Unknown role #{role}"
- end
- end
- end
- end
end
describe 'update_runners_registration_token' do
diff --git a/spec/policies/release_policy_spec.rb b/spec/policies/release_policy_spec.rb
new file mode 100644
index 00000000000..25468ae2ea2
--- /dev/null
+++ b/spec/policies/release_policy_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ReleasePolicy, :request_store do
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:release, reload: true) { create(:release, project: project) }
+
+ let(:user) { developer }
+
+ before_all do
+ project.add_developer(developer)
+ project.add_maintainer(maintainer)
+ end
+
+ subject { described_class.new(user, release) }
+
+ context 'when the evalute_protected_tag_for_release_permissions feature flag is disabled' do
+ before do
+ stub_feature_flags(evalute_protected_tag_for_release_permissions: false)
+ end
+
+ it 'allows the user to create and update a release' do
+ is_expected.to be_allowed(:create_release)
+ is_expected.to be_allowed(:update_release)
+ end
+
+ it 'prevents the user from destroying a release' do
+ is_expected.to be_disallowed(:destroy_release)
+ end
+
+ context 'when the user is maintainer' do
+ let(:user) { maintainer }
+
+ it 'allows the user to destroy a release' do
+ is_expected.to be_allowed(:destroy_release)
+ end
+ end
+ end
+
+ context 'when the user has access to the protected tag' do
+ let_it_be(:protected_tag) { create(:protected_tag, :developers_can_create, name: release.tag, project: project) }
+
+ it 'allows the user to create, update and destroy a release' do
+ is_expected.to be_allowed(:create_release)
+ is_expected.to be_allowed(:update_release)
+ is_expected.to be_allowed(:destroy_release)
+ end
+ end
+
+ context 'when the user does not have access to the protected tag' do
+ let_it_be(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: release.tag, project: project) }
+
+ it 'prevents the user from creating, updating and destroying a release' do
+ is_expected.to be_disallowed(:create_release)
+ is_expected.to be_disallowed(:update_release)
+ is_expected.to be_disallowed(:destroy_release)
+ end
+ end
+end
diff --git a/spec/presenters/blob_presenter_spec.rb b/spec/presenters/blob_presenter_spec.rb
index 38bdf3b9364..466a2b55e76 100644
--- a/spec/presenters/blob_presenter_spec.rb
+++ b/spec/presenters/blob_presenter_spec.rb
@@ -121,16 +121,26 @@ RSpec.describe BlobPresenter do
end
end
- describe '#plain_data' do
+ describe '#raw_plain_data' do
let(:blob) { repository.blob_at('HEAD', file) }
- subject { described_class.new(blob).plain_data }
+ context 'when blob is text' do
+ let(:file) { 'files/ruby/popen.rb' }
+
+ it 'does not include html in the content' do
+ expect(presenter.raw_plain_data.include?('</span>')).to be_falsey
+ end
+ end
+ end
+
+ describe '#plain_data' do
+ let(:blob) { repository.blob_at('HEAD', file) }
context 'when blob is binary' do
let(:file) { 'files/images/logo-black.png' }
it 'returns nil' do
- expect(subject).to be_nil
+ expect(presenter.plain_data).to be_nil
end
end
@@ -138,7 +148,7 @@ RSpec.describe BlobPresenter do
let(:file) { 'README.md' }
it 'returns plain content' do
- expect(subject).to include('<span id="LC1" class="line" lang="markdown">')
+ expect(presenter.plain_data).to include('<span id="LC1" class="line" lang="markdown">')
end
end
@@ -146,7 +156,7 @@ RSpec.describe BlobPresenter do
let(:file) { 'files/ruby/regex.rb' }
it 'returns highlighted syntax content' do
- expect(subject)
+ expect(presenter.plain_data)
.to include '<span id="LC1" class="line" lang="ruby"><span class="k">module</span> <span class="nn">Gitlab</span>'
end
end
@@ -155,7 +165,7 @@ RSpec.describe BlobPresenter do
let(:file) { 'LICENSE' }
it 'returns plain text highlighted content' do
- expect(subject).to include('<span id="LC1" class="line" lang="plaintext">The MIT License (MIT)</span>')
+ expect(presenter.plain_data).to include('<span id="LC1" class="line" lang="plaintext">The MIT License (MIT)</span>')
end
end
end
diff --git a/spec/presenters/ci/build_runner_presenter_spec.rb b/spec/presenters/ci/build_runner_presenter_spec.rb
index 9cb00781e99..4422773fec6 100644
--- a/spec/presenters/ci/build_runner_presenter_spec.rb
+++ b/spec/presenters/ci/build_runner_presenter_spec.rb
@@ -44,29 +44,13 @@ RSpec.describe Ci::BuildRunnerPresenter do
create(:ci_build, options: { artifacts: { paths: %w[abc], exclude: %w[cde] } })
end
- context 'when the feature is enabled' do
- before do
- stub_feature_flags(ci_artifacts_exclude: true)
- end
-
- it 'includes the list of excluded paths' do
- expect(presenter.artifacts.first).to include(
- artifact_type: :archive,
- artifact_format: :zip,
- paths: %w[abc],
- exclude: %w[cde]
- )
- end
- end
-
- context 'when the feature is disabled' do
- before do
- stub_feature_flags(ci_artifacts_exclude: false)
- end
-
- it 'does not include the list of excluded paths' do
- expect(presenter.artifacts.first).not_to have_key(:exclude)
- end
+ it 'includes the list of excluded paths' do
+ expect(presenter.artifacts.first).to include(
+ artifact_type: :archive,
+ artifact_format: :zip,
+ paths: %w[abc],
+ exclude: %w[cde]
+ )
end
end
@@ -193,7 +177,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
end
it 'uses a SHA in the persistent refspec' do
- expect(subject[0]).to match(/^\+[0-9a-f]{40}:refs\/pipelines\/[0-9]+$/)
+ expect(subject[0]).to match(%r{^\+[0-9a-f]{40}:refs/pipelines/[0-9]+$})
end
context 'when ref is tag' do
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index c64f9e8465f..b3ec184d08c 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -11,17 +11,17 @@ RSpec.describe MergeRequestPresenter do
subject { described_class.new(resource).ci_status }
context 'when no head pipeline' do
- it 'return status using CiService' do
- ci_service = double(Integrations::MockCi)
+ it 'return status from Ci integration' do
+ ci_integration = double(Integrations::MockCi)
ci_status = double
allow(resource.source_project)
- .to receive(:ci_service)
- .and_return(ci_service)
+ .to receive(:ci_integration)
+ .and_return(ci_integration)
allow(resource).to receive(:head_pipeline).and_return(nil)
- expect(ci_service).to receive(:commit_status)
+ expect(ci_integration).to receive(:commit_status)
.with(resource.diff_head_sha, resource.source_branch)
.and_return(ci_status)
diff --git a/spec/presenters/packages/nuget/package_metadata_presenter_spec.rb b/spec/presenters/packages/nuget/package_metadata_presenter_spec.rb
index d5e7b23d785..8bb0694f39c 100644
--- a/spec/presenters/packages/nuget/package_metadata_presenter_spec.rb
+++ b/spec/presenters/packages/nuget/package_metadata_presenter_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Packages::Nuget::PackageMetadataPresenter do
include_context 'with expected presenters dependency groups'
- let_it_be(:package) { create(:nuget_package, :with_metadatum) }
+ let_it_be(:package) { create(:nuget_package, :with_symbol_package, :with_metadatum) }
let_it_be(:tag1) { create(:packages_tag, name: 'tag1', package: package) }
let_it_be(:tag2) { create(:packages_tag, name: 'tag2', package: package) }
let_it_be(:presenter) { described_class.new(package) }
@@ -19,7 +19,7 @@ RSpec.describe Packages::Nuget::PackageMetadataPresenter do
end
describe '#archive_url' do
- let_it_be(:expected_suffix) { "/api/v4/projects/#{package.project_id}/packages/nuget/download/#{package.name}/#{package.version}/#{package.package_files.last.file_name}" }
+ let_it_be(:expected_suffix) { "/api/v4/projects/#{package.project_id}/packages/nuget/download/#{package.name}/#{package.version}/#{package.package_files.with_format('nupkg').last.file_name}" }
subject { presenter.archive_url }
diff --git a/spec/presenters/packages/nuget/service_index_presenter_spec.rb b/spec/presenters/packages/nuget/service_index_presenter_spec.rb
index 9c95fbc8fd2..aa69a9c3017 100644
--- a/spec/presenters/packages/nuget/service_index_presenter_spec.rb
+++ b/spec/presenters/packages/nuget/service_index_presenter_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe ::Packages::Nuget::ServiceIndexPresenter do
describe '#resources' do
subject { presenter.resources }
- shared_examples 'returning valid resources' do |resources_count: 8, include_publish_service: true|
+ shared_examples 'returning valid resources' do |resources_count: 9, include_publish_service: true|
it 'has valid resources' do
expect(subject.size).to eq resources_count
subject.each do |resource|
@@ -38,10 +38,15 @@ RSpec.describe ::Packages::Nuget::ServiceIndexPresenter do
end
end
- it "does #{'not ' unless include_publish_service}return the publish resource" do
+ it "does #{'not ' unless include_publish_service}return the publish resource", :aggregate_failures do
services_types = subject.map { |res| res[:@type] }
- described_class::SERVICE_VERSIONS[:publish].each do |publish_service_version|
+ publish_service_versions = [
+ described_class::SERVICE_VERSIONS[:publish],
+ described_class::SERVICE_VERSIONS[:symbol]
+ ].flatten
+
+ publish_service_versions.each do |publish_service_version|
if include_publish_service
expect(services_types).to include(publish_service_version)
else
@@ -54,7 +59,7 @@ RSpec.describe ::Packages::Nuget::ServiceIndexPresenter do
context 'for a group' do
let(:target) { group }
- # at the group level we don't have the publish and download service
+ # at the group level we don't have the publish, symbol, and download service
it_behaves_like 'returning valid resources', resources_count: 6, include_publish_service: false
end
diff --git a/spec/presenters/snippet_blob_presenter_spec.rb b/spec/presenters/snippet_blob_presenter_spec.rb
index 42eca6b5a49..1a5130dcdf6 100644
--- a/spec/presenters/snippet_blob_presenter_spec.rb
+++ b/spec/presenters/snippet_blob_presenter_spec.rb
@@ -120,6 +120,27 @@ RSpec.describe SnippetBlobPresenter do
end
end
+ describe '#raw_plain_data' do
+ context "with a plain file" do
+ subject { described_class.new(blob, current_user: user) }
+
+ it 'shows raw data for non binary files' do
+ expect(subject.raw_plain_data).to eq(blob.data)
+ end
+ end
+
+ context "with a binary file" do
+ let(:file) { 'files/images/logo-black.png' }
+ let(:blob) { blob_at(file) }
+
+ subject { described_class.new(blob, current_user: user) }
+
+ it 'returns nil' do
+ expect(subject.raw_plain_data).to be_nil
+ end
+ end
+ end
+
describe '#raw_url' do
subject { described_class.new(blob, current_user: user).raw_url }
diff --git a/spec/requests/api/admin/plan_limits_spec.rb b/spec/requests/api/admin/plan_limits_spec.rb
index 6bc133f67c0..f497227789a 100644
--- a/spec/requests/api/admin/plan_limits_spec.rb
+++ b/spec/requests/api/admin/plan_limits_spec.rb
@@ -29,6 +29,7 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits' do
expect(json_response['npm_max_file_size']).to eq(Plan.default.actual_limits.npm_max_file_size)
expect(json_response['nuget_max_file_size']).to eq(Plan.default.actual_limits.nuget_max_file_size)
expect(json_response['pypi_max_file_size']).to eq(Plan.default.actual_limits.pypi_max_file_size)
+ expect(json_response['terraform_module_max_file_size']).to eq(Plan.default.actual_limits.terraform_module_max_file_size)
end
end
@@ -48,6 +49,7 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits' do
expect(json_response['npm_max_file_size']).to eq(Plan.default.actual_limits.npm_max_file_size)
expect(json_response['nuget_max_file_size']).to eq(Plan.default.actual_limits.nuget_max_file_size)
expect(json_response['pypi_max_file_size']).to eq(Plan.default.actual_limits.pypi_max_file_size)
+ expect(json_response['terraform_module_max_file_size']).to eq(Plan.default.actual_limits.terraform_module_max_file_size)
end
end
@@ -85,7 +87,8 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits' do
'maven_max_file_size': 30,
'npm_max_file_size': 40,
'nuget_max_file_size': 50,
- 'pypi_max_file_size': 60
+ 'pypi_max_file_size': 60,
+ 'terraform_module_max_file_size': 70
}
expect(response).to have_gitlab_http_status(:ok)
@@ -96,6 +99,7 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits' do
expect(json_response['npm_max_file_size']).to eq(40)
expect(json_response['nuget_max_file_size']).to eq(50)
expect(json_response['pypi_max_file_size']).to eq(60)
+ expect(json_response['terraform_module_max_file_size']).to eq(70)
end
it 'updates single plan limits' do
@@ -128,7 +132,8 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits' do
'maven_max_file_size': 'c',
'npm_max_file_size': 'd',
'nuget_max_file_size': 'e',
- 'pypi_max_file_size': 'f'
+ 'pypi_max_file_size': 'f',
+ 'terraform_module_max_file_size': 'g'
}
expect(response).to have_gitlab_http_status(:bad_request)
@@ -139,7 +144,8 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits' do
'generic_packages_max_file_size is invalid',
'npm_max_file_size is invalid',
'nuget_max_file_size is invalid',
- 'pypi_max_file_size is invalid'
+ 'pypi_max_file_size is invalid',
+ 'terraform_module_max_file_size is invalid'
)
end
end
diff --git a/spec/requests/api/api_spec.rb b/spec/requests/api/api_spec.rb
index 46430e55ff2..81620fce448 100644
--- a/spec/requests/api/api_spec.rb
+++ b/spec/requests/api/api_spec.rb
@@ -81,6 +81,7 @@ RSpec.describe API::API do
let_it_be(:maven_metadatum) { package.maven_metadatum }
let_it_be(:package_file) { package.package_files.first }
let_it_be(:deploy_token) { create(:deploy_token) }
+
let(:headers_with_deploy_token) do
{
Gitlab::Auth::AuthFinders::DEPLOY_TOKEN_HEADER => deploy_token.token
@@ -138,6 +139,7 @@ RSpec.describe API::API do
describe 'Marginalia comments' do
context 'GET /user/:id' do
let_it_be(:user) { create(:user) }
+
let(:component_map) do
{
"application" => "test",
diff --git a/spec/requests/api/award_emoji_spec.rb b/spec/requests/api/award_emoji_spec.rb
index 1c825949ae8..07a9f7dfd74 100644
--- a/spec/requests/api/award_emoji_spec.rb
+++ b/spec/requests/api/award_emoji_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe API::AwardEmoji do
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:award_emoji) { create(:award_emoji, awardable: issue, user: user) }
let_it_be(:note) { create(:note, project: project, noteable: issue) }
+
let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request, user: user) }
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index 36fbe86ac76..ad517a05533 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::Branches do
let_it_be(:user) { create(:user) }
+
let(:project) { create(:project, :repository, creator: user, path: 'my.project') }
let(:guest) { create(:user).tap { |u| project.add_guest(u) } }
let(:branch_name) { 'feature' }
@@ -20,7 +21,7 @@ RSpec.describe API::Branches do
stub_feature_flags(branch_list_keyset_pagination: false)
end
- describe "GET /projects/:id/repository/branches", :use_clean_rails_redis_caching do
+ describe "GET /projects/:id/repository/branches", :use_clean_rails_redis_caching, :clean_gitlab_redis_shared_state do
let(:route) { "/projects/#{project_id}/repository/branches" }
shared_examples_for 'repository branches' do
@@ -74,6 +75,14 @@ RSpec.describe API::Branches do
check_merge_status(json_response)
end
+
+ it 'recovers pagination headers from cache between consecutive requests' do
+ 2.times do
+ get api(route, current_user), params: base_params
+
+ expect(response.headers).to include('X-Page')
+ end
+ end
end
context 'with gitaly pagination params' do
@@ -718,10 +727,11 @@ RSpec.describe API::Branches do
end
it 'returns 400 if ref name is invalid' do
+ error_message = 'Failed to create branch \'new_design3\': invalid reference name \'foo\''
post api(route, user), params: { branch: 'new_design3', ref: 'foo' }
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to eq('Invalid reference name: foo')
+ expect(json_response['message']).to eq(error_message)
end
end
diff --git a/spec/requests/api/bulk_imports_spec.rb b/spec/requests/api/bulk_imports_spec.rb
new file mode 100644
index 00000000000..f0edfa6f227
--- /dev/null
+++ b/spec/requests/api/bulk_imports_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::BulkImports do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:import_1) { create(:bulk_import, user: user) }
+ let_it_be(:import_2) { create(:bulk_import, user: user) }
+ let_it_be(:entity_1) { create(:bulk_import_entity, bulk_import: import_1) }
+ let_it_be(:entity_2) { create(:bulk_import_entity, bulk_import: import_1) }
+ let_it_be(:entity_3) { create(:bulk_import_entity, bulk_import: import_2) }
+ let_it_be(:failure_3) { create(:bulk_import_failure, entity: entity_3) }
+
+ describe 'GET /bulk_imports' do
+ it 'returns a list of bulk imports authored by the user' do
+ get api('/bulk_imports', user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.pluck('id')).to contain_exactly(import_1.id, import_2.id)
+ end
+ end
+
+ describe 'GET /bulk_imports/entities' do
+ it 'returns a list of all import entities authored by the user' do
+ get api('/bulk_imports/entities', user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.pluck('id')).to contain_exactly(entity_1.id, entity_2.id, entity_3.id)
+ end
+ end
+
+ describe 'GET /bulk_imports/:id' do
+ it 'returns specified bulk import' do
+ get api("/bulk_imports/#{import_1.id}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['id']).to eq(import_1.id)
+ end
+ end
+
+ describe 'GET /bulk_imports/:id/entities' do
+ it 'returns specified bulk import entities with failures' do
+ get api("/bulk_imports/#{import_2.id}/entities", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.pluck('id')).to contain_exactly(entity_3.id)
+ expect(json_response.first['failures'].first['exception_class']).to eq(failure_3.exception_class)
+ end
+ end
+
+ describe 'GET /bulk_imports/:id/entities/:entity_id' do
+ it 'returns specified bulk import entity' do
+ get api("/bulk_imports/#{import_1.id}/entities/#{entity_2.id}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['id']).to eq(entity_2.id)
+ end
+ end
+
+ context 'when user is unauthenticated' do
+ it 'returns 401' do
+ get api('/bulk_imports', nil)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+end
diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb
index 092cd00630e..eb6c0861844 100644
--- a/spec/requests/api/ci/pipelines_spec.rb
+++ b/spec/requests/api/ci/pipelines_spec.rb
@@ -491,6 +491,7 @@ RSpec.describe API::Ci::Pipelines do
describe 'GET /projects/:id/pipelines/:pipeline_id/bridges' do
let_it_be(:bridge) { create(:ci_bridge, pipeline: pipeline) }
+
let(:downstream_pipeline) { create(:ci_pipeline) }
let!(:pipeline_source) do
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index 8896bd44077..00c3a0a31af 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -297,7 +297,13 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
context 'when job filtered by job_age' do
- let!(:job) { create(:ci_build, :pending, :queued, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, queued_at: 60.seconds.ago) }
+ let!(:job) do
+ create(:ci_build, :pending, :queued, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, queued_at: 60.seconds.ago)
+ end
+
+ before do
+ job.queuing_entry&.update!(created_at: 60.seconds.ago)
+ end
context 'job is queued less than job_age parameter' do
let(:job_age) { 120 }
@@ -797,29 +803,16 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
context 'when a runner supports this feature' do
- it 'exposes excluded paths when the feature is enabled' do
- stub_feature_flags(ci_artifacts_exclude: true)
-
+ it 'exposes excluded paths' do
request_job info: { features: { artifacts_exclude: true } }
expect(response).to have_gitlab_http_status(:created)
expect(json_response.dig('artifacts').first).to include('exclude' => ['cde'])
end
-
- it 'does not expose excluded paths when the feature is disabled' do
- stub_feature_flags(ci_artifacts_exclude: false)
-
- request_job info: { features: { artifacts_exclude: true } }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response.dig('artifacts').first).not_to have_key('exclude')
- end
end
context 'when a runner does not support this feature' do
it 'does not expose the build at all' do
- stub_feature_flags(ci_artifacts_exclude: true)
-
request_job
expect(response).to have_gitlab_http_status(:no_content)
diff --git a/spec/requests/api/ci/runner/jobs_trace_spec.rb b/spec/requests/api/ci/runner/jobs_trace_spec.rb
index e077a174b08..e20c7e36096 100644
--- a/spec/requests/api/ci/runner/jobs_trace_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_trace_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_trace_chunks do
include StubGitlabCalls
include RedisHelpers
include WorkhorseHelpers
@@ -142,7 +142,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
context 'when redis data are flushed' do
before do
- redis_shared_state_cleanup!
+ redis_trace_chunks_cleanup!
end
it 'has empty trace' do
@@ -272,6 +272,18 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
it { expect(response).to have_gitlab_http_status(:forbidden) }
end
+ context 'when the job trace is too big' do
+ before do
+ project.actual_limits.update!(ci_jobs_trace_size_limit: 1)
+ end
+
+ it 'returns 403 Forbidden' do
+ patch_the_trace(' appended', headers.merge({ 'Content-Range' => "#{1.megabyte}-#{1.megabyte + 9}" }))
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
def patch_the_trace(content = ' appended', request_headers = nil, job_id: job.id)
unless request_headers
job.trace.read do |stream|
diff --git a/spec/requests/api/ci/runner/runners_post_spec.rb b/spec/requests/api/ci/runner/runners_post_spec.rb
index 1696fe63d5d..6d222046998 100644
--- a/spec/requests/api/ci/runner/runners_post_spec.rb
+++ b/spec/requests/api/ci/runner/runners_post_spec.rb
@@ -11,8 +11,10 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
before do
stub_feature_flags(ci_enable_live_trace: true)
+ stub_feature_flags(runner_registration_control: false)
stub_gitlab_calls
stub_application_setting(runners_registration_token: registration_token)
+ stub_application_setting(valid_runner_registrars: ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES)
allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
end
@@ -122,6 +124,33 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(project.runners.recent.size).to eq(1)
end
end
+
+ context 'when valid runner registrars do not include project' do
+ before do
+ stub_application_setting(valid_runner_registrars: ['group'])
+ end
+
+ context 'when feature flag is enabled' do
+ before do
+ stub_feature_flags(runner_registration_control: true)
+ end
+
+ it 'returns 403 error' do
+ request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when feature flag is disabled' do
+ it 'registers the runner' do
+ request
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.active).to be true
+ end
+ end
+ end
end
context 'when group token is used' do
@@ -180,6 +209,33 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(group.runners.recent.size).to eq(1)
end
end
+
+ context 'when valid runner registrars do not include group' do
+ before do
+ stub_application_setting(valid_runner_registrars: ['project'])
+ end
+
+ context 'when feature flag is enabled' do
+ before do
+ stub_feature_flags(runner_registration_control: true)
+ end
+
+ it 'returns 403 error' do
+ request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when feature flag is disabled' do
+ it 'registers the runner' do
+ request
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.active).to be true
+ end
+ end
+ end
end
end
diff --git a/spec/requests/api/composer_packages_spec.rb b/spec/requests/api/composer_packages_spec.rb
index 4120edabea3..e75725cacba 100644
--- a/spec/requests/api/composer_packages_spec.rb
+++ b/spec/requests/api/composer_packages_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe API::ComposerPackages do
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
let_it_be(:package_name) { 'package-name' }
let_it_be(:project, reload: true) { create(:project, :custom_repo, files: { 'composer.json' => { name: package_name }.to_json }, group: group) }
+
let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace, user: user } }
let(:headers) { {} }
diff --git a/spec/requests/api/debian_group_packages_spec.rb b/spec/requests/api/debian_group_packages_spec.rb
index c3abb06c5c1..931eaf41891 100644
--- a/spec/requests/api/debian_group_packages_spec.rb
+++ b/spec/requests/api/debian_group_packages_spec.rb
@@ -15,23 +15,23 @@ RSpec.describe API::DebianGroupPackages do
describe 'GET groups/:id/-/packages/debian/dists/*distribution/Release' do
let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/Release" }
- it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^TODO Release$/
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^Codename: fixture-distribution\n$/
end
describe 'GET groups/:id/-/packages/debian/dists/*distribution/InRelease' do
let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/InRelease" }
- it_behaves_like 'Debian repository read endpoint', 'GET request', :not_found
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^Codename: fixture-distribution\n$/
end
describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
- let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/#{component}/binary-#{architecture}/Packages" }
+ let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/#{component.name}/binary-#{architecture.name}/Packages" }
- it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^TODO Packages$/
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /Description: This is an incomplete Packages file/
end
describe 'GET groups/:id/-/packages/debian/pool/:component/:letter/:source_package/:file_name' do
- let(:url) { "/groups/#{container.id}/-/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
+ let(:url) { "/groups/#{container.id}/-/packages/debian/pool/#{component.name}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture.name}.deb" }
it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^TODO File$/
end
diff --git a/spec/requests/api/debian_project_packages_spec.rb b/spec/requests/api/debian_project_packages_spec.rb
index c11c4ecc12a..fb7da467322 100644
--- a/spec/requests/api/debian_project_packages_spec.rb
+++ b/spec/requests/api/debian_project_packages_spec.rb
@@ -15,23 +15,23 @@ RSpec.describe API::DebianProjectPackages do
describe 'GET projects/:id/packages/debian/dists/*distribution/Release' do
let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/Release" }
- it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^TODO Release$/
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^Codename: fixture-distribution\n$/
end
describe 'GET projects/:id/packages/debian/dists/*distribution/InRelease' do
let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/InRelease" }
- it_behaves_like 'Debian repository read endpoint', 'GET request', :not_found
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^Codename: fixture-distribution\n$/
end
describe 'GET projects/:id/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
- let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{component}/binary-#{architecture}/Packages" }
+ let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{component.name}/binary-#{architecture.name}/Packages" }
- it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^TODO Packages$/
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /Description: This is an incomplete Packages file/
end
describe 'GET projects/:id/packages/debian/pool/:component/:letter/:source_package/:file_name' do
- let(:url) { "/projects/#{container.id}/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
+ let(:url) { "/projects/#{container.id}/packages/debian/pool/#{component.name}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture.name}.deb" }
it_behaves_like 'Debian repository read endpoint', 'GET request', :success, /^TODO File$/
end
diff --git a/spec/requests/api/deploy_tokens_spec.rb b/spec/requests/api/deploy_tokens_spec.rb
index e8426270622..38380fa4460 100644
--- a/spec/requests/api/deploy_tokens_spec.rb
+++ b/spec/requests/api/deploy_tokens_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe API::DeployTokens do
let_it_be(:creator) { create(:user) }
let_it_be(:project) { create(:project, creator_id: creator.id) }
let_it_be(:group) { create(:group) }
+
let!(:deploy_token) { create(:deploy_token, projects: [project]) }
let!(:revoked_deploy_token) { create(:deploy_token, projects: [project], revoked: true) }
let!(:expired_deploy_token) { create(:deploy_token, projects: [project], expires_at: '1988-01-11T04:33:04-0600') }
diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb
index bbfe37cb70b..38c96cd37af 100644
--- a/spec/requests/api/deployments_spec.rb
+++ b/spec/requests/api/deployments_spec.rb
@@ -456,6 +456,7 @@ RSpec.describe API::Deployments do
context 'when a user member of the deployment project' do
let_it_be(:project2) { create(:project) }
+
let!(:merge_request1) { create(:merge_request, source_project: project, target_project: project) }
let!(:merge_request2) { create(:merge_request, source_project: project, target_project: project, state: 'closed') }
let!(:merge_request3) { create(:merge_request, source_project: project2, target_project: project2) }
diff --git a/spec/requests/api/error_tracking_collector_spec.rb b/spec/requests/api/error_tracking_collector_spec.rb
new file mode 100644
index 00000000000..52d63410e7a
--- /dev/null
+++ b/spec/requests/api/error_tracking_collector_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::ErrorTrackingCollector do
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:setting) { create(:project_error_tracking_setting, project: project) }
+
+ describe "POST /error_tracking/collector/api/:id/envelope" do
+ let_it_be(:raw_event) { fixture_file('error_tracking/event.txt') }
+ let_it_be(:url) { "/error_tracking/collector/api/#{project.id}/envelope" }
+
+ let(:params) { raw_event }
+
+ subject { post api(url), params: params }
+
+ RSpec.shared_examples 'not found' do
+ it 'reponds with 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ RSpec.shared_examples 'bad request' do
+ it 'responds with 400' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'error tracking feature is disabled' do
+ before do
+ setting.update!(enabled: false)
+ end
+
+ it_behaves_like 'not found'
+ end
+
+ context 'feature flag is disabled' do
+ before do
+ stub_feature_flags(integrated_error_tracking: false)
+ end
+
+ it_behaves_like 'not found'
+ end
+
+ context 'empty body' do
+ let(:params) { '' }
+
+ it_behaves_like 'bad request'
+ end
+
+ context 'unknown request type' do
+ let(:params) { fixture_file('error_tracking/unknown.txt') }
+
+ it_behaves_like 'bad request'
+ end
+
+ context 'transaction request type' do
+ let(:params) { fixture_file('error_tracking/transaction.txt') }
+
+ it 'does nothing and returns no content' do
+ expect { subject }.not_to change { ErrorTracking::ErrorEvent.count }
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ it 'writes to the database and returns no content' do
+ expect { subject }.to change { ErrorTracking::ErrorEvent.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+end
diff --git a/spec/requests/api/error_tracking_spec.rb b/spec/requests/api/error_tracking_spec.rb
index 8c9ca1b6a9d..39121af7bc3 100644
--- a/spec/requests/api/error_tracking_spec.rb
+++ b/spec/requests/api/error_tracking_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::ErrorTracking do
let_it_be(:user) { create(:user) }
+
let(:setting) { create(:project_error_tracking_setting) }
let(:project) { setting.project }
diff --git a/spec/requests/api/feature_flags_spec.rb b/spec/requests/api/feature_flags_spec.rb
index 2cd52c0a5e5..8edf8825fb2 100644
--- a/spec/requests/api/feature_flags_spec.rb
+++ b/spec/requests/api/feature_flags_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe API::FeatureFlags do
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
let_it_be(:non_project_member) { create(:user) }
+
let(:user) { developer }
before_all do
diff --git a/spec/requests/api/freeze_periods_spec.rb b/spec/requests/api/freeze_periods_spec.rb
index 5589d4d543d..3da992301d5 100644
--- a/spec/requests/api/freeze_periods_spec.rb
+++ b/spec/requests/api/freeze_periods_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe API::FreezePeriods do
let_it_be(:project) { create(:project, :repository, :private) }
let_it_be(:user) { create(:user) }
let_it_be(:admin) { create(:admin) }
+
let(:api_user) { user }
let(:invalid_cron) { '0 0 0 * *' }
let(:last_freeze_period) { project.freeze_periods.last }
diff --git a/spec/requests/api/geo_spec.rb b/spec/requests/api/geo_spec.rb
new file mode 100644
index 00000000000..edbca5eb1c6
--- /dev/null
+++ b/spec/requests/api/geo_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Geo do
+ include WorkhorseHelpers
+
+ describe 'GET /geo/proxy' do
+ subject { get api('/geo/proxy'), headers: workhorse_headers }
+
+ include_context 'workhorse headers'
+
+ context 'with valid auth' do
+ it 'returns empty data' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_empty
+ end
+ end
+
+ it 'rejects requests that bypassed gitlab-workhorse' do
+ workhorse_headers.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/boards/board_lists_query_spec.rb b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
index cd94ce91071..2d52cddcacc 100644
--- a/spec/requests/api/graphql/boards/board_lists_query_spec.rb
+++ b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
@@ -66,6 +66,7 @@ RSpec.describe 'get board lists' do
describe 'sorting and pagination' do
let_it_be(:current_user) { user }
+
let(:data_path) { [board_parent_type, :boards, :nodes, 0, :lists] }
def pagination_query(params)
diff --git a/spec/requests/api/graphql/ci/jobs_spec.rb b/spec/requests/api/graphql/ci/jobs_spec.rb
index 3fb89d6e815..10f05efa1b8 100644
--- a/spec/requests/api/graphql/ci/jobs_spec.rb
+++ b/spec/requests/api/graphql/ci/jobs_spec.rb
@@ -38,9 +38,15 @@ RSpec.describe 'Query.project.pipeline' do
name
groups {
nodes {
+ detailedStatus {
+ id
+ }
name
jobs {
nodes {
+ detailedStatus {
+ id
+ }
name
needs {
nodes { #{all_graphql_fields_for('CiBuildNeed')} }
diff --git a/spec/requests/api/graphql/ci/pipelines_spec.rb b/spec/requests/api/graphql/ci/pipelines_spec.rb
index f207636283f..6587061094d 100644
--- a/spec/requests/api/graphql/ci/pipelines_spec.rb
+++ b/spec/requests/api/graphql/ci/pipelines_spec.rb
@@ -229,6 +229,7 @@ RSpec.describe 'Query.project(fullPath).pipelines' do
let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
let_it_be(:upstream_project) { create(:project, :repository, :public) }
let_it_be(:upstream_pipeline) { create(:ci_pipeline, project: upstream_project, user: user) }
+
let(:upstream_pipelines_graphql_data) { graphql_data.dig(*%w[project pipelines nodes]).first['upstream'] }
let(:query) do
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index e1f84d23209..cdd46ca4ecc 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -5,25 +5,25 @@ require 'spec_helper'
RSpec.describe 'Query.runner(id)' do
include GraphqlHelpers
- let_it_be(:user) { create_default(:user, :admin) }
+ let_it_be(:user) { create(:user, :admin) }
- let_it_be(:active_runner) do
+ let_it_be(:active_instance_runner) do
create(:ci_runner, :instance, description: 'Runner 1', contacted_at: 2.hours.ago,
active: true, version: 'adfe156', revision: 'a', locked: true, ip_address: '127.0.0.1', maximum_timeout: 600,
access_level: 0, tag_list: %w[tag1 tag2], run_untagged: true)
end
- let_it_be(:inactive_runner) do
+ let_it_be(:inactive_instance_runner) do
create(:ci_runner, :instance, description: 'Runner 2', contacted_at: 1.day.ago, active: false,
version: 'adfe157', revision: 'b', ip_address: '10.10.10.10', access_level: 1, run_untagged: true)
end
def get_runner(id)
case id
- when :active_runner
- active_runner
- when :inactive_runner
- inactive_runner
+ when :active_instance_runner
+ active_instance_runner
+ when :inactive_instance_runner
+ inactive_instance_runner
end
end
@@ -59,7 +59,9 @@ RSpec.describe 'Query.runner(id)' do
'accessLevel' => runner.access_level.to_s.upcase,
'runUntagged' => runner.run_untagged,
'ipAddress' => runner.ip_address,
- 'runnerType' => 'INSTANCE_TYPE'
+ 'runnerType' => 'INSTANCE_TYPE',
+ 'jobCount' => 0,
+ 'projectCount' => nil
)
expect(runner_data['tagList']).to match_array runner.tag_list
end
@@ -84,38 +86,113 @@ RSpec.describe 'Query.runner(id)' do
end
describe 'for active runner' do
- it_behaves_like 'runner details fetch', :active_runner
+ it_behaves_like 'runner details fetch', :active_instance_runner
+
+ context 'when tagList is not requested' do
+ let(:query) do
+ wrap_fields(query_graphql_path(query_path, 'id'))
+ end
+
+ let(:query_path) do
+ [
+ [:runner, { id: active_instance_runner.to_global_id.to_s }]
+ ]
+ end
+
+ it 'does not retrieve tagList' do
+ post_graphql(query, current_user: user)
+
+ runner_data = graphql_data_at(:runner)
+ expect(runner_data).not_to be_nil
+ expect(runner_data).not_to include('tagList')
+ end
+ end
end
describe 'for inactive runner' do
- it_behaves_like 'runner details fetch', :inactive_runner
+ it_behaves_like 'runner details fetch', :inactive_instance_runner
+ end
+
+ describe 'for multiple runners' do
+ let_it_be(:project1) { create(:project, :test_repo) }
+ let_it_be(:project2) { create(:project, :test_repo) }
+ let_it_be(:project_runner1) { create(:ci_runner, :project, projects: [project1, project2], description: 'Runner 1') }
+ let_it_be(:project_runner2) { create(:ci_runner, :project, projects: [], description: 'Runner 2') }
+
+ let!(:job) { create(:ci_build, runner: project_runner1) }
+
+ context 'requesting project and job counts' do
+ let(:query) do
+ %(
+ query {
+ projectRunner1: runner(id: "#{project_runner1.to_global_id}") {
+ projectCount
+ jobCount
+ }
+ projectRunner2: runner(id: "#{project_runner2.to_global_id}") {
+ projectCount
+ jobCount
+ }
+ activeInstanceRunner: runner(id: "#{active_instance_runner.to_global_id}") {
+ projectCount
+ jobCount
+ }
+ }
+ )
+ end
+
+ before do
+ project_runner2.projects.clear
+
+ post_graphql(query, current_user: user)
+ end
+
+ it 'retrieves expected fields' do
+ runner1_data = graphql_data_at(:project_runner1)
+ runner2_data = graphql_data_at(:project_runner2)
+ runner3_data = graphql_data_at(:active_instance_runner)
+
+ expect(runner1_data).to match a_hash_including(
+ 'jobCount' => 1,
+ 'projectCount' => 2)
+ expect(runner2_data).to match a_hash_including(
+ 'jobCount' => 0,
+ 'projectCount' => 0)
+ expect(runner3_data).to match a_hash_including(
+ 'jobCount' => 0,
+ 'projectCount' => nil)
+ end
+ end
end
describe 'by regular user' do
- let(:user) { create_default(:user) }
+ let(:user) { create(:user) }
- it_behaves_like 'retrieval by unauthorized user', :active_runner
+ it_behaves_like 'retrieval by unauthorized user', :active_instance_runner
end
describe 'by unauthenticated user' do
let(:user) { nil }
- it_behaves_like 'retrieval by unauthorized user', :active_runner
+ it_behaves_like 'retrieval by unauthorized user', :active_instance_runner
end
describe 'Query limits' do
def runner_query(runner)
<<~SINGLE
runner(id: "#{runner.to_global_id}") {
- #{all_graphql_fields_for('CiRunner')}
+ #{all_graphql_fields_for('CiRunner', excluded: excluded_fields)}
}
SINGLE
end
+ # Currently excluding a known N+1 issue, see https://gitlab.com/gitlab-org/gitlab/-/issues/334759
+ let(:excluded_fields) { %w[jobCount] }
+
let(:single_query) do
<<~QUERY
{
- active: #{runner_query(active_runner)}
+ active: #{runner_query(active_instance_runner)}
}
QUERY
end
@@ -123,8 +200,8 @@ RSpec.describe 'Query.runner(id)' do
let(:double_query) do
<<~QUERY
{
- active: #{runner_query(active_runner)}
- inactive: #{runner_query(inactive_runner)}
+ active: #{runner_query(active_instance_runner)}
+ inactive: #{runner_query(inactive_instance_runner)}
}
QUERY
end
diff --git a/spec/requests/api/graphql/current_user_todos_spec.rb b/spec/requests/api/graphql/current_user_todos_spec.rb
index b657f15d0e9..7f37abba74a 100644
--- a/spec/requests/api/graphql/current_user_todos_spec.rb
+++ b/spec/requests/api/graphql/current_user_todos_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe 'A Todoable that implements the CurrentUserTodos interface' do
let_it_be(:todoable) { create(:issue, project: project) }
let_it_be(:done_todo) { create(:todo, state: :done, target: todoable, user: current_user) }
let_it_be(:pending_todo) { create(:todo, state: :pending, target: todoable, user: current_user) }
+
let(:state) { 'null' }
let(:todoable_response) do
diff --git a/spec/requests/api/graphql/issue_status_counts_spec.rb b/spec/requests/api/graphql/issue_status_counts_spec.rb
index 3d8817c3bc5..89ecbf44b10 100644
--- a/spec/requests/api/graphql/issue_status_counts_spec.rb
+++ b/spec/requests/api/graphql/issue_status_counts_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe 'getting Issue counts by status' do
let_it_be(:issue_opened) { create(:issue, project: project) }
let_it_be(:issue_closed) { create(:issue, :closed, project: project) }
let_it_be(:other_project_issue) { create(:issue) }
+
let(:params) { {} }
let(:fields) do
diff --git a/spec/requests/api/graphql/metrics/dashboard_query_spec.rb b/spec/requests/api/graphql/metrics/dashboard_query_spec.rb
index e01f59ee6a0..1b84acff0e2 100644
--- a/spec/requests/api/graphql/metrics/dashboard_query_spec.rb
+++ b/spec/requests/api/graphql/metrics/dashboard_query_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Getting Metrics Dashboard' do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
+
let(:project) { create(:project) }
let(:environment) { create(:environment, project: project) }
diff --git a/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb b/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
index b8cde32877b..1692cfbcf84 100644
--- a/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
+++ b/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Deleting Sidekiq jobs', :clean_gitlab_redis_queues do
include GraphqlHelpers
let_it_be(:admin) { create(:admin) }
+
let(:queue) { 'authorized_projects' }
let(:variables) { { user: admin.username, queue_name: queue } }
diff --git a/spec/requests/api/graphql/mutations/alert_management/alerts/set_assignees_spec.rb b/spec/requests/api/graphql/mutations/alert_management/alerts/set_assignees_spec.rb
index cd5cefa0a9a..fcef7b4e3ec 100644
--- a/spec/requests/api/graphql/mutations/alert_management/alerts/set_assignees_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/alerts/set_assignees_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Setting assignees of an alert' do
let_it_be(:project) { create(:project) }
let_it_be(:current_user) { create(:user) }
let_it_be(:alert) { create(:alert_management_alert, project: project) }
+
let(:input) { { assignee_usernames: [current_user.username] } }
let(:mutation) do
diff --git a/spec/requests/api/graphql/mutations/alert_management/alerts/todo/create_spec.rb b/spec/requests/api/graphql/mutations/alert_management/alerts/todo/create_spec.rb
index cd423d7764a..48307964345 100644
--- a/spec/requests/api/graphql/mutations/alert_management/alerts/todo/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/alerts/todo/create_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Creating a todo for the alert' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:alert) { create(:alert_management_alert, project: project) }
let(:mutation) do
diff --git a/spec/requests/api/graphql/mutations/alert_management/alerts/update_alert_status_spec.rb b/spec/requests/api/graphql/mutations/alert_management/alerts/update_alert_status_spec.rb
index ff55656a2ae..802d8d6c5a1 100644
--- a/spec/requests/api/graphql/mutations/alert_management/alerts/update_alert_status_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/alerts/update_alert_status_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Setting the status of an alert' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:alert) { create(:alert_management_alert, project: project) }
let(:input) { { status: 'ACKNOWLEDGED' } }
diff --git a/spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb b/spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb
index e594d67aab4..ff93da2153f 100644
--- a/spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Creating a new HTTP Integration' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:variables) do
{
project_path: project.full_path,
diff --git a/spec/requests/api/graphql/mutations/alert_management/prometheus_integration/create_spec.rb b/spec/requests/api/graphql/mutations/alert_management/prometheus_integration/create_spec.rb
index 0ef61ae0d5b..4c359d9b357 100644
--- a/spec/requests/api/graphql/mutations/alert_management/prometheus_integration/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/prometheus_integration/create_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Creating a new Prometheus Integration' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:variables) do
{
project_path: project.full_path,
@@ -42,7 +43,7 @@ RSpec.describe 'Creating a new Prometheus Integration' do
it 'creates a new integration' do
post_graphql_mutation(mutation, current_user: current_user)
- new_integration = ::PrometheusService.last!
+ new_integration = ::Integrations::Prometheus.last!
integration_response = mutation_response['integration']
expect(response).to have_gitlab_http_status(:success)
diff --git a/spec/requests/api/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb b/spec/requests/api/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb
index d8d0ace5981..31053c50cac 100644
--- a/spec/requests/api/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Resetting a token on an existing Prometheus Integration' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
- let_it_be(:integration) { create(:prometheus_service, project: project) }
+ let_it_be(:integration) { create(:prometheus_integration, project: project) }
let(:mutation) do
variables = {
diff --git a/spec/requests/api/graphql/mutations/alert_management/prometheus_integration/update_spec.rb b/spec/requests/api/graphql/mutations/alert_management/prometheus_integration/update_spec.rb
index 6c4a647a353..ad26ec118d7 100644
--- a/spec/requests/api/graphql/mutations/alert_management/prometheus_integration/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/prometheus_integration/update_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Updating an existing Prometheus Integration' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
- let_it_be(:integration) { create(:prometheus_service, project: project) }
+ let_it_be(:integration) { create(:prometheus_integration, project: project) }
let(:mutation) do
variables = {
diff --git a/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb b/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
index b39062f2e71..fdf5503a3a2 100644
--- a/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
+++ b/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Adding an AwardEmoji' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:awardable) { create(:note, project: project) }
+
let(:emoji_name) { 'thumbsup' }
let(:mutation) do
variables = {
diff --git a/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb b/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
index 170e7ff3b44..6b26e37e30c 100644
--- a/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
+++ b/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Toggling an AwardEmoji' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:awardable) { create(:note, project: project) }
+
let(:emoji_name) { 'thumbsup' }
let(:mutation) do
variables = {
diff --git a/spec/requests/api/graphql/mutations/boards/create_spec.rb b/spec/requests/api/graphql/mutations/boards/create_spec.rb
index c5f981262ea..22d05f36f0f 100644
--- a/spec/requests/api/graphql/mutations/boards/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/boards/create_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Mutations::Boards::Create do
let_it_be(:parent) { create(:project) }
+
let(:project_path) { parent.full_path }
let(:params) do
{
diff --git a/spec/requests/api/graphql/mutations/branches/create_spec.rb b/spec/requests/api/graphql/mutations/branches/create_spec.rb
index fc09f57a389..6a098002963 100644
--- a/spec/requests/api/graphql/mutations/branches/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/branches/create_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Creation of a new branch' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project, :public, :empty_repo) }
+
let(:input) { { project_path: project.full_path, name: new_branch, ref: ref } }
let(:new_branch) { 'new_branch' }
let(:ref) { 'master' }
@@ -34,11 +35,12 @@ RSpec.describe 'Creation of a new branch' do
end
context 'when ref is not correct' do
+ err_msg = 'Failed to create branch \'another_branch\': invalid reference name \'unknown\''
let(:new_branch) { 'another_branch' }
let(:ref) { 'unknown' }
it_behaves_like 'a mutation that returns errors in the response',
- errors: ['Invalid reference name: unknown']
+ errors: [err_msg]
end
end
end
diff --git a/spec/requests/api/graphql/mutations/ci/ci_cd_settings_update_spec.rb b/spec/requests/api/graphql/mutations/ci/ci_cd_settings_update_spec.rb
index 0d7571d91ca..05f6804a208 100644
--- a/spec/requests/api/graphql/mutations/ci/ci_cd_settings_update_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/ci_cd_settings_update_spec.rb
@@ -5,7 +5,10 @@ require 'spec_helper'
RSpec.describe 'CiCdSettingsUpdate' do
include GraphqlHelpers
- let_it_be(:project) { create(:project, keep_latest_artifact: true, ci_job_token_scope_enabled: true) }
+ let_it_be(:project) do
+ create(:project, keep_latest_artifact: true, ci_job_token_scope_enabled: true)
+ .tap(&:save!)
+ end
let(:variables) do
{
diff --git a/spec/requests/api/graphql/mutations/ci/job_token_scope/add_project_spec.rb b/spec/requests/api/graphql/mutations/ci/job_token_scope/add_project_spec.rb
new file mode 100644
index 00000000000..b53a7ddde32
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ci/job_token_scope/add_project_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'CiJobTokenScopeAddProject' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:target_project) { create(:project) }
+
+ let(:variables) do
+ {
+ project_path: project.full_path,
+ target_project_path: target_project.full_path
+ }
+ end
+
+ let(:mutation) do
+ graphql_mutation(:ci_job_token_scope_add_project, variables) do
+ <<~QL
+ errors
+ ciJobTokenScope {
+ projects {
+ nodes {
+ path
+ }
+ }
+ }
+ QL
+ end
+ end
+
+ let(:mutation_response) { graphql_mutation_response(:ci_job_token_scope_add_project) }
+
+ context 'when unauthorized' do
+ let(:current_user) { create(:user) }
+
+ context 'when not a maintainer' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'has graphql errors' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors).not_to be_empty
+ end
+ end
+ end
+
+ context 'when authorized' do
+ let_it_be(:current_user) { project.owner }
+
+ before do
+ target_project.add_developer(current_user)
+ end
+
+ it 'adds the target project to the job token scope' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response.dig('ciJobTokenScope', 'projects', 'nodes')).not_to be_empty
+ end.to change { Ci::JobToken::Scope.new(project).includes?(target_project) }.from(false).to(true)
+ end
+
+ context 'when invalid target project is provided' do
+ before do
+ variables[:target_project_path] = 'unknown/project'
+ end
+
+ it 'has mutation errors' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['errors']).to contain_exactly(Ci::JobTokenScope::EditScopeValidations::TARGET_PROJECT_UNAUTHORIZED_OR_UNFOUND)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/ci/job_token_scope/remove_project_spec.rb b/spec/requests/api/graphql/mutations/ci/job_token_scope/remove_project_spec.rb
new file mode 100644
index 00000000000..f1f42b00ada
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ci/job_token_scope/remove_project_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'CiJobTokenScopeRemoveProject' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:target_project) { create(:project) }
+
+ let_it_be(:link) do
+ create(:ci_job_token_project_scope_link,
+ source_project: project,
+ target_project: target_project)
+ end
+
+ let(:variables) do
+ {
+ project_path: project.full_path,
+ target_project_path: target_project.full_path
+ }
+ end
+
+ let(:mutation) do
+ graphql_mutation(:ci_job_token_scope_remove_project, variables) do
+ <<~QL
+ errors
+ ciJobTokenScope {
+ projects {
+ nodes {
+ path
+ }
+ }
+ }
+ QL
+ end
+ end
+
+ let(:mutation_response) { graphql_mutation_response(:ci_job_token_scope_remove_project) }
+
+ context 'when unauthorized' do
+ let(:current_user) { create(:user) }
+
+ context 'when not a maintainer' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'has graphql errors' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors).not_to be_empty
+ end
+ end
+ end
+
+ context 'when authorized' do
+ let_it_be(:current_user) { project.owner }
+
+ before do
+ target_project.add_guest(current_user)
+ end
+
+ it 'removes the target project from the job token scope' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response.dig('ciJobTokenScope', 'projects', 'nodes')).not_to be_empty
+ end.to change { Ci::JobToken::Scope.new(project).includes?(target_project) }.from(true).to(false)
+ end
+
+ context 'when invalid target project is provided' do
+ before do
+ variables[:target_project_path] = 'unknown/project'
+ end
+
+ it 'has mutation errors' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['errors']).to contain_exactly(Ci::JobTokenScope::EditScopeValidations::TARGET_PROJECT_UNAUTHORIZED_OR_UNFOUND)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/commits/create_spec.rb b/spec/requests/api/graphql/mutations/commits/create_spec.rb
index 375d4f10b40..619cba99c4e 100644
--- a/spec/requests/api/graphql/mutations/commits/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/commits/create_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Creation of a new commit' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
+
let(:input) { { project_path: project.full_path, branch: branch, message: message, actions: actions } }
let(:branch) { 'master' }
let(:message) { 'Commit message' }
diff --git a/spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb b/spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb
index 23e8e366483..0156142dc6f 100644
--- a/spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb
@@ -53,6 +53,7 @@ RSpec.describe 'Updating the container expiration policy' do
RSpec.shared_examples 'rejecting invalid regex for' do |field_name|
context "for field #{field_name}" do
let_it_be(:invalid_regex) { '*production' }
+
let(:params) do
{
:project_path => project.full_path,
diff --git a/spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb b/spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb
index 450996bf76b..632a934cd95 100644
--- a/spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb
+++ b/spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Toggling the resolve status of a discussion' do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:noteable) { create(:merge_request, source_project: project) }
+
let(:discussion) do
create(:diff_note_on_merge_request, noteable: noteable, project: project).to_discussion
end
diff --git a/spec/requests/api/graphql/mutations/environments/canary_ingress/update_spec.rb b/spec/requests/api/graphql/mutations/environments/canary_ingress/update_spec.rb
index f25a49291a6..3771ae0746e 100644
--- a/spec/requests/api/graphql/mutations/environments/canary_ingress/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/environments/canary_ingress/update_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe 'Update Environment Canary Ingress', :clean_gitlab_redis_cache do
let_it_be(:deployment) { create(:deployment, :success, environment: environment, project: project) }
let_it_be(:maintainer) { create(:user) }
let_it_be(:developer) { create(:user) }
+
let(:environment_id) { environment.to_global_id.to_s }
let(:weight) { 25 }
let(:actor) { developer }
diff --git a/spec/requests/api/graphql/mutations/issues/set_locked_spec.rb b/spec/requests/api/graphql/mutations/issues/set_locked_spec.rb
index 4989d096925..435ed0f9eb2 100644
--- a/spec/requests/api/graphql/mutations/issues/set_locked_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/set_locked_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Setting an issue as locked' do
let_it_be(:current_user) { create(:user) }
let_it_be(:issue) { create(:issue) }
let_it_be(:project) { issue.project }
+
let(:input) { { locked: true } }
let(:mutation) do
diff --git a/spec/requests/api/graphql/mutations/issues/set_severity_spec.rb b/spec/requests/api/graphql/mutations/issues/set_severity_spec.rb
index 96fd2368765..41997f151a2 100644
--- a/spec/requests/api/graphql/mutations/issues/set_severity_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/set_severity_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Setting severity level of an incident' do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
+
let(:incident) { create(:incident) }
let(:project) { incident.project }
let(:input) { { severity: 'CRITICAL' } }
diff --git a/spec/requests/api/graphql/mutations/issues/update_spec.rb b/spec/requests/api/graphql/mutations/issues/update_spec.rb
index adfa2a2bc08..b3e1ab62e54 100644
--- a/spec/requests/api/graphql/mutations/issues/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/update_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Update of an existing issue' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
+
let(:input) do
{
'iid' => issue.iid.to_s,
diff --git a/spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb b/spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb
index 00b93984f98..45cc70f09fd 100644
--- a/spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb
+++ b/spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Importing Jira Users' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:importer) { instance_double(JiraImport::UsersImporter) }
let(:project_path) { project.full_path }
let(:start_at) { 7 }
diff --git a/spec/requests/api/graphql/mutations/jira_import/start_spec.rb b/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
index e7124512ef1..b14305281af 100644
--- a/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
+++ b/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Starting a Jira Import' do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project) }
+
let(:jira_project_key) { 'AA' }
let(:project_path) { project.full_path }
@@ -80,17 +81,17 @@ RSpec.describe 'Starting a Jira Import' do
end
end
- context 'when project has no Jira service' do
+ context 'when project has no Jira integration' do
it_behaves_like 'a mutation that returns errors in the response', errors: ['Jira integration not configured.']
end
- context 'when when project has Jira service' do
- let!(:service) { create(:jira_service, project: project) }
+ context 'when when project has Jira integration' do
+ let!(:service) { create(:jira_integration, project: project) }
before do
project.reload
- stub_jira_service_test
+ stub_jira_integration_test
end
context 'when issues feature are disabled' do
diff --git a/spec/requests/api/graphql/mutations/labels/create_spec.rb b/spec/requests/api/graphql/mutations/labels/create_spec.rb
index 28284408306..d19411f6c1d 100644
--- a/spec/requests/api/graphql/mutations/labels/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/labels/create_spec.rb
@@ -61,6 +61,7 @@ RSpec.describe Mutations::Labels::Create do
context 'when creating a project label' do
let_it_be(:parent) { create(:project) }
+
let(:extra_params) { { project_path: parent.full_path } }
it_behaves_like 'labels create mutation'
@@ -68,6 +69,7 @@ RSpec.describe Mutations::Labels::Create do
context 'when creating a group label' do
let_it_be(:parent) { create(:group) }
+
let(:extra_params) { { group_path: parent.full_path } }
it_behaves_like 'labels create mutation'
diff --git a/spec/requests/api/graphql/mutations/merge_requests/accept_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/accept_spec.rb
index 2725b33d528..19a7c72ba80 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/accept_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/accept_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'accepting a merge request', :request_store do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
+
let!(:merge_request) { create(:merge_request, source_project: project) }
let(:input) do
{
diff --git a/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb
index bf759521dc0..3a4508489a1 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Creation of a new merge request' do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
+
let(:project) { create(:project, :public, :repository) }
let(:input) do
{
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
index a63116e2b94..dec9afd1310 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe 'Setting assignees of a merge request', :assume_throttled do
context 'when the current user does not have permission to add assignees' do
let(:current_user) { create(:user) }
- let(:db_query_limit) { 28 }
+ let(:db_query_limit) { 27 }
it 'does not change the assignees' do
project.add_guest(current_user)
@@ -80,7 +80,7 @@ RSpec.describe 'Setting assignees of a merge request', :assume_throttled do
end
context 'with assignees already assigned' do
- let(:db_query_limit) { 46 }
+ let(:db_query_limit) { 39 }
before do
merge_request.assignees = [assignee2]
@@ -96,7 +96,7 @@ RSpec.describe 'Setting assignees of a merge request', :assume_throttled do
end
context 'when passing an empty list of assignees' do
- let(:db_query_limit) { 32 }
+ let(:db_query_limit) { 31 }
let(:input) { { assignee_usernames: [] } }
before do
@@ -115,7 +115,7 @@ RSpec.describe 'Setting assignees of a merge request', :assume_throttled do
context 'when passing append as true' do
let(:mode) { Types::MutationOperationModeEnum.enum[:append] }
let(:input) { { assignee_usernames: [assignee2.username], operation_mode: mode } }
- let(:db_query_limit) { 22 }
+ let(:db_query_limit) { 21 }
before do
# In CE, APPEND is a NOOP as you can't have multiple assignees
@@ -135,7 +135,7 @@ RSpec.describe 'Setting assignees of a merge request', :assume_throttled do
end
context 'when passing remove as true' do
- let(:db_query_limit) { 32 }
+ let(:db_query_limit) { 31 }
let(:mode) { Types::MutationOperationModeEnum.enum[:remove] }
let(:input) { { assignee_usernames: [assignee.username], operation_mode: mode } }
let(:expected_result) { [] }
diff --git a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
index 2a39757e103..5bc3c68cf26 100644
--- a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create do
let_it_be(:project) { create(:project, :private, :repository) }
let_it_be(:environment) { create(:environment, project: project) }
let_it_be(:cluster) { create(:cluster, projects: [project]) }
+
let(:dashboard_path) { 'config/prometheus/common_metrics.yml' }
let(:starting_at) { Time.current.iso8601 }
let(:ending_at) { 1.hour.from_now.iso8601 }
diff --git a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
index 202e7e7c333..d335642d321 100644
--- a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -126,6 +126,7 @@ RSpec.describe 'Updating the package settings' do
context 'without existing package settings' do
let_it_be(:namespace, reload: true) { create(:group) }
+
let(:package_settings) { namespace.package_settings }
where(:user_role, :shared_examples_name) do
diff --git a/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb
index b5aaf304812..8f3ae9f26f6 100644
--- a/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Adding a DiffNote' do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
+
let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
let(:project) { create(:project, :repository) }
let(:diff_refs) { noteable.diff_refs }
diff --git a/spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb
index 0e5744fb64f..8f2438cb741 100644
--- a/spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Adding an image DiffNote' do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
+
let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
let(:project) { create(:project, :repository) }
let(:diff_refs) { noteable.diff_refs }
diff --git a/spec/requests/api/graphql/mutations/notes/create/note_spec.rb b/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
index 8dd8ed361ba..87c752393ea 100644
--- a/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Adding a Note' do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
+
let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
let(:project) { create(:project) }
let(:discussion) { nil }
diff --git a/spec/requests/api/graphql/mutations/notes/reposition_image_diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/reposition_image_diff_note_spec.rb
index 4efa7f9d509..89e3a71280f 100644
--- a/spec/requests/api/graphql/mutations/notes/reposition_image_diff_note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/reposition_image_diff_note_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Repositioning an ImageDiffNote' do
let_it_be(:noteable) { create(:merge_request) }
let_it_be(:project) { noteable.project }
+
let(:note) { create(:image_diff_note_on_merge_request, noteable: noteable, project: project) }
let(:new_position) { { x: 10 } }
let(:current_user) { project.creator }
diff --git a/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
index 1ce09881fde..cfd0b34b815 100644
--- a/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe 'Updating an image DiffNote' do
let_it_be(:updated_height) { 100 }
let_it_be(:updated_x) { 5 }
let_it_be(:updated_y) { 10 }
+
let(:updated_position) do
{
width: updated_width,
diff --git a/spec/requests/api/graphql/mutations/packages/destroy_spec.rb b/spec/requests/api/graphql/mutations/packages/destroy_spec.rb
new file mode 100644
index 00000000000..e5ced419ecf
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/packages/destroy_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Destroying a package' do
+ using RSpec::Parameterized::TableSyntax
+
+ include GraphqlHelpers
+
+ let_it_be_with_reload(:package) { create(:package) }
+ let_it_be(:user) { create(:user) }
+
+ let(:project) { package.project }
+ let(:id) { package.to_global_id.to_s }
+
+ let(:query) do
+ <<~GQL
+ errors
+ GQL
+ end
+
+ let(:params) { { id: id } }
+ let(:mutation) { graphql_mutation(:destroy_package, params, query) }
+ let(:mutation_response) { graphql_mutation_response(:destroyPackage) }
+
+ shared_examples 'destroying the package' do
+ it 'destroy the package' do
+ expect(::Packages::DestroyPackageService)
+ .to receive(:new).with(container: package, current_user: user).and_call_original
+
+ expect { mutation_request }.to change { ::Packages::Package.count }.by(-1)
+ end
+
+ it_behaves_like 'returning response status', :success
+ end
+
+ shared_examples 'denying the mutation request' do
+ it 'does not destroy the package' do
+ expect(::Packages::DestroyPackageService)
+ .not_to receive(:new).with(container: package, current_user: user)
+
+ expect { mutation_request }.not_to change { ::Packages::Package.count }
+
+ expect(mutation_response).to be_nil
+ end
+
+ it_behaves_like 'returning response status', :success
+ end
+
+ describe 'post graphql mutation' do
+ subject(:mutation_request) { post_graphql_mutation(mutation, current_user: user) }
+
+ context 'with valid id' do
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'destroying the package'
+ :developer | 'denying the mutation request'
+ :reporter | 'denying the mutation request'
+ :guest | 'denying the mutation request'
+ :anonymous | 'denying the mutation request'
+ end
+
+ with_them do
+ before do
+ project.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+
+ context 'with invalid id' do
+ let(:params) { { id: 'gid://gitlab/Packages::Package/5555' } }
+
+ it_behaves_like 'denying the mutation request'
+ end
+
+ context 'when an error occures' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns the errors in the response' do
+ allow_next_found_instance_of(::Packages::Package) do |package|
+ allow(package).to receive(:destroy!).and_raise(StandardError)
+ end
+
+ mutation_request
+
+ expect(mutation_response['errors']).to eq(['Failed to remove the package'])
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/releases/delete_spec.rb b/spec/requests/api/graphql/mutations/releases/delete_spec.rb
index 3710f118bf4..40063156609 100644
--- a/spec/requests/api/graphql/mutations/releases/delete_spec.rb
+++ b/spec/requests/api/graphql/mutations/releases/delete_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe 'Deleting a release' do
end
context 'when the current user has access to update releases' do
- let(:current_user) { maintainer }
+ let(:current_user) { developer }
it 'deletes the release' do
expect { delete_release }.to change { Release.count }.by(-1)
@@ -105,12 +105,6 @@ RSpec.describe 'Deleting a release' do
end
context "when the current user doesn't have access to update releases" do
- context 'when the current user is a Developer' do
- let(:current_user) { developer }
-
- it_behaves_like 'unauthorized or not found error'
- end
-
context 'when the current user is a Reporter' do
let(:current_user) { reporter }
diff --git a/spec/requests/api/graphql/mutations/snippets/create_spec.rb b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
index 214c804c519..9a3cea3ca14 100644
--- a/spec/requests/api/graphql/mutations/snippets/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe 'Creating a Snippet' do
let(:actions) { [{ action: action }.merge(file_1), { action: action }.merge(file_2)] }
let(:project_path) { nil }
let(:uploaded_files) { nil }
- let(:spam_mutation_vars) { {} }
let(:mutation_vars) do
{
description: description,
@@ -26,7 +25,7 @@ RSpec.describe 'Creating a Snippet' do
project_path: project_path,
uploaded_files: uploaded_files,
blob_actions: actions
- }.merge(spam_mutation_vars)
+ }
end
let(:mutation) do
@@ -77,21 +76,6 @@ RSpec.describe 'Creating a Snippet' do
expect(mutation_response['snippet']).to be_nil
end
-
- context 'when snippet_spam flag is disabled' do
- before do
- stub_feature_flags(snippet_spam: false)
- end
-
- it 'passes disable_spam_action_service param to service' do
- expect(::Snippets::CreateService)
- .to receive(:new)
- .with(project: anything, current_user: anything, params: hash_including(disable_spam_action_service: true))
- .and_call_original
-
- subject
- end
- end
end
shared_examples 'creates snippet' do
@@ -101,8 +85,8 @@ RSpec.describe 'Creating a Snippet' do
end.to change { Snippet.count }.by(1)
snippet = Snippet.last
- created_file_1 = snippet.repository.blob_at('HEAD', file_1[:filePath])
- created_file_2 = snippet.repository.blob_at('HEAD', file_2[:filePath])
+ created_file_1 = snippet.repository.blob_at(snippet.default_branch, file_1[:filePath])
+ created_file_2 = snippet.repository.blob_at(snippet.default_branch, file_2[:filePath])
expect(created_file_1.data).to match(file_1[:content])
expect(created_file_2.data).to match(file_2[:content])
@@ -121,15 +105,6 @@ RSpec.describe 'Creating a Snippet' do
it_behaves_like 'snippet edit usage data counters'
it_behaves_like 'a mutation which can mutate a spammable' do
- let(:captcha_response) { 'abc123' }
- let(:spam_log_id) { 1234 }
- let(:spam_mutation_vars) do
- {
- captcha_response: captcha_response,
- spam_log_id: spam_log_id
- }
- end
-
let(:service) { Snippets::CreateService }
end
end
@@ -190,7 +165,7 @@ RSpec.describe 'Creating a Snippet' do
it do
expect(::Snippets::CreateService).to receive(:new)
- .with(project: nil, current_user: user, params: hash_including(files: expected_value))
+ .with(project: nil, current_user: user, params: hash_including(files: expected_value), spam_params: instance_of(::Spam::SpamParams))
.and_return(double(execute: creation_response))
subject
diff --git a/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb b/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb
index 4d499310591..43d846cb297 100644
--- a/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe 'Mark snippet as spam' do
let_it_be(:other_user) { create(:user) }
let_it_be(:snippet) { create(:personal_snippet) }
let_it_be(:user_agent_detail) { create(:user_agent_detail, subject: snippet) }
+
let(:current_user) { snippet.author }
let(:snippet_gid) { snippet.to_global_id.to_s }
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index 77efb786dcb..eb7e6f840fe 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe 'Updating a Snippet' do
let_it_be(:original_description) { 'Initial description' }
let_it_be(:original_title) { 'Initial title' }
let_it_be(:original_file_name) { 'Initial file_name' }
+
let(:updated_content) { 'Updated content' }
let(:updated_description) { 'Updated description' }
let(:updated_title) { 'Updated_title' }
@@ -16,7 +17,6 @@ RSpec.describe 'Updating a Snippet' do
let(:updated_file) { 'CHANGELOG' }
let(:deleted_file) { 'README' }
let(:snippet_gid) { GitlabSchema.id_from_object(snippet).to_s }
- let(:spam_mutation_vars) { {} }
let(:mutation_vars) do
{
id: snippet_gid,
@@ -27,7 +27,7 @@ RSpec.describe 'Updating a Snippet' do
{ action: :update, filePath: updated_file, content: updated_content },
{ action: :delete, filePath: deleted_file }
]
- }.merge(spam_mutation_vars)
+ }
end
let(:mutation) do
@@ -82,21 +82,6 @@ RSpec.describe 'Updating a Snippet' do
end
end
- context 'when snippet_spam flag is disabled' do
- before do
- stub_feature_flags(snippet_spam: false)
- end
-
- it 'passes disable_spam_action_service param to service' do
- expect(::Snippets::UpdateService)
- .to receive(:new)
- .with(project: anything, current_user: anything, params: hash_including(disable_spam_action_service: true))
- .and_call_original
-
- subject
- end
- end
-
context 'when there are ActiveRecord validation errors' do
let(:updated_title) { '' }
@@ -125,15 +110,6 @@ RSpec.describe 'Updating a Snippet' do
end
it_behaves_like 'a mutation which can mutate a spammable' do
- let(:captcha_response) { 'abc123' }
- let(:spam_log_id) { 1234 }
- let(:spam_mutation_vars) do
- {
- captcha_response: captcha_response,
- spam_log_id: spam_log_id
- }
- end
-
let(:service) { Snippets::UpdateService }
end
@@ -164,6 +140,7 @@ RSpec.describe 'Updating a Snippet' do
describe 'ProjectSnippet' do
let_it_be(:project) { create(:project, :private) }
+
let(:snippet) do
create(:project_snippet,
:private,
diff --git a/spec/requests/api/graphql/mutations/user_callouts/create_spec.rb b/spec/requests/api/graphql/mutations/user_callouts/create_spec.rb
index cb67a60ebe4..716983f01d2 100644
--- a/spec/requests/api/graphql/mutations/user_callouts/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/user_callouts/create_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Create a user callout' do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
+
let(:feature_name) { ::UserCallout.feature_names.each_key.first }
let(:input) do
diff --git a/spec/requests/api/graphql/namespace/package_settings_spec.rb b/spec/requests/api/graphql/namespace/package_settings_spec.rb
index 6af098e902f..42fd07dbdc7 100644
--- a/spec/requests/api/graphql/namespace/package_settings_spec.rb
+++ b/spec/requests/api/graphql/namespace/package_settings_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'getting namespace package settings in a namespace' do
let_it_be(:package_settings) { create(:namespace_package_setting) }
let_it_be(:namespace) { package_settings.namespace }
let_it_be(:current_user) { namespace.owner }
+
let(:package_settings_response) { graphql_data.dig('namespace', 'packageSettings') }
let(:fields) { all_graphql_fields_for('PackageSettings') }
diff --git a/spec/requests/api/graphql/project/alert_management/alert/issue_spec.rb b/spec/requests/api/graphql/project/alert_management/alert/issue_spec.rb
index 9724de4fedb..05a98a9dd9c 100644
--- a/spec/requests/api/graphql/project/alert_management/alert/issue_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alert/issue_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'getting Alert Management Alert Issue' do
let_it_be(:project) { create(:project) }
let_it_be(:current_user) { create(:user) }
+
let(:payload) { {} }
let(:query) { 'avg(metric) > 1.0' }
diff --git a/spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb b/spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb
index 9fbf5aaa41f..ecd93d169d3 100644
--- a/spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe 'getting Alert Management Alert counts by status' do
let_it_be(:alert_resolved) { create(:alert_management_alert, :resolved, project: project) }
let_it_be(:alert_triggered) { create(:alert_management_alert, project: project) }
let_it_be(:other_project_alert) { create(:alert_management_alert) }
+
let(:params) { {} }
let(:fields) do
diff --git a/spec/requests/api/graphql/project/alert_management/integrations_spec.rb b/spec/requests/api/graphql/project/alert_management/integrations_spec.rb
index 0e029aee9e8..1793d4961eb 100644
--- a/spec/requests/api/graphql/project/alert_management/integrations_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/integrations_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'getting Alert Management Integrations' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:current_user) { create(:user) }
- let_it_be(:prometheus_service) { create(:prometheus_service, project: project) }
+ let_it_be(:prometheus_integration) { create(:prometheus_integration, project: project) }
let_it_be(:project_alerting_setting) { create(:project_alerting_setting, project: project) }
let_it_be(:active_http_integration) { create(:alert_management_http_integration, project: project) }
let_it_be(:inactive_http_integration) { create(:alert_management_http_integration, :inactive, project: project) }
@@ -53,15 +53,15 @@ RSpec.describe 'getting Alert Management Integrations' do
end
context 'when no extra params given' do
- let(:http_integration) { integrations.first }
- let(:prometheus_integration) { integrations.second }
+ let(:http_integration_response) { integrations.first }
+ let(:prometheus_integration_response) { integrations.second }
it_behaves_like 'a working graphql query'
it { expect(integrations.size).to eq(2) }
it 'returns the correct properties of the integrations' do
- expect(http_integration).to include(
+ expect(http_integration_response).to include(
'id' => global_id_of(active_http_integration),
'type' => 'HTTP',
'name' => active_http_integration.name,
@@ -71,14 +71,14 @@ RSpec.describe 'getting Alert Management Integrations' do
'apiUrl' => nil
)
- expect(prometheus_integration).to include(
- 'id' => global_id_of(prometheus_service),
+ expect(prometheus_integration_response).to include(
+ 'id' => global_id_of(prometheus_integration),
'type' => 'PROMETHEUS',
'name' => 'Prometheus',
- 'active' => prometheus_service.manual_configuration?,
+ 'active' => prometheus_integration.manual_configuration?,
'token' => project_alerting_setting.token,
'url' => "http://localhost/#{project.full_path}/prometheus/alerts/notify.json",
- 'apiUrl' => prometheus_service.api_url
+ 'apiUrl' => prometheus_integration.api_url
)
end
end
@@ -104,7 +104,7 @@ RSpec.describe 'getting Alert Management Integrations' do
end
context 'when Prometheus Integration ID is given' do
- let(:params) { { id: global_id_of(prometheus_service) } }
+ let(:params) { { id: global_id_of(prometheus_integration) } }
it_behaves_like 'a working graphql query'
@@ -112,13 +112,13 @@ RSpec.describe 'getting Alert Management Integrations' do
it 'returns the correct properties of the Prometheus Integration' do
expect(integrations.first).to include(
- 'id' => global_id_of(prometheus_service),
+ 'id' => global_id_of(prometheus_integration),
'type' => 'PROMETHEUS',
'name' => 'Prometheus',
- 'active' => prometheus_service.manual_configuration?,
+ 'active' => prometheus_integration.manual_configuration?,
'token' => project_alerting_setting.token,
'url' => "http://localhost/#{project.full_path}/prometheus/alerts/notify.json",
- 'apiUrl' => prometheus_service.api_url
+ 'apiUrl' => prometheus_integration.api_url
)
end
end
diff --git a/spec/requests/api/graphql/project/base_service_spec.rb b/spec/requests/api/graphql/project/base_service_spec.rb
index af462c4a639..5dc0f55db88 100644
--- a/spec/requests/api/graphql/project/base_service_spec.rb
+++ b/spec/requests/api/graphql/project/base_service_spec.rb
@@ -7,9 +7,9 @@ RSpec.describe 'query Jira service' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
- let_it_be(:jira_service) { create(:jira_service, project: project) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project) }
let_it_be(:bugzilla_integration) { create(:bugzilla_integration, project: project) }
- let_it_be(:redmine_service) { create(:redmine_service, project: project) }
+ let_it_be(:redmine_integration) { create(:redmine_integration, project: project) }
let(:query) do
%(
diff --git a/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb b/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
index b2b42137acf..14fabaaf032 100644
--- a/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
+++ b/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'getting a detailed sentry error' do
let_it_be(:project_setting) { create(:project_error_tracking_setting, project: project) }
let_it_be(:current_user) { project.owner }
let_it_be(:sentry_detailed_error) { build(:detailed_error_tracking_error) }
+
let(:sentry_gid) { sentry_detailed_error.to_global_id.to_s }
let(:fields) do
<<~QUERY
diff --git a/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb b/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb
index c7d327a62af..e71e5a48ddc 100644
--- a/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb
+++ b/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe 'sentry errors requests' do
describe 'getting a detailed sentry error' do
let_it_be(:sentry_detailed_error) { build(:detailed_error_tracking_error) }
+
let(:sentry_gid) { sentry_detailed_error.to_global_id.to_s }
let(:detailed_fields) do
@@ -193,6 +194,7 @@ RSpec.describe 'sentry errors requests' do
describe 'getting a stack trace' do
let_it_be(:sentry_stack_trace) { build(:error_tracking_error_event) }
+
let(:sentry_gid) { global_id_of(Gitlab::ErrorTracking::DetailedError.new(id: 1)) }
let(:stack_trace_fields) do
diff --git a/spec/requests/api/graphql/project/issue/designs/designs_spec.rb b/spec/requests/api/graphql/project/issue/designs/designs_spec.rb
index decab900a43..def41efddde 100644
--- a/spec/requests/api/graphql/project/issue/designs/designs_spec.rb
+++ b/spec/requests/api/graphql/project/issue/designs/designs_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Getting designs related to an issue' do
let_it_be(:design) { create(:design, :with_smaller_image_versions, versions_count: 1) }
let_it_be(:current_user) { design.project.owner }
+
let(:design_query) do
<<~NODE
designs {
@@ -124,6 +125,7 @@ RSpec.describe 'Getting designs related to an issue' do
context 'with versions' do
let_it_be(:version) { design.versions.take }
+
let(:design_query) do
<<~NODE
designs {
@@ -165,6 +167,7 @@ RSpec.describe 'Getting designs related to an issue' do
let_it_be(:issue) { design.issue }
let_it_be(:second_design, reload: true) { create(:design, :with_smaller_image_versions, issue: issue, versions_count: 1) }
let_it_be(:deleted_design) { create(:design, :with_versions, issue: issue, deleted: true, versions_count: 1) }
+
let(:all_versions) { issue.design_versions.ordered.reverse }
let(:design_query) do
<<~NODE
diff --git a/spec/requests/api/graphql/project/jira_service_spec.rb b/spec/requests/api/graphql/project/jira_service_spec.rb
index 905a669bf0d..64e9e04ae44 100644
--- a/spec/requests/api/graphql/project/jira_service_spec.rb
+++ b/spec/requests/api/graphql/project/jira_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'query Jira service' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
- let_it_be(:jira_service) { create(:jira_service, project: project) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project) }
let(:query) do
%(
diff --git a/spec/requests/api/graphql/project/pipeline_spec.rb b/spec/requests/api/graphql/project/pipeline_spec.rb
index 0a5bcc7a965..cb6755640a9 100644
--- a/spec/requests/api/graphql/project/pipeline_spec.rb
+++ b/spec/requests/api/graphql/project/pipeline_spec.rb
@@ -8,9 +8,9 @@ RSpec.describe 'getting pipeline information nested in a project' do
let_it_be(:project) { create(:project, :repository, :public) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:current_user) { create(:user) }
- let_it_be(:build_job) { create(:ci_build, :trace_with_sections, name: 'build-a', pipeline: pipeline) }
- let_it_be(:failed_build) { create(:ci_build, :failed, name: 'failed-build', pipeline: pipeline) }
- let_it_be(:bridge) { create(:ci_bridge, name: 'ci-bridge-example', pipeline: pipeline) }
+ let_it_be(:build_job) { create(:ci_build, :trace_with_sections, name: 'build-a', pipeline: pipeline, stage_idx: 0, stage: 'build') }
+ let_it_be(:failed_build) { create(:ci_build, :failed, name: 'failed-build', pipeline: pipeline, stage_idx: 0, stage: 'build') }
+ let_it_be(:bridge) { create(:ci_bridge, name: 'ci-bridge-example', pipeline: pipeline, stage_idx: 0, stage: 'build') }
let(:path) { %i[project pipeline] }
let(:pipeline_graphql_data) { graphql_data_at(*path) }
@@ -79,16 +79,6 @@ RSpec.describe 'getting pipeline information nested in a project' do
end
end
- private
-
- def build_query_to_find_pipeline_shas(*pipelines)
- pipeline_fields = pipelines.map.each_with_index do |pipeline, idx|
- "pipeline#{idx}: pipeline(iid: \"#{pipeline.iid}\") { sha }"
- end.join(' ')
-
- graphql_query_for('project', { 'fullPath' => project.full_path }, pipeline_fields)
- end
-
context 'when enough data is requested' do
let(:fields) do
query_graphql_field(:jobs, nil,
@@ -282,4 +272,69 @@ RSpec.describe 'getting pipeline information nested in a project' do
end
end
end
+
+ context 'N+1 queries on stages jobs' do
+ let(:depth) { 5 }
+ let(:fields) do
+ <<~FIELDS
+ stages {
+ nodes {
+ name
+ groups {
+ nodes {
+ name
+ jobs {
+ nodes {
+ name
+ needs {
+ nodes {
+ name
+ }
+ }
+ status: detailedStatus {
+ tooltip
+ hasDetails
+ detailsPath
+ action {
+ buttonTitle
+ path
+ title
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ FIELDS
+ end
+
+ it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do
+ # warm up
+ post_graphql(query, current_user: current_user)
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ post_graphql(query, current_user: current_user)
+ end
+
+ create(:ci_build, name: 'test-a', pipeline: pipeline, stage_idx: 1, stage: 'test')
+ create(:ci_build, name: 'test-b', pipeline: pipeline, stage_idx: 1, stage: 'test')
+ create(:ci_build, name: 'deploy-a', pipeline: pipeline, stage_idx: 2, stage: 'deploy')
+
+ expect do
+ post_graphql(query, current_user: current_user)
+ end.not_to exceed_all_query_limit(control)
+ end
+ end
+
+ private
+
+ def build_query_to_find_pipeline_shas(*pipelines)
+ pipeline_fields = pipelines.map.each_with_index do |pipeline, idx|
+ "pipeline#{idx}: pipeline(iid: \"#{pipeline.iid}\") { sha }"
+ end.join(' ')
+
+ graphql_query_for('project', { 'fullPath' => project.full_path }, pipeline_fields)
+ end
end
diff --git a/spec/requests/api/graphql/project/project_pipeline_statistics_spec.rb b/spec/requests/api/graphql/project/project_pipeline_statistics_spec.rb
index 7d157563f5f..39a68d98d84 100644
--- a/spec/requests/api/graphql/project/project_pipeline_statistics_spec.rb
+++ b/spec/requests/api/graphql/project/project_pipeline_statistics_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'rendering project pipeline statistics' do
include GraphqlHelpers
let_it_be(:project) { create(:project) }
+
let(:user) { create(:user) }
let(:fields) do
diff --git a/spec/requests/api/graphql/project_query_spec.rb b/spec/requests/api/graphql/project_query_spec.rb
index 54375d4de1d..e44a7efb354 100644
--- a/spec/requests/api/graphql/project_query_spec.rb
+++ b/spec/requests/api/graphql/project_query_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'getting project information' do
let_it_be(:group) { create(:group) }
let_it_be(:project, reload: true) { create(:project, :repository, group: group) }
let_it_be(:current_user) { create(:user) }
+
let(:project_fields) { all_graphql_fields_for('project'.to_s.classify, max_depth: 1) }
let(:query) do
diff --git a/spec/requests/api/graphql/query_spec.rb b/spec/requests/api/graphql/query_spec.rb
index 6bd0703c121..ecc7fffaef7 100644
--- a/spec/requests/api/graphql/query_spec.rb
+++ b/spec/requests/api/graphql/query_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Query' do
let_it_be(:project) { create(:project) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:developer) { create(:user) }
+
let(:current_user) { developer }
describe '.designManagement' do
@@ -15,6 +16,7 @@ RSpec.describe 'Query' do
let_it_be(:version) { create(:design_version, issue: issue) }
let_it_be(:design) { version.designs.first }
+
let(:query_result) { graphql_data.dig(*path) }
let(:query) { graphql_query_for(:design_management, nil, dm_fields) }
diff --git a/spec/requests/api/graphql/user/starred_projects_query_spec.rb b/spec/requests/api/graphql/user/starred_projects_query_spec.rb
index 6cb02068f2a..a8c087d1fbf 100644
--- a/spec/requests/api/graphql/user/starred_projects_query_spec.rb
+++ b/spec/requests/api/graphql/user/starred_projects_query_spec.rb
@@ -60,6 +60,7 @@ RSpec.describe 'Getting starredProjects of the user' do
context 'the current user is a member of a private project the user starred' do
let_it_be(:other_user) { create(:user) }
+
let(:current_user) { other_user }
before do
diff --git a/spec/requests/api/graphql/user_query_spec.rb b/spec/requests/api/graphql/user_query_spec.rb
index 60520906e87..59b805bb25b 100644
--- a/spec/requests/api/graphql/user_query_spec.rb
+++ b/spec/requests/api/graphql/user_query_spec.rb
@@ -402,6 +402,7 @@ RSpec.describe 'getting user information' do
context 'we request the groupMemberships' do
let_it_be(:membership_a) { create(:group_member, user: user) }
+
let(:group_memberships) { graphql_data_at(:user, :group_memberships, :nodes) }
let(:user_fields) { 'groupMemberships { nodes { id } }' }
@@ -424,6 +425,7 @@ RSpec.describe 'getting user information' do
context 'we request the projectMemberships' do
let_it_be(:membership_a) { create(:project_member, user: user) }
+
let(:project_memberships) { graphql_data_at(:user, :project_memberships, :nodes) }
let(:user_fields) { 'projectMemberships { nodes { id } }' }
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
index 463fca43cb5..7b081bb7568 100644
--- a/spec/requests/api/graphql_spec.rb
+++ b/spec/requests/api/graphql_spec.rb
@@ -315,6 +315,7 @@ RSpec.describe 'GraphQL' do
describe 'resolver complexity' do
let_it_be(:project) { create(:project, :public) }
+
let(:query) do
graphql_query_for(
'project',
@@ -350,6 +351,7 @@ RSpec.describe 'GraphQL' do
describe 'complexity limits' do
let_it_be(:project) { create(:project, :public) }
+
let!(:user) { create(:user) }
let(:query_fields) do
diff --git a/spec/requests/api/group_avatar_spec.rb b/spec/requests/api/group_avatar_spec.rb
index be5cfbc234c..50379d29b09 100644
--- a/spec/requests/api/group_avatar_spec.rb
+++ b/spec/requests/api/group_avatar_spec.rb
@@ -4,17 +4,35 @@ require 'spec_helper'
RSpec.describe API::GroupAvatar do
def avatar_path(group)
- "/groups/#{group.id}/avatar"
+ "/groups/#{ERB::Util.url_encode(group.full_path)}/avatar"
end
describe 'GET /groups/:id/avatar' do
context 'when the group is public' do
- it 'retrieves the avatar successfully' do
- group = create(:group, :public, :with_avatar)
+ let(:group) { create(:group, :public, :with_avatar) }
+ it 'retrieves the avatar successfully' do
get api(avatar_path(group))
expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Disposition'])
+ .to eq(%(attachment; filename="dk.png"; filename*=UTF-8''dk.png))
+ end
+
+ context 'when the avatar is in the object storage' do
+ before do
+ stub_uploads_object_storage(AvatarUploader)
+
+ group.avatar.migrate!(ObjectStorage::Store::REMOTE)
+ end
+
+ it 'redirects to the file in the object storage' do
+ get api(avatar_path(group))
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response.headers['Content-Disposition'])
+ .to eq(%(attachment; filename="dk.png"; filename*=UTF-8''dk.png))
+ end
end
context 'when the group does not have avatar' do
@@ -24,6 +42,18 @@ RSpec.describe API::GroupAvatar do
get api(avatar_path(group))
expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body)
+ .to eq(%({"message":"404 Avatar Not Found"}))
+ end
+ end
+
+ context 'when the group is a subgroup' do
+ it 'returns :ok' do
+ group = create(:group, :nested, :public, :with_avatar, name: 'g1.1')
+
+ get api(avatar_path(group))
+
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/requests/api/group_import_spec.rb b/spec/requests/api/group_import_spec.rb
index f632e49bf3a..efad6334518 100644
--- a/spec/requests/api/group_import_spec.rb
+++ b/spec/requests/api/group_import_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe API::GroupImport do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
+
let(:path) { '/groups/import' }
let(:file) { File.join('spec', 'fixtures', 'group_export.tar.gz') }
let(:export_path) { "#{Dir.tmpdir}/group_export_spec" }
diff --git a/spec/requests/api/group_labels_spec.rb b/spec/requests/api/group_labels_spec.rb
index c677e68b285..11738e3cba8 100644
--- a/spec/requests/api/group_labels_spec.rb
+++ b/spec/requests/api/group_labels_spec.rb
@@ -29,6 +29,32 @@ RSpec.describe API::GroupLabels do
let(:expected_labels) { [group_label1.name] }
it_behaves_like 'fetches labels'
+
+ context 'and is subscribed' do
+ before do
+ group_label1.subscribe(user)
+ end
+
+ it 'returns true' do
+ get api("/groups/#{group.id}/labels?search=#{group_label1.name}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response[0]['subscribed']).to be true
+ end
+ end
+
+ context 'and is unsubscribed' do
+ before do
+ group_label1.unsubscribe(user)
+ end
+
+ it 'returns false' do
+ get api("/groups/#{group.id}/labels?search=#{group_label1.name}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response[0]['subscribed']).to be false
+ end
+ end
end
context 'when the with_counts parameter is set' do
diff --git a/spec/requests/api/group_milestones_spec.rb b/spec/requests/api/group_milestones_spec.rb
index e3e0164e5a7..2312d35c815 100644
--- a/spec/requests/api/group_milestones_spec.rb
+++ b/spec/requests/api/group_milestones_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe API::GroupMilestones do
let_it_be(:group_member) { create(:group_member, group: group, user: user) }
let_it_be(:closed_milestone) { create(:closed_milestone, group: group, title: 'version1', description: 'closed milestone') }
let_it_be(:milestone) { create(:milestone, group: group, title: 'version2', description: 'open milestone') }
+
let(:route) { "/groups/#{group.id}/milestones" }
it_behaves_like 'group and project milestones', "/groups/:id/milestones"
diff --git a/spec/requests/api/group_packages_spec.rb b/spec/requests/api/group_packages_spec.rb
index 792aa2c1f20..a2b0b35c76a 100644
--- a/spec/requests/api/group_packages_spec.rb
+++ b/spec/requests/api/group_packages_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe API::GroupPackages do
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, namespace: group, name: 'project A') }
let_it_be(:user) { create(:user) }
+
let(:params) { {} }
subject { get api(url), params: params }
@@ -17,6 +18,7 @@ RSpec.describe API::GroupPackages do
context 'with sorting' do
let_it_be(:package1) { create(:npm_package, project: project, version: '3.1.0', name: "@#{project.root_namespace.path}/foo1") }
let_it_be(:package2) { create(:nuget_package, project: project, version: '2.0.4') }
+
let(:package3) { create(:maven_package, project: project, version: '1.1.1', name: 'zzz') }
before do
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 0a47b93773b..ad7a2e3b1fb 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe API::Groups do
let_it_be(:project3) { create(:project, namespace: group1, path: 'test', visibility_level: Gitlab::VisibilityLevel::PRIVATE) }
let_it_be(:archived_project) { create(:project, namespace: group1, archived: true) }
- before do
+ before_all do
group1.add_owner(user1)
group2.add_owner(user2)
end
@@ -255,13 +255,14 @@ RSpec.describe API::Groups do
end
context "when using sorting" do
- let(:group3) { create(:group, name: "a#{group1.name}", path: "z#{group1.path}") }
- let(:group4) { create(:group, name: "same-name", path: "y#{group1.path}") }
- let(:group5) { create(:group, name: "same-name") }
+ let_it_be(:group3) { create(:group, name: "a#{group1.name}", path: "z#{group1.path}") }
+ let_it_be(:group4) { create(:group, name: "same-name", path: "y#{group1.path}") }
+ let_it_be(:group5) { create(:group, name: "same-name") }
+
let(:response_groups) { json_response.map { |group| group['name'] } }
let(:response_groups_ids) { json_response.map { |group| group['id'] } }
- before do
+ before_all do
group3.add_owner(user1)
group4.add_owner(user1)
group5.add_owner(user1)
@@ -330,6 +331,44 @@ RSpec.describe API::Groups do
expect(response_groups_ids).to eq(Group.select { |group| group['name'] == 'same-name' }.map { |group| group['id'] }.sort)
end
+ context 'when searching with similarity ordering', :aggregate_failures do
+ let_it_be(:group6) { create(:group, name: 'same-name subgroup', parent: group4) }
+ let_it_be(:group7) { create(:group, name: 'same-name parent') }
+
+ let(:params) { { order_by: 'similarity', search: 'same-name' } }
+
+ before_all do
+ group6.add_owner(user1)
+ group7.add_owner(user1)
+ end
+
+ subject { get api('/groups', user1), params: params }
+
+ it 'sorts top level groups before subgroups with exact matches first' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(4)
+
+ expect(response_groups).to eq(['same-name', 'same-name parent', 'same-name subgroup', 'same-name'])
+ end
+
+ context 'when `search` parameter is not given' do
+ let(:params) { { order_by: 'similarity' } }
+
+ it 'sorts items ordered by name' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(6)
+
+ expect(response_groups).to eq(groups_visible_to_user(user1).order(:name).pluck(:name))
+ end
+ end
+ end
+
def groups_visible_to_user(user)
Group.where(id: user.authorized_groups.select(:id).reorder(nil))
end
@@ -451,6 +490,7 @@ RSpec.describe API::Groups do
expect(json_response['visibility']).to eq(Gitlab::VisibilityLevel.string_level(group1.visibility_level))
expect(json_response['avatar_url']).to eq(group1.avatar_url(only_path: false))
expect(json_response['share_with_group_lock']).to eq(group1.share_with_group_lock)
+ expect(json_response['prevent_sharing_groups_outside_hierarchy']).to eq(group2.namespace_settings.prevent_sharing_groups_outside_hierarchy)
expect(json_response['require_two_factor_authentication']).to eq(group1.require_two_factor_authentication)
expect(json_response['two_factor_grace_period']).to eq(group1.two_factor_grace_period)
expect(json_response['auto_devops_enabled']).to eq(group1.auto_devops_enabled)
@@ -661,6 +701,7 @@ RSpec.describe API::Groups do
project_creation_level: "noone",
subgroup_creation_level: "maintainer",
default_branch_protection: ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS,
+ prevent_sharing_groups_outside_hierarchy: true,
avatar: fixture_file_upload(file_path)
}
@@ -685,6 +726,7 @@ RSpec.describe API::Groups do
expect(json_response['shared_projects'].length).to eq(0)
expect(json_response['default_branch_protection']).to eq(::Gitlab::Access::MAINTAINER_PROJECT_ACCESS)
expect(json_response['avatar_url']).to end_with('dk.png')
+ expect(json_response['prevent_sharing_groups_outside_hierarchy']).to eq(true)
end
context 'updating the `default_branch_protection` attribute' do
@@ -755,6 +797,15 @@ RSpec.describe API::Groups do
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['visibility_level']).to contain_exactly('private is not allowed since there are sub-groups with higher visibility.')
end
+
+ it 'does not update prevent_sharing_groups_outside_hierarchy' do
+ put api("/groups/#{subgroup.id}", user3), params: { description: 'it works', prevent_sharing_groups_outside_hierarchy: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.keys).not_to include('prevent_sharing_groups_outside_hierarchy')
+ expect(subgroup.reload.prevent_sharing_groups_outside_hierarchy).to eq(false)
+ expect(json_response['description']).to eq('it works')
+ end
end
end
@@ -1381,6 +1432,7 @@ RSpec.describe API::Groups do
let_it_be(:sub_child_group1) { create(:group, parent: child_group1) }
let_it_be(:child_group2) { create(:group, :private, parent: group2) }
let_it_be(:sub_child_group2) { create(:group, :private, parent: child_group2) }
+
let(:response_groups) { json_response.map { |group| group['name'] } }
context 'when unauthenticated' do
diff --git a/spec/requests/api/helm_packages_spec.rb b/spec/requests/api/helm_packages_spec.rb
index 5871c0a5d5b..08b4489a6e3 100644
--- a/spec/requests/api/helm_packages_spec.rb
+++ b/spec/requests/api/helm_packages_spec.rb
@@ -9,55 +9,171 @@ RSpec.describe API::HelmPackages do
let_it_be_with_reload(:project) { create(:project, :public) }
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
+ let_it_be(:package) { create(:helm_package, project: project) }
- describe 'GET /api/v4/projects/:id/packages/helm/:channel/charts/:file_name.tgz' do
- let_it_be(:package) { create(:helm_package, project: project) }
-
- let(:channel) { package.package_files.first.helm_channel }
+ describe 'GET /api/v4/projects/:id/packages/helm/:channel/index.yaml' do
+ it_behaves_like 'handling helm chart index requests' do
+ let(:url) { "/projects/#{project.id}/packages/helm/#{package.package_files.first.helm_channel}/index.yaml" }
+ end
+ end
- let(:url) { "/projects/#{project.id}/packages/helm/#{channel}/charts/#{package.name}-#{package.version}.tgz" }
+ describe 'GET /api/v4/projects/:id/packages/helm/:channel/charts/:file_name.tgz' do
+ let(:url) { "/projects/#{project.id}/packages/helm/#{package.package_files.first.helm_channel}/charts/#{package.name}-#{package.version}.tgz" }
- subject { get api(url) }
+ subject { get api(url), headers: headers }
context 'with valid project' do
- where(:visibility, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- :public | :developer | true | true | 'process helm download content request' | :success
- :public | :guest | true | true | 'process helm download content request' | :success
- :public | :developer | true | false | 'rejects helm packages access' | :unauthorized
- :public | :guest | true | false | 'rejects helm packages access' | :unauthorized
- :public | :developer | false | true | 'process helm download content request' | :success
- :public | :guest | false | true | 'process helm download content request' | :success
- :public | :developer | false | false | 'rejects helm packages access' | :unauthorized
- :public | :guest | false | false | 'rejects helm packages access' | :unauthorized
- :public | :anonymous | false | true | 'process helm download content request' | :success
- :private | :developer | true | true | 'process helm download content request' | :success
- :private | :guest | true | true | 'rejects helm packages access' | :forbidden
- :private | :developer | true | false | 'rejects helm packages access' | :unauthorized
- :private | :guest | true | false | 'rejects helm packages access' | :unauthorized
- :private | :developer | false | true | 'rejects helm packages access' | :not_found
- :private | :guest | false | true | 'rejects helm packages access' | :not_found
- :private | :developer | false | false | 'rejects helm packages access' | :unauthorized
- :private | :guest | false | false | 'rejects helm packages access' | :unauthorized
- :private | :anonymous | false | true | 'rejects helm packages access' | :unauthorized
+ where(:visibility, :user_role, :shared_examples_name, :expected_status) do
+ :public | :guest | 'process helm download content request' | :success
+ :public | :not_a_member | 'process helm download content request' | :success
+ :public | :anonymous | 'process helm download content request' | :success
+ :private | :reporter | 'process helm download content request' | :success
+ :private | :guest | 'rejects helm packages access' | :forbidden
+ :private | :not_a_member | 'rejects helm packages access' | :not_found
+ :private | :anonymous | 'rejects helm packages access' | :unauthorized
end
with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, personal_access_token.token) }
let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace } }
- subject { get api(url), headers: headers }
-
before do
project.update!(visibility: visibility.to_s)
end
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status]
end
end
+ context 'when an invalid token is passed' do
+ let(:headers) { basic_auth_header(user.username, 'wrong') }
+
+ it_behaves_like 'returning response status', :unauthorized
+ end
+
it_behaves_like 'deploy token for package GET requests'
+ end
+
+ describe 'POST /api/v4/projects/:id/packages/helm/api/:channel/charts/authorize' do
+ include_context 'workhorse headers'
+
+ let(:channel) { 'stable' }
+ let(:url) { "/projects/#{project.id}/packages/helm/api/#{channel}/charts/authorize" }
+ let(:headers) { {} }
+
+ subject { post api(url), headers: headers }
+
+ context 'with valid project' do
+ where(:visibility_level, :user_role, :shared_examples_name, :expected_status) do
+ :public | :developer | 'process helm workhorse authorization' | :success
+ :public | :reporter | 'rejects helm packages access' | :forbidden
+ :public | :not_a_member | 'rejects helm packages access' | :forbidden
+ :public | :anonymous | 'rejects helm packages access' | :unauthorized
+ :private | :developer | 'process helm workhorse authorization' | :success
+ :private | :reporter | 'rejects helm packages access' | :forbidden
+ :private | :not_a_member | 'rejects helm packages access' | :not_found
+ :private | :anonymous | 'rejects helm packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, personal_access_token.token) }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+
+ before do
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value(visibility_level.to_s))
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status]
+ end
+ end
+
+ context 'when an invalid token is passed' do
+ let(:headers) { basic_auth_header(user.username, 'wrong') }
+
+ it_behaves_like 'returning response status', :unauthorized
+ end
+
+ it_behaves_like 'deploy token for package uploads'
+
+ it_behaves_like 'job token for package uploads', authorize_endpoint: true, accept_invalid_username: true do
+ let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
+ end
it_behaves_like 'rejects helm access with unknown project id'
end
+
+ describe 'POST /api/v4/projects/:id/packages/helm/api/:channel/charts' do
+ include_context 'workhorse headers'
+
+ let_it_be(:file_name) { 'package.tgz' }
+
+ let(:channel) { 'stable' }
+ let(:url) { "/projects/#{project.id}/packages/helm/api/#{channel}/charts" }
+ let(:headers) { {} }
+ let(:params) { { chart: temp_file(file_name) } }
+ let(:file_key) { :chart }
+ let(:send_rewritten_field) { true }
+ let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace } }
+
+ subject do
+ workhorse_finalize(
+ api(url),
+ method: :post,
+ file_key: file_key,
+ params: params,
+ headers: headers,
+ send_rewritten_field: send_rewritten_field
+ )
+ end
+
+ context 'with valid project' do
+ where(:visibility_level, :user_role, :shared_examples_name, :expected_status) do
+ :public | :developer | 'process helm upload' | :created
+ :public | :reporter | 'rejects helm packages access' | :forbidden
+ :public | :not_a_member | 'rejects helm packages access' | :forbidden
+ :public | :anonymous | 'rejects helm packages access' | :unauthorized
+ :private | :developer | 'process helm upload' | :created
+ :private | :guest | 'rejects helm packages access' | :forbidden
+ :private | :not_a_member | 'rejects helm packages access' | :not_found
+ :private | :anonymous | 'rejects helm packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, personal_access_token.token) }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+
+ before do
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value(visibility_level.to_s))
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status]
+ end
+ end
+
+ context 'when an invalid token is passed' do
+ let(:headers) { basic_auth_header(user.username, 'wrong') }
+
+ it_behaves_like 'returning response status', :unauthorized
+ end
+
+ it_behaves_like 'deploy token for package uploads'
+
+ it_behaves_like 'job token for package uploads', accept_invalid_username: true do
+ let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
+ end
+
+ it_behaves_like 'rejects helm access with unknown project id'
+
+ context 'file size above maximum limit' do
+ let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token).merge(workhorse_headers) }
+
+ before do
+ allow_next_instance_of(UploadedFile) do |uploaded_file|
+ allow(uploaded_file).to receive(:size).and_return(project.actual_limits.helm_max_file_size + 1)
+ end
+ end
+
+ it_behaves_like 'returning response status', :bad_request
+ end
+ end
end
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index ce0018d6d0d..8961f3177b6 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe API::Helpers do
include TermsHelper
let_it_be(:user, reload: true) { create(:user) }
+
let(:admin) { create(:admin) }
let(:key) { create(:key, user: user) }
diff --git a/spec/requests/api/import_bitbucket_server_spec.rb b/spec/requests/api/import_bitbucket_server_spec.rb
index 972b21ad2e0..2225f737f36 100644
--- a/spec/requests/api/import_bitbucket_server_spec.rb
+++ b/spec/requests/api/import_bitbucket_server_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe API::ImportBitbucketServer do
let(:base_uri) { "https://test:7990" }
- let(:user) { create(:user, bio: 'test') }
+ let(:user) { create(:user) }
let(:token) { "asdasd12345" }
let(:secret) { "sekrettt" }
let(:project_key) { 'TES' }
@@ -14,6 +14,7 @@ RSpec.describe API::ImportBitbucketServer do
describe "POST /import/bitbucket_server" do
context 'with no optional parameters' do
let_it_be(:project) { create(:project) }
+
let(:client) { double(BitbucketServer::Client) }
before do
@@ -48,6 +49,7 @@ RSpec.describe API::ImportBitbucketServer do
context 'with a new project name' do
let_it_be(:project) { create(:project, name: 'new-name') }
+
let(:client) { instance_double(BitbucketServer::Client) }
before do
@@ -83,6 +85,7 @@ RSpec.describe API::ImportBitbucketServer do
context 'with an invalid URL' do
let_it_be(:project) { create(:project, name: 'new-name') }
+
let(:client) { instance_double(BitbucketServer::Client) }
before do
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index 631698554f9..49756df61c6 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe API::Internal::Base do
let_it_be(:project, reload: true) { create(:project, :repository, :wiki_repo) }
let_it_be(:personal_snippet) { create(:personal_snippet, :repository, author: user) }
let_it_be(:project_snippet) { create(:project_snippet, :repository, author: user, project: project) }
+
let(:key) { create(:key, user: user) }
let(:secret_token) { Gitlab::Shell.secret_token }
let(:gl_repository) { "project-#{project.id}" }
@@ -1176,59 +1177,68 @@ RSpec.describe API::Internal::Base do
allow_any_instance_of(Gitlab::Identifier).to receive(:identify).and_return(user)
end
- context 'with Project' do
- it 'executes PostReceiveService' do
- message = <<~MESSAGE.strip
- To create a merge request for #{branch_name}, visit:
- http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=#{branch_name}
- MESSAGE
+ shared_examples 'runs post-receive hooks' do
+ let(:gl_repository) { container.repository.gl_repository }
+ let(:messages) { [] }
+ it 'executes PostReceiveService' do
subject
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq({
- 'messages' => [{ 'message' => message, 'type' => 'basic' }],
+ 'messages' => messages,
'reference_counter_decreased' => true
})
end
+ it 'tries to notify that the container has moved' do
+ expect(Gitlab::Checks::ContainerMoved).to receive(:fetch_message).with(user, container.repository)
+
+ subject
+ end
+
it_behaves_like 'storing arguments in the application context' do
- let(:expected_params) { { user: user.username, project: project.full_path } }
+ let(:expected_params) { expected_context }
end
end
- context 'with PersonalSnippet' do
- let(:gl_repository) { "snippet-#{personal_snippet.id}" }
-
- it 'executes PostReceiveService' do
- subject
+ context 'with Project' do
+ it_behaves_like 'runs post-receive hooks' do
+ let(:container) { project }
+ let(:expected_context) { { user: user.username, project: project.full_path } }
- expect(json_response).to eq({
- 'messages' => [],
- 'reference_counter_decreased' => true
- })
+ let(:messages) do
+ [
+ {
+ 'message' => <<~MESSAGE.strip,
+ To create a merge request for #{branch_name}, visit:
+ http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=#{branch_name}
+ MESSAGE
+ 'type' => 'basic'
+ }
+ ]
+ end
end
+ end
- it_behaves_like 'storing arguments in the application context' do
- let(:expected_params) { { user: key.user.username } }
- let(:gl_repository) { "snippet-#{personal_snippet.id}" }
+ context 'with PersonalSnippet' do
+ it_behaves_like 'runs post-receive hooks' do
+ let(:container) { personal_snippet }
+ let(:expected_context) { { user: key.user.username } }
end
end
context 'with ProjectSnippet' do
- let(:gl_repository) { "snippet-#{project_snippet.id}" }
-
- it 'executes PostReceiveService' do
- subject
-
- expect(json_response).to eq({
- 'messages' => [],
- 'reference_counter_decreased' => true
- })
+ it_behaves_like 'runs post-receive hooks' do
+ let(:container) { project_snippet }
+ let(:expected_context) { { user: key.user.username, project: project_snippet.project.full_path } }
end
+ end
- it_behaves_like 'storing arguments in the application context' do
- let(:expected_params) { { user: key.user.username, project: project_snippet.project.full_path } }
- let(:gl_repository) { "snippet-#{project_snippet.id}" }
+ context 'with ProjectWiki' do
+ it_behaves_like 'runs post-receive hooks' do
+ let(:container) { project.wiki }
+ let(:expected_context) { { user: key.user.username, project: project.full_path } }
end
end
@@ -1236,7 +1246,7 @@ RSpec.describe API::Internal::Base do
it 'does not try to notify that project moved' do
allow_any_instance_of(Gitlab::Identifier).to receive(:identify).and_return(nil)
- expect(Gitlab::Checks::ProjectMoved).not_to receive(:fetch_message)
+ expect(Gitlab::Checks::ContainerMoved).not_to receive(:fetch_message)
subject
@@ -1244,33 +1254,17 @@ RSpec.describe API::Internal::Base do
end
end
- context 'when project is nil' do
- context 'with Project' do
- let(:gl_repository) { 'project-foo' }
-
- it 'does not try to notify that project moved' do
- allow(Gitlab::GlRepository).to receive(:parse).and_return([nil, nil, Gitlab::GlRepository::PROJECT])
-
- expect(Gitlab::Checks::ProjectMoved).not_to receive(:fetch_message)
-
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context 'with PersonalSnippet' do
- let(:gl_repository) { "snippet-#{personal_snippet.id}" }
+ context 'when container is nil' do
+ let(:gl_repository) { 'project-foo' }
- it 'does not try to notify that project moved' do
- allow(Gitlab::GlRepository).to receive(:parse).and_return([personal_snippet, nil, Gitlab::GlRepository::SNIPPET])
+ it 'does not try to notify that project moved' do
+ allow(Gitlab::GlRepository).to receive(:parse).and_return([nil, nil, Gitlab::GlRepository::PROJECT])
- expect(Gitlab::Checks::ProjectMoved).not_to receive(:fetch_message)
+ expect(Gitlab::Checks::ContainerMoved).not_to receive(:fetch_message)
- subject
+ subject
- expect(response).to have_gitlab_http_status(:ok)
- end
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -1378,29 +1372,6 @@ RSpec.describe API::Internal::Base do
end
end
- describe 'GET /internal/geo_proxy' do
- subject { get api('/internal/geo_proxy'), params: { secret_token: secret_token } }
-
- context 'with valid auth' do
- it 'returns empty data' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to be_empty
- end
- end
-
- context 'with invalid auth' do
- let(:secret_token) { 'invalid_token' }
-
- it 'returns unauthorized' do
- subject
-
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
- end
- end
-
def lfs_auth_project(project)
post(
api("/internal/lfs_authenticate"),
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index 7a2cec974b9..2acf6951d50 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -133,36 +133,6 @@ RSpec.describe API::Internal::Kubernetes do
)
)
end
-
- context 'on GitLab.com' do
- before do
- allow(::Gitlab).to receive(:com?).and_return(true)
- end
-
- context 'kubernetes_agent_on_gitlab_com feature flag disabled' do
- before do
- stub_feature_flags(kubernetes_agent_on_gitlab_com: false)
- end
-
- it 'returns 403' do
- send_request(headers: { 'Authorization' => "Bearer #{agent_token.token}" })
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'kubernetes_agent_on_gitlab_com feature flag enabled' do
- before do
- stub_feature_flags(kubernetes_agent_on_gitlab_com: agent_token.agent.project)
- end
-
- it 'returns success' do
- send_request(headers: { 'Authorization' => "Bearer #{agent_token.token}" })
-
- expect(response).to have_gitlab_http_status(:success)
- end
- end
- end
end
end
@@ -214,36 +184,6 @@ RSpec.describe API::Internal::Kubernetes do
expect(response).to have_gitlab_http_status(:not_found)
end
end
-
- context 'on GitLab.com' do
- before do
- allow(::Gitlab).to receive(:com?).and_return(true)
- end
-
- context 'kubernetes_agent_on_gitlab_com feature flag disabled' do
- before do
- stub_feature_flags(kubernetes_agent_on_gitlab_com: false)
- end
-
- it 'returns 403' do
- send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'kubernetes_agent_on_gitlab_com feature flag enabled' do
- before do
- stub_feature_flags(kubernetes_agent_on_gitlab_com: agent_token.agent.project)
- end
-
- it 'returns success' do
- send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
-
- expect(response).to have_gitlab_http_status(:success)
- end
- end
- end
end
context 'project is private' do
diff --git a/spec/requests/api/labels_spec.rb b/spec/requests/api/labels_spec.rb
index 26377c40b73..4b6868f42bc 100644
--- a/spec/requests/api/labels_spec.rb
+++ b/spec/requests/api/labels_spec.rb
@@ -200,6 +200,36 @@ RSpec.describe API::Labels do
expect(json_response.map { |l| l['name'] }).to match_array([group_label.name, priority_label.name, label1.name])
end
+ context 'when search param is provided' do
+ context 'and user is subscribed' do
+ before do
+ priority_label.subscribe(user)
+ end
+
+ it 'returns subscribed true' do
+ get api("/projects/#{project.id}/labels?search=#{priority_label.name}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response[0]['name']).to eq(priority_label.name)
+ expect(json_response[0]['subscribed']).to be true
+ end
+ end
+
+ context 'and user is not subscribed' do
+ before do
+ priority_label.unsubscribe(user)
+ end
+
+ it 'returns subscribed false' do
+ get api("/projects/#{project.id}/labels?search=#{priority_label.name}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response[0]['name']).to eq(priority_label.name)
+ expect(json_response[0]['subscribed']).to be false
+ end
+ end
+ end
+
context 'when the with_counts parameter is set' do
before do
create(:labeled_issue, project: project, labels: [group_label], author: user)
diff --git a/spec/requests/api/lint_spec.rb b/spec/requests/api/lint_spec.rb
index 57aa0f36192..7fe516d3daa 100644
--- a/spec/requests/api/lint_spec.rb
+++ b/spec/requests/api/lint_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe API::Lint do
context 'when authenticated' do
let_it_be(:api_user) { create(:user) }
+
it 'returns authorized' do
post api('/ci/lint', api_user), params: { content: 'content' }
@@ -43,6 +44,7 @@ RSpec.describe API::Lint do
context 'when authenticated' do
let_it_be(:api_user) { create(:user) }
+
it 'returns authentication success' do
post api('/ci/lint', api_user), params: { content: 'content' }
diff --git a/spec/requests/api/markdown_spec.rb b/spec/requests/api/markdown_spec.rb
index 35d91963ac9..faf671d350f 100644
--- a/spec/requests/api/markdown_spec.rb
+++ b/spec/requests/api/markdown_spec.rb
@@ -52,6 +52,7 @@ RSpec.describe API::Markdown do
context "when arguments are valid" do
let_it_be(:project) { create(:project) }
let_it_be(:issue) { create(:issue, project: project) }
+
let(:issue_url) { "http://#{Gitlab.config.gitlab.host}/#{issue.project.namespace.path}/#{issue.project.path}/-/issues/#{issue.iid}" }
let(:text) { ":tada: Hello world! :100: #{issue.to_reference}" }
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 038c3bc552a..4b5fc57571b 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1188,16 +1188,14 @@ RSpec.describe API::MergeRequests do
expect(json_response['target_project_id']).to eq(merge_request.target_project.id)
expect(json_response['draft']).to be false
expect(json_response['work_in_progress']).to be false
- expect(json_response['merge_when_pipeline_succeeds']).to be_falsy
+ expect(json_response['merge_when_pipeline_succeeds']).to be false
expect(json_response['merge_status']).to eq('can_be_merged')
- expect(json_response['should_close_merge_request']).to be_falsy
- expect(json_response['force_close_merge_request']).to be_falsy
expect(json_response['changes_count']).to eq(merge_request.merge_request_diff.real_size)
expect(json_response['merge_error']).to eq(merge_request.merge_error)
expect(json_response['user']['can_merge']).to be_truthy
expect(json_response).not_to include('rebase_in_progress')
- expect(json_response['first_contribution']).to be_falsy
- expect(json_response['has_conflicts']).to be_falsy
+ expect(json_response['first_contribution']).to be false
+ expect(json_response['has_conflicts']).to be false
expect(json_response['blocking_discussions_resolved']).to be_truthy
expect(json_response['references']['short']).to eq("!#{merge_request.iid}")
expect(json_response['references']['relative']).to eq("!#{merge_request.iid}")
@@ -1396,7 +1394,7 @@ RSpec.describe API::MergeRequests do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user2)
- expect(json_response['user']['can_merge']).to be_falsy
+ expect(json_response['user']['can_merge']).to be false
end
it 'returns `checking` as its merge_status instead of `cannot_be_merged_rechecking`' do
@@ -2009,6 +2007,7 @@ RSpec.describe API::MergeRequests do
context 'forked projects', :sidekiq_might_not_need_inline do
let_it_be(:user2) { create(:user) }
+
let(:project) { create(:project, :public, :repository) }
let!(:forked_project) { fork_project(project, user2, repository: true) }
let!(:unrelated_project) { create(:project, namespace: create(:user).namespace, creator_id: user2.id) }
@@ -2664,7 +2663,7 @@ RSpec.describe API::MergeRequests do
)
expect(response).to have_gitlab_http_status(:ok)
- expect(source_repository.branch_exists?(source_branch)).to be_falsy
+ expect(source_repository.branch_exists?(source_branch)).to be false
end
end
@@ -2682,7 +2681,7 @@ RSpec.describe API::MergeRequests do
)
expect(response).to have_gitlab_http_status(:ok)
- expect(source_repository.branch_exists?(source_branch)).to be_falsy
+ expect(source_repository.branch_exists?(source_branch)).to be false
end
it 'does not remove the source branch' do
@@ -2804,7 +2803,7 @@ RSpec.describe API::MergeRequests do
it 'sets to true' do
merge_request.update!(merge_params: { 'force_remove_source_branch' => false } )
- expect(merge_request.force_remove_source_branch?).to be_falsey
+ expect(merge_request.force_remove_source_branch?).to be false
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: { state_event: "close", remove_source_branch: true }
@@ -2889,6 +2888,7 @@ RSpec.describe API::MergeRequests do
context "forked projects" do
let_it_be(:user2) { create(:user) }
+
let(:project) { create(:project, :public, :repository) }
let!(:forked_project) { fork_project(project, user2, repository: true) }
let(:merge_request) do
diff --git a/spec/requests/api/metrics/dashboard/annotations_spec.rb b/spec/requests/api/metrics/dashboard/annotations_spec.rb
index 07de2925ee2..79a38702354 100644
--- a/spec/requests/api/metrics/dashboard/annotations_spec.rb
+++ b/spec/requests/api/metrics/dashboard/annotations_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe API::Metrics::Dashboard::Annotations do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :private, :repository, namespace: user.namespace) }
let_it_be(:environment) { create(:environment, project: project) }
+
let(:dashboard) { 'config/prometheus/common_metrics.yml' }
let(:starting_at) { Time.now.iso8601 }
let(:ending_at) { 1.hour.from_now.iso8601 }
diff --git a/spec/requests/api/metrics/user_starred_dashboards_spec.rb b/spec/requests/api/metrics/user_starred_dashboards_spec.rb
index 533dff05f27..7f019e1226a 100644
--- a/spec/requests/api/metrics/user_starred_dashboards_spec.rb
+++ b/spec/requests/api/metrics/user_starred_dashboards_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe API::Metrics::UserStarredDashboards do
let_it_be(:dashboard_yml) { fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml') }
let_it_be(:dashboard) { '.gitlab/dashboards/find&seek.yml' }
let_it_be(:project) { create(:project, :private, :repository, :custom_repo, namespace: user.namespace, files: { dashboard => dashboard_yml }) }
+
let(:url) { "/projects/#{project.id}/metrics/user_starred_dashboards" }
let(:params) do
{
diff --git a/spec/requests/api/nuget_project_packages_spec.rb b/spec/requests/api/nuget_project_packages_spec.rb
index 572736cfc86..f608f296295 100644
--- a/spec/requests/api/nuget_project_packages_spec.rb
+++ b/spec/requests/api/nuget_project_packages_spec.rb
@@ -92,9 +92,10 @@ RSpec.describe API::NugetProjectPackages do
describe 'GET /api/v4/projects/:id/packages/nuget/download/*package_name/*package_version/*package_filename' do
let_it_be(:package_name) { 'Dummy.Package' }
- let_it_be(:package) { create(:nuget_package, project: project, name: package_name) }
+ let_it_be(:package) { create(:nuget_package, :with_symbol_package, project: project, name: package_name) }
- let(:url) { "/projects/#{target.id}/packages/nuget/download/#{package.name}/#{package.version}/#{package.name}.#{package.version}.nupkg" }
+ let(:format) { 'nupkg' }
+ let(:url) { "/projects/#{target.id}/packages/nuget/download/#{package.name}/#{package.version}/#{package.name}.#{package.version}.#{format}" }
subject { get api(url) }
@@ -154,56 +155,14 @@ RSpec.describe API::NugetProjectPackages do
subject { put api(url), headers: headers }
- context 'with valid project' do
- where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process nuget workhorse authorization' | :success
- 'PUBLIC' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :guest | false | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'process nuget workhorse authorization' | :success
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
- let(:headers) { user_headers.merge(workhorse_headers) }
-
- before do
- update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
- end
-
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
- end
-
- it_behaves_like 'deploy token for package uploads'
-
- it_behaves_like 'job token for package uploads', authorize_endpoint: true do
- let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
- end
-
- it_behaves_like 'rejects nuget access with unknown target id'
-
- it_behaves_like 'rejects nuget access with invalid target id'
+ it_behaves_like 'nuget authorize upload endpoint'
end
describe 'PUT /api/v4/projects/:id/packages/nuget' do
include_context 'workhorse headers'
let_it_be(:file_name) { 'package.nupkg' }
+
let(:url) { "/projects/#{target.id}/packages/nuget" }
let(:headers) { {} }
let(:params) { { package: temp_file(file_name) } }
@@ -221,63 +180,43 @@ RSpec.describe API::NugetProjectPackages do
)
end
- context 'with valid project' do
- where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process nuget upload' | :created
- 'PUBLIC' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :guest | false | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'process nuget upload' | :created
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
- let(:headers) { user_headers.merge(workhorse_headers) }
- let(:snowplow_gitlab_standard_context) { { project: project, user: user, namespace: project.namespace } }
-
- before do
- update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
- end
+ it_behaves_like 'nuget upload endpoint'
+ end
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
- end
+ describe 'PUT /api/v4/projects/:id/packages/nuget/symbolpackage/authorize' do
+ include_context 'workhorse headers'
- it_behaves_like 'deploy token for package uploads'
+ let(:url) { "/projects/#{target.id}/packages/nuget/symbolpackage/authorize" }
+ let(:headers) { {} }
- it_behaves_like 'job token for package uploads' do
- let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
- end
+ subject { put api(url), headers: headers }
- it_behaves_like 'rejects nuget access with unknown target id'
+ it_behaves_like 'nuget authorize upload endpoint'
+ end
- it_behaves_like 'rejects nuget access with invalid target id'
+ describe 'PUT /api/v4/projects/:id/packages/nuget/symbolpackage' do
+ include_context 'workhorse headers'
- context 'file size above maximum limit' do
- let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token).merge(workhorse_headers) }
+ let_it_be(:file_name) { 'package.snupkg' }
- before do
- allow_next_instance_of(UploadedFile) do |uploaded_file|
- allow(uploaded_file).to receive(:size).and_return(project.actual_limits.nuget_max_file_size + 1)
- end
- end
+ let(:url) { "/projects/#{target.id}/packages/nuget/symbolpackage" }
+ let(:headers) { {} }
+ let(:params) { { package: temp_file(file_name) } }
+ let(:file_key) { :package }
+ let(:send_rewritten_field) { true }
- it_behaves_like 'returning response status', :bad_request
+ subject do
+ workhorse_finalize(
+ api(url),
+ method: :put,
+ file_key: file_key,
+ params: params,
+ headers: headers,
+ send_rewritten_field: send_rewritten_field
+ )
end
+
+ it_behaves_like 'nuget upload endpoint', symbol_package: true
end
def update_visibility_to(visibility)
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index 2932447f663..8341fac3191 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -137,6 +137,7 @@ project_setting:
- has_confluence
- has_vulnerabilities
- prevent_merge_without_jira_issue
+ - previous_default_branch
- project_id
- push_rule_id
- show_default_award_emojis
diff --git a/spec/requests/api/project_clusters_spec.rb b/spec/requests/api/project_clusters_spec.rb
index f784f677c25..253b61e5865 100644
--- a/spec/requests/api/project_clusters_spec.rb
+++ b/spec/requests/api/project_clusters_spec.rb
@@ -335,6 +335,7 @@ RSpec.describe API::ProjectClusters do
let(:namespace) { 'new-namespace' }
let(:platform_kubernetes_attributes) { { namespace: namespace } }
let_it_be(:management_project) { create(:project, namespace: project.namespace) }
+
let(:management_project_id) { management_project.id }
let(:update_params) do
diff --git a/spec/requests/api/project_container_repositories_spec.rb b/spec/requests/api/project_container_repositories_spec.rb
index 695d2c3fe2c..1170a9ba6cb 100644
--- a/spec/requests/api/project_container_repositories_spec.rb
+++ b/spec/requests/api/project_container_repositories_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe API::ProjectContainerRepositories do
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
let_it_be(:guest) { create(:user) }
+
let(:root_repository) { create(:container_repository, :root, project: project) }
let(:test_repository) { create(:container_repository, project: project) }
let(:root_repository2) { create(:container_repository, :root, project: project2) }
diff --git a/spec/requests/api/project_milestones_spec.rb b/spec/requests/api/project_milestones_spec.rb
index 71535e66353..606279ec20a 100644
--- a/spec/requests/api/project_milestones_spec.rb
+++ b/spec/requests/api/project_milestones_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe API::ProjectMilestones do
let_it_be(:group) { create(:group, :private, parent: ancestor_group) }
let_it_be(:ancestor_group_milestone) { create(:milestone, group: ancestor_group) }
let_it_be(:group_milestone) { create(:milestone, group: group) }
+
let(:params) { { include_parent_milestones: true } }
shared_examples 'listing all milestones' do
diff --git a/spec/requests/api/project_snippets_spec.rb b/spec/requests/api/project_snippets_spec.rb
index 6a9cf6e16e2..8cd1f15a88d 100644
--- a/spec/requests/api/project_snippets_spec.rb
+++ b/spec/requests/api/project_snippets_spec.rb
@@ -138,7 +138,7 @@ RSpec.describe API::ProjectSnippets do
aggregate_failures do
expect(snippet.repository.exists?).to be_truthy
- blob = snippet.repository.blob_at('master', file_path)
+ blob = snippet.repository.blob_at(snippet.default_branch, file_path)
expect(blob.data).to eq file_content
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 529a75af122..a869866c698 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -164,24 +164,21 @@ RSpec.describe API::Projects do
end
end
- shared_examples_for 'projects response without N + 1 queries' do
+ shared_examples_for 'projects response without N + 1 queries' do |threshold|
+ let(:additional_project) { create(:project, :public) }
+
it 'avoids N + 1 queries' do
+ get api('/projects', current_user)
+
control = ActiveRecord::QueryRecorder.new do
get api('/projects', current_user)
end
- if defined?(additional_project)
- additional_project
- else
- create(:project, :public)
- end
+ additional_project
- # TODO: We're currently querying to detect if a project is a fork
- # in 2 ways. Lower this back to 8 when `ForkedProjectLink` relation is
- # removed
expect do
get api('/projects', current_user)
- end.not_to exceed_query_limit(control).with_threshold(9)
+ end.not_to exceed_query_limit(control).with_threshold(threshold)
end
end
@@ -194,7 +191,7 @@ RSpec.describe API::Projects do
let(:projects) { [project] }
end
- it_behaves_like 'projects response without N + 1 queries' do
+ it_behaves_like 'projects response without N + 1 queries', 1 do
let(:current_user) { nil }
end
end
@@ -206,7 +203,7 @@ RSpec.describe API::Projects do
let(:projects) { user_projects }
end
- it_behaves_like 'projects response without N + 1 queries' do
+ it_behaves_like 'projects response without N + 1 queries', 0 do
let(:current_user) { user }
end
@@ -215,7 +212,7 @@ RSpec.describe API::Projects do
create(:project, :public, group: create(:group))
end
- it_behaves_like 'projects response without N + 1 queries' do
+ it_behaves_like 'projects response without N + 1 queries', 0 do
let(:current_user) { user }
let(:additional_project) { create(:project, :public, group: create(:group)) }
end
@@ -233,20 +230,6 @@ RSpec.describe API::Projects do
expect(project_response['container_registry_enabled']).to eq(false)
end
- it 'reads projects.container_registry_enabled when read_container_registry_access_level is disabled' do
- stub_feature_flags(read_container_registry_access_level: false)
-
- project.project_feature.update!(container_registry_access_level: ProjectFeature::DISABLED)
- project.update_column(:container_registry_enabled, true)
-
- get api('/projects', user)
- project_response = json_response.find { |p| p['id'] == project.id }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to be_an Array
- expect(project_response['container_registry_enabled']).to eq(true)
- end
-
it 'includes project topics' do
get api('/projects', user)
@@ -386,7 +369,7 @@ RSpec.describe API::Projects do
end
context 'when external issue tracker is enabled' do
- let!(:jira_service) { create(:jira_service, project: project) }
+ let!(:jira_integration) { create(:jira_integration, project: project) }
it 'includes open_issues_count' do
get api('/projects', user)
@@ -880,7 +863,7 @@ RSpec.describe API::Projects do
get api(url, current_user), params: params
link = response.header['Link']
- url = link&.match(/<[^>]+(\/projects\?[^>]+)>; rel="next"/) do |match|
+ url = link&.match(%r{<[^>]+(/projects\?[^>]+)>; rel="next"}) do |match|
match[1]
end
@@ -1016,7 +999,8 @@ RSpec.describe API::Projects do
request_access_enabled: true,
only_allow_merge_if_all_discussions_are_resolved: false,
ci_config_path: 'a/custom/path',
- merge_method: 'ff'
+ merge_method: 'ff',
+ squash_option: 'always'
}).tap do |attrs|
attrs[:operations_access_level] = 'disabled'
attrs[:analytics_access_level] = 'disabled'
@@ -2464,6 +2448,14 @@ RSpec.describe API::Projects do
describe 'GET /projects/:id/users' do
shared_examples_for 'project users response' do
+ let(:reporter_1) { create(:user) }
+ let(:reporter_2) { create(:user) }
+
+ before do
+ project.add_reporter(reporter_1)
+ project.add_reporter(reporter_2)
+ end
+
it 'returns the project users' do
get api("/projects/#{project.id}/users", current_user)
@@ -2472,12 +2464,15 @@ RSpec.describe API::Projects do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.size).to eq(1)
+ expect(json_response.size).to eq(3)
first_user = json_response.first
expect(first_user['username']).to eq(user.username)
expect(first_user['name']).to eq(user.name)
expect(first_user.keys).to include(*%w[name username id state avatar_url web_url])
+
+ ids = json_response.map { |raw_user| raw_user['id'] }
+ expect(ids).to eq([user.id, reporter_1.id, reporter_2.id])
end
end
@@ -2490,9 +2485,26 @@ RSpec.describe API::Projects do
context 'when authenticated' do
context 'valid request' do
- it_behaves_like 'project users response' do
- let(:project) { project4 }
- let(:current_user) { user4 }
+ context 'when sort_by_project_authorizations_user_id FF is off' do
+ before do
+ stub_feature_flags(sort_by_project_users_by_project_authorizations_user_id: false)
+ end
+
+ it_behaves_like 'project users response' do
+ let(:project) { project4 }
+ let(:current_user) { user4 }
+ end
+ end
+
+ context 'when sort_by_project_authorizations_user_id FF is on' do
+ before do
+ stub_feature_flags(sort_by_project_users_by_project_authorizations_user_id: true)
+ end
+
+ it_behaves_like 'project users response' do
+ let(:project) { project4 }
+ let(:current_user) { user4 }
+ end
end
end
@@ -3125,6 +3137,29 @@ RSpec.describe API::Projects do
expect(json_response['topics']).to eq(%w[topic2])
end
+
+ it 'updates squash_option' do
+ project3.update!(squash_option: 'always')
+
+ project_param = { squash_option: "default_on" }
+
+ expect { put api("/projects/#{project3.id}", user), params: project_param }
+ .to change { project3.reload.squash_option }
+ .from('always')
+ .to('default_on')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['squash_option']).to eq("default_on")
+ end
+
+ it 'does not update an invalid squash_option' do
+ project_param = { squash_option: "jawn" }
+
+ expect { put api("/projects/#{project3.id}", user), params: project_param }
+ .not_to change { project3.reload.squash_option }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
context 'when authenticated as project maintainer' do
diff --git a/spec/requests/api/pypi_packages_spec.rb b/spec/requests/api/pypi_packages_spec.rb
index 86925e6a0ba..e66326db2a2 100644
--- a/spec/requests/api/pypi_packages_spec.rb
+++ b/spec/requests/api/pypi_packages_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe API::PypiPackages do
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
+
let(:headers) { {} }
context 'simple API endpoint' do
@@ -117,6 +118,7 @@ RSpec.describe API::PypiPackages do
include_context 'workhorse headers'
let_it_be(:file_name) { 'package.whl' }
+
let(:url) { "/projects/#{project.id}/packages/pypi" }
let(:headers) { {} }
let(:requires_python) { '>=3.7' }
diff --git a/spec/requests/api/release/links_spec.rb b/spec/requests/api/release/links_spec.rb
index c03dd0331cf..00326426af5 100644
--- a/spec/requests/api/release/links_spec.rb
+++ b/spec/requests/api/release/links_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe API::Release::Links do
let(:project) { create(:project, :repository, :private) }
let(:maintainer) { create(:user) }
+ let(:developer) { create(:user) }
let(:reporter) { create(:user) }
let(:non_project_member) { create(:user) }
let(:commit) { create(:commit, project: project) }
@@ -18,6 +19,7 @@ RSpec.describe API::Release::Links do
before do
project.add_maintainer(maintainer)
+ project.add_developer(developer)
project.add_reporter(reporter)
project.repository.add_tag(maintainer, 'v0.1', commit.id)
@@ -196,6 +198,28 @@ RSpec.describe API::Release::Links do
expect(response).to match_response_schema('release/link')
end
+ context 'with protected tag' do
+ context 'when user has access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
+
+ it 'accepts the request' do
+ post api("/projects/#{project.id}/releases/v0.1/assets/links", developer), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+
+ context 'when user does not have access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
+
+ it 'forbids the request' do
+ post api("/projects/#{project.id}/releases/v0.1/assets/links", developer), params: params
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
context 'when name is empty' do
let(:params) do
{
@@ -290,6 +314,28 @@ RSpec.describe API::Release::Links do
expect(response).to match_response_schema('release/link')
end
+ context 'with protected tag' do
+ context 'when user has access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
+
+ it 'accepts the request' do
+ put api("/projects/#{project.id}/releases/v0.1/assets/links/#{release_link.id}", developer), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when user does not have access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
+
+ it 'forbids the request' do
+ put api("/projects/#{project.id}/releases/v0.1/assets/links/#{release_link.id}", developer), params: params
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
context 'when params is empty' do
let(:params) { {} }
@@ -365,6 +411,28 @@ RSpec.describe API::Release::Links do
expect(response).to match_response_schema('release/link')
end
+ context 'with protected tag' do
+ context 'when user has access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
+
+ it 'accepts the request' do
+ delete api("/projects/#{project.id}/releases/v0.1/assets/links/#{release_link.id}", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when user does not have access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
+
+ it 'forbids the request' do
+ delete api("/projects/#{project.id}/releases/v0.1/assets/links/#{release_link.id}", developer)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
context 'when there are no corresponding release link' do
let!(:release_link) { }
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 81a4fcdbcac..03e0954e5ab 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -463,9 +463,23 @@ RSpec.describe API::Releases do
end
context 'when specified tag is not found in the project' do
- it 'cannot find the release entry' do
+ it 'returns 404 for maintainer' do
get api("/projects/#{project.id}/releases/non_exist_tag", maintainer)
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Not Found')
+ end
+
+ it 'returns project not found for no user' do
+ get api("/projects/#{project.id}/releases/non_exist_tag", nil)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Project Not Found')
+ end
+
+ it 'returns forbidden for guest' do
+ get api("/projects/#{project.id}/releases/non_existing_tag", guest)
+
expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -662,6 +676,28 @@ RSpec.describe API::Releases do
end.not_to change { Project.find_by_id(project.id).repository.tag_count }
end
+ context 'with protected tag' do
+ context 'when user has access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
+
+ it 'accepts the request' do
+ post api("/projects/#{project.id}/releases", developer), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+
+ context 'when user does not have access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
+
+ it 'forbids the request' do
+ post api("/projects/#{project.id}/releases", developer), params: params
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
context 'when user is a reporter' do
it 'forbids the request' do
post api("/projects/#{project.id}/releases", reporter), params: params
@@ -1000,6 +1036,28 @@ RSpec.describe API::Releases do
expect(project.releases.last.released_at).to eq('2015-10-10T05:00:00Z')
end
+ context 'with protected tag' do
+ context 'when user has access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
+
+ it 'accepts the request' do
+ put api("/projects/#{project.id}/releases/v0.1", developer), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when user does not have access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
+
+ it 'forbids the request' do
+ put api("/projects/#{project.id}/releases/v0.1", developer), params: params
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
context 'when user tries to update sha' do
let(:params) { { sha: 'xxx' } }
@@ -1180,6 +1238,28 @@ RSpec.describe API::Releases do
expect(response).to match_response_schema('public_api/v4/release')
end
+ context 'with protected tag' do
+ context 'when user has access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
+
+ it 'accepts the request' do
+ delete api("/projects/#{project.id}/releases/v0.1", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when user does not have access to the protected tag' do
+ let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
+
+ it 'forbids the request' do
+ delete api("/projects/#{project.id}/releases/v0.1", developer)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
context 'when there are no corresponding releases' do
let!(:release) { }
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index 1b96efeca22..d019e89e0b4 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -477,6 +477,17 @@ RSpec.describe API::Repositories do
let(:request) { get api(route, guest) }
end
end
+
+ context 'api_caching_rate_limit_repository_compare is disabled' do
+ before do
+ stub_feature_flags(api_caching_rate_limit_repository_compare: false)
+ end
+
+ it_behaves_like 'repository compare' do
+ let(:project) { create(:project, :public, :repository) }
+ let(:current_user) { nil }
+ end
+ end
end
describe 'GET /projects/:id/repository/contributors' do
diff --git a/spec/requests/api/resource_access_tokens_spec.rb b/spec/requests/api/resource_access_tokens_spec.rb
index 1a3c805fe9f..23061ab4bf0 100644
--- a/spec/requests/api/resource_access_tokens_spec.rb
+++ b/spec/requests/api/resource_access_tokens_spec.rb
@@ -38,6 +38,7 @@ RSpec.describe API::ResourceAccessTokens do
expect(api_get_token["name"]).to eq(token.name)
expect(api_get_token["scopes"]).to eq(token.scopes)
+ expect(api_get_token["access_level"]).to eq(project.team.max_member_access(token.user.id))
expect(api_get_token["expires_at"]).to eq(token.expires_at.to_date.iso8601)
expect(api_get_token).not_to have_key('token')
end
@@ -211,8 +212,9 @@ RSpec.describe API::ResourceAccessTokens do
end
describe "POST projects/:id/access_tokens" do
- let(:params) { { name: "test", scopes: ["api"], expires_at: expires_at } }
+ let(:params) { { name: "test", scopes: ["api"], expires_at: expires_at, access_level: access_level } }
let(:expires_at) { 1.month.from_now }
+ let(:access_level) { 20 }
subject(:create_token) { post api("/projects/#{project_id}/access_tokens", user), params: params }
@@ -231,6 +233,7 @@ RSpec.describe API::ResourceAccessTokens do
expect(response).to have_gitlab_http_status(:created)
expect(json_response["name"]).to eq("test")
expect(json_response["scopes"]).to eq(["api"])
+ expect(json_response["access_level"]).to eq(20)
expect(json_response["expires_at"]).to eq(expires_at.to_date.iso8601)
expect(json_response["token"]).to be_present
end
@@ -248,6 +251,21 @@ RSpec.describe API::ResourceAccessTokens do
expect(json_response["expires_at"]).to eq(nil)
end
end
+
+ context "when 'access_level' is not set" do
+ let(:access_level) { nil }
+
+ it 'creates a project access token with the default access level', :aggregate_failures do
+ create_token
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response["name"]).to eq("test")
+ expect(json_response["scopes"]).to eq(["api"])
+ expect(json_response["access_level"]).to eq(40)
+ expect(json_response["expires_at"]).to eq(expires_at.to_date.iso8601)
+ expect(json_response["token"]).to be_present
+ end
+ end
end
context "with invalid params" do
diff --git a/spec/requests/api/rubygem_packages_spec.rb b/spec/requests/api/rubygem_packages_spec.rb
index 7d863b55bbe..9b104520b52 100644
--- a/spec/requests/api/rubygem_packages_spec.rb
+++ b/spec/requests/api/rubygem_packages_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe API::RubygemPackages do
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
let_it_be(:headers) { {} }
+
let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace, user: user } }
let(:tokens) do
@@ -244,6 +245,7 @@ RSpec.describe API::RubygemPackages do
let(:url) { "/projects/#{project.id}/packages/rubygems/api/v1/gems" }
let_it_be(:file_name) { 'package.gem' }
+
let(:headers) { {} }
let(:params) { { file: temp_file(file_name) } }
let(:file_key) { :file }
diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb
index f7394fa0cb4..e550132e776 100644
--- a/spec/requests/api/services_spec.rb
+++ b/spec/requests/api/services_spec.rb
@@ -24,14 +24,14 @@ RSpec.describe API::Services do
expect(response).to have_gitlab_http_status(:forbidden)
end
- context 'project with services' do
+ context 'with integrations' do
let!(:active_integration) { create(:emails_on_push_integration, project: project, active: true) }
let!(:integration) { create(:custom_issue_tracker_integration, project: project, active: false) }
- it "returns a list of all active services" do
+ it "returns a list of all active integrations" do
get api("/projects/#{project.id}/services", user)
- aggregate_failures 'expect successful response with all active services' do
+ aggregate_failures 'expect successful response with all active integrations' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.count).to eq(1)
@@ -42,41 +42,39 @@ RSpec.describe API::Services do
end
end
- Integration.available_services_names.each do |service|
- describe "PUT /projects/:id/services/#{service.dasherize}" do
- include_context service
+ Integration.available_integration_names.each do |integration|
+ describe "PUT /projects/:id/services/#{integration.dasherize}" do
+ include_context integration
- it "updates #{service} settings" do
- put api("/projects/#{project.id}/services/#{dashed_service}", user), params: service_attrs
+ it "updates #{integration} settings" do
+ put api("/projects/#{project.id}/services/#{dashed_integration}", user), params: integration_attrs
expect(response).to have_gitlab_http_status(:ok)
- current_service = project.integrations.first
- events = current_service.event_names.empty? ? ["foo"].freeze : current_service.event_names
+ current_integration = project.integrations.first
+ events = current_integration.event_names.empty? ? ["foo"].freeze : current_integration.event_names
query_strings = []
events.each do |event|
- query_strings << "#{event}=#{!current_service[event]}"
+ query_strings << "#{event}=#{!current_integration[event]}"
end
query_strings = query_strings.join('&')
- put api("/projects/#{project.id}/services/#{dashed_service}?#{query_strings}", user), params: service_attrs
+ put api("/projects/#{project.id}/services/#{dashed_integration}?#{query_strings}", user), params: integration_attrs
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['slug']).to eq(dashed_service)
+ expect(json_response['slug']).to eq(dashed_integration)
events.each do |event|
next if event == "foo"
- expect(project.integrations.first[event]).not_to eq(current_service[event]),
- "expected #{!current_service[event]} for event #{event} for service #{current_service.title}, got #{current_service[event]}"
+ expect(project.integrations.first[event]).not_to eq(current_integration[event]),
+ "expected #{!current_integration[event]} for event #{event} for service #{current_integration.title}, got #{current_integration[event]}"
end
end
it "returns if required fields missing" do
- attrs = service_attrs
-
- required_attributes = service_attrs_list.select do |attr|
- service_klass.validators_on(attr).any? do |v|
- v.class == ActiveRecord::Validations::PresenceValidator &&
+ required_attributes = integration_attrs_list.select do |attr|
+ integration_klass.validators_on(attr).any? do |v|
+ v.instance_of?(ActiveRecord::Validations::PresenceValidator) &&
# exclude presence validators with conditional since those are not really required
![:if, :unless].any? { |cond| v.options.include?(cond) }
end
@@ -85,85 +83,85 @@ RSpec.describe API::Services do
if required_attributes.empty?
expected_code = :ok
else
- attrs.delete(required_attributes.sample)
+ integration_attrs.delete(required_attributes.sample)
expected_code = :bad_request
end
- put api("/projects/#{project.id}/services/#{dashed_service}", user), params: attrs
+ put api("/projects/#{project.id}/services/#{dashed_integration}", user), params: integration_attrs
expect(response).to have_gitlab_http_status(expected_code)
end
end
- describe "DELETE /projects/:id/services/#{service.dasherize}" do
- include_context service
+ describe "DELETE /projects/:id/services/#{integration.dasherize}" do
+ include_context integration
before do
- initialize_service(service)
+ initialize_integration(integration)
end
- it "deletes #{service}" do
- delete api("/projects/#{project.id}/services/#{dashed_service}", user)
+ it "deletes #{integration}" do
+ delete api("/projects/#{project.id}/services/#{dashed_integration}", user)
expect(response).to have_gitlab_http_status(:no_content)
- project.send(service_method).reload
- expect(project.send(service_method).activated?).to be_falsey
+ project.send(integration_method).reload
+ expect(project.send(integration_method).activated?).to be_falsey
end
end
- describe "GET /projects/:id/services/#{service.dasherize}" do
- include_context service
+ describe "GET /projects/:id/services/#{integration.dasherize}" do
+ include_context integration
- let!(:initialized_service) { initialize_service(service, active: true) }
+ let!(:initialized_integration) { initialize_integration(integration, active: true) }
let_it_be(:project2) do
create(:project, creator_id: user.id, namespace: user.namespace)
end
- def deactive_service!
- return initialized_service.update!(active: false) unless initialized_service.is_a?(PrometheusService)
+ def deactive_integration!
+ return initialized_integration.update!(active: false) unless initialized_integration.is_a?(::Integrations::Prometheus)
- # PrometheusService sets `#active` itself within a `before_save`:
- initialized_service.manual_configuration = false
- initialized_service.save!
+ # Integrations::Prometheus sets `#active` itself within a `before_save`:
+ initialized_integration.manual_configuration = false
+ initialized_integration.save!
end
it 'returns authentication error when unauthenticated' do
- get api("/projects/#{project.id}/services/#{dashed_service}")
+ get api("/projects/#{project.id}/services/#{dashed_integration}")
expect(response).to have_gitlab_http_status(:unauthorized)
end
- it "returns all properties of active service #{service}" do
- get api("/projects/#{project.id}/services/#{dashed_service}", user)
+ it "returns all properties of active service #{integration}" do
+ get api("/projects/#{project.id}/services/#{dashed_integration}", user)
- expect(initialized_service).to be_active
+ expect(initialized_integration).to be_active
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['properties'].keys).to match_array(service_instance.api_field_names)
+ expect(json_response['properties'].keys).to match_array(integration_instance.api_field_names)
end
- it "returns all properties of inactive service #{service}" do
- deactive_service!
+ it "returns all properties of inactive integration #{integration}" do
+ deactive_integration!
- get api("/projects/#{project.id}/services/#{dashed_service}", user)
+ get api("/projects/#{project.id}/services/#{dashed_integration}", user)
- expect(initialized_service).not_to be_active
+ expect(initialized_integration).not_to be_active
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['properties'].keys).to match_array(service_instance.api_field_names)
+ expect(json_response['properties'].keys).to match_array(integration_instance.api_field_names)
end
- it "returns not found if service does not exist" do
- get api("/projects/#{project2.id}/services/#{dashed_service}", user)
+ it "returns not found if integration does not exist" do
+ get api("/projects/#{project2.id}/services/#{dashed_integration}", user)
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Service Not Found')
end
- it "returns not found if service exists but is in `Project#disabled_services`" do
+ it "returns not found if service exists but is in `Project#disabled_integrations`" do
expect_next_found_instance_of(Project) do |project|
- expect(project).to receive(:disabled_services).at_least(:once).and_return([service])
+ expect(project).to receive(:disabled_integrations).at_least(:once).and_return([integration])
end
- get api("/projects/#{project.id}/services/#{dashed_service}", user)
+ get api("/projects/#{project.id}/services/#{dashed_integration}", user)
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Service Not Found')
@@ -171,7 +169,7 @@ RSpec.describe API::Services do
it "returns error when authenticated but not a project owner" do
project.add_developer(user2)
- get api("/projects/#{project.id}/services/#{dashed_service}", user2)
+ get api("/projects/#{project.id}/services/#{dashed_integration}", user2)
expect(response).to have_gitlab_http_status(:forbidden)
end
@@ -179,10 +177,10 @@ RSpec.describe API::Services do
end
describe 'POST /projects/:id/services/:slug/trigger' do
- describe 'Mattermost Service' do
- let(:service_name) { 'mattermost_slash_commands' }
+ describe 'Mattermost integration' do
+ let(:integration_name) { 'mattermost_slash_commands' }
- context 'no service is available' do
+ context 'when no integration is available' do
it 'returns a not found message' do
post api("/projects/#{project.id}/services/idonotexist/trigger")
@@ -191,34 +189,34 @@ RSpec.describe API::Services do
end
end
- context 'the service exists' do
+ context 'when the integration exists' do
let(:params) { { token: 'token' } }
- context 'the service is not active' do
+ context 'when the integration is not active' do
before do
- project.create_mattermost_slash_commands_service(
+ project.create_mattermost_slash_commands_integration(
active: false,
properties: params
)
end
- it 'when the service is inactive' do
- post api("/projects/#{project.id}/services/#{service_name}/trigger"), params: params
+ it 'when the integration is inactive' do
+ post api("/projects/#{project.id}/services/#{integration_name}/trigger"), params: params
expect(response).to have_gitlab_http_status(:not_found)
end
end
- context 'the service is active' do
+ context 'when the integration is active' do
before do
- project.create_mattermost_slash_commands_service(
+ project.create_mattermost_slash_commands_integration(
active: true,
properties: params
)
end
it 'returns status 200' do
- post api("/projects/#{project.id}/services/#{service_name}/trigger"), params: params
+ post api("/projects/#{project.id}/services/#{integration_name}/trigger"), params: params
expect(response).to have_gitlab_http_status(:ok)
end
@@ -226,7 +224,7 @@ RSpec.describe API::Services do
context 'when the project can not be found' do
it 'returns a generic 404' do
- post api("/projects/404/services/#{service_name}/trigger"), params: params
+ post api("/projects/404/services/#{integration_name}/trigger"), params: params
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response["message"]).to eq("404 Service Not Found")
@@ -235,18 +233,18 @@ RSpec.describe API::Services do
end
end
- describe 'Slack Service' do
- let(:service_name) { 'slack_slash_commands' }
+ describe 'Slack Integration' do
+ let(:integration_name) { 'slack_slash_commands' }
before do
- project.create_slack_slash_commands_service(
+ project.create_slack_slash_commands_integration(
active: true,
properties: { token: 'token' }
)
end
it 'returns status 200' do
- post api("/projects/#{project.id}/services/#{service_name}/trigger"), params: { token: 'token', text: 'help' }
+ post api("/projects/#{project.id}/services/#{integration_name}/trigger"), params: { token: 'token', text: 'help' }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['response_type']).to eq("ephemeral")
@@ -254,29 +252,29 @@ RSpec.describe API::Services do
end
end
- describe 'Mattermost service' do
- let(:service_name) { 'mattermost' }
+ describe 'Mattermost integration' do
+ let(:integration_name) { 'mattermost' }
let(:params) do
{ webhook: 'https://hook.example.com', username: 'username' }
end
before do
- project.create_mattermost_service(
+ project.create_mattermost_integration(
active: true,
properties: params
)
end
it 'accepts a username for update' do
- put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(username: 'new_username')
+ put api("/projects/#{project.id}/services/#{integration_name}", user), params: params.merge(username: 'new_username')
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['properties']['username']).to eq('new_username')
end
end
- describe 'Microsoft Teams service' do
- let(:service_name) { 'microsoft-teams' }
+ describe 'Microsoft Teams integration' do
+ let(:integration_name) { 'microsoft-teams' }
let(:params) do
{
webhook: 'https://hook.example.com',
@@ -286,29 +284,31 @@ RSpec.describe API::Services do
end
before do
- project.create_microsoft_teams_service(
+ project.create_microsoft_teams_integration(
active: true,
properties: params
)
end
it 'accepts branches_to_be_notified for update' do
- put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(branches_to_be_notified: 'all')
+ put api("/projects/#{project.id}/services/#{integration_name}", user),
+ params: params.merge(branches_to_be_notified: 'all')
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['properties']['branches_to_be_notified']).to eq('all')
end
it 'accepts notify_only_broken_pipelines for update' do
- put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(notify_only_broken_pipelines: true)
+ put api("/projects/#{project.id}/services/#{integration_name}", user),
+ params: params.merge(notify_only_broken_pipelines: true)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['properties']['notify_only_broken_pipelines']).to eq(true)
end
end
- describe 'Hangouts Chat service' do
- let(:service_name) { 'hangouts-chat' }
+ describe 'Hangouts Chat integration' do
+ let(:integration_name) { 'hangouts-chat' }
let(:params) do
{
webhook: 'https://hook.example.com',
@@ -324,16 +324,38 @@ RSpec.describe API::Services do
end
it 'accepts branches_to_be_notified for update', :aggregate_failures do
- put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(branches_to_be_notified: 'all')
+ put api("/projects/#{project.id}/services/#{integration_name}", user), params: params.merge(branches_to_be_notified: 'all')
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['properties']['branches_to_be_notified']).to eq('all')
end
it 'only requires the webhook param' do
- put api("/projects/#{project.id}/services/#{service_name}", user), params: { webhook: 'https://hook.example.com' }
+ put api("/projects/#{project.id}/services/#{integration_name}", user), params: { webhook: 'https://hook.example.com' }
expect(response).to have_gitlab_http_status(:ok)
end
end
+
+ describe 'Pipelines Email Integration' do
+ let(:integration_name) { 'pipelines-email' }
+
+ context 'notify_only_broken_pipelines property was saved as a string' do
+ before do
+ project.create_pipelines_email_integration(
+ active: false,
+ properties: {
+ "notify_only_broken_pipelines": "true",
+ "branches_to_be_notified": "default"
+ }
+ )
+ end
+
+ it 'returns boolean values for notify_only_broken_pipelines' do
+ get api("/projects/#{project.id}/services/#{integration_name}", user)
+
+ expect(json_response['properties']['notify_only_broken_pipelines']).to eq(true)
+ end
+ end
+ end
end
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 4a4aeaea714..4008b57a1cf 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -127,6 +127,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
spam_check_endpoint_enabled: true,
spam_check_endpoint_url: 'grpc://example.com/spam_check',
spam_check_api_key: 'SPAM_CHECK_API_KEY',
+ mailgun_events_enabled: true,
+ mailgun_signing_key: 'MAILGUN_SIGNING_KEY',
disabled_oauth_sign_in_sources: 'unknown',
import_sources: 'github,bitbucket',
wiki_page_max_content_bytes: 12345,
@@ -175,6 +177,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
expect(json_response['spam_check_endpoint_enabled']).to be_truthy
expect(json_response['spam_check_endpoint_url']).to eq('grpc://example.com/spam_check')
expect(json_response['spam_check_api_key']).to eq('SPAM_CHECK_API_KEY')
+ expect(json_response['mailgun_events_enabled']).to be(true)
+ expect(json_response['mailgun_signing_key']).to eq('MAILGUN_SIGNING_KEY')
expect(json_response['disabled_oauth_sign_in_sources']).to eq([])
expect(json_response['import_sources']).to match_array(%w(github bitbucket))
expect(json_response['wiki_page_max_content_bytes']).to eq(12345)
@@ -493,6 +497,15 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
end
end
+ context "missing mailgun_signing_key value when mailgun_events_enabled is true" do
+ it "returns a blank parameter error message" do
+ put api("/application/settings", admin), params: { mailgun_events_enabled: true }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('mailgun_signing_key is missing')
+ end
+ end
+
context "personal access token prefix settings" do
context "handles validation errors" do
it "fails to update the settings with too long prefix" do
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index 227c53f8fb9..f4d15d0525e 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -223,7 +223,7 @@ RSpec.describe API::Snippets, factory_default: :keep do
it 'commit the files to the repository' do
subject
- blob = snippet.repository.blob_at('master', file_path)
+ blob = snippet.repository.blob_at(snippet.default_branch, file_path)
expect(blob.data).to eq file_content
end
diff --git a/spec/requests/api/system_hooks_spec.rb b/spec/requests/api/system_hooks_spec.rb
index 3cea1af686e..1511872d183 100644
--- a/spec/requests/api/system_hooks_spec.rb
+++ b/spec/requests/api/system_hooks_spec.rb
@@ -81,6 +81,7 @@ RSpec.describe API::SystemHooks do
expect(json_response['push_events']).to be false
expect(json_response['tag_push_events']).to be false
expect(json_response['merge_requests_events']).to be false
+ expect(json_response['repository_update_events']).to be true
end
it 'sets explicit values for events' do
@@ -92,7 +93,8 @@ RSpec.describe API::SystemHooks do
enable_ssl_verification: false,
push_events: true,
tag_push_events: true,
- merge_requests_events: true
+ merge_requests_events: true,
+ repository_update_events: false
}
expect(response).to have_gitlab_http_status(:created)
@@ -100,6 +102,7 @@ RSpec.describe API::SystemHooks do
expect(json_response['push_events']).to be true
expect(json_response['tag_push_events']).to be true
expect(json_response['merge_requests_events']).to be true
+ expect(json_response['repository_update_events']).to be false
end
end
diff --git a/spec/requests/api/unleash_spec.rb b/spec/requests/api/unleash_spec.rb
index 9989f8d28bd..0718710f15c 100644
--- a/spec/requests/api/unleash_spec.rb
+++ b/spec/requests/api/unleash_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe API::Unleash do
include FeatureFlagHelpers
let_it_be(:project, refind: true) { create(:project) }
+
let(:project_id) { project.id }
let(:params) { }
let(:headers) { }
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index d724cb9612c..383940ce34a 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe API::Users do
let_it_be(:key) { create(:key, user: user) }
let_it_be(:gpg_key) { create(:gpg_key, user: user) }
let_it_be(:email) { create(:email, user: user) }
+
let(:omniauth_user) { create(:omniauth_user) }
let(:ldap_blocked_user) { create(:omniauth_user, provider: 'ldapmain', state: 'ldap_blocked') }
let(:private_user) { create(:user, private_profile: true) }
@@ -2967,6 +2968,7 @@ RSpec.describe API::Users do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:group) { create(:group) }
+
let(:requesting_user) { create(:user) }
before_all do
diff --git a/spec/requests/api/wikis_spec.rb b/spec/requests/api/wikis_spec.rb
index 64fde3db19f..ec34dc7e7a1 100644
--- a/spec/requests/api/wikis_spec.rb
+++ b/spec/requests/api/wikis_spec.rb
@@ -611,11 +611,12 @@ RSpec.describe API::Wikis do
let(:payload) { { file: fixture_file_upload('spec/fixtures/dk.png') } }
let(:url) { "/projects/#{project.id}/wikis/attachments" }
let(:file_path) { "#{Wikis::CreateAttachmentService::ATTACHMENT_PATH}/fixed_hex/dk.png" }
+ let(:branch) { wiki.default_branch }
let(:result_hash) do
{
file_name: 'dk.png',
file_path: file_path,
- branch: 'master',
+ branch: branch,
link: {
url: file_path,
markdown: "![dk](#{file_path})"
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index 40005596c3e..3fb683ea0fa 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe 'Git HTTP requests' do
shared_examples 'operations are not allowed with expired password' do
context "when password is expired" do
it "responds to downloads with status 401 Unauthorized" do
- user.update!(password_expires_at: 2.days.ago)
+ user.update!(password_expires_at: 2.days.ago, password_automatically_set: true)
download(path, user: user.username, password: user.password) do |response|
expect(response).to have_gitlab_http_status(:unauthorized)
@@ -69,7 +69,7 @@ RSpec.describe 'Git HTTP requests' do
end
it "responds to uploads with status 401 Unauthorized" do
- user.update!(password_expires_at: 2.days.ago)
+ user.update!(password_expires_at: 2.days.ago, password_automatically_set: true)
upload(path, user: user.username, password: user.password) do |response|
expect(response).to have_gitlab_http_status(:unauthorized)
@@ -614,7 +614,7 @@ RSpec.describe 'Git HTTP requests' do
context "when password is expired" do
it "responds to downloads with status 401 unauthorized" do
- user.update!(password_expires_at: 2.days.ago)
+ user.update!(password_expires_at: 2.days.ago, password_automatically_set: true)
download(path, **env) do |response|
expect(response).to have_gitlab_http_status(:unauthorized)
@@ -697,7 +697,7 @@ RSpec.describe 'Git HTTP requests' do
context "when password is expired" do
it "responds to uploads with status 401 unauthorized" do
- user.update!(password_expires_at: 2.days.ago)
+ user.update!(password_expires_at: 2.days.ago, password_automatically_set: true)
write_access_token = create(:personal_access_token, user: user, scopes: [:write_repository])
@@ -889,10 +889,10 @@ RSpec.describe 'Git HTTP requests' do
context 'when admin mode is enabled', :enable_admin_mode do
it_behaves_like 'can download code only'
- it 'downloads from other project get status 404' do
+ it 'downloads from other project get status 403' do
clone_get "#{other_project.full_path}.git", user: 'gitlab-ci-token', password: build.token
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -920,7 +920,7 @@ RSpec.describe 'Git HTTP requests' do
context 'when users password is expired' do
it 'rejects pulls with 401 unauthorized' do
- user.update!(password_expires_at: 2.days.ago)
+ user.update!(password_expires_at: 2.days.ago, password_automatically_set: true)
download(path, user: 'gitlab-ci-token', password: build.token) do |response|
expect(response).to have_gitlab_http_status(:unauthorized)
@@ -1215,7 +1215,7 @@ RSpec.describe 'Git HTTP requests' do
context "when password is expired" do
it "responds to downloads with status 401 unauthorized" do
- user.update!(password_expires_at: 2.days.ago)
+ user.update!(password_expires_at: 2.days.ago, password_automatically_set: true)
download(path, **env) do |response|
expect(response).to have_gitlab_http_status(:unauthorized)
@@ -1298,7 +1298,7 @@ RSpec.describe 'Git HTTP requests' do
context "when password is expired" do
it "responds to uploads with status 401 unauthorized" do
- user.update!(password_expires_at: 2.days.ago)
+ user.update!(password_expires_at: 2.days.ago, password_automatically_set: true)
write_access_token = create(:personal_access_token, user: user, scopes: [:write_repository])
@@ -1490,10 +1490,10 @@ RSpec.describe 'Git HTTP requests' do
context 'when admin mode is enabled', :enable_admin_mode do
it_behaves_like 'can download code only'
- it 'downloads from other project get status 404' do
+ it 'downloads from other project get status 403' do
clone_get "#{other_project.full_path}.git", user: 'gitlab-ci-token', password: build.token
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -1521,7 +1521,7 @@ RSpec.describe 'Git HTTP requests' do
context 'when users password is expired' do
it 'rejects pulls with 401 unauthorized' do
- user.update!(password_expires_at: 2.days.ago)
+ user.update!(password_expires_at: 2.days.ago, password_automatically_set: true)
download(path, user: 'gitlab-ci-token', password: build.token) do |response|
expect(response).to have_gitlab_http_status(:unauthorized)
diff --git a/spec/requests/import/gitlab_groups_controller_spec.rb b/spec/requests/import/gitlab_groups_controller_spec.rb
index c65caf2ebf0..1f6487986a3 100644
--- a/spec/requests/import/gitlab_groups_controller_spec.rb
+++ b/spec/requests/import/gitlab_groups_controller_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Import::GitlabGroupsController do
include_context 'workhorse headers'
let_it_be(:user) { create(:user) }
+
let(:import_path) { "#{Dir.tmpdir}/gitlab_groups_controller_spec" }
before do
diff --git a/spec/requests/invite_registration_spec.rb b/spec/requests/invite_registration_spec.rb
deleted file mode 100644
index 167cf4b1de7..00000000000
--- a/spec/requests/invite_registration_spec.rb
+++ /dev/null
@@ -1,68 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Registering from an invite' do
- let(:com) { true }
-
- before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(com)
- end
-
- describe 'GET /users/sign_up/invites/new' do
- subject(:request) { get '/users/sign_up/invites/new' }
-
- context 'when on .com' do
- it 'renders the template with expected text', :aggregate_failures do
- request
-
- expect(response).to render_template('layouts/simple_registration')
- expect(response).to render_template(:new)
- expect(response.body).to include('Join your team')
- end
- end
-
- context 'when not on .com' do
- let(:com) { false }
-
- it 'returns not found' do
- request
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- describe 'POST /users/sign_up/invites' do
- subject(:request) do
- post '/users/sign_up/invites',
- params: {
- user: {
- first_name: 'first',
- last_name: 'last',
- username: 'new_username',
- email: 'new@user.com',
- password: 'Any_password'
- }
- }
- end
-
- context 'when on .com' do
- it 'creates a user' do
- expect { request }.to change(User, :count).by(1)
-
- expect(response).to have_gitlab_http_status(:found)
- end
- end
-
- context 'when not on .com' do
- let(:com) { false }
-
- it 'returns not found' do
- request
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-end
diff --git a/spec/requests/jwt_controller_spec.rb b/spec/requests/jwt_controller_spec.rb
index 5b5658da97e..55577a5dc65 100644
--- a/spec/requests/jwt_controller_spec.rb
+++ b/spec/requests/jwt_controller_spec.rb
@@ -227,6 +227,7 @@ RSpec.describe JwtController do
let_it_be(:group_deploy_token) { create(:deploy_token, :group, groups: [group]) }
let_it_be(:project_deploy_token) { create(:deploy_token, :project, projects: [project]) }
let_it_be(:service_name) { 'dependency_proxy' }
+
let(:headers) { { authorization: credentials(credential_user, credential_password) } }
let(:params) { { account: credential_user, client_id: 'docker', offline_token: true, service: service_name } }
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index fda8b2ecec6..02eb4262690 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -105,6 +105,7 @@ RSpec.describe 'Git LFS API and storage' do
context 'when deploy key is authorized' do
let_it_be(:key) { create(:deploy_key) }
+
let(:authorization) { authorize_deploy_key }
before do
@@ -125,7 +126,7 @@ RSpec.describe 'Git LFS API and storage' do
it_behaves_like 'LFS http 200 blob response'
context 'when user password is expired' do
- let_it_be(:user) { create(:user, password_expires_at: 1.minute.ago)}
+ let_it_be(:user) { create(:user, password_expires_at: 1.minute.ago, password_automatically_set: true)}
it_behaves_like 'LFS http 401 response'
end
@@ -343,7 +344,8 @@ RSpec.describe 'Git LFS API and storage' do
end
context 'when user password is expired' do
- let_it_be(:user) { create(:user, password_expires_at: 1.minute.ago)}
+ let_it_be(:user) { create(:user, password_expires_at: 1.minute.ago, password_automatically_set: true)}
+
let(:role) { :reporter}
it_behaves_like 'LFS http 401 response'
@@ -351,6 +353,7 @@ RSpec.describe 'Git LFS API and storage' do
context 'when user is blocked' do
let_it_be(:user) { create(:user, :blocked)}
+
let(:role) { :reporter}
it_behaves_like 'LFS http 401 response'
@@ -405,6 +408,7 @@ RSpec.describe 'Git LFS API and storage' do
context 'administrator', :enable_admin_mode do
let_it_be(:user) { create(:admin) }
+
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
it_behaves_like 'can download LFS only from own projects', renew_authorization: true
@@ -458,6 +462,7 @@ RSpec.describe 'Git LFS API and storage' do
describe 'upload' do
let_it_be(:project) { create(:project, :public) }
+
let(:body) { upload_body(sample_object) }
shared_examples 'pushes new LFS objects' do |renew_authorization:|
@@ -569,7 +574,7 @@ RSpec.describe 'Git LFS API and storage' do
let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
# I'm not sure what this tests that is different from the previous test
- it_behaves_like 'LFS http 404 response'
+ it_behaves_like 'LFS http 403 response'
end
end
@@ -953,7 +958,7 @@ RSpec.describe 'Git LFS API and storage' do
it_behaves_like 'LFS http 200 workhorse response'
context 'when user password is expired' do
- let_it_be(:user) { create(:user, password_expires_at: 1.minute.ago)}
+ let_it_be(:user) { create(:user, password_expires_at: 1.minute.ago, password_automatically_set: true) }
it_behaves_like 'LFS http 401 response'
end
@@ -984,6 +989,7 @@ RSpec.describe 'Git LFS API and storage' do
describe 'to a forked project' do
let_it_be(:upstream_project) { create(:project, :public) }
let_it_be(:project_owner) { create(:user) }
+
let(:project) { fork_project(upstream_project, project_owner) }
describe 'when user is authenticated' do
@@ -1043,7 +1049,7 @@ RSpec.describe 'Git LFS API and storage' do
let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
# I'm not sure what this tests that is different from the previous test
- it_behaves_like 'LFS http 404 response'
+ it_behaves_like 'LFS http 403 response'
end
end
diff --git a/spec/requests/product_analytics/collector_app_spec.rb b/spec/requests/product_analytics/collector_app_spec.rb
index b87610841e7..0d55d167a6f 100644
--- a/spec/requests/product_analytics/collector_app_spec.rb
+++ b/spec/requests/product_analytics/collector_app_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'ProductAnalytics::CollectorApp' do
let_it_be(:project) { create(:project) }
+
let(:params) { {} }
let(:raw_event) { Gitlab::Json.parse(fixture_file('product_analytics/event.json')) }
diff --git a/spec/requests/projects/merge_requests/diffs_spec.rb b/spec/requests/projects/merge_requests/diffs_spec.rb
new file mode 100644
index 00000000000..3a64c88acc1
--- /dev/null
+++ b/spec/requests/projects/merge_requests/diffs_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge Requests Diffs' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ describe 'GET diffs_batch' do
+ let(:headers) { {} }
+
+ shared_examples_for 'serializes diffs with expected arguments' do
+ it 'serializes paginated merge request diff collection' do
+ expect_next_instance_of(PaginatedDiffSerializer) do |instance|
+ expect(instance).to receive(:represent)
+ .with(an_instance_of(collection), expected_options)
+ .and_call_original
+ end
+
+ subject
+ end
+ end
+
+ def collection_arguments(pagination_data = {})
+ {
+ environment: nil,
+ merge_request: merge_request,
+ diff_view: :inline,
+ merge_ref_head_diff: nil,
+ pagination_data: {
+ total_pages: nil
+ }.merge(pagination_data)
+ }
+ end
+
+ def go(extra_params = {})
+ params = {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid,
+ page: 0,
+ per_page: 20,
+ format: 'json'
+ }
+
+ get diffs_batch_namespace_project_json_merge_request_path(params.merge(extra_params)), headers: headers
+ end
+
+ context 'with caching', :use_clean_rails_memory_store_caching do
+ subject { go(page: 0, per_page: 5) }
+
+ context 'when the request has not been cached' do
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
+ end
+ end
+
+ context 'when the request has already been cached' do
+ before do
+ go(page: 0, per_page: 5)
+ end
+
+ it 'does not serialize diffs' do
+ expect_next_instance_of(PaginatedDiffSerializer) do |instance|
+ expect(instance).not_to receive(:represent)
+ end
+
+ subject
+ end
+
+ context 'with the different pagination option' do
+ subject { go(page: 5, per_page: 5) }
+
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
+ end
+ end
+
+ context 'with the different diff_view' do
+ subject { go(page: 0, per_page: 5, view: :parallel) }
+
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20).merge(diff_view: :parallel) }
+ end
+ end
+
+ context 'with the different expanded option' do
+ subject { go(page: 0, per_page: 5, expanded: true ) }
+
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
+ end
+ end
+
+ context 'with the different ignore_whitespace_change option' do
+ subject { go(page: 0, per_page: 5, w: 1) }
+
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:collection) { Gitlab::Diff::FileCollection::Compare }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
+ end
+ end
+ end
+
+ context 'when the paths is given' do
+ subject { go(page: 0, per_page: 5, paths: %w[README CHANGELOG]) }
+
+ it 'does not use cache' do
+ expect(Rails.cache).not_to receive(:fetch).with(/cache:gitlab:PaginatedDiffSerializer/).and_call_original
+
+ subject
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/merge_requests_discussions_spec.rb b/spec/requests/projects/merge_requests_discussions_spec.rb
index eb8cf9f797d..595222a9eb2 100644
--- a/spec/requests/projects/merge_requests_discussions_spec.rb
+++ b/spec/requests/projects/merge_requests_discussions_spec.rb
@@ -52,5 +52,144 @@ RSpec.describe 'merge requests discussions' do
expect { send_request }
.to change { Gitlab::GitalyClient.get_request_count }.by_at_most(4)
end
+
+ context 'caching', :use_clean_rails_memory_store_caching do
+ let!(:first_note) { create(:diff_note_on_merge_request, noteable: merge_request, project: project) }
+ let!(:second_note) { create(:diff_note_on_merge_request, in_reply_to: first_note, noteable: merge_request, project: project) }
+ let!(:award_emoji) { create(:award_emoji, awardable: first_note) }
+
+ before do
+ # Make a request to cache the discussions
+ send_request
+ end
+
+ shared_examples 'cache miss' do
+ it 'does not hit a warm cache' do
+ expect_next_instance_of(DiscussionSerializer) do |serializer|
+ expect(serializer).to receive(:represent) do |arg|
+ expect(arg.notes).to contain_exactly(*changed_notes)
+ end.and_call_original
+ end
+
+ send_request
+ end
+ end
+
+ it 'gets cached on subsequent requests' do
+ expect_next_instance_of(DiscussionSerializer) do |serializer|
+ expect(serializer).not_to receive(:represent)
+ end
+
+ send_request
+ end
+
+ context 'when a note in a discussion got updated' do
+ before do
+ first_note.update!(updated_at: 1.minute.from_now)
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
+ end
+
+ context 'when a note in a discussion got resolved' do
+ before do
+ travel_to(1.minute.from_now) do
+ first_note.resolve!(user)
+ end
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
+ end
+
+ context 'when a note is added to a discussion' do
+ let!(:third_note) { create(:diff_note_on_merge_request, in_reply_to: first_note, noteable: merge_request, project: project) }
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note, third_note] }
+ end
+ end
+
+ context 'when a note is removed from a discussion' do
+ before do
+ second_note.destroy!
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note] }
+ end
+ end
+
+ context 'when an emoji is awarded to a note in discussion' do
+ before do
+ travel_to(1.minute.from_now) do
+ create(:award_emoji, awardable: first_note)
+ end
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
+ end
+
+ context 'when an award emoji is removed from a note in discussion' do
+ before do
+ travel_to(1.minute.from_now) do
+ award_emoji.destroy!
+ end
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
+ end
+
+ context 'when cached markdown version gets bump' do
+ before do
+ settings = Gitlab::CurrentSettings.current_application_settings
+ settings.update!(local_markdown_version: settings.local_markdown_version + 1)
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
+ end
+
+ context 'when the diff note position changes' do
+ before do
+ # This replicates a position change wherein timestamps aren't updated
+ # which is why `Gitlab::Timeless.timeless` is utilized. This is the
+ # same approach being used in Discussions::UpdateDiffPositionService
+ # which is responsible for updating the positions of diff discussions
+ # when MR updates.
+ first_note.position = Gitlab::Diff::Position.new(
+ old_path: first_note.position.old_path,
+ new_path: first_note.position.new_path,
+ old_line: first_note.position.old_line,
+ new_line: first_note.position.new_line + 1,
+ diff_refs: first_note.position.diff_refs
+ )
+
+ Gitlab::Timeless.timeless(first_note, &:save)
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
+ end
+
+ context 'when merge_request_discussion_cache is disabled' do
+ before do
+ stub_feature_flags(merge_request_discussion_cache: false)
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
+ end
+ end
end
end
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index f24f815e9c6..f7b1b4726f6 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -221,6 +221,7 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
let_it_be(:token) { create(:personal_access_token, user: user) }
let_it_be(:other_user) { create(:user) }
let_it_be(:other_user_token) { create(:personal_access_token, user: other_user) }
+
let(:throttle_setting_prefix) { 'throttle_authenticated_api' }
let(:api_partial_url) { '/todos' }
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index fe04a1d7c4a..f3d0179ffdd 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -770,13 +770,13 @@ RSpec.describe 'project routing' do
end
end
- describe Projects::UsagePingController, 'routing' do
- it 'routes to usage_ping#web_ide_clientside_preview' do
- expect(post('/gitlab/gitlabhq/usage_ping/web_ide_clientside_preview')).to route_to('projects/usage_ping#web_ide_clientside_preview', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ describe Projects::ServicePingController, 'routing' do
+ it 'routes to service_ping#web_ide_clientside_preview' do
+ expect(post('/gitlab/gitlabhq/service_ping/web_ide_clientside_preview')).to route_to('projects/service_ping#web_ide_clientside_preview', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
- it 'routes to usage_ping#web_ide_pipelines_count' do
- expect(post('/gitlab/gitlabhq/usage_ping/web_ide_pipelines_count')).to route_to('projects/usage_ping#web_ide_pipelines_count', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ it 'routes to service_ping#web_ide_pipelines_count' do
+ expect(post('/gitlab/gitlabhq/service_ping/web_ide_pipelines_count')).to route_to('projects/service_ping#web_ide_pipelines_count', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
end
diff --git a/spec/rubocop/code_reuse_helpers_spec.rb b/spec/rubocop/code_reuse_helpers_spec.rb
index 9337df368e3..695c152e3db 100644
--- a/spec/rubocop/code_reuse_helpers_spec.rb
+++ b/spec/rubocop/code_reuse_helpers_spec.rb
@@ -150,6 +150,31 @@ RSpec.describe RuboCop::CodeReuseHelpers do
end
end
+ describe '#in_graphql_types?' do
+ %w[
+ app/graphql/types
+ ee/app/graphql/ee/types
+ ee/app/graphql/types
+ ].each do |path|
+ it "returns true for a node in #{path}" do
+ node = build_and_parse_source('10', rails_root_join(path, 'foo.rb'))
+
+ expect(cop.in_graphql_types?(node)).to eq(true)
+ end
+ end
+
+ %w[
+ app/graphql/resolvers
+ app/foo
+ ].each do |path|
+      it "returns false for a node in #{path}" do
+ node = build_and_parse_source('10', rails_root_join(path, 'foo.rb'))
+
+ expect(cop.in_graphql_types?(node)).to eq(false)
+ end
+ end
+ end
+
describe '#in_api?' do
it 'returns true for a node in the API directory' do
node = build_and_parse_source('10', rails_root_join('lib', 'api', 'foo.rb'))
@@ -164,25 +189,67 @@ RSpec.describe RuboCop::CodeReuseHelpers do
end
end
- describe '#in_directory?' do
+ describe '#in_spec?' do
+ it 'returns true for a node in the spec directory' do
+ node = build_and_parse_source('10', rails_root_join('spec', 'foo.rb'))
+
+ expect(cop.in_spec?(node)).to eq(true)
+ end
+
+ it 'returns true for a node in the ee/spec directory' do
+ node = build_and_parse_source('10', rails_root_join('ee', 'spec', 'foo.rb'))
+
+ expect(cop.in_spec?(node)).to eq(true)
+ end
+
+ it 'returns false for a node outside the spec directory' do
+ node = build_and_parse_source('10', rails_root_join('lib', 'foo.rb'))
+
+ expect(cop.in_spec?(node)).to eq(false)
+ end
+ end
+
+ describe '#in_app_directory?' do
it 'returns true for a directory in the CE app/ directory' do
node = build_and_parse_source('10', rails_root_join('app', 'models', 'foo.rb'))
- expect(cop.in_directory?(node, 'models')).to eq(true)
+ expect(cop.in_app_directory?(node, 'models')).to eq(true)
end
it 'returns true for a directory in the EE app/ directory' do
node =
build_and_parse_source('10', rails_root_join('ee', 'app', 'models', 'foo.rb'))
- expect(cop.in_directory?(node, 'models')).to eq(true)
+ expect(cop.in_app_directory?(node, 'models')).to eq(true)
end
it 'returns false for a directory in the lib/ directory' do
node =
build_and_parse_source('10', rails_root_join('lib', 'models', 'foo.rb'))
- expect(cop.in_directory?(node, 'models')).to eq(false)
+ expect(cop.in_app_directory?(node, 'models')).to eq(false)
+ end
+ end
+
+ describe '#in_lib_directory?' do
+ it 'returns true for a directory in the CE lib/ directory' do
+ node = build_and_parse_source('10', rails_root_join('lib', 'models', 'foo.rb'))
+
+ expect(cop.in_lib_directory?(node, 'models')).to eq(true)
+ end
+
+ it 'returns true for a directory in the EE lib/ directory' do
+ node =
+ build_and_parse_source('10', rails_root_join('ee', 'lib', 'models', 'foo.rb'))
+
+ expect(cop.in_lib_directory?(node, 'models')).to eq(true)
+ end
+
+ it 'returns false for a directory in the app/ directory' do
+ node =
+ build_and_parse_source('10', rails_root_join('app', 'models', 'foo.rb'))
+
+ expect(cop.in_lib_directory?(node, 'models')).to eq(false)
end
end
diff --git a/spec/rubocop/cop/database/multiple_databases_spec.rb b/spec/rubocop/cop/database/multiple_databases_spec.rb
new file mode 100644
index 00000000000..16b916d61db
--- /dev/null
+++ b/spec/rubocop/cop/database/multiple_databases_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../../rubocop/cop/database/multiple_databases'
+
+RSpec.describe RuboCop::Cop::Database::MultipleDatabases do
+ subject(:cop) { described_class.new }
+
+ it 'flags the use of ActiveRecord::Base.connection' do
+ expect_offense(<<~SOURCE)
+ ActiveRecord::Base.connection.inspect
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Do not use methods from ActiveRecord::Base, [...]
+ SOURCE
+ end
+end
diff --git a/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb b/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
new file mode 100644
index 00000000000..968cafc57d4
--- /dev/null
+++ b/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
@@ -0,0 +1,233 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+require 'rubocop/rspec/support'
+require_relative '../../../../rubocop/cop/gitlab/mark_used_feature_flags'
+
+RSpec.describe RuboCop::Cop::Gitlab::MarkUsedFeatureFlags do
+ let(:defined_feature_flags) do
+ %w[a_feature_flag foo_hello foo_world baz_experiment_percentage bar_baz]
+ end
+
+ subject(:cop) { described_class.new }
+
+ before do
+ stub_const("#{described_class}::DYNAMIC_FEATURE_FLAGS", [])
+ allow(cop).to receive(:defined_feature_flags).and_return(defined_feature_flags)
+ allow(cop).to receive(:usage_data_counters_known_event_feature_flags).and_return([])
+ end
+
+ def feature_flag_path(feature_flag_name)
+ File.expand_path("../../../../tmp/feature_flags/#{feature_flag_name}.used", __dir__)
+ end
+
+ shared_examples 'sets flag as used' do |method_call, flags_to_be_set|
+ it 'sets the flag as used' do
+ Array(flags_to_be_set).each do |flag_to_be_set|
+ expect(FileUtils).to receive(:touch).with(feature_flag_path(flag_to_be_set))
+ end
+
+ expect_no_offenses(<<~RUBY)
+ class Foo < ApplicationRecord
+ #{method_call}
+ end
+ RUBY
+ end
+ end
+
+ shared_examples 'does not set any flags as used' do |method_call|
+    it 'does not set any flags as used' do
+ expect(FileUtils).not_to receive(:touch)
+
+ expect_no_offenses(method_call)
+ end
+ end
+
+ %w[
+ Feature.enabled?
+ Feature.disabled?
+ push_frontend_feature_flag
+ ].each do |feature_flag_method|
+ context "#{feature_flag_method} method" do
+ context 'a string feature flag' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}("foo")|, 'foo'
+ end
+
+ context 'a symbol feature flag' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}(:foo)|, 'foo'
+ end
+
+ context 'an interpolated string feature flag with a string prefix' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}("foo_\#{bar}")|, %w[foo_hello foo_world]
+ end
+
+ context 'an interpolated symbol feature flag with a string prefix' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}(:"foo_\#{bar}")|, %w[foo_hello foo_world]
+ end
+
+ context 'a string with a "/" in it' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}("bar/baz")|, 'bar_baz'
+ end
+
+ context 'an interpolated string feature flag with a string prefix and suffix' do
+ include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(:"foo_\#{bar}_baz")|
+ end
+
+ context 'a dynamic string feature flag as a variable' do
+ include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(a_variable, an_arg)|
+ end
+
+ context 'an integer feature flag' do
+ include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(123)|
+ end
+ end
+ end
+
+ %w[
+ Feature::Gitaly.enabled?
+ Feature::Gitaly.disabled?
+ ].each do |feature_flag_method|
+ context "#{feature_flag_method} method" do
+ context 'a string feature flag' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}("foo")|, 'gitaly_foo'
+ end
+
+ context 'a symbol feature flag' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}(:foo)|, 'gitaly_foo'
+ end
+
+ context 'an interpolated string feature flag with a string prefix' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}("foo_\#{bar}")|, %w[foo_hello foo_world]
+ end
+
+ context 'an interpolated symbol feature flag with a string prefix' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}(:"foo_\#{bar}")|, %w[foo_hello foo_world]
+ end
+
+ context 'an interpolated string feature flag with a string prefix and suffix' do
+ include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(:"foo_\#{bar}_baz")|
+ end
+
+ context 'a dynamic string feature flag as a variable' do
+ include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(a_variable, an_arg)|
+ end
+
+ context 'an integer feature flag' do
+ include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(123)|
+ end
+ end
+ end
+
+ %w[
+ experiment
+ experiment_enabled?
+ push_frontend_experiment
+ Gitlab::Experimentation.active?
+ ].each do |feature_flag_method|
+ context "#{feature_flag_method} method" do
+ context 'a string feature flag' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}("baz")|, %w[baz baz_experiment_percentage]
+ end
+
+ context 'a symbol feature flag' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}(:baz)|, %w[baz baz_experiment_percentage]
+ end
+
+ context 'an interpolated string feature flag with a string prefix' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}("foo_\#{bar}")|, %w[foo_hello foo_world]
+ end
+
+ context 'an interpolated symbol feature flag with a string prefix' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}(:"foo_\#{bar}")|, %w[foo_hello foo_world]
+ end
+
+ context 'an interpolated string feature flag with a string prefix and suffix' do
+ include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(:"foo_\#{bar}_baz")|
+ end
+
+ context 'a dynamic string feature flag as a variable' do
+ include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(a_variable, an_arg)|
+ end
+
+ context 'an integer feature flag' do
+ include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(123)|
+ end
+ end
+ end
+
+ %w[
+ use_rugged?
+ ].each do |feature_flag_method|
+ context "#{feature_flag_method} method" do
+ context 'a string feature flag' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}(arg, "baz")|, 'baz'
+ end
+
+ context 'a symbol feature flag' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}(arg, :baz)|, 'baz'
+ end
+
+ context 'an interpolated string feature flag with a string prefix' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}(arg, "foo_\#{bar}")|, %w[foo_hello foo_world]
+ end
+
+ context 'an interpolated symbol feature flag with a string prefix' do
+ include_examples 'sets flag as used', %Q|#{feature_flag_method}(arg, :"foo_\#{bar}")|, %w[foo_hello foo_world]
+ end
+
+ context 'an interpolated string feature flag with a string prefix and suffix' do
+ include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(arg, :"foo_\#{bar}_baz")|
+ end
+
+ context 'a dynamic string feature flag as a variable' do
+ include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(a_variable, an_arg)|
+ end
+
+ context 'an integer feature flag' do
+ include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(arg, 123)|
+ end
+ end
+ end
+
+ describe 'self.limit_feature_flag = :foo' do
+ include_examples 'sets flag as used', 'self.limit_feature_flag = :foo', 'foo'
+ end
+
+ describe 'FEATURE_FLAG = :foo' do
+ include_examples 'sets flag as used', 'FEATURE_FLAG = :foo', 'foo'
+ end
+
+ describe 'Worker `data_consistency` method' do
+ include_examples 'sets flag as used', 'data_consistency :delayed, feature_flag: :foo', 'foo'
+ include_examples 'does not set any flags as used', 'data_consistency :delayed'
+ end
+
+ describe 'Worker `deduplicate` method' do
+ include_examples 'sets flag as used', 'deduplicate :delayed, feature_flag: :foo', 'foo'
+ include_examples 'does not set any flags as used', 'deduplicate :delayed'
+ end
+
+ describe 'GraphQL `field` method' do
+ before do
+ allow(cop).to receive(:in_graphql_types?).and_return(true)
+ end
+
+ include_examples 'sets flag as used', 'field :runners, Types::Ci::RunnerType.connection_type, null: true, feature_flag: :foo', 'foo'
+ include_examples 'sets flag as used', 'field :runners, null: true, feature_flag: :foo', 'foo'
+ include_examples 'does not set any flags as used', 'field :solution'
+ include_examples 'does not set any flags as used', 'field :runners, Types::Ci::RunnerType.connection_type'
+ include_examples 'does not set any flags as used', 'field :runners, Types::Ci::RunnerType.connection_type, null: true, description: "hello world"'
+ include_examples 'does not set any flags as used', 'field :solution, type: GraphQL::STRING_TYPE, null: true, description: "URL to the vulnerabilitys details page."'
+ end
+
+ describe "tracking of usage data metrics known events happens at the beginning of inspection" do
+ let(:usage_data_counters_known_event_feature_flags) { ['an_event_feature_flag'] }
+
+ before do
+ allow(cop).to receive(:usage_data_counters_known_event_feature_flags).and_return(usage_data_counters_known_event_feature_flags)
+ end
+
+ include_examples 'sets flag as used', "FEATURE_FLAG = :foo", %w[foo an_event_feature_flag]
+ end
+end
diff --git a/spec/rubocop/cop/migration/prevent_index_creation_spec.rb b/spec/rubocop/cop/migration/prevent_index_creation_spec.rb
new file mode 100644
index 00000000000..a3965f54bbd
--- /dev/null
+++ b/spec/rubocop/cop/migration/prevent_index_creation_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../../rubocop/cop/migration/prevent_index_creation'
+
+RSpec.describe RuboCop::Cop::Migration::PreventIndexCreation do
+ subject(:cop) { described_class.new }
+
+ context 'when in migration' do
+ before do
+ allow(cop).to receive(:in_migration?).and_return(true)
+ end
+
+ context 'when adding an index to a forbidden table' do
+ it 'registers an offense when add_index is used' do
+ expect_offense(<<~RUBY)
+ def change
+ add_index :ci_builds, :protected
+ ^^^^^^^^^ Adding new index to ci_builds is forbidden, see https://gitlab.com/gitlab-org/gitlab/-/issues/332886
+ end
+ RUBY
+ end
+
+ it 'registers an offense when add_concurrent_index is used' do
+ expect_offense(<<~RUBY)
+ def change
+ add_concurrent_index :ci_builds, :protected
+ ^^^^^^^^^^^^^^^^^^^^ Adding new index to ci_builds is forbidden, see https://gitlab.com/gitlab-org/gitlab/-/issues/332886
+ end
+ RUBY
+ end
+ end
+
+ context 'when adding an index to a regular table' do
+ it 'does not register an offense' do
+ expect_no_offenses(<<~RUBY)
+ def change
+ add_index :ci_pipelines, :locked
+ end
+ RUBY
+ end
+ end
+ end
+
+ context 'when outside of migration' do
+ it 'does not register an offense' do
+ expect_no_offenses('def change; add_index :table, :column; end')
+ end
+ end
+end
diff --git a/spec/rubocop/cop/migration/sidekiq_queue_migrate_spec.rb b/spec/rubocop/cop/migration/sidekiq_queue_migrate_spec.rb
new file mode 100644
index 00000000000..499351b3585
--- /dev/null
+++ b/spec/rubocop/cop/migration/sidekiq_queue_migrate_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../../rubocop/cop/migration/sidekiq_queue_migrate'
+
+RSpec.describe RuboCop::Cop::Migration::SidekiqQueueMigrate do
+ subject(:cop) { described_class.new }
+
+ def source(meth = 'change')
+ "def #{meth}; sidekiq_queue_migrate 'queue', to: 'new_queue'; end"
+ end
+
+ context 'when in a regular migration' do
+ before do
+ allow(cop).to receive(:in_migration?).and_return(true)
+ allow(cop).to receive(:in_post_deployment_migration?).and_return(false)
+ end
+
+ %w(up down change any_other_method).each do |method_name|
+ it "registers an offense when sidekiq_queue_migrate is used in ##{method_name}" do
+ expect_offense(<<~RUBY)
+ def #{method_name}
+ sidekiq_queue_migrate 'queue', to: 'new_queue'
+ ^^^^^^^^^^^^^^^^^^^^^ `sidekiq_queue_migrate` must only be used in post-deployment migrations
+ end
+ RUBY
+ end
+ end
+ end
+
+ context 'when in a post-deployment migration' do
+ before do
+ allow(cop).to receive(:in_migration?).and_return(true)
+ allow(cop).to receive(:in_post_deployment_migration?).and_return(true)
+ end
+
+ it 'registers no offense' do
+ expect_no_offenses(source)
+ end
+ end
+
+ context 'when outside of a migration' do
+ it 'registers no offense' do
+ expect_no_offenses(source)
+ end
+ end
+end
diff --git a/spec/rubocop/cop/worker_data_consistency_spec.rb b/spec/rubocop/cop/worker_data_consistency_spec.rb
new file mode 100644
index 00000000000..5fa42bf2b87
--- /dev/null
+++ b/spec/rubocop/cop/worker_data_consistency_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../rubocop/cop/worker_data_consistency'
+
+RSpec.describe RuboCop::Cop::WorkerDataConsistency do
+ subject(:cop) { described_class.new }
+
+ before do
+ allow(cop)
+ .to receive(:in_worker?)
+ .and_return(true)
+ end
+
+ it 'adds an offense when not defining data_consistency' do
+ expect_offense(<<~CODE)
+ class SomeWorker
+ ^^^^^^^^^^^^^^^^ Should define data_consistency expectation.[...]
+ include ApplicationWorker
+
+ queue_namespace :pipeline_hooks
+ feature_category :continuous_integration
+ urgency :high
+ end
+ CODE
+ end
+
+ it 'adds no offense when defining data_consistency' do
+ expect_no_offenses(<<~CODE)
+ class SomeWorker
+ include ApplicationWorker
+
+ queue_namespace :pipeline_hooks
+ feature_category :continuous_integration
+ data_consistency :delayed
+ urgency :high
+ end
+ CODE
+ end
+
+ it 'adds no offense when worker is not an ApplicationWorker' do
+ expect_no_offenses(<<~CODE)
+ class SomeWorker
+ queue_namespace :pipeline_hooks
+ feature_category :continuous_integration
+ urgency :high
+ end
+ CODE
+ end
+end
diff --git a/spec/serializers/analytics/cycle_analytics/stage_entity_spec.rb b/spec/serializers/analytics/cycle_analytics/stage_entity_spec.rb
index 90cc7f7827b..8b45e8a64fc 100644
--- a/spec/serializers/analytics/cycle_analytics/stage_entity_spec.rb
+++ b/spec/serializers/analytics/cycle_analytics/stage_entity_spec.rb
@@ -11,4 +11,12 @@ RSpec.describe Analytics::CycleAnalytics::StageEntity do
expect(entity_json).to have_key(:start_event_html_description)
expect(entity_json).to have_key(:end_event_html_description)
end
+
+ it 'exposes start_event and end_event objects' do
+ expect(entity_json[:start_event][:identifier]).to eq(entity_json[:start_event_identifier])
+ expect(entity_json[:end_event][:identifier]).to eq(entity_json[:end_event_identifier])
+
+ expect(entity_json[:start_event][:html_description]).to eq(entity_json[:start_event_html_description])
+ expect(entity_json[:end_event][:html_description]).to eq(entity_json[:end_event_html_description])
+ end
end
diff --git a/spec/serializers/paginated_diff_entity_spec.rb b/spec/serializers/paginated_diff_entity_spec.rb
index a8ac89a8481..f408deb734e 100644
--- a/spec/serializers/paginated_diff_entity_spec.rb
+++ b/spec/serializers/paginated_diff_entity_spec.rb
@@ -19,21 +19,12 @@ RSpec.describe PaginatedDiffEntity do
subject { entity.as_json }
- before do
- stub_feature_flags(diffs_gradual_load: false)
- end
-
it 'exposes diff_files' do
expect(subject[:diff_files]).to be_present
end
it 'exposes pagination data' do
- expect(subject[:pagination]).to eq(
- current_page: 2,
- next_page: 3,
- next_page_href: "/#{merge_request.project.full_path}/-/merge_requests/#{merge_request.iid}/diffs_batch.json?page=3",
- total_pages: 7
- )
+ expect(subject[:pagination]).to eq(total_pages: 20)
end
context 'when there are conflicts' do
diff --git a/spec/serializers/service_event_entity_spec.rb b/spec/serializers/service_event_entity_spec.rb
index 91254c7dd27..f610c8f1488 100644
--- a/spec/serializers/service_event_entity_spec.rb
+++ b/spec/serializers/service_event_entity_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe ServiceEventEntity do
end
describe '#as_json' do
- context 'service without fields' do
+ context 'integration without fields' do
let(:integration) { create(:emails_on_push_integration, push_events: true) }
let(:event) { 'push' }
@@ -24,8 +24,8 @@ RSpec.describe ServiceEventEntity do
end
end
- context 'service with fields' do
- let(:integration) { create(:slack_service, note_events: false, note_channel: 'note-channel') }
+ context 'integration with fields' do
+ let(:integration) { create(:integrations_slack, note_events: false, note_channel: 'note-channel') }
let(:event) { 'note' }
it 'exposes correct attributes' do
diff --git a/spec/serializers/service_field_entity_spec.rb b/spec/serializers/service_field_entity_spec.rb
index 20ca98416f8..6e9ebfb66d9 100644
--- a/spec/serializers/service_field_entity_spec.rb
+++ b/spec/serializers/service_field_entity_spec.rb
@@ -5,18 +5,18 @@ require 'spec_helper'
RSpec.describe ServiceFieldEntity do
let(:request) { double('request') }
- subject { described_class.new(field, request: request, service: service).as_json }
+ subject { described_class.new(field, request: request, service: integration).as_json }
before do
- allow(request).to receive(:service).and_return(service)
+ allow(request).to receive(:service).and_return(integration)
end
describe '#as_json' do
context 'Jira Service' do
- let(:service) { create(:jira_service) }
+ let(:integration) { create(:jira_integration) }
context 'field with type text' do
- let(:field) { service.global_fields.find { |field| field[:name] == 'username' } }
+ let(:field) { integration_field('username') }
it 'exposes correct attributes' do
expected_hash = {
@@ -35,7 +35,7 @@ RSpec.describe ServiceFieldEntity do
end
context 'field with type password' do
- let(:field) { service.global_fields.find { |field| field[:name] == 'password' } }
+ let(:field) { integration_field('password') }
it 'exposes correct attributes but hides password' do
expected_hash = {
@@ -56,10 +56,9 @@ RSpec.describe ServiceFieldEntity do
context 'EmailsOnPush Service' do
let(:integration) { create(:emails_on_push_integration, send_from_committer_email: '1') }
- let(:service) { integration } # TODO: remove when https://gitlab.com/gitlab-org/gitlab/-/issues/330300 is complete
context 'field with type checkbox' do
- let(:field) { integration.global_fields.find { |field| field[:name] == 'send_from_committer_email' } }
+ let(:field) { integration_field('send_from_committer_email') }
it 'exposes correct attributes and casts value to Boolean' do
expected_hash = {
@@ -78,7 +77,7 @@ RSpec.describe ServiceFieldEntity do
end
context 'field with type select' do
- let(:field) { integration.global_fields.find { |field| field[:name] == 'branches_to_be_notified' } }
+ let(:field) { integration_field('branches_to_be_notified') }
it 'exposes correct attributes' do
expected_hash = {
@@ -97,4 +96,8 @@ RSpec.describe ServiceFieldEntity do
end
end
end
+
+ def integration_field(name)
+ integration.global_fields.find { |f| f[:name] == name }
+ end
end
diff --git a/spec/services/admin/propagate_integration_service_spec.rb b/spec/services/admin/propagate_integration_service_spec.rb
index 13320528e4f..151658fe429 100644
--- a/spec/services/admin/propagate_integration_service_spec.rb
+++ b/spec/services/admin/propagate_integration_service_spec.rb
@@ -7,20 +7,20 @@ RSpec.describe Admin::PropagateIntegrationService do
include JiraServiceHelper
before do
- stub_jira_service_test
+ stub_jira_integration_test
end
let(:group) { create(:group) }
let_it_be(:project) { create(:project) }
- let_it_be(:instance_integration) { create(:jira_service, :instance) }
- let_it_be(:not_inherited_integration) { create(:jira_service, project: project) }
+ let_it_be(:instance_integration) { create(:jira_integration, :instance) }
+ let_it_be(:not_inherited_integration) { create(:jira_integration, project: project) }
let_it_be(:inherited_integration) do
- create(:jira_service, project: create(:project), inherit_from_id: instance_integration.id)
+ create(:jira_integration, project: create(:project), inherit_from_id: instance_integration.id)
end
let_it_be(:different_type_inherited_integration) do
- create(:redmine_service, project: project, inherit_from_id: instance_integration.id)
+ create(:redmine_integration, project: project, inherit_from_id: instance_integration.id)
end
context 'with inherited integration' do
@@ -55,7 +55,7 @@ RSpec.describe Admin::PropagateIntegrationService do
end
context 'for a group-level integration' do
- let(:group_integration) { create(:jira_service, group: group, project: nil) }
+ let(:group_integration) { create(:jira_integration, group: group, project: nil) }
context 'with a project without integration' do
let(:another_project) { create(:project, group: group) }
@@ -81,7 +81,7 @@ RSpec.describe Admin::PropagateIntegrationService do
context 'with a subgroup with integration' do
let(:subgroup) { create(:group, parent: group) }
- let(:subgroup_integration) { create(:jira_service, group: subgroup, project: nil, inherit_from_id: group_integration.id) }
+ let(:subgroup_integration) { create(:jira_integration, group: subgroup, project: nil, inherit_from_id: group_integration.id) }
it 'calls to PropagateIntegrationInheritDescendantWorker' do
expect(PropagateIntegrationInheritDescendantWorker).to receive(:perform_async)
diff --git a/spec/services/admin/propagate_service_template_spec.rb b/spec/services/admin/propagate_service_template_spec.rb
index 1bcf9af78ce..c8ca3173f99 100644
--- a/spec/services/admin/propagate_service_template_spec.rb
+++ b/spec/services/admin/propagate_service_template_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Admin::PropagateServiceTemplate do
describe '.propagate' do
let_it_be(:project) { create(:project) }
+
let!(:service_template) do
Integrations::Pushover.create!(
template: true,
diff --git a/spec/services/alert_management/create_alert_issue_service_spec.rb b/spec/services/alert_management/create_alert_issue_service_spec.rb
index 695e90ebd92..55f8e47717c 100644
--- a/spec/services/alert_management/create_alert_issue_service_spec.rb
+++ b/spec/services/alert_management/create_alert_issue_service_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe AlertManagement::CreateAlertIssueService do
let_it_be(:generic_alert, reload: true) { create(:alert_management_alert, :triggered, project: project, payload: payload) }
let_it_be(:prometheus_alert, reload: true) { create(:alert_management_alert, :triggered, :prometheus, project: project, payload: payload) }
+
let(:alert) { generic_alert }
let(:alert_presenter) { alert.present }
let(:created_issue) { Issue.last! }
diff --git a/spec/services/application_settings/update_service_spec.rb b/spec/services/application_settings/update_service_spec.rb
index 56c1284927d..5f0c02cd521 100644
--- a/spec/services/application_settings/update_service_spec.rb
+++ b/spec/services/application_settings/update_service_spec.rb
@@ -23,8 +23,8 @@ RSpec.describe ApplicationSettings::UpdateService do
context 'when the passed terms are blank' do
let(:params) { { terms: '' } }
- it 'does not create terms' do
- expect { subject.execute }.not_to change { ApplicationSetting::Term.count }
+ it 'does create terms' do
+ expect { subject.execute }.to change { ApplicationSetting::Term.count }.by(1)
end
end
diff --git a/spec/services/audit_event_service_spec.rb b/spec/services/audit_event_service_spec.rb
index 997f506c269..ce7b43972da 100644
--- a/spec/services/audit_event_service_spec.rb
+++ b/spec/services/audit_event_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe AuditEventService do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user, :with_sign_ins) }
let_it_be(:project_member) { create(:project_member, user: user) }
+
let(:service) { described_class.new(user, project, { action: :destroy }) }
let(:logger) { instance_double(Gitlab::AuditJsonLogger) }
@@ -78,15 +79,14 @@ RSpec.describe AuditEventService do
context 'with IP address', :request_store do
using RSpec::Parameterized::TableSyntax
- where(:from_caller, :from_context, :from_author_sign_in, :output) do
- '192.168.0.1' | '192.168.0.2' | '192.168.0.3' | '192.168.0.1'
- nil | '192.168.0.2' | '192.168.0.3' | '192.168.0.2'
- nil | nil | '192.168.0.3' | '192.168.0.3'
+ where(:from_context, :from_author_sign_in, :output) do
+ '192.168.0.2' | '192.168.0.3' | '192.168.0.2'
+ nil | '192.168.0.3' | '192.168.0.3'
end
with_them do
let(:user) { create(:user, current_sign_in_ip: from_author_sign_in) }
- let(:audit_service) { described_class.new(user, user, with: 'standard', ip_address: from_caller) }
+ let(:audit_service) { described_class.new(user, user, with: 'standard') }
before do
allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(from_context)
diff --git a/spec/services/auth/container_registry_authentication_service_spec.rb b/spec/services/auth/container_registry_authentication_service_spec.rb
index ba7acd3d3df..4124696ac08 100644
--- a/spec/services/auth/container_registry_authentication_service_spec.rb
+++ b/spec/services/auth/container_registry_authentication_service_spec.rb
@@ -6,4 +6,96 @@ RSpec.describe Auth::ContainerRegistryAuthenticationService do
include AdminModeHelper
it_behaves_like 'a container registry auth service'
+
+ context 'when in migration mode' do
+ include_context 'container registry auth service context'
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ shared_examples 'an unmodified token' do
+ it_behaves_like 'a valid token'
+ it { expect(payload['access']).not_to include(have_key('migration_eligible')) }
+ end
+
+ shared_examples 'a modified token with migration eligibility' do |eligible|
+ it_behaves_like 'a valid token'
+ it { expect(payload['access']).to include(include('migration_eligible' => eligible)) }
+ end
+
+ shared_examples 'a modified token' do
+ context 'with a non eligible root ancestor and project' do
+ before do
+ stub_feature_flags(container_registry_migration_phase1_deny: project.root_ancestor)
+ stub_feature_flags(container_registry_migration_phase1_allow: false)
+ end
+
+ it_behaves_like 'a modified token with migration eligibility', false
+ end
+
+ context 'with a non eligible root ancestor and eligible project' do
+ before do
+ stub_feature_flags(container_registry_migration_phase1_deny: false)
+ stub_feature_flags(container_registry_migration_phase1_deny: project.root_ancestor)
+ stub_feature_flags(container_registry_migration_phase1_allow: project)
+ end
+
+ it_behaves_like 'a modified token with migration eligibility', false
+ end
+
+ context 'with an eligible root ancestor and non eligible project' do
+ before do
+ stub_feature_flags(container_registry_migration_phase1_deny: false)
+ stub_feature_flags(container_registry_migration_phase1_allow: false)
+ end
+
+ it_behaves_like 'a modified token with migration eligibility', false
+ end
+
+ context 'with an eligible root ancestor and project' do
+ before do
+ stub_feature_flags(container_registry_migration_phase1_deny: false)
+ stub_feature_flags(container_registry_migration_phase1_allow: project)
+ end
+
+ it_behaves_like 'a modified token with migration eligibility', true
+ end
+ end
+
+ context 'with pull action' do
+ let(:current_params) do
+ { scopes: ["repository:#{project.full_path}:pull"] }
+ end
+
+ it_behaves_like 'an unmodified token'
+ end
+
+ context 'with push action' do
+ let(:current_params) do
+ { scopes: ["repository:#{project.full_path}:push"] }
+ end
+
+ it_behaves_like 'a modified token'
+ end
+
+ context 'with multiple actions including push' do
+ let(:current_params) do
+ { scopes: ["repository:#{project.full_path}:pull,push,delete"] }
+ end
+
+ it_behaves_like 'a modified token'
+ end
+
+ context 'with multiple actions excluding push' do
+ let(:current_params) do
+ { scopes: ["repository:#{project.full_path}:pull,delete"] }
+ end
+
+ it_behaves_like 'an unmodified token'
+ end
+ end
end
diff --git a/spec/services/auth/dependency_proxy_authentication_service_spec.rb b/spec/services/auth/dependency_proxy_authentication_service_spec.rb
index 1fd1677c7da..35e6d59b456 100644
--- a/spec/services/auth/dependency_proxy_authentication_service_spec.rb
+++ b/spec/services/auth/dependency_proxy_authentication_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Auth::DependencyProxyAuthenticationService do
let_it_be(:user) { create(:user) }
+
let(:service) { Auth::DependencyProxyAuthenticationService.new(nil, user) }
before do
diff --git a/spec/services/auto_merge_service_spec.rb b/spec/services/auto_merge_service_spec.rb
index 3f7a26aa00e..335c608c206 100644
--- a/spec/services/auto_merge_service_spec.rb
+++ b/spec/services/auto_merge_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe AutoMergeService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:service) { described_class.new(project, user) }
before_all do
diff --git a/spec/services/branches/create_service_spec.rb b/spec/services/branches/create_service_spec.rb
index 5cf0d5af75f..1962aca35e1 100644
--- a/spec/services/branches/create_service_spec.rb
+++ b/spec/services/branches/create_service_spec.rb
@@ -38,10 +38,23 @@ RSpec.describe Branches::CreateService do
end
it 'returns an error with a reference name' do
+ err_msg = 'Failed to create branch \'new-feature\': invalid reference name \'unknown\''
result = service.execute('new-feature', 'unknown')
expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Invalid reference name: unknown')
+ expect(result[:message]).to eq(err_msg)
+ end
+ end
+
+ context 'when an ambiguous branch name is provided' do
+ it 'returns an error that branch could not be created' do
+ err_msg = 'Failed to create branch \'feature\': 13:reference is ambiguous.'
+
+ service.execute('feature/widget', 'master')
+ result = service.execute('feature', 'master')
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq(err_msg)
end
end
diff --git a/spec/services/bulk_create_integration_service_spec.rb b/spec/services/bulk_create_integration_service_spec.rb
index 8369eb48088..4b0029e27cb 100644
--- a/spec/services/bulk_create_integration_service_spec.rb
+++ b/spec/services/bulk_create_integration_service_spec.rb
@@ -6,13 +6,14 @@ RSpec.describe BulkCreateIntegrationService do
include JiraServiceHelper
before_all do
- stub_jira_service_test
+ stub_jira_integration_test
end
let_it_be(:excluded_group) { create(:group) }
let_it_be(:excluded_project) { create(:project, group: excluded_group) }
- let(:instance_integration) { create(:jira_service, :instance) }
- let(:template_integration) { create(:jira_service, :template) }
+
+ let(:instance_integration) { create(:jira_integration, :instance) }
+ let(:template_integration) { create(:jira_integration, :template) }
let(:excluded_attributes) { %w[id project_id group_id inherit_from_id instance template created_at updated_at] }
shared_examples 'creates integration from batch ids' do
@@ -49,7 +50,7 @@ RSpec.describe BulkCreateIntegrationService do
context 'with a project association' do
let!(:project) { create(:project) }
- let(:created_integration) { project.jira_service }
+ let(:created_integration) { project.jira_integration }
let(:batch) { Project.where(id: project.id) }
let(:association) { 'project' }
@@ -73,8 +74,8 @@ RSpec.describe BulkCreateIntegrationService do
context 'with a project association' do
let!(:project) { create(:project, group: group) }
- let(:integration) { create(:jira_service, group: group, project: nil) }
- let(:created_integration) { project.jira_service }
+ let(:integration) { create(:jira_integration, group: group, project: nil) }
+ let(:created_integration) { project.jira_integration }
let(:batch) { Project.where(id: Project.minimum(:id)..Project.maximum(:id)).without_integration(integration).in_namespace(integration.group.self_and_descendants) }
let(:association) { 'project' }
let(:inherit_from_id) { integration.id }
@@ -85,7 +86,7 @@ RSpec.describe BulkCreateIntegrationService do
context 'with a group association' do
let!(:subgroup) { create(:group, parent: group) }
- let(:integration) { create(:jira_service, group: group, project: nil, inherit_from_id: instance_integration.id) }
+ let(:integration) { create(:jira_integration, group: group, project: nil, inherit_from_id: instance_integration.id) }
let(:created_integration) { Integration.find_by(group: subgroup) }
let(:batch) { Group.where(id: subgroup.id) }
let(:association) { 'group' }
@@ -101,7 +102,7 @@ RSpec.describe BulkCreateIntegrationService do
context 'with a project association' do
let!(:project) { create(:project) }
- let(:created_integration) { project.jira_service }
+ let(:created_integration) { project.jira_integration }
let(:batch) { Project.where(id: project.id) }
let(:association) { 'project' }
let(:inherit_from_id) { integration.id }
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
index 0961ddce553..a24af9ae64d 100644
--- a/spec/services/bulk_imports/file_download_service_spec.rb
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -4,26 +4,41 @@ require 'spec_helper'
RSpec.describe BulkImports::FileDownloadService do
describe '#execute' do
+ let_it_be(:allowed_content_types) { %w(application/gzip application/octet-stream) }
+ let_it_be(:file_size_limit) { 5.gigabytes }
let_it_be(:config) { build(:bulk_import_configuration) }
let_it_be(:content_type) { 'application/octet-stream' }
+ let_it_be(:content_disposition) { nil }
let_it_be(:filename) { 'file_download_service_spec' }
let_it_be(:tmpdir) { Dir.tmpdir }
let_it_be(:filepath) { File.join(tmpdir, filename) }
+ let_it_be(:content_length) { 1000 }
+
+ let(:chunk_double) { double('chunk', size: 100, code: 200) }
- let(:chunk_double) { double('chunk', size: 1000, code: 200) }
let(:response_double) do
double(
code: 200,
success?: true,
parsed_response: {},
headers: {
- 'content-length' => 100,
- 'content-type' => content_type
+ 'content-length' => content_length,
+ 'content-type' => content_type,
+ 'content-disposition' => content_disposition
}
)
end
- subject { described_class.new(configuration: config, relative_url: '/test', dir: tmpdir, filename: filename) }
+ subject do
+ described_class.new(
+ configuration: config,
+ relative_url: '/test',
+ dir: tmpdir,
+ filename: filename,
+ file_size_limit: file_size_limit,
+ allowed_content_types: allowed_content_types
+ )
+ end
before do
allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
@@ -54,7 +69,14 @@ RSpec.describe BulkImports::FileDownloadService do
stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
double = instance_double(BulkImports::Configuration, url: 'https://localhost', access_token: 'token')
- service = described_class.new(configuration: double, relative_url: '/test', dir: tmpdir, filename: filename)
+ service = described_class.new(
+ configuration: double,
+ relative_url: '/test',
+ dir: tmpdir,
+ filename: filename,
+ file_size_limit: file_size_limit,
+ allowed_content_types: allowed_content_types
+ )
expect { service.execute }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
end
@@ -70,31 +92,46 @@ RSpec.describe BulkImports::FileDownloadService do
context 'when content-length is not valid' do
context 'when content-length exceeds limit' do
- before do
- stub_const("#{described_class}::FILE_SIZE_LIMIT", 1)
- end
+ let(:file_size_limit) { 1 }
it 'raises an error' do
- expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid content length')
+ expect { subject.execute }.to raise_error(
+ described_class::ServiceError,
+ 'File size 1000 Bytes exceeds limit of 1 Byte'
+ )
end
end
context 'when content-length is missing' do
- let(:response_double) { double(success?: true, headers: { 'content-type' => content_type }) }
+ let(:content_length) { nil }
it 'raises an error' do
- expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid content length')
+ expect { subject.execute }.to raise_error(
+ described_class::ServiceError,
+ 'Missing content-length header'
+ )
end
end
end
- context 'when partially downloaded file exceeds limit' do
- before do
- stub_const("#{described_class}::FILE_SIZE_LIMIT", 150)
+ context 'when content-length is equals the file size limit' do
+ let(:content_length) { 150 }
+ let(:file_size_limit) { 150 }
+
+ it 'does not raise an error' do
+ expect { subject.execute }.not_to raise_error
end
+ end
+
+ context 'when partially downloaded file exceeds limit' do
+ let(:content_length) { 151 }
+ let(:file_size_limit) { 150 }
it 'raises an error' do
- expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid downloaded file')
+ expect { subject.execute }.to raise_error(
+ described_class::ServiceError,
+ 'File size 151 Bytes exceeds limit of 150 Bytes'
+ )
end
end
@@ -102,7 +139,10 @@ RSpec.describe BulkImports::FileDownloadService do
let(:chunk_double) { double('chunk', size: 1000, code: 307) }
it 'raises an error' do
- expect { subject.execute }.to raise_error(described_class::ServiceError, 'File download error 307')
+ expect { subject.execute }.to raise_error(
+ described_class::ServiceError,
+ 'File download error 307'
+ )
end
end
@@ -110,23 +150,88 @@ RSpec.describe BulkImports::FileDownloadService do
let_it_be(:symlink) { File.join(tmpdir, 'symlink') }
before do
- FileUtils.ln_s(File.join(tmpdir, filename), symlink)
+ FileUtils.ln_s(File.join(tmpdir, filename), symlink, force: true)
end
- subject { described_class.new(configuration: config, relative_url: '/test', dir: tmpdir, filename: 'symlink') }
+ subject do
+ described_class.new(
+ configuration: config,
+ relative_url: '/test',
+ dir: tmpdir,
+ filename: 'symlink',
+ file_size_limit: file_size_limit,
+ allowed_content_types: allowed_content_types
+ )
+ end
it 'raises an error and removes the file' do
- expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid downloaded file')
+ expect { subject.execute }.to raise_error(
+ described_class::ServiceError,
+ 'Invalid downloaded file'
+ )
expect(File.exist?(symlink)).to eq(false)
end
end
context 'when dir is not in tmpdir' do
- subject { described_class.new(configuration: config, relative_url: '/test', dir: '/etc', filename: filename) }
+ subject do
+ described_class.new(
+ configuration: config,
+ relative_url: '/test',
+ dir: '/etc',
+ filename: filename,
+ file_size_limit: file_size_limit,
+ allowed_content_types: allowed_content_types
+ )
+ end
it 'raises an error' do
- expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid target directory')
+ expect { subject.execute }.to raise_error(
+ described_class::ServiceError,
+ 'Invalid target directory'
+ )
+ end
+ end
+
+ context 'when using the remote filename' do
+ let_it_be(:filename) { nil }
+
+ context 'when no filename is given' do
+ it 'raises an error when the filename is not provided in the request header' do
+ expect { subject.execute }.to raise_error(
+ described_class::ServiceError,
+ 'Remote filename not provided in content-disposition header'
+ )
+ end
+ end
+
+ context 'with a given filename' do
+ let_it_be(:content_disposition) { 'filename="avatar.png"' }
+
+ it 'uses the given filename' do
+ expect(subject.execute).to eq(File.join(tmpdir, "avatar.png"))
+ end
+ end
+
+ context 'when the filename is a path' do
+ let_it_be(:content_disposition) { 'filename="../../avatar.png"' }
+
+ it 'sanitizes the filename to its basename' do
+ expect(subject.execute).to eq(File.join(tmpdir, "avatar.png"))
+ end
+ end
+
+ context 'when the filename is longer than the limit' do
+ let_it_be(:content_disposition) { 'filename="../../xxx.b"' }
+
+ before do
+ stub_const("#{described_class}::FILENAME_SIZE_LIMIT", 1)
+ end
+
+ it 'truncates the filename to the filename size limit' do
+ expect(subject.execute).to eq(File.join(tmpdir, "x.b"))
+ end
end
end
end
diff --git a/spec/services/bulk_update_integration_service_spec.rb b/spec/services/bulk_update_integration_service_spec.rb
index a866e0852bc..b6b7d1936a2 100644
--- a/spec/services/bulk_update_integration_service_spec.rb
+++ b/spec/services/bulk_update_integration_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe BulkUpdateIntegrationService do
include JiraServiceHelper
before_all do
- stub_jira_service_test
+ stub_jira_integration_test
end
let(:excluded_attributes) { %w[id project_id group_id inherit_from_id instance template created_at updated_at] }
diff --git a/spec/services/captcha/captcha_verification_service_spec.rb b/spec/services/captcha/captcha_verification_service_spec.rb
index 245e06703f5..fe2199fb53e 100644
--- a/spec/services/captcha/captcha_verification_service_spec.rb
+++ b/spec/services/captcha/captcha_verification_service_spec.rb
@@ -4,21 +4,31 @@ require 'spec_helper'
RSpec.describe Captcha::CaptchaVerificationService do
describe '#execute' do
- let(:captcha_response) { nil }
- let(:request) { double(:request) }
- let(:service) { described_class.new }
+ let(:captcha_response) { 'abc123' }
+ let(:fake_ip) { '1.2.3.4' }
+ let(:spam_params) do
+ ::Spam::SpamParams.new(
+ captcha_response: captcha_response,
+ spam_log_id: double,
+ ip_address: fake_ip,
+ user_agent: double,
+ referer: double
+ )
+ end
+
+ let(:service) { described_class.new(spam_params: spam_params) }
- subject { service.execute(captcha_response: captcha_response, request: request) }
+ subject { service.execute }
context 'when there is no captcha_response' do
+ let(:captcha_response) { nil }
+
it 'returns false' do
expect(subject).to eq(false)
end
end
context 'when there is a captcha_response' do
- let(:captcha_response) { 'abc123' }
-
before do
expect(Gitlab::Recaptcha).to receive(:load_configurations!)
end
@@ -29,10 +39,12 @@ RSpec.describe Captcha::CaptchaVerificationService do
expect(subject).to eq(true)
end
- it 'has a request method which returns the request' do
+ it 'has a request method which returns an object with the ip address #remote_ip' do
subject
- expect(service.send(:request)).to eq(request)
+ request_struct = service.send(:request)
+ expect(request_struct).to respond_to(:remote_ip)
+ expect(request_struct.remote_ip).to eq(fake_ip)
end
end
end
diff --git a/spec/services/ci/after_requeue_job_service_spec.rb b/spec/services/ci/after_requeue_job_service_spec.rb
index a2147759dba..f8c49060ce0 100644
--- a/spec/services/ci/after_requeue_job_service_spec.rb
+++ b/spec/services/ci/after_requeue_job_service_spec.rb
@@ -8,9 +8,9 @@ RSpec.describe Ci::AfterRequeueJobService do
let(:pipeline) { create(:ci_pipeline, project: project) }
- let!(:build) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
let!(:test1) { create(:ci_build, :success, pipeline: pipeline, stage_idx: 1) }
let!(:test2) { create(:ci_build, :skipped, pipeline: pipeline, stage_idx: 1) }
+ let!(:build) { create(:ci_build, pipeline: pipeline, stage_idx: 0, name: 'build') }
subject(:execute_service) { described_class.new(project, user).execute(build) }
@@ -24,6 +24,34 @@ RSpec.describe Ci::AfterRequeueJobService do
expect(test2.reload).to be_created
end
+ context 'when there is a job need from the same stage' do
+ let!(:test3) do
+ create(:ci_build,
+ :skipped,
+ pipeline: pipeline,
+ stage_idx: 0,
+ scheduling_type: :dag)
+ end
+
+ before do
+ create(:ci_build_need, build: test3, name: 'build')
+ end
+
+ it 'marks subsequent skipped jobs as processable' do
+ expect { execute_service }.to change { test3.reload.status }.from('skipped').to('created')
+ end
+
+ context 'with ci_same_stage_job_needs FF disabled' do
+ before do
+ stub_feature_flags(ci_same_stage_job_needs: false)
+ end
+
+ it 'does nothing with the build' do
+ expect { execute_service }.not_to change { test3.reload.status }
+ end
+ end
+ end
+
context 'when the pipeline is a downstream pipeline and the bridge is depended' do
let!(:trigger_job) { create(:ci_bridge, :strategy_depend, status: 'success') }
diff --git a/spec/services/ci/append_build_trace_service_spec.rb b/spec/services/ci/append_build_trace_service_spec.rb
index 8812680b665..b251f00158f 100644
--- a/spec/services/ci/append_build_trace_service_spec.rb
+++ b/spec/services/ci/append_build_trace_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::AppendBuildTraceService do
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
- let_it_be(:build) { create(:ci_build, :running, pipeline: pipeline) }
+ let_it_be_with_reload(:build) { create(:ci_build, :running, pipeline: pipeline) }
before do
stub_feature_flags(ci_enable_live_trace: true)
@@ -54,4 +54,46 @@ RSpec.describe Ci::AppendBuildTraceService do
expect(result.stream_size).to eq 4
end
end
+
+ context 'when the trace size is exceeded' do
+ before do
+ project.actual_limits.update!(ci_jobs_trace_size_limit: 1)
+ end
+
+ it 'returns 403 status code' do
+ stream_size = 1.25.megabytes
+ body_data = 'x' * stream_size
+ content_range = "0-#{stream_size}"
+
+ result = described_class
+ .new(build, content_range: content_range)
+ .execute(body_data)
+
+ expect(result.status).to eq 403
+ expect(result.stream_size).to be_nil
+ expect(build.trace_chunks.count).to eq 0
+ expect(build.reload).to be_failed
+ expect(build.failure_reason).to eq 'trace_size_exceeded'
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_jobs_trace_size_limit: false)
+ end
+
+ it 'appends trace chunks' do
+ stream_size = 1.25.megabytes
+ body_data = 'x' * stream_size
+ content_range = "0-#{stream_size}"
+
+ result = described_class
+ .new(build, content_range: content_range)
+ .execute(body_data)
+
+ expect(result.status).to eq 202
+ expect(result.stream_size).to eq stream_size
+ expect(build.trace_chunks.count).to eq 10
+ end
+ end
+ end
end
diff --git a/spec/services/ci/archive_trace_service_spec.rb b/spec/services/ci/archive_trace_service_spec.rb
index a4f498f17c3..12804efc28c 100644
--- a/spec/services/ci/archive_trace_service_spec.rb
+++ b/spec/services/ci/archive_trace_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Ci::ArchiveTraceService, '#execute' do
- subject { described_class.new.execute(job, worker_name: ArchiveTraceWorker.name) }
+ subject { described_class.new.execute(job, worker_name: Ci::ArchiveTraceWorker.name) }
context 'when job is finished' do
let(:job) { create(:ci_build, :success, :trace_live) }
@@ -30,43 +30,17 @@ RSpec.describe Ci::ArchiveTraceService, '#execute' do
create(:ci_build_trace_chunk, build: job)
end
- context 'when the feature flag `erase_traces_from_already_archived_jobs_when_archiving_again` is enabled' do
- before do
- stub_feature_flags(erase_traces_from_already_archived_jobs_when_archiving_again: true)
- end
-
- it 'removes the trace chunks' do
- expect { subject }.to change { job.trace_chunks.count }.to(0)
- end
-
- context 'when associated data does not exist' do
- before do
- job.job_artifacts_trace.file.remove!
- end
-
- it 'removes the trace artifact' do
- expect { subject }.to change { job.reload.job_artifacts_trace }.to(nil)
- end
- end
+ it 'removes the trace chunks' do
+ expect { subject }.to change { job.trace_chunks.count }.to(0)
end
- context 'when the feature flag `erase_traces_from_already_archived_jobs_when_archiving_again` is disabled' do
+ context 'when associated data does not exist' do
before do
- stub_feature_flags(erase_traces_from_already_archived_jobs_when_archiving_again: false)
+ job.job_artifacts_trace.file.remove!
end
- it 'does not remove the trace chunks' do
- expect { subject }.not_to change { job.trace_chunks.count }
- end
-
- context 'when associated data does not exist' do
- before do
- job.job_artifacts_trace.file.remove!
- end
-
- it 'does not remove the trace artifact' do
- expect { subject }.not_to change { job.reload.job_artifacts_trace }
- end
+ it 'removes the trace artifact' do
+ expect { subject }.to change { job.reload.job_artifacts_trace }.to(nil)
end
end
end
@@ -77,7 +51,7 @@ RSpec.describe Ci::ArchiveTraceService, '#execute' do
it 'leaves a warning message in sidekiq log' do
expect(Sidekiq.logger).to receive(:warn).with(
- class: ArchiveTraceWorker.name,
+ class: Ci::ArchiveTraceWorker.name,
message: 'The job does not have live trace but going to be archived.',
job_id: job.id)
@@ -94,7 +68,7 @@ RSpec.describe Ci::ArchiveTraceService, '#execute' do
it 'leaves a warning message in sidekiq log' do
expect(Sidekiq.logger).to receive(:warn).with(
- class: ArchiveTraceWorker.name,
+ class: Ci::ArchiveTraceWorker.name,
message: 'The job does not have archived trace after archiving.',
job_id: job.id)
@@ -114,7 +88,7 @@ RSpec.describe Ci::ArchiveTraceService, '#execute' do
job_id: job.id).once
expect(Sidekiq.logger).to receive(:warn).with(
- class: ArchiveTraceWorker.name,
+ class: Ci::ArchiveTraceWorker.name,
message: "Failed to archive trace. message: Job is not finished yet.",
job_id: job.id).and_call_original
diff --git a/spec/services/ci/create_pipeline_service/cache_spec.rb b/spec/services/ci/create_pipeline_service/cache_spec.rb
index 5f74c2f1cef..f9767a794db 100644
--- a/spec/services/ci/create_pipeline_service/cache_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cache_spec.rb
@@ -33,11 +33,11 @@ RSpec.describe Ci::CreatePipelineService do
it 'uses the provided key' do
expected = {
- 'key' => 'a-key',
- 'paths' => ['logs/', 'binaries/'],
- 'policy' => 'pull-push',
- 'untracked' => true,
- 'when' => 'on_success'
+ key: 'a-key',
+ paths: ['logs/', 'binaries/'],
+ policy: 'pull-push',
+ untracked: true,
+ when: 'on_success'
}
expect(pipeline).to be_persisted
@@ -66,10 +66,10 @@ RSpec.describe Ci::CreatePipelineService do
it 'builds a cache key' do
expected = {
- 'key' => /[a-f0-9]{40}/,
- 'paths' => ['logs/'],
- 'policy' => 'pull-push',
- 'when' => 'on_success'
+ key: /[a-f0-9]{40}/,
+ paths: ['logs/'],
+ policy: 'pull-push',
+ when: 'on_success'
}
expect(pipeline).to be_persisted
@@ -82,10 +82,10 @@ RSpec.describe Ci::CreatePipelineService do
it 'uses default cache key' do
expected = {
- 'key' => /default/,
- 'paths' => ['logs/'],
- 'policy' => 'pull-push',
- 'when' => 'on_success'
+ key: /default/,
+ paths: ['logs/'],
+ policy: 'pull-push',
+ when: 'on_success'
}
expect(pipeline).to be_persisted
@@ -115,10 +115,10 @@ RSpec.describe Ci::CreatePipelineService do
it 'builds a cache key' do
expected = {
- 'key' => /\$ENV_VAR-[a-f0-9]{40}/,
- 'paths' => ['logs/'],
- 'policy' => 'pull-push',
- 'when' => 'on_success'
+ key: /\$ENV_VAR-[a-f0-9]{40}/,
+ paths: ['logs/'],
+ policy: 'pull-push',
+ when: 'on_success'
}
expect(pipeline).to be_persisted
@@ -131,10 +131,10 @@ RSpec.describe Ci::CreatePipelineService do
it 'uses default cache key' do
expected = {
- 'key' => /\$ENV_VAR-default/,
- 'paths' => ['logs/'],
- 'policy' => 'pull-push',
- 'when' => 'on_success'
+ key: /\$ENV_VAR-default/,
+ paths: ['logs/'],
+ policy: 'pull-push',
+ when: 'on_success'
}
expect(pipeline).to be_persisted
diff --git a/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb b/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
index 7193e5bd7d4..a42770aae20 100644
--- a/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
+++ b/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'contains both errors and warnings' do
- error_message = 'build job: need test is not defined in prior stages'
+ error_message = 'build job: need test is not defined in current or prior stages'
warning_message = /jobs:test may allow multiple pipelines to run/
expect(pipeline.yaml_errors).to eq(error_message)
diff --git a/spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb b/spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb
index 4cf52223e38..5dceb9f57f0 100644
--- a/spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb
+++ b/spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb
@@ -39,8 +39,8 @@ RSpec.describe Ci::CreatePipelineService do
it 'creates a pipeline' do
expect(pipeline).to be_persisted
expect(pipeline.builds.first.options).to match(a_hash_including({
- 'before_script' => ['ls'],
- 'script' => [
+ before_script: ['ls'],
+ script: [
'echo doing my first step',
'echo doing step 1 of job 1',
'echo doing my last step'
diff --git a/spec/services/ci/create_pipeline_service/dry_run_spec.rb b/spec/services/ci/create_pipeline_service/dry_run_spec.rb
index 0fb500f5729..01df7772eef 100644
--- a/spec/services/ci/create_pipeline_service/dry_run_spec.rb
+++ b/spec/services/ci/create_pipeline_service/dry_run_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe Ci::CreatePipelineService do
it_behaves_like 'returns a non persisted pipeline'
it 'returns a pipeline with errors', :aggregate_failures do
- error_message = 'build job: need test is not defined in prior stages'
+ error_message = 'build job: need test is not defined in current or prior stages'
expect(subject.error_messages.map(&:content)).to eq([error_message])
expect(subject.errors).not_to be_empty
@@ -109,7 +109,7 @@ RSpec.describe Ci::CreatePipelineService do
it_behaves_like 'returns a non persisted pipeline'
it 'returns a pipeline with errors', :aggregate_failures do
- error_message = "'test' job needs 'build' job, but it was not added to the pipeline"
+ error_message = "'test' job needs 'build' job, but 'build' is not in any previous stage"
expect(subject.error_messages.map(&:content)).to eq([error_message])
expect(subject.errors).not_to be_empty
diff --git a/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb b/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb
new file mode 100644
index 00000000000..df881c1ac8f
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CreatePipelineService do
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:group_variable) { create(:ci_group_variable, group: group, key: 'RUNNER_TAG', value: 'group')}
+ let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:user) { create(:user) }
+
+ let(:service) { described_class.new(project, user, ref: 'master') }
+ let(:pipeline) { service.execute(:push) }
+ let(:job) { pipeline.builds.find_by(name: 'job') }
+
+ before do
+ project.add_developer(user)
+ stub_ci_pipeline_yaml_file config
+ end
+
+ context 'when the variable is set' do
+ let(:config) do
+ <<~EOS
+ variables:
+ KUBERNETES_RUNNER: kubernetes
+
+ job:
+ tags:
+ - docker
+ - $KUBERNETES_RUNNER
+ script:
+ - echo "Hello runner selector feature"
+ EOS
+ end
+
+ it 'uses the evaluated variable' do
+ expect(pipeline).to be_created_successfully
+ expect(job.tags.pluck(:name)).to match_array(%w[docker kubernetes])
+ end
+ end
+
+ context 'when the tag is composed by two variables' do
+ let(:config) do
+ <<~EOS
+ variables:
+ CLOUD_PROVIDER: aws
+ KUBERNETES_RUNNER: kubernetes
+ ENVIRONMENT_NAME: prod
+
+ job:
+ tags:
+ - docker
+ - $CLOUD_PROVIDER-$KUBERNETES_RUNNER-$ENVIRONMENT_NAME
+ script:
+ - echo "Hello runner selector feature"
+ EOS
+ end
+
+ it 'uses the evaluated variables' do
+ expect(pipeline).to be_created_successfully
+ expect(job.tags.pluck(:name)).to match_array(%w[docker aws-kubernetes-prod])
+ end
+ end
+
+ context 'when the variable is not set' do
+ let(:config) do
+ <<~EOS
+ job:
+ tags:
+ - docker
+ - $KUBERNETES_RUNNER
+ script:
+ - echo "Hello runner selector feature"
+ EOS
+ end
+
+ it 'uses the variable as a regular string' do
+ expect(pipeline).to be_created_successfully
+ expect(job.tags.pluck(:name)).to match_array(%w[docker $KUBERNETES_RUNNER])
+ end
+ end
+
+ context 'when the tag uses group variables' do
+ let(:config) do
+ <<~EOS
+ job:
+ tags:
+ - docker
+ - $RUNNER_TAG
+ script:
+ - echo "Hello runner selector feature"
+ EOS
+ end
+
+ it 'uses the evaluated variables' do
+ expect(pipeline).to be_created_successfully
+ expect(job.tags.pluck(:name)).to match_array(%w[docker group])
+ end
+ end
+
+ context 'when the tag has the same variable name defined for both group and project' do
+ let_it_be(:project_variable) { create(:ci_variable, project: project, key: 'RUNNER_TAG', value: 'project') }
+
+ let(:config) do
+ <<~EOS
+ variables:
+ RUNNER_TAG: pipeline
+ job:
+ tags:
+ - docker
+ - $RUNNER_TAG
+ script:
+ - echo "Hello runner selector feature"
+ EOS
+ end
+
+ it 'uses the project variable instead of group due to variable precedence' do
+ expect(pipeline).to be_created_successfully
+ expect(job.tags.pluck(:name)).to match_array(%w[docker project])
+ end
+ end
+
+ context 'with parallel:matrix config' do
+ let(:tags) { pipeline.builds.map(&:tags).flatten.pluck(:name) }
+
+ let(:config) do
+ <<~EOS
+ job:
+ parallel:
+ matrix:
+ - PROVIDER: [aws, gcp]
+ STACK: [monitoring, backup, app]
+ tags:
+ - ${PROVIDER}-${STACK}
+ script:
+ - echo "Hello runner selector feature"
+ EOS
+ end
+
+ it 'uses the evaluated variables' do
+ expect(pipeline).to be_created_successfully
+ expect(tags).to match_array(%w[aws-monitoring aws-backup aws-app gcp-monitoring gcp-backup gcp-app])
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/needs_spec.rb b/spec/services/ci/create_pipeline_service/needs_spec.rb
index 3b4a6178b8f..d096db10d0b 100644
--- a/spec/services/ci/create_pipeline_service/needs_spec.rb
+++ b/spec/services/ci/create_pipeline_service/needs_spec.rb
@@ -104,7 +104,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'saves dependencies' do
expect(test_a_build.options)
- .to match(a_hash_including('dependencies' => ['build_a']))
+ .to match(a_hash_including(dependencies: ['build_a']))
end
it 'artifacts default to true' do
@@ -257,7 +257,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'returns error' do
expect(pipeline.yaml_errors)
- .to eq("'test' job needs 'build' job, but it was not added to the pipeline")
+ .to eq("'test' job needs 'build' job, but 'build' is not in any previous stage")
end
context 'when need is optional' do
diff --git a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
index 512cf546e6a..7a6535ed3fa 100644
--- a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
@@ -69,9 +69,9 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
it_behaves_like 'successful creation' do
let(:expected_bridge_options) do
{
- 'trigger' => {
- 'include' => [
- { 'local' => 'path/to/child.yml' }
+ trigger: {
+ include: [
+ { local: 'path/to/child.yml' }
]
}
}
@@ -149,9 +149,9 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
it_behaves_like 'successful creation' do
let(:expected_bridge_options) do
{
- 'trigger' => {
- 'include' => [
- { 'local' => 'path/to/child.yml' }
+ trigger: {
+ include: [
+ { local: 'path/to/child.yml' }
]
}
}
@@ -175,8 +175,8 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
it_behaves_like 'successful creation' do
let(:expected_bridge_options) do
{
- 'trigger' => {
- 'include' => 'path/to/child.yml'
+ trigger: {
+ include: 'path/to/child.yml'
}
}
end
@@ -202,8 +202,8 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
it_behaves_like 'successful creation' do
let(:expected_bridge_options) do
{
- 'trigger' => {
- 'include' => ['path/to/child.yml', 'path/to/child2.yml']
+ trigger: {
+ include: ['path/to/child.yml', 'path/to/child2.yml']
}
}
end
@@ -252,7 +252,7 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
end
it_behaves_like 'creation failure' do
- let(:expected_error) { /test job: dependency generator is not defined in prior stages/ }
+ let(:expected_error) { /test job: dependency generator is not defined in current or prior stages/ }
end
end
@@ -295,12 +295,12 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
it_behaves_like 'successful creation' do
let(:expected_bridge_options) do
{
- 'trigger' => {
- 'include' => [
+ trigger: {
+ include: [
{
- 'file' => 'path/to/child.yml',
- 'project' => 'my-namespace/my-project',
- 'ref' => 'master'
+ file: 'path/to/child.yml',
+ project: 'my-namespace/my-project',
+ ref: 'master'
}
]
}
@@ -353,11 +353,11 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
it_behaves_like 'successful creation' do
let(:expected_bridge_options) do
{
- 'trigger' => {
- 'include' => [
+ trigger: {
+ include: [
{
- 'file' => ["path/to/child1.yml", "path/to/child2.yml"],
- 'project' => 'my-namespace/my-project'
+ file: ["path/to/child1.yml", "path/to/child2.yml"],
+ project: 'my-namespace/my-project'
}
]
}
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index 33ec6aacc44..acdf38bbc13 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -230,22 +230,6 @@ RSpec.describe Ci::CreatePipelineService do
[nil, nil, nil, 'job var 4', nil, nil, 'overridden var 7']
)
end
-
- context 'when FF ci_workflow_rules_variables is disabled' do
- before do
- stub_feature_flags(ci_workflow_rules_variables: false)
- end
-
- it 'does not affect workflow variables but job variables' do
- expect(job1.scoped_variables.to_hash.values_at(*variable_keys)).to eq(
- ['overridden var 1', 'job var 2', nil, 'workflow var 4', 'job var 5', nil, 'workflow var 7']
- )
-
- expect(job2.scoped_variables.to_hash.values_at(*variable_keys)).to eq(
- [nil, nil, nil, 'job var 4', nil, nil, 'overridden var 7']
- )
- end
- end
end
context 'when matching to the second rule' do
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 3316f8c3d9b..64e8c6ac2df 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -1001,7 +1001,7 @@ RSpec.describe Ci::CreatePipelineService do
expect(pipeline.yaml_errors).not_to be_present
expect(pipeline).to be_persisted
expect(build).to be_kind_of(Ci::Build)
- expect(build.options).to eq(config[:release].except(:stage, :only).with_indifferent_access)
+ expect(build.options).to eq(config[:release].except(:stage, :only))
expect(build).to be_persisted
end
end
@@ -1715,7 +1715,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'contains the expected errors' do
expect(pipeline.builds).to be_empty
- error_message = "'test_a' job needs 'build_a' job, but it was not added to the pipeline"
+ error_message = "'test_a' job needs 'build_a' job, but 'build_a' is not in any previous stage"
expect(pipeline.yaml_errors).to eq(error_message)
expect(pipeline.error_messages.map(&:content)).to contain_exactly(error_message)
expect(pipeline.errors[:base]).to contain_exactly(error_message)
diff --git a/spec/services/ci/destroy_pipeline_service_spec.rb b/spec/services/ci/destroy_pipeline_service_spec.rb
index 302233cea5a..588ff0b1762 100644
--- a/spec/services/ci/destroy_pipeline_service_spec.rb
+++ b/spec/services/ci/destroy_pipeline_service_spec.rb
@@ -67,6 +67,30 @@ RSpec.describe ::Ci::DestroyPipelineService do
end
end
end
+
+ context 'when pipeline is in cancelable state' do
+ before do
+ allow(pipeline).to receive(:cancelable?).and_return(true)
+ end
+
+ it 'cancels the pipeline' do
+ expect(pipeline).to receive(:cancel_running)
+
+ subject
+ end
+
+ context 'when cancel_pipelines_prior_to_destroy is disabled' do
+ before do
+ stub_feature_flags(cancel_pipelines_prior_to_destroy: false)
+ end
+
+ it "doesn't cancel the pipeline" do
+ expect(pipeline).not_to receive(:cancel_running)
+
+ subject
+ end
+ end
+ end
end
context 'user is not owner' do
diff --git a/spec/services/ci/job_token_scope/add_project_service_spec.rb b/spec/services/ci/job_token_scope/add_project_service_spec.rb
new file mode 100644
index 00000000000..ba889465fac
--- /dev/null
+++ b/spec/services/ci/job_token_scope/add_project_service_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Ci::JobTokenScope::AddProjectService do
+ let(:service) { described_class.new(project, current_user) }
+
+ let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:target_project) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
+
+ describe '#execute' do
+ subject(:result) { service.execute(target_project) }
+
+ it_behaves_like 'editable job token scope' do
+ context 'when user has permissions on source and target projects' do
+ before do
+ project.add_maintainer(current_user)
+ target_project.add_developer(current_user)
+ end
+
+ it 'adds the project to the scope' do
+ expect do
+ expect(result).to be_success
+ end.to change { Ci::JobToken::ProjectScopeLink.count }.by(1)
+ end
+ end
+
+ context 'when target project is same as the source project' do
+ before do
+ project.add_maintainer(current_user)
+ end
+
+ let(:target_project) { project }
+
+ it_behaves_like 'returns error', "Validation failed: Target project can't be the same as the source project"
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/job_token_scope/remove_project_service_spec.rb b/spec/services/ci/job_token_scope/remove_project_service_spec.rb
new file mode 100644
index 00000000000..238fc879f54
--- /dev/null
+++ b/spec/services/ci/job_token_scope/remove_project_service_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Ci::JobTokenScope::RemoveProjectService do
+ let(:service) { described_class.new(project, current_user) }
+
+ let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:target_project) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
+
+ let_it_be(:link) do
+ create(:ci_job_token_project_scope_link,
+ source_project: project,
+ target_project: target_project)
+ end
+
+ describe '#execute' do
+ subject(:result) { service.execute(target_project) }
+
+ it_behaves_like 'editable job token scope' do
+ context 'when user has permissions on source and target project' do
+ before do
+ project.add_maintainer(current_user)
+ target_project.add_developer(current_user)
+ end
+
+ it 'removes the project from the scope' do
+ expect do
+ expect(result).to be_success
+ end.to change { Ci::JobToken::ProjectScopeLink.count }.by(-1)
+ end
+ end
+
+ context 'when target project is same as the source project' do
+ before do
+ project.add_maintainer(current_user)
+ end
+
+ let(:target_project) { project }
+
+ it_behaves_like 'returns error', "Source project cannot be removed from the job token scope"
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
index 13c924a3089..5089f8d5dba 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service.rb
@@ -842,20 +842,6 @@ RSpec.shared_examples 'Pipeline Processing Service' do
expect(all_builds.manual).to contain_exactly(linux_build)
expect(all_builds.skipped).to contain_exactly(deploy)
end
-
- context 'when FF ci_fix_pipeline_status_for_dag_needs_manual is disabled' do
- before do
- stub_feature_flags(ci_fix_pipeline_status_for_dag_needs_manual: false)
- end
-
- it 'makes deploy DAG to be waiting for optional manual to finish' do
- expect(process_pipeline).to be_truthy
-
- expect(stages).to eq(%w(skipped created))
- expect(all_builds.manual).to contain_exactly(linux_build)
- expect(all_builds.created).to contain_exactly(deploy)
- end
- end
end
context 'when a bridge job has parallel:matrix config', :sidekiq_inline do
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_same_stages.yml b/spec/services/ci/pipeline_processing/test_cases/dag_same_stages.yml
new file mode 100644
index 00000000000..2a63daeb561
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_same_stages.yml
@@ -0,0 +1,47 @@
+config:
+ build:
+ stage: test
+ script: exit 0
+
+ test:
+ stage: test
+ script: exit 0
+ needs: [build]
+
+ deploy:
+ stage: test
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ test: running
+ jobs:
+ build: success
+ test: pending
+ deploy: created
+
+ - event: success
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: running
+ jobs:
+ build: success
+ test: success
+ deploy: pending
diff --git a/spec/services/ci/pipelines/add_job_service_spec.rb b/spec/services/ci/pipelines/add_job_service_spec.rb
new file mode 100644
index 00000000000..a72ffbfdc87
--- /dev/null
+++ b/spec/services/ci/pipelines/add_job_service_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Pipelines::AddJobService do
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+
+ let(:job) { build(:ci_build) }
+
+ subject(:service) { described_class.new(pipeline) }
+
+ context 'when the pipeline is not persisted' do
+ let(:pipeline) { build(:ci_pipeline) }
+
+ it 'raises error' do
+ expect { service }.to raise_error('Pipeline must be persisted for this service to be used')
+ end
+ end
+
+ describe '#execute!' do
+ subject(:execute) do
+ service.execute!(job) do |job|
+ job.save!
+ end
+ end
+
+ it 'assigns pipeline attributes to the job' do
+ expect do
+ execute
+ end.to change { job.slice(:pipeline, :project, :ref) }.to(
+ pipeline: pipeline, project: pipeline.project, ref: pipeline.ref
+ )
+ end
+
+ it 'returns a service response with the job as payload' do
+ expect(execute).to be_success
+ expect(execute.payload[:job]).to eq(job)
+ end
+
+ it 'calls update_older_statuses_retried!' do
+ expect(job).to receive(:update_older_statuses_retried!)
+
+ execute
+ end
+
+ context 'when the block raises an error' do
+ subject(:execute) do
+ service.execute!(job) do |job|
+ raise "this is an error"
+ end
+ end
+
+ it 'returns a service response with the error and the job as payload' do
+ expect(execute).to be_error
+ expect(execute.payload[:job]).to eq(job)
+ expect(execute.message).to eq('this is an error')
+ end
+ end
+
+ context 'when the FF ci_fix_commit_status_retried is disabled' do
+ before do
+ stub_feature_flags(ci_fix_commit_status_retried: false)
+ end
+
+ it 'does not call update_older_statuses_retried!' do
+ expect(job).not_to receive(:update_older_statuses_retried!)
+
+ execute
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/play_bridge_service_spec.rb b/spec/services/ci/play_bridge_service_spec.rb
index d6130325b5a..3f97bfdf5ae 100644
--- a/spec/services/ci/play_bridge_service_spec.rb
+++ b/spec/services/ci/play_bridge_service_spec.rb
@@ -45,16 +45,6 @@ RSpec.describe Ci::PlayBridgeService, '#execute' do
it 'marks the subsequent job as processable' do
expect { execute_service }.to change { job.reload.status }.from('skipped').to('created')
end
-
- context 'when the FF ci_fix_pipeline_status_for_dag_needs_manual is disabled' do
- before do
- stub_feature_flags(ci_fix_pipeline_status_for_dag_needs_manual: false)
- end
-
- it 'does not change the subsequent job' do
- expect { execute_service }.not_to change { job.reload.status }.from('skipped')
- end
- end
end
context 'when bridge is not playable' do
diff --git a/spec/services/ci/play_build_service_spec.rb b/spec/services/ci/play_build_service_spec.rb
index 78de91675f9..babd601e0cf 100644
--- a/spec/services/ci/play_build_service_spec.rb
+++ b/spec/services/ci/play_build_service_spec.rb
@@ -71,16 +71,6 @@ RSpec.describe Ci::PlayBuildService, '#execute' do
it 'marks the subsequent job as processable' do
expect { service.execute(build) }.to change { job.reload.status }.from('skipped').to('created')
end
-
- context 'when the FF ci_fix_pipeline_status_for_dag_needs_manual is disabled' do
- before do
- stub_feature_flags(ci_fix_pipeline_status_for_dag_needs_manual: false)
- end
-
- it 'does not change the subsequent job' do
- expect { service.execute(build) }.not_to change { job.reload.status }.from('skipped')
- end
- end
end
context 'when variables are supplied' do
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index c4b1e2133ed..6e5d7725a7a 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -145,7 +145,7 @@ module Ci
context 'when using DEFCON mode that disables fair scheduling' do
before do
- stub_feature_flags(ci_queueing_disaster_recovery: true)
+ stub_feature_flags(ci_queueing_disaster_recovery_disable_fair_scheduling: true)
end
context 'when all builds are pending' do
@@ -269,51 +269,31 @@ module Ci
let!(:unrelated_group_runner) { create(:ci_runner, :group, groups: [unrelated_group]) }
it 'does not consider builds from other group runners' do
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 6
+ queue = ::Ci::Queue::BuildQueueService.new(group_runner)
+
+ expect(queue.builds_for_group_runner.size).to eq 6
execute(group_runner)
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 5
+ expect(queue.builds_for_group_runner.size).to eq 5
execute(group_runner)
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 4
+ expect(queue.builds_for_group_runner.size).to eq 4
execute(group_runner)
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 3
+ expect(queue.builds_for_group_runner.size).to eq 3
execute(group_runner)
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 2
+ expect(queue.builds_for_group_runner.size).to eq 2
execute(group_runner)
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 1
+ expect(queue.builds_for_group_runner.size).to eq 1
execute(group_runner)
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 0
+ expect(queue.builds_for_group_runner.size).to eq 0
expect(execute(group_runner)).to be_nil
end
end
- context 'when the use_distinct_in_register_job_object_hierarchy feature flag is enabled' do
- before do
- stub_feature_flags(use_distinct_in_register_job_object_hierarchy: true)
- stub_feature_flags(use_distinct_for_all_object_hierarchy: true)
- end
-
- it 'calls DISTINCT' do
- expect(described_class.new(group_runner).send(:builds_for_group_runner).to_sql).to include("DISTINCT")
- end
- end
-
- context 'when the use_distinct_in_register_job_object_hierarchy feature flag is disabled' do
- before do
- stub_feature_flags(use_distinct_in_register_job_object_hierarchy: false)
- stub_feature_flags(use_distinct_for_all_object_hierarchy: false)
- end
-
- it 'does not call DISTINCT' do
- expect(described_class.new(group_runner).send(:builds_for_group_runner).to_sql).not_to include("DISTINCT")
- end
- end
-
context 'group runner' do
let(:build) { execute(group_runner) }
@@ -349,8 +329,9 @@ module Ci
let!(:other_build) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
before do
- allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_project_runner)
- .and_return(Ci::Build.where(id: [pending_job, other_build]))
+ allow_any_instance_of(::Ci::Queue::BuildQueueService)
+ .to receive(:execute)
+ .and_return(Ci::Build.where(id: [pending_job, other_build]).pluck(:id))
end
it "receives second build from the queue" do
@@ -361,8 +342,9 @@ module Ci
context 'when single build is in queue' do
before do
- allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_project_runner)
- .and_return(Ci::Build.where(id: pending_job))
+ allow_any_instance_of(::Ci::Queue::BuildQueueService)
+ .to receive(:execute)
+ .and_return(Ci::Build.where(id: pending_job).pluck(:id))
end
it "does not receive any valid result" do
@@ -372,8 +354,9 @@ module Ci
context 'when there is no build in queue' do
before do
- allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_project_runner)
- .and_return(Ci::Build.none)
+ allow_any_instance_of(::Ci::Queue::BuildQueueService)
+ .to receive(:execute)
+ .and_return([])
end
it "does not receive builds but result is valid" do
@@ -721,17 +704,17 @@ module Ci
include_examples 'handles runner assignment'
end
- context 'when joining with pending builds table' do
+ context 'when using pending builds table' do
before do
- stub_feature_flags(ci_pending_builds_queue_join: true)
+ stub_feature_flags(ci_pending_builds_queue_source: true)
end
include_examples 'handles runner assignment'
end
- context 'when not joining with pending builds table' do
+ context 'when not using pending builds table' do
before do
- stub_feature_flags(ci_pending_builds_queue_join: false)
+ stub_feature_flags(ci_pending_builds_queue_source: false)
end
include_examples 'handles runner assignment'
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index c71bec31984..42d6e66b38b 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Ci::RetryBuildService do
project.add_reporter(reporter)
end
- clone_accessors = described_class.clone_accessors
+ clone_accessors = described_class.clone_accessors.without(described_class.extra_accessors)
reject_accessors =
%i[id status user token token_encrypted coverage trace runner
@@ -39,7 +39,7 @@ RSpec.describe Ci::RetryBuildService do
erased_at auto_canceled_by job_artifacts job_artifacts_archive
job_artifacts_metadata job_artifacts_trace job_artifacts_junit
job_artifacts_sast job_artifacts_secret_detection job_artifacts_dependency_scanning
- job_artifacts_container_scanning job_artifacts_dast
+ job_artifacts_container_scanning job_artifacts_cluster_image_scanning job_artifacts_dast
job_artifacts_license_scanning
job_artifacts_performance job_artifacts_browser_performance job_artifacts_load_performance
job_artifacts_lsif job_artifacts_terraform job_artifacts_cluster_applications
@@ -98,7 +98,7 @@ RSpec.describe Ci::RetryBuildService do
end
clone_accessors.each do |attribute|
- it "clones #{attribute} build attribute" do
+ it "clones #{attribute} build attribute", :aggregate_failures do
expect(attribute).not_to be_in(forbidden_associations), "association #{attribute} must be `belongs_to`"
expect(build.send(attribute)).not_to be_nil
expect(new_build.send(attribute)).not_to be_nil
@@ -134,7 +134,7 @@ RSpec.describe Ci::RetryBuildService do
end
end
- it 'has correct number of known attributes' do
+ it 'has correct number of known attributes', :aggregate_failures do
processed_accessors = clone_accessors + reject_accessors
known_accessors = processed_accessors + ignore_accessors
@@ -146,9 +146,10 @@ RSpec.describe Ci::RetryBuildService do
Ci::Build.attribute_names.map(&:to_sym) +
Ci::Build.attribute_aliases.keys.map(&:to_sym) +
Ci::Build.reflect_on_all_associations.map(&:name) +
- [:tag_list, :needs_attributes]
-
- current_accessors << :secrets if Gitlab.ee?
+ [:tag_list, :needs_attributes] -
+ # ee-specific accessors should be tested in ee/spec/services/ci/retry_build_service_spec.rb instead
+ described_class.extra_accessors -
+ [:dast_site_profiles_build, :dast_scanner_profiles_build] # join tables
current_accessors.uniq!
diff --git a/spec/services/ci/update_build_queue_service_spec.rb b/spec/services/ci/update_build_queue_service_spec.rb
index 44d7809b85f..2e2ef120f1b 100644
--- a/spec/services/ci/update_build_queue_service_spec.rb
+++ b/spec/services/ci/update_build_queue_service_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe Ci::UpdateBuildQueueService do
context 'when duplicate entry exists' do
before do
- ::Ci::PendingBuild.create!(build: build, project: project)
+ create(:ci_pending_build, build: build, project: build.project)
end
it 'does nothing and returns build id' do
@@ -66,7 +66,7 @@ RSpec.describe Ci::UpdateBuildQueueService do
context 'when pending build exists' do
before do
- Ci::PendingBuild.create!(build: build, project: project)
+ create(:ci_pending_build, build: build, project: build.project)
end
it 'removes pending build in a transaction' do
@@ -146,9 +146,7 @@ RSpec.describe Ci::UpdateBuildQueueService do
context 'when duplicate entry exists' do
before do
- ::Ci::RunningBuild.create!(
- build: build, project: project, runner: runner, runner_type: runner.runner_type
- )
+ create(:ci_running_build, build: build, project: project, runner: runner)
end
it 'does nothing and returns build id' do
@@ -169,9 +167,7 @@ RSpec.describe Ci::UpdateBuildQueueService do
context 'when shared runner build tracking entry exists' do
before do
- Ci::RunningBuild.create!(
- build: build, project: project, runner: runner, runner_type: runner.runner_type
- )
+ create(:ci_running_build, build: build, project: project, runner: runner)
end
it 'removes shared runner build' do
diff --git a/spec/services/clusters/applications/prometheus_health_check_service_spec.rb b/spec/services/clusters/applications/prometheus_health_check_service_spec.rb
index ee47d00f700..e6c7b147ab7 100644
--- a/spec/services/clusters/applications/prometheus_health_check_service_spec.rb
+++ b/spec/services/clusters/applications/prometheus_health_check_service_spec.rb
@@ -42,6 +42,7 @@ RSpec.describe Clusters::Applications::PrometheusHealthCheckService, '#execute'
context 'when cluster is project_type' do
let_it_be(:project) { create(:project) }
let_it_be(:integration) { create(:alert_management_http_integration, project: project) }
+
let(:applications_prometheus_healthy) { true }
let(:prometheus) { create(:clusters_applications_prometheus, status: prometheus_status_value, healthy: applications_prometheus_healthy) }
let(:cluster) { create(:cluster, :project, application_prometheus: prometheus, projects: [project]) }
diff --git a/spec/services/commits/commit_patch_service_spec.rb b/spec/services/commits/commit_patch_service_spec.rb
index 55cbd0e5d66..edd0918e488 100644
--- a/spec/services/commits/commit_patch_service_spec.rb
+++ b/spec/services/commits/commit_patch_service_spec.rb
@@ -87,7 +87,7 @@ RSpec.describe Commits::CommitPatchService do
context 'when specifying a non existent start branch' do
let(:start_branch) { 'does-not-exist' }
- it_behaves_like 'an error response', 'Invalid reference name'
+ it_behaves_like 'an error response', 'Failed to create branch'
end
end
end
diff --git a/spec/services/container_expiration_policy_service_spec.rb b/spec/services/container_expiration_policy_service_spec.rb
index 4294e6b3f06..41dd890dd35 100644
--- a/spec/services/container_expiration_policy_service_spec.rb
+++ b/spec/services/container_expiration_policy_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe ContainerExpirationPolicyService do
let_it_be(:user) { create(:user) }
let_it_be(:container_expiration_policy) { create(:container_expiration_policy, :runnable) }
+
let(:project) { container_expiration_policy.project }
let(:container_repository) { create(:container_repository, project: project) }
diff --git a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
index 40a2f954786..1c8ae860d10 100644
--- a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
let_it_be(:image) { 'alpine' }
let_it_be(:tag) { 'latest' }
let_it_be(:dependency_proxy_manifest) { create(:dependency_proxy_manifest, file_name: "#{image}:#{tag}.json") }
+
let(:manifest) { dependency_proxy_manifest.file.read }
let(:group) { dependency_proxy_manifest.group }
let(:token) { Digest::SHA256.hexdigest('123') }
diff --git a/spec/services/design_management/copy_design_collection/copy_service_spec.rb b/spec/services/design_management/copy_design_collection/copy_service_spec.rb
index 186d2481c19..89a78c9bf5f 100644
--- a/spec/services/design_management/copy_design_collection/copy_service_spec.rb
+++ b/spec/services/design_management/copy_design_collection/copy_service_spec.rb
@@ -191,8 +191,8 @@ RSpec.describe DesignManagement::CopyDesignCollection::CopyService, :clean_gitla
expect(commits_on_master(limit: 99)).to include(*target_issue.design_versions.ordered.pluck(:sha))
end
- it 'creates a master branch if none previously existed' do
- expect { subject }.to change { target_repository.branch_names }.from([]).to(['master'])
+ it 'creates a default branch if none previously existed' do
+ expect { subject }.to change { target_repository.branch_names }.from([]).to([project.design_repository.root_ref])
end
it 'does not create default branch when one exists' do
@@ -255,7 +255,7 @@ RSpec.describe DesignManagement::CopyDesignCollection::CopyService, :clean_gitla
end
def commits_on_master(limit: 10)
- target_repository.commits('master', limit: limit).map(&:id)
+ target_repository.commits(target_repository.root_ref, limit: limit).map(&:id)
end
end
end
diff --git a/spec/services/design_management/copy_design_collection/queue_service_spec.rb b/spec/services/design_management/copy_design_collection/queue_service_spec.rb
index 2d9ea4633a0..05a7b092ccf 100644
--- a/spec/services/design_management/copy_design_collection/queue_service_spec.rb
+++ b/spec/services/design_management/copy_design_collection/queue_service_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe DesignManagement::CopyDesignCollection::QueueService, :clean_gitl
expect { subject }.to change { target_issue.design_collection.copy_state }.from('ready').to('in_progress')
end
- it 'queues a DesignManagement::CopyDesignCollectionWorker' do
+ it 'queues a DesignManagement::CopyDesignCollectionWorker', :clean_gitlab_redis_queues do
expect { subject }.to change(DesignManagement::CopyDesignCollectionWorker.jobs, :size).by(1)
end
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index 5bc763cc95e..b76c91fbac9 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -177,6 +177,18 @@ RSpec.describe DesignManagement::SaveDesignsService do
end
end
+ context 'when HEAD branch is different from master' do
+ before do
+ stub_feature_flags(main_branch_over_master: true)
+ end
+
+ it 'does not raise an exception during update' do
+ run_service
+
+ expect { run_service }.not_to raise_error
+ end
+ end
+
context 'when a design is being updated' do
before do
run_service
@@ -343,7 +355,7 @@ RSpec.describe DesignManagement::SaveDesignsService do
path = File.join(build(:design, issue: issue, filename: filename).full_path)
design_repository.create_if_not_exists
design_repository.create_file(user, path, 'something fake',
- branch_name: 'master',
+ branch_name: project.default_branch_or_main,
message: 'Somehow created without being tracked in db')
end
diff --git a/spec/services/discussions/resolve_service_spec.rb b/spec/services/discussions/resolve_service_spec.rb
index 24de1d90526..9cc27973bcb 100644
--- a/spec/services/discussions/resolve_service_spec.rb
+++ b/spec/services/discussions/resolve_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Discussions::ResolveService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_projects: [project]) }
let_it_be(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds, source_project: project) }
+
let(:discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion }
let(:service) { described_class.new(project, user, one_or_more_discussions: discussion) }
@@ -100,6 +101,7 @@ RSpec.describe Discussions::ResolveService do
context 'when discussion is not for a merge request' do
let_it_be(:design) { create(:design, :with_file, issue: create(:issue, project: project)) }
+
let(:discussion) { create(:diff_note_on_design, noteable: design, project: project).to_discussion }
it 'does not execute the notification service' do
diff --git a/spec/services/discussions/unresolve_service_spec.rb b/spec/services/discussions/unresolve_service_spec.rb
index 6298a00a474..0009239232c 100644
--- a/spec/services/discussions/unresolve_service_spec.rb
+++ b/spec/services/discussions/unresolve_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Discussions::UnresolveService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_projects: [project]) }
let_it_be(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds, source_project: project) }
+
let(:discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion }
let(:service) { described_class.new(discussion, user) }
diff --git a/spec/services/error_tracking/collect_error_service_spec.rb b/spec/services/error_tracking/collect_error_service_spec.rb
new file mode 100644
index 00000000000..14cd588f40b
--- /dev/null
+++ b/spec/services/error_tracking/collect_error_service_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ErrorTracking::CollectErrorService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:parsed_event) { Gitlab::Json.parse(fixture_file('error_tracking/parsed_event.json')) }
+
+ subject { described_class.new(project, nil, event: parsed_event) }
+
+ describe '#execute' do
+ it 'creates Error and creates ErrorEvent' do
+ expect { subject.execute }
+ .to change { ErrorTracking::Error.count }.by(1)
+ .and change { ErrorTracking::ErrorEvent.count }.by(1)
+ end
+
+ it 'updates Error and created ErrorEvent on second hit' do
+ subject.execute
+
+ expect { subject.execute }.not_to change { ErrorTracking::Error.count }
+ expect { subject.execute }.to change { ErrorTracking::ErrorEvent.count }.by(1)
+ end
+
+ it 'has correct values set' do
+ subject.execute
+
+ event = ErrorTracking::ErrorEvent.last
+ error = event.error
+
+ expect(error.name).to eq 'ActionView::MissingTemplate'
+ expect(error.description).to start_with 'Missing template posts/error2'
+ expect(error.actor).to eq 'PostsController#error2'
+ expect(error.platform).to eq 'ruby'
+ expect(error.last_seen_at).to eq '2021-07-08T12:59:16Z'
+
+ expect(event.description).to eq 'ActionView::MissingTemplate'
+ expect(event.occurred_at).to eq '2021-07-08T12:59:16Z'
+ expect(event.level).to eq 'error'
+ expect(event.environment).to eq 'development'
+ expect(event.payload).to eq parsed_event
+ end
+ end
+end
diff --git a/spec/services/event_create_service_spec.rb b/spec/services/event_create_service_spec.rb
index 17b2c7b38e1..611e821f3e5 100644
--- a/spec/services/event_create_service_spec.rb
+++ b/spec/services/event_create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe EventCreateService do
+RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redis_shared_state do
let(:service) { described_class.new }
let_it_be(:user, reload: true) { create :user }
@@ -50,7 +50,7 @@ RSpec.describe EventCreateService do
end
end
- describe 'Merge Requests', :clean_gitlab_redis_shared_state do
+ describe 'Merge Requests' do
describe '#open_mr' do
subject(:open_mr) { service.open_mr(merge_request, merge_request.author) }
@@ -194,7 +194,7 @@ RSpec.describe EventCreateService do
end
end
- describe '#wiki_event', :clean_gitlab_redis_shared_state do
+ describe '#wiki_event' do
let_it_be(:user) { create(:user) }
let_it_be(:wiki_page) { create(:wiki_page) }
let_it_be(:meta) { create(:wiki_page_meta, :for_wiki_page, wiki_page: wiki_page) }
@@ -247,7 +247,7 @@ RSpec.describe EventCreateService do
end
end
- describe '#push', :clean_gitlab_redis_shared_state do
+ describe '#push' do
let(:push_data) do
{
commits: [
@@ -272,7 +272,7 @@ RSpec.describe EventCreateService do
end
end
- describe '#bulk_push', :clean_gitlab_redis_shared_state do
+ describe '#bulk_push' do
let(:push_data) do
{
action: :created,
@@ -306,7 +306,7 @@ RSpec.describe EventCreateService do
end
end
- describe 'design events', :clean_gitlab_redis_shared_state do
+ describe 'design events' do
let_it_be(:design) { create(:design, project: project) }
let_it_be(:author) { user }
@@ -318,7 +318,8 @@ RSpec.describe EventCreateService do
specify { expect { result }.to change { Event.count }.by(8) }
- specify { expect { result }.not_to exceed_query_limit(1) }
+ # An addditional query due to event tracking
+ specify { expect { result }.not_to exceed_query_limit(2) }
it 'creates 3 created design events' do
ids = result.pluck('id')
@@ -347,7 +348,8 @@ RSpec.describe EventCreateService do
specify { expect { result }.to change { Event.count }.by(5) }
- specify { expect { result }.not_to exceed_query_limit(1) }
+ # An addditional query due to event tracking
+ specify { expect { result }.not_to exceed_query_limit(2) }
it 'creates 5 destroyed design events' do
ids = result.pluck('id')
diff --git a/spec/services/git/base_hooks_service_spec.rb b/spec/services/git/base_hooks_service_spec.rb
index 4ab27c7ab05..539c294a2e7 100644
--- a/spec/services/git/base_hooks_service_spec.rb
+++ b/spec/services/git/base_hooks_service_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe Git::BaseHooksService do
end
end
- describe 'project hooks and services' do
+ describe 'project hooks and integrations' do
context 'hooks' do
before do
expect(project).to receive(:has_active_hooks?).and_return(active)
@@ -88,45 +88,45 @@ RSpec.describe Git::BaseHooksService do
end
end
- context 'services' do
+ context 'with integrations' do
before do
- expect(project).to receive(:has_active_services?).and_return(active)
+ expect(project).to receive(:has_active_integrations?).and_return(active)
end
- context 'active services' do
+ context 'with active integrations' do
let(:active) { true }
it 'executes the services' do
expect(subject).to receive(:push_data).at_least(:once).and_call_original
- expect(project).to receive(:execute_services)
+ expect(project).to receive(:execute_integrations)
subject.execute
end
end
- context 'inactive services' do
+ context 'with inactive integrations' do
let(:active) { false }
it 'does not execute the services' do
expect(subject).not_to receive(:push_data)
- expect(project).not_to receive(:execute_services)
+ expect(project).not_to receive(:execute_integrations)
subject.execute
end
end
end
- context 'execute_project_hooks param set to false' do
+ context 'when execute_project_hooks param is set to false' do
before do
params[:execute_project_hooks] = false
allow(project).to receive(:has_active_hooks?).and_return(true)
- allow(project).to receive(:has_active_services?).and_return(true)
+ allow(project).to receive(:has_active_integrations?).and_return(true)
end
- it 'does not execute hooks and services' do
+ it 'does not execute hooks and integrations' do
expect(project).not_to receive(:execute_hooks)
- expect(project).not_to receive(:execute_services)
+ expect(project).not_to receive(:execute_integrations)
subject.execute
end
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index cc3ba21f002..fc629fe583d 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Git::BranchPushService, services: true do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :repository) }
+
let(:blankrev) { Gitlab::Git::BLANK_SHA }
let(:oldrev) { sample_commit.parent_id }
let(:newrev) { sample_commit.id }
@@ -411,13 +412,13 @@ RSpec.describe Git::BranchPushService, services: true do
context "for jira issue tracker" do
include JiraServiceHelper
- let(:jira_tracker) { project.create_jira_service if project.jira_service.nil? }
+ let(:jira_tracker) { project.create_jira_integration if project.jira_integration.nil? }
before do
- # project.create_jira_service doesn't seem to invalidate the cache here
+ # project.create_jira_integration doesn't seem to invalidate the cache here
project.has_external_issue_tracker = true
- stub_jira_service_test
- jira_service_settings
+ stub_jira_integration_test
+ jira_integration_settings
stub_jira_urls("JIRA-1")
allow(closing_commit).to receive_messages({
@@ -553,24 +554,13 @@ RSpec.describe Git::BranchPushService, services: true do
end
end
- describe "housekeeping" do
+ describe "housekeeping", :clean_gitlab_redis_cache, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
let(:housekeeping) { Repositories::HousekeepingService.new(project) }
before do
- # Flush any raw key-value data stored by the housekeeping code.
- Gitlab::Redis::Cache.with { |conn| conn.flushall }
- Gitlab::Redis::Queues.with { |conn| conn.flushall }
- Gitlab::Redis::SharedState.with { |conn| conn.flushall }
-
allow(Repositories::HousekeepingService).to receive(:new).and_return(housekeeping)
end
- after do
- Gitlab::Redis::Cache.with { |conn| conn.flushall }
- Gitlab::Redis::Queues.with { |conn| conn.flushall }
- Gitlab::Redis::SharedState.with { |conn| conn.flushall }
- end
-
it 'does not perform housekeeping when not needed' do
expect(housekeeping).not_to receive(:execute)
@@ -707,6 +697,7 @@ RSpec.describe Git::BranchPushService, services: true do
context 'Jira Connect hooks' do
let_it_be(:project) { create(:project, :repository) }
+
let(:branch_to_sync) { nil }
let(:commits_to_sync) { [] }
let(:params) do
diff --git a/spec/services/git/wiki_push_service_spec.rb b/spec/services/git/wiki_push_service_spec.rb
index 151c2a1d014..7e5d7066e89 100644
--- a/spec/services/git/wiki_push_service_spec.rb
+++ b/spec/services/git/wiki_push_service_spec.rb
@@ -5,11 +5,12 @@ require 'spec_helper'
RSpec.describe Git::WikiPushService, services: true do
include RepoHelpers
+ let_it_be(:current_user) { create(:user) }
let_it_be(:key_id) { create(:key, user: current_user).shell_id }
- let_it_be(:wiki) { create(:project_wiki) }
- let_it_be(:current_user) { wiki.container.default_owner }
- let_it_be(:git_wiki) { wiki.wiki }
- let_it_be(:repository) { wiki.repository }
+
+ let(:wiki) { create(:project_wiki, user: current_user) }
+ let(:git_wiki) { wiki.wiki }
+ let(:repository) { wiki.repository }
describe '#execute' do
it 'executes model-specific callbacks' do
@@ -64,6 +65,26 @@ RSpec.describe Git::WikiPushService, services: true do
expect(Event.last(count).pluck(:action)).to match_array(Event::WIKI_ACTIONS.map(&:to_s))
end
+
+ context 'when wiki_page slug is not UTF-8 ' do
+ let(:binary_title) { Gitlab::EncodingHelper.encode_binary('编码') }
+
+ def run_service
+ wiki_page = create(:wiki_page, wiki: wiki, title: "#{binary_title} 'foo'")
+
+ process_changes do
+ # Test that new_path is converted to UTF-8
+ create(:wiki_page, wiki: wiki, title: binary_title)
+
+ # Test that old_path is also is converted to UTF-8
+ update_page(wiki_page.title, 'foo')
+ end
+ end
+
+ it 'does not raise an error' do
+ expect { run_service }.not_to raise_error
+ end
+ end
end
context 'two pages have been created' do
@@ -345,9 +366,10 @@ RSpec.describe Git::WikiPushService, services: true do
::Wikis::CreateAttachmentService.new(container: wiki.container, current_user: current_user, params: params).execute
end
- def update_page(title)
+ def update_page(title, new_title = nil)
+ new_title = title unless new_title.present?
page = git_wiki.page(title: title)
- git_wiki.update_page(page.path, title, 'markdown', 'Hey', commit_details)
+ git_wiki.update_page(page.path, new_title, 'markdown', 'Hey', commit_details)
end
def delete_page(page)
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index b59ee894fe8..bcba39b0eb4 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -161,7 +161,7 @@ RSpec.describe Groups::CreateService, '#execute' do
let(:created_group) { service.execute }
context 'with an active instance-level integration' do
- let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
+ let!(:instance_integration) { create(:prometheus_integration, :instance, api_url: 'https://prometheus.instance.com/') }
it 'creates a service from the instance-level integration' do
expect(created_group.integrations.count).to eq(1)
@@ -171,7 +171,7 @@ RSpec.describe Groups::CreateService, '#execute' do
context 'with an active group-level integration' do
let(:service) { described_class.new(user, group_params.merge(parent_id: group.id)) }
- let!(:group_integration) { create(:prometheus_service, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+ let!(:group_integration) { create(:prometheus_integration, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
let(:group) do
create(:group).tap do |group|
group.add_owner(user)
@@ -186,7 +186,7 @@ RSpec.describe Groups::CreateService, '#execute' do
context 'with an active subgroup' do
let(:service) { described_class.new(user, group_params.merge(parent_id: subgroup.id)) }
- let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
+ let!(:subgroup_integration) { create(:prometheus_integration, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
let(:subgroup) do
create(:group, parent: group).tap do |subgroup|
subgroup.add_owner(user)
diff --git a/spec/services/groups/group_links/destroy_service_spec.rb b/spec/services/groups/group_links/destroy_service_spec.rb
index 97fe23e9147..e63adc07313 100644
--- a/spec/services/groups/group_links/destroy_service_spec.rb
+++ b/spec/services/groups/group_links/destroy_service_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Groups::GroupLinks::DestroyService, '#execute' do
expect { subject.execute(link) }.to change { shared_group.shared_with_group_links.count }.from(1).to(0)
end
- it 'revokes project authorization' do
+ it 'revokes project authorization', :sidekiq_inline do
group.add_developer(user)
expect { subject.execute(link) }.to(
@@ -47,8 +47,8 @@ RSpec.describe Groups::GroupLinks::DestroyService, '#execute' do
it 'updates project authorization once per group' do
expect(GroupGroupLink).to receive(:delete).and_call_original
- expect(group).to receive(:refresh_members_authorized_projects).with(direct_members_only: true).once
- expect(another_group).to receive(:refresh_members_authorized_projects).with(direct_members_only: true).once
+ expect(group).to receive(:refresh_members_authorized_projects).with(direct_members_only: true, blocking: false).once
+ expect(another_group).to receive(:refresh_members_authorized_projects).with(direct_members_only: true, blocking: false).once
subject.execute(links)
end
diff --git a/spec/services/groups/group_links/update_service_spec.rb b/spec/services/groups/group_links/update_service_spec.rb
index 82c4a10f15a..31446c8e4bf 100644
--- a/spec/services/groups/group_links/update_service_spec.rb
+++ b/spec/services/groups/group_links/update_service_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Groups::GroupLinks::UpdateService, '#execute' do
expect(link.expires_at).to eq(expiry_date)
end
- it 'updates project permissions' do
+ it 'updates project permissions', :sidekiq_inline do
expect { subject }.to change { group_member_user.can?(:create_release, project) }.from(true).to(false)
end
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 2fbd5eeef5f..889b5551746 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -241,7 +241,7 @@ RSpec.describe Groups::TransferService do
context 'when the group is allowed to be transferred' do
let_it_be(:new_parent_group, reload: true) { create(:group, :public) }
- let_it_be(:new_parent_group_integration) { create(:slack_service, group: new_parent_group, project: nil, webhook: 'http://new-group.slack.com') }
+ let_it_be(:new_parent_group_integration) { create(:integrations_slack, group: new_parent_group, project: nil, webhook: 'http://new-group.slack.com') }
before do
allow(PropagateIntegrationWorker).to receive(:perform_async)
@@ -277,8 +277,8 @@ RSpec.describe Groups::TransferService do
let(:new_created_integration) { Integration.find_by(group: group) }
context 'with an inherited integration' do
- let_it_be(:instance_integration) { create(:slack_service, :instance, webhook: 'http://project.slack.com') }
- let_it_be(:group_integration) { create(:slack_service, group: group, project: nil, webhook: 'http://group.slack.com', inherit_from_id: instance_integration.id) }
+ let_it_be(:instance_integration) { create(:integrations_slack, :instance, webhook: 'http://project.slack.com') }
+ let_it_be(:group_integration) { create(:integrations_slack, group: group, project: nil, webhook: 'http://group.slack.com', inherit_from_id: instance_integration.id) }
it 'replaces inherited integrations', :aggregate_failures do
expect(new_created_integration.webhook).to eq(new_parent_group_integration.webhook)
@@ -288,7 +288,7 @@ RSpec.describe Groups::TransferService do
end
context 'with a custom integration' do
- let_it_be(:group_integration) { create(:slack_service, group: group, project: nil, webhook: 'http://group.slack.com') }
+ let_it_be(:group_integration) { create(:integrations_slack, group: group, project: nil, webhook: 'http://group.slack.com') }
it 'does not updates the integrations', :aggregate_failures do
expect { transfer_service.execute(new_parent_group) }.not_to change { group_integration.webhook }
diff --git a/spec/services/import/bitbucket_server_service_spec.rb b/spec/services/import/bitbucket_server_service_spec.rb
index c548e87b040..56d93625b91 100644
--- a/spec/services/import/bitbucket_server_service_spec.rb
+++ b/spec/services/import/bitbucket_server_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Import::BitbucketServerService do
let_it_be(:user) { create(:user) }
+
let(:base_uri) { "https://test:7990" }
let(:token) { "asdasd12345" }
let(:secret) { "sekrettt" }
diff --git a/spec/services/incident_management/incidents/create_service_spec.rb b/spec/services/incident_management/incidents/create_service_spec.rb
index 4601bd807d0..0f32a4b5425 100644
--- a/spec/services/incident_management/incidents/create_service_spec.rb
+++ b/spec/services/incident_management/incidents/create_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe IncidentManagement::Incidents::CreateService do
let_it_be(:project) { create(:project) }
let_it_be(:user) { User.alert_bot }
+
let(:description) { 'Incident description' }
describe '#execute' do
diff --git a/spec/services/incident_management/incidents/update_severity_service_spec.rb b/spec/services/incident_management/incidents/update_severity_service_spec.rb
deleted file mode 100644
index bc1abf82cf2..00000000000
--- a/spec/services/incident_management/incidents/update_severity_service_spec.rb
+++ /dev/null
@@ -1,86 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe IncidentManagement::Incidents::UpdateSeverityService do
- let_it_be(:user) { create(:user) }
-
- describe '#execute' do
- let(:severity) { 'low' }
- let(:system_note_worker) { ::IncidentManagement::AddSeveritySystemNoteWorker }
-
- subject(:update_severity) { described_class.new(issuable, user, severity).execute }
-
- before do
- allow(system_note_worker).to receive(:perform_async)
- end
-
- shared_examples 'adds a system note' do
- it 'calls AddSeveritySystemNoteWorker' do
- update_severity
-
- expect(system_note_worker).to have_received(:perform_async).with(issuable.id, user.id)
- end
- end
-
- context 'when issuable not an incident' do
- %i(issue merge_request).each do |issuable_type|
- let(:issuable) { build_stubbed(issuable_type) }
-
- it { is_expected.to be_nil }
-
- it 'does not set severity' do
- expect { update_severity }.not_to change(IssuableSeverity, :count)
- end
-
- it 'does not add a system note' do
- update_severity
-
- expect(system_note_worker).not_to have_received(:perform_async)
- end
- end
- end
-
- context 'when issuable is an incident' do
- let!(:issuable) { create(:incident) }
-
- context 'when issuable does not have issuable severity yet' do
- it 'creates new record' do
- expect { update_severity }.to change { IssuableSeverity.where(issue: issuable).count }.to(1)
- end
-
- it 'sets severity to specified value' do
- expect { update_severity }.to change { issuable.severity }.to('low')
- end
-
- it_behaves_like 'adds a system note'
- end
-
- context 'when issuable has an issuable severity' do
- let!(:issuable_severity) { create(:issuable_severity, issue: issuable, severity: 'medium') }
-
- it 'does not create new record' do
- expect { update_severity }.not_to change(IssuableSeverity, :count)
- end
-
- it 'updates existing issuable severity' do
- expect { update_severity }.to change { issuable_severity.severity }.to(severity)
- end
-
- it_behaves_like 'adds a system note'
- end
-
- context 'when severity value is unsupported' do
- let(:severity) { 'unsupported-severity' }
-
- it 'sets the severity to default value' do
- update_severity
-
- expect(issuable.issuable_severity.severity).to eq(IssuableSeverity::DEFAULT)
- end
-
- it_behaves_like 'adds a system note'
- end
- end
- end
-end
diff --git a/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb b/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
index 73ad0532e07..fb536df5d17 100644
--- a/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
+++ b/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe IncidentManagement::PagerDuty::CreateIncidentIssueService do
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:user) { User.alert_bot }
+
let(:webhook_payload) { Gitlab::Json.parse(fixture_file('pager_duty/webhook_incident_trigger.json')) }
let(:parsed_payload) { ::PagerDuty::WebhookPayloadParser.call(webhook_payload) }
let(:incident_payload) { parsed_payload.first['incident'] }
diff --git a/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb b/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb
index 0caffb16f42..8b6eb21c25d 100644
--- a/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb
+++ b/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb
@@ -95,6 +95,7 @@ RSpec.describe IncidentManagement::PagerDuty::ProcessWebhookService do
context 'when both tokens are nil' do
let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: false) }
+
let(:token) { nil }
before do
diff --git a/spec/services/integrations/test/project_service_spec.rb b/spec/services/integrations/test/project_service_spec.rb
index 052b25b0f10..32f9f632d7a 100644
--- a/spec/services/integrations/test/project_service_spec.rb
+++ b/spec/services/integrations/test/project_service_spec.rb
@@ -7,7 +7,8 @@ RSpec.describe Integrations::Test::ProjectService do
describe '#execute' do
let_it_be(:project) { create(:project) }
- let(:integration) { create(:slack_service, project: project) }
+
+ let(:integration) { create(:integrations_slack, project: project) }
let(:user) { project.owner }
let(:event) { nil }
let(:sample_data) { { data: 'sample' } }
@@ -23,8 +24,8 @@ RSpec.describe Integrations::Test::ProjectService do
expect(subject).to eq(success_result)
end
- context 'PipelinesEmailService' do
- let(:integration) { create(:pipelines_email_service, project: project) }
+ context 'with Integrations::PipelinesEmail' do
+ let(:integration) { create(:pipelines_email_integration, project: project) }
it_behaves_like 'tests for integration with pipeline data'
end
@@ -32,7 +33,7 @@ RSpec.describe Integrations::Test::ProjectService do
context 'with event specified' do
context 'event not supported by integration' do
- let(:integration) { create(:jira_service, project: project) }
+ let(:integration) { create(:jira_integration, project: project) }
let(:event) { 'push' }
it 'returns error message' do
@@ -131,6 +132,7 @@ RSpec.describe Integrations::Test::ProjectService do
context 'deployment' do
let_it_be(:project) { create(:project, :test_repo) }
+
let(:deployment) { build(:deployment) }
let(:event) { 'deployment' }
@@ -168,6 +170,7 @@ RSpec.describe Integrations::Test::ProjectService do
context 'wiki_page' do
let_it_be(:project) { create(:project, :wiki_repo) }
+
let(:event) { 'wiki_page' }
it 'returns error message if wiki disabled' do
diff --git a/spec/services/issuable/bulk_update_service_spec.rb b/spec/services/issuable/bulk_update_service_spec.rb
index dfdfb57111c..55e0e799c19 100644
--- a/spec/services/issuable/bulk_update_service_spec.rb
+++ b/spec/services/issuable/bulk_update_service_spec.rb
@@ -314,6 +314,7 @@ RSpec.describe Issuable::BulkUpdateService do
context 'with issuables at a group level' do
let_it_be(:group) { create(:group) }
+
let(:parent) { group }
before do
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 0b315422be8..9a70de80123 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -81,7 +81,7 @@ RSpec.describe Issues::CloseService do
describe '#close_issue' do
context 'with external issue' do
context 'with an active external issue tracker supporting close_issue' do
- let!(:external_issue_tracker) { create(:jira_service, project: project) }
+ let!(:external_issue_tracker) { create(:jira_integration, project: project) }
it 'closes the issue on the external issue tracker' do
project.reload
@@ -92,7 +92,7 @@ RSpec.describe Issues::CloseService do
end
context 'with inactive external issue tracker supporting close_issue' do
- let!(:external_issue_tracker) { create(:jira_service, project: project, active: false) }
+ let!(:external_issue_tracker) { create(:jira_integration, project: project, active: false) }
it 'does not close the issue on the external issue tracker' do
project.reload
@@ -323,7 +323,7 @@ RSpec.describe Issues::CloseService do
context 'when issue is not confidential' do
it 'executes issue hooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
- expect(project).to receive(:execute_services).with(an_instance_of(Hash), :issue_hooks)
+ expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks)
described_class.new(project: project, current_user: user).close_issue(issue)
end
@@ -334,7 +334,7 @@ RSpec.describe Issues::CloseService do
issue = create(:issue, :confidential, project: project)
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
- expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)
+ expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks)
described_class.new(project: project, current_user: user).close_issue(issue)
end
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 94810d6134a..b073ffd291f 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -8,11 +8,17 @@ RSpec.describe Issues::CreateService do
let_it_be_with_reload(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+ let(:spam_params) { double }
+
describe '#execute' do
let_it_be(:assignee) { create(:user) }
let_it_be(:milestone) { create(:milestone, project: project) }
- let(:issue) { described_class.new(project: project, current_user: user, params: opts).execute }
+ let(:issue) { described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute }
+
+ before do
+ stub_spam_services
+ end
context 'when params are valid' do
let_it_be(:labels) { create_pair(:label, project: project) }
@@ -44,7 +50,7 @@ RSpec.describe Issues::CreateService do
end
context 'when skip_system_notes is true' do
- let(:issue) { described_class.new(project: project, current_user: user, params: opts).execute(skip_system_notes: true) }
+ let(:issue) { described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute(skip_system_notes: true) }
it 'does not call Issuable::CommonSystemNotesService' do
expect(Issuable::CommonSystemNotesService).not_to receive(:new)
@@ -92,7 +98,7 @@ RSpec.describe Issues::CreateService do
let_it_be(:non_member) { create(:user) }
it 'filters out params that cannot be set without the :set_issue_metadata permission' do
- issue = described_class.new(project: project, current_user: non_member, params: opts).execute
+ issue = described_class.new(project: project, current_user: non_member, params: opts, spam_params: spam_params).execute
expect(issue).to be_persisted
expect(issue.title).to eq('Awesome issue')
@@ -104,7 +110,7 @@ RSpec.describe Issues::CreateService do
end
it 'can create confidential issues' do
- issue = described_class.new(project: project, current_user: non_member, params: { confidential: true }).execute
+ issue = described_class.new(project: project, current_user: non_member, params: { confidential: true }, spam_params: spam_params).execute
expect(issue.confidential).to be_truthy
end
@@ -113,7 +119,7 @@ RSpec.describe Issues::CreateService do
it 'moves the issue to the end, in an asynchronous worker' do
expect(IssuePlacementWorker).to receive(:perform_async).with(be_nil, Integer)
- described_class.new(project: project, current_user: user, params: opts).execute
+ described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
end
context 'when label belongs to project group' do
@@ -200,7 +206,7 @@ RSpec.describe Issues::CreateService do
it 'invalidates open issues counter for assignees when issue is assigned' do
project.add_maintainer(assignee)
- described_class.new(project: project, current_user: user, params: opts).execute
+ described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
expect(assignee.assigned_open_issues_count).to eq 1
end
@@ -224,18 +230,18 @@ RSpec.describe Issues::CreateService do
opts = { title: 'Title', description: 'Description', confidential: false }
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
- expect(project).to receive(:execute_services).with(an_instance_of(Hash), :issue_hooks)
+ expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks)
- described_class.new(project: project, current_user: user, params: opts).execute
+ described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
end
it 'executes confidential issue hooks when issue is confidential' do
opts = { title: 'Title', description: 'Description', confidential: true }
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
- expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)
+ expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks)
- described_class.new(project: project, current_user: user, params: opts).execute
+ described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
end
context 'after_save callback to store_mentions' do
@@ -279,7 +285,7 @@ RSpec.describe Issues::CreateService do
it 'removes assignee when user id is invalid' do
opts = { title: 'Title', description: 'Description', assignee_ids: [-1] }
- issue = described_class.new(project: project, current_user: user, params: opts).execute
+ issue = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
expect(issue.assignees).to be_empty
end
@@ -287,7 +293,7 @@ RSpec.describe Issues::CreateService do
it 'removes assignee when user id is 0' do
opts = { title: 'Title', description: 'Description', assignee_ids: [0] }
- issue = described_class.new(project: project, current_user: user, params: opts).execute
+ issue = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
expect(issue.assignees).to be_empty
end
@@ -296,7 +302,7 @@ RSpec.describe Issues::CreateService do
project.add_maintainer(assignee)
opts = { title: 'Title', description: 'Description', assignee_ids: [assignee.id] }
- issue = described_class.new(project: project, current_user: user, params: opts).execute
+ issue = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
expect(issue.assignees).to eq([assignee])
end
@@ -314,7 +320,7 @@ RSpec.describe Issues::CreateService do
project.update!(visibility_level: level)
opts = { title: 'Title', description: 'Description', assignee_ids: [assignee.id] }
- issue = described_class.new(project: project, current_user: user, params: opts).execute
+ issue = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
expect(issue.assignees).to be_empty
end
@@ -324,7 +330,7 @@ RSpec.describe Issues::CreateService do
end
it_behaves_like 'issuable record that supports quick actions' do
- let(:issuable) { described_class.new(project: project, current_user: user, params: params).execute }
+ let(:issuable) { described_class.new(project: project, current_user: user, params: params, spam_params: spam_params).execute }
end
context 'Quick actions' do
@@ -364,14 +370,14 @@ RSpec.describe Issues::CreateService do
let(:opts) { { discussion_to_resolve: discussion.id, merge_request_to_resolve_discussions_of: merge_request.iid } }
it 'resolves the discussion' do
- described_class.new(project: project, current_user: user, params: opts).execute
+ described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
discussion.first_note.reload
expect(discussion.resolved?).to be(true)
end
it 'added a system note to the discussion' do
- described_class.new(project: project, current_user: user, params: opts).execute
+ described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
reloaded_discussion = MergeRequest.find(merge_request.id).discussions.first
@@ -379,7 +385,7 @@ RSpec.describe Issues::CreateService do
end
it 'assigns the title and description for the issue' do
- issue = described_class.new(project: project, current_user: user, params: opts).execute
+ issue = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
expect(issue.title).not_to be_nil
expect(issue.description).not_to be_nil
@@ -391,7 +397,8 @@ RSpec.describe Issues::CreateService do
merge_request_to_resolve_discussions_of: merge_request,
description: nil,
title: nil
- }).execute
+ },
+ spam_params: spam_params).execute
expect(issue.description).to be_nil
expect(issue.title).to be_nil
@@ -402,14 +409,14 @@ RSpec.describe Issues::CreateService do
let(:opts) { { merge_request_to_resolve_discussions_of: merge_request.iid } }
it 'resolves the discussion' do
- described_class.new(project: project, current_user: user, params: opts).execute
+ described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
discussion.first_note.reload
expect(discussion.resolved?).to be(true)
end
it 'added a system note to the discussion' do
- described_class.new(project: project, current_user: user, params: opts).execute
+ described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
reloaded_discussion = MergeRequest.find(merge_request.id).discussions.first
@@ -417,7 +424,7 @@ RSpec.describe Issues::CreateService do
end
it 'assigns the title and description for the issue' do
- issue = described_class.new(project: project, current_user: user, params: opts).execute
+ issue = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
expect(issue.title).not_to be_nil
expect(issue.description).not_to be_nil
@@ -429,7 +436,8 @@ RSpec.describe Issues::CreateService do
merge_request_to_resolve_discussions_of: merge_request,
description: nil,
title: nil
- }).execute
+ },
+ spam_params: spam_params).execute
expect(issue.description).to be_nil
expect(issue.title).to be_nil
@@ -438,47 +446,27 @@ RSpec.describe Issues::CreateService do
end
context 'checking spam' do
- let(:request) { double(:request, headers: nil) }
- let(:api) { true }
- let(:captcha_response) { 'abc123' }
- let(:spam_log_id) { 1 }
-
let(:params) do
{
- title: 'Spam issue',
- request: request,
- api: api,
- captcha_response: captcha_response,
- spam_log_id: spam_log_id
+ title: 'Spam issue'
}
end
subject do
- described_class.new(project: project, current_user: user, params: params)
- end
-
- before do
- allow_next_instance_of(UserAgentDetailService) do |instance|
- allow(instance).to receive(:create)
- end
+ described_class.new(project: project, current_user: user, params: params, spam_params: spam_params)
end
it 'executes SpamActionService' do
- spam_params = Spam::SpamParams.new(
- api: api,
- captcha_response: captcha_response,
- spam_log_id: spam_log_id
- )
expect_next_instance_of(
Spam::SpamActionService,
{
- spammable: an_instance_of(Issue),
- request: request,
- user: user,
+ spammable: kind_of(Issue),
+ spam_params: spam_params,
+ user: an_instance_of(User),
action: :create
}
) do |instance|
- expect(instance).to receive(:execute).with(spam_params: spam_params)
+ expect(instance).to receive(:execute)
end
subject.execute
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 76588860957..36af38aef18 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -38,6 +38,10 @@ RSpec.describe Issues::MoveService do
context 'issue movable' do
include_context 'user can move issue'
+ it 'creates resource state event' do
+ expect { move_service.execute(old_issue, new_project) }.to change(ResourceStateEvent.where(issue_id: old_issue), :count).by(1)
+ end
+
context 'generic issue' do
include_context 'issue move executed'
@@ -87,6 +91,10 @@ RSpec.describe Issues::MoveService do
expect(old_issue.moved_to).to eq new_issue
end
+ it 'marks issue as closed' do
+ expect(old_issue.closed?).to eq true
+ end
+
it 'preserves create time' do
expect(old_issue.created_at).to eq new_issue.created_at
end
diff --git a/spec/services/issues/reopen_service_spec.rb b/spec/services/issues/reopen_service_spec.rb
index 746a9105531..d58c27289c2 100644
--- a/spec/services/issues/reopen_service_spec.rb
+++ b/spec/services/issues/reopen_service_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe Issues::ReopenService do
context 'when issue is not confidential' do
it 'executes issue hooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
- expect(project).to receive(:execute_services).with(an_instance_of(Hash), :issue_hooks)
+ expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks)
described_class.new(project: project, current_user: user).execute(issue)
end
@@ -76,7 +76,7 @@ RSpec.describe Issues::ReopenService do
issue = create(:issue, :confidential, :closed, project: project)
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
- expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)
+ expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks)
described_class.new(project: project, current_user: user).execute(issue)
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index b95d94e3784..70c3c2a0f5d 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -83,16 +83,16 @@ RSpec.describe Issues::UpdateService, :mailer do
end
context 'when issue type is not incident' do
- it 'returns default severity' do
+ before do
update_issue(opts)
-
- expect(issue.severity).to eq(IssuableSeverity::DEFAULT)
end
- it_behaves_like 'not an incident issue' do
- before do
- update_issue(opts)
- end
+ it_behaves_like 'not an incident issue'
+
+ context 'when confidentiality is changed' do
+ subject { update_issue(confidential: true) }
+
+ it_behaves_like 'does not track incident management event'
end
end
@@ -105,12 +105,16 @@ RSpec.describe Issues::UpdateService, :mailer do
it_behaves_like 'incident issue'
- it 'changes updates the severity' do
- expect(issue.severity).to eq('low')
+ it 'does not add an incident label' do
+ expect(issue.labels).to match_array [label]
end
- it 'does not apply incident labels' do
- expect(issue.labels).to match_array [label]
+ context 'when confidentiality is changed' do
+ let(:current_user) { user }
+
+ subject { update_issue(confidential: true) }
+
+ it_behaves_like 'an incident management tracked event', :incident_management_incident_change_confidential
end
end
@@ -140,24 +144,6 @@ RSpec.describe Issues::UpdateService, :mailer do
expect(issue.confidential).to be_falsey
end
- context 'issue in incident type' do
- let(:current_user) { user }
-
- before do
- opts.merge!(issue_type: 'incident', confidential: true)
- end
-
- subject { update_issue(opts) }
-
- it_behaves_like 'an incident management tracked event', :incident_management_incident_change_confidential
-
- it_behaves_like 'incident issue' do
- before do
- subject
- end
- end
- end
-
context 'changing issue_type' do
let!(:label_1) { create(:label, project: project, title: 'incident') }
let!(:label_2) { create(:label, project: project, title: 'missed-sla') }
@@ -167,6 +153,12 @@ RSpec.describe Issues::UpdateService, :mailer do
end
context 'from issue to incident' do
+ it_behaves_like 'incident issue' do
+ before do
+ update_issue(**opts, issue_type: 'incident')
+ end
+ end
+
it 'adds a `incident` label if one does not exist' do
expect { update_issue(issue_type: 'incident') }.to change(issue.labels, :count).by(1)
expect(issue.labels.pluck(:title)).to eq(['incident'])
@@ -488,6 +480,21 @@ RSpec.describe Issues::UpdateService, :mailer do
end
end
end
+
+ it 'verifies the number of queries' do
+ update_issue(description: "- [ ] Task 1 #{user.to_reference}")
+
+ baseline = ActiveRecord::QueryRecorder.new do
+ update_issue(description: "- [x] Task 1 #{user.to_reference}")
+ end
+
+ recorded = ActiveRecord::QueryRecorder.new do
+ update_issue(description: "- [x] Task 1 #{user.to_reference}\n- [ ] Task 2 #{user.to_reference}")
+ end
+
+ expect(recorded.count).to eq(baseline.count - 1)
+ expect(recorded.cached_count).to eq(0)
+ end
end
context 'when description changed' do
@@ -522,7 +529,7 @@ RSpec.describe Issues::UpdateService, :mailer do
it 'executes confidential issue hooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
- expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)
+ expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks)
update_issue(confidential: true)
end
@@ -1005,6 +1012,101 @@ RSpec.describe Issues::UpdateService, :mailer do
include_examples 'updating mentions', described_class
end
+ context 'updating severity' do
+ let(:opts) { { severity: 'low' } }
+
+ shared_examples 'updates the severity' do |expected_severity|
+ it 'has correct value' do
+ update_issue(opts)
+
+ expect(issue.severity).to eq(expected_severity)
+ end
+
+ it 'creates a system note' do
+ expect(::IncidentManagement::AddSeveritySystemNoteWorker).to receive(:perform_async).with(issue.id, user.id)
+
+ update_issue(opts)
+ end
+
+ it 'triggers webhooks' do
+ expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
+ expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks)
+
+ update_issue(opts)
+ end
+ end
+
+ shared_examples 'does not change the severity' do
+ it 'retains the original value' do
+ expected_severity = issue.severity
+
+ update_issue(opts)
+
+ expect(issue.severity).to eq(expected_severity)
+ end
+
+ it 'does not trigger side-effects' do
+ expect(::IncidentManagement::AddSeveritySystemNoteWorker).not_to receive(:perform_async)
+ expect(project).not_to receive(:execute_hooks)
+ expect(project).not_to receive(:execute_integrations)
+
+ expect { update_issue(opts) }.not_to change(IssuableSeverity, :count)
+ end
+ end
+
+ context 'on incidents' do
+ let(:issue) { create(:incident, project: project) }
+
+ context 'when severity has not been set previously' do
+ it_behaves_like 'updates the severity', 'low'
+
+ it 'creates a new record' do
+ expect { update_issue(opts) }.to change(IssuableSeverity, :count).by(1)
+ end
+
+ context 'with unsupported severity value' do
+ let(:opts) { { severity: 'unsupported-severity' } }
+
+ it_behaves_like 'does not change the severity'
+ end
+
+ context 'with severity value defined but unchanged' do
+ let(:opts) { { severity: IssuableSeverity::DEFAULT } }
+
+ it_behaves_like 'does not change the severity'
+ end
+ end
+
+ context 'when severity has been set before' do
+ before do
+ create(:issuable_severity, issue: issue, severity: 'high')
+ end
+
+ it_behaves_like 'updates the severity', 'low'
+
+ it 'does not create a new record' do
+ expect { update_issue(opts) }.not_to change(IssuableSeverity, :count)
+ end
+
+ context 'with unsupported severity value' do
+ let(:opts) { { severity: 'unsupported-severity' } }
+
+ it_behaves_like 'updates the severity', IssuableSeverity::DEFAULT
+ end
+
+ context 'with severity value defined but unchanged' do
+ let(:opts) { { severity: issue.severity } }
+
+ it_behaves_like 'does not change the severity'
+ end
+ end
+ end
+
+ context 'when issue type is not incident' do
+ it_behaves_like 'does not change the severity'
+ end
+ end
+
context 'duplicate issue' do
let(:canonical_issue) { create(:issue, project: project) }
diff --git a/spec/services/jira/requests/projects/list_service_spec.rb b/spec/services/jira/requests/projects/list_service_spec.rb
index 0fff51b1226..ab15254d948 100644
--- a/spec/services/jira/requests/projects/list_service_spec.rb
+++ b/spec/services/jira/requests/projects/list_service_spec.rb
@@ -5,17 +5,17 @@ require 'spec_helper'
RSpec.describe Jira::Requests::Projects::ListService do
include AfterNextHelpers
- let(:jira_service) { create(:jira_service) }
+ let(:jira_integration) { create(:jira_integration) }
let(:params) { {} }
describe '#execute' do
- let(:service) { described_class.new(jira_service, params) }
+ let(:service) { described_class.new(jira_integration, params) }
subject { service.execute }
- context 'without jira_service' do
+ context 'without jira_integration' do
before do
- jira_service.update!(active: false)
+ jira_integration.update!(active: false)
end
it 'returns an error response' do
@@ -24,8 +24,8 @@ RSpec.describe Jira::Requests::Projects::ListService do
end
end
- context 'when jira_service is nil' do
- let(:jira_service) { nil }
+ context 'when jira_integration is nil' do
+ let(:jira_integration) { nil }
it 'returns an error response' do
expect(subject.error?).to be_truthy
@@ -33,11 +33,11 @@ RSpec.describe Jira::Requests::Projects::ListService do
end
end
- context 'with jira_service' do
+ context 'with jira_integration' do
context 'when validations and params are ok' do
let(:response_headers) { { 'content-type' => 'application/json' } }
let(:response_body) { [].to_json }
- let(:expected_url_pattern) { /.*jira.example.com\/rest\/api\/2\/project/ }
+ let(:expected_url_pattern) { %r{.*jira.example.com/rest/api/2/project} }
before do
stub_request(:get, expected_url_pattern).to_return(status: 200, body: response_body, headers: response_headers)
@@ -59,8 +59,8 @@ RSpec.describe Jira::Requests::Projects::ListService do
end
context 'when jira runs on a subpath' do
- let(:jira_service) { create(:jira_service, url: 'http://jira.example.com/jira') }
- let(:expected_url_pattern) { /.*jira.example.com\/jira\/rest\/api\/2\/project/ }
+ let(:jira_integration) { create(:jira_integration, url: 'http://jira.example.com/jira') }
+ let(:expected_url_pattern) { %r{.*jira.example.com/jira/rest/api/2/project} }
it 'takes the subpath into account' do
expect(subject.success?).to be_truthy
diff --git a/spec/services/jira_connect/sync_service_spec.rb b/spec/services/jira_connect/sync_service_spec.rb
index edd0bad70f5..c20aecaaef0 100644
--- a/spec/services/jira_connect/sync_service_spec.rb
+++ b/spec/services/jira_connect/sync_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe JiraConnect::SyncService do
describe '#execute' do
let_it_be(:project) { create(:project, :repository) }
+
let(:client) { Atlassian::JiraConnect::Client }
let(:info) { { a: 'Some', b: 'Info' } }
diff --git a/spec/services/jira_connect_installations/destroy_service_spec.rb b/spec/services/jira_connect_installations/destroy_service_spec.rb
new file mode 100644
index 00000000000..bb5bab53ccb
--- /dev/null
+++ b/spec/services/jira_connect_installations/destroy_service_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnectInstallations::DestroyService do
+ describe '.execute' do
+ it 'creates an instance and calls execute' do
+ expect_next_instance_of(described_class, 'param1', 'param2', 'param3') do |destroy_service|
+ expect(destroy_service).to receive(:execute)
+ end
+
+ described_class.execute('param1', 'param2', 'param3')
+ end
+ end
+
+ describe '#execute' do
+ let!(:installation) { create(:jira_connect_installation) }
+ let(:jira_base_path) { '/-/jira_connect' }
+ let(:jira_event_path) { '/-/jira_connect/events/uninstalled' }
+
+ subject { described_class.new(installation, jira_base_path, jira_event_path).execute }
+
+ it { is_expected.to be_truthy }
+
+ it 'deletes the installation' do
+ expect { subject }.to change(JiraConnectInstallation, :count).by(-1)
+ end
+
+ context 'and the installation has an instance_url set' do
+ let!(:installation) { create(:jira_connect_installation, instance_url: 'http://example.com') }
+
+ it { is_expected.to be_truthy }
+
+ it 'schedules a ForwardEventWorker background job and keeps the installation' do
+ expect(JiraConnect::ForwardEventWorker).to receive(:perform_async).with(installation.id, jira_base_path, jira_event_path)
+
+ expect { subject }.not_to change(JiraConnectInstallation, :count)
+ end
+ end
+ end
+end
diff --git a/spec/services/jira_import/start_import_service_spec.rb b/spec/services/jira_import/start_import_service_spec.rb
index a10928355ef..e04e3314158 100644
--- a/spec/services/jira_import/start_import_service_spec.rb
+++ b/spec/services/jira_import/start_import_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe JiraImport::StartImportService do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project) }
+
let(:key) { 'KEY' }
let(:mapping) do
[
@@ -28,10 +29,10 @@ RSpec.describe JiraImport::StartImportService do
end
context 'when project validation is ok' do
- let!(:jira_service) { create(:jira_service, project: project, active: true) }
+ let!(:jira_integration) { create(:jira_integration, project: project, active: true) }
before do
- stub_jira_service_test
+ stub_jira_integration_test
allow(Gitlab::JiraImport).to receive(:validate_project_settings!)
end
diff --git a/spec/services/jira_import/users_importer_spec.rb b/spec/services/jira_import/users_importer_spec.rb
index 2e8c556d62c..af408847260 100644
--- a/spec/services/jira_import/users_importer_spec.rb
+++ b/spec/services/jira_import/users_importer_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe JiraImport::UsersImporter do
end
before do
- stub_jira_service_test
+ stub_jira_integration_test
project.add_maintainer(user)
end
@@ -45,7 +45,7 @@ RSpec.describe JiraImport::UsersImporter do
RSpec.shared_examples 'maps Jira users to GitLab users' do |users_mapper_service:|
context 'when Jira import is configured correctly' do
- let_it_be(:jira_service) { create(:jira_service, project: project, active: true, url: "http://jira.example.net") }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project, active: true, url: "http://jira.example.net") }
context 'when users mapper service raises an error' do
let(:error) { Timeout::Error.new }
@@ -98,9 +98,9 @@ RSpec.describe JiraImport::UsersImporter do
context 'when Jira instance is of Server deployment type' do
before do
- allow(project).to receive(:jira_service).and_return(jira_service)
+ allow(project).to receive(:jira_integration).and_return(jira_integration)
- jira_service.data_fields.deployment_server!
+ jira_integration.data_fields.deployment_server!
end
it_behaves_like 'maps Jira users to GitLab users', users_mapper_service: JiraImport::ServerUsersMapperService
@@ -108,9 +108,9 @@ RSpec.describe JiraImport::UsersImporter do
context 'when Jira instance is of Cloud deployment type' do
before do
- allow(project).to receive(:jira_service).and_return(jira_service)
+ allow(project).to receive(:jira_integration).and_return(jira_integration)
- jira_service.data_fields.deployment_cloud!
+ jira_integration.data_fields.deployment_cloud!
end
it_behaves_like 'maps Jira users to GitLab users', users_mapper_service: JiraImport::CloudUsersMapperService
diff --git a/spec/services/keys/destroy_service_spec.rb b/spec/services/keys/destroy_service_spec.rb
index 59ce4a941c7..dd40f9d73fd 100644
--- a/spec/services/keys/destroy_service_spec.rb
+++ b/spec/services/keys/destroy_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Keys::DestroyService do
subject { described_class.new(user) }
it 'destroys a key' do
- key = create(:key)
+ key = create(:personal_key)
expect { subject.execute(key) }.to change(Key, :count).by(-1)
end
diff --git a/spec/services/markdown_content_rewriter_service_spec.rb b/spec/services/markdown_content_rewriter_service_spec.rb
index 47332bec319..37c8a210ba5 100644
--- a/spec/services/markdown_content_rewriter_service_spec.rb
+++ b/spec/services/markdown_content_rewriter_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe MarkdownContentRewriterService do
let_it_be(:user) { create(:user) }
let_it_be(:source_parent) { create(:project, :public) }
let_it_be(:target_parent) { create(:project, :public) }
+
let(:content) { 'My content' }
describe '#initialize' do
@@ -34,6 +35,7 @@ RSpec.describe MarkdownContentRewriterService do
# to prove they run correctly.
context 'when content contains a reference' do
let_it_be(:issue) { create(:issue, project: source_parent) }
+
let(:content) { "See ##{issue.iid}" }
it 'rewrites content' do
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index ffe63a8a94b..ee5250b5b3d 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -2,12 +2,13 @@
require 'spec_helper'
-RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_shared_state, :sidekiq_inline do
+RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_cache, :clean_gitlab_redis_shared_state, :sidekiq_inline do
let_it_be(:source) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:member) { create(:user) }
let_it_be(:user_ids) { member.id.to_s }
let_it_be(:access_level) { Gitlab::Access::GUEST }
+
let(:additional_params) { { invite_source: '_invite_source_' } }
let(:params) { { user_ids: user_ids, access_level: access_level }.merge(additional_params) }
diff --git a/spec/services/members/groups/creator_service_spec.rb b/spec/services/members/groups/creator_service_spec.rb
new file mode 100644
index 00000000000..4427c4e7d9f
--- /dev/null
+++ b/spec/services/members/groups/creator_service_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::Groups::CreatorService do
+ it_behaves_like 'member creation' do
+ let_it_be(:source, reload: true) { create(:group, :public) }
+ let_it_be(:member_type) { GroupMember }
+ end
+
+ describe '.access_levels' do
+ it 'returns Gitlab::Access.options_with_owner' do
+ expect(described_class.access_levels).to eq(Gitlab::Access.sym_options_with_owner)
+ end
+ end
+end
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index c530e3d0c53..dd82facaf14 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
let_it_be(:user) { project.owner }
let_it_be(:project_user) { create(:user) }
let_it_be(:namespace) { project.namespace }
+
let(:params) { {} }
let(:base_params) { { access_level: Gitlab::Access::GUEST, source: project, invite_source: '_invite_source_' } }
diff --git a/spec/services/members/projects/creator_service_spec.rb b/spec/services/members/projects/creator_service_spec.rb
new file mode 100644
index 00000000000..c6917a21bcd
--- /dev/null
+++ b/spec/services/members/projects/creator_service_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::Projects::CreatorService do
+ it_behaves_like 'member creation' do
+ let_it_be(:source, reload: true) { create(:project, :public) }
+ let_it_be(:member_type) { ProjectMember }
+ end
+
+ describe '.access_levels' do
+ it 'returns Gitlab::Access.sym_options' do
+ expect(described_class.access_levels).to eq(Gitlab::Access.sym_options)
+ end
+ end
+end
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index d10f82289bd..0f282384661 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -252,8 +252,8 @@ RSpec.describe MergeRequests::BuildService do
context 'when the source branch matches an issue' do
where(:factory, :source_branch, :closing_message) do
- :jira_service | 'FOO-123-fix-issue' | 'Closes FOO-123'
- :jira_service | 'fix-issue' | nil
+ :jira_integration | 'FOO-123-fix-issue' | 'Closes FOO-123'
+ :jira_integration | 'fix-issue' | nil
:custom_issue_tracker_integration | '123-fix-issue' | 'Closes #123'
:custom_issue_tracker_integration | 'fix-issue' | nil
nil | '123-fix-issue' | 'Closes #123'
@@ -351,8 +351,8 @@ RSpec.describe MergeRequests::BuildService do
context 'when the source branch matches an issue' do
where(:factory, :source_branch, :title, :closing_message) do
- :jira_service | 'FOO-123-fix-issue' | 'Resolve FOO-123 "Fix issue"' | 'Closes FOO-123'
- :jira_service | 'fix-issue' | 'Fix issue' | nil
+ :jira_integration | 'FOO-123-fix-issue' | 'Resolve FOO-123 "Fix issue"' | 'Closes FOO-123'
+ :jira_integration | 'fix-issue' | 'Fix issue' | nil
:custom_issue_tracker_integration | '123-fix-issue' | 'Resolve #123 "Fix issue"' | 'Closes #123'
:custom_issue_tracker_integration | 'fix-issue' | 'Fix issue' | nil
nil | '123-fix-issue' | 'Resolve "A bug"' | 'Closes #123'
@@ -400,8 +400,8 @@ RSpec.describe MergeRequests::BuildService do
context 'when the source branch matches an issue' do
where(:factory, :source_branch, :title, :closing_message) do
- :jira_service | 'FOO-123-fix-issue' | 'Resolve FOO-123 "Fix issue"' | 'Closes FOO-123'
- :jira_service | 'fix-issue' | 'Fix issue' | nil
+ :jira_integration | 'FOO-123-fix-issue' | 'Resolve FOO-123 "Fix issue"' | 'Closes FOO-123'
+ :jira_integration | 'fix-issue' | 'Fix issue' | nil
:custom_issue_tracker_integration | '123-fix-issue' | 'Resolve #123 "Fix issue"' | 'Closes #123'
:custom_issue_tracker_integration | 'fix-issue' | 'Fix issue' | nil
nil | '123-fix-issue' | 'Resolve "A bug"' | 'Closes #123'
diff --git a/spec/services/merge_requests/handle_assignees_change_service_spec.rb b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
index f9eed6eea2d..c43f5db6059 100644
--- a/spec/services/merge_requests/handle_assignees_change_service_spec.rb
+++ b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
@@ -104,9 +104,9 @@ RSpec.describe MergeRequests::HandleAssigneesChangeService do
context 'when execute_hooks option is set to true' do
let(:options) { { execute_hooks: true } }
- it 'execute hooks and services' do
+ it 'executes hooks and integrations' do
expect(merge_request.project).to receive(:execute_hooks).with(anything, :merge_request_hooks)
- expect(merge_request.project).to receive(:execute_services).with(anything, :merge_request_hooks)
+ expect(merge_request.project).to receive(:execute_integrations).with(anything, :merge_request_hooks)
expect(service).to receive(:enqueue_jira_connect_messages_for).with(merge_request)
execute
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index 503c0282bd6..b3af4d67896 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -163,14 +163,14 @@ RSpec.describe MergeRequests::MergeService do
context 'with Jira integration' do
include JiraServiceHelper
- let(:jira_tracker) { project.create_jira_service }
+ let(:jira_tracker) { project.create_jira_integration }
let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
let(:commit) { double('commit', safe_message: "Fixes #{jira_issue.to_reference}") }
before do
- stub_jira_service_test
+ stub_jira_integration_test
project.update!(has_external_issue_tracker: true)
- jira_service_settings
+ jira_integration_settings
stub_jira_urls(jira_issue.id)
allow(merge_request).to receive(:commits).and_return([commit])
end
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index 87c3fc6a2d8..5f76f6f5c44 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -5,11 +5,16 @@ require 'spec_helper'
RSpec.describe MergeRequests::PushOptionsHandlerService do
include ProjectForksHelper
- let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:parent_group) { create(:group, :public) }
+ let_it_be(:child_group) { create(:group, :public, parent: parent_group) }
+ let_it_be(:project) { create(:project, :public, :repository, group: child_group) }
let_it_be(:user1) { create(:user, developer_projects: [project]) }
let_it_be(:user2) { create(:user, developer_projects: [project]) }
let_it_be(:user3) { create(:user, developer_projects: [project]) }
let_it_be(:forked_project) { fork_project(project, user1, repository: true) }
+ let_it_be(:parent_group_milestone) { create(:milestone, group: parent_group, title: 'ParentGroupMilestone1.0') }
+ let_it_be(:child_group_milestone) { create(:milestone, group: child_group, title: 'ChildGroupMilestone1.0') }
+ let_it_be(:project_milestone) { create(:milestone, project: project, title: 'ProjectMilestone1.0') }
let(:service) { described_class.new(project: project, current_user: user1, changes: changes, push_options: push_options) }
let(:source_branch) { 'fix' }
@@ -59,6 +64,16 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
end
end
+ shared_examples_for 'a service that can set the milestone of a merge request' do
+ subject(:last_mr) { MergeRequest.last }
+
+ it 'sets the milestone' do
+ service.execute
+
+ expect(last_mr.milestone&.title).to eq(expected_milestone)
+ end
+ end
+
shared_examples_for 'a service that can set the merge request to merge when pipeline succeeds' do
subject(:last_mr) { MergeRequest.last }
@@ -514,6 +529,90 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'with the project default branch'
end
+ describe '`milestone` push option' do
+ context 'with a valid milestone' do
+ let(:expected_milestone) { project_milestone.title }
+ let(:push_options) { { milestone: project_milestone.title } }
+
+ context 'with a new branch' do
+ let(:changes) { new_branch_changes }
+
+ it_behaves_like 'a service that does not create a merge request'
+
+ it 'adds an error to the service' do
+ service.execute
+
+ expect(service.errors).to include(error_mr_required)
+ end
+
+ context 'when coupled with the `create` push option' do
+ let(:push_options) { { create: true, milestone: project_milestone.title } }
+
+ it_behaves_like 'a service that can create a merge request'
+ it_behaves_like 'a service that can set the milestone of a merge request'
+ end
+ end
+
+ context 'with an existing branch but no open MR' do
+ let(:changes) { existing_branch_changes }
+
+ it_behaves_like 'a service that does not create a merge request'
+
+ it 'adds an error to the service' do
+ service.execute
+
+ expect(service.errors).to include(error_mr_required)
+ end
+
+ context 'when coupled with the `create` push option' do
+ let(:push_options) { { create: true, milestone: project_milestone.title } }
+
+ it_behaves_like 'a service that can create a merge request'
+ it_behaves_like 'a service that can set the milestone of a merge request'
+ end
+ end
+
+ context 'with an existing branch that has a merge request open' do
+ let(:changes) { existing_branch_changes }
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch)}
+
+ it_behaves_like 'a service that does not create a merge request'
+ it_behaves_like 'a service that can set the milestone of a merge request'
+ end
+
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
+ end
+
+ context 'with invalid milestone' do
+ let(:expected_milestone) { nil }
+ let(:changes) { new_branch_changes }
+ let(:push_options) { { create: true, milestone: 'invalid_milestone' } }
+
+ it_behaves_like 'a service that can set the milestone of a merge request'
+ end
+
+ context 'with an ancestor milestone' do
+ let(:changes) { existing_branch_changes }
+
+ context 'with immediate parent milestone' do
+ let(:push_options) { { create: true, milestone: child_group_milestone.title } }
+ let(:expected_milestone) { child_group_milestone.title }
+
+ it_behaves_like 'a service that can create a merge request'
+ it_behaves_like 'a service that can set the milestone of a merge request'
+ end
+
+ context 'with multi-level ancestor milestone' do
+ let(:push_options) { { create: true, milestone: parent_group_milestone.title } }
+ let(:expected_milestone) { parent_group_milestone.title }
+
+ it_behaves_like 'a service that can create a merge request'
+ it_behaves_like 'a service that can set the milestone of a merge request'
+ end
+ end
+ end
+
shared_examples 'with an existing branch that has a merge request open in foss' do
let(:changes) { existing_branch_changes }
let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch)}
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index a46f3cf6148..ca561376581 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -25,30 +25,6 @@ RSpec.describe MergeRequests::RebaseService do
end
describe '#execute' do
- context 'when another rebase is already in progress' do
- before do
- allow(repository).to receive(:rebase_in_progress?).with(merge_request.id).and_return(true)
- end
-
- it 'saves the error message' do
- service.execute(merge_request)
-
- expect(merge_request.reload.merge_error).to eq 'Rebase task canceled: Another rebase is already in progress'
- end
-
- it 'returns an error' do
- expect(service.execute(merge_request)).to match(status: :error,
- message: described_class::REBASE_ERROR)
- end
-
- it 'clears rebase_jid' do
- expect { service.execute(merge_request) }
- .to change { merge_request.rebase_jid }
- .from(rebase_jid)
- .to(nil)
- end
- end
-
shared_examples 'sequence of failure and success' do
it 'properly clears the error message' do
allow(repository).to receive(:gitaly_operation_client).and_raise('Something went wrong')
@@ -150,6 +126,13 @@ RSpec.describe MergeRequests::RebaseService do
it_behaves_like 'a service that can execute a successful rebase'
+ it 'clears rebase_jid' do
+ expect { service.execute(merge_request) }
+ .to change(merge_request, :rebase_jid)
+ .from(rebase_jid)
+ .to(nil)
+ end
+
context 'when skip_ci flag is set' do
let(:skip_ci) { true }
diff --git a/spec/services/metrics/dashboard/annotations/create_service_spec.rb b/spec/services/metrics/dashboard/annotations/create_service_spec.rb
index c3fe7238047..8f5484fcabe 100644
--- a/spec/services/metrics/dashboard/annotations/create_service_spec.rb
+++ b/spec/services/metrics/dashboard/annotations/create_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Metrics::Dashboard::Annotations::CreateService do
let_it_be(:user) { create(:user) }
+
let(:description) { 'test annotation' }
let(:dashboard_path) { 'config/prometheus/common_metrics.yml' }
let(:starting_at) { 15.minutes.ago }
diff --git a/spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb b/spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb
index d5928b1b5af..2905e4599f3 100644
--- a/spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Metrics::Dashboard::GitlabAlertEmbedService do
let_it_be(:alert) { create(:prometheus_alert) }
let_it_be(:project) { alert.project }
let_it_be(:user) { create(:user) }
+
let(:alert_id) { alert.id }
before_all do
diff --git a/spec/services/metrics/users_starred_dashboards/create_service_spec.rb b/spec/services/metrics/users_starred_dashboards/create_service_spec.rb
index 910b556b8dd..1435e39e458 100644
--- a/spec/services/metrics/users_starred_dashboards/create_service_spec.rb
+++ b/spec/services/metrics/users_starred_dashboards/create_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Metrics::UsersStarredDashboards::CreateService do
let_it_be(:user) { create(:user) }
+
let(:dashboard_path) { 'config/prometheus/common_metrics.yml' }
let(:service_instance) { described_class.new(user, project, dashboard_path) }
let(:project) { create(:project) }
diff --git a/spec/services/namespace_settings/update_service_spec.rb b/spec/services/namespace_settings/update_service_spec.rb
index 8e176dbc6cd..e0f32cb3821 100644
--- a/spec/services/namespace_settings/update_service_spec.rb
+++ b/spec/services/namespace_settings/update_service_spec.rb
@@ -76,34 +76,61 @@ RSpec.describe NamespaceSettings::UpdateService do
end
end
- context "updating :prevent_sharing_groups_outside_hierarchy" do
- let(:settings) { { prevent_sharing_groups_outside_hierarchy: true } }
+ describe 'validating settings param for root group' do
+ using RSpec::Parameterized::TableSyntax
- context 'when user is a group owner' do
- before do
- group.add_owner(user)
- end
+ where(:setting_key, :setting_changes_from, :setting_changes_to) do
+ :prevent_sharing_groups_outside_hierarchy | false | true
+ :new_user_signups_cap | nil | 100
+ end
- it 'changes settings' do
- expect { service.execute }
- .to change { group.namespace_settings.prevent_sharing_groups_outside_hierarchy }
- .from(false).to(true)
+ with_them do
+ let(:settings) do
+ { setting_key => setting_changes_to }
end
- end
- context 'when user is not a group owner' do
- before do
- group.add_maintainer(user)
+ context 'when user is not a group owner' do
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'does not change settings' do
+ expect { service.execute }.not_to change { group.namespace_settings.public_send(setting_key) }
+ end
+
+ it 'returns the group owner error' do
+ service.execute
+
+ expect(group.namespace_settings.errors.messages[setting_key]).to include('can only be changed by a group admin.')
+ end
end
- it 'does not change settings' do
- expect { service.execute }.not_to change { group.namespace_settings.prevent_sharing_groups_outside_hierarchy }
+ context 'with a subgroup' do
+ let(:subgroup) { create(:group, parent: group) }
+
+ before do
+ group.add_owner(user)
+ end
+
+ it 'does not change settings' do
+ service = described_class.new(user, subgroup, settings)
+
+ expect { service.execute }.not_to change { group.namespace_settings.public_send(setting_key) }
+
+ expect(subgroup.namespace_settings.errors.messages[setting_key]).to include('only available on top-level groups.')
+ end
end
- it 'returns the group owner error' do
- service.execute
+ context 'when user is a group owner' do
+ before do
+ group.add_owner(user)
+ end
- expect(group.namespace_settings.errors.messages[:prevent_sharing_groups_outside_hierarchy]).to include('can only be changed by a group admin.')
+ it 'changes settings' do
+ expect { service.execute }
+ .to change { group.namespace_settings.public_send(setting_key) }
+ .from(setting_changes_from).to(setting_changes_to)
+ end
end
end
end
diff --git a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
index 2bf02e541f9..9d4fcf9ca64 100644
--- a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
+++ b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
@@ -11,7 +11,6 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
let(:frozen_time) { Time.zone.parse('23 Mar 2021 10:14:40 UTC') }
let(:previous_action_completed_at) { frozen_time - 2.days }
let(:current_action_completed_at) { nil }
- let(:experiment_enabled) { true }
let(:user_can_perform_current_track_action) { true }
let(:actions_completed) { { created_at: previous_action_completed_at, git_write_at: current_action_completed_at } }
@@ -22,7 +21,6 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
travel_to(frozen_time)
create(:onboarding_progress, namespace: group, **actions_completed)
group.add_developer(user)
- stub_experiment_for_subject(in_product_marketing_emails: experiment_enabled)
allow(Ability).to receive(:allowed?).with(user, anything, anything).and_return(user_can_perform_current_track_action)
allow(Notify).to receive(:in_product_marketing_email).and_return(double(deliver_later: nil))
end
@@ -85,50 +83,6 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
end
end
- describe 'experimentation' do
- context 'when on dotcom' do
- before do
- allow(::Gitlab).to receive(:com?).and_return(true)
- end
-
- context 'when the experiment is enabled' do
- it 'adds the group as an experiment subject in the experimental group' do
- expect(Experiment).to receive(:add_group)
- .with(:in_product_marketing_emails, variant: :experimental, group: group)
-
- execute_service
- end
- end
-
- context 'when the experiment is disabled' do
- let(:experiment_enabled) { false }
-
- it 'adds the group as an experiment subject in the control group' do
- expect(Experiment).to receive(:add_group)
- .with(:in_product_marketing_emails, variant: :control, group: group)
-
- execute_service
- end
-
- it { is_expected.not_to send_in_product_marketing_email }
- end
-
- context 'when not on dotcom' do
- before do
- allow(::Gitlab).to receive(:com?).and_return(false)
- end
-
- it 'does not add the group as an experiment subject' do
- expect(Experiment).not_to receive(:add_group)
-
- execute_service
- end
-
- it { is_expected.to send_in_product_marketing_email(user.id, group.id, :create, 0) }
- end
- end
- end
-
context 'when the previous track action is not yet completed' do
let(:previous_action_completed_at) { nil }
diff --git a/spec/services/notes/copy_service_spec.rb b/spec/services/notes/copy_service_spec.rb
index d9b6bafd7ff..dd11fa15ea8 100644
--- a/spec/services/notes/copy_service_spec.rb
+++ b/spec/services/notes/copy_service_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Notes::CopyService do
let_it_be(:group) { create(:group) }
let_it_be(:from_project) { create(:project, :public, group: group) }
let_it_be(:to_project) { create(:project, :public, group: group) }
+
let(:from_noteable) { create(:issue, project: from_project) }
let(:to_noteable) { create(:issue, project: to_project) }
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 5b4d6188b66..6621ad1f294 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Notes::CreateService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:user) { create(:user) }
+
let(:opts) do
{ note: 'Awesome comment', noteable_type: 'Issue', noteable_id: issue.id, confidential: true }
end
@@ -295,6 +296,7 @@ RSpec.describe Notes::CreateService do
context 'for merge requests' do
let_it_be(:merge_request) { create(:merge_request, source_project: project, labels: [bug_label]) }
+
let(:issuable) { merge_request }
let(:note_params) { opts.merge(noteable_type: 'MergeRequest', noteable_id: merge_request.id) }
let(:merge_request_quick_actions) do
diff --git a/spec/services/notes/destroy_service_spec.rb b/spec/services/notes/destroy_service_spec.rb
index eebbdcc33b8..55acdabef82 100644
--- a/spec/services/notes/destroy_service_spec.rb
+++ b/spec/services/notes/destroy_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Notes::DestroyService do
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
+
let(:user) { issue.author }
describe '#execute' do
diff --git a/spec/services/notes/post_process_service_spec.rb b/spec/services/notes/post_process_service_spec.rb
index 07ef08d36c4..17001733c5b 100644
--- a/spec/services/notes/post_process_service_spec.rb
+++ b/spec/services/notes/post_process_service_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Notes::PostProcessService do
it do
expect(project).to receive(:execute_hooks)
- expect(project).to receive(:execute_services)
+ expect(project).to receive(:execute_integrations)
described_class.new(@note).execute
end
@@ -29,16 +29,16 @@ RSpec.describe Notes::PostProcessService do
context 'with a confidential issue' do
let(:issue) { create(:issue, :confidential, project: project) }
- it "doesn't call note hooks/services" do
+ it "doesn't call note hooks/integrations" do
expect(project).not_to receive(:execute_hooks).with(anything, :note_hooks)
- expect(project).not_to receive(:execute_services).with(anything, :note_hooks)
+ expect(project).not_to receive(:execute_integrations).with(anything, :note_hooks)
described_class.new(@note).execute
end
- it "calls confidential-note hooks/services" do
+ it "calls confidential-note hooks/integrations" do
expect(project).to receive(:execute_hooks).with(anything, :confidential_note_hooks)
- expect(project).to receive(:execute_services).with(anything, :confidential_note_hooks)
+ expect(project).to receive(:execute_integrations).with(anything, :confidential_note_hooks)
described_class.new(@note).execute
end
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index cb7d0163cac..0a56f01ebba 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -43,6 +43,7 @@ RSpec.describe Notes::QuickActionsService do
context '/relate' do
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:other_issue) { create(:issue, project: project) }
+
let(:note_text) { "/relate #{other_issue.to_reference}" }
let(:note) { create(:note_on_issue, noteable: issue, project: project, note: note_text) }
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index 000f3d26efa..71ac1641ca5 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -273,6 +273,7 @@ RSpec.describe Notes::UpdateService do
context 'for a personal snippet' do
let_it_be(:snippet) { create(:personal_snippet, :public) }
+
let(:note) { create(:note, project: nil, noteable: snippet, author: user, note: "Note on a snippet with reference #{issue.to_reference}" ) }
it 'does not create todos' do
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index c3a0766cb17..ac82e4c025f 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -361,6 +361,7 @@ RSpec.describe NotificationService, :mailer do
let_it_be_with_reload(:issue) { create(:issue, project: project, assignees: [assignee]) }
let_it_be(:mentioned_issue) { create(:issue, assignees: issue.assignees) }
let_it_be_with_reload(:author) { create(:user) }
+
let(:note) { create(:note_on_issue, author: author, noteable: issue, project_id: issue.project_id, note: '@mention referenced, @unsubscribed_mentioned and @outsider also') }
subject { notification.new_note(note) }
@@ -376,41 +377,31 @@ RSpec.describe NotificationService, :mailer do
let(:subject) { NotificationService.new }
let(:mailer) { double(deliver_later: true) }
+ let(:issue) { create(:issue, author: User.support_bot) }
+ let(:project) { issue.project }
+ let(:note) { create(:note, noteable: issue, project: project) }
- def should_email!
- expect(Notify).to receive(:service_desk_new_note_email)
- .with(issue.id, note.id, issue.external_author)
- end
+ shared_examples 'notification with exact metric events' do |number_of_events|
+ it 'adds metric event' do
+ metric_transaction = double('Gitlab::Metrics::WebTransaction', increment: true, observe: true)
+ allow(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
+ expect(metric_transaction).to receive(:add_event).with(:service_desk_new_note_email).exactly(number_of_events).times
- def should_not_email!
- expect(Notify).not_to receive(:service_desk_new_note_email)
+ subject.new_note(note)
+ end
end
- def execute!
- subject.new_note(note)
- end
+ shared_examples 'no participants are notified' do
+ it 'does not send the email' do
+ expect(Notify).not_to receive(:service_desk_new_note_email)
- def self.it_should_email!
- it 'sends the email' do
- should_email!
- execute!
+ subject.new_note(note)
end
- end
- def self.it_should_not_email!
- it 'doesn\'t send the email' do
- should_not_email!
- execute!
- end
+ it_behaves_like 'notification with exact metric events', 0
end
- let(:issue) { create(:issue, author: User.support_bot) }
- let(:project) { issue.project }
- let(:note) { create(:note, noteable: issue, project: project) }
-
- context 'do not exist' do
- it_should_not_email!
- end
+ it_behaves_like 'no participants are notified'
context 'do exist and note not confidential' do
let!(:issue_email_participant) { issue.issue_email_participants.create!(email: 'service.desk@example.com') }
@@ -420,7 +411,14 @@ RSpec.describe NotificationService, :mailer do
project.update!(service_desk_enabled: true)
end
- it_should_email!
+ it 'sends the email' do
+ expect(Notify).to receive(:service_desk_new_note_email)
+ .with(issue.id, note.id, issue.external_author)
+
+ subject.new_note(note)
+ end
+
+ it_behaves_like 'notification with exact metric events', 1
end
context 'do exist and note is confidential' do
@@ -432,7 +430,7 @@ RSpec.describe NotificationService, :mailer do
project.update!(service_desk_enabled: true)
end
- it_should_not_email!
+ it_behaves_like 'no participants are notified'
end
end
@@ -644,6 +642,7 @@ RSpec.describe NotificationService, :mailer do
let_it_be(:issue) { create(:issue, project: project, assignees: [assignee]) }
let_it_be(:mentioned_issue) { create(:issue, assignees: issue.assignees) }
let_it_be(:author) { create(:user) }
+
let(:note) { create(:note_on_issue, author: author, noteable: issue, project_id: issue.project_id, note: '@all mentioned') }
before_all do
@@ -930,6 +929,10 @@ RSpec.describe NotificationService, :mailer do
end
context 'design management is disabled' do
+ before do
+ enable_design_management(false)
+ end
+
it 'does not notify anyone' do
notification.new_note(note)
@@ -2616,6 +2619,16 @@ RSpec.describe NotificationService, :mailer do
end
end
+ describe '#user_deactivated', :deliver_mails_inline do
+ let_it_be(:user) { create(:user) }
+
+ it 'sends the user an email' do
+ notification.user_deactivated(user.name, user.notification_email)
+
+ should_only_email(user)
+ end
+ end
+
describe 'GroupMember', :deliver_mails_inline do
let(:added_user) { create(:user) }
diff --git a/spec/services/packages/composer/create_package_service_spec.rb b/spec/services/packages/composer/create_package_service_spec.rb
index 526c7b4929b..553d58fdd86 100644
--- a/spec/services/packages/composer/create_package_service_spec.rb
+++ b/spec/services/packages/composer/create_package_service_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Packages::Composer::CreatePackageService do
let_it_be(:json) { { name: package_name }.to_json }
let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json } ) }
let_it_be(:user) { create(:user) }
+
let(:params) do
{
branch: branch,
diff --git a/spec/services/packages/conan/search_service_spec.rb b/spec/services/packages/conan/search_service_spec.rb
index 39d284ee088..55dcdfe646d 100644
--- a/spec/services/packages/conan/search_service_spec.rb
+++ b/spec/services/packages/conan/search_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Packages::Conan::SearchService do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
+
let!(:conan_package) { create(:conan_package, project: project) }
let!(:conan_package2) { create(:conan_package, project: project) }
diff --git a/spec/services/packages/create_package_file_service_spec.rb b/spec/services/packages/create_package_file_service_spec.rb
index e4b4b15ebf9..2ff00ea8568 100644
--- a/spec/services/packages/create_package_file_service_spec.rb
+++ b/spec/services/packages/create_package_file_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Packages::CreatePackageFileService do
let_it_be(:package) { create(:maven_package) }
let_it_be(:user) { create(:user) }
+
let(:service) { described_class.new(package, params) }
describe '#execute' do
diff --git a/spec/services/packages/debian/find_or_create_package_service_spec.rb b/spec/services/packages/debian/find_or_create_package_service_spec.rb
index 3582b1f1dc3..f06f86b0146 100644
--- a/spec/services/packages/debian/find_or_create_package_service_spec.rb
+++ b/spec/services/packages/debian/find_or_create_package_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Packages::Debian::FindOrCreatePackageService do
let_it_be(:distribution) { create(:debian_project_distribution) }
let_it_be(:project) { distribution.project }
let_it_be(:user) { create(:user) }
+
let(:params) { { name: 'foo', version: '1.0+debian', distribution_name: distribution.codename } }
subject(:service) { described_class.new(project, user, params) }
diff --git a/spec/services/packages/destroy_package_service_spec.rb b/spec/services/packages/destroy_package_service_spec.rb
new file mode 100644
index 00000000000..92db8da968c
--- /dev/null
+++ b/spec/services/packages/destroy_package_service_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::DestroyPackageService do
+ let_it_be(:user) { create(:user) }
+
+ let!(:package) { create(:npm_package) }
+
+ describe '#execute' do
+ subject(:service) { described_class.new(container: package, current_user: user) }
+
+ context 'when the user is authorized' do
+ before do
+ package.project.add_maintainer(user)
+ end
+
+ context 'when the destroy is successful' do
+ it 'destroys the package' do
+ expect(package).to receive(:sync_maven_metadata).and_call_original
+ expect { service.execute }.to change { Packages::Package.count }.by(-1)
+ end
+
+ it 'returns a success ServiceResponse' do
+ response = service.execute
+
+ expect(response).to be_a(ServiceResponse)
+ expect(response).to be_success
+ expect(response.message).to eq("Package was successfully deleted")
+ end
+ end
+
+ context 'when the destroy is not successful' do
+ before do
+ allow(package).to receive(:destroy!).and_raise(StandardError, "test")
+ end
+
+ it 'returns an error ServiceResponse' do
+ response = service.execute
+
+ expect(package).not_to receive(:sync_maven_metadata)
+ expect(response).to be_a(ServiceResponse)
+ expect(response).to be_error
+ expect(response.message).to eq("Failed to remove the package")
+ expect(response.status).to eq(:error)
+ end
+ end
+ end
+
+ context 'when the user is not authorized' do
+ it 'returns an error ServiceResponse' do
+ response = service.execute
+
+ expect(response).to be_a(ServiceResponse)
+ expect(response).to be_error
+ expect(response.message).to eq("You don't have access to this package")
+ expect(response.status).to eq(:error)
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/maven/find_or_create_package_service_spec.rb b/spec/services/packages/maven/find_or_create_package_service_spec.rb
index 803371af4bf..d8b48af0121 100644
--- a/spec/services/packages/maven/find_or_create_package_service_spec.rb
+++ b/spec/services/packages/maven/find_or_create_package_service_spec.rb
@@ -91,6 +91,7 @@ RSpec.describe Packages::Maven::FindOrCreatePackageService do
context 'with a build' do
let_it_be(:pipeline) { create(:ci_pipeline, user: user) }
+
let(:build) { double('build', pipeline: pipeline) }
let(:params) { { path: param_path, file_name: file_name, build: build } }
@@ -103,6 +104,7 @@ RSpec.describe Packages::Maven::FindOrCreatePackageService do
let_it_be_with_refind(:package_settings) { create(:namespace_package_setting, :group, maven_duplicates_allowed: false) }
let_it_be_with_refind(:group) { package_settings.namespace }
let_it_be_with_refind(:project) { create(:project, group: group) }
+
let!(:existing_package) { create(:maven_package, name: path, version: version, project: project) }
it { expect { subject }.not_to change { project.package_files.count } }
diff --git a/spec/services/packages/nuget/metadata_extraction_service_spec.rb b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
index 79428b58bd9..8eddd27f8a2 100644
--- a/spec/services/packages/nuget/metadata_extraction_service_spec.rb
+++ b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
@@ -21,7 +21,8 @@ RSpec.describe Packages::Nuget::MetadataExtractionService do
version: '12.0.3'
}
],
- package_tags: []
+ package_tags: [],
+ package_types: []
}
it { is_expected.to eq(expected_metadata) }
@@ -47,6 +48,16 @@ RSpec.describe Packages::Nuget::MetadataExtractionService do
end
end
+ context 'with package types' do
+ let(:nuspec_filepath) { 'packages/nuget/with_package_types.nuspec' }
+
+ it { is_expected.to have_key(:package_types) }
+
+ it 'extracts package types' do
+ expect(subject[:package_types]).to include('SymbolsPackage')
+ end
+ end
+
context 'with a nuspec file with metadata' do
let(:nuspec_filepath) { 'packages/nuget/with_metadata.nuspec' }
diff --git a/spec/services/packages/nuget/search_service_spec.rb b/spec/services/packages/nuget/search_service_spec.rb
index 1838065c5be..66c91487a8f 100644
--- a/spec/services/packages/nuget/search_service_spec.rb
+++ b/spec/services/packages/nuget/search_service_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Packages::Nuget::SearchService do
let_it_be(:package_d) { create(:nuget_package, project: project, name: 'FooBarD') }
let_it_be(:other_package_a) { create(:nuget_package, name: 'DummyPackageA') }
let_it_be(:other_package_a) { create(:nuget_package, name: 'DummyPackageB') }
+
let(:search_term) { 'ummy' }
let(:per_page) { 5 }
let(:padding) { 0 }
diff --git a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
index ffe1a5b7646..328484c3e5a 100644
--- a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
+++ b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_redis_shared_state do
include ExclusiveLeaseHelpers
- let(:package) { create(:nuget_package, :processing) }
+ let(:package) { create(:nuget_package, :processing, :with_symbol_package) }
let(:package_file) { package.package_files.first }
let(:service) { described_class.new(package_file) }
let(:package_name) { 'DummyProject.DummyPackage' }
@@ -201,6 +201,41 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
it_behaves_like 'raising an', ::Packages::Nuget::MetadataExtractionService::ExtractionError
end
+ context 'with a symbol package' do
+ let(:package_file) { package.package_files.last }
+ let(:package_file_name) { 'dummyproject.dummypackage.1.0.0.snupkg' }
+
+ context 'with no existing package' do
+ let(:package_id) { package.id }
+
+ it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ end
+
+ context 'with existing package' do
+ let!(:existing_package) { create(:nuget_package, project: package.project, name: package_name, version: package_version) }
+ let(:package_id) { existing_package.id }
+
+ it 'links existing package and updates package file', :aggregate_failures do
+ expect(service).to receive(:try_obtain_lease).and_call_original
+ expect(::Packages::Nuget::SyncMetadatumService).not_to receive(:new)
+ expect(::Packages::UpdateTagsService).not_to receive(:new)
+
+ expect { subject }
+ .to change { ::Packages::Package.count }.by(-1)
+ .and change { Packages::Dependency.count }.by(0)
+ .and change { Packages::DependencyLink.count }.by(0)
+ .and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
+ .and change { ::Packages::Nuget::Metadatum.count }.by(0)
+ expect(package_file.reload.file_name).to eq(package_file_name)
+ expect(package_file.package).to eq(existing_package)
+ end
+
+ it_behaves_like 'taking the lease'
+
+ it_behaves_like 'not updating the package if the lease is taken'
+ end
+ end
+
context 'with an invalid package name' do
invalid_names = [
'',
diff --git a/spec/services/packages/rubygems/dependency_resolver_service_spec.rb b/spec/services/packages/rubygems/dependency_resolver_service_spec.rb
index 78abfc96ed5..f23ed0e5fbc 100644
--- a/spec/services/packages/rubygems/dependency_resolver_service_spec.rb
+++ b/spec/services/packages/rubygems/dependency_resolver_service_spec.rb
@@ -5,6 +5,7 @@ RSpec.describe Packages::Rubygems::DependencyResolverService do
let_it_be(:project) { create(:project, :private) }
let_it_be(:package) { create(:package, project: project) }
let_it_be(:user) { create(:user) }
+
let(:gem_name) { package.name }
let(:service) { described_class.new(project, user, gem_name: gem_name) }
diff --git a/spec/services/pod_logs/base_service_spec.rb b/spec/services/pod_logs/base_service_spec.rb
index 6f7731fda3a..d2f6300ab65 100644
--- a/spec/services/pod_logs/base_service_spec.rb
+++ b/spec/services/pod_logs/base_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe ::PodLogs::BaseService do
include KubernetesHelpers
let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
+
let(:namespace) { 'autodevops-deploy-9-production' }
let(:pod_name) { 'pod-1' }
diff --git a/spec/services/pod_logs/elasticsearch_service_spec.rb b/spec/services/pod_logs/elasticsearch_service_spec.rb
index 598b162aee4..1111d9b9307 100644
--- a/spec/services/pod_logs/elasticsearch_service_spec.rb
+++ b/spec/services/pod_logs/elasticsearch_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ::PodLogs::ElasticsearchService do
let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
+
let(:namespace) { 'autodevops-deploy-9-production' }
let(:pod_name) { 'pod-1' }
diff --git a/spec/services/pod_logs/kubernetes_service_spec.rb b/spec/services/pod_logs/kubernetes_service_spec.rb
index 3e31ff15c1b..c06a87830ca 100644
--- a/spec/services/pod_logs/kubernetes_service_spec.rb
+++ b/spec/services/pod_logs/kubernetes_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe ::PodLogs::KubernetesService do
include KubernetesHelpers
let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
+
let(:namespace) { 'autodevops-deploy-9-production' }
let(:pod_name) { 'pod-1' }
diff --git a/spec/services/post_receive_service_spec.rb b/spec/services/post_receive_service_spec.rb
index 2a78dc454c7..871ed95bf28 100644
--- a/spec/services/post_receive_service_spec.rb
+++ b/spec/services/post_receive_service_spec.rb
@@ -283,7 +283,7 @@ RSpec.describe PostReceiveService do
context 'with a redirected data' do
it 'returns redirected message on the response' do
- project_moved = Gitlab::Checks::ProjectMoved.new(project.repository, user, 'http', 'foo/baz')
+ project_moved = Gitlab::Checks::ContainerMoved.new(project.repository, user, 'http', 'foo/baz')
project_moved.add_message
expect(subject).to include(build_basic_message(project_moved.message))
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index ac0b6cc8ef1..defeadb479a 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -190,6 +190,7 @@ RSpec.describe Projects::CreateService, '#execute' do
let_it_be(:group) { create(:group) }
let_it_be(:shared_group) { create(:group) }
let_it_be(:shared_group_user) { create(:user) }
+
let(:opts) do
{
name: 'GitLab',
@@ -221,6 +222,7 @@ RSpec.describe Projects::CreateService, '#execute' do
let_it_be(:subgroup_for_projects) { create(:group, :private, parent: group) }
let_it_be(:subgroup_for_access) { create(:group, :private, parent: group) }
let_it_be(:group_maintainer) { create(:user) }
+
let(:group_access_level) { Gitlab::Access::REPORTER }
let(:subgroup_access_level) { Gitlab::Access::DEVELOPER }
let(:share_max_access_level) { Gitlab::Access::MAINTAINER }
@@ -582,32 +584,49 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(branches.size).to eq(1)
expect(branches.collect(&:name)).to contain_exactly('example_branch')
end
+
+ describe 'advanced readme content', experiment: :new_project_readme_content do
+ before do
+ stub_experiments(new_project_readme_content: :advanced)
+ end
+
+ it_behaves_like 'creates README.md'
+
+ it 'includes advanced content in the README.md' do
+ content = project.repository.readme.data
+ expect(content).to include <<~MARKDOWN
+ git remote add origin #{project.http_url_to_repo}
+ git branch -M example_branch
+ git push -uf origin example_branch
+ MARKDOWN
+ end
+ end
end
end
- describe 'create service for the project' do
+ describe 'create integration for the project' do
subject(:project) { create_project(user, opts) }
- context 'with an active service template' do
- let!(:template_integration) { create(:prometheus_service, :template, api_url: 'https://prometheus.template.com/') }
+ context 'with an active integration template' do
+ let!(:template_integration) { create(:prometheus_integration, :template, api_url: 'https://prometheus.template.com/') }
- it 'creates a service from the template' do
+ it 'creates an integration from the template' do
expect(project.integrations.count).to eq(1)
expect(project.integrations.first.api_url).to eq(template_integration.api_url)
expect(project.integrations.first.inherit_from_id).to be_nil
end
context 'with an active instance-level integration' do
- let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
+ let!(:instance_integration) { create(:prometheus_integration, :instance, api_url: 'https://prometheus.instance.com/') }
- it 'creates a service from the instance-level integration' do
+ it 'creates an integration from the instance-level integration' do
expect(project.integrations.count).to eq(1)
expect(project.integrations.first.api_url).to eq(instance_integration.api_url)
expect(project.integrations.first.inherit_from_id).to eq(instance_integration.id)
end
context 'with an active group-level integration' do
- let!(:group_integration) { create(:prometheus_service, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+ let!(:group_integration) { create(:prometheus_integration, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
let!(:group) do
create(:group).tap do |group|
group.add_owner(user)
@@ -621,14 +640,14 @@ RSpec.describe Projects::CreateService, '#execute' do
}
end
- it 'creates a service from the group-level integration' do
+ it 'creates an integration from the group-level integration' do
expect(project.integrations.count).to eq(1)
expect(project.integrations.first.api_url).to eq(group_integration.api_url)
expect(project.integrations.first.inherit_from_id).to eq(group_integration.id)
end
context 'with an active subgroup' do
- let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
+ let!(:subgroup_integration) { create(:prometheus_integration, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
let!(:subgroup) do
create(:group, parent: group).tap do |subgroup|
subgroup.add_owner(user)
@@ -642,7 +661,7 @@ RSpec.describe Projects::CreateService, '#execute' do
}
end
- it 'creates a service from the subgroup-level integration' do
+ it 'creates an integration from the subgroup-level integration' do
expect(project.integrations.count).to eq(1)
expect(project.integrations.first.api_url).to eq(subgroup_integration.api_url)
expect(project.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
@@ -686,69 +705,6 @@ RSpec.describe Projects::CreateService, '#execute' do
create_project(user, opts)
end
- context 'when project has access to shared service' do
- before do
- stub_feature_flags(projects_post_creation_worker: false)
- end
-
- context 'Prometheus integration is shared via group cluster' do
- let(:cluster) { create(:cluster, :group, groups: [group]) }
- let(:group) do
- create(:group).tap do |group|
- group.add_owner(user)
- end
- end
-
- before do
- create(:clusters_integrations_prometheus, cluster: cluster)
- end
-
- it 'creates PrometheusService record', :aggregate_failures do
- project = create_project(user, opts.merge!(namespace_id: group.id))
- service = project.prometheus_service
-
- expect(service.active).to be true
- expect(service.manual_configuration?).to be false
- expect(service.persisted?).to be true
- end
- end
-
- context 'Prometheus integration is shared via instance cluster' do
- let(:cluster) { create(:cluster, :instance) }
-
- before do
- create(:clusters_integrations_prometheus, cluster: cluster)
- end
-
- it 'creates PrometheusService record', :aggregate_failures do
- project = create_project(user, opts)
- service = project.prometheus_service
-
- expect(service.active).to be true
- expect(service.manual_configuration?).to be false
- expect(service.persisted?).to be true
- end
-
- it 'cleans invalid record and logs warning', :aggregate_failures do
- invalid_service_record = build(:prometheus_service, properties: { api_url: nil, manual_configuration: true }.to_json)
- allow(PrometheusService).to receive(:new).and_return(invalid_service_record)
-
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(an_instance_of(ActiveRecord::RecordInvalid), include(extra: { project_id: a_kind_of(Integer) }))
- project = create_project(user, opts)
-
- expect(project.prometheus_service).to be_nil
- end
- end
-
- context 'shared Prometheus integration is not available' do
- it 'does not persist PrometheusService record', :aggregate_failures do
- project = create_project(user, opts)
-
- expect(project.prometheus_service).to be_nil
- end
- end
- end
-
context 'with external authorization enabled' do
before do
enable_external_authorization_service_check
diff --git a/spec/services/projects/destroy_rollback_service_spec.rb b/spec/services/projects/destroy_rollback_service_spec.rb
index f63939337b8..3eaacc8c1e7 100644
--- a/spec/services/projects/destroy_rollback_service_spec.rb
+++ b/spec/services/projects/destroy_rollback_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::DestroyRollbackService do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
+
let(:repository) { project.repository }
let(:repository_storage) { project.repository_storage }
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index c6b2b1e2b21..4a76347ea45 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
include ProjectForksHelper
let_it_be(:user) { create(:user) }
+
let!(:project) { create(:project, :repository, namespace: user.namespace) }
let(:path) { project.repository.disk_path }
let(:remove_path) { removal_path(path) }
diff --git a/spec/services/projects/gitlab_projects_import_service_spec.rb b/spec/services/projects/gitlab_projects_import_service_spec.rb
index 09d093a9916..d32e720a49f 100644
--- a/spec/services/projects/gitlab_projects_import_service_spec.rb
+++ b/spec/services/projects/gitlab_projects_import_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Projects::GitlabProjectsImportService do
let_it_be(:namespace) { create(:namespace) }
+
let(:path) { 'test-path' }
let(:file) { fixture_file_upload('spec/fixtures/project_export.tar.gz') }
let(:overwrite) { false }
diff --git a/spec/services/projects/group_links/create_service_spec.rb b/spec/services/projects/group_links/create_service_spec.rb
index 9bc780fe177..4ea5f2b3a53 100644
--- a/spec/services/projects/group_links/create_service_spec.rb
+++ b/spec/services/projects/group_links/create_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute' do
let_it_be(:user) { create :user }
let_it_be(:group) { create :group }
let_it_be(:project) { create :project }
+
let(:group_access) { Gitlab::Access::DEVELOPER }
let(:opts) do
{
@@ -49,9 +50,9 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute' do
expect(AuthorizedProjectsWorker).not_to(
receive(:bulk_perform_async)
)
- expect(AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker).to(
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to(
receive(:perform_async)
- .with(project.id, group.id, group_access)
+ .with(project.id)
.and_call_original
)
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
diff --git a/spec/services/projects/group_links/destroy_service_spec.rb b/spec/services/projects/group_links/destroy_service_spec.rb
index d60e9a01e54..d65afb68bfe 100644
--- a/spec/services/projects/group_links/destroy_service_spec.rb
+++ b/spec/services/projects/group_links/destroy_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Projects::GroupLinks::DestroyService, '#execute' do
let_it_be(:user) { create :user }
let_it_be(:project) { create(:project, :private) }
let_it_be(:group) { create(:group) }
+
let!(:group_link) { create(:project_group_link, project: project, group: group) }
subject { described_class.new(project, user) }
diff --git a/spec/services/projects/group_links/update_service_spec.rb b/spec/services/projects/group_links/update_service_spec.rb
index 053c5eb611e..4a38fb0c7d9 100644
--- a/spec/services/projects/group_links/update_service_spec.rb
+++ b/spec/services/projects/group_links/update_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Projects::GroupLinks::UpdateService, '#execute' do
let_it_be(:user) { create :user }
let_it_be(:group) { create :group }
let_it_be(:project) { create :project }
+
let!(:link) { create(:project_group_link, project: project, group: group) }
let(:expiry_date) { 1.month.from_now.to_date }
@@ -32,25 +33,87 @@ RSpec.describe Projects::GroupLinks::UpdateService, '#execute' do
expect(link.expires_at).to eq(expiry_date)
end
- it 'updates project permissions' do
- expect { subject }.to change { user.can?(:create_release, project) }.from(true).to(false)
- end
+ context 'project authorizations update' do
+ context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is enabled' do
+ before do
+ stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: true)
+ end
+
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
+ .to receive(:perform_async).with(link.project.id)
+
+ subject
+ end
+
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ [[user.id]],
+ batch_delay: 30.seconds, batch_size: 100)
+ )
- it 'executes UserProjectAccessChangedService' do
- expect_next_instance_of(UserProjectAccessChangedService) do |service|
- expect(service).to receive(:execute)
+ subject
+ end
+
+ it 'updates project authorizations of users who had access to the project via the group share', :sidekiq_inline do
+ group.add_maintainer(user)
+
+ expect { subject }.to(
+ change { Ability.allowed?(user, :create_release, project) }
+ .from(true).to(false))
+ end
end
- subject
+ context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is disabled' do
+ before do
+ stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: false)
+ end
+
+ it 'calls UserProjectAccessChangedService to update project authorizations' do
+ expect_next_instance_of(UserProjectAccessChangedService, [user.id]) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ subject
+ end
+
+ it 'updates project authorizations of users who had access to the project via the group share' do
+ group.add_maintainer(user)
+
+ expect { subject }.to(
+ change { Ability.allowed?(user, :create_release, project) }
+ .from(true).to(false))
+ end
+ end
end
context 'with only param not requiring authorization refresh' do
let(:group_link_params) { { expires_at: Date.tomorrow } }
- it 'does not execute UserProjectAccessChangedService' do
- expect(UserProjectAccessChangedService).not_to receive(:new)
+ context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is enabled' do
+ before do
+ stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: true)
+ end
+
+ it 'does not perform any project authorizations update using `AuthorizedProjectUpdate::ProjectRecalculateWorker`' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is disabled' do
+ before do
+ stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: false)
+ end
+
+ it 'does not perform any project authorizations update using `UserProjectAccessChangedService`' do
+ expect(UserProjectAccessChangedService).not_to receive(:new)
- subject
+ subject
+ end
end
end
end
diff --git a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
index 1fb6dae0c08..f27ebb2e19e 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
@@ -106,6 +106,26 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService do
end
end
+ context 'when file download returns a redirect' do
+ let(:redirect_link) { 'http://external-link' }
+
+ before do
+ stub_full_request(download_link).to_return(status: 301, body: 'You are being redirected', headers: { 'Location' => redirect_link } )
+ stub_full_request(redirect_link).to_return(body: lfs_content)
+ end
+
+ it_behaves_like 'lfs object is created'
+
+ it 'correctly stores lfs object' do
+ subject.execute
+
+ new_lfs_object = LfsObject.first
+
+ expect(new_lfs_object).to have_attributes(oid: oid, size: size)
+ expect(File.binread(new_lfs_object.file.file.file)).to eq lfs_content
+ end
+ end
+
context 'when downloaded lfs file has a different size' do
let(:size) { 1 }
@@ -252,6 +272,18 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService do
context 'and first fragments are the same' do
let(:lfs_content) { existing_lfs_object.file.read }
+ context 'when lfs_link_existing_object feature flag disabled' do
+ before do
+ stub_feature_flags(lfs_link_existing_object: false)
+ end
+
+ it 'does not call link_existing_lfs_object!' do
+ expect(subject).not_to receive(:link_existing_lfs_object!)
+
+ subject.execute
+ end
+ end
+
it 'returns success' do
expect(subject.execute).to eq({ status: :success })
end
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index 018bfa8ef61..f91f879b772 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -378,8 +378,8 @@ RSpec.describe Projects::Operations::UpdateService do
context 'prometheus integration' do
context 'prometheus params were passed into service' do
- let(:prometheus_service) do
- build_stubbed(:prometheus_service, project: project, properties: {
+ let(:prometheus_integration) do
+ build_stubbed(:prometheus_integration, project: project, properties: {
api_url: "http://example.prometheus.com",
manual_configuration: "0"
})
@@ -394,18 +394,18 @@ RSpec.describe Projects::Operations::UpdateService do
}
end
- it 'uses Project#find_or_initialize_service to include instance defined defaults and pass them to Projects::UpdateService', :aggregate_failures do
+ it 'uses Project#find_or_initialize_integration to include instance defined defaults and pass them to Projects::UpdateService', :aggregate_failures do
project_update_service = double(Projects::UpdateService)
expect(project)
- .to receive(:find_or_initialize_service)
+ .to receive(:find_or_initialize_integration)
.with('prometheus')
- .and_return(prometheus_service)
+ .and_return(prometheus_integration)
expect(Projects::UpdateService).to receive(:new) do |project_arg, user_arg, update_params_hash|
expect(project_arg).to eq project
expect(user_arg).to eq user
- expect(update_params_hash[:prometheus_service_attributes]).to include('properties' => { 'api_url' => 'http://new.prometheus.com', 'manual_configuration' => '1' })
- expect(update_params_hash[:prometheus_service_attributes]).not_to include(*%w(id project_id created_at updated_at))
+ expect(update_params_hash[:prometheus_integration_attributes]).to include('properties' => { 'api_url' => 'http://new.prometheus.com', 'manual_configuration' => '1' })
+ expect(update_params_hash[:prometheus_integration_attributes]).not_to include(*%w(id project_id created_at updated_at))
end.and_return(project_update_service)
expect(project_update_service).to receive(:execute)
@@ -413,13 +413,13 @@ RSpec.describe Projects::Operations::UpdateService do
end
end
- context 'prometheus params were not passed into service' do
+ context 'when prometheus params are not passed into service' do
let(:params) { { something: :else } }
it 'does not pass any prometheus params into Projects::UpdateService', :aggregate_failures do
project_update_service = double(Projects::UpdateService)
- expect(project).not_to receive(:find_or_initialize_service)
+ expect(project).not_to receive(:find_or_initialize_integration)
expect(Projects::UpdateService)
.to receive(:new)
.with(project, user, {})
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index 5235c64d451..25cf588dedf 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
let(:alert_manager_token) { token_input }
before do
- create(:prometheus_service, project: project)
+ create(:prometheus_integration, project: project)
if alerting_setting
create(:project_alerting_setting,
@@ -165,7 +165,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
context 'incident settings' do
before do
- create(:prometheus_service, project: project)
+ create(:prometheus_integration, project: project)
create(:project_alerting_setting, project: project, token: token)
end
@@ -204,7 +204,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
let(:process_service) { instance_double(AlertManagement::ProcessPrometheusAlertService) }
before do
- create(:prometheus_service, project: project)
+ create(:prometheus_integration, project: project)
create(:project_alerting_setting, project: project, token: token)
end
diff --git a/spec/services/projects/protect_default_branch_service_spec.rb b/spec/services/projects/protect_default_branch_service_spec.rb
index a485a64ca35..c8aa421cdd4 100644
--- a/spec/services/projects/protect_default_branch_service_spec.rb
+++ b/spec/services/projects/protect_default_branch_service_spec.rb
@@ -99,6 +99,53 @@ RSpec.describe Projects::ProtectDefaultBranchService do
.not_to have_received(:create_protected_branch)
end
end
+
+ context 'when protected branch does not exist' do
+ before do
+ allow(service)
+ .to receive(:protected_branch_exists?)
+ .and_return(false)
+ allow(service)
+ .to receive(:protect_branch?)
+ .and_return(true)
+ end
+
+ it 'changes the HEAD of the project' do
+ service.protect_default_branch
+
+ expect(project)
+ .to have_received(:change_head)
+ end
+
+ it 'protects the default branch' do
+ service.protect_default_branch
+
+ expect(service)
+ .to have_received(:create_protected_branch)
+ end
+ end
+
+ context 'when protected branch already exists' do
+ before do
+ allow(service)
+ .to receive(:protected_branch_exists?)
+ .and_return(true)
+ end
+
+ it 'changes the HEAD of the project' do
+ service.protect_default_branch
+
+ expect(project)
+ .to have_received(:change_head)
+ end
+
+ it 'does not protect the default branch' do
+ service.protect_default_branch
+
+ expect(service)
+ .not_to have_received(:create_protected_branch)
+ end
+ end
end
describe '#create_protected_branch' do
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 3171abfb36f..b71677a5e8f 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -7,7 +7,8 @@ RSpec.describe Projects::TransferService do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
- let_it_be(:group_integration) { create(:slack_service, group: group, project: nil, webhook: 'http://group.slack.com') }
+ let_it_be(:group_integration) { create(:integrations_slack, group: group, project: nil, webhook: 'http://group.slack.com') }
+
let(:project) { create(:project, :repository, :legacy_storage, namespace: user.namespace) }
subject(:execute_transfer) { described_class.new(project, user).execute(group).tap { project.reload } }
@@ -121,24 +122,24 @@ RSpec.describe Projects::TransferService do
context 'with a project integration' do
let_it_be_with_reload(:project) { create(:project, namespace: user.namespace) }
- let_it_be(:instance_integration) { create(:slack_service, :instance, webhook: 'http://project.slack.com') }
+ let_it_be(:instance_integration) { create(:integrations_slack, :instance, webhook: 'http://project.slack.com') }
context 'with an inherited integration' do
- let_it_be(:project_integration) { create(:slack_service, project: project, webhook: 'http://project.slack.com', inherit_from_id: instance_integration.id) }
+ let_it_be(:project_integration) { create(:integrations_slack, project: project, webhook: 'http://project.slack.com', inherit_from_id: instance_integration.id) }
it 'replaces inherited integrations', :aggregate_failures do
execute_transfer
- expect(project.slack_service.webhook).to eq(group_integration.webhook)
+ expect(project.slack_integration.webhook).to eq(group_integration.webhook)
expect(Integration.count).to eq(3)
end
end
context 'with a custom integration' do
- let_it_be(:project_integration) { create(:slack_service, project: project, webhook: 'http://project.slack.com') }
+ let_it_be(:project_integration) { create(:integrations_slack, project: project, webhook: 'http://project.slack.com') }
it 'does not updates the integrations' do
- expect { execute_transfer }.not_to change { project.slack_service.webhook }
+ expect { execute_transfer }.not_to change { project.slack_integration.webhook }
end
end
end
@@ -434,28 +435,74 @@ RSpec.describe Projects::TransferService do
end
describe 'refreshing project authorizations' do
+ let(:old_group) { create(:group) }
+ let!(:project) { create(:project, namespace: old_group) }
+ let(:member_of_old_group) { create(:user) }
let(:group) { create(:group) }
- let(:owner) { project.namespace.owner }
- let(:group_member) { create(:user) }
+ let(:member_of_new_group) { create(:user) }
before do
- group.add_user(owner, GroupMember::MAINTAINER)
- group.add_user(group_member, GroupMember::DEVELOPER)
+ old_group.add_developer(member_of_old_group)
+ group.add_maintainer(member_of_new_group)
+
+ # Add the executing user as owner in both groups, so that
+ # transfer can be executed.
+ old_group.add_owner(user)
+ group.add_owner(user)
end
- it 'refreshes the permissions of the old and new namespace' do
- execute_transfer
+ context 'when the feature flag `specialized_worker_for_project_transfer_auth_recalculation` is enabled' do
+ before do
+ stub_feature_flags(specialized_worker_for_project_transfer_auth_recalculation: true)
+ end
+
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
+ .to receive(:perform_async).with(project.id)
+
+ execute_transfer
+ end
+
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
+ user_ids = [user.id, member_of_old_group.id, member_of_new_group.id].map { |id| [id] }
+
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ user_ids,
+ batch_delay: 30.seconds, batch_size: 100)
+ )
- expect(group_member.authorized_projects).to include(project)
- expect(owner.authorized_projects).to include(project)
+ subject
+ end
+
+ it 'refreshes the permissions of the members of the old and new namespace', :sidekiq_inline do
+ expect { execute_transfer }
+ .to change { member_of_old_group.authorized_projects.include?(project) }.from(true).to(false)
+ .and change { member_of_new_group.authorized_projects.include?(project) }.from(false).to(true)
+ end
end
- it 'only schedules a single job for every user' do
- expect_next_instance_of(UserProjectAccessChangedService, [owner.id, group_member.id]) do |service|
- expect(service).to receive(:execute).once.and_call_original
+ context 'when the feature flag `specialized_worker_for_project_transfer_auth_recalculation` is disabled' do
+ before do
+ stub_feature_flags(specialized_worker_for_project_transfer_auth_recalculation: false)
end
- execute_transfer
+ it 'calls UserProjectAccessChangedService to update project authorizations' do
+ user_ids = [user.id, member_of_old_group.id, member_of_new_group.id]
+
+ expect_next_instance_of(UserProjectAccessChangedService, user_ids) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ execute_transfer
+ end
+
+ it 'refreshes the permissions of the members of the old and new namespace' do
+ expect { execute_transfer }
+ .to change { member_of_old_group.authorized_projects.include?(project) }.from(true).to(false)
+ .and change { member_of_new_group.authorized_projects.include?(project) }.from(false).to(true)
+ end
end
end
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index b11607bc213..5898504b463 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -5,6 +5,7 @@ require "spec_helper"
RSpec.describe Projects::UpdatePagesService do
let_it_be(:project, refind: true) { create(:project, :repository) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
+
let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
let(:invalid_file) { fixture_file_upload('spec/fixtures/dk.png') }
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index 5b15b7d5f34..17d01a57221 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -83,9 +83,10 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
.with(project.repository.raw)
.and_raise(Gitlab::Git::CommandError)
- result = subject.execute
+ expect do
+ subject.execute
+ end.to raise_error(Gitlab::Git::CommandError)
- expect(result).to be_error
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
expect(repository_storage_move).to be_failed
@@ -101,9 +102,10 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
expect(original_project_repository_double).to receive(:remove)
.and_raise(Gitlab::Git::CommandError)
- result = subject.execute
+ expect do
+ subject.execute
+ end.to raise_error(Gitlab::Git::CommandError)
- expect(result).to be_error
expect(repository_storage_move).to be_cleanup_failed
end
end
@@ -118,9 +120,10 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
expect(project_repository_double).to receive(:checksum)
.and_return('not matching checksum')
- result = subject.execute
+ expect do
+ subject.execute
+ end.to raise_error(UpdateRepositoryStorageMethods::Error, /Failed to verify project repository checksum/)
- expect(result).to be_error
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
end
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index e1b22da2e61..c74a8295d0a 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -200,17 +200,32 @@ RSpec.describe Projects::UpdateService do
context 'when updating a default branch' do
let(:project) { create(:project, :repository) }
- it 'changes a default branch' do
+ it 'changes default branch, tracking the previous branch' do
+ previous_default_branch = project.default_branch
+
update_project(project, admin, default_branch: 'feature')
- expect(Project.find(project.id).default_branch).to eq 'feature'
+ project.reload
+
+ expect(project.default_branch).to eq('feature')
+ expect(project.previous_default_branch).to eq(previous_default_branch)
+
+ update_project(project, admin, default_branch: previous_default_branch)
+
+ project.reload
+
+ expect(project.default_branch).to eq(previous_default_branch)
+ expect(project.previous_default_branch).to eq('feature')
end
it 'does not change a default branch' do
# The branch 'unexisted-branch' does not exist.
update_project(project, admin, default_branch: 'unexisted-branch')
- expect(Project.find(project.id).default_branch).to eq 'master'
+ project.reload
+
+ expect(project.default_branch).to eq 'master'
+ expect(project.previous_default_branch).to be_nil
end
end
@@ -468,58 +483,58 @@ RSpec.describe Projects::UpdateService do
end
end
- context 'when updating nested attributes for prometheus service' do
- context 'prometheus service exists' do
- let(:prometheus_service_attributes) do
- attributes_for(:prometheus_service,
+ context 'when updating nested attributes for prometheus integration' do
+ context 'prometheus integration exists' do
+ let(:prometheus_integration_attributes) do
+ attributes_for(:prometheus_integration,
project: project,
properties: { api_url: "http://new.prometheus.com", manual_configuration: "0" }
)
end
- let!(:prometheus_service) do
- create(:prometheus_service,
+ let!(:prometheus_integration) do
+ create(:prometheus_integration,
project: project,
properties: { api_url: "http://old.prometheus.com", manual_configuration: "0" }
)
end
it 'updates existing record' do
- expect { update_project(project, user, prometheus_service_attributes: prometheus_service_attributes) }
- .to change { prometheus_service.reload.api_url }
+ expect { update_project(project, user, prometheus_integration_attributes: prometheus_integration_attributes) }
+ .to change { prometheus_integration.reload.api_url }
.from("http://old.prometheus.com")
.to("http://new.prometheus.com")
end
end
- context 'prometheus service does not exist' do
+ context 'prometheus integration does not exist' do
context 'valid parameters' do
- let(:prometheus_service_attributes) do
- attributes_for(:prometheus_service,
+ let(:prometheus_integration_attributes) do
+ attributes_for(:prometheus_integration,
project: project,
properties: { api_url: "http://example.prometheus.com", manual_configuration: "0" }
)
end
it 'creates new record' do
- expect { update_project(project, user, prometheus_service_attributes: prometheus_service_attributes) }
- .to change { ::PrometheusService.where(project: project).count }
+ expect { update_project(project, user, prometheus_integration_attributes: prometheus_integration_attributes) }
+ .to change { ::Integrations::Prometheus.where(project: project).count }
.from(0)
.to(1)
end
end
context 'invalid parameters' do
- let(:prometheus_service_attributes) do
- attributes_for(:prometheus_service,
+ let(:prometheus_integration_attributes) do
+ attributes_for(:prometheus_integration,
project: project,
properties: { api_url: nil, manual_configuration: "1" }
)
end
it 'does not create new record' do
- expect { update_project(project, user, prometheus_service_attributes: prometheus_service_attributes) }
- .not_to change { ::PrometheusService.where(project: project).count }
+ expect { update_project(project, user, prometheus_integration_attributes: prometheus_integration_attributes) }
+ .not_to change { ::Integrations::Prometheus.where(project: project).count }
end
end
end
diff --git a/spec/services/prometheus/create_default_alerts_service_spec.rb b/spec/services/prometheus/create_default_alerts_service_spec.rb
index e149161d881..0880799b589 100644
--- a/spec/services/prometheus/create_default_alerts_service_spec.rb
+++ b/spec/services/prometheus/create_default_alerts_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Prometheus::CreateDefaultAlertsService do
let_it_be(:project) { create(:project, :repository) }
+
let(:instance) { described_class.new(project: project) }
let(:expected_alerts) { described_class::DEFAULT_ALERTS }
diff --git a/spec/services/prometheus/proxy_service_spec.rb b/spec/services/prometheus/proxy_service_spec.rb
index f22ea361fde..b78683cace7 100644
--- a/spec/services/prometheus/proxy_service_spec.rb
+++ b/spec/services/prometheus/proxy_service_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe Prometheus::ProxyService do
end
describe '#execute' do
- let(:prometheus_adapter) { instance_double(PrometheusService) }
+ let(:prometheus_adapter) { instance_double(::Integrations::Prometheus) }
let(:params) { ActionController::Parameters.new(query: '1').permit! }
subject { described_class.new(environment, 'GET', 'query', params) }
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 4af76bc65ab..d7f5c39e457 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe QuickActions::InterpretService do
let_it_be(:inprogress) { create(:label, project: project, title: 'In Progress') }
let_it_be(:helmchart) { create(:label, project: project, title: 'Helm Chart Registry') }
let_it_be(:bug) { create(:label, project: project, title: 'Bug') }
+
let(:service) { described_class.new(project, developer) }
before_all do
diff --git a/spec/services/releases/create_evidence_service_spec.rb b/spec/services/releases/create_evidence_service_spec.rb
index 818d20f0468..0ac15a7291d 100644
--- a/spec/services/releases/create_evidence_service_spec.rb
+++ b/spec/services/releases/create_evidence_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Releases::CreateEvidenceService do
let_it_be(:project) { create(:project) }
+
let(:release) { create(:release, project: project) }
let(:service) { described_class.new(release) }
diff --git a/spec/services/releases/create_service_spec.rb b/spec/services/releases/create_service_spec.rb
index 7287825a0be..bf28fde3d90 100644
--- a/spec/services/releases/create_service_spec.rb
+++ b/spec/services/releases/create_service_spec.rb
@@ -44,6 +44,21 @@ RSpec.describe Releases::CreateService do
it_behaves_like 'a successful release creation'
+ context 'when tag is protected and user does not have access to it' do
+ let!(:protected_tag) { create(:protected_tag, :no_one_can_create, name: '*', project: project) }
+
+ it 'tracks the error event' do
+ stub_feature_flags(evalute_protected_tag_for_release_permissions: false)
+
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ kind_of(described_class::ReleaseProtectedTagAccessError),
+ project_id: project.id,
+ user_id: user.id)
+
+ service.execute
+ end
+ end
+
context 'when the tag does not exist' do
let(:tag_name) { 'non-exist-tag' }
diff --git a/spec/services/releases/destroy_service_spec.rb b/spec/services/releases/destroy_service_spec.rb
index bc5bff0b31d..38cdcef3825 100644
--- a/spec/services/releases/destroy_service_spec.rb
+++ b/spec/services/releases/destroy_service_spec.rb
@@ -28,6 +28,21 @@ RSpec.describe Releases::DestroyService do
it 'returns the destroyed object' do
is_expected.to include(status: :success, release: release)
end
+
+ context 'when tag is protected and user does not have access to it' do
+ let!(:protected_tag) { create(:protected_tag, :no_one_can_create, name: '*', project: project) }
+
+ it 'tracks the error event' do
+ stub_feature_flags(evalute_protected_tag_for_release_permissions: false)
+
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ kind_of(described_class::ReleaseProtectedTagAccessError),
+ project_id: project.id,
+ user_id: user.id)
+
+ service.execute
+ end
+ end
end
context 'when tag does not exist in the repository' do
diff --git a/spec/services/releases/update_service_spec.rb b/spec/services/releases/update_service_spec.rb
index 932a7fab5ec..96b562a8071 100644
--- a/spec/services/releases/update_service_spec.rb
+++ b/spec/services/releases/update_service_spec.rb
@@ -38,6 +38,21 @@ RSpec.describe Releases::UpdateService do
service.execute
end
+ context 'when tag is protected and user does not have access to it' do
+ let!(:protected_tag) { create(:protected_tag, :no_one_can_create, name: '*', project: project) }
+
+ it 'tracks the error event' do
+ stub_feature_flags(evalute_protected_tag_for_release_permissions: false)
+
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ kind_of(described_class::ReleaseProtectedTagAccessError),
+ project_id: project.id,
+ user_id: user.id)
+
+ service.execute
+ end
+ end
+
+ context 'when the tag does not exist' do
let(:tag_name) { 'foobar' }
diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb
index 9a5b0f33fbb..02d60f076ca 100644
--- a/spec/services/repositories/changelog_service_spec.rb
+++ b/spec/services/repositories/changelog_service_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe Repositories::ChangelogService do
recorder = ActiveRecord::QueryRecorder.new { service.execute }
changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
- expect(recorder.count).to eq(12)
+ expect(recorder.count).to eq(11)
expect(changelog).to include('Title 1', 'Title 2')
end
diff --git a/spec/services/repositories/destroy_rollback_service_spec.rb b/spec/services/repositories/destroy_rollback_service_spec.rb
index 9cc41a4c7f8..717e52f0e40 100644
--- a/spec/services/repositories/destroy_rollback_service_spec.rb
+++ b/spec/services/repositories/destroy_rollback_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Repositories::DestroyRollbackService do
let_it_be(:user) { create(:user) }
+
let!(:project) { create(:project, :repository, namespace: user.namespace) }
let(:repository) { project.repository }
let(:path) { repository.disk_path }
diff --git a/spec/services/repositories/destroy_service_spec.rb b/spec/services/repositories/destroy_service_spec.rb
index 81bda2130a6..240f837e973 100644
--- a/spec/services/repositories/destroy_service_spec.rb
+++ b/spec/services/repositories/destroy_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Repositories::DestroyService do
let_it_be(:user) { create(:user) }
+
let!(:project) { create(:project, :repository, namespace: user.namespace) }
let(:repository) { project.repository }
let(:path) { repository.disk_path }
diff --git a/spec/services/repositories/shell_destroy_service_spec.rb b/spec/services/repositories/shell_destroy_service_spec.rb
index 9020ef7b209..65168a1784a 100644
--- a/spec/services/repositories/shell_destroy_service_spec.rb
+++ b/spec/services/repositories/shell_destroy_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Repositories::ShellDestroyService do
let_it_be(:user) { create(:user) }
+
let!(:project) { create(:project, :repository, namespace: user.namespace) }
let(:path) { project.repository.disk_path }
let(:remove_path) { "#{path}+#{project.id}#{described_class::DELETED_FLAG}" }
diff --git a/spec/services/resource_access_tokens/create_service_spec.rb b/spec/services/resource_access_tokens/create_service_spec.rb
index 517ed086713..11069dc1bb8 100644
--- a/spec/services/resource_access_tokens/create_service_spec.rb
+++ b/spec/services/resource_access_tokens/create_service_spec.rb
@@ -88,12 +88,28 @@ RSpec.describe ResourceAccessTokens::CreateService do
end
end
- it 'adds the bot user as a maintainer in the resource' do
- response = subject
- access_token = response.payload[:access_token]
- bot_user = access_token.user
+ context 'access level' do
+ context 'when user does not specify an access level' do
+ it 'adds the bot user as a maintainer in the resource' do
+ response = subject
+ access_token = response.payload[:access_token]
+ bot_user = access_token.user
+
+ expect(resource.members.maintainers.map(&:user_id)).to include(bot_user.id)
+ end
+ end
- expect(resource.members.maintainers.map(&:user_id)).to include(bot_user.id)
+ context 'when user specifies an access level' do
+ let_it_be(:params) { { access_level: Gitlab::Access::DEVELOPER } }
+
+ it 'adds the bot user with the specified access level in the resource' do
+ response = subject
+ access_token = response.payload[:access_token]
+ bot_user = access_token.user
+
+ expect(resource.members.developers.map(&:user_id)).to include(bot_user.id)
+ end
+ end
end
context 'personal access token' do
@@ -176,6 +192,7 @@ RSpec.describe ResourceAccessTokens::CreateService do
context "when access provisioning fails" do
let_it_be(:bot_user) { create(:user, :project_bot) }
+
let(:unpersisted_member) { build(:project_member, source: resource, user: bot_user) }
before do
diff --git a/spec/services/resource_access_tokens/revoke_service_spec.rb b/spec/services/resource_access_tokens/revoke_service_spec.rb
index 99adb4bb7a0..4f4e2ab0c99 100644
--- a/spec/services/resource_access_tokens/revoke_service_spec.rb
+++ b/spec/services/resource_access_tokens/revoke_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe ResourceAccessTokens::RevokeService do
subject { described_class.new(user, resource, access_token).execute }
let_it_be(:user) { create(:user) }
+
let(:access_token) { create(:personal_access_token, user: resource_bot) }
describe '#execute', :sidekiq_inline do
@@ -80,6 +81,7 @@ RSpec.describe ResourceAccessTokens::RevokeService do
context 'when resource is a project' do
let_it_be(:resource) { create(:project, :private) }
+
let(:resource_bot) { create(:user, :project_bot) }
before do
diff --git a/spec/services/resource_events/change_labels_service_spec.rb b/spec/services/resource_events/change_labels_service_spec.rb
index 8eac6ae0b49..012168ef719 100644
--- a/spec/services/resource_events/change_labels_service_spec.rb
+++ b/spec/services/resource_events/change_labels_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe ResourceEvents::ChangeLabelsService do
let_it_be(:project) { create(:project) }
let_it_be(:author) { create(:user) }
+
let(:resource) { create(:issue, project: project) }
describe '.change_labels' do
diff --git a/spec/services/resource_events/merge_into_notes_service_spec.rb b/spec/services/resource_events/merge_into_notes_service_spec.rb
index 6209294f4ce..abe00e72f20 100644
--- a/spec/services/resource_events/merge_into_notes_service_spec.rb
+++ b/spec/services/resource_events/merge_into_notes_service_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe ResourceEvents::MergeIntoNotesService do
let_it_be(:resource) { create(:issue, project: project) }
let_it_be(:label) { create(:label, project: project) }
let_it_be(:label2) { create(:label, project: project) }
+
let(:time) { Time.current }
describe '#execute' do
diff --git a/spec/services/security/ci_configuration/sast_parser_service_spec.rb b/spec/services/security/ci_configuration/sast_parser_service_spec.rb
index 4fe99f20879..4346d0a9e07 100644
--- a/spec/services/security/ci_configuration/sast_parser_service_spec.rb
+++ b/spec/services/security/ci_configuration/sast_parser_service_spec.rb
@@ -3,11 +3,13 @@
require 'spec_helper'
RSpec.describe Security::CiConfiguration::SastParserService do
+ include Ci::TemplateHelpers
+
describe '#configuration' do
include_context 'read ci configuration for sast enabled project'
let(:configuration) { described_class.new(project).configuration }
- let(:secure_analyzers_prefix) { configuration['global'][0] }
+ let(:secure_analyzers) { configuration['global'][0] }
let(:sast_excluded_paths) { configuration['global'][1] }
let(:sast_pipeline_stage) { configuration['pipeline'][0] }
let(:sast_search_max_depth) { configuration['pipeline'][1] }
@@ -16,7 +18,7 @@ RSpec.describe Security::CiConfiguration::SastParserService do
let(:sast_brakeman_level) { brakeman['variables'][0] }
it 'parses the configuration for SAST' do
- expect(secure_analyzers_prefix['default_value']).to eql('registry.gitlab.com/gitlab-org/security-products/analyzers')
+ expect(secure_analyzers['default_value']).to eql(secure_analyzers_prefix)
expect(sast_excluded_paths['default_value']).to eql('spec, test, tests, tmp')
expect(sast_pipeline_stage['default_value']).to eql('test')
expect(sast_search_max_depth['default_value']).to eql('4')
@@ -28,7 +30,7 @@ RSpec.describe Security::CiConfiguration::SastParserService do
context 'when .gitlab-ci.yml is present' do
it 'populates the current values from the file' do
allow(project.repository).to receive(:blob_data_at).and_return(gitlab_ci_yml_content)
- expect(secure_analyzers_prefix['value']).to eql('registry.gitlab.com/gitlab-org/security-products/analyzers2')
+ expect(secure_analyzers['value']).to eql("registry.gitlab.com/gitlab-org/security-products/analyzers2")
expect(sast_excluded_paths['value']).to eql('spec, executables')
expect(sast_pipeline_stage['value']).to eql('our_custom_security_stage')
expect(sast_search_max_depth['value']).to eql('8')
@@ -50,7 +52,7 @@ RSpec.describe Security::CiConfiguration::SastParserService do
context 'when .gitlab-ci.yml is absent' do
it 'populates the current values with the default values' do
allow(project.repository).to receive(:blob_data_at).and_return(nil)
- expect(secure_analyzers_prefix['value']).to eql('registry.gitlab.com/gitlab-org/security-products/analyzers')
+ expect(secure_analyzers['value']).to eql(secure_analyzers_prefix)
expect(sast_excluded_paths['value']).to eql('spec, test, tests, tmp')
expect(sast_pipeline_stage['value']).to eql('test')
expect(sast_search_max_depth['value']).to eql('4')
@@ -67,7 +69,7 @@ RSpec.describe Security::CiConfiguration::SastParserService do
end
it 'populates the current values with the default values' do
- expect(secure_analyzers_prefix['value']).to eql('registry.gitlab.com/gitlab-org/security-products/analyzers')
+ expect(secure_analyzers['value']).to eql(secure_analyzers_prefix)
expect(sast_excluded_paths['value']).to eql('spec, test, tests, tmp')
expect(sast_pipeline_stage['value']).to eql('test')
expect(sast_search_max_depth['value']).to eql('4')
diff --git a/spec/services/service_ping/build_payload_service_spec.rb b/spec/services/service_ping/build_payload_service_spec.rb
new file mode 100644
index 00000000000..cd2685069c9
--- /dev/null
+++ b/spec/services/service_ping/build_payload_service_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ServicePing::BuildPayloadService do
+ describe '#execute', :without_license do
+ subject(:service_ping_payload) { described_class.new.execute }
+
+ include_context 'stubbed service ping metrics definitions' do
+ let(:subscription_metrics) do
+ [
+ metric_attributes('active_user_count', "Subscription")
+ ]
+ end
+ end
+
+ context 'when usage_ping_enabled setting is false' do
+ before do
+ # Gitlab::CurrentSettings.usage_ping_enabled? == false
+ stub_config_setting(usage_ping_enabled: false)
+ end
+
+ it 'returns empty service ping payload' do
+ expect(service_ping_payload).to eq({})
+ end
+ end
+
+ context 'when usage_ping_enabled setting is true' do
+ before do
+ # Gitlab::CurrentSettings.usage_ping_enabled? == true
+ stub_config_setting(usage_ping_enabled: true)
+ end
+
+ it_behaves_like 'complete service ping payload'
+
+ context 'with require stats consent enabled' do
+ before do
+ allow(User).to receive(:single_user).and_return(double(:user, requires_usage_stats_consent?: true))
+ end
+
+ it 'returns empty service ping payload' do
+ expect(service_ping_payload).to eq({})
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/service_ping/permit_data_categories_service_spec.rb b/spec/services/service_ping/permit_data_categories_service_spec.rb
new file mode 100644
index 00000000000..4fd5c6f9ccb
--- /dev/null
+++ b/spec/services/service_ping/permit_data_categories_service_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ServicePing::PermitDataCategoriesService do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#execute', :without_license do
+ subject(:permitted_categories) { described_class.new.execute }
+
+ context 'when usage ping setting is set to true' do
+ before do
+ allow(User).to receive(:single_user).and_return(double(:user, requires_usage_stats_consent?: false))
+ stub_config_setting(usage_ping_enabled: true)
+ end
+
+ it 'returns all categories' do
+ expect(permitted_categories).to match_array(%w[Standard Subscription Operational Optional])
+ end
+ end
+
+ context 'when usage ping setting is set to false' do
+ before do
+ allow(User).to receive(:single_user).and_return(double(:user, requires_usage_stats_consent?: false))
+ stub_config_setting(usage_ping_enabled: false)
+ end
+
+ it 'returns no categories' do
+ expect(permitted_categories).to match_array([])
+ end
+ end
+
+ context 'when User.single_user&.requires_usage_stats_consent? is required' do
+ before do
+ allow(User).to receive(:single_user).and_return(double(:user, requires_usage_stats_consent?: true))
+ stub_config_setting(usage_ping_enabled: true)
+ end
+
+ it 'returns no categories' do
+ expect(permitted_categories).to match_array([])
+ end
+ end
+ end
+
+ describe '#product_intelligence_enabled?' do
+ where(:usage_ping_enabled, :requires_usage_stats_consent, :expected_product_intelligence_enabled) do
+ # Usage ping enabled
+ true | false | true
+ true | true | false
+
+ # Usage ping disabled
+ false | false | false
+ false | true | false
+ end
+
+ with_them do
+ before do
+ allow(User).to receive(:single_user).and_return(double(:user, requires_usage_stats_consent?: requires_usage_stats_consent))
+ stub_config_setting(usage_ping_enabled: usage_ping_enabled)
+ end
+
+ it 'has the correct product_intelligence_enabled?' do
+ expect(described_class.new.product_intelligence_enabled?).to eq(expected_product_intelligence_enabled)
+ end
+ end
+ end
+end
diff --git a/spec/services/submit_usage_ping_service_spec.rb b/spec/services/service_ping/submit_service_ping_service_spec.rb
index 7133dc35fc3..8a3065e6bc6 100644
--- a/spec/services/submit_usage_ping_service_spec.rb
+++ b/spec/services/service_ping/submit_service_ping_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe SubmitUsagePingService do
+RSpec.describe ServicePing::SubmitService do
include StubRequests
include UsageDataHelpers
@@ -98,6 +98,34 @@ RSpec.describe SubmitUsagePingService do
it_behaves_like 'does not run'
end
+ context 'when product_intelligence_enabled is false' do
+ before do
+ allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |service|
+ allow(service).to receive(:product_intelligence_enabled?).and_return(false)
+ end
+ end
+
+ it_behaves_like 'does not run'
+ end
+
+ context 'when product_intelligence_enabled is true' do
+ before do
+ stub_usage_data_connections
+
+ allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |service|
+ allow(service).to receive(:product_intelligence_enabled?).and_return(true)
+ end
+ end
+
+ it 'generates service ping' do
+ stub_response(body: with_dev_ops_score_params)
+
+ expect(Gitlab::UsageData).to receive(:data).with(force_refresh: true).and_call_original
+
+ subject.execute
+ end
+ end
+
context 'when usage ping is enabled' do
before do
stub_usage_data_connections
@@ -217,11 +245,63 @@ RSpec.describe SubmitUsagePingService do
context 'and usage data is nil' do
before do
+ allow(ServicePing::BuildPayloadService).to receive(:execute).and_return(nil)
allow(Gitlab::UsageData).to receive(:data).and_return(nil)
end
it_behaves_like 'does not send a blank usage ping payload'
end
+
+ context 'if payload service fails' do
+ before do
+ stub_response(body: with_dev_ops_score_params)
+ allow(ServicePing::BuildPayloadService).to receive(:execute).and_raise(described_class::SubmissionError, 'SubmissionError')
+ end
+
+ it 'calls UsageData .data method' do
+ usage_data = build_usage_data
+
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
+
+ subject.execute
+ end
+ end
+
+ context 'calls BuildPayloadService first' do
+ before do
+ stub_response(body: with_dev_ops_score_params)
+ end
+
+ it 'returns usage data' do
+ usage_data = build_usage_data
+
+ expect_next_instance_of(ServicePing::BuildPayloadService) do |service|
+ expect(service).to receive(:execute).and_return(usage_data)
+ end
+
+ subject.execute
+ end
+ end
+
+ context 'if version app response fails' do
+ before do
+ stub_response(body: with_dev_ops_score_params, status: 404)
+
+ usage_data = build_usage_data
+ allow_next_instance_of(ServicePing::BuildPayloadService) do |service|
+ allow(service).to receive(:execute).and_return(usage_data)
+ end
+ end
+
+ it 'calls UsageData .data method' do
+ usage_data = build_usage_data
+
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
+
+ # SubmissionError is raised as a result of 404 in response from HTTP Request
+ expect { subject.execute }.to raise_error(described_class::SubmissionError)
+ end
+ end
end
def stub_response(body:, status: 201)
@@ -232,4 +312,8 @@ RSpec.describe SubmitUsagePingService do
status: status
)
end
+
+ def build_usage_data
+ { uuid: 'uuid', recorded_at: Time.current }
+ end
end
diff --git a/spec/services/snippets/bulk_destroy_service_spec.rb b/spec/services/snippets/bulk_destroy_service_spec.rb
index 8a6250a8b45..2f399d10188 100644
--- a/spec/services/snippets/bulk_destroy_service_spec.rb
+++ b/spec/services/snippets/bulk_destroy_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Snippets::BulkDestroyService do
let_it_be(:project) { create(:project) }
+
let(:user) { create(:user) }
let!(:personal_snippet) { create(:personal_snippet, :repository, author: user) }
let!(:project_snippet) { create(:project_snippet, :repository, project: project, author: user) }
diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb
index eb6e85eb408..0eb73c8edd2 100644
--- a/spec/services/snippets/create_service_spec.rb
+++ b/spec/services/snippets/create_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Snippets::CreateService do
describe '#execute' do
let_it_be(:user) { create(:user) }
let_it_be(:admin) { create(:user, :admin) }
+
let(:action) { :create }
let(:opts) { base_opts.merge(extra_opts) }
let(:base_opts) do
@@ -19,8 +20,9 @@ RSpec.describe Snippets::CreateService do
let(:extra_opts) { {} }
let(:creator) { admin }
+ let(:spam_params) { double }
- subject { described_class.new(project: project, current_user: creator, params: opts).execute }
+ subject { described_class.new(project: project, current_user: creator, params: opts, spam_params: spam_params).execute }
let(:snippet) { subject.payload[:snippet] }
@@ -301,6 +303,10 @@ RSpec.describe Snippets::CreateService do
end
end
+ before do
+ stub_spam_services
+ end
+
context 'when ProjectSnippet' do
let_it_be(:project) { create(:project) }
diff --git a/spec/services/snippets/update_repository_storage_service_spec.rb b/spec/services/snippets/update_repository_storage_service_spec.rb
index 50b28a5a125..fdea3615fb1 100644
--- a/spec/services/snippets/update_repository_storage_service_spec.rb
+++ b/spec/services/snippets/update_repository_storage_service_spec.rb
@@ -75,9 +75,10 @@ RSpec.describe Snippets::UpdateRepositoryStorageService do
.with(snippet.repository.raw)
.and_raise(Gitlab::Git::CommandError)
- result = subject.execute
+ expect do
+ subject.execute
+ end.to raise_error(Gitlab::Git::CommandError)
- expect(result).to be_error
expect(snippet).not_to be_repository_read_only
expect(snippet.repository_storage).to eq('default')
expect(repository_storage_move).to be_failed
@@ -93,9 +94,10 @@ RSpec.describe Snippets::UpdateRepositoryStorageService do
expect(original_snippet_repository_double).to receive(:remove)
.and_raise(Gitlab::Git::CommandError)
- result = subject.execute
+ expect do
+ subject.execute
+ end.to raise_error(Gitlab::Git::CommandError)
- expect(result).to be_error
expect(repository_storage_move).to be_cleanup_failed
end
end
@@ -107,9 +109,10 @@ RSpec.describe Snippets::UpdateRepositoryStorageService do
expect(snippet_repository_double).to receive(:checksum)
.and_return('not matching checksum')
- result = subject.execute
+ expect do
+ subject.execute
+ end.to raise_error(UpdateRepositoryStorageMethods::Error, /Failed to verify snippet repository checksum from \w+ to not matching checksum/)
- expect(result).to be_error
expect(snippet).not_to be_repository_read_only
expect(snippet.repository_storage).to eq('default')
end
diff --git a/spec/services/snippets/update_service_spec.rb b/spec/services/snippets/update_service_spec.rb
index 46bc62e11ef..f61d33e2436 100644
--- a/spec/services/snippets/update_service_spec.rb
+++ b/spec/services/snippets/update_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Snippets::UpdateService do
describe '#execute', :aggregate_failures do
let_it_be(:user) { create(:user) }
let_it_be(:admin) { create :user, admin: true }
+
let(:action) { :update }
let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE }
let(:base_opts) do
@@ -20,7 +21,9 @@ RSpec.describe Snippets::UpdateService do
let(:extra_opts) { {} }
let(:options) { base_opts.merge(extra_opts) }
let(:updater) { user }
- let(:service) { Snippets::UpdateService.new(project: project, current_user: updater, params: options) }
+ let(:spam_params) { double }
+
+ let(:service) { Snippets::UpdateService.new(project: project, current_user: updater, params: options, spam_params: spam_params) }
subject { service.execute(snippet) }
@@ -721,8 +724,13 @@ RSpec.describe Snippets::UpdateService do
end
end
+ before do
+ stub_spam_services
+ end
+
context 'when Project Snippet' do
let_it_be(:project) { create(:project) }
+
let!(:snippet) { create(:project_snippet, :repository, author: user, project: project) }
before do
diff --git a/spec/services/spam/akismet_service_spec.rb b/spec/services/spam/akismet_service_spec.rb
index 1cd049da592..d9f62258a53 100644
--- a/spec/services/spam/akismet_service_spec.rb
+++ b/spec/services/spam/akismet_service_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe Spam::AkismetService do
it_behaves_like 'no activity if Akismet is not enabled', :spam?, :check
context 'if Akismet is enabled' do
- it 'correctly transforms options for the akismet client' do
+ it 'correctly transforms options for the akismet client, including spelling of referrer key' do
expected_check_params = {
type: 'comment',
text: text,
diff --git a/spec/services/spam/ham_service_spec.rb b/spec/services/spam/ham_service_spec.rb
index c947de6cf92..0101a8e7704 100644
--- a/spec/services/spam/ham_service_spec.rb
+++ b/spec/services/spam/ham_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Spam::HamService do
let_it_be(:user) { create(:user) }
+
let!(:spam_log) { create(:spam_log, user: user, submitted_as_ham: false) }
let(:fake_akismet_service) { double(:akismet_service) }
diff --git a/spec/services/spam/spam_action_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index 9ca52b92267..3a92e5acb5a 100644
--- a/spec/services/spam/spam_action_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -5,15 +5,20 @@ require 'spec_helper'
RSpec.describe Spam::SpamActionService do
include_context 'includes Spam constants'
- let(:request) { double(:request, env: env, headers: {}) }
let(:issue) { create(:issue, project: project, author: user) }
let(:fake_ip) { '1.2.3.4' }
let(:fake_user_agent) { 'fake-user-agent' }
let(:fake_referer) { 'fake-http-referer' }
- let(:env) do
- { 'action_dispatch.remote_ip' => fake_ip,
- 'HTTP_USER_AGENT' => fake_user_agent,
- 'HTTP_REFERER' => fake_referer }
+ let(:captcha_response) { 'abc123' }
+ let(:spam_log_id) { existing_spam_log.id }
+ let(:spam_params) do
+ ::Spam::SpamParams.new(
+ captcha_response: captcha_response,
+ spam_log_id: spam_log_id,
+ ip_address: fake_ip,
+ user_agent: fake_user_agent,
+ referer: fake_referer
+ )
end
let_it_be(:project) { create(:project, :public) }
@@ -23,32 +28,33 @@ RSpec.describe Spam::SpamActionService do
issue.spam = false
end
- shared_examples 'only checks for spam if a request is provided' do
- context 'when request is missing' do
- let(:request) { nil }
+ describe 'constructor argument validation' do
+ subject do
+ described_service = described_class.new(spammable: issue, spam_params: spam_params, user: user, action: :create)
+ described_service.execute
+ end
- it "doesn't check as spam" do
- expect(fake_verdict_service).not_to receive(:execute)
+ context 'when spam_params is nil' do
+ let(:spam_params) { nil }
+ let(:expected_service_params_not_present_message) do
+ /Skipped spam check because spam_params was not present/
+ end
+ it "returns success with a message" do
response = subject
- expect(response.message).to match(/request was not present/)
+ expect(response.message).to match(expected_service_params_not_present_message)
expect(issue).not_to be_spam
end
end
-
- context 'when request exists' do
- it 'creates a spam log' do
- expect { subject }
- .to log_spam(title: issue.title, description: issue.description, noteable_type: 'Issue')
- end
- end
end
shared_examples 'creates a spam log' do
it do
- expect { subject }.to change(SpamLog, :count).by(1)
+ expect { subject }
+ .to log_spam(title: issue.title, description: issue.description, noteable_type: 'Issue')
+ # TODO: These checks should be incorporated into the `log_spam` RSpec matcher above
new_spam_log = SpamLog.last
expect(new_spam_log.user_id).to eq(user.id)
expect(new_spam_log.title).to eq(issue.title)
@@ -56,25 +62,14 @@ RSpec.describe Spam::SpamActionService do
expect(new_spam_log.source_ip).to eq(fake_ip)
expect(new_spam_log.user_agent).to eq(fake_user_agent)
expect(new_spam_log.noteable_type).to eq('Issue')
- expect(new_spam_log.via_api).to eq(false)
+ expect(new_spam_log.via_api).to eq(true)
end
end
describe '#execute' do
- let(:request) { double(:request, env: env, headers: nil) }
let(:fake_captcha_verification_service) { double(:captcha_verification_service) }
let(:fake_verdict_service) { double(:spam_verdict_service) }
let(:allowlisted) { false }
- let(:api) { nil }
- let(:captcha_response) { 'abc123' }
- let(:spam_log_id) { existing_spam_log.id }
- let(:spam_params) do
- ::Spam::SpamParams.new(
- api: api,
- captcha_response: captcha_response,
- spam_log_id: spam_log_id
- )
- end
let(:verdict_service_opts) do
{
@@ -88,7 +83,6 @@ RSpec.describe Spam::SpamActionService do
{
target: issue,
user: user,
- request: request,
options: verdict_service_opts,
context: {
action: :create,
@@ -100,40 +94,20 @@ RSpec.describe Spam::SpamActionService do
let_it_be(:existing_spam_log) { create(:spam_log, user: user, recaptcha_verified: false) }
subject do
- described_service = described_class.new(spammable: issue, request: request, user: user, action: :create)
+ described_service = described_class.new(spammable: issue, spam_params: spam_params, user: user, action: :create)
allow(described_service).to receive(:allowlisted?).and_return(allowlisted)
- described_service.execute(spam_params: spam_params)
+ described_service.execute
end
before do
- allow(Captcha::CaptchaVerificationService).to receive(:new) { fake_captcha_verification_service }
+ allow(Captcha::CaptchaVerificationService).to receive(:new).with(spam_params: spam_params) { fake_captcha_verification_service }
allow(Spam::SpamVerdictService).to receive(:new).with(verdict_service_args).and_return(fake_verdict_service)
end
- context 'when the captcha params are passed in the headers' do
- let(:request) { double(:request, env: env, headers: headers) }
- let(:spam_params) { Spam::SpamActionService.filter_spam_params!({ api: api }, request) }
- let(:headers) do
- {
- 'X-GitLab-Captcha-Response' => captcha_response,
- 'X-GitLab-Spam-Log-Id' => spam_log_id
- }
- end
-
- it 'extracts the headers correctly' do
- expect(fake_captcha_verification_service)
- .to receive(:execute).with(captcha_response: captcha_response, request: request).and_return(true)
- expect(SpamLog)
- .to receive(:verify_recaptcha!).with(user_id: user.id, id: spam_log_id)
-
- subject
- end
- end
-
context 'when captcha response verification returns true' do
before do
allow(fake_captcha_verification_service)
- .to receive(:execute).with(captcha_response: captcha_response, request: request).and_return(true)
+ .to receive(:execute).and_return(true)
end
it "doesn't check with the SpamVerdictService" do
@@ -156,7 +130,7 @@ RSpec.describe Spam::SpamActionService do
context 'when captcha response verification returns false' do
before do
allow(fake_captcha_verification_service)
- .to receive(:execute).with(captcha_response: captcha_response, request: request).and_return(false)
+ .to receive(:execute).and_return(false)
end
context 'when spammable attributes have not changed' do
@@ -200,8 +174,6 @@ RSpec.describe Spam::SpamActionService do
stub_feature_flags(allow_possible_spam: false)
end
- it_behaves_like 'only checks for spam if a request is provided'
-
it 'marks as spam' do
response = subject
@@ -211,8 +183,6 @@ RSpec.describe Spam::SpamActionService do
end
context 'when allow_possible_spam feature flag is true' do
- it_behaves_like 'only checks for spam if a request is provided'
-
it 'does not mark as spam' do
response = subject
@@ -232,8 +202,6 @@ RSpec.describe Spam::SpamActionService do
stub_feature_flags(allow_possible_spam: false)
end
- it_behaves_like 'only checks for spam if a request is provided'
-
it 'marks as spam' do
response = subject
@@ -243,8 +211,6 @@ RSpec.describe Spam::SpamActionService do
end
context 'when allow_possible_spam feature flag is true' do
- it_behaves_like 'only checks for spam if a request is provided'
-
it 'does not mark as spam' do
response = subject
@@ -264,8 +230,6 @@ RSpec.describe Spam::SpamActionService do
stub_feature_flags(allow_possible_spam: false)
end
- it_behaves_like 'only checks for spam if a request is provided'
-
it_behaves_like 'creates a spam log'
it 'does not mark as spam' do
@@ -284,8 +248,6 @@ RSpec.describe Spam::SpamActionService do
end
context 'when allow_possible_spam feature flag is true' do
- it_behaves_like 'only checks for spam if a request is provided'
-
it_behaves_like 'creates a spam log'
it 'does not mark as needing reCAPTCHA' do
@@ -334,37 +296,10 @@ RSpec.describe Spam::SpamActionService do
allow(fake_verdict_service).to receive(:execute).and_return(ALLOW)
end
- context 'when the request is nil' do
- let(:request) { nil }
- let(:issue_ip_address) { '1.2.3.4' }
- let(:issue_user_agent) { 'lynx' }
- let(:verdict_service_opts) do
- {
- ip_address: issue_ip_address,
- user_agent: issue_user_agent
- }
- end
-
- before do
- allow(issue).to receive(:ip_address) { issue_ip_address }
- allow(issue).to receive(:user_agent) { issue_user_agent }
- end
-
- it 'assembles the options with information from the spammable' do
- # TODO: This code untestable, because we do not perform a verification if there is not a
- # request. See corresponding comment in code
- # expect(Spam::SpamVerdictService).to receive(:new).with(verdict_service_args)
-
- subject
- end
- end
-
- context 'when the request is present' do
- it 'assembles the options with information from the request' do
- expect(Spam::SpamVerdictService).to receive(:new).with(verdict_service_args)
+ it 'assembles the options with information from the request' do
+ expect(Spam::SpamVerdictService).to receive(:new).with(verdict_service_args)
- subject
- end
+ subject
end
end
end
diff --git a/spec/services/spam/spam_params_spec.rb b/spec/services/spam/spam_params_spec.rb
new file mode 100644
index 00000000000..e7e8b468adb
--- /dev/null
+++ b/spec/services/spam/spam_params_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Spam::SpamParams do
+ describe '.new_from_request' do
+ let(:captcha_response) { 'abc123' }
+ let(:spam_log_id) { 42 }
+ let(:ip_address) { '0.0.0.0' }
+ let(:user_agent) { 'Lynx' }
+ let(:referer) { 'http://localhost' }
+ let(:headers) do
+ {
+ 'X-GitLab-Captcha-Response' => captcha_response,
+ 'X-GitLab-Spam-Log-Id' => spam_log_id
+ }
+ end
+
+ let(:env) do
+ {
+ 'action_dispatch.remote_ip' => ip_address,
+ 'HTTP_USER_AGENT' => user_agent,
+ 'HTTP_REFERER' => referer
+ }
+ end
+
+ let(:request) {double(:request, headers: headers, env: env)}
+
+ it 'constructs from a request' do
+ expected = ::Spam::SpamParams.new(
+ captcha_response: captcha_response,
+ spam_log_id: spam_log_id,
+ ip_address: ip_address,
+ user_agent: user_agent,
+ referer: referer
+ )
+ expect(described_class.new_from_request(request: request)).to eq(expected)
+ end
+ end
+end
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
index 215df81de63..659c21b7d4f 100644
--- a/spec/services/spam/spam_verdict_service_spec.rb
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -14,13 +14,12 @@ RSpec.describe Spam::SpamVerdictService do
'HTTP_REFERER' => fake_referer }
end
- let(:request) { double(:request, env: env) }
-
let(:check_for_spam) { true }
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue, author: user) }
+
let(:service) do
- described_class.new(user: user, target: issue, request: request, options: {})
+ described_class.new(user: user, target: issue, options: {})
end
let(:attribs) do
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 54cef164f1c..e9bd40b058b 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe SystemNoteService do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository, group: group) }
let_it_be(:author) { create(:user) }
+
let(:noteable) { create(:issue, project: project) }
let(:issue) { noteable }
@@ -355,15 +356,15 @@ RSpec.describe SystemNoteService do
let(:issue) { create(:issue, project: project) }
let(:merge_request) { create(:merge_request, :simple, target_project: project, source_project: project) }
let(:jira_issue) { ExternalIssue.new("JIRA-1", project)}
- let(:jira_tracker) { project.jira_service }
+ let(:jira_tracker) { project.jira_integration }
let(:commit) { project.commit }
let(:comment_url) { jira_api_comment_url(jira_issue.id) }
let(:success_message) { "SUCCESS: Successfully posted to http://jira.example.net." }
before do
- stub_jira_service_test
+ stub_jira_integration_test
stub_jira_urls(jira_issue.id)
- jira_service_settings
+ jira_integration_settings
end
def cross_reference(type, link_exists = false)
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index 0eb327ea7f1..1ea3c241d27 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe ::SystemNotes::IssuablesService do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository, group: group) }
let_it_be(:author) { create(:user) }
+
let(:noteable) { create(:issue, project: project) }
let(:issue) { noteable }
@@ -728,7 +729,7 @@ RSpec.describe ::SystemNotes::IssuablesService do
let(:noteable) { ExternalIssue.new('EXT-1234', project) }
it 'is false with issue tracker supporting referencing' do
- create(:jira_service, project: project)
+ create(:jira_integration, project: project)
project.reload
expect(service.cross_reference_disallowed?(noteable)).to be_falsey
diff --git a/spec/services/test_hooks/project_service_spec.rb b/spec/services/test_hooks/project_service_spec.rb
index a87e612e378..cd6284b4a87 100644
--- a/spec/services/test_hooks/project_service_spec.rb
+++ b/spec/services/test_hooks/project_service_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe TestHooks::ProjectService do
describe '#execute' do
let_it_be(:project) { create(:project, :repository) }
+
let(:hook) { create(:project_hook, project: project) }
let(:trigger) { 'not_implemented_events' }
let(:service) { described_class.new(hook, current_user, trigger) }
@@ -163,6 +164,7 @@ RSpec.describe TestHooks::ProjectService do
context 'wiki_page_events' do
let_it_be(:project) { create(:project, :wiki_repo) }
+
let(:trigger) { 'wiki_page_events' }
let(:trigger_key) { :wiki_page_hooks }
diff --git a/spec/services/test_hooks/system_service_spec.rb b/spec/services/test_hooks/system_service_spec.rb
index e500a1057ab..a13ae471b4b 100644
--- a/spec/services/test_hooks/system_service_spec.rb
+++ b/spec/services/test_hooks/system_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe TestHooks::SystemService do
describe '#execute' do
let_it_be(:project) { create(:project, :repository) }
+
let(:hook) { create(:system_hook) }
let(:service) { described_class.new(hook, project.owner, trigger) }
let(:success_result) { { status: :success, http_status: 200, message: 'ok' } }
diff --git a/spec/services/user_project_access_changed_service_spec.rb b/spec/services/user_project_access_changed_service_spec.rb
index 4723619afd2..f8835fefc84 100644
--- a/spec/services/user_project_access_changed_service_spec.rb
+++ b/spec/services/user_project_access_changed_service_spec.rb
@@ -30,6 +30,17 @@ RSpec.describe UserProjectAccessChangedService do
described_class.new([1, 2]).execute(blocking: false,
priority: described_class::LOW_PRIORITY)
end
+
+ it 'sets the current caller_id as related_class in the context of all the enqueued jobs' do
+ Gitlab::ApplicationContext.with_context(caller_id: 'Foo') do
+ described_class.new([1, 2]).execute(blocking: false,
+ priority: described_class::LOW_PRIORITY)
+ end
+
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker.jobs).to all(
+ include(Labkit::Context.log_key(:related_class) => 'Foo')
+ )
+ end
end
context 'with load balancing enabled' do
diff --git a/spec/services/users/approve_service_spec.rb b/spec/services/users/approve_service_spec.rb
index 9999e674c7d..078dde546c9 100644
--- a/spec/services/users/approve_service_spec.rb
+++ b/spec/services/users/approve_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Users::ApproveService do
let_it_be(:current_user) { create(:admin) }
+
let(:user) { create(:user, :blocked_pending_approval) }
subject(:execute) { described_class.new(current_user).execute(user) }
diff --git a/spec/services/users/reject_service_spec.rb b/spec/services/users/reject_service_spec.rb
index b9aaff5cde5..b0094a7c47e 100644
--- a/spec/services/users/reject_service_spec.rb
+++ b/spec/services/users/reject_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Users::RejectService do
let_it_be(:current_user) { create(:admin) }
+
let(:user) { create(:user, :blocked_pending_approval) }
subject(:execute) { described_class.new(current_user).execute(user) }
diff --git a/spec/services/users/validate_otp_service_spec.rb b/spec/services/users/validate_otp_service_spec.rb
index 42f0c10488c..46b80b2149f 100644
--- a/spec/services/users/validate_otp_service_spec.rb
+++ b/spec/services/users/validate_otp_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Users::ValidateOtpService do
let_it_be(:user) { create(:user) }
+
let(:otp_code) { 42 }
subject(:validate) { described_class.new(user).execute(otp_code) }
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index 5f53d6f34d8..f9fa46a4fc8 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -418,19 +418,6 @@ RSpec.describe WebHookService do
described_class.new(other_hook, data, :push_hooks).async_execute
end
end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(web_hooks_rate_limit: false)
- end
-
- it 'queues a worker without tracking the call' do
- expect(Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
- expect_to_perform_worker(project_hook)
-
- service_instance.async_execute
- end
- end
end
context 'when hook has custom context attributes' do
diff --git a/spec/services/wiki_pages/create_service_spec.rb b/spec/services/wiki_pages/create_service_spec.rb
index 44b57088319..fd3776f4207 100644
--- a/spec/services/wiki_pages/create_service_spec.rb
+++ b/spec/services/wiki_pages/create_service_spec.rb
@@ -4,4 +4,24 @@ require 'spec_helper'
RSpec.describe WikiPages::CreateService do
it_behaves_like 'WikiPages::CreateService#execute', :project
+
+ describe '#execute' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:service) { described_class.new(container: project) }
+
+ context 'when wiki create fails due to git error' do
+ let(:wiki_git_error) { 'Could not create wiki page' }
+
+ it 'catches the thrown error and returns a ServiceResponse error' do
+ allow_next_instance_of(WikiPage) do |instance|
+ allow(instance).to receive(:create).and_raise(Gitlab::Git::CommandError.new(wiki_git_error))
+ end
+
+ result = service.execute
+ expect(result).to be_error
+ expect(result.message).to eq(wiki_git_error)
+ end
+ end
+ end
end
diff --git a/spec/services/wiki_pages/event_create_service_spec.rb b/spec/services/wiki_pages/event_create_service_spec.rb
index 974f2591763..6bc6a678189 100644
--- a/spec/services/wiki_pages/event_create_service_spec.rb
+++ b/spec/services/wiki_pages/event_create_service_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe WikiPages::EventCreateService do
describe '#execute' do
let_it_be(:page) { create(:wiki_page, project: project) }
+
let(:slug) { generate(:sluggified_title) }
let(:action) { :created }
let(:fingerprint) { page.sha }
diff --git a/spec/services/wiki_pages/update_service_spec.rb b/spec/services/wiki_pages/update_service_spec.rb
index 33ac98e764d..62881817e32 100644
--- a/spec/services/wiki_pages/update_service_spec.rb
+++ b/spec/services/wiki_pages/update_service_spec.rb
@@ -4,4 +4,26 @@ require 'spec_helper'
RSpec.describe WikiPages::UpdateService do
it_behaves_like 'WikiPages::UpdateService#execute', :project
+
+ describe '#execute' do
+ let_it_be(:project) { create(:project) }
+
+ let(:page) { create(:wiki_page, project: project) }
+
+ subject(:service) { described_class.new(container: project) }
+
+ context 'when wiki create fails due to git error' do
+ let(:wiki_git_error) { 'Could not update wiki page' }
+
+ it 'catches the thrown error and returns a ServiceResponse error' do
+ allow_next_instance_of(WikiPage) do |instance|
+ allow(instance).to receive(:update).and_raise(Gitlab::Git::CommandError.new(wiki_git_error))
+ end
+
+ result = service.execute(page)
+ expect(result).to be_error
+ expect(result.message).to eq(wiki_git_error)
+ end
+ end
+ end
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 31ff619232c..d339ac67810 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -76,9 +76,6 @@ require_relative '../tooling/quality/test_level'
quality_level = Quality::TestLevel.new
RSpec.configure do |config|
- config.filter_run focus: true
- config.run_all_when_everything_filtered = true
-
config.use_transactional_fixtures = true
config.use_instantiated_fixtures = false
config.fixture_path = Rails.root
@@ -113,6 +110,11 @@ RSpec.configure do |config|
end
unless ENV['CI']
+ # Allow running `:focus` examples locally,
+ # falling back to all tests when there is no `:focus` example.
+ config.filter_run focus: true
+ config.run_all_when_everything_filtered = true
+
# Re-run failures locally with `--only-failures`
config.example_status_persistence_file_path = './spec/examples.txt'
end
@@ -190,6 +192,7 @@ RSpec.configure do |config|
config.include RailsHelpers
config.include SidekiqMiddleware
config.include StubActionCableConnection, type: :channel
+ config.include StubSpamServices
include StubFeatureFlags
@@ -230,6 +233,10 @@ RSpec.configure do |config|
Gitlab::Database.set_open_transactions_baseline
end
+ config.append_before do
+ Thread.current[:current_example_group] = ::RSpec.current_example.metadata[:example_group]
+ end
+
config.append_after do
Gitlab::Database.reset_open_transactions_baseline
end
@@ -253,8 +260,9 @@ RSpec.configure do |config|
# tests, until we introduce it in user settings
stub_feature_flags(forti_token_cloud: false)
- # This feature flag is by default disabled and used in disaster recovery mode
- stub_feature_flags(ci_queueing_disaster_recovery: false)
+ # These feature flag are by default disabled and used in disaster recovery mode
+ stub_feature_flags(ci_queueing_disaster_recovery_disable_fair_scheduling: false)
+ stub_feature_flags(ci_queueing_disaster_recovery_disable_quota: false)
enable_rugged = example.metadata[:enable_rugged].present?
@@ -267,7 +275,6 @@ RSpec.configure do |config|
# See https://gitlab.com/gitlab-org/gitlab/-/issues/33867
stub_feature_flags(file_identifier_hash: false)
- stub_feature_flags(unified_diff_components: false)
stub_feature_flags(diffs_virtual_scrolling: false)
# The following `vue_issues_list`/`vue_issuables_list` stubs can be removed
@@ -286,6 +293,8 @@ RSpec.configure do |config|
# As we're ready to change `master` usages to `main`, let's enable it
stub_feature_flags(main_branch_over_master: false)
+ stub_feature_flags(issue_boards_filtered_search: false)
+
# Disable issue respositioning to avoid heavy load on database when importing big projects.
# This is only turned on when app is handling heavy project imports.
# Can be removed when we find a better way to deal with the problem.
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index e48a7b322ac..6f96d552da6 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -60,8 +60,8 @@ Capybara.register_driver :chrome do |app|
# Chrome won't work properly in a Docker container in sandbox mode
options.add_argument("no-sandbox")
- # Run headless by default unless CHROME_HEADLESS specified
- options.add_argument("headless") unless ENV['CHROME_HEADLESS'] =~ /^(false|no|0)$/i
+ # Run headless by default unless WEBDRIVER_HEADLESS specified
+ options.add_argument("headless") unless ENV['WEBDRIVER_HEADLESS'] =~ /^(false|no|0)$/i || ENV['CHROME_HEADLESS'] =~ /^(false|no|0)$/i
# Disable /dev/shm use in CI. See https://gitlab.com/gitlab-org/gitlab/issues/4252
options.add_argument("disable-dev-shm-usage") if ENV['CI'] || ENV['CI_SERVER']
@@ -197,7 +197,7 @@ RSpec.configure do |config|
raise JSConsoleError, message
end
rescue Selenium::WebDriver::Error::WebDriverError => error
- if error.message =~ /unknown command: session\/[0-9a-zA-Z]+(?:\/se)?\/log/
+ if error.message =~ %r{unknown command: session/[0-9a-zA-Z]+(?:/se)?/log}
message = "Unable to access Chrome javascript console logs. You may be using an outdated version of ChromeDriver."
raise JSConsoleError, message
else
diff --git a/spec/support/gitlab_experiment.rb b/spec/support/gitlab_experiment.rb
index b84adf82d29..3d099dc689c 100644
--- a/spec/support/gitlab_experiment.rb
+++ b/spec/support/gitlab_experiment.rb
@@ -4,16 +4,6 @@
require 'gitlab/experiment/rspec'
require_relative 'stub_snowplow'
-# This is a temporary fix until we have a larger discussion around the
-# challenges raised in https://gitlab.com/gitlab-org/gitlab/-/issues/300104
-require Rails.root.join('app', 'experiments', 'application_experiment')
-class ApplicationExperiment # rubocop:disable Gitlab/NamespacedClass
- def initialize(...)
- super(...)
- Feature.persist_used!(feature_flag_name)
- end
-end
-
RSpec.configure do |config|
config.include StubSnowplow, :experiment
diff --git a/spec/support/helpers/ci/template_helpers.rb b/spec/support/helpers/ci/template_helpers.rb
new file mode 100644
index 00000000000..7bab58a574e
--- /dev/null
+++ b/spec/support/helpers/ci/template_helpers.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Ci
+ module TemplateHelpers
+ def secure_analyzers_prefix
+ 'registry.gitlab.com/gitlab-org/security-products/analyzers'
+ end
+ end
+end
+
+Ci::TemplateHelpers.prepend_mod
diff --git a/spec/support/helpers/cycle_analytics_helpers.rb b/spec/support/helpers/cycle_analytics_helpers.rb
index 4515b96c79e..e48c8125d84 100644
--- a/spec/support/helpers/cycle_analytics_helpers.rb
+++ b/spec/support/helpers/cycle_analytics_helpers.rb
@@ -12,9 +12,7 @@ module CycleAnalyticsHelpers
page.all('.gl-path-button').collect(&:text).map {|name_with_median| name_with_median.split("\n")[0] }
end
- def add_custom_stage_to_form
- page.find_button(s_('CreateValueStreamForm|Add another stage')).click
-
+ def fill_in_custom_stage_fields
index = page.all('[data-testid="value-stream-stage-fields"]').length
last_stage = page.all('[data-testid="value-stream-stage-fields"]').last
@@ -25,6 +23,12 @@ module CycleAnalyticsHelpers
end
end
+ def add_custom_stage_to_form
+ page.find_button(s_('CreateValueStreamForm|Add another stage')).click
+
+ fill_in_custom_stage_fields
+ end
+
def save_value_stream(custom_value_stream_name)
fill_in 'create-value-stream-name', with: custom_value_stream_name
diff --git a/spec/support/helpers/database/table_schema_helpers.rb b/spec/support/helpers/database/table_schema_helpers.rb
index 48d33442110..472eaa45b4b 100644
--- a/spec/support/helpers/database/table_schema_helpers.rb
+++ b/spec/support/helpers/database/table_schema_helpers.rb
@@ -43,6 +43,14 @@ module Database
expect(index_exists_by_name(name, schema: schema)).to be_nil
end
+ def expect_foreign_key_to_exist(table_name, name, schema: nil)
+ expect(foreign_key_exists_by_name(table_name, name, schema: schema)).to eq(true)
+ end
+
+ def expect_foreign_key_not_to_exist(table_name, name, schema: nil)
+ expect(foreign_key_exists_by_name(table_name, name, schema: schema)).to be_nil
+ end
+
def expect_check_constraint(table_name, name, definition, schema: nil)
expect(check_constraint_definition(table_name, name, schema: schema)).to eq("CHECK ((#{definition}))")
end
@@ -133,6 +141,18 @@ module Database
SQL
end
+ def foreign_key_exists_by_name(table_name, foreign_key_name, schema: nil)
+ table_name = schema ? "#{schema}.#{table_name}" : table_name
+
+ connection.select_value(<<~SQL)
+ SELECT true
+ FROM pg_catalog.pg_constraint
+ WHERE pg_constraint.conrelid = '#{table_name}'::regclass
+ AND pg_constraint.contype = 'f'
+ AND pg_constraint.conname = '#{foreign_key_name}'
+ SQL
+ end
+
def check_constraint_definition(table_name, constraint_name, schema: nil)
table_name = schema ? "#{schema}.#{table_name}" : table_name
diff --git a/spec/support/helpers/feature_flag_helpers.rb b/spec/support/helpers/feature_flag_helpers.rb
index af7a674f3bc..51ba9039b70 100644
--- a/spec/support/helpers/feature_flag_helpers.rb
+++ b/spec/support/helpers/feature_flag_helpers.rb
@@ -14,6 +14,12 @@ module FeatureFlagHelpers
strategies: strategies)
end
+ def create_strategy(feature_flag, name = 'default', parameters = {})
+ create(:operations_strategy,
+ feature_flag: feature_flag,
+ name: name)
+ end
+
def within_feature_flag_row(index)
within ".gl-responsive-table-row:nth-child(#{index + 1})" do
yield
diff --git a/spec/support/helpers/features/admin_users_helpers.rb b/spec/support/helpers/features/admin_users_helpers.rb
new file mode 100644
index 00000000000..99b19eedcff
--- /dev/null
+++ b/spec/support/helpers/features/admin_users_helpers.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Spec
+ module Support
+ module Helpers
+ module Features
+ module AdminUsersHelpers
+ def click_user_dropdown_toggle(user_id)
+ page.within("[data-testid='user-actions-#{user_id}']") do
+ find("[data-testid='dropdown-toggle']").click
+ end
+ end
+
+ def click_action_in_user_dropdown(user_id, action)
+ click_user_dropdown_toggle(user_id)
+
+ within find("[data-testid='user-actions-#{user_id}']") do
+ find('li button', exact_text: action).click
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/helpers/features/invite_members_modal_helper.rb b/spec/support/helpers/features/invite_members_modal_helper.rb
index 1127c817656..7b8cd6963c0 100644
--- a/spec/support/helpers/features/invite_members_modal_helper.rb
+++ b/spec/support/helpers/features/invite_members_modal_helper.rb
@@ -9,7 +9,7 @@ module Spec
click_on 'Invite members'
page.within '#invite-members-modal' do
- fill_in 'Select members or type email addresses', with: name
+ find('[data-testid="members-token-select-input"]').set(name)
wait_for_requests
click_button name
diff --git a/spec/support/helpers/features/snippet_helpers.rb b/spec/support/helpers/features/snippet_helpers.rb
index c26849a9680..dc718b1b212 100644
--- a/spec/support/helpers/features/snippet_helpers.rb
+++ b/spec/support/helpers/features/snippet_helpers.rb
@@ -1,14 +1,17 @@
# frozen_string_literal: true
-# These helpers help you interact within the Editor Lite (single-file editor, snippets, etc.).
+# These helpers help you interact within the Source Editor (single-file editor, snippets, etc.).
#
+
+require Rails.root.join("spec/support/helpers/features/source_editor_spec_helpers.rb")
+
module Spec
module Support
module Helpers
module Features
module SnippetSpecHelpers
include ActionView::Helpers::JavaScriptHelper
- include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
+ include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
def snippet_description_locator
'snippet-description'
@@ -31,7 +34,7 @@ module Spec
end
def snippet_get_first_blob_value
- page.find('.gl-editor-lite', match: :first)
+ page.find('.gl-source-editor', match: :first)
end
def snippet_description_value
@@ -53,7 +56,7 @@ module Spec
end
def snippet_fill_in_content(value)
- page.within('.gl-editor-lite') do
+ page.within('.gl-source-editor') do
el = find('.inputarea')
el.send_keys value
end
diff --git a/spec/support/helpers/features/editor_lite_spec_helpers.rb b/spec/support/helpers/features/source_editor_spec_helpers.rb
index 0a67e753379..57057b47fbb 100644
--- a/spec/support/helpers/features/editor_lite_spec_helpers.rb
+++ b/spec/support/helpers/features/source_editor_spec_helpers.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-# These helpers help you interact within the Editor Lite (single-file editor, snippets, etc.).
+# These helpers help you interact within the Source Editor (single-file editor, snippets, etc.).
#
module Spec
module Support
module Helpers
module Features
- module EditorLiteSpecHelpers
+ module SourceEditorSpecHelpers
include ActionView::Helpers::JavaScriptHelper
def editor_set_value(value)
diff --git a/spec/support/helpers/features/top_nav_spec_helpers.rb b/spec/support/helpers/features/top_nav_spec_helpers.rb
index ab664ce4283..87ed897ec74 100644
--- a/spec/support/helpers/features/top_nav_spec_helpers.rb
+++ b/spec/support/helpers/features/top_nav_spec_helpers.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-# These helpers help you interact within the Editor Lite (single-file editor, snippets, etc.).
+# These helpers help you interact within the Source Editor (single-file editor, snippets, etc.).
#
module Spec
module Support
diff --git a/spec/support/helpers/grafana_api_helpers.rb b/spec/support/helpers/grafana_api_helpers.rb
index e47b1a808f2..7a7b6fec5b4 100644
--- a/spec/support/helpers/grafana_api_helpers.rb
+++ b/spec/support/helpers/grafana_api_helpers.rb
@@ -31,7 +31,7 @@ module GrafanaApiHelpers
end
def stub_all_grafana_proxy_requests(base_url)
- stub_request(:any, /#{base_url}\/api\/datasources\/proxy/)
+ stub_request(:any, %r{#{base_url}/api/datasources/proxy})
.to_return(
status: 200,
body: fixture_file('grafana/proxy_response.json'),
diff --git a/spec/support/helpers/javascript_fixtures_helpers.rb b/spec/support/helpers/javascript_fixtures_helpers.rb
index 8fd8a548011..4c90b907d2d 100644
--- a/spec/support/helpers/javascript_fixtures_helpers.rb
+++ b/spec/support/helpers/javascript_fixtures_helpers.rb
@@ -43,12 +43,14 @@ module JavaScriptFixturesHelpers
# Public: Reads a GraphQL query from the filesystem as a string
#
# query_path - file path to the GraphQL query, relative to `app/assets/javascripts`
- # fragment_paths - an optional array of file paths to any fragments the query uses,
- # also relative to `app/assets/javascripts`
- def get_graphql_query_as_string(query_path, fragment_paths = [])
- [query_path, *fragment_paths].map do |path|
- File.read(File.join(Rails.root, '/app/assets/javascripts', path))
- end.join("\n")
+ def get_graphql_query_as_string(query_path)
+ path = Rails.root / 'app/assets/javascripts' / query_path
+ queries = Gitlab::Graphql::Queries.find(path)
+ if queries.length == 1
+ queries.first.text(mode: Gitlab.ee? ? :ee : :ce )
+ else
+ raise "Could not find query file at #{path}, please check your query_path" % path
+ end
end
private
diff --git a/spec/support/helpers/jira_service_helper.rb b/spec/support/helpers/jira_service_helper.rb
index ce908d53f88..3cfd0de06e8 100644
--- a/spec/support/helpers/jira_service_helper.rb
+++ b/spec/support/helpers/jira_service_helper.rb
@@ -4,7 +4,7 @@ module JiraServiceHelper
JIRA_URL = "http://jira.example.net"
JIRA_API = JIRA_URL + "/rest/api/2"
- def jira_service_settings
+ def jira_integration_settings
url = JIRA_URL
username = 'jira-user'
password = 'my-secret-password'
@@ -77,7 +77,7 @@ module JiraServiceHelper
JIRA_API + "/issue/#{issue_id}"
end
- def stub_jira_service_test
+ def stub_jira_integration_test
WebMock.stub_request(:get, /serverInfo/).to_return(body: { url: 'http://url' }.to_json)
end
diff --git a/spec/support/helpers/live_debugger.rb b/spec/support/helpers/live_debugger.rb
index cdb068760f4..f4199d518a3 100644
--- a/spec/support/helpers/live_debugger.rb
+++ b/spec/support/helpers/live_debugger.rb
@@ -7,8 +7,8 @@ module LiveDebugger
puts
puts "Current example is paused for live debugging."
- if ENV['CHROME_HEADLESS'] =~ /^(false|no|0)$/i
- puts "Switch to the Chrome window that was automatically opened to run the test in order to view current page"
+ if is_headless_disabled?
+ puts "Switch to the browser window that was automatically opened to run the test in order to view current page"
else
puts "Opening #{current_url} in your default browser..."
end
@@ -16,10 +16,16 @@ module LiveDebugger
puts "The current user credentials are: #{@current_user.username} / #{@current_user.password}" if @current_user
puts "Press any key to resume the execution of the example!!"
- `open #{current_url}` if ENV['CHROME_HEADLESS'] !~ /^(false|no|0)$/i
+ `open #{current_url}` if is_headless_disabled?
loop until $stdin.getch
puts "Back to the example!"
end
+
+ def is_headless_disabled?
+ ActiveSupport::Deprecation.warn("CHROME_HEADLESS is deprecated. Use WEBDRIVER_HEADLESS instead.") if ENV.key?('CHROME_HEADLESS')
+
+ ENV['WEBDRIVER_HEADLESS'] =~ /^(false|no|0)$/i || ENV['CHROME_HEADLESS'] =~ /^(false|no|0)$/i
+ end
end
diff --git a/spec/support/helpers/merge_request_diff_helpers.rb b/spec/support/helpers/merge_request_diff_helpers.rb
index 49beecc6d4b..30afde7efed 100644
--- a/spec/support/helpers/merge_request_diff_helpers.rb
+++ b/spec/support/helpers/merge_request_diff_helpers.rb
@@ -3,8 +3,8 @@
module MergeRequestDiffHelpers
def click_diff_line(line_holder, diff_side = nil)
line = get_line_components(line_holder, diff_side)
- line[:content].hover
- line[:num].find('.js-add-diff-note-button', visible: false).send_keys(:return)
+ line_holder.hover
+ line[:num].find('.js-add-diff-note-button').click
end
def get_line_components(line_holder, diff_side = nil)
diff --git a/spec/support/helpers/require_migration.rb b/spec/support/helpers/require_migration.rb
index 8de71d3073f..de3a8a81ab5 100644
--- a/spec/support/helpers/require_migration.rb
+++ b/spec/support/helpers/require_migration.rb
@@ -15,7 +15,7 @@ class RequireMigration
end
MIGRATION_FOLDERS = %w[db/migrate db/post_migrate].freeze
- SPEC_FILE_PATTERN = /.+\/(?<file_name>.+)_spec\.rb/.freeze
+ SPEC_FILE_PATTERN = %r{.+/(?<file_name>.+)_spec\.rb}.freeze
class << self
def require_migration!(file_name)
@@ -29,7 +29,7 @@ class RequireMigration
migration_folders.flat_map do |path|
migration_path = Rails.root.join(path).to_s
- Find.find(migration_path).grep(/\d+_#{file_name}\.rb/)
+ Find.find(migration_path).select { |m| File.basename(m).match? /\A\d+_#{file_name}\.rb\z/ }
end
end
diff --git a/spec/support/helpers/services_helper.rb b/spec/support/helpers/services_helper.rb
deleted file mode 100644
index bf007815551..00000000000
--- a/spec/support/helpers/services_helper.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-require_relative './after_next_helpers'
-
-module ServicesHelper
- include AfterNextHelpers
-
- def expect_execution_of(service_class, *args)
- expect_next(service_class, *args).to receive(:execute)
- end
-end
diff --git a/spec/support/helpers/stub_experiments.rb b/spec/support/helpers/stub_experiments.rb
index 408d16a7c08..8995b8f5f7b 100644
--- a/spec/support/helpers/stub_experiments.rb
+++ b/spec/support/helpers/stub_experiments.rb
@@ -11,7 +11,6 @@ module StubExperiments
allow(Gitlab::Experimentation).to receive(:active?).and_call_original
experiments.each do |experiment_key, enabled|
- Feature.persist_used!("#{experiment_key}#{feature_flag_suffix}")
allow(Gitlab::Experimentation).to receive(:active?).with(experiment_key) { enabled }
end
end
@@ -26,7 +25,6 @@ module StubExperiments
allow(Gitlab::Experimentation).to receive(:in_experiment_group?).and_call_original
experiments.each do |experiment_key, enabled|
- Feature.persist_used!("#{experiment_key}#{feature_flag_suffix}")
allow(Gitlab::Experimentation).to receive(:in_experiment_group?).with(experiment_key, anything) { enabled }
end
end
diff --git a/spec/support/helpers/stub_spam_services.rb b/spec/support/helpers/stub_spam_services.rb
new file mode 100644
index 00000000000..841e8366845
--- /dev/null
+++ b/spec/support/helpers/stub_spam_services.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module StubSpamServices
+ def stub_spam_services
+ allow(::Spam::SpamParams).to receive(:new_from_request) do
+ ::Spam::SpamParams.new(
+ captcha_response: double(:captcha_response),
+ spam_log_id: double(:spam_log_id),
+ ip_address: double(:ip_address),
+ user_agent: double(:user_agent),
+ referer: double(:referer)
+ )
+ end
+
+ allow_next_instance_of(::Spam::SpamActionService) do |service|
+ allow(service).to receive(:execute)
+ end
+
+ allow_next_instance_of(::UserAgentDetailService) do |service|
+ allow(service).to receive(:create)
+ end
+ end
+end
diff --git a/spec/support/helpers/stubbed_feature.rb b/spec/support/helpers/stubbed_feature.rb
index 67ceb7d9b35..4113a28182b 100644
--- a/spec/support/helpers/stubbed_feature.rb
+++ b/spec/support/helpers/stubbed_feature.rb
@@ -4,14 +4,6 @@
module StubbedFeature
extend ActiveSupport::Concern
- prepended do
- cattr_reader(:persist_used) do
- # persist feature flags in CI
- # nil: indicates that we do not want to persist used feature flags
- Gitlab::Utils.to_boolean(ENV['CI']) ? {} : nil
- end
- end
-
class_methods do
# Turn stubbed feature flags on or off.
def stub=(stub)
@@ -41,8 +33,6 @@ module StubbedFeature
feature_flag = super
return feature_flag unless stub?
- persist_used!(args.first)
-
# If feature flag is not persisted we mark the feature flag as enabled
# We do `m.call` as we want to validate the execution of method arguments
# and a feature flag state if it is not persisted
@@ -52,17 +42,5 @@ module StubbedFeature
feature_flag
end
-
- # This method creates a temporary file in `tmp/feature_flags`
- # if feature flag was touched during execution
- def persist_used!(name)
- return unless persist_used
- return if persist_used[name]
-
- persist_used[name] = true
- FileUtils.touch(
- Rails.root.join('tmp', 'feature_flags', name.to_s + ".used")
- )
- end
end
end
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 40a3dbfbf25..8814d260fb3 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -1,7 +1,8 @@
# frozen_string_literal: true
+require 'parallel'
+
module TestEnv
- extend ActiveSupport::Concern
extend self
ComponentFailedToInstallError = Class.new(StandardError)
@@ -94,50 +95,40 @@ module TestEnv
TMP_TEST_PATH = Rails.root.join('tmp', 'tests').freeze
REPOS_STORAGE = 'default'
SECOND_STORAGE_PATH = Rails.root.join('tmp', 'tests', 'second_storage')
+ SETUP_METHODS = %i[setup_gitaly setup_gitlab_shell setup_workhorse setup_factory_repo setup_forked_repo].freeze
+
+  # Can be overridden
+ def setup_methods
+ SETUP_METHODS
+ end
# Test environment
#
# See gitlab.yml.example test section for paths
#
- def init(opts = {})
+ def init
unless Rails.env.test?
puts "\nTestEnv.init can only be run if `RAILS_ENV` is set to 'test' not '#{Rails.env}'!\n"
exit 1
end
+ start = Time.now
# Disable mailer for spinach tests
- disable_mailer if opts[:mailer] == false
-
clean_test_path
- setup_gitlab_shell
-
- setup_gitaly
-
- # Feature specs are run through Workhorse
- setup_workhorse
-
- # Create repository for FactoryBot.create(:project)
- setup_factory_repo
-
- # Create repository for FactoryBot.create(:forked_project_with_submodules)
- setup_forked_repo
- end
-
- included do |config|
- config.append_before do
- set_current_example_group
+ # Install components in parallel as most of the setup is I/O.
+ Parallel.each(setup_methods) do |method|
+ public_send(method)
end
- end
- def disable_mailer
- allow_any_instance_of(NotificationService).to receive(:mailer)
- .and_return(double.as_null_object)
+ post_init
+
+ puts "\nTest environment set up in #{Time.now - start} seconds"
end
- def enable_mailer
- allow_any_instance_of(NotificationService).to receive(:mailer)
- .and_call_original
+  # Can be overridden
+ def post_init
+ start_gitaly(gitaly_dir)
end
# Clean /tmp/tests
@@ -164,12 +155,11 @@ module TestEnv
end
def setup_gitaly
- install_gitaly_args = [gitaly_dir, repos_path, gitaly_url].compact.join(',')
-
component_timed_setup('Gitaly',
install_dir: gitaly_dir,
version: Gitlab::GitalyClient.expected_server_version,
- task: "gitlab:gitaly:install[#{install_gitaly_args}]") do
+ task: "gitlab:gitaly:install",
+ task_args: [gitaly_dir, repos_path, gitaly_url].compact) do
Gitlab::SetupHelper::Gitaly.create_configuration(
gitaly_dir,
{ 'default' => repos_path },
@@ -190,8 +180,6 @@ module TestEnv
)
Gitlab::SetupHelper::Praefect.create_configuration(gitaly_dir, { 'praefect' => repos_path }, force: true)
end
-
- start_gitaly(gitaly_dir)
end
def gitaly_socket_path
@@ -273,19 +261,18 @@ module TestEnv
raise "could not connect to #{service} at #{socket.inspect} after #{sleep_time} seconds"
end
+ # Feature specs are run through Workhorse
def setup_workhorse
start = Time.now
return if skip_compile_workhorse?
- puts "\n==> Setting up GitLab Workhorse..."
-
FileUtils.rm_rf(workhorse_dir)
Gitlab::SetupHelper::Workhorse.compile_into(workhorse_dir)
Gitlab::SetupHelper::Workhorse.create_configuration(workhorse_dir, nil)
File.write(workhorse_tree_file, workhorse_tree) if workhorse_source_clean?
- puts " GitLab Workhorse set up in #{Time.now - start} seconds...\n"
+ puts "==> GitLab Workhorse set up in #{Time.now - start} seconds...\n"
end
def skip_compile_workhorse?
@@ -349,10 +336,12 @@ module TestEnv
ENV.fetch('GITLAB_WORKHORSE_URL', nil)
end
+ # Create repository for FactoryBot.create(:project)
def setup_factory_repo
setup_repo(factory_repo_path, factory_repo_path_bare, factory_repo_name, BRANCH_SHA)
end
+ # Create repository for FactoryBot.create(:forked_project_with_submodules)
# This repo has a submodule commit that is not present in the main test
# repository.
def setup_forked_repo
@@ -363,20 +352,18 @@ module TestEnv
clone_url = "https://gitlab.com/gitlab-org/#{repo_name}.git"
unless File.directory?(repo_path)
- puts "\n==> Setting up #{repo_name} repository in #{repo_path}..."
start = Time.now
system(*%W(#{Gitlab.config.git.bin_path} clone --quiet -- #{clone_url} #{repo_path}))
- puts " #{repo_path} set up in #{Time.now - start} seconds...\n"
+ puts "==> #{repo_path} set up in #{Time.now - start} seconds...\n"
end
set_repo_refs(repo_path, refs)
unless File.directory?(repo_path_bare)
- puts "\n==> Setting up #{repo_name} bare repository in #{repo_path_bare}..."
start = Time.now
# We must copy bare repositories because we will push to them.
system(git_env, *%W(#{Gitlab.config.git.bin_path} clone --quiet --bare -- #{repo_path} #{repo_path_bare}))
- puts " #{repo_path_bare} set up in #{Time.now - start} seconds...\n"
+ puts "==> #{repo_path_bare} set up in #{Time.now - start} seconds...\n"
end
end
@@ -468,10 +455,6 @@ module TestEnv
private
- def set_current_example_group
- Thread.current[:current_example_group] = ::RSpec.current_example.metadata[:example_group]
- end
-
# These are directories that should be preserved at cleanup time
def test_dirs
@test_dirs ||= %w[
@@ -526,7 +509,7 @@ module TestEnv
end
end
- def component_timed_setup(component, install_dir:, version:, task:)
+ def component_timed_setup(component, install_dir:, version:, task:, task_args: [])
start = Time.now
ensure_component_dir_name_is_correct!(component, install_dir)
@@ -535,17 +518,22 @@ module TestEnv
return if File.exist?(install_dir) && ci?
if component_needs_update?(install_dir, version)
- puts "\n==> Setting up #{component}..."
# Cleanup the component entirely to ensure we start fresh
FileUtils.rm_rf(install_dir)
- unless system('rake', task)
- raise ComponentFailedToInstallError
+ if ENV['SKIP_RAILS_ENV_IN_RAKE']
+ # When we run `scripts/setup-test-env`, we take care of loading the necessary dependencies
+ # so we can run the rake task programmatically.
+ Rake::Task[task].invoke(*task_args)
+ else
+ # In other cases, we run the task via `rake` so that the environment
+ # and dependencies are automatically loaded.
+ raise ComponentFailedToInstallError unless system('rake', "#{task}[#{task_args.join(',')}]")
end
yield if block_given?
- puts " #{component} set up in #{Time.now - start} seconds...\n"
+ puts "==> #{component} set up in #{Time.now - start} seconds...\n"
end
rescue ComponentFailedToInstallError
puts "\n#{component} failed to install, cleaning up #{install_dir}!\n"
diff --git a/spec/support/matchers/be_executed.rb b/spec/support/matchers/be_executed.rb
new file mode 100644
index 00000000000..fea86386755
--- /dev/null
+++ b/spec/support/matchers/be_executed.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+# named as `get_executed` to avoid clashing
+# with `be_executed === have_attributes(executed: true)`
+RSpec::Matchers.define :get_executed do |args = []|
+ include AfterNextHelpers
+
+ match do |service_class|
+ expect_next(service_class, *args).to receive(:execute)
+ end
+end
diff --git a/spec/support/matchers/have_issuable_counts.rb b/spec/support/matchers/have_issuable_counts.rb
index 049cfc022fb..586ba0651dc 100644
--- a/spec/support/matchers/have_issuable_counts.rb
+++ b/spec/support/matchers/have_issuable_counts.rb
@@ -6,7 +6,7 @@ RSpec::Matchers.define :have_issuable_counts do |opts|
end
match do |actual|
- actual.within '.issues-state-filters' do
+ actual.within '.top-area' do
expected_counts.each do |expected_count|
expect(actual).to have_content(expected_count)
end
diff --git a/spec/support/matchers/usage_metric_matchers.rb b/spec/support/matchers/usage_metric_matchers.rb
new file mode 100644
index 00000000000..83433334e8b
--- /dev/null
+++ b/spec/support/matchers/usage_metric_matchers.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+RSpec::Matchers.define :have_usage_metric do |key_path|
+ match do |payload|
+ payload = payload.deep_stringify_keys
+
+ key_path.split('.').each do |part|
+ break false unless payload&.has_key?(part)
+
+ payload = payload[part]
+ end
+ end
+
+ failure_message do
+ "Payload does not contain metric with key path: '#{key_path}'"
+ end
+
+ failure_message_when_negated do
+ "Payload contains restricted metric with key path: '#{key_path}'"
+ end
+end
diff --git a/spec/support/omniauth_strategy.rb b/spec/support/omniauth_strategy.rb
index 23907b8e450..5d5ee7dc1db 100644
--- a/spec/support/omniauth_strategy.rb
+++ b/spec/support/omniauth_strategy.rb
@@ -6,12 +6,6 @@ module StrategyHelpers
include Shoulda::Matchers::ActionController
include OmniAuth::Test::StrategyTestCase
- def post(*args)
- super.tap do
- @response = ActionDispatch::TestResponse.from_response(last_response)
- end
- end
-
def auth_hash
last_request.env['omniauth.auth']
end
@@ -21,7 +15,9 @@ module StrategyHelpers
original_on_failure = OmniAuth.config.on_failure
OmniAuth.config.test_mode = false
- OmniAuth.config.on_failure = OmniAuth::FailureEndpoint
+ OmniAuth.config.on_failure = proc do |env|
+ OmniAuth::FailureEndpoint.new(env).redirect_to_failure
+ end
yield
ensure
@@ -33,7 +29,7 @@ end
RSpec.configure do |config|
config.include StrategyHelpers, type: :strategy
- config.around(:all, type: :strategy) do |example|
+ config.around(type: :strategy) do |example|
StrategyHelpers.without_test_mode do
example.run
end
diff --git a/spec/support/redis/redis_helpers.rb b/spec/support/redis/redis_helpers.rb
index b8118bf94cc..3511d906203 100644
--- a/spec/support/redis/redis_helpers.rb
+++ b/spec/support/redis/redis_helpers.rb
@@ -5,21 +5,21 @@ module RedisHelpers
# Usage: performance enhancement
def redis_cache_cleanup!
- Gitlab::Redis::Cache.with(&:flushall)
+ Gitlab::Redis::Cache.with(&:flushdb)
end
# Usage: SideKiq, Mailroom, CI Runner, Workhorse, push services
def redis_queues_cleanup!
- Gitlab::Redis::Queues.with(&:flushall)
+ Gitlab::Redis::Queues.with(&:flushdb)
end
# Usage: session state, rate limiting
def redis_shared_state_cleanup!
- Gitlab::Redis::SharedState.with(&:flushall)
+ Gitlab::Redis::SharedState.with(&:flushdb)
end
# Usage: CI trace chunks
def redis_trace_chunks_cleanup!
- Gitlab::Redis::TraceChunks.with(&:flushall)
+ Gitlab::Redis::TraceChunks.with(&:flushdb)
end
end
diff --git a/spec/support/services/issuable_import_csv_service_shared_examples.rb b/spec/support/services/issuable_import_csv_service_shared_examples.rb
index f68750bec32..07118198969 100644
--- a/spec/support/services/issuable_import_csv_service_shared_examples.rb
+++ b/spec/support/services/issuable_import_csv_service_shared_examples.rb
@@ -67,10 +67,7 @@ RSpec.shared_examples 'issuable import csv service' do |issuable_type|
it 'correctly sets the issuable attributes' do
expect { subject }.to change { issuables.count }.by 4
- expect(issuables.reload.last).to have_attributes(
- title: 'Test Title',
- description: 'Test Description'
- )
+ expect(issuables.reload).to include(have_attributes({ title: 'Test Title', description: 'Test Description' }))
end
it_behaves_like 'importer with email notification'
@@ -89,10 +86,7 @@ RSpec.shared_examples 'issuable import csv service' do |issuable_type|
it 'correctly sets the issuable attributes' do
expect { subject }.to change { issuables.count }.by 3
- expect(issuables.reload.last).to have_attributes(
- title: 'Title with quote"',
- description: 'Description'
- )
+ expect(issuables.reload).to include(have_attributes(title: 'Title with quote"', description: 'Description'))
end
it_behaves_like 'importer with email notification'
@@ -111,10 +105,7 @@ RSpec.shared_examples 'issuable import csv service' do |issuable_type|
it 'correctly sets the issuable attributes' do
expect { subject }.to change { issuables.count }.by 2
- expect(issuables.reload.last).to have_attributes(
- title: 'Hello',
- description: 'World'
- )
+ expect(issuables.reload).to include(have_attributes(title: 'Hello', description: 'World'))
end
it_behaves_like 'importer with email notification'
@@ -133,10 +124,7 @@ RSpec.shared_examples 'issuable import csv service' do |issuable_type|
it 'correctly sets the issuable attributes' do
expect { subject }.to change { issuables.count }.by 3
- expect(issuables.reload.last).to have_attributes(
- title: 'Hello',
- description: 'World'
- )
+ expect(issuables.reload).to include(have_attributes(title: 'Hello', description: 'World'))
end
it_behaves_like 'importer with email notification'
diff --git a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
index e532b42fd1c..3d2b0433b21 100644
--- a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
+++ b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
@@ -1,36 +1,38 @@
# frozen_string_literal: true
-Integration.available_services_names.each do |service|
- RSpec.shared_context service do
- include JiraServiceHelper if service == 'jira'
+Integration.available_integration_names.each do |integration|
+ RSpec.shared_context integration do
+ include JiraServiceHelper if integration == 'jira'
- let(:dashed_service) { service.dasherize }
- let(:service_method) { Project.integration_association_name(service) }
- let(:service_klass) { Integration.integration_name_to_model(service) }
- let(:service_instance) { service_klass.new }
- let(:service_fields) { service_instance.fields }
- let(:service_attrs_list) { service_fields.inject([]) {|arr, hash| arr << hash[:name].to_sym } }
- let(:service_attrs) do
- service_attrs_list.inject({}) do |hash, k|
+ let(:dashed_integration) { integration.dasherize }
+ let(:integration_method) { Project.integration_association_name(integration) }
+ let(:integration_klass) { Integration.integration_name_to_model(integration) }
+ let(:integration_instance) { integration_klass.new }
+ let(:integration_fields) { integration_instance.fields }
+ let(:integration_attrs_list) { integration_fields.inject([]) {|arr, hash| arr << hash[:name].to_sym } }
+ let(:integration_attrs) do
+ integration_attrs_list.inject({}) do |hash, k|
if k =~ /^(token*|.*_token|.*_key)/
hash.merge!(k => 'secrettoken')
- elsif service == 'confluence' && k == :confluence_url
+ elsif integration == 'confluence' && k == :confluence_url
hash.merge!(k => 'https://example.atlassian.net/wiki')
- elsif service == 'datadog' && k == :datadog_site
+ elsif integration == 'datadog' && k == :datadog_site
hash.merge!(k => 'datadoghq.com')
+ elsif integration == 'packagist' && k == :server
+ hash.merge!(k => 'https://packagist.example.com')
elsif k =~ /^(.*_url|url|webhook)/
hash.merge!(k => "http://example.com")
- elsif service_klass.method_defined?("#{k}?")
+ elsif integration_klass.method_defined?("#{k}?")
hash.merge!(k => true)
- elsif service == 'irker' && k == :recipients
+ elsif integration == 'irker' && k == :recipients
hash.merge!(k => 'irc://irc.network.net:666/#channel')
- elsif service == 'irker' && k == :server_port
+ elsif integration == 'irker' && k == :server_port
hash.merge!(k => 1234)
- elsif service == 'jira' && k == :jira_issue_transition_id
+ elsif integration == 'jira' && k == :jira_issue_transition_id
hash.merge!(k => '1,2,3')
- elsif service == 'emails_on_push' && k == :recipients
+ elsif integration == 'emails_on_push' && k == :recipients
hash.merge!(k => 'foo@bar.com')
- elsif service == 'slack' || service == 'mattermost' && k == :labels_to_be_notified_behavior
+ elsif integration == 'slack' || integration == 'mattermost' && k == :labels_to_be_notified_behavior
hash.merge!(k => "match_any")
else
hash.merge!(k => "someword")
@@ -45,28 +47,28 @@ Integration.available_services_names.each do |service|
end
before do
- enable_license_for_service(service)
- stub_jira_service_test if service == 'jira'
+ enable_license_for_integration(integration)
+ stub_jira_integration_test if integration == 'jira'
end
- def initialize_service(service, attrs = {})
- service_item = project.find_or_initialize_service(service)
- service_item.attributes = attrs
- service_item.properties = service_attrs
- service_item.save!
- service_item
+ def initialize_integration(integration, attrs = {})
+ record = project.find_or_initialize_integration(integration)
+ record.attributes = attrs
+ record.properties = integration_attrs
+ record.save!
+ record
end
private
- def enable_license_for_service(service)
+ def enable_license_for_integration(integration)
return unless respond_to?(:stub_licensed_features)
- licensed_feature = licensed_features[service]
+ licensed_feature = licensed_features[integration]
return unless licensed_feature
stub_licensed_features(licensed_feature => true)
- project.clear_memoization(:disabled_services)
+ project.clear_memoization(:disabled_integrations)
end
end
end
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index c00b7203af6..b7eb03de8f0 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -1,19 +1,6 @@
# frozen_string_literal: true
RSpec.shared_context 'project navbar structure' do
- let(:analytics_nav_item) do
- {
- nav_item: _('Analytics'),
- nav_sub_items: [
- _('CI/CD'),
- (_('Code Review') if Gitlab.ee?),
- (_('Merge Request') if Gitlab.ee?),
- _('Repository'),
- _('Value Stream')
- ]
- }
- end
-
let(:security_and_compliance_nav_item) do
{
nav_item: _('Security & Compliance'),
@@ -24,64 +11,20 @@ RSpec.shared_context 'project navbar structure' do
}
end
- let(:monitor_nav_item) do
- {
- nav_item: _('Operations'),
- nav_sub_items: monitor_menu_items
- }
- end
-
- let(:monitor_menu_items) do
- [
- _('Metrics'),
- _('Logs'),
- _('Tracing'),
- _('Error Tracking'),
- _('Alerts'),
- _('Incidents'),
- _('Serverless'),
- _('Terraform'),
- _('Kubernetes'),
- _('Environments'),
- _('Feature Flags'),
- _('Product Analytics')
- ]
- end
-
- let(:project_information_nav_item) do
- {
- nav_item: _('Project overview'),
- nav_sub_items: [
- _('Details'),
- _('Activity'),
- _('Releases')
- ]
- }
- end
-
- let(:settings_menu_items) do
- [
- _('General'),
- _('Integrations'),
- _('Webhooks'),
- _('Access Tokens'),
- _('Repository'),
- _('CI/CD'),
- _('Operations')
- ]
- end
-
- let(:project_context_nav_item) do
- {
- nav_item: "#{project.name[0, 1].upcase} #{project.name}",
- nav_sub_items: []
- }
- end
-
let(:structure) do
[
- project_context_nav_item,
- project_information_nav_item,
+ {
+ nav_item: "#{project.name[0, 1].upcase} #{project.name}",
+ nav_sub_items: []
+ },
+ {
+ nav_item: _('Project information'),
+ nav_sub_items: [
+ _('Activity'),
+ _('Labels'),
+ _('Members')
+ ]
+ },
{
nav_item: _('Repository'),
nav_sub_items: [
@@ -120,8 +63,44 @@ RSpec.shared_context 'project navbar structure' do
]
},
security_and_compliance_nav_item,
- monitor_nav_item,
- analytics_nav_item,
+ {
+ nav_item: _('Deployments'),
+ nav_sub_items: [
+ _('Feature Flags'),
+ _('Environments'),
+ _('Releases')
+ ]
+ },
+ {
+ nav_item: _('Monitor'),
+ nav_sub_items: [
+ _('Metrics'),
+ _('Logs'),
+ _('Tracing'),
+ _('Error Tracking'),
+ _('Alerts'),
+ _('Incidents'),
+ _('Product Analytics')
+ ]
+ },
+ {
+ nav_item: _('Infrastructure'),
+ nav_sub_items: [
+ _('Kubernetes clusters'),
+ _('Serverless platform'),
+ _('Terraform')
+ ]
+ },
+ {
+ nav_item: _('Analytics'),
+ nav_sub_items: [
+ _('CI/CD'),
+ (_('Code review') if Gitlab.ee?),
+ (_('Merge request') if Gitlab.ee?),
+ _('Repository'),
+ _('Value stream')
+ ]
+ },
{
nav_item: _('Wiki'),
nav_sub_items: []
@@ -132,7 +111,15 @@ RSpec.shared_context 'project navbar structure' do
},
{
nav_item: _('Settings'),
- nav_sub_items: settings_menu_items
+ nav_sub_items: [
+ _('General'),
+ _('Integrations'),
+ _('Webhooks'),
+ _('Access Tokens'),
+ _('Repository'),
+ _('CI/CD'),
+ _('Monitor')
+ ]
}
].compact
end
@@ -189,17 +176,6 @@ RSpec.shared_context 'group navbar structure' do
}
end
- let(:group_information_nav_item) do
- {
- nav_item: _('Group information'),
- nav_sub_items: [
- _('Activity'),
- _('Labels'),
- _('Members')
- ]
- }
- end
-
let(:issues_nav_items) do
[
_('List'),
@@ -208,17 +184,20 @@ RSpec.shared_context 'group navbar structure' do
]
end
- let(:group_context_nav_item) do
- {
- nav_item: "#{group.name[0, 1].upcase} #{group.name}",
- nav_sub_items: []
- }
- end
-
let(:structure) do
[
- group_context_nav_item,
- group_information_nav_item,
+ {
+ nav_item: "#{group.name[0, 1].upcase} #{group.name}",
+ nav_sub_items: []
+ },
+ {
+ nav_item: _('Group information'),
+ nav_sub_items: [
+ _('Activity'),
+ _('Labels'),
+ _('Members')
+ ]
+ },
{
nav_item: _('Issues'),
nav_sub_items: issues_nav_items
@@ -227,7 +206,7 @@ RSpec.shared_context 'group navbar structure' do
nav_item: _('Merge requests'),
nav_sub_items: []
},
- security_and_compliance_nav_item,
+ (security_and_compliance_nav_item if Gitlab.ee?),
(push_rules_nav_item if Gitlab.ee?),
{
nav_item: _('Kubernetes'),
diff --git a/spec/support/shared_contexts/policies/project_policy_shared_context.rb b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
index d638ffcf8fa..de1b46c65ad 100644
--- a/spec/support/shared_contexts/policies/project_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
@@ -48,7 +48,7 @@ RSpec.shared_context 'ProjectPolicy context' do
destroy_container_image push_code read_pod_logs read_terraform_state
resolve_note update_build update_commit_status update_container_image
update_deployment update_environment update_merge_request
- update_metrics_dashboard_annotation update_pipeline update_release
+ update_metrics_dashboard_annotation update_pipeline update_release destroy_release
]
end
@@ -57,7 +57,7 @@ RSpec.shared_context 'ProjectPolicy context' do
add_cluster admin_build admin_commit_status admin_container_image
admin_deployment admin_environment admin_note admin_pipeline
admin_project admin_project_member admin_snippet admin_terraform_state
- admin_wiki create_deploy_token destroy_deploy_token destroy_release
+ admin_wiki create_deploy_token destroy_deploy_token
push_to_delete_protected_branch read_deploy_token update_snippet
]
end
diff --git a/spec/support/shared_contexts/requests/api/graphql/jira_import/jira_projects_context.rb b/spec/support/shared_contexts/requests/api/graphql/jira_import/jira_projects_context.rb
index de40b926a1c..6d34675e8e5 100644
--- a/spec/support/shared_contexts/requests/api/graphql/jira_import/jira_projects_context.rb
+++ b/spec/support/shared_contexts/requests/api/graphql/jira_import/jira_projects_context.rb
@@ -4,8 +4,8 @@ RSpec.shared_context 'Jira projects request context' do
let(:url) { 'https://jira.example.com' }
let(:username) { 'jira-username' }
let(:password) { 'jira-password' }
- let!(:jira_service) do
- create(:jira_service,
+ let!(:jira_integration) do
+ create(:jira_integration,
project: project,
url: url,
username: username,
diff --git a/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb b/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb
new file mode 100644
index 00000000000..ea72398010c
--- /dev/null
+++ b/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'stubbed service ping metrics definitions' do
+ include UsageDataHelpers
+
+ let(:metrics_definitions) { standard_metrics + subscription_metrics + operational_metrics + optional_metrics }
+ let(:standard_metrics) do
+ [
+ metric_attributes('uuid', "Standard")
+ ]
+ end
+
+ let(:operational_metrics) do
+ [
+ metric_attributes('counts.merge_requests', "Operational"),
+ metric_attributes('counts.todos', "Operational")
+ ]
+ end
+
+ let(:optional_metrics) do
+ [
+ metric_attributes('counts.boards', "Optional"),
+ metric_attributes('gitaly.filesystems', '').except('data_category')
+ ]
+ end
+
+ before do
+ stub_usage_data_connections
+ stub_object_store_settings
+
+ allow(Gitlab::Usage::MetricDefinition).to(
+ receive(:definitions)
+ .and_return(metrics_definitions.to_h { |definition| [definition['key_path'], Gitlab::Usage::MetricDefinition.new('', definition.symbolize_keys)] })
+ )
+ end
+
+ def metric_attributes(key_path, category)
+ {
+ 'key_path' => key_path,
+ 'data_category' => category
+ }
+ end
+end
diff --git a/spec/support/shared_contexts/unique_ip_check_shared_context.rb b/spec/support/shared_contexts/unique_ip_check_shared_context.rb
index f6bedb6cada..8d199df1c10 100644
--- a/spec/support/shared_contexts/unique_ip_check_shared_context.rb
+++ b/spec/support/shared_contexts/unique_ip_check_shared_context.rb
@@ -5,9 +5,9 @@ RSpec.shared_context 'unique ips sign in limit' do
let(:request_context) { Gitlab::RequestContext.instance }
before do
- Gitlab::Redis::Cache.with(&:flushall)
- Gitlab::Redis::Queues.with(&:flushall)
- Gitlab::Redis::SharedState.with(&:flushall)
+ redis_cache_cleanup!
+ redis_queues_cleanup!
+ redis_shared_state_cleanup!
end
before do
diff --git a/spec/support/shared_examples/ci/edit_job_token_scope_shared_examples.rb b/spec/support/shared_examples/ci/edit_job_token_scope_shared_examples.rb
new file mode 100644
index 00000000000..05b2b5f5de1
--- /dev/null
+++ b/spec/support/shared_examples/ci/edit_job_token_scope_shared_examples.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'editable job token scope' do
+ shared_examples 'returns error' do |error|
+ it 'returns an error response', :aggregate_failures do
+ expect(result).to be_error
+ expect(result.message).to eq(error)
+ end
+ end
+
+ context 'when job token scope is disabled for the given project' do
+ before do
+ allow(project).to receive(:ci_job_token_scope_enabled?).and_return(false)
+ end
+
+ it_behaves_like 'returns error', 'Job token scope is disabled for this project'
+ end
+
+ context 'when user does not have permissions to edit the job token scope' do
+ it_behaves_like 'returns error', 'Insufficient permissions to modify the job token scope'
+ end
+
+ context 'when user has permissions to edit the job token scope' do
+ before do
+ project.add_maintainer(current_user)
+ end
+
+ context 'when target project is not provided' do
+ let(:target_project) { nil }
+
+ it_behaves_like 'returns error', Ci::JobTokenScope::EditScopeValidations::TARGET_PROJECT_UNAUTHORIZED_OR_UNFOUND
+ end
+
+ context 'when target project is provided' do
+ context 'when user does not have permissions to read the target project' do
+ it_behaves_like 'returns error', Ci::JobTokenScope::EditScopeValidations::TARGET_PROJECT_UNAUTHORIZED_OR_UNFOUND
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb b/spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb
index 70a684c12bf..017e55309f7 100644
--- a/spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb
@@ -44,11 +44,13 @@ RSpec.shared_examples 'project access tokens available #create' do
end
it 'creates project access token' do
+ access_level = access_token_params[:access_level] || Gitlab::Access::MAINTAINER
subject
expect(created_token.name).to eq(access_token_params[:name])
expect(created_token.scopes).to eq(access_token_params[:scopes])
expect(created_token.expires_at).to eq(access_token_params[:expires_at])
+ expect(project.project_member(created_token.user).access_level).to eq(access_level)
end
it 'creates project bot user' do
diff --git a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
index 9af35c189d0..e8f7e62d0d7 100644
--- a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
@@ -1,10 +1,11 @@
# frozen_string_literal: true
RSpec.shared_examples 'wiki controller actions' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
+
let(:container) { raise NotImplementedError }
let(:routing_params) { raise NotImplementedError }
-
- let_it_be(:user) { create(:user) }
let(:wiki) { Wiki.for_container(container, user) }
let(:wiki_title) { 'page title test' }
@@ -458,6 +459,7 @@ RSpec.shared_examples 'wiki controller actions' do
describe 'DELETE #destroy' do
let(:id_param) { wiki_title }
+ let(:delete_user) { user }
subject(:request) do
delete(:destroy,
@@ -466,13 +468,21 @@ RSpec.shared_examples 'wiki controller actions' do
))
end
+ before do
+ sign_in(delete_user)
+ end
+
context 'when page exists' do
- it 'deletes the page' do
- expect do
- request
- end.to change { wiki.list_pages.size }.by(-1)
+ shared_examples 'deletes the page' do
+ specify do
+ expect do
+ request
+ end.to change { wiki.list_pages.size }.by(-1)
+ end
end
+ it_behaves_like 'deletes the page'
+
context 'but page cannot be deleted' do
before do
allow_next_instance_of(WikiPage) do |page|
@@ -489,6 +499,28 @@ RSpec.shared_examples 'wiki controller actions' do
expect(assigns(:error)).to eq('Could not delete wiki page')
end
end
+
+ context 'when user is a developer' do
+ let(:delete_user) { other_user }
+
+ before do
+ container.add_developer(other_user)
+ end
+
+ it_behaves_like 'deletes the page'
+ end
+
+ context 'when user is a reporter' do
+ let(:delete_user) { other_user }
+
+ before do
+ container.add_reporter(other_user)
+ end
+
+ it 'returns 404' do
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
end
context 'when page does not exist' do
diff --git a/spec/support/shared_examples/features/cascading_settings_shared_examples.rb b/spec/support/shared_examples/features/cascading_settings_shared_examples.rb
index 29ef3da9a85..395f4fc54e0 100644
--- a/spec/support/shared_examples/features/cascading_settings_shared_examples.rb
+++ b/spec/support/shared_examples/features/cascading_settings_shared_examples.rb
@@ -13,10 +13,22 @@ RSpec.shared_examples 'a cascading setting' do
click_save_button
end
- it 'disables setting in subgroups' do
- visit subgroup_path
+ shared_examples 'subgroup settings are disabled' do
+ it 'disables setting in subgroups' do
+ visit subgroup_path
+
+ expect(find("#{setting_field_selector}[disabled]")).to be_checked
+ end
+ end
+
+ include_examples 'subgroup settings are disabled'
+
+ context 'when use_traversal_ids_for_ancestors is disabled' do
+ before do
+ stub_feature_flags(use_traversal_ids_for_ancestors: false)
+ end
- expect(find("#{setting_field_selector}[disabled]")).to be_checked
+ include_examples 'subgroup settings are disabled'
end
it 'does not show enforcement checkbox in subgroups' do
diff --git a/spec/support/shared_examples/features/packages_shared_examples.rb b/spec/support/shared_examples/features/packages_shared_examples.rb
index 4d2e13aa5bc..9e88db2e1c0 100644
--- a/spec/support/shared_examples/features/packages_shared_examples.rb
+++ b/spec/support/shared_examples/features/packages_shared_examples.rb
@@ -32,11 +32,9 @@ RSpec.shared_examples 'package details link' do |property|
expect(page).to have_current_path(project_package_path(package.project, package))
- page.within('[data-qa-selector="package_title"]') do
- expect(page).to have_content(package.name)
- end
+ expect(page).to have_css('.packages-app h1[data-testid="title"]', text: package.name)
- page.within('[data-qa-selector="package_information_content"]') do
+ page.within(%Q([name="#{package.name}"])) do
expect(page).to have_content('Installation')
expect(page).to have_content('Registry setup')
end
diff --git a/spec/support/shared_examples/features/search/search_timeouts_shared_examples.rb b/spec/support/shared_examples/features/search/search_timeouts_shared_examples.rb
new file mode 100644
index 00000000000..bb5460e2a6f
--- /dev/null
+++ b/spec/support/shared_examples/features/search/search_timeouts_shared_examples.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'search timeouts' do |scope|
+ context 'when search times out' do
+ before do
+ allow_next_instance_of(SearchService) do |service|
+ allow(service).to receive(:search_objects).and_raise(ActiveRecord::QueryCanceled)
+ end
+
+ visit(search_path(search: 'test', scope: scope))
+ end
+
+ it 'renders timeout information' do
+ expect(page).to have_content('Your search timed out')
+ end
+
+ it 'sets tab count to 0' do
+ expect(page.find('.search-filter .active')).to have_text('0')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/sidebar_shared_examples.rb b/spec/support/shared_examples/features/sidebar_shared_examples.rb
index c9508818f74..5bfe929e957 100644
--- a/spec/support/shared_examples/features/sidebar_shared_examples.rb
+++ b/spec/support/shared_examples/features/sidebar_shared_examples.rb
@@ -175,12 +175,4 @@ RSpec.shared_examples 'issue boards sidebar' do
end
end
end
-
- def refresh_and_click_first_card
- page.refresh
-
- wait_for_requests
-
- first_card.click
- end
end
diff --git a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
index f2576931642..dfc9a45bd0d 100644
--- a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
@@ -20,17 +20,6 @@ RSpec.shared_examples 'User creates wiki page' do
click_link "Create your first page"
end
- it "shows validation error message if the form is force submitted", :js do
- page.within(".wiki-form") do
- fill_in(:wiki_content, with: "")
-
- page.execute_script("document.querySelector('.wiki-form').submit()")
- page.accept_alert # manually force form submit
- end
-
- expect(page).to have_content("The form contains the following error:").and have_content("Content can't be blank")
- end
-
it "disables the submit button", :js do
page.within(".wiki-form") do
fill_in(:wiki_content, with: "")
diff --git a/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb
index ee0261771f9..55c89977a99 100644
--- a/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb
@@ -7,18 +7,34 @@
RSpec.shared_examples 'User deletes wiki page' do
include WikiHelpers
+ let_it_be(:developer) { create(:user) }
+
let(:wiki_page) { create(:wiki_page, wiki: wiki) }
before do
+ wiki.container.add_developer(developer)
+
sign_in(user)
visit wiki_page_path(wiki, wiki_page)
end
- it 'deletes a page', :js do
- click_on('Edit')
- click_on('Delete')
- find('[data-testid="confirm_deletion_button"]').click
+ shared_examples 'deletes a wiki page' do
+ specify 'deletes a page', :js do
+ click_on('Edit')
+ click_on('Delete')
+ find('[data-testid="confirm_deletion_button"]').click
+
+ expect(page).to have_content('Wiki page was successfully deleted.')
+ end
+ end
+
+ context 'when user is the owner or maintainer' do
+ it_behaves_like 'deletes a wiki page'
+ end
+
+ context 'when user is a developer' do
+ let(:user) { developer }
- expect(page).to have_content('Wiki page was successfully deleted.')
+ it_behaves_like 'deletes a wiki page'
end
end
diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
index db2a96d9649..9587da0233e 100644
--- a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
@@ -90,19 +90,6 @@ RSpec.shared_examples 'User updates wiki page' do
expect(page).to have_field('wiki[message]', with: 'Update Wiki title')
end
- it 'shows a validation error message if the form is force submitted', :js do
- fill_in(:wiki_content, with: '')
-
- page.execute_script("document.querySelector('.wiki-form').submit()")
- page.accept_alert # manually force form submit
-
- expect(page).to have_selector('.wiki-form')
- expect(page).to have_content('Edit Page')
- expect(page).to have_content('The form contains the following error:')
- expect(page).to have_content("Content can't be blank")
- expect(find('textarea#wiki_content').value).to eq('')
- end
-
it "disables the submit button", :js do
page.within(".wiki-form") do
fill_in(:wiki_content, with: "")
diff --git a/spec/support/shared_examples/graphql/design_fields_shared_examples.rb b/spec/support/shared_examples/graphql/design_fields_shared_examples.rb
index 9c2eb3e5a5c..efbcfaf0e91 100644
--- a/spec/support/shared_examples/graphql/design_fields_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/design_fields_shared_examples.rb
@@ -27,6 +27,7 @@ RSpec.shared_examples 'a GraphQL type with design fields' do
describe '#image' do
let_it_be(:current_user) { create(:user) }
+
let(:schema) { GitlabSchema }
let(:query) { GraphQL::Query.new(schema) }
let(:context) { query.context }
diff --git a/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb b/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb
index 5e15c91cd41..011a2157f24 100644
--- a/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb
+++ b/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb
@@ -3,17 +3,13 @@
require 'spec_helper'
RSpec.shared_examples 'a mutation which can mutate a spammable' do
- describe "#additional_spam_params" do
- it 'passes additional spam params to the service' do
+ describe "#spam_params" do
+ it 'passes spam params to the service constructor' do
args = [
project: anything,
current_user: anything,
- params: hash_including(
- api: true,
- request: instance_of(ActionDispatch::Request),
- captcha_response: captcha_response,
- spam_log_id: spam_log_id
- )
+ params: anything,
+ spam_params: instance_of(::Spam::SpamParams)
]
expect(service).to receive(:new).with(*args).and_call_original
diff --git a/spec/support/shared_examples/graphql/spam_protection_shared_examples.rb b/spec/support/shared_examples/graphql/spam_protection_shared_examples.rb
index 8fb89a4f80e..c0b71a494d0 100644
--- a/spec/support/shared_examples/graphql/spam_protection_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/spam_protection_shared_examples.rb
@@ -57,7 +57,7 @@ RSpec.shared_examples 'has spam protection' do
context 'and no CAPTCHA is required' do
let(:render_captcha) { false }
- it 'does not return a to-level error' do
+ it 'does not return a top-level error' do
send_request
expect(graphql_errors).to be_blank
diff --git a/spec/support/shared_examples/lib/cache_helpers_shared_examples.rb b/spec/support/shared_examples/lib/cache_helpers_shared_examples.rb
new file mode 100644
index 00000000000..845fa78a827
--- /dev/null
+++ b/spec/support/shared_examples/lib/cache_helpers_shared_examples.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples_for 'object cache helper' do
+ it { is_expected.to be_a(Gitlab::Json::PrecompiledJson) }
+
+ it "uses the presenter" do
+ expect(presenter).to receive(:represent).with(presentable, project: project)
+
+ subject
+ end
+
+ it "is valid JSON" do
+ parsed = Gitlab::Json.parse(subject.to_s)
+
+ expect(parsed).to be_a(Hash)
+ expect(parsed["id"]).to eq(presentable.id)
+ end
+
+ it "fetches from the cache" do
+ expect(instance.cache).to receive(:fetch).with("#{presenter.class.name}:#{presentable.cache_key}:#{user.cache_key}", expires_in: described_class::DEFAULT_EXPIRY).once
+
+ subject
+ end
+
+ context "when a cache context is supplied" do
+ before do
+ kwargs[:cache_context] = -> (item) { item.project.cache_key }
+ end
+
+ it "uses the context to augment the cache key" do
+ expect(instance.cache).to receive(:fetch).with("#{presenter.class.name}:#{presentable.cache_key}:#{project.cache_key}", expires_in: described_class::DEFAULT_EXPIRY).once
+
+ subject
+ end
+ end
+
+ context "when expires_in is supplied" do
+ it "sets the expiry when accessing the cache" do
+ kwargs[:expires_in] = 7.days
+
+ expect(instance.cache).to receive(:fetch).with("#{presenter.class.name}:#{presentable.cache_key}:#{user.cache_key}", expires_in: 7.days).once
+
+ subject
+ end
+ end
+end
+
+RSpec.shared_examples_for 'collection cache helper' do
+  it { is_expected.to be_a(Gitlab::Json::PrecompiledJson) }
+
+ it "uses the presenter" do
+ presentable.each do |item|
+ expect(presenter).to receive(:represent).with(item, project: project)
+ end
+
+ subject
+ end
+
+ it "is valid JSON" do
+ parsed = Gitlab::Json.parse(subject.to_s)
+
+ expect(parsed).to be_an(Array)
+
+ presentable.each_with_index do |item, i|
+ expect(parsed[i]["id"]).to eq(item.id)
+ end
+ end
+
+ it "fetches from the cache" do
+ keys = presentable.map { |item| "#{presenter.class.name}:#{item.cache_key}:#{user.cache_key}" }
+
+ expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: described_class::DEFAULT_EXPIRY).once.and_call_original
+
+ subject
+ end
+
+ context "when a cache context is supplied" do
+ before do
+ kwargs[:cache_context] = -> (item) { item.project.cache_key }
+ end
+
+ it "uses the context to augment the cache key" do
+ keys = presentable.map { |item| "#{presenter.class.name}:#{item.cache_key}:#{project.cache_key}" }
+
+ expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: described_class::DEFAULT_EXPIRY).once.and_call_original
+
+ subject
+ end
+ end
+
+  context "when expires_in is supplied" do
+ it "sets the expiry when accessing the cache" do
+ keys = presentable.map { |item| "#{presenter.class.name}:#{item.cache_key}:#{user.cache_key}" }
+ kwargs[:expires_in] = 7.days
+
+ expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: 7.days).once.and_call_original
+
+ subject
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb
index 7d341d79bae..6e12b5a0e85 100644
--- a/spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb
@@ -3,6 +3,7 @@
RSpec.shared_examples_for 'value stream analytics event' do
let(:params) { {} }
let(:instance) { described_class.new(params) }
+ let(:expected_hash_code) { Digest::SHA256.hexdigest(instance.class.identifier.to_s) }
it { expect(described_class.name).to be_a_kind_of(String) }
it { expect(described_class.identifier).to be_a_kind_of(Symbol) }
@@ -19,4 +20,16 @@ RSpec.shared_examples_for 'value stream analytics event' do
expect(output_query).to be_a_kind_of(ActiveRecord::Relation)
end
end
+
+ describe '#hash_code' do
+ it 'returns a hash that uniquely identifies an event' do
+ expect(instance.hash_code).to eq(expected_hash_code)
+ end
+
+ it 'does not differ when the same object is built with the same params' do
+ another_instance_with_same_params = described_class.new(params)
+
+ expect(another_instance_with_same_params.hash_code).to eq(instance.hash_code)
+ end
+ end
end
diff --git a/spec/support/shared_examples/lib/gitlab/import_export/relation_factory_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import_export/relation_factory_shared_examples.rb
index 33061f17bde..3c5c65f0690 100644
--- a/spec/support/shared_examples/lib/gitlab/import_export/relation_factory_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/import_export/relation_factory_shared_examples.rb
@@ -12,7 +12,7 @@ RSpec.shared_examples 'Notes user references' do
'id' => 111,
'access_level' => 30,
'source_id' => 1,
- 'source_type' => importable.class.name == 'Project' ? 'Project' : 'Namespace',
+ 'source_type' => importable.instance_of?(Project) ? 'Project' : 'Namespace',
'user_id' => 3,
'notification_level' => 3,
'created_at' => '2016-11-18T09:29:42.634Z',
diff --git a/spec/support/shared_examples/lib/gitlab/kubernetes/network_policy_common_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/kubernetes/network_policy_common_shared_examples.rb
index f018ece0d46..2633a89eeee 100644
--- a/spec/support/shared_examples/lib/gitlab/kubernetes/network_policy_common_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/kubernetes/network_policy_common_shared_examples.rb
@@ -19,7 +19,8 @@ RSpec.shared_examples 'network policy common specs' do
creation_timestamp: nil,
manifest: YAML.dump(policy.resource.deep_stringify_keys),
is_autodevops: false,
- is_enabled: true
+ is_enabled: true,
+ environment_ids: []
}
end
diff --git a/spec/support/shared_examples/lib/gitlab/search_results_sorted_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/search_results_sorted_shared_examples.rb
index eafb49cef71..e4f09dfa0b0 100644
--- a/spec/support/shared_examples/lib/gitlab/search_results_sorted_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/search_results_sorted_shared_examples.rb
@@ -33,3 +33,21 @@ RSpec.shared_examples 'search results sorted' do
end
end
end
+
+RSpec.shared_examples 'search results sorted by popularity' do
+ context 'sort: popularity_desc' do
+ let(:sort) { 'popularity_desc' }
+
+ it 'sorts results by upvotes' do
+ expect(results_popular.objects(scope).map(&:id)).to eq([popular_result.id, less_popular_result.id, non_popular_result.id])
+ end
+ end
+
+ context 'sort: popularity_asc' do
+ let(:sort) { 'popularity_asc' }
+
+    it 'sorts results by upvotes in ascending order' do
+ expect(results_popular.objects(scope).map(&:id)).to eq([non_popular_result.id, less_popular_result.id, popular_result.id])
+ end
+ end
+end
diff --git a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
index 1b110ab02b5..a84658780b9 100644
--- a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
+++ b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
@@ -5,9 +5,10 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
2.times do
Gitlab::WithRequestStore.with_request_store do
subscriber.sql(event)
+ connection = event.payload[:connection]
if db_role == :primary
- expect(described_class.db_counter_payload).to eq(
+ expected = {
db_count: record_query ? 1 : 0,
db_write_count: record_write_query ? 1 : 0,
db_cached_count: record_cached_query ? 1 : 0,
@@ -18,10 +19,13 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
db_replica_count: 0,
db_replica_duration_s: 0.0,
db_primary_wal_count: record_wal_query ? 1 : 0,
+ db_primary_wal_cached_count: record_wal_query && record_cached_query ? 1 : 0,
+ db_replica_wal_cached_count: 0,
db_replica_wal_count: 0
- )
+ }
+ expected[:"db_primary_#{::Gitlab::Database.dbname(connection)}_duration_s"] = 0.002 if record_query
elsif db_role == :replica
- expect(described_class.db_counter_payload).to eq(
+ expected = {
db_count: record_query ? 1 : 0,
db_write_count: record_write_query ? 1 : 0,
db_cached_count: record_cached_query ? 1 : 0,
@@ -32,15 +36,35 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
db_replica_count: record_query ? 1 : 0,
db_replica_duration_s: record_query ? 0.002 : 0,
db_replica_wal_count: record_wal_query ? 1 : 0,
+ db_replica_wal_cached_count: record_wal_query && record_cached_query ? 1 : 0,
+ db_primary_wal_cached_count: 0,
db_primary_wal_count: 0
- )
+ }
+ expected[:"db_replica_#{::Gitlab::Database.dbname(connection)}_duration_s"] = 0.002 if record_query
else
- expect(described_class.db_counter_payload).to eq(
+ expected = {
db_count: record_query ? 1 : 0,
db_write_count: record_write_query ? 1 : 0,
db_cached_count: record_cached_query ? 1 : 0
- )
+ }
end
+
+ expect(described_class.db_counter_payload).to eq(expected)
+ end
+ end
+ end
+
+ context 'when multiple_database_metrics is disabled' do
+ before do
+ stub_feature_flags(multiple_database_metrics: false)
+ end
+
+ it 'does not include per database metrics' do
+ Gitlab::WithRequestStore.with_request_store do
+ subscriber.sql(event)
+ connection = event.payload[:connection]
+
+ expect(described_class.db_counter_payload).not_to include(:"db_replica_#{::Gitlab::Database.dbname(connection)}_duration_s")
end
end
end
@@ -71,7 +95,10 @@ RSpec.shared_examples 'record ActiveRecord metrics in a metrics transaction' do
end
if record_wal_query
- expect(transaction).to receive(:increment).with("gitlab_transaction_db_#{db_role}_wal_count_total".to_sym, 1) if db_role
+ if db_role
+ expect(transaction).to receive(:increment).with("gitlab_transaction_db_#{db_role}_wal_count_total".to_sym, 1)
+ expect(transaction).to receive(:increment).with("gitlab_transaction_db_#{db_role}_wal_cached_count_total".to_sym, 1) if record_cached_query
+ end
else
expect(transaction).not_to receive(:increment).with("gitlab_transaction_db_#{db_role}_wal_count_total".to_sym, 1) if db_role
end
diff --git a/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb b/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
index 42f82987989..03f565e0aac 100644
--- a/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
+++ b/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
@@ -165,9 +165,9 @@ RSpec.shared_examples 'AtomicInternalId' do |validate_presence: true|
3.times { supply.next_value }
end
- current_value = described_class.public_send(method_name, scope_value, &:current_value)
-
- expect(current_value).to eq(iid + 3)
+ described_class.public_send(method_name, scope_value) do |supply|
+ expect(supply.next_value).to eq(iid + 4)
+ end
end
end
diff --git a/spec/support/shared_examples/models/chat_integration_shared_examples.rb b/spec/support/shared_examples/models/chat_integration_shared_examples.rb
index 9f3be3e2e06..72659dd5f3b 100644
--- a/spec/support/shared_examples/models/chat_integration_shared_examples.rb
+++ b/spec/support/shared_examples/models/chat_integration_shared_examples.rb
@@ -13,7 +13,7 @@ RSpec.shared_examples "chat integration" do |integration_name|
end
it { is_expected.to validate_presence_of(:webhook) }
- it_behaves_like "issue tracker service URL attribute", :webhook
+ it_behaves_like "issue tracker integration URL attribute", :webhook
end
context "when integration is inactive" do
@@ -163,7 +163,7 @@ RSpec.shared_examples "chat integration" do |integration_name|
context "with issue events" do
let(:opts) { { title: "Awesome issue", description: "please fix" } }
let(:sample_data) do
- service = Issues::CreateService.new(project: project, current_user: user, params: opts)
+ service = Issues::CreateService.new(project: project, current_user: user, params: opts, spam_params: nil)
issue = service.execute
service.hook_data(issue, "open")
end
diff --git a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
index 66448aca2c5..2d4c0b60f2b 100644
--- a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
@@ -8,7 +8,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |service_name|
def execute_with_options(options)
receive(:new).with(webhook_url, options.merge(http_client: Integrations::SlackMattermostNotifier::HTTPClient))
- .and_return(double(:slack_service).as_null_object)
+ .and_return(double(:slack_integration).as_null_object)
end
describe "Associations" do
@@ -23,7 +23,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |service_name|
end
it { is_expected.to validate_presence_of(:webhook) }
- it_behaves_like 'issue tracker service URL attribute', :webhook
+ it_behaves_like 'issue tracker integration URL attribute', :webhook
end
context 'when service is inactive' do
diff --git a/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb b/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
index d23f95b2e9e..cf38a583944 100644
--- a/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
+++ b/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
@@ -122,6 +122,22 @@ RSpec.shared_examples 'value stream analytics stage' do
expect(stage.parent_id).to eq(parent.id)
end
end
+
+  describe '#events_hash_code' do
+ it 'does not differ when the same object is built with the same params' do
+ stage_1 = build(factory)
+ stage_2 = build(factory)
+
+ expect(stage_1.events_hash_code).to eq(stage_2.events_hash_code)
+ end
+
+ it 'differs when the stage events are different' do
+ stage_1 = build(factory, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged)
+ stage_2 = build(factory, start_event_identifier: :issue_created, end_event_identifier: :issue_first_mentioned_in_commit)
+
+ expect(stage_1.events_hash_code).not_to eq(stage_2.events_hash_code)
+ end
+ end
end
RSpec.shared_examples 'value stream analytics label based stage' do
diff --git a/spec/support/shared_examples/models/integrations/base_slash_commands_shared_examples.rb b/spec/support/shared_examples/models/integrations/base_slash_commands_shared_examples.rb
index 128999d02fa..e35ac9c0d0d 100644
--- a/spec/support/shared_examples/models/integrations/base_slash_commands_shared_examples.rb
+++ b/spec/support/shared_examples/models/integrations/base_slash_commands_shared_examples.rb
@@ -66,14 +66,14 @@ RSpec.shared_examples Integrations::BaseSlashCommands do
}
end
- let(:service) do
- project.create_mattermost_slash_commands_service(
+ let(:integration) do
+ project.create_mattermost_slash_commands_integration(
properties: { token: 'token' }
)
end
it 'generates the url' do
- response = service.trigger(params)
+ response = integration.trigger(params)
expect(response[:text]).to start_with(':wave: Hi there!')
end
diff --git a/spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb b/spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb
new file mode 100644
index 00000000000..1fa340a0cf4
--- /dev/null
+++ b/spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples Integrations::HasWebHook do
+ include AfterNextHelpers
+
+ describe 'callbacks' do
+ it 'calls #update_web_hook! when enabled' do
+ expect(integration).to receive(:update_web_hook!)
+
+ integration.active = true
+ integration.save!
+ end
+
+ it 'does not call #update_web_hook! when disabled' do
+ expect(integration).not_to receive(:update_web_hook!)
+
+ integration.active = false
+ integration.save!
+ end
+
+ it 'does not call #update_web_hook! when validation fails' do
+ expect(integration).not_to receive(:update_web_hook!)
+
+ integration.active = true
+ integration.project = nil
+ expect(integration.save).to be(false)
+ end
+ end
+
+ describe '#hook_url' do
+ it 'returns a string' do
+ expect(integration.hook_url).to be_a(String)
+ end
+ end
+
+ describe '#hook_ssl_verification' do
+ it 'returns a boolean' do
+ expect(integration.hook_ssl_verification).to be_in([true, false])
+ end
+ end
+
+ describe '#update_web_hook!' do
+ def call
+ integration.update_web_hook!
+ end
+
+ it 'creates or updates a service hook' do
+ expect { call }.to change(ServiceHook, :count).by(1)
+ expect(integration.service_hook.url).to eq(hook_url)
+
+ integration.service_hook.update!(url: 'http://other.com')
+
+ expect { call }.to change { integration.service_hook.reload.url }.from('http://other.com').to(hook_url)
+ end
+
+ it 'raises an error if the service hook could not be saved' do
+ call
+ integration.service_hook.integration = nil
+
+ expect { call }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+
+ it 'does not attempt to save the service hook if there are no changes' do
+ call
+
+ expect(integration.service_hook).not_to receive(:save!)
+
+ call
+ end
+ end
+
+ describe '#execute_web_hook!' do
+ let(:args) { ['foo', [1, 2, 3]] }
+
+ def call
+ integration.execute_web_hook!(*args)
+ end
+
+ it 'creates the webhook if necessary and executes it' do
+ expect_next(ServiceHook).to receive(:execute).with(*args)
+ expect { call }.to change(ServiceHook, :count).by(1)
+
+ expect(integration.service_hook).to receive(:execute).with(*args)
+ expect { call }.not_to change(ServiceHook, :count)
+ end
+
+ it 'raises an error if the service hook could not be saved' do
+ expect_next(ServiceHook).to receive(:execute).with(*args)
+
+ call
+ integration.service_hook.integration = nil
+
+ expect(integration.service_hook).not_to receive(:execute)
+ expect { call }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb b/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb
index b275d594792..6d519e561ee 100644
--- a/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb
+++ b/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'issue tracker service URL attribute' do |url_attr|
+RSpec.shared_examples 'issue tracker integration URL attribute' do |url_attr|
it { is_expected.to allow_value('https://example.com').for(url_attr) }
it { is_expected.not_to allow_value('example.com').for(url_attr) }
diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb
index 7ede6f0d8d4..c111d250d34 100644
--- a/spec/support/shared_examples/models/member_shared_examples.rb
+++ b/spec/support/shared_examples/models/member_shared_examples.rb
@@ -75,3 +75,259 @@ RSpec.shared_examples '#valid_level_roles' do |entity_name|
expect(presenter.valid_level_roles).to eq(expected_roles)
end
end
+
+RSpec.shared_examples_for "member creation" do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
+
+ describe '#execute' do
+ it 'returns a Member object', :aggregate_failures do
+ member = described_class.new(source, user, :maintainer).execute
+
+ expect(member).to be_a member_type
+ expect(member).to be_persisted
+ end
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'sets members.created_by to the given admin current_user' do
+ member = described_class.new(source, user, :maintainer, current_user: admin).execute
+
+ expect(member.created_by).to eq(admin)
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it 'rejects setting members.created_by to the given admin current_user' do
+ member = described_class.new(source, user, :maintainer, current_user: admin).execute
+
+ expect(member.created_by).to be_nil
+ end
+ end
+
+ it 'sets members.expires_at to the given expires_at' do
+ member = described_class.new(source, user, :maintainer, expires_at: Date.new(2016, 9, 22)).execute
+
+ expect(member.expires_at).to eq(Date.new(2016, 9, 22))
+ end
+
+ described_class.access_levels.each do |sym_key, int_access_level|
+ it "accepts the :#{sym_key} symbol as access level", :aggregate_failures do
+ expect(source.users).not_to include(user)
+
+ member = described_class.new(source, user.id, sym_key).execute
+
+ expect(member.access_level).to eq(int_access_level)
+ expect(source.users.reload).to include(user)
+ end
+
+ it "accepts the #{int_access_level} integer as access level", :aggregate_failures do
+ expect(source.users).not_to include(user)
+
+ member = described_class.new(source, user.id, int_access_level).execute
+
+ expect(member.access_level).to eq(int_access_level)
+ expect(source.users.reload).to include(user)
+ end
+ end
+
+ context 'with no current_user' do
+ context 'when called with a known user id' do
+ it 'adds the user as a member' do
+ expect(source.users).not_to include(user)
+
+ described_class.new(source, user.id, :maintainer).execute
+
+ expect(source.users.reload).to include(user)
+ end
+ end
+
+ context 'when called with an unknown user id' do
+      it 'does not add the user as a member' do
+ expect(source.users).not_to include(user)
+
+ described_class.new(source, non_existing_record_id, :maintainer).execute
+
+ expect(source.users.reload).not_to include(user)
+ end
+ end
+
+ context 'when called with a user object' do
+ it 'adds the user as a member' do
+ expect(source.users).not_to include(user)
+
+ described_class.new(source, user, :maintainer).execute
+
+ expect(source.users.reload).to include(user)
+ end
+ end
+
+ context 'when called with a requester user object' do
+ before do
+ source.request_access(user)
+ end
+
+ it 'adds the requester as a member', :aggregate_failures do
+ expect(source.users).not_to include(user)
+ expect(source.requesters.exists?(user_id: user)).to be_truthy
+
+ expect do
+ described_class.new(source, user, :maintainer).execute
+ end.to raise_error(Gitlab::Access::AccessDeniedError)
+
+ expect(source.users.reload).not_to include(user)
+ expect(source.requesters.reload.exists?(user_id: user)).to be_truthy
+ end
+ end
+
+ context 'when called with a known user email' do
+ it 'adds the user as a member' do
+ expect(source.users).not_to include(user)
+
+ described_class.new(source, user.email, :maintainer).execute
+
+ expect(source.users.reload).to include(user)
+ end
+ end
+
+ context 'when called with an unknown user email' do
+ it 'creates an invited member' do
+ expect(source.users).not_to include(user)
+
+ described_class.new(source, 'user@example.com', :maintainer).execute
+
+ expect(source.members.invite.pluck(:invite_email)).to include('user@example.com')
+ end
+ end
+
+ context 'when called with an unknown user email starting with a number' do
+ it 'creates an invited member', :aggregate_failures do
+ email_starting_with_number = "#{user.id}_email@example.com"
+
+ described_class.new(source, email_starting_with_number, :maintainer).execute
+
+ expect(source.members.invite.pluck(:invite_email)).to include(email_starting_with_number)
+ expect(source.users.reload).not_to include(user)
+ end
+ end
+ end
+
+ context 'when current_user can update member', :enable_admin_mode do
+ it 'creates the member' do
+ expect(source.users).not_to include(user)
+
+ described_class.new(source, user, :maintainer, current_user: admin).execute
+
+ expect(source.users.reload).to include(user)
+ end
+
+ context 'when called with a requester user object' do
+ before do
+ source.request_access(user)
+ end
+
+ it 'adds the requester as a member', :aggregate_failures do
+ expect(source.users).not_to include(user)
+ expect(source.requesters.exists?(user_id: user)).to be_truthy
+
+ described_class.new(source, user, :maintainer, current_user: admin).execute
+
+ expect(source.users.reload).to include(user)
+ expect(source.requesters.reload.exists?(user_id: user)).to be_falsy
+ end
+ end
+ end
+
+ context 'when current_user cannot update member' do
+ it 'does not create the member', :aggregate_failures do
+ expect(source.users).not_to include(user)
+
+ member = described_class.new(source, user, :maintainer, current_user: user).execute
+
+ expect(source.users.reload).not_to include(user)
+ expect(member).not_to be_persisted
+ end
+
+ context 'when called with a requester user object' do
+ before do
+ source.request_access(user)
+ end
+
+ it 'does not destroy the requester', :aggregate_failures do
+ expect(source.users).not_to include(user)
+ expect(source.requesters.exists?(user_id: user)).to be_truthy
+
+ described_class.new(source, user, :maintainer, current_user: user).execute
+
+ expect(source.users.reload).not_to include(user)
+ expect(source.requesters.exists?(user_id: user)).to be_truthy
+ end
+ end
+ end
+
+ context 'when member already exists' do
+ before do
+ source.add_user(user, :developer)
+ end
+
+ context 'with no current_user' do
+ it 'updates the member' do
+ expect(source.users).to include(user)
+
+ described_class.new(source, user, :maintainer).execute
+
+ expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::MAINTAINER)
+ end
+ end
+
+ context 'when current_user can update member', :enable_admin_mode do
+ it 'updates the member' do
+ expect(source.users).to include(user)
+
+ described_class.new(source, user, :maintainer, current_user: admin).execute
+
+ expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::MAINTAINER)
+ end
+ end
+
+ context 'when current_user cannot update member' do
+ it 'does not update the member' do
+ expect(source.users).to include(user)
+
+ described_class.new(source, user, :maintainer, current_user: user).execute
+
+ expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::DEVELOPER)
+ end
+ end
+ end
+ end
+
+ describe '.add_users' do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+    it 'returns an array of Member objects' do
+ members = described_class.add_users(source, [user1, user2], :maintainer)
+
+ expect(members).to be_a Array
+ expect(members.size).to eq(2)
+ expect(members.first).to be_a member_type
+ expect(members.first).to be_persisted
+ end
+
+ it 'returns an empty array' do
+ members = described_class.add_users(source, [], :maintainer)
+
+ expect(members).to be_a Array
+ expect(members).to be_empty
+ end
+
+ it 'supports different formats' do
+ list = ['joe@local.test', admin, user1.id, user2.id.to_s]
+
+ members = described_class.add_users(source, list, :maintainer)
+
+ expect(members.size).to eq(4)
+ expect(members.first).to be_invite
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/project_ci_cd_settings_shared_examples.rb b/spec/support/shared_examples/models/project_ci_cd_settings_shared_examples.rb
new file mode 100644
index 00000000000..c92e819db19
--- /dev/null
+++ b/spec/support/shared_examples/models/project_ci_cd_settings_shared_examples.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'ci_cd_settings delegation' do
+ let(:exclude_attributes) { [] }
+
+ context 'when ci_cd_settings is destroyed but project is not' do
+ it 'allows methods delegated to ci_cd_settings to be nil', :aggregate_failures do
+ project = create(:project)
+ attributes = project.ci_cd_settings.attributes.keys - %w(id project_id) - exclude_attributes
+ project.ci_cd_settings.destroy!
+ project.reload
+ attributes.each do |attr|
+ method = project.respond_to?("ci_#{attr}") ? "ci_#{attr}" : attr
+ expect(project.send(method)).to be_nil, "#{attr} was not nil"
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'a ci_cd_settings predicate method' do |prefix: ''|
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:project) { create(:project) }
+
+ context 'when ci_cd_settings is nil' do
+ before do
+ allow(project).to receive(:ci_cd_settings).and_return(nil)
+ end
+
+ it 'returns false' do
+ expect(project.send("#{prefix}#{delegated_method}")).to be(false)
+ end
+ end
+
+ context 'when ci_cd_settings is not nil' do
+ where(:delegated_method_return, :subject_return) do
+ true | true
+ false | false
+ end
+
+ with_them do
+ let(:ci_cd_settings_double) { double('ProjectCiCdSetting') }
+
+ before do
+ allow(project).to receive(:ci_cd_settings).and_return(ci_cd_settings_double)
+ allow(ci_cd_settings_double).to receive(delegated_method).and_return(delegated_method_return)
+ end
+
+ it 'returns the expected boolean value' do
+ expect(project.send("#{prefix}#{delegated_method}")).to be(subject_return)
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/wiki_shared_examples.rb b/spec/support/shared_examples/models/wiki_shared_examples.rb
index 2498bf35a09..bc5956e3eec 100644
--- a/spec/support/shared_examples/models/wiki_shared_examples.rb
+++ b/spec/support/shared_examples/models/wiki_shared_examples.rb
@@ -2,6 +2,7 @@
RSpec.shared_examples 'wiki model' do
let_it_be(:user) { create(:user, :commit_email) }
+
let(:wiki_container) { raise NotImplementedError }
let(:wiki_container_without_repo) { raise NotImplementedError }
let(:wiki_lfs_enabled) { false }
@@ -536,4 +537,98 @@ RSpec.shared_examples 'wiki model' do
expect(subject.hook_attrs.keys).to contain_exactly(:web_url, :git_ssh_url, :git_http_url, :path_with_namespace, :default_branch)
end
end
+
+ describe '#default_branch' do
+ subject { wiki.default_branch }
+
+ before do
+ allow(Gitlab::DefaultBranch).to receive(:value).and_return('main')
+ end
+
+ context 'when repository is not created' do
+ let(:wiki_container) { wiki_container_without_repo }
+
+ it 'returns the instance default branch' do
+ expect(subject).to eq 'main'
+ end
+ end
+
+ context 'when repository is empty' do
+ let(:wiki_container) { wiki_container_without_repo }
+
+ before do
+ wiki.repository.create_if_not_exists
+ end
+
+ it 'returns the instance default branch' do
+ expect(subject).to eq 'main'
+ end
+ end
+
+ context 'when repository is not empty' do
+ it 'returns the repository default branch' do
+ wiki.create_page('index', 'test content')
+
+ expect(subject).to eq wiki.repository.root_ref
+ end
+ end
+ end
+
+ describe '#create_wiki_repository' do
+ let(:head_path) { Rails.root.join(TestEnv.repos_path, "#{wiki.disk_path}.git", 'HEAD') }
+ let(:default_branch) { 'foo' }
+
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return(default_branch)
+ end
+
+ subject { wiki.create_wiki_repository }
+
+ context 'when repository is not created' do
+ let(:wiki_container) { wiki_container_without_repo }
+
+ it 'changes the HEAD reference to the default branch' do
+ expect(wiki.empty?).to eq true
+
+ subject
+
+ expect(File.read(head_path).squish).to eq "ref: refs/heads/#{default_branch}"
+ end
+ end
+
+ context 'when repository is empty' do
+ let(:wiki_container) { wiki_container_without_repo }
+
+ it 'changes the HEAD reference to the default branch' do
+ wiki.repository.create_if_not_exists
+ wiki.repository.raw_repository.write_ref('HEAD', 'refs/heads/bar')
+
+ subject
+
+ expect(File.read(head_path).squish).to eq "ref: refs/heads/#{default_branch}"
+ end
+ end
+
+ context 'when repository is not empty' do
+ before do
+ wiki.create_page('index', 'test content')
+ end
+
+ it 'does nothing when HEAD points to the right branch' do
+ expect(wiki.repository.raw_repository).not_to receive(:write_ref)
+
+ subject
+ end
+
+ context 'when HEAD points to the wrong branch' do
+ it 'rewrites HEAD with the right branch' do
+ wiki.repository.raw_repository.write_ref('HEAD', 'refs/heads/bar')
+
+ subject
+
+ expect(File.read(head_path).squish).to eq "ref: refs/heads/#{default_branch}"
+ end
+ end
+ end
+ end
end
diff --git a/spec/support/shared_examples/namespaces/traversal_examples.rb b/spec/support/shared_examples/namespaces/traversal_examples.rb
index ccc64c80fd4..f09634556c3 100644
--- a/spec/support/shared_examples/namespaces/traversal_examples.rb
+++ b/spec/support/shared_examples/namespaces/traversal_examples.rb
@@ -12,16 +12,18 @@ RSpec.shared_examples 'namespace traversal' do
it "makes a recursive query" do
groups.each do |group|
- expect { group.public_send(recursive_method).load }.to make_queries_matching(/WITH RECURSIVE/)
+ expect { group.public_send(recursive_method).try(:load) }.to make_queries_matching(/WITH RECURSIVE/)
end
end
end
- describe '#root_ancestor' do
- let_it_be(:group) { create(:group) }
- let_it_be(:nested_group) { create(:group, parent: group) }
- let_it_be(:deep_nested_group) { create(:group, parent: nested_group) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:nested_group) { create(:group, parent: group) }
+ let_it_be(:deep_nested_group) { create(:group, parent: nested_group) }
+ let_it_be(:very_deep_nested_group) { create(:group, parent: deep_nested_group) }
+ let_it_be(:groups) { [group, nested_group, deep_nested_group, very_deep_nested_group] }
+ describe '#root_ancestor' do
it 'returns the correct root ancestor' do
expect(group.root_ancestor).to eq(group)
expect(nested_group.root_ancestor).to eq(group)
@@ -29,8 +31,6 @@ RSpec.shared_examples 'namespace traversal' do
end
describe '#recursive_root_ancestor' do
- let(:groups) { [group, nested_group, deep_nested_group] }
-
it "is equivalent to #recursive_root_ancestor" do
groups.each do |group|
expect(group.root_ancestor).to eq(group.recursive_root_ancestor)
@@ -40,12 +40,8 @@ RSpec.shared_examples 'namespace traversal' do
end
describe '#self_and_hierarchy' do
- let!(:group) { create(:group, path: 'git_lab') }
- let!(:nested_group) { create(:group, parent: group) }
- let!(:deep_nested_group) { create(:group, parent: nested_group) }
- let!(:very_deep_nested_group) { create(:group, parent: deep_nested_group) }
- let!(:another_group) { create(:group, path: 'gitllab') }
- let!(:another_group_nested) { create(:group, path: 'foo', parent: another_group) }
+ let!(:another_group) { create(:group) }
+ let!(:another_group_nested) { create(:group, parent: another_group) }
it 'returns the correct tree' do
expect(group.self_and_hierarchy).to contain_exactly(group, nested_group, deep_nested_group, very_deep_nested_group)
@@ -54,18 +50,11 @@ RSpec.shared_examples 'namespace traversal' do
end
describe '#recursive_self_and_hierarchy' do
- let(:groups) { [group, nested_group, very_deep_nested_group] }
-
it_behaves_like 'recursive version', :self_and_hierarchy
end
end
describe '#ancestors' do
- let_it_be(:group) { create(:group) }
- let_it_be(:nested_group) { create(:group, parent: group) }
- let_it_be(:deep_nested_group) { create(:group, parent: nested_group) }
- let_it_be(:very_deep_nested_group) { create(:group, parent: deep_nested_group) }
-
it 'returns the correct ancestors' do
# #reload is called to make sure traversal_ids are reloaded
expect(very_deep_nested_group.reload.ancestors).to contain_exactly(group, nested_group, deep_nested_group)
@@ -75,18 +64,28 @@ RSpec.shared_examples 'namespace traversal' do
end
describe '#recursive_ancestors' do
- let(:groups) { [nested_group, deep_nested_group, very_deep_nested_group] }
+ let_it_be(:groups) { [nested_group, deep_nested_group, very_deep_nested_group] }
it_behaves_like 'recursive version', :ancestors
end
end
- describe '#self_and_ancestors' do
- let(:group) { create(:group) }
- let(:nested_group) { create(:group, parent: group) }
- let(:deep_nested_group) { create(:group, parent: nested_group) }
- let(:very_deep_nested_group) { create(:group, parent: deep_nested_group) }
+ describe '#ancestor_ids' do
+ it 'returns the correct ancestor ids' do
+ expect(very_deep_nested_group.ancestor_ids).to contain_exactly(group.id, nested_group.id, deep_nested_group.id)
+ expect(deep_nested_group.ancestor_ids).to contain_exactly(group.id, nested_group.id)
+ expect(nested_group.ancestor_ids).to contain_exactly(group.id)
+ expect(group.ancestor_ids).to be_empty
+ end
+
+ describe '#recursive_ancestor_ids' do
+ let_it_be(:groups) { [nested_group, deep_nested_group, very_deep_nested_group] }
+
+ it_behaves_like 'recursive version', :ancestor_ids
+ end
+ end
+ describe '#self_and_ancestors' do
it 'returns the correct ancestors' do
expect(very_deep_nested_group.self_and_ancestors).to contain_exactly(group, nested_group, deep_nested_group, very_deep_nested_group)
expect(deep_nested_group.self_and_ancestors).to contain_exactly(group, nested_group, deep_nested_group)
@@ -95,19 +94,30 @@ RSpec.shared_examples 'namespace traversal' do
end
describe '#recursive_self_and_ancestors' do
- let(:groups) { [nested_group, deep_nested_group, very_deep_nested_group] }
+ let_it_be(:groups) { [nested_group, deep_nested_group, very_deep_nested_group] }
it_behaves_like 'recursive version', :self_and_ancestors
end
end
+ describe '#self_and_ancestor_ids' do
+ it 'returns the correct ancestor ids' do
+ expect(very_deep_nested_group.self_and_ancestor_ids).to contain_exactly(group.id, nested_group.id, deep_nested_group.id, very_deep_nested_group.id)
+ expect(deep_nested_group.self_and_ancestor_ids).to contain_exactly(group.id, nested_group.id, deep_nested_group.id)
+ expect(nested_group.self_and_ancestor_ids).to contain_exactly(group.id, nested_group.id)
+ expect(group.self_and_ancestor_ids).to contain_exactly(group.id)
+ end
+
+ describe '#recursive_self_and_ancestor_ids' do
+ let_it_be(:groups) { [nested_group, deep_nested_group, very_deep_nested_group] }
+
+ it_behaves_like 'recursive version', :self_and_ancestor_ids
+ end
+ end
+
describe '#descendants' do
- let!(:group) { create(:group, path: 'git_lab') }
- let!(:nested_group) { create(:group, parent: group) }
- let!(:deep_nested_group) { create(:group, parent: nested_group) }
- let!(:very_deep_nested_group) { create(:group, parent: deep_nested_group) }
- let!(:another_group) { create(:group, path: 'gitllab') }
- let!(:another_group_nested) { create(:group, path: 'foo', parent: another_group) }
+ let!(:another_group) { create(:group) }
+ let!(:another_group_nested) { create(:group, parent: another_group) }
it 'returns the correct descendants' do
expect(very_deep_nested_group.descendants.to_a).to eq([])
@@ -117,19 +127,13 @@ RSpec.shared_examples 'namespace traversal' do
end
describe '#recursive_descendants' do
- let(:groups) { [group, nested_group, deep_nested_group, very_deep_nested_group] }
-
it_behaves_like 'recursive version', :descendants
end
end
describe '#self_and_descendants' do
- let!(:group) { create(:group, path: 'git_lab') }
- let!(:nested_group) { create(:group, parent: group) }
- let!(:deep_nested_group) { create(:group, parent: nested_group) }
- let!(:very_deep_nested_group) { create(:group, parent: deep_nested_group) }
- let!(:another_group) { create(:group, path: 'gitllab') }
- let!(:another_group_nested) { create(:group, path: 'foo', parent: another_group) }
+ let!(:another_group) { create(:group) }
+ let!(:another_group_nested) { create(:group, parent: another_group) }
it 'returns the correct descendants' do
expect(very_deep_nested_group.self_and_descendants).to contain_exactly(very_deep_nested_group)
@@ -139,24 +143,18 @@ RSpec.shared_examples 'namespace traversal' do
end
describe '#recursive_self_and_descendants' do
- let(:groups) { [group, nested_group, deep_nested_group, very_deep_nested_group] }
+ let_it_be(:groups) { [group, nested_group, deep_nested_group] }
it_behaves_like 'recursive version', :self_and_descendants
end
end
describe '#self_and_descendant_ids' do
- let!(:group) { create(:group, path: 'git_lab') }
- let!(:nested_group) { create(:group, parent: group) }
- let!(:deep_nested_group) { create(:group, parent: nested_group) }
-
subject { group.self_and_descendant_ids.pluck(:id) }
- it { is_expected.to contain_exactly(group.id, nested_group.id, deep_nested_group.id) }
+ it { is_expected.to contain_exactly(group.id, nested_group.id, deep_nested_group.id, very_deep_nested_group.id) }
describe '#recursive_self_and_descendant_ids' do
- let(:groups) { [group, nested_group, deep_nested_group] }
-
it_behaves_like 'recursive version', :self_and_descendant_ids
end
end
diff --git a/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
index 3cdba315d1f..4d142199c95 100644
--- a/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
@@ -233,6 +233,7 @@ RSpec.shared_examples 'issuable quick actions' do
context 'when user can update issuable' do
let_it_be(:developer) { create(:user) }
+
let(:note_author) { developer }
before do
@@ -260,6 +261,7 @@ RSpec.shared_examples 'issuable quick actions' do
context 'when user cannot update issuable' do
let_it_be(:non_member) { create(:user) }
+
let(:note_author) { non_member }
it 'applies commands that user can execute' do
diff --git a/spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb
index 92849ddf1cb..052fd0622d0 100644
--- a/spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb
@@ -72,6 +72,24 @@ RSpec.shared_examples 'issuable time tracker' do |issuable_type|
end
end
+ it 'shows the time tracking report when link is clicked' do
+ submit_time('/estimate 1w')
+ submit_time('/spend 1d')
+
+ wait_for_requests
+
+ page.within '.time-tracking-component-wrap' do
+ click_link 'Time tracking report'
+
+ wait_for_requests
+ end
+
+ page.within '#time-tracking-report' do
+ expect(find('tbody')).to have_content maintainer.name
+ expect(find('tbody')).to have_content '1d'
+ end
+ end
+
it 'hides the help state when close icon is clicked' do
page.within '.time-tracking-component-wrap' do
find('.help-button').click
diff --git a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
index 0530aa8c760..1f68dd7a382 100644
--- a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
@@ -12,15 +12,17 @@ RSpec.shared_context 'Debian repository shared context' do |container_type, can_
let_it_be(:user, freeze: true) { create(:user) }
let_it_be(:personal_access_token, freeze: true) { create(:personal_access_token, user: user) }
- let_it_be(:private_distribution, freeze: true) { create("debian_#{container_type}_distribution", container: private_container, codename: 'existing-codename') }
+ let_it_be(:private_distribution, freeze: true) { create("debian_#{container_type}_distribution", :with_file, container: private_container, codename: 'existing-codename') }
let_it_be(:private_component, freeze: true) { create("debian_#{container_type}_component", distribution: private_distribution, name: 'existing-component') }
let_it_be(:private_architecture_all, freeze: true) { create("debian_#{container_type}_architecture", distribution: private_distribution, name: 'all') }
let_it_be(:private_architecture, freeze: true) { create("debian_#{container_type}_architecture", distribution: private_distribution, name: 'existing-arch') }
+ let_it_be(:private_component_file) { create("debian_#{container_type}_component_file", component: private_component, architecture: private_architecture) }
- let_it_be(:public_distribution, freeze: true) { create("debian_#{container_type}_distribution", container: public_container, codename: 'existing-codename') }
+ let_it_be(:public_distribution, freeze: true) { create("debian_#{container_type}_distribution", :with_file, container: public_container, codename: 'existing-codename') }
let_it_be(:public_component, freeze: true) { create("debian_#{container_type}_component", distribution: public_distribution, name: 'existing-component') }
let_it_be(:public_architecture_all, freeze: true) { create("debian_#{container_type}_architecture", distribution: public_distribution, name: 'all') }
let_it_be(:public_architecture, freeze: true) { create("debian_#{container_type}_architecture", distribution: public_distribution, name: 'existing-arch') }
+ let_it_be(:public_component_file) { create("debian_#{container_type}_component_file", component: public_component, architecture: public_architecture) }
if container_type == :group
let_it_be(:private_project) { create(:project, :private, group: private_container) }
@@ -40,14 +42,15 @@ RSpec.shared_context 'Debian repository shared context' do |container_type, can_
let(:visibility_level) { :public }
let(:distribution) { { private: private_distribution, public: public_distribution }[visibility_level] }
+ let(:architecture) { { private: private_architecture, public: public_architecture }[visibility_level] }
+ let(:component) { { private: private_component, public: public_component }[visibility_level] }
+ let(:component_file) { { private: private_component_file, public: public_component_file }[visibility_level] }
- let(:component) { 'main' }
- let(:architecture) { 'amd64' }
let(:source_package) { 'sample' }
let(:letter) { source_package[0..2] == 'lib' ? source_package[0..3] : source_package[0] }
let(:package_name) { 'libsample0' }
let(:package_version) { '1.2.3~alpha2' }
- let(:file_name) { "#{package_name}_#{package_version}_#{architecture}.deb" }
+ let(:file_name) { "#{package_name}_#{package_version}_#{architecture.name}.deb" }
let(:method) { :get }
diff --git a/spec/support/shared_examples/requests/api/graphql/noteable_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/noteable_shared_examples.rb
index 9cf5bc04f65..7e1f4500779 100644
--- a/spec/support/shared_examples/requests/api/graphql/noteable_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/noteable_shared_examples.rb
@@ -31,6 +31,28 @@ RSpec.shared_examples 'a noteable graphql type we can query' do
expect(graphql_data_at(*path_to_noteable, :discussions, :nodes))
.to match_array(expected)
end
+
+ it 'can fetch discussion noteable' do
+ create(discussion_factory, project: project, noteable: noteable)
+ fields =
+ <<-QL.strip_heredoc
+ discussions {
+ nodes {
+ noteable {
+ __typename
+ ... on #{noteable.class.name.demodulize} {
+ id
+ }
+ }
+ }
+ }
+ QL
+
+ post_graphql(query(fields), current_user: current_user)
+
+ data = graphql_data_at(*path_to_noteable, :discussions, :nodes, :noteable, :id)
+ expect(data[0]).to eq(global_id_of(noteable))
+ end
end
describe '.notes' do
diff --git a/spec/support/shared_examples/requests/api/helm_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/helm_packages_shared_examples.rb
index 585c4fb8a4e..1ad38a17f9c 100644
--- a/spec/support/shared_examples/requests/api/helm_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/helm_packages_shared_examples.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
-RSpec.shared_examples 'rejects helm packages access' do |user_type, status, add_member = true|
+RSpec.shared_examples 'rejects helm packages access' do |user_type, status|
context "for user type #{user_type}" do
before do
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ project.send("add_#{user_type}", user) if user_type != :anonymous && user_type != :not_a_member
end
it_behaves_like 'returning response status', status
@@ -18,19 +18,170 @@ RSpec.shared_examples 'rejects helm packages access' do |user_type, status, add_
end
end
-RSpec.shared_examples 'process helm download content request' do |user_type, status, add_member = true|
+RSpec.shared_examples 'process helm service index request' do |user_type, status|
context "for user type #{user_type}" do
before do
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ project.send("add_#{user_type}", user) if user_type != :anonymous && user_type != :not_a_member
end
- it_behaves_like 'returning response status', status
+ it 'returns a valid YAML response', :aggregate_failures do
+ subject
+
+ expect(response).to have_gitlab_http_status(status)
+
+ expect(response.media_type).to eq('text/yaml')
+ expect(response.body).to start_with("---\napiVersion: v1\nentries:\n")
+
+ yaml_response = YAML.safe_load(response.body)
+
+ expect(yaml_response.keys).to contain_exactly('apiVersion', 'entries', 'generated', 'serverInfo')
+ expect(yaml_response['entries']).to be_a(Hash)
+ expect(yaml_response['entries'].keys).to contain_exactly(package.name)
+ expect(yaml_response['serverInfo']).to eq({ 'contextPath' => "/api/v4/projects/#{project.id}/packages/helm" })
+
+ package_entry = yaml_response['entries'][package.name]
+
+ expect(package_entry.length).to eq(1)
+ expect(package_entry.first.keys).to contain_exactly('name', 'version', 'apiVersion', 'created', 'digest', 'urls')
+ expect(package_entry.first['digest']).to eq('fd2b2fa0329e80a2a602c2bb3b40608bcd6ee5cf96cf46fd0d2800a4c129c9db')
+ expect(package_entry.first['urls']).to eq(["charts/#{package.name}-#{package.version}.tgz"])
+ end
+ end
+end
+
+RSpec.shared_examples 'process helm workhorse authorization' do |user_type, status, test_bypass: false|
+ context "for user type #{user_type}" do
+ before do
+ project.send("add_#{user_type}", user) if user_type != :anonymous && user_type != :not_a_member
+ end
+
+ it 'has the proper status and content type' do
+ subject
+
+ expect(response).to have_gitlab_http_status(status)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ end
+
+ context 'with a request that bypassed gitlab-workhorse' do
+ let(:headers) do
+ basic_auth_header(user.username, personal_access_token.token)
+ .merge(workhorse_headers)
+ .tap { |h| h.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER) }
+ end
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it_behaves_like 'returning response status', :forbidden
+ end
+ end
+end
+
+RSpec.shared_examples 'process helm upload' do |user_type, status|
+ shared_examples 'creates helm package files' do
+ it 'creates package files' do
+ expect(::Packages::Helm::ExtractionWorker).to receive(:perform_async).once
+ expect { subject }
+ .to change { project.packages.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(1)
+ expect(response).to have_gitlab_http_status(status)
+
+ package_file = project.packages.last.package_files.reload.last
+ expect(package_file.file_name).to eq('package.tgz')
+ end
+ end
+
+ context "for user type #{user_type}" do
+ before do
+ project.send("add_#{user_type}", user) if user_type != :anonymous && user_type != :not_a_member
+ end
+
+ context 'with object storage disabled' do
+ before do
+ stub_package_file_object_storage(enabled: false)
+ end
+
+ context 'without a file from workhorse' do
+ let(:send_rewritten_field) { false }
+
+ it_behaves_like 'returning response status', :bad_request
+ end
+
+ context 'with correct params' do
+ it_behaves_like 'package workhorse uploads'
+ it_behaves_like 'creates helm package files'
+ it_behaves_like 'a package tracking event', 'API::HelmPackages', 'push_package'
+ end
+ end
+
+ context 'with object storage enabled' do
+ let(:tmp_object) do
+ fog_connection.directories.new(key: 'packages').files.create( # rubocop:disable Rails/SaveBang
+ key: "tmp/uploads/#{file_name}",
+ body: 'content'
+ )
+ end
+
+ let(:fog_file) { fog_to_uploaded_file(tmp_object) }
+ let(:params) { { chart: fog_file, 'chart.remote_id' => file_name } }
+
+ context 'and direct upload enabled' do
+ let(:fog_connection) do
+ stub_package_file_object_storage(direct_upload: true)
+ end
+
+ it_behaves_like 'creates helm package files'
+
+ ['123123', '../../123123'].each do |remote_id|
+ context "with invalid remote_id: #{remote_id}" do
+ let(:params) do
+ {
+ chart: fog_file,
+ 'chart.remote_id' => remote_id
+ }
+ end
+
+ it_behaves_like 'returning response status', :forbidden
+ end
+ end
+ end
+
+ context 'and direct upload disabled' do
+ context 'and background upload disabled' do
+ let(:fog_connection) do
+ stub_package_file_object_storage(direct_upload: false, background_upload: false)
+ end
+
+ it_behaves_like 'creates helm package files'
+ end
+
+ context 'and background upload enabled' do
+ let(:fog_connection) do
+ stub_package_file_object_storage(direct_upload: false, background_upload: true)
+ end
+
+ it_behaves_like 'creates helm package files'
+ end
+ end
+ end
+
+ it_behaves_like 'background upload schedules a file migration'
+ end
+end
+
+RSpec.shared_examples 'process helm download content request' do |user_type, status|
+ context "for user type #{user_type}" do
+ before do
+ project.send("add_#{user_type}", user) if user_type != :anonymous && user_type != :not_a_member
+ end
it_behaves_like 'a package tracking event', 'API::HelmPackages', 'pull_package'
- it 'returns a valid package archive' do
+ it 'returns expected status and a valid package archive' do
subject
+ expect(response).to have_gitlab_http_status(status)
expect(response.media_type).to eq('application/octet-stream')
end
end
@@ -51,3 +202,69 @@ RSpec.shared_examples 'rejects helm access with unknown project id' do
end
end
end
+
+RSpec.shared_examples 'handling helm chart index requests' do
+ context 'with valid project' do
+ subject { get api(url), headers: headers }
+
+ using RSpec::Parameterized::TableSyntax
+
+ context 'personal token' do
+ where(:visibility, :user_role, :shared_examples_name, :expected_status) do
+ :public | :guest | 'process helm service index request' | :success
+ :public | :not_a_member | 'process helm service index request' | :success
+ :public | :anonymous | 'process helm service index request' | :success
+ :private | :reporter | 'process helm service index request' | :success
+ :private | :guest | 'rejects helm packages access' | :forbidden
+ :private | :not_a_member | 'rejects helm packages access' | :not_found
+ :private | :anonymous | 'rejects helm packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, personal_access_token.token) }
+
+ before do
+ project.update!(visibility: visibility.to_s)
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status]
+ end
+ end
+
+ context 'when an invalid token is passed' do
+ let(:headers) { basic_auth_header(user.username, 'wrong') }
+
+ it_behaves_like 'returning response status', :unauthorized
+ end
+
+ context 'with job token' do
+ where(:visibility, :user_role, :shared_examples_name, :expected_status) do
+ :public | :guest | 'process helm service index request' | :success
+ :public | :not_a_member | 'process helm service index request' | :success
+ :public | :anonymous | 'process helm service index request' | :success
+ :private | :reporter | 'process helm service index request' | :success
+ :private | :guest | 'rejects helm packages access' | :forbidden
+ :private | :not_a_member | 'rejects helm packages access' | :not_found
+ :private | :anonymous | 'rejects helm packages access' | :unauthorized
+ end
+
+ with_them do
+ let_it_be(:ci_build) { create(:ci_build, project: project, user: user, status: :running) }
+
+ let(:headers) { user_role == :anonymous ? {} : job_basic_auth_header(ci_build) }
+
+ before do
+ project.update!(visibility: visibility.to_s)
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status]
+ end
+ end
+ end
+
+ it_behaves_like 'deploy token for package GET requests'
+
+ it_behaves_like 'rejects helm access with unknown project id' do
+ subject { get api(url) }
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
index 617fdecbb5b..878cbc10a24 100644
--- a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
@@ -136,8 +136,8 @@ RSpec.shared_examples 'process nuget workhorse authorization' do |user_type, sta
end
end
-RSpec.shared_examples 'process nuget upload' do |user_type, status, add_member = true|
- RSpec.shared_examples 'creates nuget package files' do
+RSpec.shared_examples 'process nuget upload' do |user_type, status, add_member = true, symbol_package = false|
+ shared_examples 'creates nuget package files' do
it 'creates package files' do
expect(::Packages::Nuget::ExtractionWorker).to receive(:perform_async).once
expect { subject }
@@ -146,7 +146,7 @@ RSpec.shared_examples 'process nuget upload' do |user_type, status, add_member =
expect(response).to have_gitlab_http_status(status)
package_file = target.packages.last.package_files.reload.last
- expect(package_file.file_name).to eq('package.nupkg')
+ expect(package_file.file_name).to eq(file_name)
end
end
@@ -169,7 +169,12 @@ RSpec.shared_examples 'process nuget upload' do |user_type, status, add_member =
context 'with correct params' do
it_behaves_like 'package workhorse uploads'
it_behaves_like 'creates nuget package files'
- it_behaves_like 'a package tracking event', 'API::NugetPackages', 'push_package'
+
+ if symbol_package
+ it_behaves_like 'a package tracking event', 'API::NugetPackages', 'push_symbol_package'
+ else
+ it_behaves_like 'a package tracking event', 'API::NugetPackages', 'push_package'
+ end
end
end
@@ -300,6 +305,18 @@ RSpec.shared_examples 'process nuget download content request' do |user_type, st
it_behaves_like 'rejects nuget packages access', :anonymous, :not_found
end
+ context 'with symbol package' do
+ let(:format) { 'snupkg' }
+
+ it 'returns a valid package archive' do
+ subject
+
+ expect(response.media_type).to eq('application/octet-stream')
+ end
+
+ it_behaves_like 'a package tracking event', 'API::NugetPackages', 'pull_symbol_package'
+ end
+
context 'with lower case package name' do
let_it_be(:package_name) { 'dummy.package' }
@@ -407,3 +424,114 @@ RSpec.shared_examples 'rejects nuget access with unknown target id' do
end
end
end
+
+RSpec.shared_examples 'nuget authorize upload endpoint' do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'with valid project' do
+ where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'process nuget workhorse authorization' | :success
+ 'PUBLIC' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :guest | false | true | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | true | true | 'process nuget workhorse authorization' | :success
+ 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+
+ before do
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+
+ it_behaves_like 'deploy token for package uploads'
+
+ it_behaves_like 'job token for package uploads', authorize_endpoint: true do
+ let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
+ end
+
+ it_behaves_like 'rejects nuget access with unknown target id'
+
+ it_behaves_like 'rejects nuget access with invalid target id'
+end
+
+RSpec.shared_examples 'nuget upload endpoint' do |symbol_package: false|
+ using RSpec::Parameterized::TableSyntax
+
+ context 'with valid project' do
+ where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'process nuget upload' | :created
+ 'PUBLIC' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :guest | false | true | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | true | true | 'process nuget upload' | :created
+ 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+ let(:snowplow_gitlab_standard_context) { { project: project, user: user, namespace: project.namespace } }
+
+ before do
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member], symbol_package
+ end
+ end
+
+ it_behaves_like 'deploy token for package uploads'
+
+ it_behaves_like 'job token for package uploads' do
+ let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
+ end
+
+ it_behaves_like 'rejects nuget access with unknown target id'
+
+ it_behaves_like 'rejects nuget access with invalid target id'
+
+ context 'file size above maximum limit' do
+ let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token).merge(workhorse_headers) }
+
+ before do
+ allow_next_instance_of(UploadedFile) do |uploaded_file|
+ allow(uploaded_file).to receive(:size).and_return(project.actual_limits.nuget_max_file_size + 1)
+ end
+ end
+
+ it_behaves_like 'returning response status', :bad_request
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/packages_shared_examples.rb b/spec/support/shared_examples/requests/api/packages_shared_examples.rb
index 42c29084d7b..ecde4ee8565 100644
--- a/spec/support/shared_examples/requests/api/packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/packages_shared_examples.rb
@@ -100,7 +100,7 @@ RSpec.shared_examples 'job token for package GET requests' do
end
end
-RSpec.shared_examples 'job token for package uploads' do |authorize_endpoint: false|
+RSpec.shared_examples 'job token for package uploads' do |authorize_endpoint: false, accept_invalid_username: false|
context 'with job token headers' do
let(:headers) { basic_auth_header(::Gitlab::Auth::CI_JOB_USER, job.token).merge(workhorse_headers) }
@@ -133,7 +133,11 @@ RSpec.shared_examples 'job token for package uploads' do |authorize_endpoint: fa
context 'invalid user' do
let(:headers) { basic_auth_header('foo', job.token).merge(workhorse_headers) }
- it_behaves_like 'returning response status', :unauthorized
+ if accept_invalid_username
+ it_behaves_like 'returning response status', :success
+ else
+ it_behaves_like 'returning response status', :unauthorized
+ end
end
end
end
@@ -143,7 +147,7 @@ RSpec.shared_examples 'a package tracking event' do |category, action|
stub_feature_flags(collect_package_events: true)
end
- it "creates a gitlab tracking event #{action}", :snowplow do
+ it "creates a gitlab tracking event #{action}", :snowplow, :aggregate_failures do
expect { subject }.to change { Packages::Event.count }.by(1)
expect_snowplow_event(category: category, action: action, **snowplow_gitlab_standard_context)
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb
index 218a3462c35..92a7d7ab3a3 100644
--- a/spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb
@@ -13,7 +13,7 @@ RSpec.shared_examples 'creates an alert management alert or errors' do
it 'executes the alert service hooks' do
expect_next_instance_of(AlertManagement::Alert) do |alert|
- expect(alert).to receive(:execute_services)
+ expect(alert).to receive(:execute_integrations)
end
subject
@@ -84,7 +84,7 @@ end
# - `alert`, the alert for which events should be incremented
RSpec.shared_examples 'adds an alert management alert event' do
specify do
- expect(alert).not_to receive(:execute_services)
+ expect(alert).not_to receive(:execute_integrations)
expect { subject }.to change { alert.reload.events }.by(1)
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb
index c6ac07b6dd5..98834f01ce2 100644
--- a/spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb
@@ -20,7 +20,7 @@ end
RSpec.shared_examples 'processes incident issues' do |with_issue: false|
before do
allow_next_instance_of(AlertManagement::Alert) do |alert|
- allow(alert).to receive(:execute_services)
+ allow(alert).to receive(:execute_integrations)
end
end
diff --git a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
index ba176b616c3..eafcbd77040 100644
--- a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
@@ -157,8 +157,13 @@ end
RSpec.shared_examples 'a container registry auth service' do
include_context 'container registry auth service context'
+ before do
+ stub_feature_flags(container_registry_migration_phase1: false)
+ end
+
describe '#full_access_token' do
let_it_be(:project) { create(:project) }
+
let(:token) { described_class.full_access_token(project.full_path) }
subject { { token: token } }
@@ -172,6 +177,7 @@ RSpec.shared_examples 'a container registry auth service' do
describe '#pull_access_token' do
let_it_be(:project) { create(:project) }
+
let(:token) { described_class.pull_access_token(project.full_path) }
subject { { token: token } }
@@ -432,6 +438,7 @@ RSpec.shared_examples 'a container registry auth service' do
context 'for external user' do
context 'disallow anyone to pull or push images' do
let_it_be(:current_user) { create(:user, external: true) }
+
let(:current_params) do
{ scopes: ["repository:#{project.full_path}:pull,push"] }
end
@@ -442,6 +449,7 @@ RSpec.shared_examples 'a container registry auth service' do
context 'disallow anyone to delete images' do
let_it_be(:current_user) { create(:user, external: true) }
+
let(:current_params) do
{ scopes: ["repository:#{project.full_path}:*"] }
end
@@ -452,6 +460,7 @@ RSpec.shared_examples 'a container registry auth service' do
context 'disallow anyone to delete images since registry 2.7' do
let_it_be(:current_user) { create(:user, external: true) }
+
let(:current_params) do
{ scopes: ["repository:#{project.full_path}:delete"] }
end
@@ -620,6 +629,22 @@ RSpec.shared_examples 'a container registry auth service' do
end
end
end
+
+ context 'for project with private container registry' do
+ let_it_be(:project, reload: true) { create(:project, :public) }
+
+ before do
+ project.project_feature.update!(container_registry_access_level: ProjectFeature::PRIVATE)
+ end
+
+ it_behaves_like 'pullable for being team member'
+
+ context 'when you are admin' do
+ let_it_be(:current_user) { create(:admin) }
+
+ it_behaves_like 'pullable for being team member'
+ end
+ end
end
context 'when pushing' do
diff --git a/spec/support/shared_examples/services/jira_import/user_mapper_services_shared_examples.rb b/spec/support/shared_examples/services/jira_import/user_mapper_services_shared_examples.rb
index cbe5c7d89db..0151723793e 100644
--- a/spec/support/shared_examples/services/jira_import/user_mapper_services_shared_examples.rb
+++ b/spec/support/shared_examples/services/jira_import/user_mapper_services_shared_examples.rb
@@ -3,7 +3,7 @@
RSpec.shared_examples 'mapping jira users' do
let(:client) { double }
- let_it_be(:jira_service) { create(:jira_service, project: project, active: true) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project, active: true) }
before do
allow(subject).to receive(:client).and_return(client)
diff --git a/spec/support/shared_examples/services/packages_shared_examples.rb b/spec/support/shared_examples/services/packages_shared_examples.rb
index 72878e925dc..6bc4f171d9c 100644
--- a/spec/support/shared_examples/services/packages_shared_examples.rb
+++ b/spec/support/shared_examples/services/packages_shared_examples.rb
@@ -43,6 +43,7 @@ end
RSpec.shared_examples 'assigns status to package' do
context 'with status param' do
let_it_be(:status) { 'hidden' }
+
let(:params) { super().merge(status: status) }
it 'assigns the status to the package' do
diff --git a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
index 275ddebc18c..14af35e58b7 100644
--- a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
@@ -123,9 +123,10 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
.with(repository.raw)
.and_raise(Gitlab::Git::CommandError)
- result = subject.execute
+ expect do
+ subject.execute
+ end.to raise_error(Gitlab::Git::CommandError)
- expect(result).to be_error
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
expect(repository_storage_move).to be_failed
@@ -149,9 +150,10 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
expect(original_repository_double).to receive(:remove)
.and_raise(Gitlab::Git::CommandError)
- result = subject.execute
+ expect do
+ subject.execute
+ end.to raise_error(Gitlab::Git::CommandError)
- expect(result).to be_error
expect(repository_storage_move).to be_cleanup_failed
end
end
@@ -170,9 +172,10 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
allow(repository_double).to receive(:checksum)
.and_return('not matching checksum')
- result = subject.execute
+ expect do
+ subject.execute
+ end.to raise_error(UpdateRepositoryStorageMethods::Error, /Failed to verify \w+ repository checksum from \w+ to not matching checksum/)
- expect(result).to be_error
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
end
diff --git a/spec/support/shared_examples/services/service_ping/complete_service_ping_payload_shared_examples.rb b/spec/support/shared_examples/services/service_ping/complete_service_ping_payload_shared_examples.rb
new file mode 100644
index 00000000000..8dcff99fb6f
--- /dev/null
+++ b/spec/support/shared_examples/services/service_ping/complete_service_ping_payload_shared_examples.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'complete service ping payload' do
+ it_behaves_like 'service ping payload with all expected metrics' do
+ let(:expected_metrics) do
+ standard_metrics + subscription_metrics + operational_metrics + optional_metrics
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/service_ping/service_ping_payload_with_all_expected_metrics_shared_examples.rb b/spec/support/shared_examples/services/service_ping/service_ping_payload_with_all_expected_metrics_shared_examples.rb
new file mode 100644
index 00000000000..535e7291b7e
--- /dev/null
+++ b/spec/support/shared_examples/services/service_ping/service_ping_payload_with_all_expected_metrics_shared_examples.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'service ping payload with all expected metrics' do
+ specify do
+ aggregate_failures do
+ expected_metrics.each do |metric|
+ is_expected.to have_usage_metric metric['key_path']
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/service_ping/service_ping_payload_without_restricted_metrics_shared_examples.rb b/spec/support/shared_examples/services/service_ping/service_ping_payload_without_restricted_metrics_shared_examples.rb
new file mode 100644
index 00000000000..9f18174cbc7
--- /dev/null
+++ b/spec/support/shared_examples/services/service_ping/service_ping_payload_without_restricted_metrics_shared_examples.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'service ping payload without restricted metrics' do
+ specify do
+ aggregate_failures do
+ restricted_metrics.each do |metric|
+ is_expected.not_to have_usage_metric metric['key_path']
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/snippets_shared_examples.rb b/spec/support/shared_examples/services/snippets_shared_examples.rb
index 0c4db7ded69..5a44f739b27 100644
--- a/spec/support/shared_examples/services/snippets_shared_examples.rb
+++ b/spec/support/shared_examples/services/snippets_shared_examples.rb
@@ -1,23 +1,6 @@
# frozen_string_literal: true
RSpec.shared_examples 'checking spam' do
- let(:request) { double(:request, headers: headers) }
- let(:headers) { nil }
- let(:api) { true }
- let(:captcha_response) { 'abc123' }
- let(:spam_log_id) { 1 }
- let(:disable_spam_action_service) { false }
-
- let(:extra_opts) do
- {
- request: request,
- api: api,
- captcha_response: captcha_response,
- spam_log_id: spam_log_id,
- disable_spam_action_service: disable_spam_action_service
- }
- end
-
before do
allow_next_instance_of(UserAgentDetailService) do |instance|
allow(instance).to receive(:create)
@@ -25,73 +8,20 @@ RSpec.shared_examples 'checking spam' do
end
it 'executes SpamActionService' do
- spam_params = Spam::SpamParams.new(
- api: api,
- captcha_response: captcha_response,
- spam_log_id: spam_log_id
- )
expect_next_instance_of(
Spam::SpamActionService,
{
spammable: kind_of(Snippet),
- request: request,
+ spam_params: spam_params,
user: an_instance_of(User),
action: action
}
) do |instance|
- expect(instance).to receive(:execute).with(spam_params: spam_params)
+ expect(instance).to receive(:execute)
end
subject
end
-
- context 'when CAPTCHA arguments are passed in the headers' do
- let(:headers) do
- {
- 'X-GitLab-Spam-Log-Id' => spam_log_id,
- 'X-GitLab-Captcha-Response' => captcha_response
- }
- end
-
- let(:extra_opts) do
- {
- request: request,
- api: api,
- disable_spam_action_service: disable_spam_action_service
- }
- end
-
- it 'executes the SpamActionService correctly' do
- spam_params = Spam::SpamParams.new(
- api: api,
- captcha_response: captcha_response,
- spam_log_id: spam_log_id
- )
- expect_next_instance_of(
- Spam::SpamActionService,
- {
- spammable: kind_of(Snippet),
- request: request,
- user: an_instance_of(User),
- action: action
- }
- ) do |instance|
- expect(instance).to receive(:execute).with(spam_params: spam_params)
- end
-
- subject
- end
- end
-
- context 'when spam action service is disabled' do
- let(:disable_spam_action_service) { true }
-
- it 'request parameter is not passed to the service' do
- expect(Spam::SpamActionService).not_to receive(:new)
-
- subject
- end
- end
end
shared_examples 'invalid params error response' do
diff --git a/spec/support/shared_examples/services/wikis/create_attachment_service_shared_examples.rb b/spec/support/shared_examples/services/wikis/create_attachment_service_shared_examples.rb
index 555a6d5eed0..1646c18a0ed 100644
--- a/spec/support/shared_examples/services/wikis/create_attachment_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/wikis/create_attachment_service_shared_examples.rb
@@ -25,6 +25,25 @@ RSpec.shared_examples 'Wikis::CreateAttachmentService#execute' do |container_typ
container.add_developer(user)
end
+ context 'creates wiki repository if it does not exist' do
+ let(:container) { create(container_type) } # rubocop:disable Rails/SaveBang
+
+ it 'creates wiki repository' do
+ expect { service.execute }.to change { container.wiki.repository.exists? }.to(true)
+ end
+
+ context 'if an error is raised creating the repository' do
+ it 'catches error and return gracefully' do
+ allow(container.wiki).to receive(:repository_exists?).and_return(false)
+
+ result = service.execute
+
+ expect(result[:status]).to eq :error
+ expect(result[:message]).to eq 'Error creating the wiki repository'
+ end
+ end
+ end
+
context 'creates branch if it does not exists' do
let(:branch_name) { 'new_branch' }
let(:opts) { file_opts.merge(branch_name: branch_name) }
diff --git a/spec/support/shared_examples/workers/in_product_marketing_email_shared_example.rb b/spec/support/shared_examples/workers/in_product_marketing_email_shared_example.rb
deleted file mode 100644
index c4391f61369..00000000000
--- a/spec/support/shared_examples/workers/in_product_marketing_email_shared_example.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'in-product marketing email' do
- before do
- stub_application_setting(in_product_marketing_emails_enabled: in_product_marketing_emails_enabled)
- stub_experiment(in_product_marketing_emails: experiment_active)
- allow(::Gitlab).to receive(:com?).and_return(is_gitlab_com)
- end
-
- it 'executes the email service service' do
- expect(Namespaces::InProductMarketingEmailsService).to receive(:send_for_all_tracks_and_intervals).exactly(executes_service).times
-
- subject.perform
- end
-end
diff --git a/spec/support/sidekiq.rb b/spec/support/sidekiq.rb
index 374997af1ec..dc475b92c0b 100644
--- a/spec/support/sidekiq.rb
+++ b/spec/support/sidekiq.rb
@@ -20,4 +20,25 @@ RSpec.configure do |config|
config.around(:example, :sidekiq_inline) do |example|
gitlab_sidekiq_inline { example.run }
end
+
+ # Some specs need to run mailers through Sidekiq explicitly, rather
+ # than the ActiveJob test adapter. There is a Rails bug that means we
+ # have to do some extra steps to make this happen:
+ # https://github.com/rails/rails/issues/37270
+ #
+ # In particular, we can't use an `around` hook because then the 'before' part
+ # of that will run before the `before_setup` hook in ActiveJob::TestHelper,
+ # which doesn't do what we want.
+ #
+ config.before(:example, :sidekiq_mailers) do
+ queue_adapter_changed_jobs.each { |k| k.queue_adapter = :sidekiq }
+ queue_adapter_changed_jobs.each(&:disable_test_adapter)
+ end
+
+ config.after(:example, :sidekiq_mailers) do
+ queue_adapter_changed_jobs.each do |klass|
+ klass.queue_adapter = :test
+ klass.enable_test_adapter(ActiveJob::QueueAdapters::TestAdapter.new)
+ end
+ end
end
diff --git a/spec/tasks/cache/clear/redis_spec.rb b/spec/tasks/cache/clear/redis_spec.rb
index 64ed83c649b..e8c62bbe124 100644
--- a/spec/tasks/cache/clear/redis_spec.rb
+++ b/spec/tasks/cache/clear/redis_spec.rb
@@ -36,8 +36,6 @@ RSpec.describe 'clearing redis cache', :clean_gitlab_redis_cache, :silence_stdou
let(:cache) { Gitlab::RepositorySetCache.new(repository) }
before do
- pending "Enable as part of https://gitlab.com/gitlab-org/gitlab/-/issues/331319"
-
cache.write(:foo, [:bar])
end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index e5a210bb344..ebaaf179546 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -209,6 +209,23 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect { run_rake_task("gitlab:backup:#{task}:create") }.to output(/Dumping /).to_stdout_from_any_process
end
end
+
+ it 'logs the progress to log file' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping database ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "[SKIPPED]")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping repositories ...")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping uploads ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping builds ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping artifacts ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping pages ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping lfs objects ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping container registry images ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "done").exactly(7).times
+
+ task_list.each do |task|
+ run_rake_task("gitlab:backup:#{task}:create")
+ end
+ end
end
end
@@ -377,6 +394,11 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
it 'passes through concurrency environment variables' do
+ # The way concurrency is handled will change with the `gitaly_backup`
+ # feature flag. For now we need to check that both ways continue to
+ # work. This will be cleaned up in the rollout issue.
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/333034
+
stub_env('GITLAB_BACKUP_MAX_CONCURRENCY', 5)
stub_env('GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY', 2)
@@ -385,6 +407,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
.with(max_concurrency: 5, max_storage_concurrency: 2)
.and_call_original
end
+ expect(::Backup::GitalyBackup).to receive(:new).with(anything, parallel: 5, parallel_storage: 2).and_call_original
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
end
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index 08ca6c32b49..03fbd238ee9 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -124,64 +124,31 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
describe 'clean_structure_sql' do
let_it_be(:clean_rake_task) { 'gitlab:db:clean_structure_sql' }
let_it_be(:test_task_name) { 'gitlab:db:_test_multiple_structure_cleans' }
- let_it_be(:structure_file) { 'db/structure.sql' }
let_it_be(:input) { 'this is structure data' }
+
let(:output) { StringIO.new }
before do
- stub_file_read(structure_file, content: input)
- allow(File).to receive(:open).with(structure_file, any_args).and_yield(output)
- end
+ structure_files = %w[db/structure.sql db/ci_structure.sql]
- after do
- Rake::Task[test_task_name].clear if Rake::Task.task_defined?(test_task_name)
- end
+ allow(File).to receive(:open).and_call_original
- it 'can be executed multiple times within another rake task' do
- expect_multiple_executions_of_task(test_task_name, clean_rake_task) do
- expect_next_instance_of(Gitlab::Database::SchemaCleaner) do |cleaner|
- expect(cleaner).to receive(:clean).with(output)
- end
+ structure_files.each do |structure_file|
+ stub_file_read(structure_file, content: input)
+ allow(File).to receive(:open).with(Rails.root.join(structure_file).to_s, any_args).and_yield(output)
end
end
- end
-
- describe 'load_custom_structure' do
- let_it_be(:db_config) { Rails.application.config_for(:database) }
- let_it_be(:custom_load_task) { 'gitlab:db:load_custom_structure' }
- let_it_be(:custom_filepath) { Pathname.new('db/directory') }
-
- it 'uses the psql command to load the custom structure file' do
- expect(Gitlab::Database::CustomStructure).to receive(:custom_dump_filepath).and_return(custom_filepath)
-
- expect(Kernel).to receive(:system)
- .with('psql', any_args, custom_filepath.to_path, db_config['database']).and_return(true)
-
- run_rake_task(custom_load_task)
- end
-
- it 'raises an error when the call to the psql command fails' do
- expect(Gitlab::Database::CustomStructure).to receive(:custom_dump_filepath).and_return(custom_filepath)
-
- expect(Kernel).to receive(:system)
- .with('psql', any_args, custom_filepath.to_path, db_config['database']).and_return(nil)
-
- expect { run_rake_task(custom_load_task) }.to raise_error(/failed to execute:\s*psql/)
- end
- end
-
- describe 'dump_custom_structure' do
- let_it_be(:test_task_name) { 'gitlab:db:_test_multiple_task_executions' }
- let_it_be(:custom_dump_task) { 'gitlab:db:dump_custom_structure' }
after do
Rake::Task[test_task_name].clear if Rake::Task.task_defined?(test_task_name)
end
it 'can be executed multiple times within another rake task' do
- expect_multiple_executions_of_task(test_task_name, custom_dump_task) do
- expect_next_instance_of(Gitlab::Database::CustomStructure) do |custom_structure|
- expect(custom_structure).to receive(:dump)
+ expect_multiple_executions_of_task(test_task_name, clean_rake_task, count: 2) do
+ database_count = ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).size
+
+ expect_next_instances_of(Gitlab::Database::SchemaCleaner, database_count) do |cleaner|
+ expect(cleaner).to receive(:clean).with(output)
end
end
end
diff --git a/spec/tasks/gitlab/packages/composer_rake_spec.rb b/spec/tasks/gitlab/packages/composer_rake_spec.rb
index 78013714de5..f4f43bf77d8 100644
--- a/spec/tasks/gitlab/packages/composer_rake_spec.rb
+++ b/spec/tasks/gitlab/packages/composer_rake_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe 'gitlab:packages:build_composer_cache namespace rake task', :sile
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
let_it_be(:project2) { create(:project, :custom_repo, files: { 'composer.json' => json2.to_json }, group: group) }
+
let!(:package) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
let!(:package2) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '2.0.0', json: json) }
let!(:package3) { create(:composer_package, :with_metadatum, project: project2, name: package_name2, version: '3.0.0', json: json2) }
diff --git a/spec/tasks/gitlab/snippets_rake_spec.rb b/spec/tasks/gitlab/snippets_rake_spec.rb
index d40b784b3a0..c55bded1d5a 100644
--- a/spec/tasks/gitlab/snippets_rake_spec.rb
+++ b/spec/tasks/gitlab/snippets_rake_spec.rb
@@ -5,6 +5,7 @@ require 'rake_helper'
RSpec.describe 'gitlab:snippets namespace rake task', :silence_stdout do
let_it_be(:user) { create(:user)}
let_it_be(:migrated) { create(:personal_snippet, :repository, author: user) }
+
let(:non_migrated) { create_list(:personal_snippet, 3, author: user) }
let(:non_migrated_ids) { non_migrated.pluck(:id) }
diff --git a/spec/tooling/danger/feature_flag_spec.rb b/spec/tooling/danger/feature_flag_spec.rb
index 5e495cd43c6..7cae3e0a8b3 100644
--- a/spec/tooling/danger/feature_flag_spec.rb
+++ b/spec/tooling/danger/feature_flag_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Tooling::Danger::FeatureFlag do
let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
- subject(:feature_flag) { fake_danger.new(git: fake_git) }
+ subject(:feature_flag) { fake_danger.new(helper: fake_helper) }
describe '#feature_flag_files' do
let(:feature_flag_files) do
@@ -87,7 +87,11 @@ RSpec.describe Tooling::Danger::FeatureFlag do
let(:feature_flag_path) { 'config/feature_flags/development/entry.yml' }
let(:group) { 'group::source code' }
let(:raw_yaml) do
- YAML.dump('group' => group)
+ YAML.dump(
+ 'group' => group,
+ 'default_enabled' => true,
+ 'rollout_issue_url' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/1'
+ )
end
subject(:found) { described_class.new(feature_flag_path) }
@@ -109,6 +113,18 @@ RSpec.describe Tooling::Danger::FeatureFlag do
end
end
+ describe '#default_enabled' do
+ it 'returns the default_enabled found in the YAML' do
+ expect(found.default_enabled).to eq(true)
+ end
+ end
+
+ describe '#rollout_issue_url' do
+ it 'returns the rollout_issue_url found in the YAML' do
+ expect(found.rollout_issue_url).to eq('https://gitlab.com/gitlab-org/gitlab/-/issues/1')
+ end
+ end
+
describe '#group_match_mr_label?' do
subject(:result) { found.group_match_mr_label?(mr_group_label) }
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index 7474709d255..8bcfac5a699 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -56,24 +56,26 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'ee/doc/foo' | [:unknown]
'ee/README' | [:unknown]
- 'app/assets/foo' | [:frontend]
- 'app/views/foo' | [:frontend]
- 'public/foo' | [:frontend]
- 'scripts/frontend/foo' | [:frontend]
- 'spec/javascripts/foo' | [:frontend]
- 'spec/frontend/bar' | [:frontend]
- 'vendor/assets/foo' | [:frontend]
- 'babel.config.js' | [:frontend]
- 'jest.config.js' | [:frontend]
- 'package.json' | [:frontend]
- 'yarn.lock' | [:frontend]
- 'config/foo.js' | [:frontend]
- 'config/deep/foo.js' | [:frontend]
-
- 'ee/app/assets/foo' | [:frontend]
- 'ee/app/views/foo' | [:frontend]
- 'ee/spec/javascripts/foo' | [:frontend]
- 'ee/spec/frontend/bar' | [:frontend]
+ 'app/assets/foo' | [:frontend]
+ 'app/views/foo' | [:frontend]
+ 'public/foo' | [:frontend]
+ 'scripts/frontend/foo' | [:frontend]
+ 'spec/javascripts/foo' | [:frontend]
+ 'spec/frontend/bar' | [:frontend]
+ 'spec/frontend_integration/bar' | [:frontend]
+ 'vendor/assets/foo' | [:frontend]
+ 'babel.config.js' | [:frontend]
+ 'jest.config.js' | [:frontend]
+ 'package.json' | [:frontend]
+ 'yarn.lock' | [:frontend]
+ 'config/foo.js' | [:frontend]
+ 'config/deep/foo.js' | [:frontend]
+
+ 'ee/app/assets/foo' | [:frontend]
+ 'ee/app/views/foo' | [:frontend]
+ 'ee/spec/javascripts/foo' | [:frontend]
+ 'ee/spec/frontend/bar' | [:frontend]
+ 'ee/spec/frontend_integration/bar' | [:frontend]
'.gitlab/ci/frontend.gitlab-ci.yml' | %i[frontend engineering_productivity]
@@ -183,7 +185,8 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'lib/generators/rails/usage_metric_definition_generator.rb' | [:backend, :product_intelligence]
'spec/lib/generators/usage_metric_definition_generator_spec.rb' | [:backend, :product_intelligence]
'config/metrics/schema.json' | [:product_intelligence]
- 'app/assets/javascripts/tracking.js' | [:frontend, :product_intelligence]
+ 'app/assets/javascripts/tracking/foo.js' | [:frontend, :product_intelligence]
+ 'spec/frontend/tracking/foo.js' | [:frontend, :product_intelligence]
'spec/frontend/tracking_spec.js' | [:frontend, :product_intelligence]
'lib/gitlab/usage_database/foo.rb' | [:backend]
end
@@ -218,7 +221,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
describe '.local_warning_message' do
it 'returns an informational message with rules that can run' do
- expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changelog, database, datateam, documentation, duplicate_yarn_dependencies, eslint, karma, pajamas, pipeline, prettier, product_intelligence, utility_css')
+ expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changelog, database, documentation, duplicate_yarn_dependencies, eslint, gitaly, karma, pajamas, pipeline, prettier, product_intelligence, utility_css, vue_shared_documentation')
end
end
diff --git a/spec/tooling/lib/tooling/kubernetes_client_spec.rb b/spec/tooling/lib/tooling/kubernetes_client_spec.rb
index 636727401af..a7f50b0bb50 100644
--- a/spec/tooling/lib/tooling/kubernetes_client_spec.rb
+++ b/spec/tooling/lib/tooling/kubernetes_client_spec.rb
@@ -135,6 +135,52 @@ RSpec.describe Tooling::KubernetesClient do
end
end
+ describe '#cleanup_review_app_namespaces' do
+ let(:two_days_ago) { Time.now - 3600 * 24 * 2 }
+ let(:namespaces) { %w[review-abc-123 review-xyz-789] }
+
+ subject { described_class.new(namespace: nil) }
+
+ before do
+ allow(subject).to receive(:review_app_namespaces_created_before).with(created_before: two_days_ago).and_return(namespaces)
+ end
+
+ shared_examples 'a kubectl command to delete namespaces older than given creation time' do
+ let(:wait) { true }
+
+ specify do
+ expect(Gitlab::Popen).to receive(:popen_with_detail)
+ .with(["kubectl delete namespace " +
+ %(--now --ignore-not-found --wait=#{wait} #{namespaces.join(' ')})])
+ .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true)))
+
+ # We're not verifying the output here, just silencing it
+ expect { subject.cleanup_review_app_namespaces(created_before: two_days_ago) }.to output.to_stdout
+ end
+ end
+
+ it_behaves_like 'a kubectl command to delete namespaces older than given creation time'
+
+ it 'raises an error if the Kubernetes command fails' do
+ expect(Gitlab::Popen).to receive(:popen_with_detail)
+ .with(["kubectl delete namespace " +
+ %(--now --ignore-not-found --wait=true #{namespaces.join(' ')})])
+ .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false)))
+
+ expect { subject.cleanup_review_app_namespaces(created_before: two_days_ago) }.to raise_error(described_class::CommandFailedError)
+ end
+
+ context 'with no namespaces found' do
+ let(:namespaces) { [] }
+
+ it 'does not call #delete_namespaces_by_exact_names' do
+ expect(subject).not_to receive(:delete_namespaces_by_exact_names)
+
+ subject.cleanup_review_app_namespaces(created_before: two_days_ago)
+ end
+ end
+ end
+
describe '#raw_resource_names' do
it 'calls kubectl to retrieve the resource names' do
expect(Gitlab::Popen).to receive(:popen_with_detail)
@@ -200,4 +246,49 @@ RSpec.describe Tooling::KubernetesClient do
it_behaves_like 'a kubectl command to retrieve resource names sorted by creationTimestamp'
end
end
+
+ describe '#review_app_namespaces_created_before' do
+ let(:three_days_ago) { Time.now - 3600 * 24 * 3 }
+ let(:two_days_ago) { Time.now - 3600 * 24 * 2 }
+ let(:namespace_created_three_days_ago) { 'namespace-created-three-days-ago' }
+ let(:resource_type) { 'namespace' }
+ let(:raw_resources) do
+ {
+ items: [
+ {
+ apiVersion: "v1",
+ kind: "Namespace",
+ metadata: {
+ creationTimestamp: three_days_ago,
+ name: namespace_created_three_days_ago,
+ labels: {
+ tls: 'review-apps-tls'
+ }
+ }
+ },
+ {
+ apiVersion: "v1",
+ kind: "Namespace",
+ metadata: {
+ creationTimestamp: Time.now,
+ name: 'another-pvc',
+ labels: {
+ tls: 'review-apps-tls'
+ }
+ }
+ }
+ ]
+ }.to_json
+ end
+
+ specify do
+ expect(Gitlab::Popen).to receive(:popen_with_detail)
+ .with(["kubectl get namespace " \
+ "-l tls=review-apps-tls " \
+ "--sort-by='{.metadata.creationTimestamp}' -o json"])
+ .and_return(Gitlab::Popen::Result.new([], raw_resources, '', double(success?: true)))
+
+ expect(subject.__send__(:review_app_namespaces_created_before, created_before: two_days_ago)).to contain_exactly(namespace_created_three_days_ago)
+ end
+ end
end
diff --git a/spec/uploaders/content_type_whitelist_spec.rb b/spec/uploaders/content_type_whitelist_spec.rb
index cf7463369ab..0cafc7a3ae2 100644
--- a/spec/uploaders/content_type_whitelist_spec.rb
+++ b/spec/uploaders/content_type_whitelist_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ContentTypeWhitelist do
let_it_be(:model) { build_stubbed(:user) }
+
let!(:uploader) do
stub_const('DummyUploader', Class.new(CarrierWave::Uploader::Base))
diff --git a/spec/uploaders/dependency_proxy/file_uploader_spec.rb b/spec/uploaders/dependency_proxy/file_uploader_spec.rb
index 6e94a661d6d..eb12e7dffa5 100644
--- a/spec/uploaders/dependency_proxy/file_uploader_spec.rb
+++ b/spec/uploaders/dependency_proxy/file_uploader_spec.rb
@@ -5,6 +5,7 @@ RSpec.describe DependencyProxy::FileUploader do
describe 'DependencyProxy::Blob uploader' do
let_it_be(:blob) { create(:dependency_proxy_blob) }
let_it_be(:path) { Gitlab.config.dependency_proxy.storage_path }
+
let(:uploader) { described_class.new(blob, :file) }
subject { uploader }
@@ -30,6 +31,7 @@ RSpec.describe DependencyProxy::FileUploader do
let_it_be(:manifest) { create(:dependency_proxy_manifest) }
let_it_be(:initial_content_type) { 'application/json' }
let_it_be(:fixture_file) { fixture_file_upload('spec/fixtures/dependency_proxy/manifest', initial_content_type) }
+
let(:uploader) { described_class.new(manifest, :file) }
subject { uploader }
diff --git a/spec/views/admin/application_settings/_eks.html.haml_spec.rb b/spec/views/admin/application_settings/_eks.html.haml_spec.rb
index 2f45eaadc9f..e407970c7a4 100644
--- a/spec/views/admin/application_settings/_eks.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/_eks.html.haml_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'admin/application_settings/_eks' do
let_it_be(:admin) { create(:admin) }
+
let(:page) { Capybara::Node::Simple.new(rendered) }
before do
diff --git a/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb b/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb
index e0aa2fc8d56..72e32643a49 100644
--- a/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'admin/application_settings/_package_registry' do
let_it_be(:admin) { create(:admin) }
let_it_be(:default_plan_limits) { create(:plan_limits, :default_plan, :with_package_file_sizes) }
let_it_be(:application_setting) { build(:application_setting) }
+
let(:page) { Capybara::Node::Simple.new(rendered) }
before do
diff --git a/spec/views/admin/application_settings/ci_cd.html.haml_spec.rb b/spec/views/admin/application_settings/ci_cd.html.haml_spec.rb
new file mode 100644
index 00000000000..4d40bf5671e
--- /dev/null
+++ b/spec/views/admin/application_settings/ci_cd.html.haml_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'admin/application_settings/ci_cd.html.haml' do
+ let_it_be(:app_settings) { build(:application_setting) }
+ let_it_be(:user) { create(:admin) }
+
+ let_it_be(:default_plan_limits) { create(:plan_limits, :default_plan, :with_package_file_sizes) }
+
+ before do
+ assign(:application_setting, app_settings)
+ assign(:plans, [default_plan_limits.plan])
+ allow(view).to receive(:current_user).and_return(user)
+ end
+
+ describe 'CI CD Runner Registration' do
+ context 'when feature flag is enabled' do
+ before do
+ stub_feature_flags(runner_registration_control: true)
+ end
+
+ it 'has the setting section' do
+ render
+
+ expect(rendered).to have_css("#js-runner-settings")
+ end
+
+ it 'renders the correct setting section content' do
+ render
+
+ expect(rendered).to have_content("Runner registration")
+ expect(rendered).to have_content("If no options are selected, only administrators can register runners.")
+ end
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(runner_registration_control: false)
+ end
+
+ it 'does not have the setting section' do
+ render
+
+ expect(rendered).not_to have_css("#js-runner-settings")
+ end
+
+ it 'does not render the correct setting section content' do
+ render
+
+ expect(rendered).not_to have_content("Runner registration")
+ expect(rendered).not_to have_content("If no options are selected, only administrators can register runners.")
+ end
+ end
+ end
+end
diff --git a/spec/views/admin/application_settings/repository.html.haml_spec.rb b/spec/views/admin/application_settings/repository.html.haml_spec.rb
index 47cadd29e33..30047878b0f 100644
--- a/spec/views/admin/application_settings/repository.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/repository.html.haml_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'admin/application_settings/repository.html.haml' do
render
expect(rendered).to have_content("Default initial branch name")
- expect(rendered).to have_content("Set the default name of the initial branch when creating new repositories through the user interface.")
+ expect(rendered).to have_content("The default name for the initial branch of new repositories created in the instance.")
end
end
end
diff --git a/spec/views/devise/sessions/new.html.haml_spec.rb b/spec/views/devise/sessions/new.html.haml_spec.rb
index b5c69f4f04d..d3552bf2e5a 100644
--- a/spec/views/devise/sessions/new.html.haml_spec.rb
+++ b/spec/views/devise/sessions/new.html.haml_spec.rb
@@ -3,6 +3,32 @@
require 'spec_helper'
RSpec.describe 'devise/sessions/new' do
+ describe 'marketing text' do
+ subject { render(template: 'devise/sessions/new', layout: 'layouts/devise') }
+
+ before do
+ stub_devise
+ disable_captcha
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ end
+
+ it 'when flash is anything it renders marketing text' do
+ flash[:notice] = "You can't do that"
+
+ subject
+
+ expect(rendered).to have_content('A complete DevOps platform')
+ end
+
+ it 'when flash notice is devise confirmed message it hides marketing text' do
+ flash[:notice] = t(:confirmed, scope: [:devise, :confirmations])
+
+ subject
+
+ expect(rendered).not_to have_content('A complete DevOps platform')
+ end
+ end
+
describe 'ldap' do
include LdapHelpers
diff --git a/spec/views/groups/_home_panel.html.haml_spec.rb b/spec/views/groups/_home_panel.html.haml_spec.rb
index b8168b20450..e76862cdaea 100644
--- a/spec/views/groups/_home_panel.html.haml_spec.rb
+++ b/spec/views/groups/_home_panel.html.haml_spec.rb
@@ -14,4 +14,30 @@ RSpec.describe 'groups/_home_panel' do
expect(rendered).to have_content("Group ID: #{group.id}")
end
+
+ context 'admin area link' do
+ it 'renders admin area link for admin' do
+ allow(view).to receive(:current_user).and_return(create(:admin))
+
+ render
+
+ expect(rendered).to have_link(href: admin_group_path(group))
+ end
+
+ it 'does not render admin area link for non-admin' do
+ allow(view).to receive(:current_user).and_return(create(:user))
+
+ render
+
+ expect(rendered).not_to have_link(href: admin_group_path(group))
+ end
+
+ it 'does not render admin area link for anonymous' do
+ allow(view).to receive(:current_user).and_return(nil)
+
+ render
+
+ expect(rendered).not_to have_link(href: admin_group_path(group))
+ end
+ end
end
diff --git a/spec/views/groups/runners/_group_runners.html.haml_spec.rb b/spec/views/groups/runners/_group_runners.html.haml_spec.rb
new file mode 100644
index 00000000000..0d47409c658
--- /dev/null
+++ b/spec/views/groups/runners/_group_runners.html.haml_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'groups/runners/group_runners.html.haml' do
+ describe 'render' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ before do
+ @group = group
+ allow(view).to receive(:current_user).and_return(user)
+ allow(view).to receive(:reset_registration_token_group_settings_ci_cd_path).and_return('banana_url')
+ allow(view).to receive(:can?).with(user, :admin_pipeline, group).and_return(true)
+ end
+
+ context 'when group runner registration is allowed' do
+ before do
+ stub_application_setting(valid_runner_registrars: ['group'])
+ end
+
+ it 'enables the Remove group button for a group' do
+ render 'groups/runners/group_runners', group: group
+
+ expect(rendered).to have_selector '#js-install-runner'
+ expect(rendered).not_to have_content 'Please contact an admin to register runners.'
+ end
+ end
+
+ context 'when group runner registration is not allowed' do
+ before do
+ stub_application_setting(valid_runner_registrars: ['project'])
+ end
+
+ it 'does not enable the the Remove group button for a group' do
+ render 'groups/runners/group_runners', group: group
+
+ expect(rendered).to have_content 'Please contact an admin to register runners.'
+ expect(rendered).not_to have_selector '#js-install-runner'
+ end
+ end
+ end
+end
diff --git a/spec/views/groups/settings/_transfer.html.haml_spec.rb b/spec/views/groups/settings/_transfer.html.haml_spec.rb
new file mode 100644
index 00000000000..aeb70251a62
--- /dev/null
+++ b/spec/views/groups/settings/_transfer.html.haml_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'groups/settings/_transfer.html.haml' do
+ describe 'render' do
+ it 'enables the Select parent group dropdown and does not show an alert for a group' do
+ group = build(:group)
+
+ render 'groups/settings/transfer', group: group
+
+ expect(rendered).to have_selector '[data-qa-selector="select_group_dropdown"]'
+ expect(rendered).not_to have_selector '[data-qa-selector="select_group_dropdown"][disabled]'
+ expect(rendered).not_to have_selector '[data-testid="group-to-transfer-has-linked-subscription-alert"]'
+ end
+ end
+end
diff --git a/spec/views/help/show.html.haml_spec.rb b/spec/views/help/show.html.haml_spec.rb
index ab303919673..dc73b4a2cfe 100644
--- a/spec/views/help/show.html.haml_spec.rb
+++ b/spec/views/help/show.html.haml_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'help/show' do
describe 'Markdown rendering' do
before do
- assign(:path, 'ssh/README')
+ assign(:path, 'ssh/index')
assign(:markdown, 'Welcome to [GitLab](https://about.gitlab.com/) Documentation.')
end
diff --git a/spec/views/layouts/_head.html.haml_spec.rb b/spec/views/layouts/_head.html.haml_spec.rb
index 6752bdc8337..2c7289deaef 100644
--- a/spec/views/layouts/_head.html.haml_spec.rb
+++ b/spec/views/layouts/_head.html.haml_spec.rb
@@ -100,7 +100,7 @@ RSpec.describe 'layouts/_head' do
it 'add a Matomo Javascript' do
render
- expect(rendered).to match(/<script.*>.*var u="\/\/#{matomo_host}\/".*<\/script>/m)
+ expect(rendered).to match(%r{<script.*>.*var u="//#{matomo_host}/".*</script>}m)
expect(rendered).to match(%r(<noscript>.*<img src="//#{matomo_host}/matomo.php.*</noscript>))
expect(rendered).not_to include('_paq.push(["disableCookies"])')
end
diff --git a/spec/views/layouts/_search.html.haml_spec.rb b/spec/views/layouts/_search.html.haml_spec.rb
index f0c7cb57b25..ceb82e3640e 100644
--- a/spec/views/layouts/_search.html.haml_spec.rb
+++ b/spec/views/layouts/_search.html.haml_spec.rb
@@ -25,6 +25,15 @@ RSpec.describe 'layouts/_search' do
end
shared_examples 'search context scope is set' do
+ context 'when rendering' do
+ it 'sets the placeholder' do
+ render
+
+ expect(rendered).to include('placeholder="Search GitLab"')
+ expect(rendered).to include('aria-label="Search GitLab"')
+ end
+ end
+
context 'when on issues' do
it 'sets scope to issues' do
render
diff --git a/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
index 50390964e1b..7df076d35c4 100644
--- a/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
@@ -27,62 +27,11 @@ RSpec.describe 'layouts/nav/sidebar/_group' do
expect(rendered).to have_link('Group information', href: activity_group_path(group))
end
- it 'does not have a link to the details menu item' do
- render
-
- expect(rendered).not_to have_link('Details', href: details_group_path(group))
- end
-
it 'has a link to the members page' do
render
expect(rendered).to have_selector('.sidebar-top-level-items > li.home a[title="Members"]')
expect(rendered).to have_link('Members', href: group_group_members_path(group))
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it 'has a link to the group path with the "Group overview" title' do
- render
-
- expect(rendered).to have_link('Group overview', href: group_path(group))
- end
-
- it 'has a link to the details menu item' do
- render
-
- expect(rendered).to have_link('Details', href: details_group_path(group))
- end
-
- it 'does not have a link to the members page' do
- render
-
- expect(rendered).not_to have_selector('.sidebar-top-level-items > li.home a[title="Members"]')
- end
- end
- end
-
- describe 'Members' do
- it 'does not have a Members menu' do
- render
-
- expect(rendered).not_to have_selector('.nav-item-name', text: 'Members')
- end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it 'has a Member menu' do
- render
-
- expect(rendered).to have_selector('.nav-item-name', text: 'Members')
- expect(rendered).to have_link('Members', href: group_group_members_path(group))
- end
- end
end
end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index a1aa7c04b67..f2de43dfd19 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -36,38 +36,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
expect(rendered).to have_selector('[aria-label="Project information"]')
end
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it 'has a link to the project path' do
- render
-
- expect(rendered).to have_link('Project overview', href: project_path(project), class: %w(shortcuts-project))
- expect(rendered).to have_selector('[aria-label="Project overview"]')
- end
- end
-
- describe 'Details' do
- it 'does not have a link to the details menu' do
- render
-
- expect(rendered).not_to have_link('Details', href: project_path(project))
- end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'has a link to the projects path' do
- stub_feature_flags(sidebar_refactor: false)
-
- render
-
- expect(rendered).to have_link('Details', href: project_path(project), class: 'shortcuts-project')
- expect(rendered).to have_selector('[aria-label="Project details"]')
- end
- end
- end
-
describe 'Activity' do
it 'has a link to the project activity path' do
render
@@ -76,24 +44,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
- describe 'Releases' do
- it 'does not have a link to the project releases path' do
- render
-
- expect(rendered).not_to have_link('Releases', href: project_releases_path(project), class: 'shortcuts-project-releases')
- end
-
- context 'when feature flag :sidebar refactor is disabled' do
- it 'has a link to the project releases path' do
- stub_feature_flags(sidebar_refactor: false)
-
- render
-
- expect(rendered).to have_link('Releases', href: project_releases_path(project), class: 'shortcuts-project-releases')
- end
- end
- end
-
describe 'Labels' do
let(:page) { Nokogiri::HTML.parse(rendered) }
@@ -103,16 +53,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
expect(page.at_css('.shortcuts-project-information').parent.css('[aria-label="Labels"]')).not_to be_empty
expect(rendered).to have_link('Labels', href: project_labels_path(project))
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'does not have the labels menu item' do
- stub_feature_flags(sidebar_refactor: false)
-
- render
-
- expect(page.at_css('.shortcuts-project').parent.css('[aria-label="Labels"]')).to be_empty
- end
- end
end
describe 'Members' do
@@ -124,16 +64,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
expect(page.at_css('.shortcuts-project-information').parent.css('[aria-label="Members"]')).not_to be_empty
expect(rendered).to have_link('Members', href: project_project_members_path(project))
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'does not have a link to the members page' do
- stub_feature_flags(sidebar_refactor: false)
-
- render
-
- expect(page.at_css('.shortcuts-project').parent.css('[aria-label="Members"]')).to be_empty
- end
- end
end
end
@@ -243,27 +173,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
- describe 'Labels' do
- let(:page) { Nokogiri::HTML.parse(rendered) }
-
- it 'does not have a link to the labels page' do
- render
-
- expect(page.at_css('.shortcuts-issues').parent.css('[aria-label="Labels"]')).to be_empty
- end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'has a link to the labels page' do
- stub_feature_flags(sidebar_refactor: false)
-
- render
-
- expect(page.at_css('.shortcuts-issues').parent.css('[aria-label="Labels"]')).not_to be_empty
- expect(rendered).to have_link('Labels', href: project_labels_path(project))
- end
- end
- end
-
describe 'Service Desk' do
it 'has a link to the service desk path' do
render
@@ -313,7 +222,7 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
context 'with Jira issue tracker' do
- let_it_be(:jira) { create(:jira_service, project: project, issues_enabled: false) }
+ let_it_be(:jira) { create(:jira_integration, project: project, issues_enabled: false) }
it 'has a link to the Jira issue tracker' do
render
@@ -323,40 +232,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
- describe 'Labels' do
- it 'does not show the labels menu' do
- project.project_feature.update!(issues_access_level: ProjectFeature::DISABLED)
-
- render
-
- expect(rendered).not_to have_link('Labels', href: project_labels_path(project), class: 'shortcuts-labels')
- end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- context 'when issues are not enabled' do
- it 'has a link to the labels path' do
- project.project_feature.update!(issues_access_level: ProjectFeature::DISABLED)
-
- render
-
- expect(rendered).to have_link('Labels', href: project_labels_path(project), class: 'shortcuts-labels')
- end
- end
-
- context 'when issues are enabled' do
- it 'does not have a link to the labels path' do
- render
-
- expect(rendered).not_to have_link('Labels', href: project_labels_path(project), class: 'shortcuts-labels')
- end
- end
- end
- end
-
describe 'Merge Requests' do
it 'has a link to the merge request list path' do
render
@@ -479,16 +354,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
expect(rendered).not_to have_link('Feature Flags')
end
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'does not have a Feature Flags menu item' do
- stub_feature_flags(sidebar_refactor: false)
-
- render
-
- expect(rendered).not_to have_selector('.shortcuts-deployments')
- end
- end
end
describe 'Environments' do
@@ -508,16 +373,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
expect(rendered).not_to have_link('Environments')
end
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'does not have a Environments menu item' do
- stub_feature_flags(sidebar_refactor: false)
-
- render
-
- expect(rendered).not_to have_selector('.shortcuts-deployments')
- end
- end
end
describe 'Releases' do
@@ -526,16 +381,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
expect(rendered).to have_link('Releases', href: project_releases_path(project), class: 'shortcuts-deployments-releases')
end
-
- context 'when feature flag :sidebar refactor is disabled' do
- it 'does not have a link to the project releases path' do
- stub_feature_flags(sidebar_refactor: false)
-
- render
-
- expect(rendered).not_to have_link('Releases', href: project_releases_path(project), class: 'shortcuts-deployments-releases')
- end
- end
end
end
@@ -662,141 +507,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- describe 'Serverless' do
- it 'has a link to the serverless page' do
- render
-
- page = Nokogiri::HTML.parse(rendered)
-
- expect(page.at_css('.shortcuts-operations').parent.css('[aria-label="Serverless"]')).not_to be_empty
- expect(rendered).to have_link('Serverless', href: project_serverless_functions_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the serverless page' do
- render
-
- expect(rendered).not_to have_link('Serverless')
- end
- end
- end
-
- describe 'Terraform' do
- it 'has a link to the terraform page' do
- render
-
- page = Nokogiri::HTML.parse(rendered)
-
- expect(page.at_css('.shortcuts-operations').parent.css('[aria-label="Terraform"]')).not_to be_empty
- expect(rendered).to have_link('Terraform', href: project_terraform_index_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the terraform page' do
- render
-
- expect(rendered).not_to have_link('Terraform')
- end
- end
- end
-
- describe 'Kubernetes' do
- it 'has a link to the kubernetes page' do
- render
-
- page = Nokogiri::HTML.parse(rendered)
-
- expect(page.at_css('.shortcuts-operations').parent.css('[aria-label="Kubernetes"]')).not_to be_empty
- expect(rendered).to have_link('Kubernetes', href: project_clusters_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the kubernetes page' do
- render
-
- expect(rendered).not_to have_link('Kubernetes')
- end
- end
- end
- end
-
- describe 'Environments' do
- let(:page) { Nokogiri::HTML.parse(rendered) }
-
- it 'does not have a link to the environments page' do
- render
-
- expect(page.at_css('.shortcuts-monitor').parent.css('[aria-label="Environments"]')).to be_empty
- end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it 'has a link to the environments page' do
- render
-
- expect(page.at_css('.shortcuts-operations').parent.css('[aria-label="Environments"]')).not_to be_empty
- expect(rendered).to have_link('Environments', href: project_environments_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the environments page' do
- render
-
- expect(rendered).not_to have_link('Environments')
- end
- end
- end
- end
-
- describe 'Feature Flags' do
- let(:page) { Nokogiri::HTML.parse(rendered) }
-
- it 'does not have a link to the feature flags page' do
- render
-
- expect(page.at_css('.shortcuts-monitor').parent.css('[aria-label="Feature Flags"]')).to be_empty
- end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it 'has a link to the feature flags page' do
- render
-
- expect(page.at_css('.shortcuts-operations').parent.css('[aria-label="Feature Flags"]')).not_to be_empty
- expect(rendered).to have_link('Feature Flags', href: project_feature_flags_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the feature flags page' do
- render
-
- expect(rendered).not_to have_link('Feature Flags')
- end
- end
- end
- end
-
describe 'Product Analytics' do
it 'has a link to the product analytics page' do
render
@@ -985,11 +695,11 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
- describe 'Value Stream' do
+ describe 'Value stream' do
it 'has a link to the value stream page' do
render
- expect(rendered).to have_link('Value Stream', href: project_cycle_analytics_path(project))
+ expect(rendered).to have_link('Value stream', href: project_cycle_analytics_path(project))
end
context 'when user does not have access' do
@@ -998,7 +708,7 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
it 'does not have a link to the value stream page' do
render
- expect(rendered).not_to have_link('Value Stream', href: project_cycle_analytics_path(project))
+ expect(rendered).not_to have_link('Value stream', href: project_cycle_analytics_path(project))
end
end
end
@@ -1104,35 +814,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
- describe 'Members' do
- it 'does not show the Member menu item' do
- expect(rendered).not_to have_selector('.sidebar-top-level-items > li > a[aria-label="Members"]')
- end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
-
- render
- end
-
- context 'when user can access members' do
- it 'show Members link' do
- expect(rendered).to have_selector('.sidebar-top-level-items > li > a[aria-label="Members"]')
- expect(rendered).to have_link('Members', href: project_project_members_path(project))
- end
- end
-
- context 'when user cannot access members' do
- let(:user) { nil }
-
- it 'show Members link' do
- expect(rendered).not_to have_link('Members')
- end
- end
- end
- end
-
describe 'Settings' do
describe 'General' do
it 'has a link to the General settings' do
@@ -1275,16 +956,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
expect(rendered).to have_link('Packages & Registries', href: project_settings_packages_and_registries_path(project))
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'does not have a link to the Packages & Registries settings' do
- stub_feature_flags(sidebar_refactor: false)
-
- render
-
- expect(rendered).not_to have_link('Packages & Registries', href: project_settings_packages_and_registries_path(project))
- end
- end
end
context 'when registry is not enabled' do
@@ -1345,15 +1016,5 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
expect(rendered).not_to have_selector('.sidebar-sub-level-items > li.fly-out-top-item > a')
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'renders the collapsed top menu as a link' do
- stub_feature_flags(sidebar_refactor: false)
-
- render
-
- expect(rendered).to have_selector('.sidebar-sub-level-items > li.fly-out-top-item > a')
- end
- end
end
end
diff --git a/spec/views/projects/_flash_messages.html.haml_spec.rb b/spec/views/projects/_flash_messages.html.haml_spec.rb
new file mode 100644
index 00000000000..e1858229208
--- /dev/null
+++ b/spec/views/projects/_flash_messages.html.haml_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'projects/_flash_messages' do
+ let_it_be(:template) { 'projects/flash_messages' }
+ let_it_be(:user) { create(:user) }
+
+ let_it_be(:ruby) { create(:programming_language, name: 'Ruby') }
+ let_it_be(:html) { create(:programming_language, name: 'HTML') }
+ let_it_be(:hcl) { create(:programming_language, name: 'HCL') }
+
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ allow(view).to receive(:can?).with(user, :download_code, project).and_return(true)
+ end
+
+ context 'when current_user has download_code permission' do
+ context 'when user has a terraform state' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:terraform_state) { create(:terraform_state, :locked, :with_version, project: project) }
+
+ it "doesn't show the terraform notification banner" do
+ render(template, project: project)
+ expect(view.content_for(:flash_message)).not_to have_selector('.js-terraform-notification')
+ end
+ end
+
+ context 'when there are no .tf files in the repository' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:mock_repo_languages) do
+ { project => { ruby => 0.5, html => 0.5 } }
+ end
+
+ before do
+ mock_repo_languages.each do |project, lang_shares|
+ lang_shares.each do |lang, share|
+ create(:repository_language, project: project, programming_language: lang, share: share)
+ end
+ end
+ end
+
+ it "doesn't show the terraform notification banner" do
+ render(template, project: project)
+ expect(view.content_for(:flash_message)).not_to have_selector('.js-terraform-notification')
+ end
+ end
+
+ context 'when .tf files are present in the repository and user does not have any terraform states' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:mock_repo_languages) do
+ { project => { ruby => 0.5, hcl => 0.5 } }
+ end
+
+ before do
+ mock_repo_languages.each do |project, lang_shares|
+ lang_shares.each do |lang, share|
+ create(:repository_language, project: project, programming_language: lang, share: share)
+ end
+ end
+ end
+
+ it 'shows the terraform notification banner' do
+ render(template, project: project)
+ expect(view.content_for(:flash_message)).to have_selector('.js-terraform-notification')
+ end
+ end
+ end
+end
diff --git a/spec/views/projects/_home_panel.html.haml_spec.rb b/spec/views/projects/_home_panel.html.haml_spec.rb
index d329c57af00..78131937d3c 100644
--- a/spec/views/projects/_home_panel.html.haml_spec.rb
+++ b/spec/views/projects/_home_panel.html.haml_spec.rb
@@ -5,6 +5,38 @@ require 'spec_helper'
RSpec.describe 'projects/_home_panel' do
include ProjectForksHelper
+ context 'admin area link' do
+ let(:project) { create(:project) }
+
+ before do
+ assign(:project, project)
+ end
+
+ it 'renders admin area link for admin' do
+ allow(view).to receive(:current_user).and_return(create(:admin))
+
+ render
+
+ expect(rendered).to have_link(href: admin_project_path(project))
+ end
+
+ it 'does not render admin area link for non-admin' do
+ allow(view).to receive(:current_user).and_return(create(:user))
+
+ render
+
+ expect(rendered).not_to have_link(href: admin_project_path(project))
+ end
+
+ it 'does not render admin area link for anonymous' do
+ allow(view).to receive(:current_user).and_return(nil)
+
+ render
+
+ expect(rendered).not_to have_link(href: admin_project_path(project))
+ end
+ end
+
context 'notifications' do
let(:project) { create(:project) }
diff --git a/spec/views/projects/empty.html.haml_spec.rb b/spec/views/projects/empty.html.haml_spec.rb
index 7fa95507f75..0fb0ae5ff29 100644
--- a/spec/views/projects/empty.html.haml_spec.rb
+++ b/spec/views/projects/empty.html.haml_spec.rb
@@ -64,6 +64,7 @@ RSpec.describe 'projects/empty' do
expect(rendered).to have_selector('.js-invite-members-modal')
expect(rendered).to have_selector('[data-label=invite_members_empty_project]')
expect(rendered).to have_selector('[data-event=click_button]')
+ expect(rendered).to have_selector('[data-trigger-source=project-empty-page]')
end
context 'when user does not have permissions to invite members' do
diff --git a/spec/views/projects/pipelines/show.html.haml_spec.rb b/spec/views/projects/pipelines/show.html.haml_spec.rb
index 5b5c05527de..fcae587f8c8 100644
--- a/spec/views/projects/pipelines/show.html.haml_spec.rb
+++ b/spec/views/projects/pipelines/show.html.haml_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'projects/pipelines/show' do
include Devise::Test::ControllerHelpers
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:presented_pipeline) { pipeline.present(current_user: user) }
diff --git a/spec/views/projects/runners/_specific_runners.html.haml_spec.rb b/spec/views/projects/runners/_specific_runners.html.haml_spec.rb
new file mode 100644
index 00000000000..ace3502dd1e
--- /dev/null
+++ b/spec/views/projects/runners/_specific_runners.html.haml_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'projects/runners/specific_runners.html.haml' do
+ describe 'render' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ before do
+ @project = project
+ @assignable_runners = []
+ @project_runners = []
+ allow(view).to receive(:reset_registration_token_namespace_project_settings_ci_cd_path).and_return('banana_url')
+ end
+
+ context 'when project runner registration is allowed' do
+ before do
+ stub_application_setting(valid_runner_registrars: ['project'])
+ end
+
+ it 'enables the Remove project button for a project' do
+ render 'projects/runners/specific_runners', project: project
+
+ expect(rendered).to have_selector '#js-install-runner'
+ expect(rendered).not_to have_content 'Please contact an admin to register runners.'
+ end
+ end
+
+ context 'when project runner registration is not allowed' do
+ before do
+ stub_application_setting(valid_runner_registrars: ['group'])
+ end
+
+ it 'does not enable the the Remove project button for a project' do
+ render 'projects/runners/specific_runners', project: project
+
+ expect(rendered).to have_content 'Please contact an admin to register runners.'
+ expect(rendered).not_to have_selector '#js-install-runner'
+ end
+ end
+ end
+end
diff --git a/spec/views/projects/services/_form.haml_spec.rb b/spec/views/projects/services/_form.haml_spec.rb
index f063e73dae4..177f703ba6c 100644
--- a/spec/views/projects/services/_form.haml_spec.rb
+++ b/spec/views/projects/services/_form.haml_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'projects/services/_form' do
current_user: user,
can?: true,
current_application_settings: Gitlab::CurrentSettings.current_application_settings,
- integration: project.redmine_service,
+ integration: project.redmine_integration,
request: double(referer: '/services')
)
end
diff --git a/spec/views/projects/settings/operations/show.html.haml_spec.rb b/spec/views/projects/settings/operations/show.html.haml_spec.rb
index 43c064e1a2b..c0ec86a41a7 100644
--- a/spec/views/projects/settings/operations/show.html.haml_spec.rb
+++ b/spec/views/projects/settings/operations/show.html.haml_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'projects/settings/operations/show' do
create(:project_tracing_setting, project: project)
end
- let_it_be(:prometheus_service) { create(:prometheus_service, project: project) }
+ let_it_be(:prometheus_integration) { create(:prometheus_integration, project: project) }
before_all do
project.add_maintainer(user)
@@ -27,8 +27,8 @@ RSpec.describe 'projects/settings/operations/show' do
.and_return(error_tracking_setting)
allow(view).to receive(:tracing_setting)
.and_return(tracing_setting)
- allow(view).to receive(:prometheus_service)
- .and_return(prometheus_service)
+ allow(view).to receive(:prometheus_integration)
+ .and_return(prometheus_integration)
allow(view).to receive(:current_user).and_return(user)
end
diff --git a/spec/views/search/_results.html.haml_spec.rb b/spec/views/search/_results.html.haml_spec.rb
index 11f2a4082e7..ecfcf74edc1 100644
--- a/spec/views/search/_results.html.haml_spec.rb
+++ b/spec/views/search/_results.html.haml_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe 'search/_results' do
let_it_be(:project) { create(:project) }
let_it_be(:issue) { create(:issue, project: project, title: '*') }
let_it_be(:note) { create(:discussion_note_on_issue, noteable: issue, project: issue.project, note: '```"helloworld"```') }
+
let(:scope) { 'notes' }
let(:search_objects) { Note.page(1).per(2) }
let(:term) { 'helloworld' }
@@ -61,7 +62,7 @@ RSpec.describe 'search/_results' do
let_it_be(:merge_request) { create(:merge_request, title: '*', source_project: project, target_project: project) }
let_it_be(:milestone) { create(:milestone, title: '*', project: project) }
let_it_be(:note) { create(:discussion_note_on_issue, project: project, note: '*') }
- let_it_be(:wiki_blob) { create(:wiki_page, project: project, content: '*') }
+ let_it_be(:wiki_blob) { create(:wiki_page, wiki: project.wiki, content: '*') }
let_it_be(:user) { create(:admin) }
%w[issues merge_requests].each do |search_scope|
diff --git a/spec/views/shared/_global_alert.html.haml_spec.rb b/spec/views/shared/_global_alert.html.haml_spec.rb
new file mode 100644
index 00000000000..7eec068645a
--- /dev/null
+++ b/spec/views/shared/_global_alert.html.haml_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'shared/_global_alert.html.haml' do
+ before do
+ allow(view).to receive(:sprite_icon).and_return('<span class="icon"></span>'.html_safe)
+ end
+
+ it 'renders the title' do
+ title = "The alert's title"
+ render partial: 'shared/global_alert', locals: { title: title }
+
+ expect(rendered).to have_text(title)
+ end
+
+ context 'variants' do
+ it 'renders an info alert by default' do
+ render
+
+ expect(rendered).to have_selector(".gl-alert-info")
+ end
+
+ %w[warning success danger tip].each do |variant|
+ it "renders a #{variant} variant" do
+ allow(view).to receive(:variant).and_return(variant)
+ render partial: 'shared/global_alert', locals: { variant: variant }
+
+ expect(rendered).to have_selector(".gl-alert-#{variant}")
+ end
+ end
+ end
+
+ context 'dismissible option' do
+ it 'shows the dismiss button by default' do
+ render
+
+ expect(rendered).to have_selector('.gl-dismiss-btn')
+ end
+
+ it 'does not show the dismiss button when dismissible is false' do
+ render partial: 'shared/global_alert', locals: { dismissible: false }
+
+ expect(rendered).not_to have_selector('.gl-dismiss-btn')
+ end
+ end
+
+ context 'fixed layout' do
+ before do
+ allow(view).to receive(:fluid_layout).and_return(false)
+ end
+
+ it 'does not add layout limited class' do
+ render
+
+ expect(rendered).not_to have_selector('.gl-alert-layout-limited')
+ end
+
+ it 'adds container classes' do
+ render
+
+ expect(rendered).to have_selector('.container-fluid.container-limited')
+ end
+
+ it 'does not add container classes if is_contained is true' do
+ render partial: 'shared/global_alert', locals: { is_contained: true }
+
+ expect(rendered).not_to have_selector('.container-fluid.container-limited')
+ end
+ end
+
+ context 'fluid layout' do
+ before do
+ allow(view).to receive(:fluid_layout).and_return(true)
+ render
+ end
+
+ it 'adds layout limited class' do
+ expect(rendered).to have_selector('.gl-alert-layout-limited')
+ end
+
+ it 'does not add container classes' do
+ expect(rendered).not_to have_selector('.container-fluid.container-limited')
+ end
+ end
+end
diff --git a/spec/views/shared/_label_row.html.haml_spec.rb b/spec/views/shared/_label_row.html.haml_spec.rb
index e9a0bfdcd4e..6fe74b6633b 100644
--- a/spec/views/shared/_label_row.html.haml_spec.rb
+++ b/spec/views/shared/_label_row.html.haml_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe 'shared/_label_row.html.haml' do
let_it_be(:group) { create(:group) }
+
let(:label) { build_stubbed(:group_label, group: group).present(issuable_subject: group) }
before do
@@ -11,6 +12,7 @@ RSpec.describe 'shared/_label_row.html.haml' do
context 'with a project context' do
let_it_be(:project) { create(:project, group: group) }
+
let(:label) { build_stubbed(:label, project: project).present(issuable_subject: project) }
before do
@@ -42,6 +44,7 @@ RSpec.describe 'shared/_label_row.html.haml' do
context 'with a subgroup context' do
let_it_be(:subgroup) { create(:group, parent: group) }
+
let(:label) { build_stubbed(:group_label, group: subgroup).present(issuable_subject: subgroup) }
before do
diff --git a/spec/views/shared/milestones/_top.html.haml_spec.rb b/spec/views/shared/milestones/_top.html.haml_spec.rb
index 1aa971709f1..aa989d3e441 100644
--- a/spec/views/shared/milestones/_top.html.haml_spec.rb
+++ b/spec/views/shared/milestones/_top.html.haml_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'shared/milestones/_top.html.haml' do
let_it_be(:group) { create(:group) }
+
let(:project) { create(:project, group: group) }
let(:milestone) { create(:milestone, project: project) }
diff --git a/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb b/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb
index cdf2cb493b0..c27629c3a15 100644
--- a/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb
+++ b/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb
@@ -3,9 +3,72 @@
require 'spec_helper'
RSpec.describe AuthorizedProjectUpdate::UserRefreshFromReplicaWorker do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { project.namespace.owner }
+
+ let(:execute_worker) { subject.perform(user.id) }
+
it 'is labeled as low urgency' do
expect(described_class.get_urgency).to eq(:low)
end
- it_behaves_like "refreshes user's project authorizations"
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { user.id }
+ end
+
+ describe '#perform' do
+ it 'checks if a project_authorization refresh is needed for the user' do
+ expect(AuthorizedProjectUpdate::FindRecordsDueForRefreshService).to(
+ receive(:new).with(user).and_call_original)
+
+ execute_worker
+ end
+
+ context 'when there are project authorization records due for either removal or addition for a specific user' do
+ before do
+ user.project_authorizations.delete_all
+ end
+
+ it 'enqueues a new project authorization update job for the user' do
+ expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to receive(:perform_async).with(user.id)
+
+ execute_worker
+ end
+ end
+
+ context 'when there are no additions or removals to be made to project authorizations for a specific user' do
+ it 'does not enqueue a new project authorization update job for the user' do
+ expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).not_to receive(:perform_async)
+
+ execute_worker
+ end
+ end
+
+ context 'with load balancing enabled' do
+ before do
+ allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
+ end
+
+ it 'reads from the replica database' do
+ expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
+
+ execute_worker
+ end
+ end
+
+ context 'when the feature flag `user_refresh_from_replica_worker_uses_replica_db` is disabled' do
+ before do
+ stub_feature_flags(user_refresh_from_replica_worker_uses_replica_db: false)
+ end
+
+ it 'calls Users::RefreshAuthorizedProjectsService' do
+ source = 'AuthorizedProjectUpdate::UserRefreshFromReplicaWorker'
+ expect_next_instance_of(Users::RefreshAuthorizedProjectsService, user, { source: source }) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ execute_worker
+ end
+ end
+ end
end
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
index 3434980341b..6b7162ee886 100644
--- a/spec/workers/build_finished_worker_spec.rb
+++ b/spec/workers/build_finished_worker_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe BuildFinishedWorker do
let_it_be(:build) { create(:ci_build, :success, pipeline: create(:ci_pipeline)) }
before do
+ stub_feature_flags(ci_build_finished_worker_namespace_changed: build.project)
expect(Ci::Build).to receive(:find_by).with(id: build.id).and_return(build)
end
@@ -23,11 +24,23 @@ RSpec.describe BuildFinishedWorker do
expect(BuildHooksWorker).to receive(:perform_async)
expect(ChatNotificationWorker).not_to receive(:perform_async)
- expect(ArchiveTraceWorker).to receive(:perform_in)
+ expect(Ci::ArchiveTraceWorker).to receive(:perform_in)
subject
end
+ context 'with ci_build_finished_worker_namespace_changed feature flag disabled' do
+ before do
+ stub_feature_flags(ci_build_finished_worker_namespace_changed: false)
+ end
+
+ it 'calls deprecated worker' do
+ expect(ArchiveTraceWorker).to receive(:perform_in)
+
+ subject
+ end
+ end
+
context 'when build is failed' do
before do
build.update!(status: :failed)
diff --git a/spec/workers/build_queue_worker_spec.rb b/spec/workers/build_queue_worker_spec.rb
index 5f8510abf23..0786722e647 100644
--- a/spec/workers/build_queue_worker_spec.rb
+++ b/spec/workers/build_queue_worker_spec.rb
@@ -26,6 +26,5 @@ RSpec.describe BuildQueueWorker do
it_behaves_like 'worker with data consistency',
described_class,
- feature_flag: :load_balancing_for_build_queue_worker,
data_consistency: :sticky
end
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
index 8d528011752..cb280c6d263 100644
--- a/spec/workers/bulk_imports/export_request_worker_spec.rb
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -6,12 +6,17 @@ RSpec.describe BulkImports::ExportRequestWorker do
let_it_be(:bulk_import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import) }
let_it_be(:entity) { create(:bulk_import_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
+ let_it_be(:version_url) { 'https://gitlab.example/api/v4/version' }
let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
let(:job_args) { [entity.id] }
describe '#perform' do
before do
+ allow(Gitlab::HTTP)
+ .to receive(:get)
+ .with(version_url, anything)
+ .and_return(double(code: 200, success?: true, parsed_response: { 'version' => Gitlab::VERSION }))
allow(Gitlab::HTTP).to receive(:post).and_return(response_double)
end
diff --git a/spec/workers/ci/archive_trace_worker_spec.rb b/spec/workers/ci/archive_trace_worker_spec.rb
new file mode 100644
index 00000000000..889e0c92042
--- /dev/null
+++ b/spec/workers/ci/archive_trace_worker_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::ArchiveTraceWorker do
+ describe '#perform' do
+ subject { described_class.new.perform(job&.id) }
+
+ context 'when job is found' do
+ let(:job) { create(:ci_build, :trace_live) }
+
+ it 'executes service' do
+ allow_next_instance_of(Ci::ArchiveTraceService) do |instance|
+ allow(instance).to receive(:execute).with(job, anything)
+ end
+
+ subject
+ end
+ end
+
+ context 'when job is not found' do
+ let(:job) { nil }
+
+ it 'does not execute service' do
+ allow_next_instance_of(Ci::ArchiveTraceService) do |instance|
+ allow(instance).not_to receive(:execute)
+ end
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/build_finished_worker_spec.rb b/spec/workers/ci/build_finished_worker_spec.rb
new file mode 100644
index 00000000000..374ecd8619f
--- /dev/null
+++ b/spec/workers/ci/build_finished_worker_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::BuildFinishedWorker do
+ subject { described_class.new.perform(build.id) }
+
+ describe '#perform' do
+ context 'when build exists' do
+ let_it_be(:build) { create(:ci_build, :success, pipeline: create(:ci_pipeline)) }
+
+ before do
+ stub_feature_flags(ci_build_finished_worker_namespace_changed: build.project)
+ expect(Ci::Build).to receive(:find_by).with(id: build.id).and_return(build)
+ end
+
+ it 'calculates coverage and calls hooks', :aggregate_failures do
+ expect(build).to receive(:parse_trace_sections!).ordered
+ expect(build).to receive(:update_coverage).ordered
+
+ expect_next_instance_of(Ci::BuildReportResultService) do |build_report_result_service|
+ expect(build_report_result_service).to receive(:execute).with(build)
+ end
+
+ expect(BuildHooksWorker).to receive(:perform_async)
+ expect(ChatNotificationWorker).not_to receive(:perform_async)
+ expect(Ci::ArchiveTraceWorker).to receive(:perform_in)
+
+ subject
+ end
+
+ context 'with ci_build_finished_worker_namespace_changed feature flag disabled' do
+ before do
+ stub_feature_flags(ci_build_finished_worker_namespace_changed: false)
+ end
+
+ it 'calls deprecated worker' do
+ expect(ArchiveTraceWorker).to receive(:perform_in)
+
+ subject
+ end
+ end
+
+ context 'when build is failed' do
+ before do
+ build.update!(status: :failed)
+ end
+
+ it 'adds a todo' do
+ expect(::Ci::MergeRequests::AddTodoWhenBuildFailsWorker).to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ context 'when build has a chat' do
+ before do
+ build.pipeline.update!(source: :chat)
+ end
+
+ it 'schedules a ChatNotification job' do
+ expect(ChatNotificationWorker).to receive(:perform_async).with(build.id)
+
+ subject
+ end
+ end
+ end
+
+ context 'when build does not exist' do
+ it 'does not raise exception' do
+ expect { described_class.new.perform(non_existing_record_id) }
+ .not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb b/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
index f9914a7cecb..650be1e84a9 100644
--- a/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
+++ b/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
@@ -5,15 +5,23 @@ require 'spec_helper'
RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupWorker do
let(:worker) { described_class.new }
+ it 'has the `until_executed` deduplicate strategy' do
+ expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
+ end
+
describe '#perform' do
subject { worker.perform(resource_group_id) }
- context 'when resource group exists' do
- let(:resource_group) { create(:ci_resource_group) }
- let(:resource_group_id) { resource_group.id }
+ let(:resource_group) { create(:ci_resource_group) }
+ let(:resource_group_id) { resource_group.id }
+ include_examples 'an idempotent worker' do
+ let(:job_args) { [resource_group_id] }
+ end
+
+ context 'when resource group exists' do
it 'executes AssignResourceFromResourceGroupService' do
- expect_next_instance_of(Ci::ResourceGroups::AssignResourceFromResourceGroupService, resource_group.project, nil) do |service|
+ expect_next_instances_of(Ci::ResourceGroups::AssignResourceFromResourceGroupService, 2, resource_group.project, nil) do |service|
expect(service).to receive(:execute).with(resource_group)
end
@@ -22,7 +30,7 @@ RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupWorker do
end
context 'when build does not exist' do
- let(:resource_group_id) { 123 }
+ let(:resource_group_id) { non_existing_record_id }
it 'does not execute AssignResourceFromResourceGroupService' do
expect(Ci::ResourceGroups::AssignResourceFromResourceGroupService).not_to receive(:new)
diff --git a/spec/workers/clusters/applications/activate_service_worker_spec.rb b/spec/workers/clusters/applications/activate_service_worker_spec.rb
index 7b05b76bebc..019bfe7a750 100644
--- a/spec/workers/clusters/applications/activate_service_worker_spec.rb
+++ b/spec/workers/clusters/applications/activate_service_worker_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe Clusters::Applications::ActivateServiceWorker, '#perform' do
context 'cluster exists' do
- describe 'prometheus service' do
- let(:service_name) { 'prometheus' }
+ describe 'prometheus integration' do
+ let(:integration_name) { 'prometheus' }
before do
create(:clusters_integrations_prometheus, cluster: cluster)
@@ -16,9 +16,9 @@ RSpec.describe Clusters::Applications::ActivateServiceWorker, '#perform' do
let(:project) { create(:project, group: group) }
let(:cluster) { create(:cluster_for_group, groups: [group]) }
- it 'ensures Prometheus service is activated' do
- expect { described_class.new.perform(cluster.id, service_name) }
- .to change { project.reload.prometheus_service&.active }.from(nil).to(true)
+ it 'ensures Prometheus integration is activated' do
+ expect { described_class.new.perform(cluster.id, integration_name) }
+ .to change { project.reload.prometheus_integration&.active }.from(nil).to(true)
end
end
@@ -26,9 +26,9 @@ RSpec.describe Clusters::Applications::ActivateServiceWorker, '#perform' do
let(:project) { create(:project) }
let(:cluster) { create(:cluster, projects: [project]) }
- it 'ensures Prometheus service is activated' do
- expect { described_class.new.perform(cluster.id, service_name) }
- .to change { project.reload.prometheus_service&.active }.from(nil).to(true)
+ it 'ensures Prometheus integration is activated' do
+ expect { described_class.new.perform(cluster.id, integration_name) }
+ .to change { project.reload.prometheus_integration&.active }.from(nil).to(true)
end
end
@@ -36,9 +36,9 @@ RSpec.describe Clusters::Applications::ActivateServiceWorker, '#perform' do
let(:project) { create(:project) }
let(:cluster) { create(:cluster, :instance) }
- it 'ensures Prometheus service is activated' do
- expect { described_class.new.perform(cluster.id, service_name) }
- .to change { project.reload.prometheus_service&.active }.from(nil).to(true)
+ it 'ensures Prometheus integration is activated' do
+ expect { described_class.new.perform(cluster.id, integration_name) }
+ .to change { project.reload.prometheus_integration&.active }.from(nil).to(true)
end
end
end
diff --git a/spec/workers/clusters/applications/deactivate_service_worker_spec.rb b/spec/workers/clusters/applications/deactivate_service_worker_spec.rb
index 4068c5c9eaa..77788cfa893 100644
--- a/spec/workers/clusters/applications/deactivate_service_worker_spec.rb
+++ b/spec/workers/clusters/applications/deactivate_service_worker_spec.rb
@@ -4,15 +4,15 @@ require 'spec_helper'
RSpec.describe Clusters::Applications::DeactivateServiceWorker, '#perform' do
context 'cluster exists' do
- describe 'prometheus service' do
- let(:service_name) { 'prometheus' }
+ describe 'prometheus integration' do
+ let(:integration_name) { 'prometheus' }
let!(:integration) { create(:clusters_integrations_prometheus, cluster: cluster) }
- context 'prometheus service exists' do
- let!(:prometheus_service) { create(:prometheus_service, project: project, manual_configuration: false, active: true) }
+ context 'prometheus integration exists' do
+ let!(:prometheus_integration) { create(:prometheus_integration, project: project, manual_configuration: false, active: true) }
before do
- integration.delete # prometheus service before save synchronises active stated with integration existence.
+ integration.delete # prometheus integration before save synchronises active stated with integration existence.
end
context 'cluster type: group' do
@@ -20,9 +20,9 @@ RSpec.describe Clusters::Applications::DeactivateServiceWorker, '#perform' do
let(:project) { create(:project, group: group) }
let(:cluster) { create(:cluster_for_group, groups: [group]) }
- it 'ensures Prometheus service is deactivated' do
- expect { described_class.new.perform(cluster.id, service_name) }
- .to change { prometheus_service.reload.active }.from(true).to(false)
+ it 'ensures Prometheus integration is deactivated' do
+ expect { described_class.new.perform(cluster.id, integration_name) }
+ .to change { prometheus_integration.reload.active }.from(true).to(false)
end
end
@@ -30,9 +30,9 @@ RSpec.describe Clusters::Applications::DeactivateServiceWorker, '#perform' do
let(:project) { create(:project) }
let(:cluster) { create(:cluster, projects: [project]) }
- it 'ensures Prometheus service is deactivated' do
- expect { described_class.new.perform(cluster.id, service_name) }
- .to change { prometheus_service.reload.active }.from(true).to(false)
+ it 'ensures Prometheus integration is deactivated' do
+ expect { described_class.new.perform(cluster.id, integration_name) }
+ .to change { prometheus_integration.reload.active }.from(true).to(false)
end
end
@@ -40,20 +40,20 @@ RSpec.describe Clusters::Applications::DeactivateServiceWorker, '#perform' do
let(:project) { create(:project) }
let(:cluster) { create(:cluster, :instance) }
- it 'ensures Prometheus service is deactivated' do
- expect { described_class.new.perform(cluster.id, service_name) }
- .to change { prometheus_service.reload.active }.from(true).to(false)
+ it 'ensures Prometheus integration is deactivated' do
+ expect { described_class.new.perform(cluster.id, integration_name) }
+ .to change { prometheus_integration.reload.active }.from(true).to(false)
end
end
end
- context 'prometheus service does not exist' do
+ context 'prometheus integration does not exist' do
context 'cluster type: project' do
let(:project) { create(:project) }
let(:cluster) { create(:cluster, projects: [project]) }
it 'does not raise errors' do
- expect { described_class.new.perform(cluster.id, service_name) }.not_to raise_error
+ expect { described_class.new.perform(cluster.id, integration_name) }.not_to raise_error
end
end
end
diff --git a/spec/workers/concerns/application_worker_spec.rb b/spec/workers/concerns/application_worker_spec.rb
index 29c69ff8b4b..ac4e4a682c8 100644
--- a/spec/workers/concerns/application_worker_spec.rb
+++ b/spec/workers/concerns/application_worker_spec.rb
@@ -176,6 +176,77 @@ RSpec.describe ApplicationWorker do
end
end
+ describe '.data_consistency' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:data_consistency, :sidekiq_option_retry, :expect_error) do
+ :delayed | false | true
+ :delayed | 0 | true
+ :delayed | 3 | false
+ :delayed | nil | false
+ :sticky | false | false
+ :sticky | 0 | false
+ :sticky | 3 | false
+ :sticky | nil | false
+ :always | false | false
+ :always | 0 | false
+ :always | 3 | false
+ :always | nil | false
+ end
+
+ with_them do
+ before do
+ worker.sidekiq_options retry: sidekiq_option_retry unless sidekiq_option_retry.nil?
+ end
+
+ context "when workers data consistency is #{params['data_consistency']}" do
+ it "#{params['expect_error'] ? '' : 'not to '}raise an exception" do
+ if expect_error
+ expect { worker.data_consistency data_consistency }
+ .to raise_error("Retry support cannot be disabled if data_consistency is set to :delayed")
+ else
+ expect { worker.data_consistency data_consistency }
+ .not_to raise_error
+ end
+ end
+ end
+ end
+ end
+
+ describe '.retry' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:data_consistency, :sidekiq_option_retry, :expect_error) do
+ :delayed | false | true
+ :delayed | 0 | true
+ :delayed | 3 | false
+ :sticky | false | false
+ :sticky | 0 | false
+ :sticky | 3 | false
+ :always | false | false
+ :always | 0 | false
+ :always | 3 | false
+ end
+
+ with_them do
+ before do
+ worker.data_consistency(data_consistency)
+ end
+
+ context "when retry sidekiq option is #{params['sidekiq_option_retry']}" do
+ it "#{params['expect_error'] ? '' : 'not to '}raise an exception" do
+ if expect_error
+ expect { worker.sidekiq_options retry: sidekiq_option_retry }
+ .to raise_error("Retry support cannot be disabled if data_consistency is set to :delayed")
+ else
+ expect { worker.sidekiq_options retry: sidekiq_option_retry }
+ .not_to raise_error
+ end
+ end
+ end
+ end
+ end
+
describe '.perform_async' do
shared_examples_for 'worker utilizes load balancing capabilities' do |data_consistency|
before do
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index 85e1721461f..4c96daea7b3 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -11,12 +11,8 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
include(Gitlab::GithubImport::ObjectImporter)
- def counter_name
- :dummy_counter
- end
-
- def counter_description
- 'This is a counter'
+ def object_type
+ :dummy
end
def representation_class
@@ -42,7 +38,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
end)
end
- describe '#import' do
+ describe '#import', :clean_gitlab_redis_shared_state do
let(:importer_class) { double(:importer_class, name: 'klass_name') }
let(:importer_instance) { double(:importer_instance) }
let(:project) { double(:project, full_path: 'foo/bar', id: 1) }
@@ -64,10 +60,6 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(importer_instance)
.to receive(:execute)
- expect(worker.counter)
- .to receive(:increment)
- .and_call_original
-
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger)
.to receive(:info)
@@ -90,6 +82,11 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
end
worker.import(project, client, { 'number' => 10, 'github_id' => 1 })
+
+ expect(Gitlab::GithubImport::ObjectCounter.summary(project)).to eq({
+ 'fetched' => {},
+ 'imported' => { 'dummy' => 1 }
+ })
end
it 'logs error when the import fails' do
@@ -176,18 +173,4 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
.to raise_error(KeyError, 'key not found: :github_id')
end
end
-
- describe '#counter' do
- it 'returns a Prometheus counter' do
- expect(worker)
- .to receive(:counter_name)
- .and_call_original
-
- expect(worker)
- .to receive(:counter_description)
- .and_call_original
-
- worker.counter
- end
- end
end
diff --git a/spec/workers/concerns/waitable_worker_spec.rb b/spec/workers/concerns/waitable_worker_spec.rb
index 5d08d38380a..824ae8fcf83 100644
--- a/spec/workers/concerns/waitable_worker_spec.rb
+++ b/spec/workers/concerns/waitable_worker_spec.rb
@@ -38,6 +38,12 @@ RSpec.describe WaitableWorker do
it 'inlines workloads <= 3 jobs' do
args_list = [[1], [2], [3]]
expect(worker).to receive(:bulk_perform_inline).with(args_list).and_call_original
+ expect(Gitlab::AppJsonLogger).to(
+ receive(:info).with(a_hash_including('message' => 'running inline',
+ 'class' => 'Gitlab::Foo::Bar::DummyWorker',
+ 'job_status' => 'running',
+ 'queue' => 'foo_bar_dummy'))
+ .exactly(3).times)
worker.bulk_perform_and_wait(args_list)
diff --git a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
index c399697cbe0..506124216af 100644
--- a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
@@ -413,20 +413,30 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
disabled_repository.project.container_expiration_policy.update_column(:enabled, false)
end
+ context 'counts and capacity' do
+ where(:scheduled_count, :unfinished_count, :capacity, :expected_count) do
+ 2 | 2 | 10 | 4
+ 2 | 0 | 10 | 2
+ 0 | 2 | 10 | 2
+ 4 | 2 | 2 | 4
+ 4 | 0 | 2 | 4
+ 0 | 4 | 2 | 4
+ end
+
+ with_them do
+ before do
+ allow(worker).to receive(:cleanup_scheduled_count).and_return(scheduled_count)
+ allow(worker).to receive(:cleanup_unfinished_count).and_return(unfinished_count)
+ end
+
+ it { is_expected.to eq(expected_count) }
+ end
+ end
+
context 'with container repositories waiting for cleanup' do
let_it_be(:unfinished_repositories) { create_list(:container_repository, 2, :cleanup_unfinished) }
it { is_expected.to eq(3) }
-
- it 'logs the work count' do
- expect_log_info(
- cleanup_scheduled_count: 1,
- cleanup_unfinished_count: 2,
- cleanup_total_count: 3
- )
-
- subject
- end
end
context 'with no container repositories waiting for cleanup' do
@@ -436,16 +446,6 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
it { is_expected.to eq(0) }
-
- it 'logs 0 work count' do
- expect_log_info(
- cleanup_scheduled_count: 0,
- cleanup_unfinished_count: 0,
- cleanup_total_count: 0
- )
-
- subject
- end
end
end
@@ -468,9 +468,4 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
it { is_expected.to eq(0) }
end
end
-
- def expect_log_info(structure)
- expect(worker.logger)
- .to receive(:info).with(worker.structured_payload(structure))
- end
end
diff --git a/spec/workers/container_expiration_policy_worker_spec.rb b/spec/workers/container_expiration_policy_worker_spec.rb
index 6f81d06f653..69ddbe5c0f4 100644
--- a/spec/workers/container_expiration_policy_worker_spec.rb
+++ b/spec/workers/container_expiration_policy_worker_spec.rb
@@ -113,8 +113,8 @@ RSpec.describe ContainerExpirationPolicyWorker do
context 'process stale ongoing cleanups' do
let_it_be(:stuck_cleanup) { create(:container_repository, :cleanup_ongoing, expiration_policy_started_at: 1.day.ago) }
- let_it_be(:container_repository) { create(:container_repository, :cleanup_scheduled) }
- let_it_be(:container_repository) { create(:container_repository, :cleanup_unfinished) }
+ let_it_be(:container_repository1) { create(:container_repository, :cleanup_scheduled) }
+ let_it_be(:container_repository2) { create(:container_repository, :cleanup_unfinished) }
it 'set them as unfinished' do
expect { subject }
@@ -137,5 +137,36 @@ RSpec.describe ContainerExpirationPolicyWorker do
expect(container_expiration_policy3.reload.enabled).to be false
end
end
+
+ context 'counts logging' do
+ let_it_be(:container_repository1) { create(:container_repository, :cleanup_scheduled) }
+ let_it_be(:container_repository2) { create(:container_repository, :cleanup_unfinished) }
+ let_it_be(:container_repository3) { create(:container_repository, :cleanup_unfinished) }
+
+ before do
+ ContainerExpirationPolicy.update_all(enabled: true)
+ container_repository1.project.container_expiration_policy.update_column(:next_run_at, 5.minutes.ago)
+ end
+
+ it 'logs all the counts' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_required_count, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_unfinished_count, 2)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_total_count, 3)
+
+ subject
+ end
+
+ context 'with load balancing enabled' do
+ before do
+ allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
+ end
+
+ it 'reads the counts from the replica' do
+ expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
+
+ subject
+ end
+ end
+ end
end
end
diff --git a/spec/workers/database/partition_management_worker_spec.rb b/spec/workers/database/partition_management_worker_spec.rb
new file mode 100644
index 00000000000..01b7f209b2d
--- /dev/null
+++ b/spec/workers/database/partition_management_worker_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Database::PartitionManagementWorker do
+ describe '#perform' do
+ subject { described_class.new.perform }
+
+ let(:manager) { instance_double('PartitionManager', sync_partitions: nil) }
+ let(:monitoring) { instance_double('PartitionMonitoring', report_metrics: nil) }
+
+ before do
+ allow(Gitlab::Database::Partitioning::PartitionManager).to receive(:new).and_return(manager)
+ allow(Gitlab::Database::Partitioning::PartitionMonitoring).to receive(:new).and_return(monitoring)
+ end
+
+ it 'delegates to PartitionManager' do
+ expect(manager).to receive(:sync_partitions)
+
+ subject
+ end
+
+ it 'reports partition metrics' do
+ expect(monitoring).to receive(:report_metrics)
+
+ subject
+ end
+ end
+end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 34d42addef3..c75b9b43ef4 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -148,7 +148,9 @@ RSpec.describe 'Every Sidekiq worker' do
'Chaos::LeakMemWorker' => 3,
'Chaos::SleepWorker' => 3,
'ChatNotificationWorker' => false,
+ 'Ci::ArchiveTraceWorker' => 3,
'Ci::BatchResetMinutesWorker' => 10,
+ 'Ci::BuildFinishedWorker' => 3,
'Ci::BuildPrepareWorker' => 3,
'Ci::BuildScheduleWorker' => 3,
'Ci::BuildTraceChunkFlushWorker' => 3,
@@ -287,7 +289,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Gitlab::PhabricatorImport::ImportTasksWorker' => 5,
'GitlabPerformanceBarStatsWorker' => 3,
'GitlabShellWorker' => 3,
- 'GitlabUsagePingWorker' => 3,
+ 'GitlabServicePingWorker' => 3,
'GroupDestroyWorker' => 3,
'GroupExportWorker' => false,
'GroupImportWorker' => false,
@@ -416,6 +418,7 @@ RSpec.describe 'Every Sidekiq worker' do
'ScanSecurityReportSecretsWorker' => 17,
'Security::AutoFixWorker' => 3,
'Security::StoreScansWorker' => 3,
+ 'Security::TrackSecureScansWorker' => 1,
'SelfMonitoringProjectCreateWorker' => 3,
'SelfMonitoringProjectDeleteWorker' => 3,
'ServiceDeskEmailReceiverWorker' => 3,
diff --git a/spec/workers/expire_pipeline_cache_worker_spec.rb b/spec/workers/expire_pipeline_cache_worker_spec.rb
index 6a1a95b8052..8c24aaa985b 100644
--- a/spec/workers/expire_pipeline_cache_worker_spec.rb
+++ b/spec/workers/expire_pipeline_cache_worker_spec.rb
@@ -50,7 +50,6 @@ RSpec.describe ExpirePipelineCacheWorker do
it_behaves_like 'worker with data consistency',
described_class,
- feature_flag: :load_balancing_for_expire_pipeline_cache_worker,
data_consistency: :delayed
end
end
diff --git a/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb b/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
index 6476d82eb85..34073d0ea39 100644
--- a/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe Gitlab::GithubImport::ImportDiffNoteWorker do
expect(importer)
.to receive(:execute)
- expect(worker.counter)
+ expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment)
.and_call_original
diff --git a/spec/workers/gitlab/github_import/import_issue_worker_spec.rb b/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
index 9f5bd1d9e5e..dc0338eccad 100644
--- a/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe Gitlab::GithubImport::ImportIssueWorker do
expect(importer)
.to receive(:execute)
- expect(worker.counter)
+ expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment)
.and_call_original
diff --git a/spec/workers/gitlab/github_import/import_note_worker_spec.rb b/spec/workers/gitlab/github_import/import_note_worker_spec.rb
index 94bc8e26e4a..bc254e6246d 100644
--- a/spec/workers/gitlab/github_import/import_note_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_note_worker_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::GithubImport::ImportNoteWorker do
expect(importer)
.to receive(:execute)
- expect(worker.counter)
+ expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment)
.and_call_original
diff --git a/spec/workers/gitlab/github_import/import_pull_request_merged_by_worker_spec.rb b/spec/workers/gitlab/github_import/import_pull_request_merged_by_worker_spec.rb
index c799c676300..728b4c6b440 100644
--- a/spec/workers/gitlab/github_import/import_pull_request_merged_by_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_pull_request_merged_by_worker_spec.rb
@@ -12,12 +12,4 @@ RSpec.describe Gitlab::GithubImport::ImportPullRequestMergedByWorker do
describe '#importer_class' do
it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::PullRequestMergedByImporter) }
end
-
- describe '#counter_name' do
- it { expect(subject.counter_name).to eq(:github_importer_imported_pull_requests_merged_by) }
- end
-
- describe '#counter_description' do
- it { expect(subject.counter_description).to eq('The number of imported GitHub pull requests merged by') }
- end
end
diff --git a/spec/workers/gitlab/github_import/import_pull_request_review_worker_spec.rb b/spec/workers/gitlab/github_import/import_pull_request_review_worker_spec.rb
index cd14d6631d5..0607add52cd 100644
--- a/spec/workers/gitlab/github_import/import_pull_request_review_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_pull_request_review_worker_spec.rb
@@ -12,12 +12,4 @@ RSpec.describe Gitlab::GithubImport::ImportPullRequestReviewWorker do
describe '#importer_class' do
it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::PullRequestReviewImporter) }
end
-
- describe '#counter_name' do
- it { expect(subject.counter_name).to eq(:github_importer_imported_pull_request_reviews) }
- end
-
- describe '#counter_description' do
- it { expect(subject.counter_description).to eq('The number of imported GitHub pull request reviews') }
- end
end
diff --git a/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb b/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
index 1238929fbcb..6fe9741075f 100644
--- a/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe Gitlab::GithubImport::ImportPullRequestWorker do
expect(importer)
.to receive(:execute)
- expect(worker.counter)
+ expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment)
.and_call_original
diff --git a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
index 2615da2be15..8dea24dc74f 100644
--- a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
@@ -33,6 +33,10 @@ RSpec.describe Gitlab::GithubImport::Stage::FinishImportWorker do
message: 'GitHub project import finished',
import_stage: 'Gitlab::GithubImport::Stage::FinishImportWorker',
import_source: :github,
+ object_counts: {
+ 'fetched' => {},
+ 'imported' => {}
+ },
project_id: project.id,
duration_s: a_kind_of(Numeric)
)
diff --git a/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb b/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb
index f82f6ccd9d6..10702c17cb5 100644
--- a/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::JiraImport::Stage::ImportIssuesWorker do
let_it_be(:jira_import, reload: true) { create(:jira_import_state, :scheduled, project: project) }
before do
- stub_jira_service_test
+ stub_jira_integration_test
end
context 'when import did not start' do
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::JiraImport::Stage::ImportIssuesWorker do
end
context 'when import started', :clean_gitlab_redis_cache do
- let_it_be(:jira_service) { create(:jira_service, project: project) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project) }
before do
jira_import.start!
diff --git a/spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb b/spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb
index 0b7a35a92e2..52c516b9ff9 100644
--- a/spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb
@@ -21,10 +21,10 @@ RSpec.describe Gitlab::JiraImport::Stage::ImportLabelsWorker do
end
context 'when import started' do
- let!(:jira_service) { create(:jira_service, project: project) }
+ let!(:jira_integration) { create(:jira_integration, project: project) }
before do
- stub_jira_service_test
+ stub_jira_integration_test
jira_import.start!
diff --git a/spec/workers/gitlab_usage_ping_worker_spec.rb b/spec/workers/gitlab_service_ping_worker_spec.rb
index f282b20363c..abccc0dc967 100644
--- a/spec/workers/gitlab_usage_ping_worker_spec.rb
+++ b/spec/workers/gitlab_service_ping_worker_spec.rb
@@ -2,21 +2,21 @@
require 'spec_helper'
-RSpec.describe GitlabUsagePingWorker, :clean_gitlab_redis_shared_state do
+RSpec.describe GitlabServicePingWorker, :clean_gitlab_redis_shared_state do
before do
- allow_next_instance_of(SubmitUsagePingService) { |service| allow(service).to receive(:execute) }
+ allow_next_instance_of(ServicePing::SubmitService) { |service| allow(service).to receive(:execute) }
allow(subject).to receive(:sleep)
end
it 'does not run for GitLab.com' do
allow(Gitlab).to receive(:com?).and_return(true)
- expect(SubmitUsagePingService).not_to receive(:new)
+ expect(ServicePing::SubmitService).not_to receive(:new)
subject.perform
end
- it 'delegates to SubmitUsagePingService' do
- expect_next_instance_of(SubmitUsagePingService) { |service| expect(service).to receive(:execute) }
+ it 'delegates to ServicePing::SubmitService' do
+ expect_next_instance_of(ServicePing::SubmitService) { |service| expect(service).to receive(:execute) }
subject.perform
end
@@ -41,8 +41,8 @@ RSpec.describe GitlabUsagePingWorker, :clean_gitlab_redis_shared_state do
Gitlab::ExclusiveLease.new(described_class::LEASE_KEY, timeout: described_class::LEASE_TIMEOUT).try_obtain
end
- it 'does not invoke SubmitUsagePingService' do
- allow_next_instance_of(SubmitUsagePingService) { |service| expect(service).not_to receive(:execute) }
+ it 'does not invoke ServicePing::SubmitService' do
+ allow_next_instance_of(ServicePing::SubmitService) { |service| expect(service).not_to receive(:execute) }
expect { subject.perform }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
end
diff --git a/spec/workers/jira_connect/forward_event_worker_spec.rb b/spec/workers/jira_connect/forward_event_worker_spec.rb
new file mode 100644
index 00000000000..adfc071779a
--- /dev/null
+++ b/spec/workers/jira_connect/forward_event_worker_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::ForwardEventWorker do
+ describe '#perform' do
+ let!(:jira_connect_installation) { create(:jira_connect_installation, instance_url: self_managed_url, client_key: client_key, shared_secret: shared_secret) }
+ let(:base_path) { '/-/jira_connect' }
+ let(:event_path) { '/-/jira_connect/events/uninstalled' }
+
+ let(:self_managed_url) { 'http://example.com' }
+ let(:base_url) { self_managed_url + base_path }
+ let(:event_url) { self_managed_url + event_path }
+
+ let(:client_key) { '123' }
+ let(:shared_secret) { '123' }
+
+ subject { described_class.new.perform(jira_connect_installation.id, base_path, event_path) }
+
+ it 'forwards the event including the auth header and deletes the installation' do
+ stub_request(:post, event_url)
+
+ expect(Atlassian::Jwt).to receive(:create_query_string_hash).with(event_url, 'POST', base_url).and_return('some_qsh')
+ expect(Atlassian::Jwt).to receive(:encode).with({ iss: client_key, qsh: 'some_qsh' }, shared_secret).and_return('auth_token')
+ expect { subject }.to change(JiraConnectInstallation, :count).by(-1)
+
+ expect(WebMock).to have_requested(:post, event_url).with(headers: { 'Authorization' => 'JWT auth_token' })
+ end
+
+ context 'when installation does not exist' do
+ let(:jira_connect_installation) { instance_double(JiraConnectInstallation, id: -1) }
+
+ it 'does nothing' do
+ expect { subject }.not_to change(JiraConnectInstallation, :count)
+ end
+ end
+
+ context 'when installation does not have an instance_url' do
+ let!(:jira_connect_installation) { create(:jira_connect_installation) }
+
+ it 'forwards the event including the auth header' do
+ expect { subject }.to change(JiraConnectInstallation, :count).by(-1)
+
+ expect(WebMock).not_to have_requested(:post, '*')
+ end
+ end
+
+ context 'when it fails to forward the event' do
+ it 'still deletes the installation' do
+ allow(Gitlab::HTTP).to receive(:post).and_raise(StandardError)
+
+ expect { subject }.to raise_error(StandardError).and change(JiraConnectInstallation, :count).by(-1)
+ end
+ end
+ end
+end
diff --git a/spec/workers/jira_connect/sync_branch_worker_spec.rb b/spec/workers/jira_connect/sync_branch_worker_spec.rb
index 7c715f36fb4..349ccd10694 100644
--- a/spec/workers/jira_connect/sync_branch_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_branch_worker_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe JiraConnect::SyncBranchWorker do
include AfterNextHelpers
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ data_consistency: :delayed
+
describe '#perform' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository, group: group) }
@@ -15,65 +19,59 @@ RSpec.describe JiraConnect::SyncBranchWorker do
let(:commit_shas) { %w(b83d6e3 5a62481) }
let(:update_sequence_id) { 1 }
+ def perform
+ described_class.new.perform(project_id, branch_name, commit_shas, update_sequence_id)
+ end
+
def expect_jira_sync_service_execute(args)
- expect_next_instances_of(JiraConnect::SyncService, IdempotentWorkerHelper::WORKER_EXEC_TIMES) do |instance|
- expect(instance).to receive(:execute).with(args)
- end
+ expect_next(JiraConnect::SyncService).to receive(:execute).with(args)
end
- it_behaves_like 'an idempotent worker' do
- let(:job_args) { [project_id, branch_name, commit_shas, update_sequence_id] }
+ it 'calls JiraConnect::SyncService#execute' do
+ expect_jira_sync_service_execute(
+ branches: [instance_of(Gitlab::Git::Branch)],
+ commits: project.commits_by(oids: commit_shas),
+ update_sequence_id: update_sequence_id
+ )
- before do
- stub_request(:post, 'https://sample.atlassian.net/rest/devinfo/0.10/bulk').to_return(status: 200, body: '', headers: {})
- end
+ perform
+ end
+
+ context 'without branch name' do
+ let(:branch_name) { nil }
it 'calls JiraConnect::SyncService#execute' do
expect_jira_sync_service_execute(
- branches: [instance_of(Gitlab::Git::Branch)],
+ branches: nil,
commits: project.commits_by(oids: commit_shas),
update_sequence_id: update_sequence_id
)
- subject
- end
-
- context 'without branch name' do
- let(:branch_name) { nil }
-
- it 'calls JiraConnect::SyncService#execute' do
- expect_jira_sync_service_execute(
- branches: nil,
- commits: project.commits_by(oids: commit_shas),
- update_sequence_id: update_sequence_id
- )
-
- subject
- end
+ perform
end
+ end
- context 'without commits' do
- let(:commit_shas) { nil }
+ context 'without commits' do
+ let(:commit_shas) { nil }
- it 'calls JiraConnect::SyncService#execute' do
- expect_jira_sync_service_execute(
- branches: [instance_of(Gitlab::Git::Branch)],
- commits: nil,
- update_sequence_id: update_sequence_id
- )
+ it 'calls JiraConnect::SyncService#execute' do
+ expect_jira_sync_service_execute(
+ branches: [instance_of(Gitlab::Git::Branch)],
+ commits: nil,
+ update_sequence_id: update_sequence_id
+ )
- subject
- end
+ perform
end
+ end
- context 'when project no longer exists' do
- let(:project_id) { non_existing_record_id }
+ context 'when project no longer exists' do
+ let(:project_id) { non_existing_record_id }
- it 'does not call JiraConnect::SyncService' do
- expect(JiraConnect::SyncService).not_to receive(:new)
+ it 'does not call JiraConnect::SyncService' do
+ expect(JiraConnect::SyncService).not_to receive(:new)
- subject
- end
+ perform
end
end
end
diff --git a/spec/workers/jira_connect/sync_builds_worker_spec.rb b/spec/workers/jira_connect/sync_builds_worker_spec.rb
index 8fb8692fdf7..9be0cccae2b 100644
--- a/spec/workers/jira_connect/sync_builds_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_builds_worker_spec.rb
@@ -4,7 +4,10 @@ require 'spec_helper'
RSpec.describe ::JiraConnect::SyncBuildsWorker do
include AfterNextHelpers
- include ServicesHelper
+
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ data_consistency: :delayed
describe '#perform' do
let_it_be(:pipeline) { create(:ci_pipeline) }
diff --git a/spec/workers/jira_connect/sync_deployments_worker_spec.rb b/spec/workers/jira_connect/sync_deployments_worker_spec.rb
index 16fa2643d04..86ba11ebe9c 100644
--- a/spec/workers/jira_connect/sync_deployments_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_deployments_worker_spec.rb
@@ -4,7 +4,10 @@ require 'spec_helper'
RSpec.describe ::JiraConnect::SyncDeploymentsWorker do
include AfterNextHelpers
- include ServicesHelper
+
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ data_consistency: :delayed
describe '#perform' do
let_it_be(:deployment) { create(:deployment) }
diff --git a/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb b/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb
index 038eed7b9f1..6763aefcbec 100644
--- a/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb
@@ -4,7 +4,10 @@ require 'spec_helper'
RSpec.describe ::JiraConnect::SyncFeatureFlagsWorker do
include AfterNextHelpers
- include ServicesHelper
+
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ data_consistency: :delayed
describe '#perform' do
let_it_be(:feature_flag) { create(:operations_feature_flag) }
diff --git a/spec/workers/jira_connect/sync_merge_request_worker_spec.rb b/spec/workers/jira_connect/sync_merge_request_worker_spec.rb
index 6a0a0744f6f..65976566b22 100644
--- a/spec/workers/jira_connect/sync_merge_request_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_merge_request_worker_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe JiraConnect::SyncMergeRequestWorker do
include AfterNextHelpers
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ data_consistency: :delayed
+
describe '#perform' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository, group: group) }
@@ -14,29 +18,24 @@ RSpec.describe JiraConnect::SyncMergeRequestWorker do
let(:merge_request_id) { merge_request.id }
let(:update_sequence_id) { 1 }
- it_behaves_like 'an idempotent worker' do
- let(:job_args) { [merge_request_id, update_sequence_id] }
-
- before do
- stub_request(:post, 'https://sample.atlassian.net/rest/devinfo/0.10/bulk').to_return(status: 200, body: '', headers: {})
- end
+ def perform
+ described_class.new.perform(merge_request_id, update_sequence_id)
+ end
- it 'calls JiraConnect::SyncService#execute' do
- expect_next_instances_of(JiraConnect::SyncService, IdempotentWorkerHelper::WORKER_EXEC_TIMES) do |service|
- expect(service).to receive(:execute).with(merge_requests: [merge_request], update_sequence_id: update_sequence_id)
- end
+ it 'calls JiraConnect::SyncService#execute' do
+ expect_next(JiraConnect::SyncService).to receive(:execute)
+ .with(merge_requests: [merge_request], update_sequence_id: update_sequence_id)
- subject
- end
+ perform
+ end
- context 'when MR no longer exists' do
- let(:merge_request_id) { non_existing_record_id }
+ context 'when MR no longer exists' do
+ let(:merge_request_id) { non_existing_record_id }
- it 'does not call JiraConnect::SyncService' do
- expect(JiraConnect::SyncService).not_to receive(:new)
+ it 'does not call JiraConnect::SyncService' do
+ expect(JiraConnect::SyncService).not_to receive(:new)
- subject
- end
+ perform
end
end
end
diff --git a/spec/workers/jira_connect/sync_project_worker_spec.rb b/spec/workers/jira_connect/sync_project_worker_spec.rb
index 5c0e7e7609c..d172bde2400 100644
--- a/spec/workers/jira_connect/sync_project_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_project_worker_spec.rb
@@ -3,6 +3,12 @@
require 'spec_helper'
RSpec.describe JiraConnect::SyncProjectWorker, factory_default: :keep do
+ include AfterNextHelpers
+
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ data_consistency: :delayed
+
describe '#perform' do
let_it_be(:project) { create_default(:project).freeze }
@@ -14,6 +20,22 @@ RSpec.describe JiraConnect::SyncProjectWorker, factory_default: :keep do
let(:jira_connect_sync_service) { JiraConnect::SyncService.new(project) }
let(:job_args) { [project.id, update_sequence_id] }
let(:update_sequence_id) { 1 }
+ let(:request_path) { '/rest/devinfo/0.10/bulk' }
+ let(:request_body) do
+ {
+ repositories: [
+ Atlassian::JiraConnect::Serializers::RepositoryEntity.represent(
+ project,
+ merge_requests: [mr_with_jira_description, mr_with_jira_title],
+ update_sequence_id: update_sequence_id
+ )
+ ]
+ }
+ end
+
+ def perform(project_id, update_sequence_id)
+ described_class.new.perform(project_id, update_sequence_id)
+ end
before do
stub_request(:post, 'https://sample.atlassian.net/rest/devinfo/0.10/bulk').to_return(status: 200, body: '', headers: {})
@@ -24,54 +46,37 @@ RSpec.describe JiraConnect::SyncProjectWorker, factory_default: :keep do
context 'when the project is not found' do
it 'does not raise an error' do
- expect { described_class.new.perform('non_existing_record_id', update_sequence_id) }.not_to raise_error
+ expect { perform('non_existing_record_id', update_sequence_id) }.not_to raise_error
end
end
it 'avoids N+1 database queries' do
- control_count = ActiveRecord::QueryRecorder.new { described_class.new.perform(project.id, update_sequence_id) }.count
+ control_count = ActiveRecord::QueryRecorder.new { perform(project.id, update_sequence_id) }.count
create(:merge_request, :unique_branches, title: 'TEST-123')
- expect { described_class.new.perform(project.id, update_sequence_id) }.not_to exceed_query_limit(control_count)
+ expect { perform(project.id, update_sequence_id) }.not_to exceed_query_limit(control_count)
end
- it_behaves_like 'an idempotent worker' do
- let(:request_path) { '/rest/devinfo/0.10/bulk' }
- let(:request_body) do
- {
- repositories: [
- Atlassian::JiraConnect::Serializers::RepositoryEntity.represent(
- project,
- merge_requests: [mr_with_jira_description, mr_with_jira_title],
- update_sequence_id: update_sequence_id
- )
- ]
- }
- end
-
- it 'sends the request with custom update_sequence_id' do
- allow_next_instances_of(Atlassian::JiraConnect::Client, IdempotentWorkerHelper::WORKER_EXEC_TIMES) do |client|
- expect(client).to receive(:post).with(request_path, request_body)
- end
+ it 'sends the request with custom update_sequence_id' do
+ allow_next(Atlassian::JiraConnect::Client).to receive(:post)
+ .with(request_path, request_body)
- subject
- end
+ perform(project.id, update_sequence_id)
+ end
- context 'when the number of merge requests to sync is higher than the limit' do
- let!(:most_recent_merge_request) { create(:merge_request, :unique_branches, description: 'TEST-323', title: 'TEST-123') }
+ context 'when the number of merge requests to sync is higher than the limit' do
+ let!(:most_recent_merge_request) { create(:merge_request, :unique_branches, description: 'TEST-323', title: 'TEST-123') }
- before do
- stub_const("#{described_class}::MERGE_REQUEST_LIMIT", 1)
- end
+ before do
+ stub_const("#{described_class}::MERGE_REQUEST_LIMIT", 1)
+ end
- it 'syncs only the most recent merge requests within the limit' do
- expect(jira_connect_sync_service).to receive(:execute)
- .exactly(IdempotentWorkerHelper::WORKER_EXEC_TIMES).times
- .with(merge_requests: [most_recent_merge_request], update_sequence_id: update_sequence_id)
+ it 'syncs only the most recent merge requests within the limit' do
+ expect(jira_connect_sync_service).to receive(:execute)
+ .with(merge_requests: [most_recent_merge_request], update_sequence_id: update_sequence_id)
- subject
- end
+ perform(project.id, update_sequence_id)
end
end
end
diff --git a/spec/workers/merge_request_cleanup_refs_worker_spec.rb b/spec/workers/merge_request_cleanup_refs_worker_spec.rb
index 7401c6dd4d7..1de927a81e4 100644
--- a/spec/workers/merge_request_cleanup_refs_worker_spec.rb
+++ b/spec/workers/merge_request_cleanup_refs_worker_spec.rb
@@ -3,18 +3,41 @@
require 'spec_helper'
RSpec.describe MergeRequestCleanupRefsWorker do
- describe '#perform' do
- context 'when merge request exists' do
- let(:merge_request) { create(:merge_request) }
- let(:job_args) { merge_request.id }
-
- include_examples 'an idempotent worker' do
- it 'calls MergeRequests::CleanupRefsService#execute' do
- expect_next_instance_of(MergeRequests::CleanupRefsService, merge_request) do |svc|
- expect(svc).to receive(:execute).and_call_original
- end.twice
-
- subject
+ let(:worker) { described_class.new }
+
+ describe '#perform_work' do
+ context 'when next cleanup schedule is found' do
+ let(:failed_count) { 0 }
+ let!(:cleanup_schedule) { create(:merge_request_cleanup_schedule, failed_count: failed_count) }
+
+ it 'marks the cleanup schedule as completed on success' do
+ stub_cleanup_service(status: :success)
+ worker.perform_work
+
+ expect(cleanup_schedule.reload).to be_completed
+ expect(cleanup_schedule.completed_at).to be_present
+ end
+
+ context 'when service fails' do
+ before do
+ stub_cleanup_service(status: :error)
+ worker.perform_work
+ end
+
+ it 'marks the cleanup schedule as unstarted and track the failure' do
+ expect(cleanup_schedule.reload).to be_unstarted
+ expect(cleanup_schedule.failed_count).to eq(1)
+ expect(cleanup_schedule.completed_at).to be_nil
+ end
+
+ context "and cleanup schedule has already failed #{described_class::FAILURE_THRESHOLD} times" do
+ let(:failed_count) { described_class::FAILURE_THRESHOLD }
+
+ it 'marks the cleanup schedule as failed and track the failure' do
+ expect(cleanup_schedule.reload).to be_failed
+ expect(cleanup_schedule.failed_count).to eq(described_class::FAILURE_THRESHOLD + 1)
+ expect(cleanup_schedule.completed_at).to be_nil
+ end
end
end
@@ -23,20 +46,52 @@ RSpec.describe MergeRequestCleanupRefsWorker do
stub_feature_flags(merge_request_refs_cleanup: false)
end
- it 'does not clean up the merge request' do
+ it 'does nothing' do
expect(MergeRequests::CleanupRefsService).not_to receive(:new)
- perform_multiple(1)
+ worker.perform_work
end
end
end
- context 'when merge request does not exist' do
- it 'does not call MergeRequests::CleanupRefsService' do
+ context 'when there is no next cleanup schedule found' do
+ it 'does nothing' do
expect(MergeRequests::CleanupRefsService).not_to receive(:new)
- perform_multiple(1)
+ worker.perform_work
+ end
+ end
+ end
+
+ describe '#remaining_work_count' do
+ let_it_be(:unstarted) { create_list(:merge_request_cleanup_schedule, 2) }
+ let_it_be(:running) { create_list(:merge_request_cleanup_schedule, 2, :running) }
+ let_it_be(:completed) { create_list(:merge_request_cleanup_schedule, 2, :completed) }
+
+ it 'returns number of scheduled and unstarted cleanup schedule records' do
+ expect(worker.remaining_work_count).to eq(unstarted.count)
+ end
+
+ context 'when count exceeds max_running_jobs' do
+ before do
+ create_list(:merge_request_cleanup_schedule, worker.max_running_jobs)
+ end
+
+ it 'gets capped at max_running_jobs' do
+ expect(worker.remaining_work_count).to eq(worker.max_running_jobs)
end
end
end
+
+ describe '#max_running_jobs' do
+ it 'returns the value of MAX_RUNNING_JOBS' do
+ expect(worker.max_running_jobs).to eq(described_class::MAX_RUNNING_JOBS)
+ end
+ end
+
+ def stub_cleanup_service(result)
+ expect_next_instance_of(MergeRequests::CleanupRefsService, cleanup_schedule.merge_request) do |svc|
+ expect(svc).to receive(:execute).and_return(result)
+ end
+ end
end
diff --git a/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb b/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb
index 3b94eb0d1be..2e7b6356692 100644
--- a/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb
+++ b/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb
@@ -2,38 +2,31 @@
require 'spec_helper'
-RSpec.describe Namespaces::InProductMarketingEmailsWorker, '#perform' do
- using RSpec::Parameterized::TableSyntax
-
+RSpec.describe Namespaces::InProductMarketingEmailsWorker, '#perform', unless: Gitlab.ee? do
# Running this in EE would call the overridden method, which can't be tested in CE.
# The EE code is covered in a separate EE spec.
- context 'not on gitlab.com', unless: Gitlab.ee? do
- let(:is_gitlab_com) { false }
-
- where(:in_product_marketing_emails_enabled, :experiment_active, :executes_service) do
- true | true | 1
- true | false | 1
- false | false | 0
- false | true | 0
+
+ context 'when the in_product_marketing_emails_enabled setting is disabled' do
+ before do
+ stub_application_setting(in_product_marketing_emails_enabled: false)
end
- with_them do
- it_behaves_like 'in-product marketing email'
+ it 'does not execute the email service' do
+ expect(Namespaces::InProductMarketingEmailsService).not_to receive(:send_for_all_tracks_and_intervals)
+
+ subject.perform
end
end
- context 'on gitlab.com' do
- let(:is_gitlab_com) { true }
-
- where(:in_product_marketing_emails_enabled, :experiment_active, :executes_service) do
- true | true | 1
- true | false | 0
- false | false | 0
- false | true | 0
+ context 'when the in_product_marketing_emails_enabled setting is enabled' do
+ before do
+ stub_application_setting(in_product_marketing_emails_enabled: true)
end
- with_them do
- it_behaves_like 'in-product marketing email'
+ it 'executes the email service' do
+ expect(Namespaces::InProductMarketingEmailsService).to receive(:send_for_all_tracks_and_intervals)
+
+ subject.perform
end
end
end
diff --git a/spec/workers/packages/helm/extraction_worker_spec.rb b/spec/workers/packages/helm/extraction_worker_spec.rb
new file mode 100644
index 00000000000..258413a3410
--- /dev/null
+++ b/spec/workers/packages/helm/extraction_worker_spec.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Helm::ExtractionWorker, type: :worker do
+ describe '#perform' do
+ let_it_be(:package) { create(:helm_package, without_package_files: true, status: 'processing')}
+
+ let!(:package_file) { create(:helm_package_file, without_loaded_metadatum: true, package: package) }
+ let(:package_file_id) { package_file.id }
+ let(:channel) { 'stable' }
+
+ let(:expected_metadata) do
+ {
+ 'apiVersion' => 'v2',
+ 'description' => 'File, Block, and Object Storage Services for your Cloud-Native Environment',
+ 'icon' => 'https://rook.io/images/rook-logo.svg',
+ 'name' => 'rook-ceph',
+ 'sources' => ['https://github.com/rook/rook'],
+ 'version' => 'v1.5.8'
+ }
+ end
+
+ subject { described_class.new.perform(channel, package_file_id) }
+
+ shared_examples 'handling error' do
+ it 'mark the package as errored', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(Packages::Helm::ExtractFileMetadataService::ExtractionError),
+ project_id: package_file.package.project_id
+ )
+ expect { subject }
+ .to not_change { Packages::Package.count }
+ .and not_change { Packages::PackageFile.count }
+ .and change { package.reload.status }.from('processing').to('error')
+ end
+ end
+
+ context 'with valid package file' do
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [channel, package_file_id] }
+
+ it 'updates package and package file', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ expect { subject }
+ .to not_change { Packages::Package.count }
+ .and not_change { Packages::PackageFile.count }
+ .and change { Packages::Helm::FileMetadatum.count }.from(0).to(1)
+ .and change { package.reload.status }.from('processing').to('default')
+
+ helm_file_metadatum = package_file.helm_file_metadatum
+
+ expect(helm_file_metadatum.channel).to eq(channel)
+ expect(helm_file_metadatum.metadata).to eq(expected_metadata)
+ end
+ end
+ end
+
+ context 'with invalid package file id' do
+ let(:package_file_id) { 5555 }
+
+ it "doesn't update helm_file_metadatum", :aggregate_failures do
+ expect { subject }
+ .to not_change { Packages::Package.count }
+ .and not_change { Packages::PackageFile.count }
+ .and not_change { Packages::Helm::FileMetadatum.count }
+ .and not_change { package.reload.status }
+ end
+ end
+
+ context 'with an empty package file' do
+ before do
+ expect_next_instance_of(Gem::Package::TarReader) do |tar_reader|
+ expect(tar_reader).to receive(:each).and_return([])
+ end
+ end
+
+ it_behaves_like 'handling error'
+ end
+
+ context 'with an invalid YAML' do
+ before do
+ expect_next_instance_of(Gem::Package::TarReader::Entry) do |entry|
+ expect(entry).to receive(:read).and_return('{')
+ end
+ end
+
+ it_behaves_like 'handling error'
+ end
+ end
+end
diff --git a/spec/workers/partition_creation_worker_spec.rb b/spec/workers/partition_creation_worker_spec.rb
index 37225cc1f79..5d15870b7f6 100644
--- a/spec/workers/partition_creation_worker_spec.rb
+++ b/spec/workers/partition_creation_worker_spec.rb
@@ -1,27 +1,16 @@
# frozen_string_literal: true
-
-require "spec_helper"
+#
+require 'spec_helper'
RSpec.describe PartitionCreationWorker do
- describe '#perform' do
- subject { described_class.new.perform }
-
- let(:creator) { instance_double('PartitionCreator', create_partitions: nil) }
- let(:monitoring) { instance_double('PartitionMonitoring', report_metrics: nil) }
-
- before do
- allow(Gitlab::Database::Partitioning::PartitionCreator).to receive(:new).and_return(creator)
- allow(Gitlab::Database::Partitioning::PartitionMonitoring).to receive(:new).and_return(monitoring)
- end
+ subject { described_class.new.perform }
- it 'delegates to PartitionCreator' do
- expect(creator).to receive(:create_partitions)
+ let(:management_worker) { double }
- subject
- end
-
- it 'reports partition metrics' do
- expect(monitoring).to receive(:report_metrics)
+ describe '#perform' do
+ it 'forwards to the Database::PartitionManagementWorker' do
+ expect(Database::PartitionManagementWorker).to receive(:new).and_return(management_worker)
+ expect(management_worker).to receive(:perform)
subject
end
diff --git a/spec/workers/pipeline_hooks_worker_spec.rb b/spec/workers/pipeline_hooks_worker_spec.rb
index 5957b355c8e..0ed00c0c66a 100644
--- a/spec/workers/pipeline_hooks_worker_spec.rb
+++ b/spec/workers/pipeline_hooks_worker_spec.rb
@@ -25,6 +25,5 @@ RSpec.describe PipelineHooksWorker do
it_behaves_like 'worker with data consistency',
described_class,
- feature_flag: :load_balancing_for_pipeline_hooks_worker,
data_consistency: :delayed
end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index 4d3cc447d9b..04a38874905 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe PostReceive do
include AfterNextHelpers
- include ServicesHelper
let(:changes) { "123456 789012 refs/heads/tést\n654321 210987 refs/tags/tag" }
let(:wrongly_encoded_changes) { changes.encode("ISO-8859-1").force_encoding("UTF-8") }
@@ -234,7 +233,7 @@ RSpec.describe PostReceive do
end
it 'calls Git::ProcessRefChangesService' do
- expect_execution_of(Git::ProcessRefChangesService)
+ expect(Git::ProcessRefChangesService).to get_executed
perform
end
@@ -269,7 +268,7 @@ RSpec.describe PostReceive do
allow(Gitlab::DataBuilder::Repository).to receive(:update).and_return(fake_hook_data)
# silence hooks so we can isolate
allow_next(Key).to receive(:post_create_hook).and_return(true)
- expect_execution_of(Git::ProcessRefChangesService)
+ expect(Git::ProcessRefChangesService).to get_executed
end
it 'calls SystemHooksService' do
@@ -379,7 +378,7 @@ RSpec.describe PostReceive do
allow(Project).to receive(:find_by).and_return(project)
expect(project).to receive(:execute_hooks).twice
- expect(project).to receive(:execute_services).twice
+ expect(project).to receive(:execute_integrations).twice
perform
end
diff --git a/spec/workers/project_service_worker_spec.rb b/spec/workers/project_service_worker_spec.rb
index 9383e7ec5c4..7813d011274 100644
--- a/spec/workers/project_service_worker_spec.rb
+++ b/spec/workers/project_service_worker_spec.rb
@@ -3,22 +3,24 @@ require 'spec_helper'
RSpec.describe ProjectServiceWorker, '#perform' do
let(:worker) { described_class.new }
- let(:service) { Integrations::Jira.new }
+ let(:integration) { Integrations::Jira.new }
before do
- allow(Integration).to receive(:find).and_return(service)
+ allow(Integration).to receive(:find).and_return(integration)
end
- it 'executes service with given data' do
+ it 'executes integration with given data' do
data = { test: 'test' }
- expect(service).to receive(:execute).with(data)
+ expect(integration).to receive(:execute).with(data)
worker.perform(1, data)
end
it 'logs error messages' do
- allow(service).to receive(:execute).and_raise(StandardError, 'invalid URL')
- expect(Sidekiq.logger).to receive(:error).with({ class: described_class.name, service_class: service.class.name, message: "invalid URL" })
+ error = StandardError.new('invalid URL')
+ allow(integration).to receive(:execute).and_raise(error)
+
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(error, integration_class: 'Integrations::Jira')
worker.perform(1, {})
end
diff --git a/spec/workers/projects/post_creation_worker_spec.rb b/spec/workers/projects/post_creation_worker_spec.rb
index 50c21575878..06acf601666 100644
--- a/spec/workers/projects/post_creation_worker_spec.rb
+++ b/spec/workers/projects/post_creation_worker_spec.rb
@@ -13,16 +13,16 @@ RSpec.describe Projects::PostCreationWorker do
it_behaves_like 'an idempotent worker' do
let(:job_args) { [project.id] }
- describe 'Prometheus service' do
+ describe 'Prometheus integration' do
context 'project is nil' do
let(:job_args) { [nil] }
- it 'does not create prometheus service' do
+ it 'does not create prometheus integration' do
expect { subject }.not_to change { Integration.count }
end
end
- context 'when project has access to shared service' do
+ context 'when project has access to shared integration' do
context 'Prometheus application is shared via group cluster' do
let(:project) { create(:project, group: group) }
let(:cluster) { create(:cluster, :group, groups: [group]) }
@@ -36,13 +36,13 @@ RSpec.describe Projects::PostCreationWorker do
create(:clusters_integrations_prometheus, cluster: cluster)
end
- it 'creates PrometheusService record', :aggregate_failures do
+ it 'creates an Integrations::Prometheus record', :aggregate_failures do
subject
- service = project.prometheus_service
- expect(service.active).to be true
- expect(service.manual_configuration?).to be false
- expect(service.persisted?).to be true
+ integration = project.prometheus_integration
+ expect(integration.active).to be true
+ expect(integration.manual_configuration?).to be false
+ expect(integration.persisted?).to be true
end
end
@@ -53,31 +53,31 @@ RSpec.describe Projects::PostCreationWorker do
create(:clusters_integrations_prometheus, cluster: cluster)
end
- it 'creates PrometheusService record', :aggregate_failures do
+ it 'creates an Integrations::Prometheus record', :aggregate_failures do
subject
- service = project.prometheus_service
- expect(service.active).to be true
- expect(service.manual_configuration?).to be false
- expect(service.persisted?).to be true
+ integration = project.prometheus_integration
+ expect(integration.active).to be true
+ expect(integration.manual_configuration?).to be false
+ expect(integration.persisted?).to be true
end
it 'cleans invalid record and logs warning', :aggregate_failures do
- invalid_service_record = build(:prometheus_service, properties: { api_url: nil, manual_configuration: true }.to_json)
- allow(PrometheusService).to receive(:new).and_return(invalid_service_record)
+ invalid_integration_record = build(:prometheus_integration, properties: { api_url: nil, manual_configuration: true }.to_json)
+ allow(::Integrations::Prometheus).to receive(:new).and_return(invalid_integration_record)
expect(Gitlab::ErrorTracking).to receive(:track_exception).with(an_instance_of(ActiveRecord::RecordInvalid), include(extra: { project_id: a_kind_of(Integer) })).twice
subject
- expect(project.prometheus_service).to be_nil
+ expect(project.prometheus_integration).to be_nil
end
end
context 'shared Prometheus application is not available' do
- it 'does not persist PrometheusService record', :aggregate_failures do
+ it 'does not persist an Integrations::Prometheus record' do
subject
- expect(project.prometheus_service).to be_nil
+ expect(project.prometheus_integration).to be_nil
end
end
end
diff --git a/spec/workers/propagate_integration_group_worker_spec.rb b/spec/workers/propagate_integration_group_worker_spec.rb
index 1c72bed323a..9d46534df4f 100644
--- a/spec/workers/propagate_integration_group_worker_spec.rb
+++ b/spec/workers/propagate_integration_group_worker_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe PropagateIntegrationGroupWorker do
let_it_be(:another_group) { create(:group) }
let_it_be(:subgroup1) { create(:group, parent: group) }
let_it_be(:subgroup2) { create(:group, parent: group) }
- let_it_be(:integration) { create(:redmine_service, :instance) }
+ let_it_be(:integration) { create(:redmine_integration, :instance) }
let(:job_args) { [integration.id, group.id, subgroup2.id] }
@@ -22,7 +22,7 @@ RSpec.describe PropagateIntegrationGroupWorker do
end
context 'with a group integration' do
- let_it_be(:integration) { create(:redmine_service, group: group, project: nil) }
+ let_it_be(:integration) { create(:redmine_integration, group: group, project: nil) }
it 'calls to BulkCreateIntegrationService' do
expect(BulkCreateIntegrationService).to receive(:new)
diff --git a/spec/workers/propagate_integration_inherit_descendant_worker_spec.rb b/spec/workers/propagate_integration_inherit_descendant_worker_spec.rb
index b5eb0f69017..8a231d4104c 100644
--- a/spec/workers/propagate_integration_inherit_descendant_worker_spec.rb
+++ b/spec/workers/propagate_integration_inherit_descendant_worker_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe PropagateIntegrationInheritDescendantWorker do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
- let_it_be(:group_integration) { create(:redmine_service, group: group, project: nil) }
- let_it_be(:subgroup_integration) { create(:redmine_service, group: subgroup, project: nil, inherit_from_id: group_integration.id) }
+ let_it_be(:group_integration) { create(:redmine_integration, group: group, project: nil) }
+ let_it_be(:subgroup_integration) { create(:redmine_integration, group: subgroup, project: nil, inherit_from_id: group_integration.id) }
it_behaves_like 'an idempotent worker' do
let(:job_args) { [group_integration.id, subgroup_integration.id, subgroup_integration.id] }
diff --git a/spec/workers/propagate_integration_inherit_worker_spec.rb b/spec/workers/propagate_integration_inherit_worker_spec.rb
index 2b4f241f755..dd5d246d7f9 100644
--- a/spec/workers/propagate_integration_inherit_worker_spec.rb
+++ b/spec/workers/propagate_integration_inherit_worker_spec.rb
@@ -4,10 +4,10 @@ require 'spec_helper'
RSpec.describe PropagateIntegrationInheritWorker do
describe '#perform' do
- let_it_be(:integration) { create(:redmine_service, :instance) }
- let_it_be(:integration1) { create(:redmine_service, inherit_from_id: integration.id) }
+ let_it_be(:integration) { create(:redmine_integration, :instance) }
+ let_it_be(:integration1) { create(:redmine_integration, inherit_from_id: integration.id) }
let_it_be(:integration2) { create(:bugzilla_integration, inherit_from_id: integration.id) }
- let_it_be(:integration3) { create(:redmine_service) }
+ let_it_be(:integration3) { create(:redmine_integration) }
it_behaves_like 'an idempotent worker' do
let(:job_args) { [integration.id, integration1.id, integration3.id] }
diff --git a/spec/workers/propagate_integration_project_worker_spec.rb b/spec/workers/propagate_integration_project_worker_spec.rb
index c8293744bec..312631252cc 100644
--- a/spec/workers/propagate_integration_project_worker_spec.rb
+++ b/spec/workers/propagate_integration_project_worker_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe PropagateIntegrationProjectWorker do
let_it_be(:project1) { create(:project) }
let_it_be(:project2) { create(:project, group: group) }
let_it_be(:project3) { create(:project, group: group) }
- let_it_be(:integration) { create(:redmine_service, :instance) }
+ let_it_be(:integration) { create(:redmine_integration, :instance) }
let(:job_args) { [integration.id, project1.id, project3.id] }
@@ -22,7 +22,7 @@ RSpec.describe PropagateIntegrationProjectWorker do
end
context 'with a group integration' do
- let_it_be(:integration) { create(:redmine_service, group: group, project: nil) }
+ let_it_be(:integration) { create(:redmine_integration, group: group, project: nil) }
it 'calls to BulkCreateIntegrationService' do
expect(BulkCreateIntegrationService).to receive(:new)
diff --git a/spec/workers/remove_expired_group_links_worker_spec.rb b/spec/workers/remove_expired_group_links_worker_spec.rb
index ff5f7b9db27..151bbb75226 100644
--- a/spec/workers/remove_expired_group_links_worker_spec.rb
+++ b/spec/workers/remove_expired_group_links_worker_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe RemoveExpiredGroupLinksWorker do
subject.perform
end
- it 'removes project authorization' do
+ it 'removes project authorization', :sidekiq_inline do
shared_group = group_group_link.shared_group
shared_with_group = group_group_link.shared_with_group
project = create(:project, group: shared_group)
diff --git a/spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb b/spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb
index 869818b257e..ef515e43474 100644
--- a/spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb
+++ b/spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb
@@ -6,16 +6,9 @@ RSpec.describe ScheduleMergeRequestCleanupRefsWorker do
subject(:worker) { described_class.new }
describe '#perform' do
- before do
- allow(MergeRequest::CleanupSchedule)
- .to receive(:scheduled_merge_request_ids)
- .with(described_class::LIMIT)
- .and_return([1, 2, 3, 4])
- end
-
it 'does nothing if the database is read-only' do
allow(Gitlab::Database).to receive(:read_only?).and_return(true)
- expect(MergeRequestCleanupRefsWorker).not_to receive(:bulk_perform_in)
+ expect(MergeRequestCleanupRefsWorker).not_to receive(:perform_with_capacity)
worker.perform
end
@@ -26,25 +19,17 @@ RSpec.describe ScheduleMergeRequestCleanupRefsWorker do
end
it 'does not schedule any merge request clean ups' do
- expect(MergeRequestCleanupRefsWorker).not_to receive(:bulk_perform_in)
+ expect(MergeRequestCleanupRefsWorker).not_to receive(:perform_with_capacity)
worker.perform
end
end
include_examples 'an idempotent worker' do
- it 'schedules MergeRequestCleanupRefsWorker to be performed by batch' do
- expect(MergeRequestCleanupRefsWorker)
- .to receive(:bulk_perform_in)
- .with(
- described_class::DELAY,
- [[1], [2], [3], [4]],
- batch_size: described_class::BATCH_SIZE
- )
+ it 'schedules MergeRequestCleanupRefsWorker to be performed with capacity' do
+ expect(MergeRequestCleanupRefsWorker).to receive(:perform_with_capacity).twice
- expect(worker).to receive(:log_extra_metadata_on_done).with(:merge_requests_count, 4)
-
- worker.perform
+ subject
end
end
end
diff --git a/spec/workers/users/deactivate_dormant_users_worker_spec.rb b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
index 32291a143ee..934c497c79a 100644
--- a/spec/workers/users/deactivate_dormant_users_worker_spec.rb
+++ b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
@@ -4,12 +4,12 @@ require 'spec_helper'
RSpec.describe Users::DeactivateDormantUsersWorker do
describe '#perform' do
+ let_it_be(:dormant) { create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date) }
+ let_it_be(:inactive) { create(:user, last_activity_on: nil) }
+
subject(:worker) { described_class.new }
it 'does not run for GitLab.com' do
- create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date)
- create(:user, last_activity_on: nil)
-
expect(Gitlab).to receive(:com?).and_return(true)
expect(Gitlab::CurrentSettings).not_to receive(:current_application_settings)
@@ -29,9 +29,6 @@ RSpec.describe Users::DeactivateDormantUsersWorker do
stub_const("#{described_class.name}::BATCH_SIZE", 1)
stub_const("#{described_class.name}::PAUSE_SECONDS", 0)
- create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date)
- create(:user, last_activity_on: nil)
-
expect(worker).to receive(:sleep).twice
worker.perform
@@ -48,9 +45,6 @@ RSpec.describe Users::DeactivateDormantUsersWorker do
end
it 'does nothing' do
- create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date)
- create(:user, last_activity_on: nil)
-
worker.perform
expect(User.dormant.count).to eq(1)