
gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2020-02-20 15:52:10 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-02-20 15:52:10 +0300
commit    dba864470fbcbb6bdd5b94eb510acdce62c962d8 (patch)
tree      e8ead0b84e7b814f5891d2c8cd3db2d6b635fb64 /spec
parent    b7d29500f28ff59c8898cdf889a40d3da908f162 (diff)
Add latest changes from gitlab-org/gitlab@12-8-stable-ee
Diffstat (limited to 'spec')
-rw-r--r--spec/config/application_spec.rb2
-rw-r--r--spec/config/mail_room_spec.rb48
-rw-r--r--spec/controllers/acme_challenges_controller_spec.rb4
-rw-r--r--spec/controllers/admin/application_settings_controller_spec.rb16
-rw-r--r--spec/controllers/admin/applications_controller_spec.rb37
-rw-r--r--spec/controllers/admin/clusters/applications_controller_spec.rb16
-rw-r--r--spec/controllers/admin/clusters_controller_spec.rb4
-rw-r--r--spec/controllers/admin/gitaly_servers_controller_spec.rb2
-rw-r--r--spec/controllers/admin/hooks_controller_spec.rb2
-rw-r--r--spec/controllers/admin/impersonations_controller_spec.rb6
-rw-r--r--spec/controllers/admin/projects_controller_spec.rb4
-rw-r--r--spec/controllers/admin/requests_profiles_controller_spec.rb4
-rw-r--r--spec/controllers/admin/runners_controller_spec.rb43
-rw-r--r--spec/controllers/admin/serverless/domains_controller_spec.rb298
-rw-r--r--spec/controllers/admin/services_controller_spec.rb6
-rw-r--r--spec/controllers/admin/sessions_controller_spec.rb2
-rw-r--r--spec/controllers/admin/spam_logs_controller_spec.rb12
-rw-r--r--spec/controllers/admin/users_controller_spec.rb8
-rw-r--r--spec/controllers/application_controller_spec.rb30
-rw-r--r--spec/controllers/autocomplete_controller_spec.rb20
-rw-r--r--spec/controllers/boards/issues_controller_spec.rb36
-rw-r--r--spec/controllers/boards/lists_controller_spec.rb40
-rw-r--r--spec/controllers/chaos_controller_spec.rb28
-rw-r--r--spec/controllers/concerns/confirm_email_warning_spec.rb14
-rw-r--r--spec/controllers/concerns/controller_with_cross_project_access_check_spec.rb18
-rw-r--r--spec/controllers/concerns/enforces_admin_authentication_spec.rb8
-rw-r--r--spec/controllers/concerns/lfs_request_spec.rb20
-rw-r--r--spec/controllers/concerns/metrics_dashboard_spec.rb24
-rw-r--r--spec/controllers/concerns/page_limiter_spec.rb107
-rw-r--r--spec/controllers/concerns/project_unauthorized_spec.rb6
-rw-r--r--spec/controllers/concerns/routable_actions_spec.rb14
-rw-r--r--spec/controllers/concerns/send_file_upload_spec.rb24
-rw-r--r--spec/controllers/concerns/static_object_external_storage_spec.rb6
-rw-r--r--spec/controllers/dashboard/groups_controller_spec.rb4
-rw-r--r--spec/controllers/dashboard/milestones_controller_spec.rb6
-rw-r--r--spec/controllers/dashboard/projects_controller_spec.rb35
-rw-r--r--spec/controllers/dashboard/snippets_controller_spec.rb9
-rw-r--r--spec/controllers/dashboard/todos_controller_spec.rb14
-rw-r--r--spec/controllers/explore/projects_controller_spec.rb81
-rw-r--r--spec/controllers/google_api/authorizations_controller_spec.rb2
-rw-r--r--spec/controllers/graphql_controller_spec.rb4
-rw-r--r--spec/controllers/groups/avatars_controller_spec.rb2
-rw-r--r--spec/controllers/groups/boards_controller_spec.rb20
-rw-r--r--spec/controllers/groups/children_controller_spec.rb8
-rw-r--r--spec/controllers/groups/clusters/applications_controller_spec.rb16
-rw-r--r--spec/controllers/groups/clusters_controller_spec.rb4
-rw-r--r--spec/controllers/groups/group_links_controller_spec.rb51
-rw-r--r--spec/controllers/groups/group_members_controller_spec.rb34
-rw-r--r--spec/controllers/groups/labels_controller_spec.rb2
-rw-r--r--spec/controllers/groups/milestones_controller_spec.rb67
-rw-r--r--spec/controllers/groups/registry/repositories_controller_spec.rb32
-rw-r--r--spec/controllers/groups/runners_controller_spec.rb24
-rw-r--r--spec/controllers/groups/settings/ci_cd_controller_spec.rb12
-rw-r--r--spec/controllers/groups/variables_controller_spec.rb4
-rw-r--r--spec/controllers/groups_controller_spec.rb48
-rw-r--r--spec/controllers/health_check_controller_spec.rb8
-rw-r--r--spec/controllers/help_controller_spec.rb4
-rw-r--r--spec/controllers/import/bitbucket_controller_spec.rb8
-rw-r--r--spec/controllers/import/bitbucket_server_controller_spec.rb20
-rw-r--r--spec/controllers/import/fogbugz_controller_spec.rb4
-rw-r--r--spec/controllers/import/gitea_controller_spec.rb2
-rw-r--r--spec/controllers/import/github_controller_spec.rb6
-rw-r--r--spec/controllers/import/gitlab_controller_spec.rb8
-rw-r--r--spec/controllers/import/gitlab_projects_controller_spec.rb8
-rw-r--r--spec/controllers/import/google_code_controller_spec.rb4
-rw-r--r--spec/controllers/import/phabricator_controller_spec.rb8
-rw-r--r--spec/controllers/invites_controller_spec.rb4
-rw-r--r--spec/controllers/notification_settings_controller_spec.rb4
-rw-r--r--spec/controllers/oauth/applications_controller_spec.rb79
-rw-r--r--spec/controllers/oauth/authorizations_controller_spec.rb8
-rw-r--r--spec/controllers/oauth/token_info_controller_spec.rb71
-rw-r--r--spec/controllers/omniauth_callbacks_controller_spec.rb2
-rw-r--r--spec/controllers/passwords_controller_spec.rb2
-rw-r--r--spec/controllers/profiles/accounts_controller_spec.rb6
-rw-r--r--spec/controllers/profiles/notifications_controller_spec.rb4
-rw-r--r--spec/controllers/profiles/preferences_controller_spec.rb1
-rw-r--r--spec/controllers/profiles_controller_spec.rb2
-rw-r--r--spec/controllers/projects/alerting/notifications_controller_spec.rb92
-rw-r--r--spec/controllers/projects/artifacts_controller_spec.rb20
-rw-r--r--spec/controllers/projects/autocomplete_sources_controller_spec.rb2
-rw-r--r--spec/controllers/projects/avatars_controller_spec.rb4
-rw-r--r--spec/controllers/projects/blame_controller_spec.rb2
-rw-r--r--spec/controllers/projects/blob_controller_spec.rb8
-rw-r--r--spec/controllers/projects/boards_controller_spec.rb12
-rw-r--r--spec/controllers/projects/branches_controller_spec.rb46
-rw-r--r--spec/controllers/projects/ci/lints_controller_spec.rb4
-rw-r--r--spec/controllers/projects/clusters/applications_controller_spec.rb24
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb4
-rw-r--r--spec/controllers/projects/commit_controller_spec.rb12
-rw-r--r--spec/controllers/projects/compare_controller_spec.rb22
-rw-r--r--spec/controllers/projects/deploy_keys_controller_spec.rb20
-rw-r--r--spec/controllers/projects/deployments_controller_spec.rb8
-rw-r--r--spec/controllers/projects/discussions_controller_spec.rb20
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb24
-rw-r--r--spec/controllers/projects/error_tracking_controller_spec.rb2
-rw-r--r--spec/controllers/projects/forks_controller_spec.rb8
-rw-r--r--spec/controllers/projects/git_http_controller_spec.rb107
-rw-r--r--spec/controllers/projects/group_links_controller_spec.rb6
-rw-r--r--spec/controllers/projects/hooks_controller_spec.rb2
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb84
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb4
-rw-r--r--spec/controllers/projects/labels_controller_spec.rb18
-rw-r--r--spec/controllers/projects/mattermosts_controller_spec.rb2
-rw-r--r--spec/controllers/projects/merge_requests/content_controller_spec.rb4
-rw-r--r--spec/controllers/projects/merge_requests/creations_controller_spec.rb10
-rw-r--r--spec/controllers/projects/merge_requests/diffs_controller_spec.rb61
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb60
-rw-r--r--spec/controllers/projects/milestones_controller_spec.rb18
-rw-r--r--spec/controllers/projects/mirrors_controller_spec.rb8
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb66
-rw-r--r--spec/controllers/projects/pages_controller_spec.rb12
-rw-r--r--spec/controllers/projects/pages_domains_controller_spec.rb18
-rw-r--r--spec/controllers/projects/pipeline_schedules_controller_spec.rb10
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb114
-rw-r--r--spec/controllers/projects/pipelines_settings_controller_spec.rb2
-rw-r--r--spec/controllers/projects/project_members_controller_spec.rb16
-rw-r--r--spec/controllers/projects/prometheus/metrics_controller_spec.rb10
-rw-r--r--spec/controllers/projects/raw_controller_spec.rb28
-rw-r--r--spec/controllers/projects/registry/repositories_controller_spec.rb35
-rw-r--r--spec/controllers/projects/releases_controller_spec.rb54
-rw-r--r--spec/controllers/projects/repositories_controller_spec.rb46
-rw-r--r--spec/controllers/projects/runners_controller_spec.rb8
-rw-r--r--spec/controllers/projects/serverless/functions_controller_spec.rb119
-rw-r--r--spec/controllers/projects/services_controller_spec.rb75
-rw-r--r--spec/controllers/projects/settings/ci_cd_controller_spec.rb4
-rw-r--r--spec/controllers/projects/settings/integrations_controller_spec.rb2
-rw-r--r--spec/controllers/projects/settings/operations_controller_spec.rb103
-rw-r--r--spec/controllers/projects/settings/repository_controller_spec.rb4
-rw-r--r--spec/controllers/projects/snippets_controller_spec.rb39
-rw-r--r--spec/controllers/projects/stages_controller_spec.rb2
-rw-r--r--spec/controllers/projects/tags/releases_controller_spec.rb14
-rw-r--r--spec/controllers/projects/templates_controller_spec.rb4
-rw-r--r--spec/controllers/projects/todos_controller_spec.rb2
-rw-r--r--spec/controllers/projects/tree_controller_spec.rb12
-rw-r--r--spec/controllers/projects/uploads_controller_spec.rb2
-rw-r--r--spec/controllers/projects/usage_ping_controller_spec.rb6
-rw-r--r--spec/controllers/projects/wikis_controller_spec.rb10
-rw-r--r--spec/controllers/projects_controller_spec.rb86
-rw-r--r--spec/controllers/registrations_controller_spec.rb56
-rw-r--r--spec/controllers/repositories/git_http_controller_spec.rb146
-rw-r--r--spec/controllers/search_controller_spec.rb12
-rw-r--r--spec/controllers/sent_notifications_controller_spec.rb42
-rw-r--r--spec/controllers/sessions_controller_spec.rb10
-rw-r--r--spec/controllers/snippets/notes_controller_spec.rb30
-rw-r--r--spec/controllers/snippets_controller_spec.rb46
-rw-r--r--spec/controllers/uploads_controller_spec.rb62
-rw-r--r--spec/controllers/user_callouts_controller_spec.rb4
-rw-r--r--spec/controllers/users_controller_spec.rb12
-rw-r--r--spec/db/schema_spec.rb2
-rw-r--r--spec/factories/alerting/alert.rb25
-rw-r--r--spec/factories/ci/bridge.rb1
-rw-r--r--spec/factories/ci/build_need.rb2
-rw-r--r--spec/factories/ci/builds.rb1
-rw-r--r--spec/factories/ci/job_artifacts.rb10
-rw-r--r--spec/factories/container_expiration_policies.rb13
-rw-r--r--spec/factories/deploy_tokens.rb9
-rw-r--r--spec/factories/deployment_clusters.rb9
-rw-r--r--spec/factories/diff_position.rb51
-rw-r--r--spec/factories/environments.rb2
-rw-r--r--spec/factories/error_tracking/detailed_error.rb35
-rw-r--r--spec/factories/error_tracking/error.rb12
-rw-r--r--spec/factories/group_deploy_tokens.rb8
-rw-r--r--spec/factories/incident_management/project_incident_management_settings.rb10
-rw-r--r--spec/factories/merge_request_context_commit.rb12
-rw-r--r--spec/factories/merge_request_context_commit_diff_file.rb20
-rw-r--r--spec/factories/notes.rb47
-rw-r--r--spec/factories/pages_domains.rb10
-rw-r--r--spec/factories/project_error_tracking_settings.rb6
-rw-r--r--spec/factories/projects.rb7
-rw-r--r--spec/factories/prometheus_alert.rb17
-rw-r--r--spec/factories/serverless/domain_cluster.rb39
-rw-r--r--spec/factories/services.rb10
-rw-r--r--spec/factories/snippet_repositories.rb12
-rw-r--r--spec/factories/snippets.rb15
-rw-r--r--spec/factories/users.rb4
-rw-r--r--spec/factories/x509_certificate.rb12
-rw-r--r--spec/factories/x509_commit_signature.rb10
-rw-r--r--spec/factories/x509_issuer.rb10
-rw-r--r--spec/features/admin/admin_browses_logs_spec.rb2
-rw-r--r--spec/features/admin/admin_disables_git_access_protocol_spec.rb2
-rw-r--r--spec/features/admin/admin_groups_spec.rb2
-rw-r--r--spec/features/admin/admin_manage_applications_spec.rb3
-rw-r--r--spec/features/admin/admin_mode/workers_spec.rb77
-rw-r--r--spec/features/admin/admin_mode_spec.rb173
-rw-r--r--spec/features/admin/admin_serverless_domains_spec.rb59
-rw-r--r--spec/features/admin/admin_settings_spec.rb100
-rw-r--r--spec/features/admin/admin_uses_repository_checks_spec.rb66
-rw-r--r--spec/features/broadcast_messages_spec.rb30
-rw-r--r--spec/features/calendar_spec.rb34
-rw-r--r--spec/features/clusters/installing_applications_shared_examples.rb2
-rw-r--r--spec/features/container_registry_spec.rb1
-rw-r--r--spec/features/cycle_analytics_spec.rb12
-rw-r--r--spec/features/dashboard/projects_spec.rb55
-rw-r--r--spec/features/dashboard/shortcuts_spec.rb2
-rw-r--r--spec/features/dashboard/snippets_spec.rb11
-rw-r--r--spec/features/error_tracking/user_sees_error_details_spec.rb32
-rw-r--r--spec/features/error_tracking/user_sees_error_index_spec.rb69
-rw-r--r--spec/features/graphiql_spec.rb32
-rw-r--r--spec/features/groups/navbar_spec.rb77
-rw-r--r--spec/features/groups_spec.rb4
-rw-r--r--spec/features/invites_spec.rb48
-rw-r--r--spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb2
-rw-r--r--spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/filter_issues_spec.rb4
-rw-r--r--spec/features/issues/gfm_autocomplete_spec.rb26
-rw-r--r--spec/features/issues/issue_detail_spec.rb8
-rw-r--r--spec/features/issues/move_spec.rb2
-rw-r--r--spec/features/issues/user_creates_branch_and_merge_request_spec.rb4
-rw-r--r--spec/features/markdown/copy_as_gfm_spec.rb34
-rw-r--r--spec/features/markdown/mermaid_spec.rb30
-rw-r--r--spec/features/markdown/metrics_spec.rb10
-rw-r--r--spec/features/merge_request/maintainer_edits_fork_spec.rb2
-rw-r--r--spec/features/merge_request/user_creates_image_diff_notes_spec.rb44
-rw-r--r--spec/features/merge_request/user_edits_assignees_sidebar_spec.rb2
-rw-r--r--spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb111
-rw-r--r--spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb8
-rw-r--r--spec/features/merge_request/user_posts_diff_notes_spec.rb4
-rw-r--r--spec/features/merge_request/user_resolves_conflicts_spec.rb4
-rw-r--r--spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb20
-rw-r--r--spec/features/merge_request/user_resolves_outdated_diff_discussions_spec.rb12
-rw-r--r--spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb6
-rw-r--r--spec/features/merge_request/user_sees_deployment_widget_spec.rb4
-rw-r--r--spec/features/merge_request/user_sees_diff_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_discussions_spec.rb6
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb137
-rw-r--r--spec/features/merge_request/user_sees_versions_spec.rb37
-rw-r--r--spec/features/merge_request/user_suggests_changes_on_diff_spec.rb2
-rw-r--r--spec/features/merge_requests/user_squashes_merge_request_spec.rb31
-rw-r--r--spec/features/profiles/password_spec.rb4
-rw-r--r--spec/features/profiles/user_edit_preferences_spec.rb27
-rw-r--r--spec/features/profiles/user_manages_applications_spec.rb3
-rw-r--r--spec/features/projects/active_tabs_spec.rb (renamed from spec/features/projects/actve_tabs_spec.rb)48
-rw-r--r--spec/features/projects/artifacts/user_downloads_artifacts_spec.rb2
-rw-r--r--spec/features/projects/badges/coverage_spec.rb2
-rw-r--r--spec/features/projects/blobs/blob_line_permalink_updater_spec.rb22
-rw-r--r--spec/features/projects/blobs/blob_show_spec.rb8
-rw-r--r--spec/features/projects/blobs/edit_spec.rb2
-rw-r--r--spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb1
-rw-r--r--spec/features/projects/branches/user_deletes_branch_spec.rb4
-rw-r--r--spec/features/projects/clusters/eks_spec.rb4
-rw-r--r--spec/features/projects/clusters/gcp_spec.rb18
-rw-r--r--spec/features/projects/environments/environment_metrics_spec.rb4
-rw-r--r--spec/features/projects/environments/environments_spec.rb2
-rw-r--r--spec/features/projects/files/edit_file_soft_wrap_spec.rb2
-rw-r--r--spec/features/projects/files/template_type_dropdown_spec.rb5
-rw-r--r--spec/features/projects/files/user_browses_files_spec.rb25
-rw-r--r--spec/features/projects/files/user_creates_directory_spec.rb21
-rw-r--r--spec/features/projects/files/user_creates_files_spec.rb2
-rw-r--r--spec/features/projects/files/user_deletes_files_spec.rb2
-rw-r--r--spec/features/projects/files/user_replaces_files_spec.rb2
-rw-r--r--spec/features/projects/graph_spec.rb14
-rw-r--r--spec/features/projects/jobs_spec.rb2
-rw-r--r--spec/features/projects/members/group_member_cannot_request_access_to_his_group_project_spec.rb2
-rw-r--r--spec/features/projects/members/member_cannot_request_access_to_his_project_spec.rb2
-rw-r--r--spec/features/projects/members/owner_cannot_request_access_to_his_project_spec.rb2
-rw-r--r--spec/features/projects/navbar_spec.rb104
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb100
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb6
-rw-r--r--spec/features/projects/serverless/functions_spec.rb5
-rw-r--r--spec/features/projects/services/user_activates_alerts_spec.rb81
-rw-r--r--spec/features/projects/services/user_activates_issue_tracker_spec.rb15
-rw-r--r--spec/features/projects/services/user_activates_jira_spec.rb8
-rw-r--r--spec/features/projects/services/user_activates_prometheus_spec.rb5
-rw-r--r--spec/features/projects/services/user_activates_youtrack_spec.rb8
-rw-r--r--spec/features/projects/settings/operations_settings_spec.rb52
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb50
-rw-r--r--spec/features/projects/show/user_sees_collaboration_links_spec.rb106
-rw-r--r--spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb6
-rw-r--r--spec/features/projects/snippets/create_snippet_spec.rb20
-rw-r--r--spec/features/projects/snippets/user_views_snippets_spec.rb107
-rw-r--r--spec/features/projects/tags/user_views_tags_spec.rb2
-rw-r--r--spec/features/projects/tree/create_directory_spec.rb7
-rw-r--r--spec/features/projects/tree/create_file_spec.rb7
-rw-r--r--spec/features/projects/user_sees_user_popover_spec.rb37
-rw-r--r--spec/features/projects/user_uses_shortcuts_spec.rb69
-rw-r--r--spec/features/projects/view_on_env_spec.rb2
-rw-r--r--spec/features/projects/wiki/user_updates_wiki_page_spec.rb10
-rw-r--r--spec/features/projects/wiki/user_views_wiki_page_spec.rb12
-rw-r--r--spec/features/security/group/internal_access_spec.rb8
-rw-r--r--spec/features/security/group/private_access_spec.rb8
-rw-r--r--spec/features/security/group/public_access_spec.rb8
-rw-r--r--spec/features/security/project/internal_access_spec.rb18
-rw-r--r--spec/features/security/project/private_access_spec.rb16
-rw-r--r--spec/features/security/project/public_access_spec.rb18
-rw-r--r--spec/features/signed_commits_spec.rb32
-rw-r--r--spec/features/snippets/spam_snippets_spec.rb8
-rw-r--r--spec/features/snippets/user_creates_snippet_spec.rb10
-rw-r--r--spec/features/task_lists_spec.rb49
-rw-r--r--spec/features/users/login_spec.rb3
-rw-r--r--spec/features/users/signup_spec.rb129
-rw-r--r--spec/finders/award_emojis_finder_spec.rb12
-rw-r--r--spec/finders/clusters_finder_spec.rb2
-rw-r--r--spec/finders/concerns/finder_with_cross_project_access_spec.rb2
-rw-r--r--spec/finders/context_commits_finder_spec.rb28
-rw-r--r--spec/finders/issues_finder_spec.rb39
-rw-r--r--spec/finders/jobs_finder_spec.rb14
-rw-r--r--spec/finders/keys_finder_spec.rb200
-rw-r--r--spec/finders/members_finder_spec.rb51
-rw-r--r--spec/finders/merge_requests_finder_spec.rb4
-rw-r--r--spec/finders/milestones_finder_spec.rb44
-rw-r--r--spec/finders/projects/prometheus/alerts_finder_spec.rb169
-rw-r--r--spec/finders/projects/serverless/functions_finder_spec.rb70
-rw-r--r--spec/finders/projects_finder_spec.rb277
-rw-r--r--spec/finders/protected_branches_finder_spec.rb37
-rw-r--r--spec/finders/resource_label_event_finder_spec.rb6
-rw-r--r--spec/fixtures/api/graphql/recursive-introspection.graphql46
-rw-r--r--spec/fixtures/api/graphql/recursive-query-edges-node.graphql23
-rw-r--r--spec/fixtures/api/graphql/recursive-query-nodes.graphql19
-rw-r--r--spec/fixtures/api/graphql/recursive-query.graphql47
-rw-r--r--spec/fixtures/api/schemas/deployment.json2
-rw-r--r--spec/fixtures/api/schemas/deployment_cluster.json (renamed from spec/fixtures/api/schemas/cluster_basic.json)6
-rw-r--r--spec/fixtures/api/schemas/entities/test_reports_comparer.json2
-rw-r--r--spec/fixtures/api/schemas/entities/test_suite_comparer.json12
-rw-r--r--spec/fixtures/api/schemas/error_tracking/update_issue.json8
-rw-r--r--spec/fixtures/api/schemas/job/job_details.json6
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/members.json22
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/membership.json10
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/memberships.json4
-rw-r--r--spec/fixtures/api/schemas/variable.json1
-rw-r--r--spec/fixtures/authentication/adfs_saml_response.xml58
-rw-r--r--spec/fixtures/config/mail_room_disabled.yml11
-rw-r--r--spec/fixtures/config/mail_room_enabled.yml11
-rw-r--r--spec/fixtures/emails/.gitattributes2
-rw-r--r--spec/fixtures/emails/envelope_to_header_with_angle_brackets.eml32
-rw-r--r--spec/fixtures/emails/valid_reply_signed_smime.eml294
-rw-r--r--spec/fixtures/group_export.tar.gz bin 4551 -> 2795 bytes
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/project.json4
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/complex/group.json140
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/internal/group.json166
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/private/group.json166
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/public/group.json166
-rw-r--r--spec/fixtures/lib/gitlab/import_export/with_duplicates.json43
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json3
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json3
-rw-r--r--spec/fixtures/lsif.json.gz bin 0 -> 759 bytes
-rw-r--r--spec/fixtures/sentry/global_integration_link_sample_response.json (renamed from spec/fixtures/sentry/issue_link_sample_response.json)0
-rw-r--r--spec/fixtures/sentry/issue_sample_response.json282
-rw-r--r--spec/fixtures/sentry/plugin_link_sample_response.json6
-rw-r--r--spec/fixtures/valid.po12
-rw-r--r--spec/frontend/.eslintrc.yml6
-rw-r--r--spec/frontend/alerts_service_settings/components/__snapshots__/alerts_service_form_spec.js.snap9
-rw-r--r--spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js168
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap35
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap24
-rw-r--r--spec/frontend/blob/components/blob_content_error_spec.js27
-rw-r--r--spec/frontend/blob/components/blob_content_spec.js70
-rw-r--r--spec/frontend/blob/components/blob_embeddable_spec.js35
-rw-r--r--spec/frontend/blob/components/blob_header_default_actions_spec.js70
-rw-r--r--spec/frontend/blob/components/blob_header_filepath_spec.js90
-rw-r--r--spec/frontend/blob/components/blob_header_spec.js139
-rw-r--r--spec/frontend/blob/components/blob_header_viewer_switcher_spec.js97
-rw-r--r--spec/frontend/blob/components/mock_data.js43
-rw-r--r--spec/frontend/boards/boards_store_spec.js26
-rw-r--r--spec/frontend/boards/components/issue_card_inner_scoped_label_spec.js (renamed from spec/javascripts/boards/components/issue_card_inner_scoped_label_spec.js)2
-rw-r--r--spec/frontend/boards/components/issue_due_date_spec.js (renamed from spec/javascripts/boards/components/issue_due_date_spec.js)0
-rw-r--r--spec/frontend/boards/issue_card_spec.js51
-rw-r--r--spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap14
-rw-r--r--spec/frontend/clusters/components/applications_spec.js3
-rw-r--r--spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap39
-rw-r--r--spec/frontend/code_navigation/components/app_spec.js64
-rw-r--r--spec/frontend/code_navigation/components/popover_spec.js58
-rw-r--r--spec/frontend/code_navigation/store/actions_spec.js212
-rw-r--r--spec/frontend/code_navigation/store/mutations_spec.js63
-rw-r--r--spec/frontend/code_navigation/utils/index_spec.js58
-rw-r--r--spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap24
-rw-r--r--spec/frontend/contributors/component/contributors_spec.js4
-rw-r--r--spec/frontend/create_cluster/gke_cluster/components/gke_submit_button_spec.js53
-rw-r--r--spec/frontend/diffs/components/compare_versions_spec.js3
-rw-r--r--spec/frontend/diffs/components/diff_file_row_spec.js74
-rw-r--r--spec/frontend/diffs/components/diff_stats_spec.js27
-rw-r--r--spec/frontend/diffs/components/diff_table_cell_spec.js213
-rw-r--r--spec/frontend/diffs/mock_data/diff_file.js244
-rw-r--r--spec/frontend/diffs/mock_data/merge_request_diffs.js8
-rw-r--r--spec/frontend/environments/emtpy_state_spec.js40
-rw-r--r--spec/frontend/environments/enable_review_app_button_spec.js31
-rw-r--r--spec/frontend/environments/environment_actions_spec.js124
-rw-r--r--spec/frontend/environments/environment_external_url_spec.js16
-rw-r--r--spec/frontend/environments/environments_store_spec.js (renamed from spec/javascripts/environments/environments_store_spec.js)0
-rw-r--r--spec/frontend/environments/folder/environments_folder_view_spec.js180
-rw-r--r--spec/frontend/environments/mock_data.js59
-rw-r--r--spec/frontend/error_tracking/components/error_details_spec.js213
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js33
-rw-r--r--spec/frontend/error_tracking/store/actions_spec.js64
-rw-r--r--spec/frontend/error_tracking/store/details/actions_spec.js66
-rw-r--r--spec/frontend/error_tracking/store/list/mutation_spec.js25
-rw-r--r--spec/frontend/error_tracking_settings/components/project_dropdown_spec.js12
-rw-r--r--spec/frontend/error_tracking_settings/store/getters_spec.js2
-rw-r--r--spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js236
-rw-r--r--spec/frontend/fixtures/merge_requests.rb41
-rw-r--r--spec/frontend/fixtures/merge_requests_diffs.rb6
-rw-r--r--spec/frontend/gfm_auto_complete_spec.js2
-rw-r--r--spec/frontend/gl_field_errors_spec.js2
-rw-r--r--spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap3
-rw-r--r--spec/frontend/graphql_shared/utils_spec.js42
-rw-r--r--spec/frontend/helpers/dom_shims/element_scroll_into_view.js1
-rw-r--r--spec/frontend/helpers/dom_shims/index.js2
-rw-r--r--spec/frontend/helpers/dom_shims/window_scroll_to.js1
-rw-r--r--spec/frontend/ide/components/branches/item_spec.js66
-rw-r--r--spec/frontend/ide/components/error_message_spec.js29
-rw-r--r--spec/frontend/ide/components/ide_file_row_spec.js117
-rw-r--r--spec/frontend/ide/components/ide_status_list_spec.js2
-rw-r--r--spec/frontend/ide/components/ide_status_mr_spec.js59
-rw-r--r--spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap4
-rw-r--r--spec/frontend/ide/components/jobs/detail/scroll_button_spec.js67
-rw-r--r--spec/frontend/ide/components/panes/right_spec.js22
-rw-r--r--spec/frontend/ide/components/preview/navigator_spec.js226
-rw-r--r--spec/frontend/ide/ide_router_extension_spec.js48
-rw-r--r--spec/frontend/ide/mock_data.js7
-rw-r--r--spec/frontend/ide/services/index_spec.js33
-rw-r--r--spec/frontend/ide/stores/actions/file_spec.js80
-rw-r--r--spec/frontend/ide/stores/getters_spec.js36
-rw-r--r--spec/frontend/ide/stores/integration_spec.js15
-rw-r--r--spec/frontend/ide/stores/modules/file_templates/getters_spec.js6
-rw-r--r--spec/frontend/ide/stores/modules/pipelines/actions_spec.js6
-rw-r--r--spec/frontend/ide/stores/mutations/file_spec.js29
-rw-r--r--spec/frontend/ide/stores/mutations_spec.js20
-rw-r--r--spec/frontend/import_projects/components/import_projects_table_spec.js2
-rw-r--r--spec/frontend/issuables_list/components/issuable_spec.js4
-rw-r--r--spec/frontend/issuables_list/issuable_list_test_data.js3
-rw-r--r--spec/frontend/issue_show/mock_data.js23
-rw-r--r--spec/frontend/jobs/components/job_app_spec.js (renamed from spec/javascripts/jobs/components/job_app_spec.js)423
-rw-r--r--spec/frontend/jobs/components/log/mock_data.js2
-rw-r--r--spec/frontend/jobs/mock_data.js1191
-rw-r--r--spec/frontend/jobs/store/mutations_spec.js18
-rw-r--r--spec/frontend/lib/utils/datetime_range_spec.js382
-rw-r--r--spec/frontend/lib/utils/url_utility_spec.js113
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap102
-rw-r--r--spec/frontend/monitoring/components/charts/single_stat_spec.js48
-rw-r--r--spec/frontend/monitoring/components/charts/stacked_column_spec.js45
-rw-r--r--spec/frontend/monitoring/components/charts/time_series_spec.js180
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js439
-rw-r--r--spec/frontend/monitoring/components/dashboard_template_spec.js39
-rw-r--r--spec/frontend/monitoring/components/dashboard_time_url_spec.js51
-rw-r--r--spec/frontend/monitoring/components/dashboard_time_window_spec.js68
-rw-r--r--spec/frontend/monitoring/components/dashboard_url_time_spec.js140
-rw-r--r--spec/frontend/monitoring/components/dashboards_dropdown_spec.js97
-rw-r--r--spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js170
-rw-r--r--spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js35
-rw-r--r--spec/frontend/monitoring/components/graph_group_spec.js15
-rw-r--r--spec/frontend/monitoring/components/panel_type_spec.js277
-rw-r--r--spec/frontend/monitoring/embed/embed_spec.js21
-rw-r--r--spec/frontend/monitoring/init_utils.js6
-rw-r--r--spec/frontend/monitoring/mock_data.js248
-rw-r--r--spec/frontend/monitoring/panel_type_spec.js184
-rw-r--r--spec/frontend/monitoring/store/actions_spec.js124
-rw-r--r--spec/frontend/monitoring/store/getters_spec.js81
-rw-r--r--spec/frontend/monitoring/store/mutations_spec.js37
-rw-r--r--spec/frontend/monitoring/store/utils_spec.js77
-rw-r--r--spec/frontend/monitoring/utils_spec.js371
-rw-r--r--spec/frontend/mr_popover/mr_popover_spec.js2
-rw-r--r--spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap2
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js2
-rw-r--r--spec/frontend/notes/components/discussion_actions_spec.js9
-rw-r--r--spec/frontend/notes/components/discussion_filter_note_spec.js89
-rw-r--r--spec/frontend/notes/components/discussion_jump_to_next_button_spec.js11
-rw-r--r--spec/frontend/notes/components/discussion_keyboard_navigator_spec.js12
-rw-r--r--spec/frontend/notes/components/note_app_spec.js2
-rw-r--r--spec/frontend/notes/components/note_attachment_spec.js58
-rw-r--r--spec/frontend/notes/components/note_header_spec.js210
-rw-r--r--spec/frontend/notes/mock_data.js26
-rw-r--r--spec/frontend/notes/old_notes_spec.js5
-rw-r--r--spec/frontend/notes/stores/mutation_spec.js2
-rw-r--r--spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap3
-rw-r--r--spec/frontend/pages/admin/users/components/__snapshots__/user_operation_confirmation_modal_spec.js.snap1
-rw-r--r--spec/frontend/performance_bar/components/request_selector_spec.js2
-rw-r--r--spec/frontend/performance_bar/stores/performance_bar_store_spec.js4
-rw-r--r--spec/frontend/projects/pipelines/charts/components/__snapshots__/pipelines_area_chart_spec.js.snap23
-rw-r--r--spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap45
-rw-r--r--spec/frontend/projects/pipelines/charts/components/app_spec.js72
-rw-r--r--spec/frontend/projects/pipelines/charts/components/pipelines_area_chart_spec.js30
-rw-r--r--spec/frontend/projects/pipelines/charts/components/statistics_list_spec.js24
-rw-r--r--spec/frontend/projects/pipelines/charts/mock_data.js33
-rw-r--r--spec/frontend/registry/explorer/components/__snapshots__/group_empty_state_spec.js.snap21
-rw-r--r--spec/frontend/registry/explorer/components/__snapshots__/project_empty_state_spec.js.snap119
-rw-r--r--spec/frontend/registry/explorer/components/__snapshots__/registry_breadcrumb_spec.js.snap28
-rw-r--r--spec/frontend/registry/explorer/components/group_empty_state_spec.js40
-rw-r--r--spec/frontend/registry/explorer/components/project_empty_state_spec.js44
-rw-r--r--spec/frontend/registry/explorer/components/registry_breadcrumb_spec.js135
-rw-r--r--spec/frontend/registry/explorer/mock_data.js89
-rw-r--r--spec/frontend/registry/explorer/pages/details_spec.js293
-rw-r--r--spec/frontend/registry/explorer/pages/list_spec.js205
-rw-r--r--spec/frontend/registry/explorer/stores/actions_spec.js333
-rw-r--r--spec/frontend/registry/explorer/stores/mutations_spec.js86
-rw-r--r--spec/frontend/registry/explorer/stubs.js11
-rw-r--r--spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap21
-rw-r--r--spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap181
-rw-r--r--spec/frontend/registry/settings/components/registry_settings_app_spec.js71
-rw-r--r--spec/frontend/registry/settings/components/settings_form_spec.js235
-rw-r--r--spec/frontend/registry/settings/store/actions_spec.js87
-rw-r--r--spec/frontend/registry/settings/store/getters_spec.js44
-rw-r--r--spec/frontend/registry/settings/store/mutations_spec.js14
-rw-r--r--spec/frontend/registry/shared/components/__snapshots__/expiration_policy_fields_spec.js.snap134
-rw-r--r--spec/frontend/registry/shared/components/expiration_policy_fields_spec.js172
-rw-r--r--spec/frontend/registry/shared/mock_data.js (renamed from spec/frontend/registry/settings/mock_data.js)0
-rw-r--r--spec/frontend/releases/components/app_edit_spec.js (renamed from spec/frontend/releases/detail/components/app_spec.js)18
-rw-r--r--spec/frontend/releases/components/evidence_block_spec.js (renamed from spec/frontend/releases/list/components/evidence_block_spec.js)4
-rw-r--r--spec/frontend/releases/components/release_block_footer_spec.js (renamed from spec/frontend/releases/list/components/release_block_footer_spec.js)4
-rw-r--r--spec/frontend/releases/components/release_block_header_spec.js56
-rw-r--r--spec/frontend/releases/components/release_block_milestone_info_spec.js (renamed from spec/frontend/releases/list/components/release_block_milestone_info_spec.js)6
-rw-r--r--spec/frontend/releases/components/release_block_spec.js (renamed from spec/frontend/releases/list/components/release_block_spec.js)15
-rw-r--r--spec/frontend/releases/mock_data.js1
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js (renamed from spec/frontend/releases/detail/store/actions_spec.js)8
-rw-r--r--spec/frontend/releases/stores/modules/detail/mutations_spec.js (renamed from spec/frontend/releases/detail/store/mutations_spec.js)8
-rw-r--r--spec/frontend/reports/store/utils_spec.js60
-rw-r--r--spec/frontend/repository/components/breadcrumbs_spec.js11
-rw-r--r--spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap51
-rw-r--r--spec/frontend/repository/components/table/parent_row_spec.js11
-rw-r--r--spec/frontend/repository/components/table/row_spec.js32
-rw-r--r--spec/frontend/repository/log_tree_spec.js2
-rw-r--r--spec/frontend/repository/router_spec.js11
-rw-r--r--spec/frontend/repository/utils/dom_spec.js13
-rw-r--r--spec/frontend/repository/utils/title_spec.js25
-rw-r--r--spec/frontend/self_monitor/components/__snapshots__/self_monitor_spec.js.snap4
-rw-r--r--spec/frontend/self_monitor/components/self_monitor_spec.js4
-rw-r--r--spec/frontend/self_monitor/store/actions_spec.js7
-rw-r--r--spec/frontend/sidebar/confidential_issue_sidebar_spec.js2
-rw-r--r--spec/frontend/snippet/collapsible_input_spec.js104
-rw-r--r--spec/frontend/snippets/components/app_spec.js6
-rw-r--r--spec/frontend/snippets/components/snippet_blob_view_spec.js179
-rw-r--r--spec/frontend/vue_alerts_spec.js87
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js52
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_spec.js42
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/code_block_spec.js.snap16
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap88
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/identicon_spec.js.snap11
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap86
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js27
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js81
-rw-r--r--spec/frontend/vue_shared/components/changed_file_icon_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/clipboard_button_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/code_block_spec.js38
-rw-r--r--spec/frontend/vue_shared/components/date_time_picker/date_time_picker_input_spec.js (renamed from spec/frontend/monitoring/components/date_time_picker/date_time_picker_input_spec.js)2
-rw-r--r--spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js173
-rw-r--r--spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js267
-rw-r--r--spec/frontend/vue_shared/components/dismissible_alert_spec.js57
-rw-r--r--spec/frontend/vue_shared/components/expand_button_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/file_tree_spec.js88
-rw-r--r--spec/frontend/vue_shared/components/identicon_spec.js80
-rw-r--r--spec/frontend/vue_shared/components/issue/__snapshots__/issue_warning_spec.js.snap62
-rw-r--r--spec/frontend/vue_shared/components/issue/issue_assignees_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/issue/issue_warning_spec.js134
-rw-r--r--spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap28
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js46
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js83
-rw-r--r--spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap62
-rw-r--r--spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_system_note_spec.js.snap15
-rw-r--r--spec/frontend/vue_shared/components/notes/placeholder_note_spec.js80
-rw-r--r--spec/frontend/vue_shared/components/notes/placeholder_system_note_spec.js34
-rw-r--r--spec/frontend/vue_shared/components/recaptcha_modal_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/slot_switch_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js4
-rw-r--r--spec/graphql/features/authorization_spec.rb35
-rw-r--r--spec/graphql/features/feature_flag_spec.rb36
-rw-r--r--spec/graphql/mutations/todos/restore_many_spec.rb114
-rw-r--r--spec/graphql/resolvers/boards_resolver_spec.rb75
-rw-r--r--spec/graphql/resolvers/error_tracking/sentry_error_collection_resolver_spec.rb47
-rw-r--r--spec/graphql/resolvers/error_tracking/sentry_errors_resolver_spec.rb103
-rw-r--r--spec/graphql/resolvers/milestone_resolver_spec.rb93
-rw-r--r--spec/graphql/types/base_field_spec.rb65
-rw-r--r--spec/graphql/types/blob_viewers/type_enum_spec.rb11
-rw-r--r--spec/graphql/types/commit_type_spec.rb3
-rw-r--r--spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb3
-rw-r--r--spec/graphql/types/error_tracking/sentry_error_collection_type_spec.rb32
-rw-r--r--spec/graphql/types/error_tracking/sentry_error_stack_trace_entry_type_spec.rb19
-rw-r--r--spec/graphql/types/error_tracking/sentry_error_stack_trace_type_spec.rb19
-rw-r--r--spec/graphql/types/error_tracking/sentry_error_type_spec.rb31
-rw-r--r--spec/graphql/types/group_type_spec.rb9
-rw-r--r--spec/graphql/types/permission_types/project_spec.rb3
-rw-r--r--spec/graphql/types/query_type_spec.rb13
-rw-r--r--spec/graphql/types/snippet_type_spec.rb4
-rw-r--r--spec/graphql/types/snippets/blob_type_spec.rb13
-rw-r--r--spec/graphql/types/snippets/blob_viewer_type_spec.rb12
-rw-r--r--spec/helpers/application_settings_helper_spec.rb2
-rw-r--r--spec/helpers/auth_helper_spec.rb9
-rw-r--r--spec/helpers/avatars_helper_spec.rb36
-rw-r--r--spec/helpers/award_emoji_helper_spec.rb2
-rw-r--r--spec/helpers/blob_helper_spec.rb8
-rw-r--r--spec/helpers/broadcast_messages_helper_spec.rb23
-rw-r--r--spec/helpers/button_helper_spec.rb2
-rw-r--r--spec/helpers/clusters_helper_spec.rb28
-rw-r--r--spec/helpers/commits_helper_spec.rb13
-rw-r--r--spec/helpers/diff_helper_spec.rb4
-rw-r--r--spec/helpers/environments_helper_spec.rb3
-rw-r--r--spec/helpers/events_helper_spec.rb4
-rw-r--r--spec/helpers/labels_helper_spec.rb10
-rw-r--r--spec/helpers/markup_helper_spec.rb22
-rw-r--r--spec/helpers/nav_helper_spec.rb6
-rw-r--r--spec/helpers/notes_helper_spec.rb11
-rw-r--r--spec/helpers/projects/error_tracking_helper_spec.rb10
-rw-r--r--spec/helpers/projects_helper_spec.rb33
-rw-r--r--spec/helpers/sourcegraph_helper_spec.rb2
-rw-r--r--spec/helpers/submodule_helper_spec.rb26
-rw-r--r--spec/initializers/action_mailer_hooks_spec.rb5
-rw-r--r--spec/initializers/lograge_spec.rb57
-rw-r--r--spec/initializers/mail_encoding_patch_spec.rb207
-rw-r--r--spec/javascripts/badges/dummy_badge.js4
-rw-r--r--spec/javascripts/behaviors/shortcuts/shortcuts_issuable_spec.js14
-rw-r--r--spec/javascripts/blob/notebook/index_spec.js4
-rw-r--r--spec/javascripts/blob/pdf/index_spec.js2
-rw-r--r--spec/javascripts/create_cluster/gke_cluster/stores/getters_spec.js52
-rw-r--r--spec/javascripts/cycle_analytics/banner_spec.js8
-rw-r--r--spec/javascripts/diffs/components/compare_versions_dropdown_spec.js2
-rw-r--r--spec/javascripts/diffs/components/diff_file_spec.js23
-rw-r--r--spec/javascripts/diffs/components/diff_line_gutter_content_spec.js105
-rw-r--r--spec/javascripts/diffs/components/diff_table_cell_spec.js37
-rw-r--r--spec/javascripts/diffs/mock_data/diff_discussions.js27
-rw-r--r--spec/javascripts/diffs/mock_data/diff_file.js249
-rw-r--r--spec/javascripts/diffs/store/actions_spec.js117
-rw-r--r--spec/javascripts/diffs/store/mutations_spec.js4
-rw-r--r--spec/javascripts/diffs/store/utils_spec.js39
-rw-r--r--spec/javascripts/editor/editor_lite_spec.js111
-rw-r--r--spec/javascripts/environments/emtpy_state_spec.js54
-rw-r--r--spec/javascripts/environments/environment_actions_spec.js117
-rw-r--r--spec/javascripts/environments/environment_external_url_spec.js22
-rw-r--r--spec/javascripts/environments/environments_app_spec.js20
-rw-r--r--spec/javascripts/environments/folder/environments_folder_view_spec.js229
-rw-r--r--spec/javascripts/environments/mock_data.js37
-rw-r--r--spec/javascripts/flash_spec.js2
-rw-r--r--spec/javascripts/groups/components/group_item_spec.js29
-rw-r--r--spec/javascripts/ide/components/activity_bar_spec.js10
-rw-r--r--spec/javascripts/ide/components/commit_sidebar/form_spec.js10
-rw-r--r--spec/javascripts/ide/components/commit_sidebar/new_merge_request_option_spec.js53
-rw-r--r--spec/javascripts/ide/components/file_row_extra_spec.js2
-rw-r--r--spec/javascripts/ide/components/ide_side_bar_spec.js4
-rw-r--r--spec/javascripts/ide/components/ide_spec.js4
-rw-r--r--spec/javascripts/ide/components/ide_status_bar_spec.js143
-rw-r--r--spec/javascripts/ide/components/merge_requests/info_spec.js51
-rw-r--r--spec/javascripts/ide/components/nav_dropdown_button_spec.js104
-rw-r--r--spec/javascripts/ide/components/nav_dropdown_spec.js30
-rw-r--r--spec/javascripts/ide/components/new_dropdown/upload_spec.js41
-rw-r--r--spec/javascripts/ide/components/repo_editor_spec.js6
-rw-r--r--spec/javascripts/ide/lib/editor_spec.js1
-rw-r--r--spec/javascripts/ide/stores/actions/merge_request_spec.js30
-rw-r--r--spec/javascripts/ide/stores/actions/tree_spec.js2
-rw-r--r--spec/javascripts/ide/stores/actions_spec.js114
-rw-r--r--spec/javascripts/ide/stores/modules/commit/actions_spec.js13
-rw-r--r--spec/javascripts/issue_show/components/app_spec.js4
-rw-r--r--spec/javascripts/issue_show/mock_data.js25
-rw-r--r--spec/javascripts/jobs/components/environments_block_spec.js134
-rw-r--r--spec/javascripts/jobs/components/stages_dropdown_spec.js2
-rw-r--r--spec/javascripts/jobs/mock_data.js1193
-rw-r--r--spec/javascripts/jobs/store/actions_spec.js117
-rw-r--r--spec/javascripts/merge_request_spec.js8
-rw-r--r--spec/javascripts/merge_request_tabs_spec.js34
-rw-r--r--spec/javascripts/monitoring/components/dashboard_resize_spec.js7
-rw-r--r--spec/javascripts/notes/components/discussion_counter_spec.js11
-rw-r--r--spec/javascripts/notes/components/note_actions_spec.js2
-rw-r--r--spec/javascripts/notes/components/noteable_discussion_spec.js31
-rw-r--r--spec/javascripts/pipelines/header_component_spec.js55
-rw-r--r--spec/javascripts/releases/components/app_index_spec.js (renamed from spec/javascripts/releases/list/components/app_spec.js)11
-rw-r--r--spec/javascripts/releases/stores/modules/list/actions_spec.js (renamed from spec/javascripts/releases/list/store/actions_spec.js)8
-rw-r--r--spec/javascripts/releases/stores/modules/list/helpers.js (renamed from spec/javascripts/releases/list/store/helpers.js)2
-rw-r--r--spec/javascripts/releases/stores/modules/list/mutations_spec.js (renamed from spec/javascripts/releases/list/store/mutations_spec.js)8
-rw-r--r--spec/javascripts/reports/components/grouped_test_reports_app_spec.js48
-rw-r--r--spec/javascripts/reports/components/modal_spec.js4
-rw-r--r--spec/javascripts/reports/mock_data/new_and_fixed_failures_report.json56
-rw-r--r--spec/javascripts/reports/mock_data/new_errors_report.json38
-rw-r--r--spec/javascripts/reports/mock_data/new_failures_report.json39
-rw-r--r--spec/javascripts/reports/mock_data/no_failures_report.json29
-rw-r--r--spec/javascripts/reports/mock_data/resolved_failures.json31
-rw-r--r--spec/javascripts/search_autocomplete_spec.js6
-rw-r--r--spec/javascripts/user_popovers_spec.js90
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js2
-rw-r--r--spec/javascripts/vue_mr_widget/mock_data.js28
-rw-r--r--spec/javascripts/vue_mr_widget/mr_widget_options_spec.js55
-rw-r--r--spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js14
-rw-r--r--spec/javascripts/vue_shared/components/bar_chart_spec.js79
-rw-r--r--spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js28
-rw-r--r--spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js4
-rw-r--r--spec/javascripts/vue_shared/components/file_row_spec.js64
-rw-r--r--spec/javascripts/vue_shared/components/header_ci_component_spec.js37
-rw-r--r--spec/javascripts/vue_shared/components/pagination/graphql_pagination_spec.js73
-rw-r--r--spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js6
-rw-r--r--spec/javascripts/vue_shared/components/tooltip_on_truncate_spec.js211
-rw-r--r--spec/lib/api/helpers_spec.rb42
-rw-r--r--spec/lib/backup/repository_spec.rb16
-rw-r--r--spec/lib/banzai/filter/commit_range_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/gollum_tags_filter_spec.rb15
-rw-r--r--spec/lib/banzai/filter/label_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/milestone_reference_filter_spec.rb8
-rw-r--r--spec/lib/banzai/filter/project_reference_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/repository_link_filter_spec.rb38
-rw-r--r--spec/lib/banzai/filter/table_of_contents_tag_filter_spec.rb23
-rw-r--r--spec/lib/banzai/pipeline/full_pipeline_spec.rb31
-rw-r--r--spec/lib/container_registry/client_spec.rb53
-rw-r--r--spec/lib/container_registry/registry_spec.rb2
-rw-r--r--spec/lib/container_registry/tag_spec.rb18
-rw-r--r--spec/lib/extracts_path_spec.rb2
-rw-r--r--spec/lib/feature/gitaly_spec.rb18
-rw-r--r--spec/lib/gitaly/server_spec.rb47
-rw-r--r--spec/lib/gitlab/alerting/alert_spec.rb226
-rw-r--r--spec/lib/gitlab/alerting/notification_payload_parser_spec.rb134
-rw-r--r--spec/lib/gitlab/application_context_spec.rb38
-rw-r--r--spec/lib/gitlab/asciidoc/include_processor_spec.rb6
-rw-r--r--spec/lib/gitlab/auth/current_user_mode_spec.rb172
-rw-r--r--spec/lib/gitlab/auth/saml/auth_hash_spec.rb11
-rw-r--r--spec/lib/gitlab/auth_spec.rb18
-rw-r--r--spec/lib/gitlab/background_migration/activate_prometheus_services_for_shared_cluster_applications_spec.rb75
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb24
-rw-r--r--spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb75
-rw-r--r--spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb46
-rw-r--r--spec/lib/gitlab/background_migration_spec.rb34
-rw-r--r--spec/lib/gitlab/batch_worker_context_spec.rb28
-rw-r--r--spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb18
-rw-r--r--spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/build/rules_spec.rb50
-rw-r--r--spec/lib/gitlab/ci/config/entry/bridge_spec.rb229
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb7
-rw-r--r--spec/lib/gitlab/ci/config/entry/jobs_spec.rb70
-rw-r--r--spec/lib/gitlab/ci/config/entry/reports_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/entry/retry_spec.rb22
-rw-r--r--spec/lib/gitlab/ci/config/entry/root_spec.rb15
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb45
-rw-r--r--spec/lib/gitlab/ci/config/entry/trigger_spec.rb164
-rw-r--r--spec/lib/gitlab/ci/parsers/test/junit_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb9
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/reports/test_reports_comparer_spec.rb52
-rw-r--r--spec/lib/gitlab/ci/reports/test_suite_comparer_spec.rb140
-rw-r--r--spec/lib/gitlab/ci/reports/test_suite_spec.rb9
-rw-r--r--spec/lib/gitlab/ci/trace/chunked_io_spec.rb18
-rw-r--r--spec/lib/gitlab/ci/trace/section_parser_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb53
-rw-r--r--spec/lib/gitlab/cleanup/project_uploads_spec.rb4
-rw-r--r--spec/lib/gitlab/conflict/file_spec.rb148
-rw-r--r--spec/lib/gitlab/content_security_policy/config_loader_spec.rb4
-rw-r--r--spec/lib/gitlab/current_settings_spec.rb40
-rw-r--r--spec/lib/gitlab/danger/helper_spec.rb4
-rw-r--r--spec/lib/gitlab/danger/teammate_spec.rb2
-rw-r--r--spec/lib/gitlab/data_builder/build_spec.rb12
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb11
-rw-r--r--spec/lib/gitlab/database/batch_count_spec.rb94
-rw-r--r--spec/lib/gitlab/database/count_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb381
-rw-r--r--spec/lib/gitlab/database/sha_attribute_spec.rb2
-rw-r--r--spec/lib/gitlab/database/subquery_spec.rb17
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_spec.rb150
-rw-r--r--spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb40
-rw-r--r--spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb4
-rw-r--r--spec/lib/gitlab/database_spec.rb6
-rw-r--r--spec/lib/gitlab/dependency_linker/godeps_json_linker_spec.rb4
-rw-r--r--spec/lib/gitlab/diff/deprecated_highlight_cache_spec.rb70
-rw-r--r--spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb15
-rw-r--r--spec/lib/gitlab/diff/file_spec.rb16
-rw-r--r--spec/lib/gitlab/diff/formatters/image_formatter_spec.rb40
-rw-r--r--spec/lib/gitlab/diff/highlight_cache_spec.rb16
-rw-r--r--spec/lib/gitlab/diff/lines_unfolder_spec.rb74
-rw-r--r--spec/lib/gitlab/diff/position_collection_spec.rb39
-rw-r--r--spec/lib/gitlab/diff/position_spec.rb26
-rw-r--r--spec/lib/gitlab/diff/suggestion_diff_spec.rb15
-rw-r--r--spec/lib/gitlab/email/attachment_uploader_spec.rb15
-rw-r--r--spec/lib/gitlab/email/handler_spec.rb16
-rw-r--r--spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb17
-rw-r--r--spec/lib/gitlab/email/receiver_spec.rb14
-rw-r--r--spec/lib/gitlab/error_tracking_spec.rb11
-rw-r--r--spec/lib/gitlab/etag_caching/middleware_spec.rb69
-rw-r--r--spec/lib/gitlab/etag_caching/router_spec.rb6
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb31
-rw-r--r--spec/lib/gitlab/gfm/reference_rewriter_spec.rb4
-rw-r--r--spec/lib/gitlab/git/blob_spec.rb65
-rw-r--r--spec/lib/gitlab/git/conflict/parser_spec.rb148
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb99
-rw-r--r--spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb8
-rw-r--r--spec/lib/gitlab/git_access_snippet_spec.rb85
-rw-r--r--spec/lib/gitlab/git_access_spec.rb28
-rw-r--r--spec/lib/gitlab/git_ref_validator_spec.rb4
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb28
-rw-r--r--spec/lib/gitlab/gitaly_client_spec.rb59
-rw-r--r--spec/lib/gitlab/gitlab_import/client_spec.rb4
-rw-r--r--spec/lib/gitlab/gl_repository/repo_type_spec.rb60
-rw-r--r--spec/lib/gitlab/gl_repository_spec.rb8
-rw-r--r--spec/lib/gitlab/gpg_spec.rb8
-rw-r--r--spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb113
-rw-r--r--spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb91
-rw-r--r--spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb150
-rw-r--r--spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb14
-rw-r--r--spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb33
-rw-r--r--spec/lib/gitlab/graphql/representation/submodule_tree_entry_spec.rb2
-rw-r--r--spec/lib/gitlab/hashed_storage/migrator_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml24
-rw-r--r--spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb8
-rw-r--r--spec/lib/gitlab/import_export/group_object_builder_spec.rb66
-rw-r--r--spec/lib/gitlab/import_export/group_project_object_builder_spec.rb24
-rw-r--r--spec/lib/gitlab/import_export/group_relation_factory_spec.rb120
-rw-r--r--spec/lib/gitlab/import_export/group_tree_restorer_spec.rb153
-rw-r--r--spec/lib/gitlab/import_export/group_tree_saver_spec.rb30
-rw-r--r--spec/lib/gitlab/import_export/hash_util_spec.rb6
-rw-r--r--spec/lib/gitlab/import_export/import_export_equivalence_spec.rb60
-rw-r--r--spec/lib/gitlab/import_export/import_failure_service_spec.rb23
-rw-r--r--spec/lib/gitlab/import_export/merge_request_parser_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/project_tree_loader_spec.rb49
-rw-r--r--spec/lib/gitlab/import_export/project_tree_restorer_spec.rb64
-rw-r--r--spec/lib/gitlab/import_export/project_tree_saver_spec.rb8
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml29
-rw-r--r--spec/lib/gitlab/incoming_email_spec.rb4
-rw-r--r--spec/lib/gitlab/internal_post_receive/response_spec.rb12
-rw-r--r--spec/lib/gitlab/kubernetes/generic_secret_spec.rb24
-rw-r--r--spec/lib/gitlab/kubernetes/kube_client_spec.rb38
-rw-r--r--spec/lib/gitlab/kubernetes/tls_secret_spec.rb32
-rw-r--r--spec/lib/gitlab/legacy_github_import/client_spec.rb4
-rw-r--r--spec/lib/gitlab/legacy_github_import/importer_spec.rb2
-rw-r--r--spec/lib/gitlab/log_timestamp_formatter_spec.rb15
-rw-r--r--spec/lib/gitlab/looping_batcher_spec.rb71
-rw-r--r--spec/lib/gitlab/mail_room/mail_room_spec.rb110
-rw-r--r--spec/lib/gitlab/metrics/dashboard/finder_spec.rb34
-rw-r--r--spec/lib/gitlab/metrics/dashboard/processor_spec.rb17
-rw-r--r--spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb6
-rw-r--r--spec/lib/gitlab/metrics/dashboard/url_spec.rb44
-rw-r--r--spec/lib/gitlab/middleware/go_spec.rb2
-rw-r--r--spec/lib/gitlab/private_commit_email_spec.rb8
-rw-r--r--spec/lib/gitlab/profiler_spec.rb39
-rw-r--r--spec/lib/gitlab/project_search_results_spec.rb33
-rw-r--r--spec/lib/gitlab/prometheus/queries/knative_invocation_query_spec.rb2
-rw-r--r--spec/lib/gitlab/query_limiting/middleware_spec.rb4
-rw-r--r--spec/lib/gitlab/quick_actions/extractor_spec.rb18
-rw-r--r--spec/lib/gitlab/quick_actions/substitution_definition_spec.rb26
-rw-r--r--spec/lib/gitlab/redis/boolean_spec.rb150
-rw-r--r--spec/lib/gitlab/repository_cache_adapter_spec.rb3
-rw-r--r--spec/lib/gitlab/repository_cache_spec.rb12
-rw-r--r--spec/lib/gitlab/repository_hash_cache_spec.rb184
-rw-r--r--spec/lib/gitlab/request_context_spec.rb12
-rw-r--r--spec/lib/gitlab/rugged_instrumentation_spec.rb2
-rw-r--r--spec/lib/gitlab/runtime_spec.rb135
-rw-r--r--spec/lib/gitlab/safe_request_store_spec.rb4
-rw-r--r--spec/lib/gitlab/sanitizers/exif_spec.rb2
-rw-r--r--spec/lib/gitlab/search/found_blob_spec.rb10
-rw-r--r--spec/lib/gitlab/search/found_wiki_page_spec.rb18
-rw-r--r--spec/lib/gitlab/serverless/domain_spec.rb22
-rw-r--r--spec/lib/gitlab/serverless/function_uri_spec.rb81
-rw-r--r--spec/lib/gitlab/serverless/service_spec.rb134
-rw-r--r--spec/lib/gitlab/shell_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb217
-rw-r--r--spec/lib/gitlab/sidekiq_config/worker_spec.rb128
-rw-r--r--spec/lib/gitlab/sidekiq_config_spec.rb105
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb16
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb94
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb72
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb15
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb45
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb72
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb18
-rw-r--r--spec/lib/gitlab/sidekiq_versioning_spec.rb2
-rw-r--r--spec/lib/gitlab/submodule_links_spec.rb4
-rw-r--r--spec/lib/gitlab/tab_width_spec.rb31
-rw-r--r--spec/lib/gitlab/tcp_checker_spec.rb2
-rw-r--r--spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb2
-rw-r--r--spec/lib/gitlab/untrusted_regexp_spec.rb2
-rw-r--r--spec/lib/gitlab/url_blocker_spec.rb2
-rw-r--r--spec/lib/gitlab/url_builder_spec.rb12
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb702
-rw-r--r--spec/lib/gitlab/user_access_spec.rb2
-rw-r--r--spec/lib/gitlab/utils/deep_size_spec.rb38
-rw-r--r--spec/lib/gitlab/utils/log_limited_array_spec.rb49
-rw-r--r--spec/lib/gitlab/x509/commit_spec.rb208
-rw-r--r--spec/lib/marginalia_spec.rb2
-rw-r--r--spec/lib/microsoft_teams/notifier_spec.rb20
-rw-r--r--spec/lib/omni_auth/strategies/jwt_spec.rb2
-rw-r--r--spec/lib/quality/kubernetes_client_spec.rb2
-rw-r--r--spec/lib/quality/test_level_spec.rb4
-rw-r--r--spec/lib/rspec_flaky/report_spec.rb8
-rw-r--r--spec/lib/safe_zip/entry_spec.rb4
-rw-r--r--spec/lib/safe_zip/extract_spec.rb2
-rw-r--r--spec/lib/sentry/client/issue_link_spec.rb38
-rw-r--r--spec/lib/sentry/client/issue_spec.rb4
-rw-r--r--spec/mailers/emails/pipelines_spec.rb42
-rw-r--r--spec/mailers/notify_spec.rb26
-rw-r--r--spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb18
-rw-r--r--spec/migrations/20200122123016_backfill_project_settings_spec.rb32
-rw-r--r--spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb104
-rw-r--r--spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb2
-rw-r--r--spec/migrations/add_deploy_token_type_to_deploy_tokens_spec.rb24
-rw-r--r--spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb2
-rw-r--r--spec/migrations/backfill_operations_feature_flags_iid_spec.rb34
-rw-r--r--spec/migrations/cleanup_build_stage_migration_spec.rb2
-rw-r--r--spec/migrations/cleanup_legacy_artifact_migration_spec.rb2
-rw-r--r--spec/migrations/cleanup_stages_position_migration_spec.rb2
-rw-r--r--spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb44
-rw-r--r--spec/migrations/drop_background_migration_jobs_spec.rb61
-rw-r--r--spec/migrations/enqueue_reset_merge_status_second_run_spec.rb2
-rw-r--r--spec/migrations/enqueue_reset_merge_status_spec.rb2
-rw-r--r--spec/migrations/enqueue_verify_pages_domain_workers_spec.rb2
-rw-r--r--spec/migrations/fix_projects_without_project_feature_spec.rb42
-rw-r--r--spec/migrations/migrate_cluster_configure_worker_sidekiq_queue_spec.rb2
-rw-r--r--spec/migrations/migrate_create_commit_signature_worker_sidekiq_queue_spec.rb44
-rw-r--r--spec/migrations/migrate_create_trace_artifact_sidekiq_queue_spec.rb2
-rw-r--r--spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb2
-rw-r--r--spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb2
-rw-r--r--spec/migrations/migrate_object_storage_upload_sidekiq_queue_spec.rb2
-rw-r--r--spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb2
-rw-r--r--spec/migrations/migrate_store_security_reports_sidekiq_queue_spec.rb33
-rw-r--r--spec/migrations/migrate_sync_security_reports_to_report_approval_rules_sidekiq_queue_spec.rb33
-rw-r--r--spec/migrations/migrate_update_head_pipeline_for_merge_request_sidekiq_queue_spec.rb2
-rw-r--r--spec/migrations/patch_prometheus_services_for_shared_cluster_applications_spec.rb134
-rw-r--r--spec/migrations/remove_packages_deprecated_dependencies_spec.rb30
-rw-r--r--spec/migrations/reschedule_builds_stages_migration_spec.rb2
-rw-r--r--spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb2
-rw-r--r--spec/migrations/save_instance_administrators_group_id_spec.rb99
-rw-r--r--spec/migrations/schedule_digest_personal_access_tokens_spec.rb2
-rw-r--r--spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb2
-rw-r--r--spec/migrations/schedule_pages_metadata_migration_spec.rb2
-rw-r--r--spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb2
-rw-r--r--spec/migrations/schedule_recalculate_project_authorizations_spec.rb2
-rw-r--r--spec/migrations/schedule_runners_token_encryption_spec.rb2
-rw-r--r--spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb2
-rw-r--r--spec/migrations/schedule_stages_index_migration_spec.rb2
-rw-r--r--spec/migrations/schedule_sync_issuables_state_id_spec.rb2
-rw-r--r--spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb2
-rw-r--r--spec/migrations/schedule_update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb79
-rw-r--r--spec/migrations/services_remove_temporary_index_on_project_id_spec.rb40
-rw-r--r--spec/migrations/sync_issuables_state_id_spec.rb2
-rw-r--r--spec/migrations/update_fingerprint_sha256_within_keys_spec.rb2
-rw-r--r--spec/migrations/update_timestamp_softwarelicensespolicy_spec.rb24
-rw-r--r--spec/models/abuse_report_spec.rb4
-rw-r--r--spec/models/award_emoji_spec.rb4
-rw-r--r--spec/models/badge_spec.rb4
-rw-r--r--spec/models/badges/project_badge_spec.rb4
-rw-r--r--spec/models/blob_spec.rb541
-rw-r--r--spec/models/blob_viewer/gitlab_ci_yml_spec.rb5
-rw-r--r--spec/models/board_spec.rb23
-rw-r--r--spec/models/chat_name_spec.rb2
-rw-r--r--spec/models/chat_team_spec.rb2
-rw-r--r--spec/models/ci/artifact_blob_spec.rb6
-rw-r--r--spec/models/ci/bridge_spec.rb265
-rw-r--r--spec/models/ci/build_metadata_spec.rb8
-rw-r--r--spec/models/ci/build_spec.rb51
-rw-r--r--spec/models/ci/build_trace_chunk_spec.rb41
-rw-r--r--spec/models/ci/job_artifact_spec.rb12
-rw-r--r--spec/models/ci/persistent_ref_spec.rb6
-rw-r--r--spec/models/ci/pipeline_schedule_spec.rb4
-rw-r--r--spec/models/ci/pipeline_spec.rb215
-rw-r--r--spec/models/ci/processable_spec.rb97
-rw-r--r--spec/models/ci/runner_spec.rb8
-rw-r--r--spec/models/ci/sources/pipeline_spec.rb1
-rw-r--r--spec/models/clusters/applications/cert_manager_spec.rb2
-rw-r--r--spec/models/clusters/applications/ingress_spec.rb4
-rw-r--r--spec/models/clusters/applications/knative_spec.rb2
-rw-r--r--spec/models/clusters/applications/prometheus_spec.rb11
-rw-r--r--spec/models/clusters/cluster_spec.rb3
-rw-r--r--spec/models/clusters/platforms/kubernetes_spec.rb8
-rw-r--r--spec/models/commit_spec.rb250
-rw-r--r--spec/models/commit_status_spec.rb4
-rw-r--r--spec/models/concerns/avatarable_spec.rb6
-rw-r--r--spec/models/concerns/batch_destroy_dependent_associations_spec.rb8
-rw-r--r--spec/models/concerns/bulk_insert_safe_spec.rb38
-rw-r--r--spec/models/concerns/delete_with_limit_spec.rb15
-rw-r--r--spec/models/concerns/discussion_on_diff_spec.rb12
-rw-r--r--spec/models/concerns/mentionable_spec.rb36
-rw-r--r--spec/models/concerns/milestoneable_spec.rb35
-rw-r--r--spec/models/concerns/project_features_compatibility_spec.rb36
-rw-r--r--spec/models/concerns/reactive_caching_spec.rb65
-rw-r--r--spec/models/concerns/redis_cacheable_spec.rb4
-rw-r--r--spec/models/concerns/routable_spec.rb2
-rw-r--r--spec/models/concerns/sortable_spec.rb15
-rw-r--r--spec/models/concerns/triggerable_hooks_spec.rb2
-rw-r--r--spec/models/concerns/x509_serial_number_attribute_spec.rb91
-rw-r--r--spec/models/container_expiration_policy_spec.rb4
-rw-r--r--spec/models/container_repository_spec.rb34
-rw-r--r--spec/models/cycle_analytics/group_level_spec.rb4
-rw-r--r--spec/models/deploy_token_spec.rb172
-rw-r--r--spec/models/deployment_cluster_spec.rb22
-rw-r--r--spec/models/deployment_metrics_spec.rb2
-rw-r--r--spec/models/deployment_spec.rb59
-rw-r--r--spec/models/diff_viewer/server_side_spec.rb2
-rw-r--r--spec/models/email_spec.rb5
-rw-r--r--spec/models/environment_spec.rb69
-rw-r--r--spec/models/error_tracking/project_error_tracking_setting_spec.rb15
-rw-r--r--spec/models/event_collection_spec.rb8
-rw-r--r--spec/models/event_spec.rb3
-rw-r--r--spec/models/group_deploy_token_spec.rb17
-rw-r--r--spec/models/guest_spec.rb6
-rw-r--r--spec/models/hooks/web_hook_log_spec.rb16
-rw-r--r--spec/models/identity_spec.rb2
-rw-r--r--spec/models/incident_management/project_incident_management_setting_spec.rb111
-rw-r--r--spec/models/issue_assignee_spec.rb18
-rw-r--r--spec/models/issue_spec.rb16
-rw-r--r--spec/models/key_spec.rb2
-rw-r--r--spec/models/label_link_spec.rb2
-rw-r--r--spec/models/label_note_spec.rb12
-rw-r--r--spec/models/label_spec.rb25
-rw-r--r--spec/models/lfs_file_lock_spec.rb2
-rw-r--r--spec/models/lfs_object_spec.rb10
-rw-r--r--spec/models/lfs_objects_project_spec.rb2
-rw-r--r--spec/models/list_user_preference_spec.rb4
-rw-r--r--spec/models/merge_request_assignee_spec.rb18
-rw-r--r--spec/models/merge_request_context_commit_diff_file_spec.rb9
-rw-r--r--spec/models/merge_request_context_commit_spec.rb33
-rw-r--r--spec/models/merge_request_diff_commit_spec.rb9
-rw-r--r--spec/models/merge_request_diff_file_spec.rb2
-rw-r--r--spec/models/merge_request_diff_spec.rb14
-rw-r--r--spec/models/merge_request_spec.rb123
-rw-r--r--spec/models/milestone_spec.rb17
-rw-r--r--spec/models/note_spec.rb8
-rw-r--r--spec/models/notification_recipient_spec.rb2
-rw-r--r--spec/models/pages_domain_spec.rb54
-rw-r--r--spec/models/personal_snippet_spec.rb9
-rw-r--r--spec/models/project_auto_devops_spec.rb2
-rw-r--r--spec/models/project_ci_cd_setting_spec.rb6
-rw-r--r--spec/models/project_group_link_spec.rb8
-rw-r--r--spec/models/project_services/alerts_service_spec.rb109
-rw-r--r--spec/models/project_services/bamboo_service_spec.rb4
-rw-r--r--spec/models/project_services/buildkite_service_spec.rb2
-rw-r--r--spec/models/project_services/chat_message/issue_message_spec.rb2
-rw-r--r--spec/models/project_services/chat_message/merge_message_spec.rb12
-rw-r--r--spec/models/project_services/chat_notification_service_spec.rb14
-rw-r--r--spec/models/project_services/drone_ci_service_spec.rb2
-rw-r--r--spec/models/project_services/emails_on_push_service_spec.rb16
-rw-r--r--spec/models/project_services/hipchat_service_spec.rb2
-rw-r--r--spec/models/project_services/jira_service_spec.rb55
-rw-r--r--spec/models/project_services/microsoft_teams_service_spec.rb4
-rw-r--r--spec/models/project_services/pipelines_email_service_spec.rb4
-rw-r--r--spec/models/project_services/prometheus_service_spec.rb4
-rw-r--r--spec/models/project_services/youtrack_service_spec.rb4
-rw-r--r--spec/models/project_setting_spec.rb7
-rw-r--r--spec/models/project_snippet_spec.rb9
-rw-r--r--spec/models/project_spec.rb542
-rw-r--r--spec/models/prometheus_alert_spec.rb103
-rw-r--r--spec/models/prometheus_metric_spec.rb3
-rw-r--r--spec/models/protected_branch_spec.rb28
-rw-r--r--spec/models/push_event_spec.rb4
-rw-r--r--spec/models/release_spec.rb6
-rw-r--r--spec/models/releases/source_spec.rb2
-rw-r--r--spec/models/remote_mirror_spec.rb8
-rw-r--r--spec/models/repository_spec.rb316
-rw-r--r--spec/models/sent_notification_spec.rb4
-rw-r--r--spec/models/sentry_issue_spec.rb41
-rw-r--r--spec/models/serverless/domain_cluster_spec.rb45
-rw-r--r--spec/models/serverless/function_spec.rb2
-rw-r--r--spec/models/snippet_repository_spec.rb23
-rw-r--r--spec/models/snippet_spec.rb106
-rw-r--r--spec/models/spam_log_spec.rb27
-rw-r--r--spec/models/todo_spec.rb30
-rw-r--r--spec/models/trending_project_spec.rb10
-rw-r--r--spec/models/user_callout_spec.rb33
-rw-r--r--spec/models/user_preference_spec.rb15
-rw-r--r--spec/models/user_spec.rb205
-rw-r--r--spec/models/wiki_page_spec.rb385
-rw-r--r--spec/models/x509_certificate_spec.rb107
-rw-r--r--spec/models/x509_commit_signature_spec.rb53
-rw-r--r--spec/models/x509_issuer_spec.rb71
-rw-r--r--spec/policies/base_policy_spec.rb10
-rw-r--r--spec/policies/global_policy_spec.rb8
-rw-r--r--spec/policies/group_policy_spec.rb4
-rw-r--r--spec/policies/note_policy_spec.rb41
-rw-r--r--spec/policies/personal_snippet_policy_spec.rb28
-rw-r--r--spec/policies/project_policy_spec.rb22
-rw-r--r--spec/policies/project_snippet_policy_spec.rb46
-rw-r--r--spec/presenters/blob_presenter_spec.rb2
-rw-r--r--spec/presenters/ci/bridge_presenter_spec.rb6
-rw-r--r--spec/presenters/ci/pipeline_presenter_spec.rb9
-rw-r--r--spec/presenters/ci/trigger_presenter_spec.rb6
-rw-r--r--spec/presenters/event_presenter_spec.rb10
-rw-r--r--spec/presenters/label_presenter_spec.rb4
-rw-r--r--spec/presenters/merge_request_presenter_spec.rb22
-rw-r--r--spec/presenters/milestone_presenter_spec.rb20
-rw-r--r--spec/presenters/project_presenter_spec.rb6
-rw-r--r--spec/presenters/projects/prometheus/alert_presenter_spec.rb235
-rw-r--r--spec/presenters/release_presenter_spec.rb16
-rw-r--r--spec/presenters/sentry_error_presenter_spec.rb (renamed from spec/presenters/sentry_detailed_error_presenter_spec.rb)4
-rw-r--r--spec/presenters/snippet_blob_presenter_spec.rb94
-rw-r--r--spec/presenters/snippet_presenter_spec.rb24
-rw-r--r--spec/presenters/tree_entry_presenter_spec.rb2
-rw-r--r--spec/requests/api/access_requests_spec.rb12
-rw-r--r--spec/requests/api/api_spec.rb25
-rw-r--r--spec/requests/api/applications_spec.rb11
-rw-r--r--spec/requests/api/award_emoji_spec.rb15
-rw-r--r--spec/requests/api/boards_spec.rb32
-rw-r--r--spec/requests/api/branches_spec.rb4
-rw-r--r--spec/requests/api/broadcast_messages_spec.rb83
-rw-r--r--spec/requests/api/commits_spec.rb57
-rw-r--r--spec/requests/api/deployments_spec.rb63
-rw-r--r--spec/requests/api/error_tracking_spec.rb177
-rw-r--r--spec/requests/api/events_spec.rb12
-rw-r--r--spec/requests/api/features_spec.rb4
-rw-r--r--spec/requests/api/graphql/current_user_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/gitlab_schema_spec.rb47
-rw-r--r--spec/requests/api/graphql/group/milestones_spec.rb85
-rw-r--r--spec/requests/api/graphql/group_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/award_emojis/add_spec.rb6
-rw-r--r--spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb6
-rw-r--r--spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/notes/create/note_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb244
-rw-r--r--spec/requests/api/graphql/mutations/notes/update/note_spec.rb (renamed from spec/requests/api/graphql/mutations/notes/update_spec.rb)15
-rw-r--r--spec/requests/api/graphql/mutations/snippets/create_spec.rb6
-rw-r--r--spec/requests/api/graphql/mutations/snippets/update_spec.rb6
-rw-r--r--spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb4
-rw-r--r--spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb256
-rw-r--r--spec/requests/api/graphql/project/merge_request_spec.rb2
-rw-r--r--spec/requests/api/graphql/tasks/task_completion_status_spec.rb8
-rw-r--r--spec/requests/api/graphql_spec.rb2
-rw-r--r--spec/requests/api/group_boards_spec.rb28
-rw-r--r--spec/requests/api/group_export_spec.rb80
-rw-r--r--spec/requests/api/group_import_spec.rb304
-rw-r--r--spec/requests/api/internal/base_spec.rb293
-rw-r--r--spec/requests/api/internal/pages_spec.rb14
-rw-r--r--spec/requests/api/issues/get_group_issues_spec.rb34
-rw-r--r--spec/requests/api/issues/get_project_issues_spec.rb74
-rw-r--r--spec/requests/api/issues/issues_spec.rb94
-rw-r--r--spec/requests/api/issues/post_projects_issues_spec.rb108
-rw-r--r--spec/requests/api/issues/put_projects_issues_spec.rb80
-rw-r--r--spec/requests/api/jobs_spec.rb4
-rw-r--r--spec/requests/api/lsif_data_spec.rb94
-rw-r--r--spec/requests/api/markdown_spec.rb4
-rw-r--r--spec/requests/api/merge_requests_spec.rb519
-rw-r--r--spec/requests/api/pages/internal_access_spec.rb19
-rw-r--r--spec/requests/api/pages/pages_spec.rb6
-rw-r--r--spec/requests/api/pages/private_access_spec.rb19
-rw-r--r--spec/requests/api/pages/public_access_spec.rb19
-rw-r--r--spec/requests/api/pipeline_schedules_spec.rb62
-rw-r--r--spec/requests/api/project_container_repositories_spec.rb15
-rw-r--r--spec/requests/api/project_export_spec.rb10
-rw-r--r--spec/requests/api/project_hooks_spec.rb2
-rw-r--r--spec/requests/api/project_import_spec.rb13
-rw-r--r--spec/requests/api/project_snippets_spec.rb10
-rw-r--r--spec/requests/api/projects_spec.rb34
-rw-r--r--spec/requests/api/protected_branches_spec.rb18
-rw-r--r--spec/requests/api/releases_spec.rb114
-rw-r--r--spec/requests/api/repositories_spec.rb23
-rw-r--r--spec/requests/api/resource_label_events_spec.rb6
-rw-r--r--spec/requests/api/runner_spec.rb103
-rw-r--r--spec/requests/api/runners_spec.rb2
-rw-r--r--spec/requests/api/search_spec.rb8
-rw-r--r--spec/requests/api/services_spec.rb55
-rw-r--r--spec/requests/api/settings_spec.rb2
-rw-r--r--spec/requests/api/snippets_spec.rb16
-rw-r--r--spec/requests/api/task_completion_status_spec.rb4
-rw-r--r--spec/requests/api/triggers_spec.rb4
-rw-r--r--spec/requests/api/users_spec.rb236
-rw-r--r--spec/requests/git_http_spec.rb32
-rw-r--r--spec/requests/jwt_controller_spec.rb22
-rw-r--r--spec/requests/lfs_http_spec.rb14
-rw-r--r--spec/requests/lfs_locks_api_spec.rb14
-rw-r--r--spec/requests/openid_connect_spec.rb4
-rw-r--r--spec/requests/profiles/notifications_controller_spec.rb42
-rw-r--r--spec/requests/projects/cycle_analytics_events_spec.rb4
-rw-r--r--spec/requests/projects/merge_requests_discussions_spec.rb2
-rw-r--r--spec/requests/rack_attack_global_spec.rb28
-rw-r--r--spec/requests/self_monitoring_project_spec.rb43
-rw-r--r--spec/requests/user_activity_spec.rb4
-rw-r--r--spec/requests/user_avatar_spec.rb4
-rw-r--r--spec/routing/admin/serverless/domains_controller_routing_spec.rb22
-rw-r--r--spec/routing/project_routing_spec.rb173
-rw-r--r--spec/rubocop/cop/include_action_view_context_spec.rb45
-rw-r--r--spec/rubocop/cop/scalability/bulk_perform_with_context_spec.rb60
-rw-r--r--spec/rubocop/cop/scalability/cron_worker_context_spec.rb82
-rw-r--r--spec/serializers/blob_entity_spec.rb2
-rw-r--r--spec/serializers/build_artifact_entity_spec.rb2
-rw-r--r--spec/serializers/build_details_entity_spec.rb17
-rw-r--r--spec/serializers/container_repositories_serializer_spec.rb84
-rw-r--r--spec/serializers/deployment_cluster_entity_spec.rb (renamed from spec/serializers/cluster_basic_entity_spec.rb)14
-rw-r--r--spec/serializers/diff_file_entity_spec.rb2
-rw-r--r--spec/serializers/diffs_metadata_entity_spec.rb1
-rw-r--r--spec/serializers/merge_request_diff_entity_spec.rb27
-rw-r--r--spec/serializers/merge_request_poll_widget_entity_spec.rb2
-rw-r--r--spec/serializers/merge_request_widget_entity_spec.rb92
-rw-r--r--spec/serializers/paginated_diff_entity_spec.rb2
-rw-r--r--spec/serializers/pipeline_details_entity_spec.rb39
-rw-r--r--spec/serializers/pipeline_serializer_spec.rb4
-rw-r--r--spec/serializers/test_reports_comparer_entity_spec.rb6
-rw-r--r--spec/serializers/test_suite_comparer_entity_spec.rb78
-rw-r--r--spec/serializers/test_suite_entity_spec.rb2
-rw-r--r--spec/serializers/variable_entity_spec.rb2
-rw-r--r--spec/services/branches/delete_merged_service_spec.rb4
-rw-r--r--spec/services/ci/create_cross_project_pipeline_service_spec.rb364
-rw-r--r--spec/services/ci/create_job_artifacts_service_spec.rb121
-rw-r--r--spec/services/ci/create_pipeline_service/needs_spec.rb66
-rw-r--r--spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb133
-rw-r--r--spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb60
-rw-r--r--spec/services/ci/create_pipeline_service/rules_spec.rb19
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb252
-rw-r--r--spec/services/ci/ensure_stage_service_spec.rb2
-rw-r--r--spec/services/ci/expire_pipeline_cache_service_spec.rb4
-rw-r--r--spec/services/ci/pipeline_bridge_status_service_spec.rb27
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb12
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb6
-rw-r--r--spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb6
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service.rb76
-rw-r--r--spec/services/ci/process_build_service_spec.rb31
-rw-r--r--spec/services/ci/process_pipeline_service_spec.rb19
-rw-r--r--spec/services/ci/retry_build_service_spec.rb26
-rw-r--r--spec/services/ci/retry_pipeline_service_spec.rb17
-rw-r--r--spec/services/ci/stop_environments_service_spec.rb53
-rw-r--r--spec/services/clusters/cleanup/app_service_spec.rb2
-rw-r--r--spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb197
-rw-r--r--spec/services/commits/cherry_pick_service_spec.rb85
-rw-r--r--spec/services/container_expiration_policy_service_spec.rb2
-rw-r--r--spec/services/deployments/link_merge_requests_service_spec.rb109
-rw-r--r--spec/services/deployments/older_deployments_drop_service_spec.rb78
-rw-r--r--spec/services/environments/auto_stop_service_spec.rb94
-rw-r--r--spec/services/error_tracking/base_service_spec.rb74
-rw-r--r--spec/services/error_tracking/issue_details_service_spec.rb33
-rw-r--r--spec/services/error_tracking/issue_latest_event_service_spec.rb17
-rw-r--r--spec/services/error_tracking/issue_update_service_spec.rb116
-rw-r--r--spec/services/error_tracking/list_issues_service_spec.rb55
-rw-r--r--spec/services/error_tracking/list_projects_service_spec.rb26
-rw-r--r--spec/services/git/branch_hooks_service_spec.rb17
-rw-r--r--spec/services/git/branch_push_service_spec.rb23
-rw-r--r--spec/services/git/tag_hooks_service_spec.rb1
-rw-r--r--spec/services/groups/import_export/export_service_spec.rb16
-rw-r--r--spec/services/groups/import_export/import_service_spec.rb38
-rw-r--r--spec/services/incident_management/create_issue_service_spec.rb311
-rw-r--r--spec/services/issues/create_service_spec.rb37
-rw-r--r--spec/services/issues/move_service_spec.rb14
-rw-r--r--spec/services/issues/update_service_spec.rb43
-rw-r--r--spec/services/labels/available_labels_service_spec.rb4
-rw-r--r--spec/services/merge_requests/add_context_service_spec.rb44
-rw-r--r--spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb1
-rw-r--r--spec/services/merge_requests/create_from_issue_service_spec.rb6
-rw-r--r--spec/services/merge_requests/create_service_spec.rb48
-rw-r--r--spec/services/merge_requests/get_urls_service_spec.rb6
-rw-r--r--spec/services/merge_requests/link_lfs_objects_service_spec.rb103
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb5
-rw-r--r--spec/services/merge_requests/mergeability_check_service_spec.rb37
-rw-r--r--spec/services/merge_requests/migrate_external_diffs_service_spec.rb2
-rw-r--r--spec/services/merge_requests/rebase_service_spec.rb24
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb8
-rw-r--r--spec/services/merge_requests/reload_diffs_service_spec.rb31
-rw-r--r--spec/services/merge_requests/update_service_spec.rb46
-rw-r--r--spec/services/metrics/dashboard/clone_dashboard_service_spec.rb33
-rw-r--r--spec/services/metrics/dashboard/default_embed_service_spec.rb20
-rw-r--r--spec/services/metrics/dashboard/pod_dashboard_service_spec.rb22
-rw-r--r--spec/services/metrics/dashboard/project_dashboard_service_spec.rb24
-rw-r--r--spec/services/metrics/dashboard/self_monitoring_dashboard_service_spec.rb69
-rw-r--r--spec/services/metrics/dashboard/system_dashboard_service_spec.rb24
-rw-r--r--spec/services/notes/create_service_spec.rb24
-rw-r--r--spec/services/notes/quick_actions_service_spec.rb1
-rw-r--r--spec/services/notes/update_service_spec.rb6
-rw-r--r--spec/services/notification_service_spec.rb152
-rw-r--r--spec/services/post_receive_service_spec.rb186
-rw-r--r--spec/services/projects/after_import_service_spec.rb48
-rw-r--r--spec/services/projects/after_rename_service_spec.rb2
-rw-r--r--spec/services/projects/alerting/notify_service_spec.rb84
-rw-r--r--spec/services/projects/batch_open_issues_count_service_spec.rb2
-rw-r--r--spec/services/projects/container_repository/cleanup_tags_service_spec.rb36
-rw-r--r--spec/services/projects/container_repository/delete_tags_service_spec.rb214
-rw-r--r--spec/services/projects/create_service_spec.rb6
-rw-r--r--spec/services/projects/destroy_rollback_service_spec.rb45
-rw-r--r--spec/services/projects/destroy_service_spec.rb149
-rw-r--r--spec/services/projects/fork_service_spec.rb8
-rw-r--r--spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb10
-rw-r--r--spec/services/projects/hashed_storage/migrate_repository_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb8
-rw-r--r--spec/services/projects/hashed_storage/rollback_repository_service_spec.rb2
-rw-r--r--spec/services/projects/housekeeping_service_spec.rb2
-rw-r--r--spec/services/projects/lfs_pointers/lfs_download_service_spec.rb3
-rw-r--r--spec/services/projects/lsif_data_service_spec.rb129
-rw-r--r--spec/services/projects/open_issues_count_service_spec.rb2
-rw-r--r--spec/services/projects/operations/update_service_spec.rb125
-rw-r--r--spec/services/projects/transfer_service_spec.rb38
-rw-r--r--spec/services/projects/update_service_spec.rb57
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb9
-rw-r--r--spec/services/repositories/destroy_rollback_service_spec.rb73
-rw-r--r--spec/services/repositories/destroy_service_spec.rb80
-rw-r--r--spec/services/repositories/shell_destroy_service_spec.rb25
-rw-r--r--spec/services/snippets/count_service_spec.rb66
-rw-r--r--spec/services/snippets/create_service_spec.rb36
-rw-r--r--spec/services/spam/akismet_service_spec.rb136
-rw-r--r--spec/services/spam/ham_service_spec.rb56
-rw-r--r--spec/services/spam/spam_check_service_spec.rb153
-rw-r--r--spec/services/spam_service_spec.rb111
-rw-r--r--spec/services/submit_usage_ping_service_spec.rb123
-rw-r--r--spec/services/system_note_service_spec.rb16
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb13
-rw-r--r--spec/services/system_notes/merge_requests_service_spec.rb21
-rw-r--r--spec/services/users/block_service_spec.rb38
-rw-r--r--spec/services/users/destroy_service_spec.rb19
-rw-r--r--spec/services/users/update_service_spec.rb9
-rw-r--r--spec/spec_helper.rb172
-rw-r--r--spec/support/banzai/reference_filter_shared_examples.rb4
-rw-r--r--spec/support/caching.rb29
-rw-r--r--spec/support/controllers/project_import_rate_limiter_shared_examples.rb22
-rw-r--r--spec/support/db_cleaner.rb4
-rw-r--r--spec/support/dns.rb20
-rw-r--r--spec/support/factory_bot.rb9
-rw-r--r--spec/support/helpers/api_helpers.rb24
-rw-r--r--spec/support/helpers/controller_helpers.rb18
-rw-r--r--spec/support/helpers/create_environments_helpers.rb13
-rw-r--r--spec/support/helpers/dns_helpers.rb30
-rw-r--r--spec/support/helpers/email_helpers.rb5
-rw-r--r--spec/support/helpers/fake_blob_helpers.rb4
-rw-r--r--spec/support/helpers/filter_spec_helper.rb13
-rw-r--r--spec/support/helpers/graphql_helpers.rb80
-rw-r--r--spec/support/helpers/javascript_fixtures_helpers.rb2
-rw-r--r--spec/support/helpers/kubernetes_helpers.rb73
-rw-r--r--spec/support/helpers/metrics_dashboard_helpers.rb4
-rw-r--r--spec/support/helpers/migrations_helpers.rb2
-rw-r--r--spec/support/helpers/notification_helpers.rb24
-rw-r--r--spec/support/helpers/query_recorder.rb49
-rw-r--r--spec/support/helpers/rack_attack_spec_helpers.rb2
-rw-r--r--spec/support/helpers/smime_helper.rb12
-rw-r--r--spec/support/helpers/stub_configuration.rb1
-rw-r--r--spec/support/helpers/user_login_helper.rb2
-rw-r--r--spec/support/helpers/x509_helpers.rb208
-rw-r--r--spec/support/import_export/common_util.rb33
-rw-r--r--spec/support/import_export/project_tree_expectations.rb128
-rw-r--r--spec/support/matchers/background_migrations_matchers.rb23
-rw-r--r--spec/support/matchers/graphql_matchers.rb6
-rw-r--r--spec/support/matchers/log_spam.rb55
-rw-r--r--spec/support/matchers/match_asset_path.rb19
-rw-r--r--spec/support/matchers/schema_matcher.rb (renamed from spec/support/api/schema_matcher.rb)0
-rw-r--r--spec/support/migration.rb31
-rw-r--r--spec/support/migrations_helpers/namespaces_helper.rb14
-rw-r--r--spec/support/migrations_helpers/prometheus_service_helpers.rb35
-rw-r--r--spec/support/pages.rb19
-rw-r--r--spec/support/redis.rb33
-rw-r--r--spec/support/services/clusters/create_service_shared.rb8
-rw-r--r--spec/support/services/issuable_create_service_slash_commands_shared_examples.rb2
-rw-r--r--spec/support/services/issuable_update_service_shared_examples.rb2
-rw-r--r--spec/support/services/migrate_to_ghost_user_service_shared_examples.rb4
-rw-r--r--spec/support/shared_contexts/change_access_checks_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/controllers/githubish_import_controller_shared_context.rb (renamed from spec/support/controllers/githubish_import_controller_shared_context.rb)2
-rw-r--r--spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb (renamed from spec/support/controllers/ldap_omniauth_callbacks_controller_shared_context.rb)4
-rw-r--r--spec/support/shared_contexts/email_shared_context.rb4
-rw-r--r--spec/support/shared_contexts/features/error_tracking_shared_context.rb35
-rw-r--r--spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb2
-rw-r--r--spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb2
-rw-r--r--spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb2
-rw-r--r--spec/support/shared_contexts/finders/users_finder_shared_contexts.rb2
-rw-r--r--spec/support/shared_contexts/json_response_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/mailers/notify_shared_context.rb25
-rw-r--r--spec/support/shared_contexts/merge_request_create_shared_context.rb (renamed from spec/support/shared_contexts/merge_request_create.rb)2
-rw-r--r--spec/support/shared_contexts/merge_request_edit_shared_context.rb (renamed from spec/support/shared_contexts/merge_request_edit.rb)3
-rw-r--r--spec/support/shared_contexts/merge_requests_allowing_collaboration_shared_context.rb (renamed from spec/support/shared_contexts/merge_requests_allowing_collaboration.rb)2
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/policies/project_policy_shared_context.rb8
-rw-r--r--spec/support/shared_contexts/policies/project_policy_table_shared_context.rb164
-rw-r--r--spec/support/shared_contexts/rack_attack_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/sentry_error_tracking_shared_context.rb21
-rw-r--r--spec/support/shared_contexts/services_shared_context.rb5
-rw-r--r--spec/support/shared_contexts/session_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/unique_ip_check_shared_context.rb39
-rw-r--r--spec/support/shared_contexts/upload_type_check_shared_context.rb4
-rw-r--r--spec/support/shared_contexts/url_shared_context.rb2
-rw-r--r--spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/ci/auto_merge_merge_requests_shared_examples.rb (renamed from spec/support/shared_examples/ci/auto_merge_merge_requests_examples.rb)4
-rw-r--r--spec/support/shared_examples/ci/pipeline_email_shared_examples.rb (renamed from spec/support/shared_examples/ci/pipeline_email_examples.rb)2
-rw-r--r--spec/support/shared_examples/ci/stage_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/application_settings_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/discussions_provider_shared_examples.rb (renamed from spec/support/shared_examples/discussions_provider_shared_examples.rb)6
-rw-r--r--spec/support/shared_examples/controllers/environments_controller_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/controllers/error_tracking_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/external_authorization_service_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb (renamed from spec/support/controllers/githubish_import_controller_shared_examples.rb)22
-rw-r--r--spec/support/shared_examples/controllers/instance_statistics_controllers_shared_examples.rb (renamed from spec/support/shared_examples/instance_statistics_controllers_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb (renamed from spec/support/shared_examples/issuables_list_metadata_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/controllers/issuables_requiring_filter_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/milestone_tabs_shared_examples.rb (renamed from spec/support/shared_examples/milestone_tabs_examples.rb)2
-rw-r--r--spec/support/shared_examples/controllers/paginated_collection_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/controllers/repository_lfs_file_load_shared_examples.rb (renamed from spec/support/shared_examples/controllers/repository_lfs_file_load_examples.rb)20
-rw-r--r--spec/support/shared_examples/controllers/sessionless_auth_controller_shared_examples.rb (renamed from spec/support/controllers/sessionless_auth_controller_shared_examples.rb)12
-rw-r--r--spec/support/shared_examples/controllers/set_sort_order_from_user_preference_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/todos_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/controllers/trackable_shared_examples.rb (renamed from spec/support/shared_examples/trackable_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/controllers/update_invalid_issuable_shared_examples.rb (renamed from spec/support/shared_examples/update_invalid_issuable.rb)2
-rw-r--r--spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb32
-rw-r--r--spec/support/shared_examples/controllers/variables_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/email_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/error_tracking_shared_examples.rb36
-rw-r--r--spec/support/shared_examples/evidence_updated_exposed_fields.rb29
-rw-r--r--spec/support/shared_examples/fast_destroy_all.rb40
-rw-r--r--spec/support/shared_examples/features/archive_download_buttons_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/dirty_submit_form_shared_examples.rb (renamed from spec/support/shared_examples/dirty_submit_form_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/features/discussion_comments_shared_example.rb (renamed from spec/support/features/discussion_comments_shared_example.rb)63
-rw-r--r--spec/support/shared_examples/features/error_tracking_shared_example.rb80
-rw-r--r--spec/support/shared_examples/features/issuable_sidebar_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/navbar_shared_examples.rb23
-rw-r--r--spec/support/shared_examples/features/project_features_apply_to_issuables_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/project_list_shared_examples.rb (renamed from spec/support/shared_examples/project_list_shared_examples.rb)4
-rw-r--r--spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb (renamed from spec/support/shared_examples/features/protected_branches_access_control_ce.rb)2
-rw-r--r--spec/support/shared_examples/features/reportable_note_shared_examples.rb (renamed from spec/support/features/reportable_note_shared_examples.rb)4
-rw-r--r--spec/support/shared_examples/features/resolving_discussions_in_issues_shared_examples.rb (renamed from spec/support/features/resolving_discussions_in_issues_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/features/rss_shared_examples.rb (renamed from spec/support/features/rss_shared_examples.rb)8
-rw-r--r--spec/support/shared_examples/features/search_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/showing_user_status_shared_examples.rb (renamed from spec/support/shared_examples/showing_user_status_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/features/snippets_shared_examples.rb52
-rw-r--r--spec/support/shared_examples/features/variable_list_shared_examples.rb (renamed from spec/support/features/variable_list_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb (renamed from spec/support/shared_examples/wiki_file_attachments_examples.rb)2
-rw-r--r--spec/support/shared_examples/finders/assignees_filter_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/finders/finder_with_external_authorization_enabled_shared_examples.rb (renamed from spec/support/shared_examples/finders/finder_with_external_authorization_enabled.rb)4
-rw-r--r--spec/support/shared_examples/finders/snippet_visibility_shared_examples.rb (renamed from spec/support/shared_examples/snippet_visibility_shared_examples.rb)8
-rw-r--r--spec/support/shared_examples/graphql/connection_paged_nodes_shared_examples.rb (renamed from spec/support/shared_examples/graphql/connection_paged_nodes.rb)0
-rw-r--r--spec/support/shared_examples/graphql/failure_to_find_anything_shared_examples.rb (renamed from spec/support/shared_examples/graphql/failure_to_find_anything.rb)4
-rw-r--r--spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/initializers/uses_gitlab_url_blocker_shared_examples.rb (renamed from spec/support/shared_examples/uses_gitlab_url_blocker_examples.rb)2
-rw-r--r--spec/support/shared_examples/legacy_path_redirect_shared_examples.rb23
-rw-r--r--spec/support/shared_examples/lib/banzai/reference_parser_shared_examples.rb (renamed from spec/support/shared_examples/reference_parser_shared_examples.rb)0
-rw-r--r--spec/support/shared_examples/lib/gitlab/background_migration/backfill_project_repositories_shared_examples.rb (renamed from spec/support/shared_examples/lib/gitlab/background_migration/backfill_project_repositories_examples.rb)2
-rw-r--r--spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb82
-rw-r--r--spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb (renamed from spec/support/shared_examples/ci_trace_shared_examples.rb)6
-rw-r--r--spec/support/shared_examples/lib/gitlab/cycle_analytics_event_shared_examples.rb (renamed from spec/support/shared_examples/cycle_analytics_event_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb (renamed from spec/support/shared_examples/diff_file_collections.rb)6
-rw-r--r--spec/support/shared_examples/lib/gitlab/file_finder_shared_examples.rb (renamed from spec/support/shared_examples/file_finder.rb)2
-rw-r--r--spec/support/shared_examples/lib/gitlab/gitlab_verify_shared_examples.rb (renamed from spec/support/shared_examples/gitlab_verify.rb)0
-rw-r--r--spec/support/shared_examples/lib/gitlab/helm_generated_script_shared_examples.rb (renamed from spec/support/shared_examples/helm_generated_script.rb)2
-rw-r--r--spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/lib/gitlab/ldap_shared_examples.rb (renamed from spec/support/shared_examples/ldap_shared_examples.rb)4
-rw-r--r--spec/support/shared_examples/lib/gitlab/malicious_regexp_shared_examples.rb (renamed from spec/support/shared_examples/malicious_regexp_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/lib/gitlab/migration_helpers_shared_examples.rb (renamed from spec/support/shared_examples/migration_helpers_examples.rb)4
-rw-r--r--spec/support/shared_examples/lib/gitlab/position_formatters_shared_examples.rb (renamed from spec/support/shared_examples/position_formatters.rb)2
-rw-r--r--spec/support/shared_examples/lib/gitlab/repo_type_shared_examples.rb41
-rw-r--r--spec/support/shared_examples/lib/gitlab/unique_ip_check_shared_examples.rb (renamed from spec/support/shared_examples/unique_ip_check_shared_examples.rb)44
-rw-r--r--spec/support/shared_examples/lib/gitlab/usage_data_counters/a_redis_counter_shared_examples.rb (renamed from spec/support/shared_examples/lib/gitlab/usage_data_counters/a_redis_counter.rb)4
-rw-r--r--spec/support/shared_examples/mail_room_shared_examples.rb35
-rw-r--r--spec/support/shared_examples/mailers/notify_shared_examples.rb (renamed from spec/support/shared_examples/notify_shared_examples.rb)68
-rw-r--r--spec/support/shared_examples/metrics/url_shared_examples.rb31
-rw-r--r--spec/support/shared_examples/models/active_record_enum_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/application_setting_shared_examples.rb (renamed from spec/support/shared_examples/application_setting_examples.rb)0
-rw-r--r--spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/chat_service_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/chat_slash_commands_shared_examples.rb (renamed from spec/support/shared_examples/chat_slash_commands_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/models/ci_variable_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/cluster_application_core_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/cluster_application_helm_cert_shared_examples.rb (renamed from spec/support/shared_examples/models/cluster_application_helm_cert_examples.rb)2
-rw-r--r--spec/support/shared_examples/models/cluster_application_initial_status_shared_examples.rb (renamed from spec/support/shared_examples/models/cluster_application_initial_status.rb)2
-rw-r--r--spec/support/shared_examples/models/cluster_application_status_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/cluster_application_version_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/cluster_cleanup_worker_base_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/clusters/providers/provider_status_shared_examples.rb (renamed from spec/support/shared_examples/models/clusters/providers/provider_status.rb)2
-rw-r--r--spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb38
-rw-r--r--spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb171
-rw-r--r--spec/support/shared_examples/models/concerns/issuable_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/models/concerns/redactable_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb (renamed from spec/support/shared_examples/cycle_analytics_stage_shared_examples.rb)24
-rw-r--r--spec/support/shared_examples/models/diff_note_after_commit_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/email_format_shared_examples.rb (renamed from spec/support/shared_examples/email_format_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/models/group_members_shared_example.rb (renamed from spec/support/shared_examples/group_members_shared_example.rb)0
-rw-r--r--spec/support/shared_examples/models/issuable_hook_data_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb (renamed from spec/support/shared_examples/issue_tracker_service_shared_example.rb)0
-rw-r--r--spec/support/shared_examples/models/label_note_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/member_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/mentionable_shared_examples.rb (renamed from spec/support/shared_examples/mentionable_shared_examples.rb)26
-rw-r--r--spec/support/shared_examples/models/project_hook_data_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/models/project_latest_successful_build_for_shared_examples.rb (renamed from spec/support/shared_examples/project_latest_successful_build_for_examples.rb)2
-rw-r--r--spec/support/shared_examples/models/relative_positioning_shared_examples.rb (renamed from spec/support/shared_examples/relative_positioning_shared_examples.rb)0
-rw-r--r--spec/support/shared_examples/models/services_fields_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb (renamed from spec/support/shared_examples/slack_mattermost_notifications_shared_examples.rb)0
-rw-r--r--spec/support/shared_examples/models/taskable_shared_examples.rb (renamed from spec/support/shared_examples/taskable_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/models/throttled_touch_shared_examples.rb (renamed from spec/support/shared_examples/throttled_touch.rb)2
-rw-r--r--spec/support/shared_examples/models/update_project_statistics_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/user_mentions_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/versioned_description_shared_examples.rb (renamed from spec/support/shared_examples/versioned_description_shared_examples.rb)0
-rw-r--r--spec/support/shared_examples/models/with_uploads_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/nav_sidebar_shared_examples.rb26
-rw-r--r--spec/support/shared_examples/policies/clusterable_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/policies/within_timeframe_shared_examples.rb23
-rw-r--r--spec/support/shared_examples/quick_actions/commit/tag_quick_action_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/quick_actions/issue/board_move_quick_action_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/quick_actions/issue/create_merge_request_quick_action_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/quick_actions/issue/move_quick_action_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/quick_actions/issue/zoom_quick_actions_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/repo_type_shared_examples.rb31
-rw-r--r--spec/support/shared_examples/requests/api/award_emoji_todo_shared_examples.rb (renamed from spec/support/shared_examples/award_emoji_todo_shared_examples.rb)0
-rw-r--r--spec/support/shared_examples/requests/api/boards_shared_examples.rb (renamed from spec/support/api/boards_shared_examples.rb)34
-rw-r--r--spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb (renamed from spec/support/shared_examples/container_repositories_shared_examples.rb)6
-rw-r--r--spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb28
-rw-r--r--spec/support/shared_examples/requests/api/diff_discussions_shared_examples.rb (renamed from spec/support/shared_examples/requests/api/diff_discussions.rb)14
-rw-r--r--spec/support/shared_examples/requests/api/discussions_shared_examples.rb (renamed from spec/support/shared_examples/requests/api/discussions.rb)54
-rw-r--r--spec/support/shared_examples/requests/api/issuable_participants_examples.rb8
-rw-r--r--spec/support/shared_examples/requests/api/issues/merge_requests_count_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/issues_resolving_discussions_shared_examples.rb (renamed from spec/support/api/issues_resolving_discussions_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/requests/api/issues_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb (renamed from spec/support/shared_examples/logging_application_context_shared_examples.rb)0
-rw-r--r--spec/support/shared_examples/requests/api/members_shared_examples.rb (renamed from spec/support/api/members_shared_examples.rb)4
-rw-r--r--spec/support/shared_examples/requests/api/milestones_shared_examples.rb (renamed from spec/support/api/milestones_shared_examples.rb)74
-rw-r--r--spec/support/shared_examples/requests/api/notes_shared_examples.rb (renamed from spec/support/shared_examples/requests/api/notes.rb)56
-rw-r--r--spec/support/shared_examples/requests/api/pipelines/visibility_table_shared_examples.rb (renamed from spec/support/shared_examples/requests/api/pipelines/visibility_table_examples.rb)2
-rw-r--r--spec/support/shared_examples/requests/api/read_user_shared_examples.rb (renamed from spec/support/api/scopes/read_user_shared_examples.rb)18
-rw-r--r--spec/support/shared_examples/requests/api/repositories_shared_context.rb (renamed from spec/support/api/repositories_shared_context.rb)2
-rw-r--r--spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb (renamed from spec/support/shared_examples/requests/api/resolvable_discussions.rb)22
-rw-r--r--spec/support/shared_examples/requests/api/resource_label_events_api_shared_examples.rb (renamed from spec/support/shared_examples/resource_label_events_api.rb)16
-rw-r--r--spec/support/shared_examples/requests/api/status_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb (renamed from spec/support/api/time_tracking_shared_examples.rb)22
-rw-r--r--spec/support/shared_examples/requests/graphql_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/requests/lfs_http_shared_examples.rb (renamed from spec/support/shared_examples/lfs_http_shared_examples.rb)12
-rw-r--r--spec/support/shared_examples/requests/rack_attack_shared_examples.rb36
-rw-r--r--spec/support/shared_examples/requests/self_monitoring_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/routing/legacy_path_redirect_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb (renamed from spec/support/shared_examples/serializers/diff_file_entity_examples.rb)6
-rw-r--r--spec/support/shared_examples/serializers/note_entity_shared_examples.rb (renamed from spec/support/shared_examples/serializers/note_entity_examples.rb)2
-rw-r--r--spec/support/shared_examples/services/base_helm_service_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb (renamed from spec/support/shared_examples/services/boards/boards_create_service.rb)2
-rw-r--r--spec/support/shared_examples/services/boards/boards_list_service_shared_examples.rb (renamed from spec/support/shared_examples/services/boards/boards_list_service.rb)25
-rw-r--r--spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb (renamed from spec/support/shared_examples/services/boards/issues_list_service.rb)4
-rw-r--r--spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb (renamed from spec/support/shared_examples/services/boards/issues_move_service.rb)2
-rw-r--r--spec/support/shared_examples/services/boards/lists_destroy_service_shared_examples.rb (renamed from spec/support/shared_examples/services/boards/lists_destroy_service.rb)2
-rw-r--r--spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb (renamed from spec/support/shared_examples/services/boards/lists_list_service.rb)2
-rw-r--r--spec/support/shared_examples/services/boards/lists_move_service_shared_examples.rb (renamed from spec/support/shared_examples/services/boards/lists_move_service.rb)2
-rw-r--r--spec/support/shared_examples/services/check_ingress_ip_address_service_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/common_system_notes_shared_examples.rb (renamed from spec/support/shared_examples/common_system_notes_examples.rb)8
-rw-r--r--spec/support/shared_examples/services/count_service_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/error_tracking_service_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/services/gitlab_projects_import_service_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/issuable_shared_examples.rb (renamed from spec/support/shared_examples/issuable_shared_examples.rb)6
-rw-r--r--spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb47
-rw-r--r--spec/support/shared_examples/services/notification_service_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/services/pages_size_limit_shared_examples.rb (renamed from spec/support/shared_examples/pages_size_limit_shared_examples.rb)2
-rw-r--r--spec/support/shared_examples/services/updating_mentions_shared_examples.rb (renamed from spec/support/shared_examples/updating_mentions_shared_examples.rb)0
-rw-r--r--spec/support/shared_examples/snippets_shared_examples.rb20
-rw-r--r--spec/support/shared_examples/spam_check_shared_examples.rb20
-rw-r--r--spec/support/shared_examples/tasks/gitlab/import_export/import_measurement_shared_examples.rb31
-rw-r--r--spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/uploaders/object_storage_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/uploaders/upload_type_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/validators/url_validator_shared_examples.rb (renamed from spec/support/shared_examples/url_validator_examples.rb)0
-rw-r--r--spec/support/shared_examples/views/nav_sidebar.rb11
-rw-r--r--spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/workers/pages_domain_cron_worker_shared_examples.rb21
-rw-r--r--spec/support/sidekiq.rb16
-rw-r--r--spec/support_specs/helpers/active_record/query_recorder_spec.rb40
-rw-r--r--spec/tasks/gitlab/cleanup_rake_spec.rb6
-rw-r--r--spec/tasks/gitlab/import_export/import_rake_spec.rb7
-rw-r--r--spec/tasks/gitlab/seed/group_seed_rake_spec.rb26
-rw-r--r--spec/tasks/gitlab/storage_rake_spec.rb2
-rw-r--r--spec/uploaders/gitlab_uploader_spec.rb2
-rw-r--r--spec/uploaders/namespace_file_uploader_spec.rb2
-rw-r--r--spec/uploaders/workers/object_storage/migrate_uploads_worker_spec.rb2
-rw-r--r--spec/views/help/index.html.haml_spec.rb2
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb27
-rw-r--r--spec/views/notify/pipeline_failed_email.html.haml_spec.rb64
-rw-r--r--spec/views/notify/pipeline_failed_email.text.erb_spec.rb38
-rw-r--r--spec/views/projects/tree/_tree_header.html.haml_spec.rb13
-rw-r--r--spec/views/shared/projects/_list.html.haml_spec.rb78
-rw-r--r--spec/workers/background_migration_worker_spec.rb2
-rw-r--r--spec/workers/ci/create_cross_project_pipeline_worker_spec.rb36
-rw-r--r--spec/workers/ci/pipeline_bridge_status_worker_spec.rb38
-rw-r--r--spec/workers/cleanup_container_repository_worker_spec.rb49
-rw-r--r--spec/workers/cluster_configure_istio_worker_spec.rb41
-rw-r--r--spec/workers/concerns/cronjob_queue_spec.rb10
-rw-r--r--spec/workers/concerns/worker_context_spec.rb120
-rw-r--r--spec/workers/create_commit_signature_worker_spec.rb (renamed from spec/workers/create_gpg_signature_worker_spec.rb)47
-rw-r--r--spec/workers/environments/auto_stop_cron_worker_spec.rb17
-rw-r--r--spec/workers/error_tracking_issue_link_worker_spec.rb99
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb14
-rw-r--r--spec/workers/expire_pipeline_cache_worker_spec.rb15
-rw-r--r--spec/workers/gitlab/phabricator_import/base_worker_spec.rb (renamed from spec/lib/gitlab/phabricator_import/base_worker_spec.rb)0
-rw-r--r--spec/workers/gitlab/phabricator_import/import_tasks_worker_spec.rb (renamed from spec/lib/gitlab/phabricator_import/import_tasks_worker_spec.rb)0
-rw-r--r--spec/workers/group_import_worker_spec.rb29
-rw-r--r--spec/workers/incident_management/process_alert_worker_spec.rb39
-rw-r--r--spec/workers/mail_scheduler/notification_service_worker_spec.rb2
-rw-r--r--spec/workers/merge_request_mergeability_check_worker_spec.rb29
-rw-r--r--spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb8
-rw-r--r--spec/workers/pages_domain_verification_cron_worker_spec.rb8
-rw-r--r--spec/workers/reactive_caching_worker_spec.rb12
-rw-r--r--spec/workers/repository_fork_worker_spec.rb24
1560 files changed, 45479 insertions, 14699 deletions
diff --git a/spec/config/application_spec.rb b/spec/config/application_spec.rb
index 01ed81964c3..994cea4c84f 100644
--- a/spec/config/application_spec.rb
+++ b/spec/config/application_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Gitlab::Application do # rubocop:disable RSpec/FilePath
using RSpec::Parameterized::TableSyntax
- FILTERED_PARAM = ActionDispatch::Http::ParameterFilter::FILTERED
+ FILTERED_PARAM = ActiveSupport::ParameterFilter::FILTERED
context 'when parameters are logged' do
describe 'rails does not leak confidential parameters' do
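The hunk above swaps the FILTERED constant from ActionDispatch::Http::ParameterFilter to its ActiveSupport::ParameterFilter equivalent. A minimal standalone sketch of that Active Support class, using an illustrative filter key rather than GitLab's configuration:

require 'active_support'
require 'active_support/parameter_filter'

# The constant referenced by the spec above, and the filtering it stands for.
filter = ActiveSupport::ParameterFilter.new([:password])
p ActiveSupport::ParameterFilter::FILTERED            # => "[FILTERED]"
p filter.filter(username: 'jane', password: 'secret') # => {:username=>"jane", :password=>"[FILTERED]"}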
diff --git a/spec/config/mail_room_spec.rb b/spec/config/mail_room_spec.rb
index 94b29b89f24..fcef4e7a9b0 100644
--- a/spec/config/mail_room_spec.rb
+++ b/spec/config/mail_room_spec.rb
@@ -39,39 +39,31 @@ describe 'mail_room.yml' do
end
end
- context 'when incoming email is enabled' do
+ context 'when both incoming email and service desk email are enabled' do
let(:gitlab_config_path) { 'spec/fixtures/config/mail_room_enabled.yml' }
let(:queues_config_path) { 'spec/fixtures/config/redis_queues_new_format_host.yml' }
-
let(:gitlab_redis_queues) { Gitlab::Redis::Queues.new(Rails.env) }
it 'contains the intended configuration' do
- expect(configuration[:mailboxes].length).to eq(1)
- mailbox = configuration[:mailboxes].first
-
- expect(mailbox[:host]).to eq('imap.gmail.com')
- expect(mailbox[:port]).to eq(993)
- expect(mailbox[:ssl]).to eq(true)
- expect(mailbox[:start_tls]).to eq(false)
- expect(mailbox[:email]).to eq('gitlab-incoming@gmail.com')
- expect(mailbox[:password]).to eq('[REDACTED]')
- expect(mailbox[:name]).to eq('inbox')
- expect(mailbox[:idle_timeout]).to eq(60)
-
- redis_url = gitlab_redis_queues.url
- sentinels = gitlab_redis_queues.sentinels
-
- expect(mailbox[:delivery_options][:redis_url]).to be_present
- expect(mailbox[:delivery_options][:redis_url]).to eq(redis_url)
-
- expect(mailbox[:delivery_options][:sentinels]).to be_present
- expect(mailbox[:delivery_options][:sentinels]).to eq(sentinels)
-
- expect(mailbox[:arbitration_options][:redis_url]).to be_present
- expect(mailbox[:arbitration_options][:redis_url]).to eq(redis_url)
-
- expect(mailbox[:arbitration_options][:sentinels]).to be_present
- expect(mailbox[:arbitration_options][:sentinels]).to eq(sentinels)
+ expected_mailbox = {
+ host: 'imap.gmail.com',
+ port: 993,
+ ssl: true,
+ start_tls: false,
+ email: 'gitlab-incoming@gmail.com',
+ password: '[REDACTED]',
+ name: 'inbox',
+ idle_timeout: 60
+ }
+ expected_options = {
+ redis_url: gitlab_redis_queues.url,
+ sentinels: gitlab_redis_queues.sentinels
+ }
+
+ expect(configuration[:mailboxes].length).to eq(2)
+ expect(configuration[:mailboxes]).to all(include(expected_mailbox))
+ expect(configuration[:mailboxes].map { |m| m[:delivery_options] }).to all(include(expected_options))
+ expect(configuration[:mailboxes].map { |m| m[:arbitration_options] }).to all(include(expected_options))
end
end
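The rewritten mail_room expectation leans on RSpec's composable all/include matchers instead of asserting each key of a single mailbox. A small self-contained sketch of that pattern, with sample data rather than the real mail_room configuration:

require 'rspec/autorun'

RSpec.describe 'composed matchers' do
  it 'asserts shared keys across every mailbox hash' do
    mailboxes = [
      { host: 'imap.example.com', port: 993, name: 'inbox' },
      { host: 'imap.example.com', port: 993, name: 'support' }
    ]

    # `all` applies the nested matcher to each element; `include` tolerates
    # extra keys, so per-mailbox differences such as :name still pass.
    expect(mailboxes).to all(include(host: 'imap.example.com', port: 993))
  end
end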
diff --git a/spec/controllers/acme_challenges_controller_spec.rb b/spec/controllers/acme_challenges_controller_spec.rb
index cee06bed27b..be077a4b20d 100644
--- a/spec/controllers/acme_challenges_controller_spec.rb
+++ b/spec/controllers/acme_challenges_controller_spec.rb
@@ -28,7 +28,7 @@ describe AcmeChallengesController do
let(:token) { acme_order.challenge_token }
it 'renders not found' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -37,7 +37,7 @@ describe AcmeChallengesController do
let(:token) { 'wrongtoken' }
it 'renders not found' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
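Many hunks in this commit replace numeric codes such as 404 or 302 with symbols in have_gitlab_http_status. That matcher is GitLab's own, but symbolic statuses presumably resolve through Rack's standard lookup table, which can be inspected directly:

require 'rack/utils'

# The mapping assumed to sit behind the symbolic form used throughout the diff.
p Rack::Utils::SYMBOL_TO_STATUS_CODE[:ok]                   # => 200
p Rack::Utils::SYMBOL_TO_STATUS_CODE[:found]                # => 302
p Rack::Utils::SYMBOL_TO_STATUS_CODE[:not_found]            # => 404
p Rack::Utils::SYMBOL_TO_STATUS_CODE[:unprocessable_entity] # => 422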
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb
index fa575ba2eae..05c4743ed7f 100644
--- a/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/spec/controllers/admin/application_settings_controller_spec.rb
@@ -16,6 +16,7 @@ describe Admin::ApplicationSettingsController do
describe 'GET #usage_data with no access' do
before do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
sign_in(user)
end
@@ -28,6 +29,7 @@ describe Admin::ApplicationSettingsController do
describe 'GET #usage_data' do
before do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
sign_in(admin)
end
@@ -56,49 +58,49 @@ describe Admin::ApplicationSettingsController do
it 'updates the password_authentication_enabled_for_git setting' do
put :update, params: { application_setting: { password_authentication_enabled_for_git: "0" } }
- expect(response).to redirect_to(admin_application_settings_path)
+ expect(response).to redirect_to(general_admin_application_settings_path)
expect(ApplicationSetting.current.password_authentication_enabled_for_git).to eq(false)
end
it 'updates the default_project_visibility for string value' do
put :update, params: { application_setting: { default_project_visibility: "20" } }
- expect(response).to redirect_to(admin_application_settings_path)
+ expect(response).to redirect_to(general_admin_application_settings_path)
expect(ApplicationSetting.current.default_project_visibility).to eq(Gitlab::VisibilityLevel::PUBLIC)
end
it 'update the restricted levels for string values' do
put :update, params: { application_setting: { restricted_visibility_levels: %w[10 20] } }
- expect(response).to redirect_to(admin_application_settings_path)
+ expect(response).to redirect_to(general_admin_application_settings_path)
expect(ApplicationSetting.current.restricted_visibility_levels).to eq([10, 20])
end
it 'updates the restricted_visibility_levels when empty array is passed' do
put :update, params: { application_setting: { restricted_visibility_levels: [""] } }
- expect(response).to redirect_to(admin_application_settings_path)
+ expect(response).to redirect_to(general_admin_application_settings_path)
expect(ApplicationSetting.current.restricted_visibility_levels).to be_empty
end
it 'updates the receive_max_input_size setting' do
put :update, params: { application_setting: { receive_max_input_size: "1024" } }
- expect(response).to redirect_to(admin_application_settings_path)
+ expect(response).to redirect_to(general_admin_application_settings_path)
expect(ApplicationSetting.current.receive_max_input_size).to eq(1024)
end
it 'updates the default_project_creation for string value' do
put :update, params: { application_setting: { default_project_creation: ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS } }
- expect(response).to redirect_to(admin_application_settings_path)
+ expect(response).to redirect_to(general_admin_application_settings_path)
expect(ApplicationSetting.current.default_project_creation).to eq(::Gitlab::Access::MAINTAINER_PROJECT_ACCESS)
end
it 'updates minimum_password_length setting' do
put :update, params: { application_setting: { minimum_password_length: 10 } }
- expect(response).to redirect_to(admin_application_settings_path)
+ expect(response).to redirect_to(general_admin_application_settings_path)
expect(ApplicationSetting.current.minimum_password_length).to eq(10)
end
diff --git a/spec/controllers/admin/applications_controller_spec.rb b/spec/controllers/admin/applications_controller_spec.rb
index 2f3c7da484b..163a2033b58 100644
--- a/spec/controllers/admin/applications_controller_spec.rb
+++ b/spec/controllers/admin/applications_controller_spec.rb
@@ -16,7 +16,7 @@ describe Admin::ApplicationsController do
it 'renders the application form' do
get :index
- expect(response).to have_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -40,7 +40,7 @@ describe Admin::ApplicationsController do
describe 'POST #create' do
it 'creates the application' do
- create_params = attributes_for(:application, trusted: true)
+ create_params = attributes_for(:application, trusted: true, confidential: false)
expect do
post :create, params: { doorkeeper_application: create_params }
@@ -60,16 +60,34 @@ describe Admin::ApplicationsController do
expect(response).to render_template :new
expect(assigns[:scopes]).to be_kind_of(Doorkeeper::OAuth::Scopes)
end
+
+ context 'when the params are for a confidential application' do
+ it 'creates a confidential application' do
+ create_params = attributes_for(:application, confidential: true)
+
+ expect do
+ post :create, params: { doorkeeper_application: create_params }
+ end.to change { Doorkeeper::Application.count }.by(1)
+
+ application = Doorkeeper::Application.last
+
+ expect(response).to redirect_to(admin_application_path(application))
+ expect(application).to have_attributes(create_params.except(:uid, :owner_type))
+ end
+ end
end
describe 'PATCH #update' do
it 'updates the application' do
- patch :update, params: { id: application.id, doorkeeper_application: { redirect_uri: 'http://example.com/', trusted: true } }
+ doorkeeper_params = { redirect_uri: 'http://example.com/', trusted: true, confidential: false }
+
+ patch :update, params: { id: application.id, doorkeeper_application: doorkeeper_params }
application.reload
expect(response).to redirect_to(admin_application_path(application))
- expect(application).to have_attributes(redirect_uri: 'http://example.com/', trusted: true)
+ expect(application)
+ .to have_attributes(redirect_uri: 'http://example.com/', trusted: true, confidential: false)
end
it 'renders the application form on errors' do
@@ -78,5 +96,16 @@ describe Admin::ApplicationsController do
expect(response).to render_template :edit
expect(assigns[:scopes]).to be_kind_of(Doorkeeper::OAuth::Scopes)
end
+
+ context 'when updating the application to be confidential' do
+ it 'successfully sets the application to confidential' do
+ doorkeeper_params = { confidential: true }
+
+ patch :update, params: { id: application.id, doorkeeper_application: doorkeeper_params }
+
+ expect(response).to redirect_to(admin_application_path(application))
+ expect(application).to be_confidential
+ end
+ end
end
end
diff --git a/spec/controllers/admin/clusters/applications_controller_spec.rb b/spec/controllers/admin/clusters/applications_controller_spec.rb
index 9d6edcd80c0..44693505c4f 100644
--- a/spec/controllers/admin/clusters/applications_controller_spec.rb
+++ b/spec/controllers/admin/clusters/applications_controller_spec.rb
@@ -36,7 +36,7 @@ describe Admin::Clusters::ApplicationsController do
expect(ClusterInstallAppWorker).to receive(:perform_async).with(application, anything).once
expect { subject }.to change { current_application.count }
- expect(response).to have_http_status(:no_content)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(cluster.application_helm).to be_scheduled
end
@@ -47,7 +47,7 @@ describe Admin::Clusters::ApplicationsController do
it 'return 404' do
expect { subject }.not_to change { current_application.count }
- expect(response).to have_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -55,7 +55,7 @@ describe Admin::Clusters::ApplicationsController do
let(:application) { 'unkwnown-app' }
it 'return 404' do
- is_expected.to have_http_status(:not_found)
+ is_expected.to have_gitlab_http_status(:not_found)
end
end
@@ -65,7 +65,7 @@ describe Admin::Clusters::ApplicationsController do
end
it 'returns 400' do
- is_expected.to have_http_status(:bad_request)
+ is_expected.to have_gitlab_http_status(:bad_request)
end
end
end
@@ -99,7 +99,7 @@ describe Admin::Clusters::ApplicationsController do
it "schedules an application update" do
expect(ClusterPatchAppWorker).to receive(:perform_async).with(application.name, anything).once
- is_expected.to have_http_status(:no_content)
+ is_expected.to have_gitlab_http_status(:no_content)
expect(cluster.application_cert_manager).to be_scheduled
end
@@ -110,13 +110,13 @@ describe Admin::Clusters::ApplicationsController do
cluster.destroy!
end
- it { is_expected.to have_http_status(:not_found) }
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when application is unknown' do
let(:application_name) { 'unkwnown-app' }
- it { is_expected.to have_http_status(:not_found) }
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when application is already scheduled' do
@@ -124,7 +124,7 @@ describe Admin::Clusters::ApplicationsController do
application.make_scheduled!
end
- it { is_expected.to have_http_status(:bad_request) }
+ it { is_expected.to have_gitlab_http_status(:bad_request) }
end
end
diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb
index f27519496df..1f5c33d8022 100644
--- a/spec/controllers/admin/clusters_controller_spec.rb
+++ b/spec/controllers/admin/clusters_controller_spec.rb
@@ -567,7 +567,7 @@ describe Admin::ClustersController do
put_update(format: :json)
cluster.reload
- expect(response).to have_http_status(:no_content)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(cluster.enabled).to be_falsey
expect(cluster.name).to eq('my-new-cluster-name')
expect(cluster).not_to be_managed
@@ -587,7 +587,7 @@ describe Admin::ClustersController do
it 'rejects changes' do
put_update(format: :json)
- expect(response).to have_http_status(:bad_request)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
diff --git a/spec/controllers/admin/gitaly_servers_controller_spec.rb b/spec/controllers/admin/gitaly_servers_controller_spec.rb
index c75418a9ad4..db94ea06f59 100644
--- a/spec/controllers/admin/gitaly_servers_controller_spec.rb
+++ b/spec/controllers/admin/gitaly_servers_controller_spec.rb
@@ -11,7 +11,7 @@ describe Admin::GitalyServersController do
it 'shows the gitaly servers page' do
get :index
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/controllers/admin/hooks_controller_spec.rb b/spec/controllers/admin/hooks_controller_spec.rb
index 3c3a16ef9d5..9973ef93cd9 100644
--- a/spec/controllers/admin/hooks_controller_spec.rb
+++ b/spec/controllers/admin/hooks_controller_spec.rb
@@ -24,7 +24,7 @@ describe Admin::HooksController do
post :create, params: { hook: hook_params }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(SystemHook.all.size).to eq(1)
expect(SystemHook.first).to have_attributes(hook_params)
end
diff --git a/spec/controllers/admin/impersonations_controller_spec.rb b/spec/controllers/admin/impersonations_controller_spec.rb
index b44797b23e5..fa3923bca8c 100644
--- a/spec/controllers/admin/impersonations_controller_spec.rb
+++ b/spec/controllers/admin/impersonations_controller_spec.rb
@@ -24,7 +24,7 @@ describe Admin::ImpersonationsController do
it "responds with status 404" do
delete :destroy
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "doesn't sign us in" do
@@ -48,7 +48,7 @@ describe Admin::ImpersonationsController do
it "responds with status 404" do
delete :destroy
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "doesn't sign us in as the impersonator" do
@@ -67,7 +67,7 @@ describe Admin::ImpersonationsController do
it "responds with status 404" do
delete :destroy
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "doesn't sign us in as the impersonator" do
diff --git a/spec/controllers/admin/projects_controller_spec.rb b/spec/controllers/admin/projects_controller_spec.rb
index 6b996798b74..2d783dab621 100644
--- a/spec/controllers/admin/projects_controller_spec.rb
+++ b/spec/controllers/admin/projects_controller_spec.rb
@@ -29,7 +29,7 @@ describe Admin::ProjectsController do
get :index
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).not_to match(pending_delete_project.name)
expect(response.body).to match(project.name)
end
@@ -61,7 +61,7 @@ describe Admin::ProjectsController do
it 'renders show page' do
get :show, params: { namespace_id: project.namespace.path, id: project.path }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to match(project.name)
end
end
diff --git a/spec/controllers/admin/requests_profiles_controller_spec.rb b/spec/controllers/admin/requests_profiles_controller_spec.rb
index 853767199bc..13123c8e486 100644
--- a/spec/controllers/admin/requests_profiles_controller_spec.rb
+++ b/spec/controllers/admin/requests_profiles_controller_spec.rb
@@ -36,7 +36,7 @@ describe Admin::RequestsProfilesController do
it 'renders the data' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq(sample_data)
end
end
@@ -54,7 +54,7 @@ describe Admin::RequestsProfilesController do
it 'renders the data' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq(sample_data)
end
end
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index bbeda7dae0f..7582006df36 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Admin::RunnersController do
- let!(:runner) { create(:ci_runner) }
+ let_it_be(:runner) { create(:ci_runner) }
before do
sign_in(create(:admin))
@@ -15,7 +15,7 @@ describe Admin::RunnersController do
it 'lists all runners' do
get :index
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'avoids N+1 queries', :request_store do
@@ -29,23 +29,48 @@ describe Admin::RunnersController do
# We also need to add 1 because it takes 2 queries to preload tags
expect { get :index }.not_to exceed_query_limit(control_count + 6)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to have_content('tag1')
expect(response.body).to have_content('tag2')
end
end
describe '#show' do
+ render_views
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project_two) { create(:project) }
+
+ before_all do
+ create(:ci_build, runner: runner, project: project)
+ create(:ci_build, runner: runner, project: project_two)
+ end
+
it 'shows a particular runner' do
get :show, params: { id: runner.id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'shows 404 for unknown runner' do
get :show, params: { id: 0 }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'avoids N+1 queries', :request_store do
+ get :show, params: { id: runner.id }
+
+ control_count = ActiveRecord::QueryRecorder.new { get :show, params: { id: runner.id } }.count
+
+ new_project = create(:project)
+ create(:ci_build, runner: runner, project: new_project)
+
+ # There is one additional query looking up subject.group in ProjectPolicy for the
+ # needs_new_sso_session permission
+ expect { get :show, params: { id: runner.id } }.not_to exceed_query_limit(control_count + 1)
+
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -59,7 +84,7 @@ describe Admin::RunnersController do
runner.reload
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(runner.description).to eq(new_desc)
end
end
@@ -68,7 +93,7 @@ describe Admin::RunnersController do
it 'destroys the runner' do
delete :destroy, params: { id: runner.id }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(Ci::Runner.find_by(id: runner.id)).to be_nil
end
end
@@ -83,7 +108,7 @@ describe Admin::RunnersController do
runner.reload
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(runner.active).to eq(true)
end
end
@@ -98,7 +123,7 @@ describe Admin::RunnersController do
runner.reload
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(runner.active).to eq(false)
end
end
diff --git a/spec/controllers/admin/serverless/domains_controller_spec.rb b/spec/controllers/admin/serverless/domains_controller_spec.rb
new file mode 100644
index 00000000000..aed83e190be
--- /dev/null
+++ b/spec/controllers/admin/serverless/domains_controller_spec.rb
@@ -0,0 +1,298 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Admin::Serverless::DomainsController do
+ let(:admin) { create(:admin) }
+ let(:user) { create(:user) }
+
+ describe '#index' do
+ context 'non-admin user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'responds with 404' do
+ get :index
+
+ expect(response.status).to eq(404)
+ end
+ end
+
+ context 'admin user' do
+ before do
+ create(:pages_domain)
+ sign_in(admin)
+ end
+
+ context 'with serverless_domain feature disabled' do
+ before do
+ stub_feature_flags(serverless_domain: false)
+ end
+
+ it 'responds with 404' do
+ get :index
+
+ expect(response.status).to eq(404)
+ end
+ end
+
+ context 'when instance-level serverless domain exists' do
+ let!(:serverless_domain) { create(:pages_domain, :instance_serverless) }
+
+ it 'loads the instance serverless domain' do
+ get :index
+
+ expect(assigns(:domain).id).to eq(serverless_domain.id)
+ end
+ end
+
+ context 'when domain does not exist' do
+ it 'initializes an instance serverless domain' do
+ get :index
+
+ domain = assigns(:domain)
+
+ expect(domain.persisted?).to eq(false)
+ expect(domain.wildcard).to eq(true)
+ expect(domain.scope).to eq('instance')
+ expect(domain.usage).to eq('serverless')
+ end
+ end
+ end
+ end
+
+ describe '#create' do
+ let(:create_params) do
+ sample_domain = build(:pages_domain)
+
+ {
+ domain: 'serverless.gitlab.io',
+ user_provided_certificate: sample_domain.certificate,
+ user_provided_key: sample_domain.key
+ }
+ end
+
+ context 'non-admin user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'responds with 404' do
+ post :create, params: { pages_domain: create_params }
+
+ expect(response.status).to eq(404)
+ end
+ end
+
+ context 'admin user' do
+ before do
+ sign_in(admin)
+ end
+
+ context 'with serverless_domain feature disabled' do
+ before do
+ stub_feature_flags(serverless_domain: false)
+ end
+
+ it 'responds with 404' do
+ post :create, params: { pages_domain: create_params }
+
+ expect(response.status).to eq(404)
+ end
+ end
+
+ context 'when an instance-level serverless domain exists' do
+ let!(:serverless_domain) { create(:pages_domain, :instance_serverless) }
+
+ it 'does not create a new domain' do
+ expect { post :create, params: { pages_domain: create_params } }.not_to change { PagesDomain.instance_serverless.count }
+ end
+
+ it 'redirects to index' do
+ post :create, params: { pages_domain: create_params }
+
+ expect(response).to redirect_to admin_serverless_domains_path
+ expect(flash[:notice]).to include('An instance-level serverless domain already exists.')
+ end
+ end
+
+ context 'when an instance-level serverless domain does not exist' do
+ it 'creates an instance serverless domain with the provided attributes' do
+ expect { post :create, params: { pages_domain: create_params } }.to change { PagesDomain.instance_serverless.count }.by(1)
+
+ domain = PagesDomain.instance_serverless.first
+ expect(domain.domain).to eq(create_params[:domain])
+ expect(domain.certificate).to eq(create_params[:user_provided_certificate])
+ expect(domain.key).to eq(create_params[:user_provided_key])
+ expect(domain.wildcard).to eq(true)
+ expect(domain.scope).to eq('instance')
+ expect(domain.usage).to eq('serverless')
+ end
+
+ it 'redirects to index' do
+ post :create, params: { pages_domain: create_params }
+
+ expect(response).to redirect_to admin_serverless_domains_path
+ expect(flash[:notice]).to include('Domain was successfully created.')
+ end
+ end
+
+ context 'when there are errors' do
+ it 'renders index view' do
+ post :create, params: { pages_domain: { foo: 'bar' } }
+
+ expect(assigns(:domain).errors.size).to be > 0
+ expect(response).to render_template('index')
+ end
+ end
+ end
+ end
+
+ describe '#update' do
+ let(:domain) { create(:pages_domain, :instance_serverless) }
+
+ let(:update_params) do
+ sample_domain = build(:pages_domain)
+
+ {
+ user_provided_certificate: sample_domain.certificate,
+ user_provided_key: sample_domain.key
+ }
+ end
+
+ context 'non-admin user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'responds with 404' do
+ put :update, params: { id: domain.id, pages_domain: update_params }
+
+ expect(response.status).to eq(404)
+ end
+ end
+
+ context 'admin user' do
+ before do
+ sign_in(admin)
+ end
+
+ context 'with serverless_domain feature disabled' do
+ before do
+ stub_feature_flags(serverless_domain: false)
+ end
+
+ it 'responds with 404' do
+ put :update, params: { id: domain.id, pages_domain: update_params }
+
+ expect(response.status).to eq(404)
+ end
+ end
+
+ context 'when domain exists' do
+ it 'updates the domain with the provided attributes' do
+ new_certificate = build(:pages_domain, :ecdsa).certificate
+ new_key = build(:pages_domain, :ecdsa).key
+
+ put :update, params: { id: domain.id, pages_domain: { user_provided_certificate: new_certificate, user_provided_key: new_key } }
+
+ domain.reload
+
+ expect(domain.certificate).to eq(new_certificate)
+ expect(domain.key).to eq(new_key)
+ end
+
+ it 'does not update the domain name' do
+ put :update, params: { id: domain.id, pages_domain: { domain: 'new.com' } }
+
+ expect(domain.reload.domain).not_to eq('new.com')
+ end
+
+ it 'redirects to index' do
+ put :update, params: { id: domain.id, pages_domain: update_params }
+
+ expect(response).to redirect_to admin_serverless_domains_path
+ expect(flash[:notice]).to include('Domain was successfully updated.')
+ end
+ end
+
+ context 'when domain does not exist' do
+ it 'returns 404' do
+ put :update, params: { id: 0, pages_domain: update_params }
+
+ expect(response.status).to eq(404)
+ end
+ end
+
+ context 'when there are errors' do
+ it 'renders index view' do
+ put :update, params: { id: domain.id, pages_domain: { user_provided_certificate: 'bad certificate' } }
+
+ expect(assigns(:domain).errors.size).to be > 0
+ expect(response).to render_template('index')
+ end
+ end
+ end
+ end
+
+ describe '#verify' do
+ let(:domain) { create(:pages_domain, :instance_serverless) }
+
+ context 'non-admin user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'responds with 404' do
+ post :verify, params: { id: domain.id }
+
+ expect(response.status).to eq(404)
+ end
+ end
+
+ context 'admin user' do
+ before do
+ sign_in(admin)
+ end
+
+ def stub_service
+ service = double(:service)
+
+ expect(VerifyPagesDomainService).to receive(:new).with(domain).and_return(service)
+
+ service
+ end
+
+ context 'with serverless_domain feature disabled' do
+ before do
+ stub_feature_flags(serverless_domain: false)
+ end
+
+ it 'responds with 404' do
+ post :verify, params: { id: domain.id }
+
+ expect(response.status).to eq(404)
+ end
+ end
+
+ it 'handles verification success' do
+ expect(stub_service).to receive(:execute).and_return(status: :success)
+
+ post :verify, params: { id: domain.id }
+
+ expect(response).to redirect_to admin_serverless_domains_path
+ expect(flash[:notice]).to eq('Successfully verified domain ownership')
+ end
+
+ it 'handles verification failure' do
+ expect(stub_service).to receive(:execute).and_return(status: :failed)
+
+ post :verify, params: { id: domain.id }
+
+ expect(response).to redirect_to admin_serverless_domains_path
+ expect(flash[:alert]).to eq('Failed to verify domain ownership')
+ end
+ end
+ end
+end
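The #verify examples in the new spec above stub the collaborating service's constructor and hand back a canned double. A generic, runnable sketch of that stubbing pattern; the class name and argument below are illustrative, not GitLab's:

require 'rspec/autorun'

class VerificationService
  def initialize(domain); end

  def execute
    { status: :success }
  end
end

RSpec.describe 'stubbing a collaborator constructor' do
  it 'returns the canned result from the injected double' do
    # The double replaces the real service; the constructor stub injects it.
    service = double(:service, execute: { status: :failed })
    expect(VerificationService).to receive(:new).with('example.com').and_return(service)

    result = VerificationService.new('example.com').execute

    expect(result).to eq(status: :failed)
  end
end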
diff --git a/spec/controllers/admin/services_controller_spec.rb b/spec/controllers/admin/services_controller_spec.rb
index 1c518dab11e..44233776865 100644
--- a/spec/controllers/admin/services_controller_spec.rb
+++ b/spec/controllers/admin/services_controller_spec.rb
@@ -22,7 +22,7 @@ describe Admin::ServicesController do
it 'successfully displays the template' do
get :edit, params: { id: service.id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -48,7 +48,7 @@ describe Admin::ServicesController do
put :update, params: { id: service.id, service: { active: true } }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'does not call the propagation worker when service is not active' do
@@ -56,7 +56,7 @@ describe Admin::ServicesController do
put :update, params: { id: service.id, service: { properties: {} } }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
diff --git a/spec/controllers/admin/sessions_controller_spec.rb b/spec/controllers/admin/sessions_controller_spec.rb
index be996aee1d2..4bab6b51102 100644
--- a/spec/controllers/admin/sessions_controller_spec.rb
+++ b/spec/controllers/admin/sessions_controller_spec.rb
@@ -124,7 +124,7 @@ describe Admin::SessionsController, :do_not_mock_admin_mode do
it 'shows error page' do
post :destroy
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(controller.current_user_mode.admin_mode?).to be(false)
end
end
diff --git a/spec/controllers/admin/spam_logs_controller_spec.rb b/spec/controllers/admin/spam_logs_controller_spec.rb
index baf4216dcde..ec0d8c47660 100644
--- a/spec/controllers/admin/spam_logs_controller_spec.rb
+++ b/spec/controllers/admin/spam_logs_controller_spec.rb
@@ -16,7 +16,7 @@ describe Admin::SpamLogsController do
it 'lists all spam logs' do
get :index
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -24,14 +24,14 @@ describe Admin::SpamLogsController do
it 'removes only the spam log when removing log' do
expect { delete :destroy, params: { id: first_spam.id } }.to change { SpamLog.count }.by(-1)
expect(User.find(user.id)).to be_truthy
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
- it 'removes user and his spam logs when removing the user', :sidekiq_might_not_need_inline do
+ it 'removes user and their spam logs when removing the user', :sidekiq_might_not_need_inline do
delete :destroy, params: { id: first_spam.id, remove_user: true }
expect(flash[:notice]).to eq "User #{user.username} was successfully removed."
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(SpamLog.count).to eq(0)
expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
@@ -39,14 +39,14 @@ describe Admin::SpamLogsController do
describe '#mark_as_ham' do
before do
- allow_next_instance_of(AkismetService) do |instance|
+ allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive(:submit_ham).and_return(true)
end
end
it 'submits the log as ham' do
post :mark_as_ham, params: { id: first_spam.id }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(SpamLog.find(first_spam.id).submitted_as_ham).to be_truthy
end
end
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index ebdfbe14dec..a4ce510b413 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -47,7 +47,7 @@ describe Admin::UsersController do
it 'deletes user and ghosts their contributions' do
delete :destroy, params: { id: user.username }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(User.exists?(user.id)).to be_falsy
expect(issue.reload.author).to be_ghost
end
@@ -55,7 +55,7 @@ describe Admin::UsersController do
it 'deletes the user and their contributions when hard delete is specified' do
delete :destroy, params: { id: user.username, hard_delete: true }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(User.exists?(user.id)).to be_falsy
expect(Issue.exists?(issue.id)).to be_falsy
end
@@ -270,7 +270,7 @@ describe Admin::UsersController do
post :update, params: params
end
- context 'when the admin changes his own password' do
+ context 'when the admin changes their own password' do
it 'updates the password' do
expect { update_password(admin, 'AValidPassword1') }
.to change { admin.reload.encrypted_password }
@@ -399,7 +399,7 @@ describe Admin::UsersController do
it "shows error page" do
post :impersonate, params: { id: user.username }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 0c299dcda34..bdac7369780 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -156,7 +156,7 @@ describe ApplicationController do
it 'returns 200 response' do
get :index, format: requested_format
- expect(response).to have_gitlab_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -164,7 +164,7 @@ describe ApplicationController do
it 'returns 404 response' do
get :index
- expect(response).to have_gitlab_http_status 404
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -181,7 +181,7 @@ describe ApplicationController do
get :index
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'redirects to login page if not authenticated' do
@@ -202,7 +202,7 @@ describe ApplicationController do
get :index, format: 'unknown'
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -489,7 +489,7 @@ describe ApplicationController do
it 'redirects if the user did not accept the terms' do
get :index
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'does not redirect when the user accepted terms' do
@@ -497,7 +497,7 @@ describe ApplicationController do
get :index
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -581,21 +581,21 @@ describe ApplicationController do
it 'renders a 404 without a message' do
get :index
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response).to render_template('errors/not_found')
end
it 'renders a 403 when a message is passed to access denied' do
get :index, params: { message: 'None shall pass' }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(response).to render_template('errors/access_denied')
end
it 'renders a status passed to access denied' do
get :index, params: { status: 401 }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -896,7 +896,7 @@ describe ApplicationController do
end
end
- context '#set_current_context' do
+ describe '#set_current_context' do
controller(described_class) do
def index
Labkit::Context.with_context do |context|
@@ -924,7 +924,7 @@ describe ApplicationController do
end
it 'sets the group if it was available' do
- group = build_stubbed(:group)
+ group = build(:group)
controller.instance_variable_set(:@group, group)
get :index, format: :json
@@ -933,12 +933,18 @@ describe ApplicationController do
end
it 'sets the project if one was available' do
- project = build_stubbed(:project)
+ project = build(:project)
controller.instance_variable_set(:@project, project)
get :index, format: :json
expect(json_response['meta.project']).to eq(project.full_path)
end
+
+ it 'sets the caller_id as controller#action' do
+ get :index, format: :json
+
+ expect(json_response['meta.caller_id']).to eq('AnonymousController#index')
+ end
end
end
diff --git a/spec/controllers/autocomplete_controller_spec.rb b/spec/controllers/autocomplete_controller_spec.rb
index 51f20bae880..1ebbeecc583 100644
--- a/spec/controllers/autocomplete_controller_spec.rb
+++ b/spec/controllers/autocomplete_controller_spec.rb
@@ -32,7 +32,7 @@ describe AutocompleteController do
get(:users, params: { project_id: 'unknown' })
end
- it { expect(response).to have_gitlab_http_status(404) }
+ it { expect(response).to have_gitlab_http_status(:not_found) }
end
end
@@ -61,7 +61,7 @@ describe AutocompleteController do
get(:users, params: { group_id: 'unknown' })
end
- it { expect(response).to have_gitlab_http_status(404) }
+ it { expect(response).to have_gitlab_http_status(:not_found) }
end
end
@@ -140,7 +140,7 @@ describe AutocompleteController do
get(:users, params: { project_id: project.id })
end
- it { expect(response).to have_gitlab_http_status(404) }
+ it { expect(response).to have_gitlab_http_status(:not_found) }
end
describe 'GET #users with unknown project' do
@@ -148,7 +148,7 @@ describe AutocompleteController do
get(:users, params: { project_id: 'unknown' })
end
- it { expect(response).to have_gitlab_http_status(404) }
+ it { expect(response).to have_gitlab_http_status(:not_found) }
end
describe 'GET #users with inaccessible group' do
@@ -157,7 +157,7 @@ describe AutocompleteController do
get(:users, params: { group_id: user.namespace.id })
end
- it { expect(response).to have_gitlab_http_status(404) }
+ it { expect(response).to have_gitlab_http_status(:not_found) }
end
describe 'GET #users with no project' do
@@ -372,7 +372,7 @@ describe AutocompleteController do
it 'returns empty json' do
get :merge_request_target_branches, params: { project_id: project.id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
end
end
@@ -383,7 +383,7 @@ describe AutocompleteController do
get :merge_request_target_branches, params: { project_id: project.id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
end
end
@@ -404,7 +404,7 @@ describe AutocompleteController do
get :merge_request_target_branches, params: params
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to eq({ 'error' => 'At least one of group_id or project_id must be specified' })
end
end
@@ -416,7 +416,7 @@ describe AutocompleteController do
get :merge_request_target_branches, params: { project_id: project.id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to contain_exactly({ 'title' => 'feature' })
end
end
@@ -433,7 +433,7 @@ describe AutocompleteController do
get :merge_request_target_branches, params: { group_id: group.id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to contain_exactly({ 'title' => 'feature' })
end
end
diff --git a/spec/controllers/boards/issues_controller_spec.rb b/spec/controllers/boards/issues_controller_spec.rb
index d54f7ad33cf..605fff60c31 100644
--- a/spec/controllers/boards/issues_controller_spec.rb
+++ b/spec/controllers/boards/issues_controller_spec.rb
@@ -28,7 +28,7 @@ describe Boards::IssuesController do
it 'returns a not found 404 response' do
list_issues user: user, board: 999, list: list2
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -106,7 +106,7 @@ describe Boards::IssuesController do
it 'returns a not found 404 response' do
list_issues user: user, board: board, list: 999
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -132,7 +132,7 @@ describe Boards::IssuesController do
it 'returns a forbidden 403 response' do
list_issues user: unauth_user, board: board, list: list2
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -148,7 +148,7 @@ describe Boards::IssuesController do
list_issues(user: user, board: group_board)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'is successful for project boards' do
@@ -156,7 +156,7 @@ describe Boards::IssuesController do
list_issues(user: user, board: project_board)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -215,7 +215,7 @@ describe Boards::IssuesController do
expect(response).to have_gitlab_http_status(expected_status)
list_issues user: requesting_user, board: board, list: list2
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('entities/issue_boards')
@@ -391,7 +391,7 @@ describe Boards::IssuesController do
it 'returns a successful 200 response' do
create_issue user: user, board: board, list: list1, title: 'New issue'
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns the created issue' do
@@ -406,7 +406,7 @@ describe Boards::IssuesController do
it 'returns an unprocessable entity 422 response' do
create_issue user: user, board: board, list: list1, title: nil
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
@@ -416,7 +416,7 @@ describe Boards::IssuesController do
create_issue user: user, board: board, list: list, title: 'New issue'
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -424,7 +424,7 @@ describe Boards::IssuesController do
it 'returns a not found 404 response' do
create_issue user: user, board: 999, list: list1, title: 'New issue'
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -432,7 +432,7 @@ describe Boards::IssuesController do
it 'returns a not found 404 response' do
create_issue user: user, board: board, list: 999, title: 'New issue'
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -443,7 +443,7 @@ describe Boards::IssuesController do
open_list = board.lists.create(list_type: :backlog)
create_issue user: guest, board: board, list: open_list, title: 'New issue'
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -451,7 +451,7 @@ describe Boards::IssuesController do
it 'returns a forbidden 403 response' do
create_issue user: guest, board: board, list: list1, title: 'New issue'
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -475,7 +475,7 @@ describe Boards::IssuesController do
it 'returns a successful 200 response' do
move user: user, board: board, issue: issue, from_list_id: list1.id, to_list_id: list2.id
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'moves issue to the desired list' do
@@ -489,19 +489,19 @@ describe Boards::IssuesController do
it 'returns a unprocessable entity 422 response for invalid lists' do
move user: user, board: board, issue: issue, from_list_id: nil, to_list_id: nil
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
it 'returns a not found 404 response for invalid board id' do
move user: user, board: 999, issue: issue, from_list_id: list1.id, to_list_id: list2.id
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a not found 404 response for invalid issue id' do
move user: user, board: board, issue: double(id: 999), from_list_id: list1.id, to_list_id: list2.id
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -515,7 +515,7 @@ describe Boards::IssuesController do
it 'returns a forbidden 403 response' do
move user: guest, board: board, issue: issue, from_list_id: list1.id, to_list_id: list2.id
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
diff --git a/spec/controllers/boards/lists_controller_spec.rb b/spec/controllers/boards/lists_controller_spec.rb
index bc46d02556b..3886388bcf4 100644
--- a/spec/controllers/boards/lists_controller_spec.rb
+++ b/spec/controllers/boards/lists_controller_spec.rb
@@ -21,7 +21,7 @@ describe Boards::ListsController do
it 'returns a successful 200 response' do
read_board_list user: user, board: board
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'application/json'
end
@@ -50,7 +50,7 @@ describe Boards::ListsController do
it 'returns a forbidden 403 response' do
read_board_list user: unauth_user, board: board
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -73,7 +73,7 @@ describe Boards::ListsController do
it 'returns a successful 200 response' do
create_board_list user: user, board: board, label_id: label.id
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns the created list' do
@@ -88,7 +88,7 @@ describe Boards::ListsController do
it 'returns a not found 404 response' do
create_board_list user: user, board: board, label_id: nil
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -98,7 +98,7 @@ describe Boards::ListsController do
create_board_list user: user, board: board, label_id: label.id
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -109,7 +109,7 @@ describe Boards::ListsController do
create_board_list user: guest, board: board, label_id: label.id
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -134,7 +134,7 @@ describe Boards::ListsController do
it 'returns a successful 200 response' do
move user: user, board: board, list: planning, position: 1
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'moves the list to the desired position' do
@@ -148,7 +148,7 @@ describe Boards::ListsController do
it 'returns an unprocessable entity 422 response' do
move user: user, board: board, list: planning, position: 6
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
@@ -156,7 +156,7 @@ describe Boards::ListsController do
it 'returns a not found 404 response' do
move user: user, board: board, list: 999, position: 1
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -164,7 +164,7 @@ describe Boards::ListsController do
it 'returns a 422 unprocessable entity response' do
move user: guest, board: board, list: planning, position: 6
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
@@ -173,14 +173,14 @@ describe Boards::ListsController do
save_setting user: user, board: board, list: planning, setting: { collapsed: true }
expect(planning.preferences_for(user).collapsed).to eq(true)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'saves not collapsed preference for user' do
save_setting user: user, board: board, list: planning, setting: { collapsed: false }
expect(planning.preferences_for(user).collapsed).to eq(false)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -191,14 +191,14 @@ describe Boards::ListsController do
save_setting user: user, board: board, list: closed, setting: { collapsed: true }
expect(closed.preferences_for(user).collapsed).to eq(true)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'saves not collapsed preference for user' do
save_setting user: user, board: board, list: closed, setting: { collapsed: false }
expect(closed.preferences_for(user).collapsed).to eq(false)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -236,7 +236,7 @@ describe Boards::ListsController do
it 'returns a successful 200 response' do
remove_board_list user: user, board: board, list: planning
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'removes list from board' do
@@ -248,7 +248,7 @@ describe Boards::ListsController do
it 'returns a not found 404 response' do
remove_board_list user: user, board: board, list: 999
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -256,7 +256,7 @@ describe Boards::ListsController do
it 'returns a forbidden 403 response' do
remove_board_list user: guest, board: board, list: planning
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -278,7 +278,7 @@ describe Boards::ListsController do
it 'returns a successful 200 response' do
generate_default_lists user: user, board: board
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns the defaults lists' do
@@ -294,7 +294,7 @@ describe Boards::ListsController do
generate_default_lists user: user, board: board
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
@@ -302,7 +302,7 @@ describe Boards::ListsController do
it 'returns a forbidden 403 response' do
generate_default_lists user: guest, board: board
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
diff --git a/spec/controllers/chaos_controller_spec.rb b/spec/controllers/chaos_controller_spec.rb
index bafd4a70862..5812990ce7a 100644
--- a/spec/controllers/chaos_controller_spec.rb
+++ b/spec/controllers/chaos_controller_spec.rb
@@ -9,7 +9,7 @@ describe ChaosController do
get :leakmem
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'call synchronously with params' do
@@ -17,7 +17,7 @@ describe ChaosController do
get :leakmem, params: { memory_mb: 1, duration_s: 2 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'calls asynchronously' do
@@ -25,7 +25,7 @@ describe ChaosController do
get :leakmem, params: { async: 1 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -35,7 +35,7 @@ describe ChaosController do
get :cpu_spin
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'calls synchronously with params' do
@@ -43,7 +43,7 @@ describe ChaosController do
get :cpu_spin, params: { duration_s: 3 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'calls asynchronously' do
@@ -51,7 +51,7 @@ describe ChaosController do
get :cpu_spin, params: { async: 1 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -61,7 +61,7 @@ describe ChaosController do
get :db_spin
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'calls synchronously with params' do
@@ -69,7 +69,7 @@ describe ChaosController do
get :db_spin, params: { duration_s: 4, interval_s: 5 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'calls asynchronously' do
@@ -77,7 +77,7 @@ describe ChaosController do
get :db_spin, params: { async: 1 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -87,7 +87,7 @@ describe ChaosController do
get :sleep
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'calls synchronously with params' do
@@ -95,7 +95,7 @@ describe ChaosController do
get :sleep, params: { duration_s: 5 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'calls asynchronously' do
@@ -103,7 +103,7 @@ describe ChaosController do
get :sleep, params: { async: 1 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -113,7 +113,7 @@ describe ChaosController do
get :kill
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'calls asynchronously' do
@@ -121,7 +121,7 @@ describe ChaosController do
get :kill, params: { async: 1 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/controllers/concerns/confirm_email_warning_spec.rb b/spec/controllers/concerns/confirm_email_warning_spec.rb
index 56a6efab8ed..2aad380203b 100644
--- a/spec/controllers/concerns/confirm_email_warning_spec.rb
+++ b/spec/controllers/concerns/confirm_email_warning_spec.rb
@@ -3,11 +3,6 @@
require 'spec_helper'
describe ConfirmEmailWarning do
- before do
- stub_feature_flags(soft_email_confirmation: true)
- allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
- end
-
controller(ApplicationController) do
# `described_class` is not available in this context
include ConfirmEmailWarning
@@ -52,15 +47,6 @@ describe ConfirmEmailWarning do
context 'with an unconfirmed user' do
let(:user) { create(:user, confirmed_at: nil) }
- context 'when executing a peek request' do
- before do
- request.path = '/-/peek'
- get :index
- end
-
- it { is_expected.not_to set_confirm_warning_for(user.email) }
- end
-
context 'when executing a json request' do
before do
get :index, format: :json
diff --git a/spec/controllers/concerns/controller_with_cross_project_access_check_spec.rb b/spec/controllers/concerns/controller_with_cross_project_access_check_spec.rb
index e47f1650b1f..85989ea3e92 100644
--- a/spec/controllers/concerns/controller_with_cross_project_access_check_spec.rb
+++ b/spec/controllers/concerns/controller_with_cross_project_access_check_spec.rb
@@ -51,7 +51,7 @@ describe ControllerWithCrossProjectAccessCheck do
get :index
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(response.body).to match(/#{message}/)
end
@@ -60,7 +60,7 @@ describe ControllerWithCrossProjectAccessCheck do
get :index
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'is skipped when the `unless` condition returns true' do
@@ -68,13 +68,13 @@ describe ControllerWithCrossProjectAccessCheck do
get :index
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'correctly renders an action that does not require cross project access' do
get :show, params: { id: 'nothing' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -113,7 +113,7 @@ describe ControllerWithCrossProjectAccessCheck do
it 'renders a success when the check is skipped' do
get :index
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'is executed when the `if` condition returns false' do
@@ -121,7 +121,7 @@ describe ControllerWithCrossProjectAccessCheck do
get :index
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'is executed when the `unless` condition returns true' do
@@ -129,19 +129,19 @@ describe ControllerWithCrossProjectAccessCheck do
get :index
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not skip the check on an action that is not skipped' do
get :show, params: { id: 'hello' }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not skip the check on an action that was not defined to skip' do
get :edit, params: { id: 'hello' }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
diff --git a/spec/controllers/concerns/enforces_admin_authentication_spec.rb b/spec/controllers/concerns/enforces_admin_authentication_spec.rb
index 019a21e8cf0..a8494543558 100644
--- a/spec/controllers/concerns/enforces_admin_authentication_spec.rb
+++ b/spec/controllers/concerns/enforces_admin_authentication_spec.rb
@@ -39,7 +39,7 @@ describe EnforcesAdminAuthentication, :do_not_mock_admin_mode do
it 'renders ok' do
get :index
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -48,7 +48,7 @@ describe EnforcesAdminAuthentication, :do_not_mock_admin_mode do
it 'renders a 404' do
get :index
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'does not set admin mode' do
@@ -75,7 +75,7 @@ describe EnforcesAdminAuthentication, :do_not_mock_admin_mode do
let(:user) { create(:admin) }
it 'allows direct access to page' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'does not set admin mode' do
@@ -85,7 +85,7 @@ describe EnforcesAdminAuthentication, :do_not_mock_admin_mode do
context 'as a user' do
it 'renders a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'does not set admin mode' do
diff --git a/spec/controllers/concerns/lfs_request_spec.rb b/spec/controllers/concerns/lfs_request_spec.rb
index 584448e68f9..67c81156ca6 100644
--- a/spec/controllers/concerns/lfs_request_spec.rb
+++ b/spec/controllers/concerns/lfs_request_spec.rb
@@ -5,13 +5,11 @@ require 'spec_helper'
describe LfsRequest do
include ProjectForksHelper
- controller(Projects::GitHttpClientController) do
+ controller(Repositories::GitHttpClientController) do
# `described_class` is not available in this context
include LfsRequest
def show
- storage_project
-
head :ok
end
@@ -38,22 +36,6 @@ describe LfsRequest do
stub_lfs_setting(enabled: true)
end
- describe '#storage_project' do
- it 'assigns the project as storage project' do
- get :show, params: { id: project.id }
-
- expect(assigns(:storage_project)).to eq(project)
- end
-
- it 'assigns the source of a forked project' do
- forked_project = fork_project(project)
-
- get :show, params: { id: forked_project.id }
-
- expect(assigns(:storage_project)).to eq(project)
- end
- end
-
context 'user is authenticated without access to lfs' do
before do
allow(controller).to receive(:authenticate_user)
diff --git a/spec/controllers/concerns/metrics_dashboard_spec.rb b/spec/controllers/concerns/metrics_dashboard_spec.rb
index 389d264bed3..4e42171e3d3 100644
--- a/spec/controllers/concerns/metrics_dashboard_spec.rb
+++ b/spec/controllers/concerns/metrics_dashboard_spec.rb
@@ -23,7 +23,7 @@ describe MetricsDashboard do
routes.draw { get "metrics_dashboard" => "anonymous#metrics_dashboard" }
response = get :metrics_dashboard, format: :json
- JSON.parse(response.parsed_body)
+ response.parsed_body
end
context 'when no parameters are provided' do
@@ -45,6 +45,7 @@ describe MetricsDashboard do
it 'returns the specified dashboard' do
expect(json_response['dashboard']['dashboard']).to eq('Environment metrics')
expect(json_response).not_to have_key('all_dashboards')
+ expect(json_response).not_to have_key('metrics_data')
end
context 'when the params are in an alternate format' do
@@ -53,6 +54,25 @@ describe MetricsDashboard do
it 'returns the specified dashboard' do
expect(json_response['dashboard']['dashboard']).to eq('Environment metrics')
expect(json_response).not_to have_key('all_dashboards')
+ expect(json_response).not_to have_key('metrics_data')
+ end
+ end
+
+ context 'when environment for dashboard is available' do
+ let(:params) { { environment: environment } }
+
+ before do
+ allow(controller).to receive(:project).and_return(project)
+ allow(controller).to receive(:environment).and_return(environment)
+ allow(controller)
+ .to receive(:metrics_dashboard_params)
+ .and_return(params)
+ end
+
+ it 'returns the specified dashboard' do
+ expect(json_response['dashboard']['dashboard']).to eq('Environment metrics')
+ expect(json_response).not_to have_key('all_dashboards')
+ expect(json_response).to have_key('metrics_data')
end
end
@@ -72,7 +92,7 @@ describe MetricsDashboard do
it 'includes project_blob_path only for project dashboards' do
expect(system_dashboard['project_blob_path']).to be_nil
- expect(project_dashboard['project_blob_path']).to eq("/#{project.namespace.path}/#{project.name}/blob/master/.gitlab/dashboards/test.yml")
+ expect(project_dashboard['project_blob_path']).to eq("/#{project.namespace.path}/#{project.name}/-/blob/master/.gitlab/dashboards/test.yml")
end
describe 'project permissions' do
diff --git a/spec/controllers/concerns/page_limiter_spec.rb b/spec/controllers/concerns/page_limiter_spec.rb
new file mode 100644
index 00000000000..287b62cb66c
--- /dev/null
+++ b/spec/controllers/concerns/page_limiter_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+class PageLimiterSpecController < ApplicationController
+ include PageLimiter
+
+ before_action do
+ limit_pages 200
+ end
+
+ def index
+ head :ok
+ end
+end
+
+describe PageLimiter do
+ let(:controller_class) do
+ PageLimiterSpecController
+ end
+
+ let(:instance) do
+ controller_class.new
+ end
+
+ before do
+ allow(instance).to receive(:params) do
+ {
+ controller: "explore/projects",
+ action: "index"
+ }
+ end
+
+ allow(instance).to receive(:request) do
+ double(:request, user_agent: "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")
+ end
+ end
+
+ describe "#limit_pages" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:max_page, :actual_page, :result) do
+ 2 | 1 | nil
+ 2 | 2 | nil
+ 2 | 3 | PageLimiter::PageOutOfBoundsError
+ nil | 1 | PageLimiter::PageLimitNotANumberError
+ 0 | 1 | PageLimiter::PageLimitNotSensibleError
+ -1 | 1 | PageLimiter::PageLimitNotSensibleError
+ end
+
+ with_them do
+ subject { instance.limit_pages(max_page) }
+
+ before do
+ allow(instance).to receive(:params) { { page: actual_page.to_s } }
+ end
+
+ it "returns the expected result" do
+ if result == PageLimiter::PageOutOfBoundsError
+ expect(instance).to receive(:record_page_limit_interception)
+ expect { subject }.to raise_error(result)
+ elsif result&.superclass == PageLimiter::PageLimiterError
+ expect { subject }.to raise_error(result)
+ else
+ expect(subject).to eq(result)
+ end
+ end
+ end
+ end
+
+ describe "#default_page_out_of_bounds_response" do
+ subject { instance.send(:default_page_out_of_bounds_response) }
+
+ it "returns a bad_request header" do
+ expect(instance).to receive(:head).with(:bad_request)
+
+ subject
+ end
+ end
+
+ describe "#record_page_limit_interception" do
+ subject { instance.send(:record_page_limit_interception) }
+
+ let(:counter) { double("counter", increment: true) }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter) { counter }
+ end
+
+ it "creates a metric counter" do
+ expect(Gitlab::Metrics).to receive(:counter).with(
+ :gitlab_page_out_of_bounds,
+ controller: "explore/projects",
+ action: "index",
+ bot: true
+ )
+
+ subject
+ end
+
+ it "increments the counter" do
+ expect(counter).to receive(:increment)
+
+ subject
+ end
+ end
+end
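
The new spec above pins down the behaviour of the PageLimiter concern without showing the concern itself. The following is a rough sketch only, reconstructed from the expectations in the spec: the error classes, metric name, and bot: label are taken from the spec; the use of the DeviceDetector gem for bot detection is an assumption, and the real app/controllers/concerns/page_limiter.rb may differ.

# Hypothetical sketch of a PageLimiter-style concern, reconstructed from the
# spec above; not the actual GitLab implementation.
module PageLimiter
  extend ActiveSupport::Concern

  PageLimiterError          = Class.new(StandardError)
  PageLimitNotANumberError  = Class.new(PageLimiterError)
  PageLimitNotSensibleError = Class.new(PageLimiterError)
  PageOutOfBoundsError      = Class.new(PageLimiterError)

  # Called from a before_action, e.g. `limit_pages 200`
  def limit_pages(max_page_number)
    raise PageLimitNotANumberError unless max_page_number.is_a?(Integer)
    raise PageLimitNotSensibleError unless max_page_number > 0

    return unless params[:page].present? && params[:page].to_i > max_page_number

    record_page_limit_interception
    raise PageOutOfBoundsError, max_page_number
  end

  private

  # Default response when a controller rescues PageOutOfBoundsError
  def default_page_out_of_bounds_response
    head :bad_request
  end

  # Count interceptions so the limit's impact is visible in metrics; the call
  # shape mirrors the stubbed expectation in the spec above.
  def record_page_limit_interception
    bot = DeviceDetector.new(request.user_agent).bot? # assumption: device_detector gem

    Gitlab::Metrics.counter(
      :gitlab_page_out_of_bounds,
      controller: params[:controller],
      action: params[:action],
      bot: bot
    ).increment
  end
end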
diff --git a/spec/controllers/concerns/project_unauthorized_spec.rb b/spec/controllers/concerns/project_unauthorized_spec.rb
index 5834b1ef37f..9b40660811e 100644
--- a/spec/controllers/concerns/project_unauthorized_spec.rb
+++ b/spec/controllers/concerns/project_unauthorized_spec.rb
@@ -30,7 +30,7 @@ describe ProjectUnauthorized do
get :show, params: { namespace_id: project.namespace.to_param, id: project.to_param }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'renders a 403 when the service denies access to the project' do
@@ -38,7 +38,7 @@ describe ProjectUnauthorized do
get :show, params: { namespace_id: project.namespace.to_param, id: project.to_param }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(response.body).to match("External authorization denied access to this project")
end
@@ -47,7 +47,7 @@ describe ProjectUnauthorized do
get :show, params: { namespace_id: other_project.namespace.to_param, id: other_project.to_param }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/concerns/routable_actions_spec.rb b/spec/controllers/concerns/routable_actions_spec.rb
index a11f4d2a154..80c67022219 100644
--- a/spec/controllers/concerns/routable_actions_spec.rb
+++ b/spec/controllers/concerns/routable_actions_spec.rb
@@ -47,14 +47,14 @@ describe RoutableActions do
it 'allows access' do
get_routable(routable)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
it 'prevents access when not authorized' do
get_routable(routable)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -75,14 +75,14 @@ describe RoutableActions do
it 'allows access' do
get_routable(routable)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
it 'prevents access when not authorized' do
get_routable(routable)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -92,7 +92,7 @@ describe RoutableActions do
it 'allows access when authorized' do
get_routable(routable)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'prevents access when unauthorized' do
@@ -100,7 +100,7 @@ describe RoutableActions do
get_routable(user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -111,7 +111,7 @@ describe RoutableActions do
get_routable(routable)
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response.location).to end_with('/users/sign_in')
end
end
diff --git a/spec/controllers/concerns/send_file_upload_spec.rb b/spec/controllers/concerns/send_file_upload_spec.rb
index 4110be721ad..3cfb7b5a488 100644
--- a/spec/controllers/concerns/send_file_upload_spec.rb
+++ b/spec/controllers/concerns/send_file_upload_spec.rb
@@ -59,11 +59,9 @@ describe SendFileUpload do
let(:params) { { disposition: 'inline', attachment: filename } }
it 'sends a file with inline disposition' do
- # Notice the filename= is omitted from the disposition; this is because
- # Rails 5 will append this header in send_file
expected_params = {
filename: 'test.png',
- disposition: "inline; filename*=UTF-8''test.png"
+ disposition: 'inline'
}
expect(controller).to receive(:send_file).with(uploader.path, expected_params)
@@ -76,34 +74,16 @@ describe SendFileUpload do
let(:params) { { attachment: filename } }
it 'sends a file with content-type of text/plain' do
- # Notice the filename= is omitted from the disposition; this is because
- # Rails 5 will append this header in send_file
expected_params = {
content_type: 'text/plain',
filename: 'test.js',
- disposition: "attachment; filename*=UTF-8''test.js"
+ disposition: 'attachment'
}
expect(controller).to receive(:send_file).with(uploader.path, expected_params)
subject
end
- context 'with non-ASCII encoded filename' do
- let(:filename) { 'テスト.txt' }
-
- # Notice the filename= is omitted from the disposition; this is because
- # Rails 5 will append this header in send_file
- it 'sends content-disposition for non-ASCII encoded filenames' do
- expected_params = {
- filename: filename,
- disposition: "attachment; filename*=UTF-8''%E3%83%86%E3%82%B9%E3%83%88.txt"
- }
- expect(controller).to receive(:send_file).with(uploader.path, expected_params)
-
- subject
- end
- end
-
context 'with a proxied file in object storage' do
before do
stub_uploads_object_storage(uploader: uploader_class)
diff --git a/spec/controllers/concerns/static_object_external_storage_spec.rb b/spec/controllers/concerns/static_object_external_storage_spec.rb
index ddd1a95427e..d3ece587ef7 100644
--- a/spec/controllers/concerns/static_object_external_storage_spec.rb
+++ b/spec/controllers/concerns/static_object_external_storage_spec.rb
@@ -27,7 +27,7 @@ describe StaticObjectExternalStorage do
do_request
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -75,7 +75,7 @@ describe StaticObjectExternalStorage do
request.headers['X-Gitlab-External-Storage-Token'] = 'letmein'
do_request
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -84,7 +84,7 @@ describe StaticObjectExternalStorage do
request.headers['X-Gitlab-External-Storage-Token'] = 'donotletmein'
do_request
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
diff --git a/spec/controllers/dashboard/groups_controller_spec.rb b/spec/controllers/dashboard/groups_controller_spec.rb
index 20a0951423b..b615bcc1e6b 100644
--- a/spec/controllers/dashboard/groups_controller_spec.rb
+++ b/spec/controllers/dashboard/groups_controller_spec.rb
@@ -40,7 +40,7 @@ describe Dashboard::GroupsController do
it 'renders only groups the user is a member of when searching hierarchy correctly' do
get :index, params: { filter: 'chef' }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
all_groups = [top_level_result, top_level_a, sub_level_result_a]
expect(assigns(:groups)).to contain_exactly(*all_groups)
end
@@ -51,7 +51,7 @@ describe Dashboard::GroupsController do
get :index
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/controllers/dashboard/milestones_controller_spec.rb b/spec/controllers/dashboard/milestones_controller_spec.rb
index 67939aa4e6a..f4b04ad6dee 100644
--- a/spec/controllers/dashboard/milestones_controller_spec.rb
+++ b/spec/controllers/dashboard/milestones_controller_spec.rb
@@ -40,7 +40,7 @@ describe Dashboard::MilestonesController do
it 'shows milestone page' do
view_milestone
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -55,7 +55,7 @@ describe Dashboard::MilestonesController do
it 'returns group and project milestones to which the user belongs' do
get :index, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(2)
expect(json_response.map { |i| i["name"] }).to match_array([group_milestone.name, project_milestone.name])
expect(json_response.map { |i| i["group_name"] }.compact).to match_array(group.name)
@@ -64,7 +64,7 @@ describe Dashboard::MilestonesController do
it 'returns closed group and project milestones to which the user belongs' do
get :index, params: { state: 'closed' }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(2)
expect(json_response.map { |i| i["name"] }).to match_array([closed_group_milestone.name, closed_project_milestone.name])
expect(json_response.map { |i| i["group_name"] }.compact).to match_array(group.name)
diff --git a/spec/controllers/dashboard/projects_controller_spec.rb b/spec/controllers/dashboard/projects_controller_spec.rb
index 8b95c9f2496..a13b56deb23 100644
--- a/spec/controllers/dashboard/projects_controller_spec.rb
+++ b/spec/controllers/dashboard/projects_controller_spec.rb
@@ -11,7 +11,14 @@ describe Dashboard::ProjectsController do
end
context 'user logged in' do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+
+ before_all do
+ project.add_developer(user)
+ project2.add_developer(user)
+ end
before do
sign_in(user)
@@ -23,17 +30,12 @@ describe Dashboard::ProjectsController do
get :index
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
it 'orders the projects by last activity by default' do
- project = create(:project)
- project.add_developer(user)
project.update!(last_repository_updated_at: 3.days.ago, last_activity_at: 3.days.ago)
-
- project2 = create(:project)
- project2.add_developer(user)
project2.update!(last_repository_updated_at: 10.days.ago, last_activity_at: 10.days.ago)
get :index
@@ -42,12 +44,27 @@ describe Dashboard::ProjectsController do
end
context 'project sorting' do
- let(:project) { create(:project) }
-
it_behaves_like 'set sort order from user preference' do
let(:sorting_param) { 'created_asc' }
end
end
+
+ context 'with search and sort parameters' do
+ render_views
+
+ shared_examples 'search and sort parameters' do |sort|
+ it 'returns a single project with no ambiguous column errors' do
+ get :index, params: { name: project2.name, sort: sort }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(assigns(:projects)).to eq([project2])
+ end
+ end
+
+ %w[latest_activity_desc latest_activity_asc stars_desc stars_asc created_desc].each do |sort|
+ it_behaves_like 'search and sort parameters', sort
+ end
+ end
end
end
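
The let_it_be and before_all helpers introduced above come from the TestProf gem used in the GitLab test suite; they create records once per example group instead of once per example, which is why the per-example create(:project) calls could be dropped. A minimal sketch of the pattern, assuming FactoryBot factories are configured as in this suite:

# Sketch of the let_it_be / before_all pattern (TestProf gem), mirroring the
# hunk above; assumes FactoryBot factories as configured in this suite.
require 'test_prof/recipes/rspec/let_it_be'

RSpec.describe 'records shared across examples' do
  # Created once before all examples in this group, inside a transaction that
  # is rolled back when the group finishes.
  let_it_be(:user)    { create(:user) }
  let_it_be(:project) { create(:project) }

  before_all do
    project.add_developer(user) # also runs once per group, not per example
  end

  it 'reuses the same records in every example' do
    expect(project).to be_persisted
  end
end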
diff --git a/spec/controllers/dashboard/snippets_controller_spec.rb b/spec/controllers/dashboard/snippets_controller_spec.rb
index 2d839094d34..d5e3a348cd2 100644
--- a/spec/controllers/dashboard/snippets_controller_spec.rb
+++ b/spec/controllers/dashboard/snippets_controller_spec.rb
@@ -17,5 +17,14 @@ describe Dashboard::SnippetsController do
create(:personal_snippet, :public, author: user)
end
end
+
+ it 'fetches snippet counts via the snippet count service' do
+ service = double(:count_service, execute: {})
+ expect(Snippets::CountService)
+ .to receive(:new).with(user, author: user)
+ .and_return(service)
+
+ get :index
+ end
end
end
diff --git a/spec/controllers/dashboard/todos_controller_spec.rb b/spec/controllers/dashboard/todos_controller_spec.rb
index 4ce445fe41a..0823afe410d 100644
--- a/spec/controllers/dashboard/todos_controller_spec.rb
+++ b/spec/controllers/dashboard/todos_controller_spec.rb
@@ -20,19 +20,19 @@ describe Dashboard::TodosController do
get :index, params: { project_id: unauthorized_project.id }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'renders 404 when given project does not exists' do
get :index, params: { project_id: 999 }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'renders 200 when filtering for "any project" todos' do
get :index, params: { project_id: '' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'renders 200 when user has access on given project' do
@@ -40,7 +40,7 @@ describe Dashboard::TodosController do
get :index, params: { project_id: authorized_project.id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -78,7 +78,7 @@ describe Dashboard::TodosController do
get :index, params: { group_id: unauthorized_group.id }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -130,7 +130,7 @@ describe Dashboard::TodosController do
patch :restore, params: { id: todo.id }
expect(todo.reload).to be_pending
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq({ "count" => 1, "done_count" => 0 })
end
end
@@ -144,7 +144,7 @@ describe Dashboard::TodosController do
todos.each do |todo|
expect(todo.reload).to be_pending
end
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq({ 'count' => 2, 'done_count' => 0 })
end
end
diff --git a/spec/controllers/explore/projects_controller_spec.rb b/spec/controllers/explore/projects_controller_spec.rb
index 6752d2b8ebd..c2cd29eb036 100644
--- a/spec/controllers/explore/projects_controller_spec.rb
+++ b/spec/controllers/explore/projects_controller_spec.rb
@@ -59,6 +59,85 @@ describe Explore::ProjectsController do
end
end
+ shared_examples "blocks high page numbers" do
+ let(:page_limit) { 200 }
+
+ context "page number is too high" do
+ [:index, :trending, :starred].each do |endpoint|
+ describe "GET #{endpoint}" do
+ render_views
+
+ before do
+ get endpoint, params: { page: page_limit + 1 }
+ end
+
+ it { is_expected.to respond_with(:bad_request) }
+ it { is_expected.to render_template("explore/projects/page_out_of_bounds") }
+
+ it "assigns the page number" do
+ expect(assigns[:max_page_number]).to eq(page_limit.to_s)
+ end
+ end
+
+ describe "GET #{endpoint}.json" do
+ render_views
+
+ before do
+ get endpoint, params: { page: page_limit + 1 }, format: :json
+ end
+
+ it { is_expected.to respond_with(:bad_request) }
+ end
+
+ describe "metrics recording" do
+ subject { get endpoint, params: { page: page_limit + 1 } }
+
+ let(:counter) { double("counter", increment: true) }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter) { counter }
+ end
+
+ it "records the interception" do
+ expect(Gitlab::Metrics).to receive(:counter).with(
+ :gitlab_page_out_of_bounds,
+ controller: "explore/projects",
+ action: endpoint.to_s,
+ bot: false
+ )
+
+ subject
+ end
+ end
+ end
+ end
+
+ context "page number is acceptable" do
+ [:index, :trending, :starred].each do |endpoint|
+ describe "GET #{endpoint}" do
+ render_views
+
+ before do
+ get endpoint, params: { page: page_limit }
+ end
+
+ it { is_expected.to respond_with(:success) }
+ it { is_expected.to render_template("explore/projects/#{endpoint}") }
+ end
+
+ describe "GET #{endpoint}.json" do
+ render_views
+
+ before do
+ get endpoint, params: { page: page_limit }, format: :json
+ end
+
+ it { is_expected.to respond_with(:success) }
+ end
+ end
+ end
+ end
+
context 'when user is signed in' do
let(:user) { create(:user) }
@@ -67,6 +146,7 @@ describe Explore::ProjectsController do
end
include_examples 'explore projects'
+ include_examples "blocks high page numbers"
context 'user preference sorting' do
let(:project) { create(:project) }
@@ -79,6 +159,7 @@ describe Explore::ProjectsController do
context 'when user is not signed in' do
include_examples 'explore projects'
+ include_examples "blocks high page numbers"
context 'user preference sorting' do
let(:project) { create(:project) }
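
The shared examples added above assert the end-to-end behaviour (a bad_request status, the page_out_of_bounds template, the assigned @max_page_number, and the metrics counter) but not the controller wiring that produces it. A plausible sketch of that wiring, inferred from the assertions; the rescue_from handler and base class are assumptions, and the real Explore::ProjectsController may differ.

# Hypothetical wiring for the behaviour asserted above; not the actual controller.
class Explore::ProjectsController < Explore::ApplicationController
  include PageLimiter

  before_action only: [:index, :trending, :starred] do
    limit_pages 200
  end

  rescue_from PageLimiter::PageOutOfBoundsError, with: :page_out_of_bounds

  private

  def page_out_of_bounds(error)
    # Exposed to the view and checked via assigns[:max_page_number] above;
    # error.message is the stringified limit passed when the error was raised.
    @max_page_number = error.message

    respond_to do |format|
      format.html { render 'page_out_of_bounds', status: :bad_request }
      format.json { head :bad_request }
    end
  end
end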
diff --git a/spec/controllers/google_api/authorizations_controller_spec.rb b/spec/controllers/google_api/authorizations_controller_spec.rb
index 4d200140f16..58bda2bd4e8 100644
--- a/spec/controllers/google_api/authorizations_controller_spec.rb
+++ b/spec/controllers/google_api/authorizations_controller_spec.rb
@@ -23,7 +23,7 @@ describe GoogleApi::AuthorizationsController do
subject
expect(session[GoogleApi::CloudPlatform::Client.session_key_for_token]).to be_nil
- expect(response).to have_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
diff --git a/spec/controllers/graphql_controller_spec.rb b/spec/controllers/graphql_controller_spec.rb
index 9937bdf4061..0c1089dc7a8 100644
--- a/spec/controllers/graphql_controller_spec.rb
+++ b/spec/controllers/graphql_controller_spec.rb
@@ -39,7 +39,7 @@ describe GraphqlController do
it 'returns 200 when user can access API' do
post :execute
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns access denied template when user cannot access API' do
@@ -59,7 +59,7 @@ describe GraphqlController do
it 'returns 200' do
post :execute
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/controllers/groups/avatars_controller_spec.rb b/spec/controllers/groups/avatars_controller_spec.rb
index 7fffafaa2d4..1229328000b 100644
--- a/spec/controllers/groups/avatars_controller_spec.rb
+++ b/spec/controllers/groups/avatars_controller_spec.rb
@@ -25,6 +25,6 @@ describe Groups::AvatarsController do
delete :destroy, params: { group_id: group }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
diff --git a/spec/controllers/groups/boards_controller_spec.rb b/spec/controllers/groups/boards_controller_spec.rb
index e4232c2c1ab..acfa8bc9354 100644
--- a/spec/controllers/groups/boards_controller_spec.rb
+++ b/spec/controllers/groups/boards_controller_spec.rb
@@ -27,13 +27,14 @@ describe Groups::BoardsController do
context 'with unauthorized user' do
before do
allow(Ability).to receive(:allowed?).with(user, :read_cross_project, :global).and_return(true)
- allow(Ability).to receive(:allowed?).with(user, :read_group, group).and_return(false)
+ allow(Ability).to receive(:allowed?).with(user, :read_group, group).and_return(true)
+ allow(Ability).to receive(:allowed?).with(user, :read_board, group).and_return(false)
end
it 'returns a not found 404 response' do
list_boards
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.content_type).to eq 'text/html'
end
end
@@ -70,13 +71,14 @@ describe Groups::BoardsController do
context 'with unauthorized user' do
before do
allow(Ability).to receive(:allowed?).with(user, :read_cross_project, :global).and_return(true)
- allow(Ability).to receive(:allowed?).with(user, :read_group, group).and_return(false)
+ allow(Ability).to receive(:allowed?).with(user, :read_group, group).and_return(true)
+ allow(Ability).to receive(:allowed?).with(user, :read_board, group).and_return(false)
end
it 'returns a not found 404 response' do
list_boards format: :json
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.content_type).to eq 'application/json'
end
end
@@ -105,13 +107,14 @@ describe Groups::BoardsController do
context 'with unauthorized user' do
before do
allow(Ability).to receive(:allowed?).with(user, :read_cross_project, :global).and_return(true)
- allow(Ability).to receive(:allowed?).with(user, :read_group, group).and_return(false)
+ allow(Ability).to receive(:allowed?).with(user, :read_group, group).and_return(true)
+ allow(Ability).to receive(:allowed?).with(user, :read_board, group).and_return(false)
end
it 'returns a not found 404 response' do
read_board board: board
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.content_type).to eq 'text/html'
end
end
@@ -142,13 +145,14 @@ describe Groups::BoardsController do
context 'with unauthorized user' do
before do
allow(Ability).to receive(:allowed?).with(user, :read_cross_project, :global).and_return(true)
-            allow(Ability).to receive(:allowed?).with(user, :read_group, group).and_return(false)
+            allow(Ability).to receive(:allowed?).with(user, :read_group, group).and_return(true)
+            allow(Ability).to receive(:allowed?).with(user, :read_board, group).and_return(false)

end
it 'returns a not found 404 response' do
read_board board: board, format: :json
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.content_type).to eq 'application/json'
end
end
@@ -160,7 +164,7 @@ describe Groups::BoardsController do
read_board board: another_board
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
diff --git a/spec/controllers/groups/children_controller_spec.rb b/spec/controllers/groups/children_controller_spec.rb
index 171326f3f8b..a8921300e6b 100644
--- a/spec/controllers/groups/children_controller_spec.rb
+++ b/spec/controllers/groups/children_controller_spec.rb
@@ -142,7 +142,7 @@ describe Groups::ChildrenController do
get :index, params: { group_id: subgroup.to_param, filter: 'test' }, format: :json
- expect(response).to have_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns an array with one element when only one result is matched' do
@@ -185,7 +185,7 @@ describe Groups::ChildrenController do
get :index, params: { group_id: group.to_param, filter: 'filter', per_page: 3 }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'includes pagination headers' do
@@ -316,7 +316,7 @@ describe Groups::ChildrenController do
it 'correctly calculates the counts' do
get :index, params: { group_id: group.to_param, sort: 'id_asc', page: 2 }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -328,7 +328,7 @@ describe Groups::ChildrenController do
get :index, params: { group_id: group }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/controllers/groups/clusters/applications_controller_spec.rb b/spec/controllers/groups/clusters/applications_controller_spec.rb
index 21533d1c89a..bab9e64cfdb 100644
--- a/spec/controllers/groups/clusters/applications_controller_spec.rb
+++ b/spec/controllers/groups/clusters/applications_controller_spec.rb
@@ -43,7 +43,7 @@ describe Groups::Clusters::ApplicationsController do
expect(ClusterInstallAppWorker).to receive(:perform_async).with(application, anything).once
expect { subject }.to change { current_application.count }
- expect(response).to have_http_status(:no_content)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(cluster.application_helm).to be_scheduled
end
@@ -54,7 +54,7 @@ describe Groups::Clusters::ApplicationsController do
it 'return 404' do
expect { subject }.not_to change { current_application.count }
- expect(response).to have_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -62,7 +62,7 @@ describe Groups::Clusters::ApplicationsController do
let(:application) { 'unkwnown-app' }
it 'return 404' do
- is_expected.to have_http_status(:not_found)
+ is_expected.to have_gitlab_http_status(:not_found)
end
end
@@ -72,7 +72,7 @@ describe Groups::Clusters::ApplicationsController do
end
it 'returns 400' do
- is_expected.to have_http_status(:bad_request)
+ is_expected.to have_gitlab_http_status(:bad_request)
end
end
end
@@ -107,7 +107,7 @@ describe Groups::Clusters::ApplicationsController do
it "schedules an application update" do
expect(ClusterPatchAppWorker).to receive(:perform_async).with(application.name, anything).once
- is_expected.to have_http_status(:no_content)
+ is_expected.to have_gitlab_http_status(:no_content)
expect(cluster.application_cert_manager).to be_scheduled
end
@@ -118,13 +118,13 @@ describe Groups::Clusters::ApplicationsController do
cluster.destroy!
end
- it { is_expected.to have_http_status(:not_found) }
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when application is unknown' do
let(:application_name) { 'unkwnown-app' }
- it { is_expected.to have_http_status(:not_found) }
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when application is already scheduled' do
@@ -132,7 +132,7 @@ describe Groups::Clusters::ApplicationsController do
application.make_scheduled!
end
- it { is_expected.to have_http_status(:bad_request) }
+ it { is_expected.to have_gitlab_http_status(:bad_request) }
end
end
diff --git a/spec/controllers/groups/clusters_controller_spec.rb b/spec/controllers/groups/clusters_controller_spec.rb
index cf90d388a61..cdb45e1946e 100644
--- a/spec/controllers/groups/clusters_controller_spec.rb
+++ b/spec/controllers/groups/clusters_controller_spec.rb
@@ -654,7 +654,7 @@ describe Groups::ClustersController do
go(format: :json)
cluster.reload
- expect(response).to have_http_status(:no_content)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(cluster.enabled).to be_falsey
expect(cluster.name).to eq('my-new-cluster-name')
expect(cluster).not_to be_managed
@@ -674,7 +674,7 @@ describe Groups::ClustersController do
it 'rejects changes' do
go(format: :json)
- expect(response).to have_http_status(:bad_request)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
diff --git a/spec/controllers/groups/group_links_controller_spec.rb b/spec/controllers/groups/group_links_controller_spec.rb
index 04f2e33b26a..c062de468fc 100644
--- a/spec/controllers/groups/group_links_controller_spec.rb
+++ b/spec/controllers/groups/group_links_controller_spec.rb
@@ -13,12 +13,30 @@ describe Groups::GroupLinksController do
describe '#create' do
let(:shared_with_group_id) { shared_with_group.id }
+ let(:shared_group_access) { GroupGroupLink.default_access }
subject do
post(:create,
params: { group_id: shared_group,
shared_with_group_id: shared_with_group_id,
- shared_group_access: GroupGroupLink.default_access })
+ shared_group_access: shared_group_access })
+ end
+
+ shared_examples 'creates group group link' do
+ it 'links group with selected group' do
+ expect { subject }.to change { shared_with_group.shared_groups.include?(shared_group) }.from(false).to(true)
+ end
+
+ it 'redirects to group links page' do
+ subject
+
+ expect(response).to(redirect_to(group_group_members_path(shared_group)))
+ end
+
+ it 'allows access for group member' do
+ expect { subject }.to(
+ change { group_member.can?(:read_group, shared_group) }.from(false).to(true))
+ end
end
context 'when user has correct access to both groups' do
@@ -31,18 +49,19 @@ describe Groups::GroupLinksController do
shared_with_group.add_developer(group_member)
end
- it 'links group with selected group' do
- expect { subject }.to change { shared_with_group.shared_groups.include?(shared_group) }.from(false).to(true)
+ context 'when default access level is requested' do
+ include_examples 'creates group group link'
end
- it 'redirects to group links page' do
- subject
+ context 'when owner access is requested' do
+ let(:shared_group_access) { Gitlab::Access::OWNER }
- expect(response).to(redirect_to(group_group_members_path(shared_group)))
- end
+ include_examples 'creates group group link'
- it 'allows access for group member' do
- expect { subject }.to change { group_member.can?(:read_group, shared_group) }.from(false).to(true)
+ it 'allows admin access for group member' do
+ expect { subject }.to(
+ change { group_member.can?(:admin_group, shared_group) }.from(false).to(true))
+ end
end
context 'when shared with group id is not present' do
@@ -81,7 +100,7 @@ describe Groups::GroupLinksController do
it 'renders 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -94,7 +113,7 @@ describe Groups::GroupLinksController do
it 'renders 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -107,7 +126,7 @@ describe Groups::GroupLinksController do
it 'renders 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -149,7 +168,7 @@ describe Groups::GroupLinksController do
it 'renders 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -161,7 +180,7 @@ describe Groups::GroupLinksController do
it 'renders 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -191,7 +210,7 @@ describe Groups::GroupLinksController do
it 'renders 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -203,7 +222,7 @@ describe Groups::GroupLinksController do
it 'renders 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
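
The refactor above extracts the three assertions into a 'creates group group link' shared example and re-runs it under different values of shared_group_access supplied via let. A generic, self-contained illustration of that include_examples plus let-override pattern; the example names and values here are placeholders, not taken from the diff.

# Generic RSpec illustration of sharing examples while overriding a let
# per context; the names and values are placeholders.
RSpec.describe 'shared examples with an overridable let' do
  shared_examples 'grants at least the requested access' do
    it { expect(granted_access).to be >= requested_access }
  end

  let(:requested_access) { 10 }
  let(:granted_access)   { requested_access }

  context 'with the default access level' do
    include_examples 'grants at least the requested access'
  end

  context 'with an elevated access level' do
    let(:requested_access) { 50 }

    include_examples 'grants at least the requested access'
  end
end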
diff --git a/spec/controllers/groups/group_members_controller_spec.rb b/spec/controllers/groups/group_members_controller_spec.rb
index 1c8a2bd160d..f69d0602404 100644
--- a/spec/controllers/groups/group_members_controller_spec.rb
+++ b/spec/controllers/groups/group_members_controller_spec.rb
@@ -13,7 +13,7 @@ describe Groups::GroupMembersController do
it 'renders index with 200 status code' do
get :index, params: { group_id: group }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:index)
end
@@ -105,7 +105,7 @@ describe Groups::GroupMembersController do
access_level: Gitlab::Access::GUEST
}
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(group.users).not_to include group_user
end
end
@@ -173,7 +173,7 @@ describe Groups::GroupMembersController do
it 'returns 403' do
delete :destroy, params: { group_id: group, id: 42 }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -186,7 +186,7 @@ describe Groups::GroupMembersController do
it 'returns 403' do
delete :destroy, params: { group_id: group, id: member }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(group.members).to include member
end
end
@@ -223,7 +223,7 @@ describe Groups::GroupMembersController do
it 'returns 404' do
delete :leave, params: { group_id: group }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -244,7 +244,7 @@ describe Groups::GroupMembersController do
it 'supports json request' do
delete :leave, params: { group_id: group }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['notice']).to eq "You left the \"#{group.name}\" group."
end
end
@@ -257,7 +257,7 @@ describe Groups::GroupMembersController do
it 'cannot removes himself from the group' do
delete :leave, params: { group_id: group }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -304,7 +304,7 @@ describe Groups::GroupMembersController do
it 'returns 403' do
post :approve_access_request, params: { group_id: group, id: 42 }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -317,7 +317,7 @@ describe Groups::GroupMembersController do
it 'returns 403' do
post :approve_access_request, params: { group_id: group, id: member }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(group.members).not_to include member
end
end
@@ -348,7 +348,7 @@ describe Groups::GroupMembersController do
it 'is successful' do
get :index, params: { group_id: group }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -356,7 +356,7 @@ describe Groups::GroupMembersController do
it 'is successful' do
post :create, params: { group_id: group, users: user, access_level: Gitlab::Access::GUEST }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -370,7 +370,7 @@ describe Groups::GroupMembersController do
},
format: :js
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -378,7 +378,7 @@ describe Groups::GroupMembersController do
it 'is successful' do
delete :destroy, params: { group_id: group, id: membership }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -388,7 +388,7 @@ describe Groups::GroupMembersController do
post :request_access, params: { group_id: group }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -397,7 +397,7 @@ describe Groups::GroupMembersController do
access_request = create(:group_member, :access_request, group: group)
post :approve_access_request, params: { group_id: group, id: access_request }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -407,7 +407,7 @@ describe Groups::GroupMembersController do
delete :leave, params: { group_id: group }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -415,7 +415,7 @@ describe Groups::GroupMembersController do
it 'is successful' do
post :resend_invite, params: { group_id: group, id: membership }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
diff --git a/spec/controllers/groups/labels_controller_spec.rb b/spec/controllers/groups/labels_controller_spec.rb
index d4780fa2675..90f91a4ff72 100644
--- a/spec/controllers/groups/labels_controller_spec.rb
+++ b/spec/controllers/groups/labels_controller_spec.rb
@@ -53,7 +53,7 @@ describe Groups::LabelsController do
post :toggle_subscription, params: { group_id: group.to_param, id: label.to_param }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/controllers/groups/milestones_controller_spec.rb b/spec/controllers/groups/milestones_controller_spec.rb
index 8fb9f0c516c..afb950bc538 100644
--- a/spec/controllers/groups/milestones_controller_spec.rb
+++ b/spec/controllers/groups/milestones_controller_spec.rb
@@ -42,7 +42,7 @@ describe Groups::MilestonesController do
get :index, params: { group_id: group.to_param }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to include(milestone.title)
end
@@ -74,7 +74,7 @@ describe Groups::MilestonesController do
get :index, params: { group_id: group.to_param }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to include(milestone.title)
end
end
@@ -84,7 +84,7 @@ describe Groups::MilestonesController do
it 'does not return milestone' do
get :index, params: { group_id: public_group.to_param }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).not_to include(private_milestone.title)
end
end
@@ -125,11 +125,45 @@ describe Groups::MilestonesController do
it 'returns the milestone' do
get :index, params: { group_id: public_group.to_param }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to include(private_milestone.title)
end
end
end
+
+ context 'when subgroup milestones are present' do
+ let(:subgroup) { create(:group, :private, parent: group) }
+ let(:sub_project) { create(:project, :private, group: subgroup) }
+ let!(:group_milestone) { create(:milestone, group: group, title: 'Group milestone') }
+ let!(:sub_project_milestone) { create(:milestone, project: sub_project, title: 'Sub Project Milestone') }
+ let!(:subgroup_milestone) { create(:milestone, title: 'Subgroup Milestone', group: subgroup) }
+
+ it 'shows subgroup milestones that user has access to' do
+ get :index, params: { group_id: group.to_param }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to include(group_milestone.title)
+ expect(response.body).to include(sub_project_milestone.title)
+ expect(response.body).to include(subgroup_milestone.title)
+ end
+
+ context 'when user has no access to subgroups' do
+ let(:non_member) { create(:user) }
+
+ before do
+ sign_in(non_member)
+ end
+
+ it 'does not show subgroup milestones' do
+ get :index, params: { group_id: group.to_param }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to include(group_milestone.title)
+ expect(response.body).not_to include(sub_project_milestone.title)
+ expect(response.body).not_to include(subgroup_milestone.title)
+ end
+ end
+ end
end
context 'as JSON' do
@@ -145,10 +179,23 @@ describe Groups::MilestonesController do
expect(milestones.count).to eq(2)
expect(milestones.first["title"]).to eq("group milestone")
expect(milestones.second["title"]).to eq("legacy")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'application/json'
end
+ context 'with subgroup milestones' do
+ it 'lists descendants group milestones' do
+ subgroup = create(:group, :public, parent: group)
+ create(:milestone, group: subgroup, title: 'subgroup milestone')
+
+ get :index, params: { group_id: group.to_param }, format: :json
+ milestones = json_response
+
+ expect(milestones.count).to eq(3)
+ expect(milestones.second["title"]).to eq("subgroup milestone")
+ end
+ end
+
context 'for a subgroup' do
let(:subgroup) { create(:group, parent: group) }
@@ -283,7 +330,7 @@ describe Groups::MilestonesController do
it 'does not redirect' do
get :index, params: { group_id: group.to_param }
- expect(response).not_to have_gitlab_http_status(301)
+ expect(response).not_to have_gitlab_http_status(:moved_permanently)
end
end
@@ -302,7 +349,7 @@ describe Groups::MilestonesController do
it 'does not redirect' do
get :show, params: { group_id: group.to_param, id: title }
- expect(response).not_to have_gitlab_http_status(301)
+ expect(response).not_to have_gitlab_http_status(:moved_permanently)
end
end
@@ -392,7 +439,7 @@ describe Groups::MilestonesController do
milestone: { title: title }
}
- expect(response).not_to have_gitlab_http_status(404)
+ expect(response).not_to have_gitlab_http_status(:not_found)
end
it 'does not redirect to the correct casing' do
@@ -402,7 +449,7 @@ describe Groups::MilestonesController do
milestone: { title: title }
}
- expect(response).not_to have_gitlab_http_status(301)
+ expect(response).not_to have_gitlab_http_status(:moved_permanently)
end
end
@@ -416,7 +463,7 @@ describe Groups::MilestonesController do
milestone: { title: title }
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/groups/registry/repositories_controller_spec.rb b/spec/controllers/groups/registry/repositories_controller_spec.rb
index 4129891914d..eb702d65325 100644
--- a/spec/controllers/groups/registry/repositories_controller_spec.rb
+++ b/spec/controllers/groups/registry/repositories_controller_spec.rb
@@ -14,9 +14,11 @@ describe Groups::Registry::RepositoriesController do
sign_in(user)
end
- context 'GET #index' do
+ shared_examples 'renders a list of repositories' do
context 'when container registry is enabled' do
it 'show index page' do
+ expect(Gitlab::Tracking).not_to receive(:event)
+
get :index, params: {
group_id: group
}
@@ -31,6 +33,7 @@ describe Groups::Registry::RepositoriesController do
}
expect(response).to match_response_schema('registry/repositories')
+ expect(response).to include_pagination_headers
end
it 'returns a list of projects for json format' do
@@ -54,7 +57,8 @@ describe Groups::Registry::RepositoriesController do
expect(Gitlab::Tracking).to receive(:event).with(anything, 'list_repositories', {})
get :index, params: {
- group_id: group
+ group_id: group,
+ format: :json
}
end
end
@@ -86,5 +90,29 @@ describe Groups::Registry::RepositoriesController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ context 'with :vue_container_registry_explorer feature flag disabled' do
+ before do
+ stub_feature_flags(vue_container_registry_explorer: false)
+ end
+
+ it 'has the correct response schema' do
+ get :index, params: {
+ group_id: group,
+ format: :json
+ }
+
+ expect(response).to match_response_schema('registry/repositories')
+ expect(response).not_to include_pagination_headers
+ end
+ end
+ end
+
+ context 'GET #index' do
+ it_behaves_like 'renders a list of repositories'
+ end
+
+ context 'GET #show' do
+ it_behaves_like 'renders a list of repositories'
end
end
diff --git a/spec/controllers/groups/runners_controller_spec.rb b/spec/controllers/groups/runners_controller_spec.rb
index 14b0cf959b3..bf556078eec 100644
--- a/spec/controllers/groups/runners_controller_spec.rb
+++ b/spec/controllers/groups/runners_controller_spec.rb
@@ -21,7 +21,7 @@ describe Groups::RunnersController do
it 'renders show with 200 status code' do
get :show, params: { group_id: group, id: runner }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:show)
end
end
@@ -34,7 +34,7 @@ describe Groups::RunnersController do
it 'renders a 404' do
get :show, params: { group_id: group, id: runner }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -48,7 +48,7 @@ describe Groups::RunnersController do
it 'renders show with 200 status code' do
get :edit, params: { group_id: group, id: runner }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:edit)
end
end
@@ -61,7 +61,7 @@ describe Groups::RunnersController do
it 'renders a 404' do
get :edit, params: { group_id: group, id: runner }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -79,7 +79,7 @@ describe Groups::RunnersController do
post :update, params: params.merge(runner: { description: new_desc } )
end.to change { runner.ensure_runner_queue_value }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(runner.reload.description).to eq(new_desc)
end
end
@@ -96,7 +96,7 @@ describe Groups::RunnersController do
post :update, params: params.merge(runner: { description: old_desc.swapcase } )
end.not_to change { runner.ensure_runner_queue_value }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(runner.reload.description).to eq(old_desc)
end
end
@@ -111,7 +111,7 @@ describe Groups::RunnersController do
it 'destroys the runner and redirects' do
delete :destroy, params: params
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(Ci::Runner.find_by(id: runner.id)).to be_nil
end
end
@@ -124,7 +124,7 @@ describe Groups::RunnersController do
it 'responds 404 and does not destroy the runner' do
delete :destroy, params: params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(Ci::Runner.find_by(id: runner.id)).to be_present
end
end
@@ -143,7 +143,7 @@ describe Groups::RunnersController do
post :resume, params: params
end.to change { runner.ensure_runner_queue_value }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(runner.reload.active).to eq(true)
end
end
@@ -160,7 +160,7 @@ describe Groups::RunnersController do
post :resume, params: params
end.not_to change { runner.ensure_runner_queue_value }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(runner.reload.active).to eq(false)
end
end
@@ -179,7 +179,7 @@ describe Groups::RunnersController do
post :pause, params: params
end.to change { runner.ensure_runner_queue_value }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(runner.reload.active).to eq(false)
end
end
@@ -196,7 +196,7 @@ describe Groups::RunnersController do
post :pause, params: params
end.not_to change { runner.ensure_runner_queue_value }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(runner.reload.active).to eq(true)
end
end
diff --git a/spec/controllers/groups/settings/ci_cd_controller_spec.rb b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
index 897ba491036..4e8cb3f94fb 100644
--- a/spec/controllers/groups/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
@@ -21,7 +21,7 @@ describe Groups::Settings::CiCdController do
it 'renders show with 200 status code' do
get :show, params: { group_id: group }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:show)
end
end
@@ -34,7 +34,7 @@ describe Groups::Settings::CiCdController do
it 'renders a 404' do
get :show, params: { group_id: group }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -47,7 +47,7 @@ describe Groups::Settings::CiCdController do
it 'renders show with 200 status code' do
get :show, params: { group_id: group }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -79,7 +79,7 @@ describe Groups::Settings::CiCdController do
it 'renders a 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -99,7 +99,7 @@ describe Groups::Settings::CiCdController do
group.add_maintainer(user)
end
- it { is_expected.to have_gitlab_http_status(404) }
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when user has enough privileges' do
@@ -170,7 +170,7 @@ describe Groups::Settings::CiCdController do
group.add_owner(user)
end
- it { is_expected.to have_gitlab_http_status(404) }
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when user is an admin' do
diff --git a/spec/controllers/groups/variables_controller_spec.rb b/spec/controllers/groups/variables_controller_spec.rb
index 2d9c5c9d799..d6c790ae7b8 100644
--- a/spec/controllers/groups/variables_controller_spec.rb
+++ b/spec/controllers/groups/variables_controller_spec.rb
@@ -50,7 +50,7 @@ describe Groups::VariablesController do
it 'is successful' do
get :show, params: { group_id: group }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -66,7 +66,7 @@ describe Groups::VariablesController do
},
format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index ddfd2b424e7..1c58c2b5c97 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -96,7 +96,7 @@ describe GroupsController do
User.where(id: [admin, owner, maintainer, developer, guest]).update_all(can_create_group: can_create_group_status)
end
- [:admin, :owner].each do |member_type|
+ [:admin, :owner, :maintainer].each do |member_type|
context "and logged in as #{member_type.capitalize}" do
it_behaves_like 'member with ability to create subgroups' do
let(:member) { send(member_type) }
@@ -104,7 +104,7 @@ describe GroupsController do
end
end
- [:guest, :developer, :maintainer].each do |member_type|
+ [:guest, :developer].each do |member_type|
context "and logged in as #{member_type.capitalize}" do
it_behaves_like 'member without ability to create subgroups' do
let(:member) { send(member_type) }
@@ -136,7 +136,7 @@ describe GroupsController do
get :activity, params: { id: group.to_param }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['count']).to eq(3)
expect(assigns(:projects).limit_value).to be_nil
end
@@ -176,7 +176,7 @@ describe GroupsController do
post :create, params: { group: { name: 'new_group', path: "new_group" } }
end.to change { Group.count }.by(1)
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
context 'authorization' do
@@ -187,7 +187,7 @@ describe GroupsController do
post :create, params: { group: { name: 'new_group', path: "new_group" } }
end.to change { Group.count }.by(1)
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -392,7 +392,7 @@ describe GroupsController do
it 'updates the path successfully' do
post :update, params: { id: group.to_param, group: { path: 'new_path' } }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(controller).to set_flash[:notice]
end
@@ -407,7 +407,7 @@ describe GroupsController do
it 'updates the project_creation_level successfully' do
post :update, params: { id: group.to_param, group: { project_creation_level: ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS } }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(group.reload.project_creation_level).to eq(::Gitlab::Access::MAINTAINER_PROJECT_ACCESS)
end
@@ -422,7 +422,7 @@ describe GroupsController do
post :update, params: { id: group.to_param, group: { name: 'new_name' } }
expect(controller).to set_flash[:notice]
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(group.reload.name).to eq('new_name')
end
@@ -430,7 +430,7 @@ describe GroupsController do
post :update, params: { id: group.to_param, group: { path: 'new_path' } }
expect(assigns(:group).errors[:base].first).to match(/Docker images in their Container Registry/)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -493,7 +493,7 @@ describe GroupsController do
it 'does not redirect' do
get :issues, params: { id: group.to_param }
- expect(response).not_to have_gitlab_http_status(301)
+ expect(response).not_to have_gitlab_http_status(:moved_permanently)
end
end
@@ -512,7 +512,7 @@ describe GroupsController do
it 'does not redirect' do
get :show, params: { id: group.to_param }
- expect(response).not_to have_gitlab_http_status(301)
+ expect(response).not_to have_gitlab_http_status(:moved_permanently)
end
end
@@ -579,13 +579,13 @@ describe GroupsController do
it 'does not 404' do
post :update, params: { id: group.to_param.upcase, group: { path: 'new_path' } }
- expect(response).not_to have_gitlab_http_status(404)
+ expect(response).not_to have_gitlab_http_status(:not_found)
end
it 'does not redirect to the correct casing' do
post :update, params: { id: group.to_param.upcase, group: { path: 'new_path' } }
- expect(response).not_to have_gitlab_http_status(301)
+ expect(response).not_to have_gitlab_http_status(:moved_permanently)
end
end
@@ -595,7 +595,7 @@ describe GroupsController do
it 'returns not found' do
post :update, params: { id: redirect_route.path, group: { path: 'new_path' } }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -605,13 +605,13 @@ describe GroupsController do
it 'does not 404' do
delete :destroy, params: { id: group.to_param.upcase }
- expect(response).not_to have_gitlab_http_status(404)
+ expect(response).not_to have_gitlab_http_status(:not_found)
end
it 'does not redirect to the correct casing' do
delete :destroy, params: { id: group.to_param.upcase }
- expect(response).not_to have_gitlab_http_status(301)
+ expect(response).not_to have_gitlab_http_status(:moved_permanently)
end
end
@@ -621,7 +621,7 @@ describe GroupsController do
it 'returns not found' do
delete :destroy, params: { id: redirect_route.path }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -718,7 +718,7 @@ describe GroupsController do
end
it 'is denied' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -780,13 +780,13 @@ describe GroupsController do
it 'is successful' do
get :show, params: { id: group.to_param }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'does not allow other formats' do
get :show, params: { id: group.to_param }, format: :atom
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -794,7 +794,7 @@ describe GroupsController do
it 'is successful' do
get :edit, params: { id: group.to_param }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -802,7 +802,7 @@ describe GroupsController do
it 'is successful' do
get :new
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -811,7 +811,7 @@ describe GroupsController do
get :index
# Redirects to the dashboard
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -835,7 +835,7 @@ describe GroupsController do
it 'deletes the group' do
delete :destroy, params: { id: group.to_param }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
diff --git a/spec/controllers/health_check_controller_spec.rb b/spec/controllers/health_check_controller_spec.rb
index cbcda5d0dc7..d1de669ad43 100644
--- a/spec/controllers/health_check_controller_spec.rb
+++ b/spec/controllers/health_check_controller_spec.rb
@@ -101,7 +101,7 @@ describe HealthCheckController, :request_store do
it 'supports failure plaintext response' do
get :index
- expect(response).to have_gitlab_http_status(500)
+ expect(response).to have_gitlab_http_status(:internal_server_error)
expect(response.content_type).to eq 'text/plain'
expect(response.body).to include('The server is on fire')
end
@@ -109,7 +109,7 @@ describe HealthCheckController, :request_store do
it 'supports failure json response' do
get :index, format: :json
- expect(response).to have_gitlab_http_status(500)
+ expect(response).to have_gitlab_http_status(:internal_server_error)
expect(response.content_type).to eq 'application/json'
expect(json_response['healthy']).to be false
expect(json_response['message']).to include('The server is on fire')
@@ -118,7 +118,7 @@ describe HealthCheckController, :request_store do
it 'supports failure xml response' do
get :index, format: :xml
- expect(response).to have_gitlab_http_status(500)
+ expect(response).to have_gitlab_http_status(:internal_server_error)
expect(response.content_type).to eq 'application/xml'
expect(xml_response['healthy']).to be false
expect(xml_response['message']).to include('The server is on fire')
@@ -127,7 +127,7 @@ describe HealthCheckController, :request_store do
it 'supports failure responses for specific checks' do
get :index, params: { checks: 'email' }, format: :json
- expect(response).to have_gitlab_http_status(500)
+ expect(response).to have_gitlab_http_status(:internal_server_error)
expect(response.content_type).to eq 'application/json'
expect(json_response['healthy']).to be false
expect(json_response['message']).to include('Email is on fire')
diff --git a/spec/controllers/help_controller_spec.rb b/spec/controllers/help_controller_spec.rb
index 03b6e85b653..e010cac2f73 100644
--- a/spec/controllers/help_controller_spec.rb
+++ b/spec/controllers/help_controller_spec.rb
@@ -111,7 +111,7 @@ describe HelpController do
it 'renders the raw file' do
get :show,
params: {
- path: 'user/project/img/labels_default_v12_1'
+ path: 'user/img/markdown_logo'
},
format: :png
expect(response).to be_successful
@@ -148,7 +148,7 @@ describe HelpController do
context 'for UI Development Kit' do
it 'renders found' do
get :ui
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/controllers/import/bitbucket_controller_spec.rb b/spec/controllers/import/bitbucket_controller_spec.rb
index d013bd6d427..ab4f6d5054c 100644
--- a/spec/controllers/import/bitbucket_controller_spec.rb
+++ b/spec/controllers/import/bitbucket_controller_spec.rb
@@ -123,7 +123,7 @@ describe Import::BitbucketController do
post :create, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns 422 response when the project could not be imported' do
@@ -133,9 +133,11 @@ describe Import::BitbucketController do
post :create, format: :json
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
+ it_behaves_like 'project import rate limiter'
+
context "when the repository owner is the Bitbucket user" do
context "when the Bitbucket user and GitLab user's usernames match" do
it "takes the current user's namespace" do
@@ -328,7 +330,7 @@ describe Import::BitbucketController do
post :create, params: { target_namespace: other_namespace.name }, format: :json
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
diff --git a/spec/controllers/import/bitbucket_server_controller_spec.rb b/spec/controllers/import/bitbucket_server_controller_spec.rb
index f30eace7d30..3a347368884 100644
--- a/spec/controllers/import/bitbucket_server_controller_spec.rb
+++ b/spec/controllers/import/bitbucket_server_controller_spec.rb
@@ -48,7 +48,7 @@ describe Import::BitbucketServerController do
post :create, params: { project: project_key, repository: repo_slug }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
context 'with project key with tildes' do
@@ -61,20 +61,20 @@ describe Import::BitbucketServerController do
post :create, params: { project: project_key, repository: repo_slug, format: :json }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
it 'returns an error when an invalid project key is used' do
post :create, params: { project: 'some&project' }
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
it 'returns an error when an invalid repository slug is used' do
post :create, params: { project: 'some-project', repository: 'try*this' }
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
it 'returns an error when the project cannot be found' do
@@ -82,7 +82,7 @@ describe Import::BitbucketServerController do
post :create, params: { project: project_key, repository: repo_slug }, format: :json
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
it 'returns an error when the project cannot be saved' do
@@ -92,7 +92,7 @@ describe Import::BitbucketServerController do
post :create, params: { project: project_key, repository: repo_slug }, format: :json
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
it "returns an error when the server can't be contacted" do
@@ -100,8 +100,10 @@ describe Import::BitbucketServerController do
post :create, params: { project: project_key, repository: repo_slug }, format: :json
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
+
+ it_behaves_like 'project import rate limiter'
end
describe 'POST configure' do
@@ -116,7 +118,7 @@ describe Import::BitbucketServerController do
expect(session[:bitbucket_server_username]).to be_nil
expect(session[:bitbucket_server_personal_access_token]).to be_nil
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(status_import_bitbucket_server_path)
end
@@ -126,7 +128,7 @@ describe Import::BitbucketServerController do
expect(session[:bitbucket_server_url]).to eq(url)
expect(session[:bitbucket_server_username]).to eq(username)
expect(session[:bitbucket_server_personal_access_token]).to eq(token)
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(status_import_bitbucket_server_path)
end
end
diff --git a/spec/controllers/import/fogbugz_controller_spec.rb b/spec/controllers/import/fogbugz_controller_spec.rb
index f7c813576aa..9a647b8caae 100644
--- a/spec/controllers/import/fogbugz_controller_spec.rb
+++ b/spec/controllers/import/fogbugz_controller_spec.rb
@@ -75,4 +75,8 @@ describe Import::FogbugzController do
expect(assigns(:repos)).to eq([])
end
end
+
+ describe 'POST create' do
+ it_behaves_like 'project import rate limiter'
+ end
end
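
Several of the importer specs above now include it_behaves_like 'project import rate limiter'. The shared example itself lives in spec/support and is not part of this diff; purely as a hypothetical sketch of the pattern it enforces, where the Gitlab::ApplicationRateLimiter stub and the asserted response are assumptions rather than the shared example's real body:

# Hypothetical sketch -- the real 'project import rate limiter' shared example
# ships with GitLab and may differ from this.
RSpec.shared_examples 'project import rate limiter' do
  let(:user) { create(:user) }

  before do
    sign_in(user)
    # Assumption: import endpoints are throttled through the application
    # rate limiter, so pretend the limit has already been hit.
    allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
  end

  it 'throttles repeated import requests' do
    post :create

    # Observable behaviour only; the real example may assert a specific
    # flash message or a 429 instead of a redirect.
    expect(response).to have_gitlab_http_status(:found)
  end
end
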
diff --git a/spec/controllers/import/gitea_controller_spec.rb b/spec/controllers/import/gitea_controller_spec.rb
index b7bdfcc3dc6..730e3f98c98 100644
--- a/spec/controllers/import/gitea_controller_spec.rb
+++ b/spec/controllers/import/gitea_controller_spec.rb
@@ -41,6 +41,8 @@ describe Import::GiteaController do
assign_host_url
end
end
+
+ it_behaves_like 'project import rate limiter'
end
describe "GET realtime_changes" do
diff --git a/spec/controllers/import/github_controller_spec.rb b/spec/controllers/import/github_controller_spec.rb
index 5675798ac33..40ea0bb3a44 100644
--- a/spec/controllers/import/github_controller_spec.rb
+++ b/spec/controllers/import/github_controller_spec.rb
@@ -22,7 +22,7 @@ describe Import::GithubController do
get :new
- expect(response).to have_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it "prompts for an access token if GitHub not configured" do
@@ -31,7 +31,7 @@ describe Import::GithubController do
get :new
- expect(response).to have_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
context 'when importing a CI/CD project' do
@@ -71,6 +71,8 @@ describe Import::GithubController do
describe "POST create" do
it_behaves_like 'a GitHub-ish import controller: POST create'
+
+ it_behaves_like 'project import rate limiter'
end
describe "GET realtime_changes" do
diff --git a/spec/controllers/import/gitlab_controller_spec.rb b/spec/controllers/import/gitlab_controller_spec.rb
index 6a3713a1212..96a8eb99d5c 100644
--- a/spec/controllers/import/gitlab_controller_spec.rb
+++ b/spec/controllers/import/gitlab_controller_spec.rb
@@ -86,7 +86,7 @@ describe Import::GitlabController do
post :create, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns 422 response when the project could not be imported' do
@@ -96,7 +96,7 @@ describe Import::GitlabController do
post :create, format: :json
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
context "when the repository owner is the GitLab.com user" do
@@ -279,9 +279,11 @@ describe Import::GitlabController do
post :create, params: { target_namespace: other_namespace.name }, format: :json
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
+
+ it_behaves_like 'project import rate limiter'
end
end
end
diff --git a/spec/controllers/import/gitlab_projects_controller_spec.rb b/spec/controllers/import/gitlab_projects_controller_spec.rb
index a3f6d8dcea2..a9aaefda0f6 100644
--- a/spec/controllers/import/gitlab_projects_controller_spec.rb
+++ b/spec/controllers/import/gitlab_projects_controller_spec.rb
@@ -17,14 +17,14 @@ describe Import::GitlabProjectsController do
post :create, params: { namespace_id: namespace.id, path: '/test', file: file }
expect(flash[:alert]).to start_with('Project could not be imported')
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'redirects with an error when a relative path is used' do
post :create, params: { namespace_id: namespace.id, path: '../test', file: file }
expect(flash[:alert]).to start_with('Project could not be imported')
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -33,8 +33,10 @@ describe Import::GitlabProjectsController do
post :create, params: { namespace_id: namespace.id, path: 'test', file: file }
expect(flash[:notice]).to include('is being imported')
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
+
+ it_behaves_like 'project import rate limiter'
end
end
diff --git a/spec/controllers/import/google_code_controller_spec.rb b/spec/controllers/import/google_code_controller_spec.rb
index 17be91c0bbb..3773f691ed0 100644
--- a/spec/controllers/import/google_code_controller_spec.rb
+++ b/spec/controllers/import/google_code_controller_spec.rb
@@ -58,4 +58,8 @@ describe Import::GoogleCodeController do
expect(assigns(:incompatible_repos)).to eq([@repo])
end
end
+
+ describe "POST create" do
+ it_behaves_like 'project import rate limiter'
+ end
end
diff --git a/spec/controllers/import/phabricator_controller_spec.rb b/spec/controllers/import/phabricator_controller_spec.rb
index a127e3cda3a..d29a06efbb5 100644
--- a/spec/controllers/import/phabricator_controller_spec.rb
+++ b/spec/controllers/import/phabricator_controller_spec.rb
@@ -18,7 +18,7 @@ describe Import::PhabricatorController do
stub_application_setting(import_sources: [])
end
- it { is_expected.to have_gitlab_http_status(404) }
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when the feature is disabled' do
@@ -27,7 +27,7 @@ describe Import::PhabricatorController do
stub_application_setting(import_sources: ['phabricator'])
end
- it { is_expected.to have_gitlab_http_status(404) }
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when the import is available' do
@@ -36,7 +36,7 @@ describe Import::PhabricatorController do
stub_application_setting(import_sources: ['phabricator'])
end
- it { is_expected.to have_gitlab_http_status(200) }
+ it { is_expected.to have_gitlab_http_status(:ok) }
end
end
@@ -88,5 +88,7 @@ describe Import::PhabricatorController do
expect { post_create }.not_to change { current_user.namespace.projects.reload.size }
end
end
+
+ it_behaves_like 'project import rate limiter'
end
end
diff --git a/spec/controllers/invites_controller_spec.rb b/spec/controllers/invites_controller_spec.rb
index ac0adcd06a3..9daaa258aa2 100644
--- a/spec/controllers/invites_controller_spec.rb
+++ b/spec/controllers/invites_controller_spec.rb
@@ -17,7 +17,7 @@ describe InvitesController do
get :accept, params: { id: token }
member.reload
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(member.user).to eq(user)
expect(flash[:notice]).to include 'You have been granted'
end
@@ -28,7 +28,7 @@ describe InvitesController do
get :decline, params: { id: token }
expect {member.reload}.to raise_error ActiveRecord::RecordNotFound
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(flash[:notice]).to include 'You have declined the invitation to join'
end
end
diff --git a/spec/controllers/notification_settings_controller_spec.rb b/spec/controllers/notification_settings_controller_spec.rb
index 46328148eff..7b19c67cad3 100644
--- a/spec/controllers/notification_settings_controller_spec.rb
+++ b/spec/controllers/notification_settings_controller_spec.rb
@@ -125,7 +125,7 @@ describe NotificationSettingsController do
notification_setting: { level: :participating }
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -195,7 +195,7 @@ describe NotificationSettingsController do
notification_setting: { level: :participating }
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/oauth/applications_controller_spec.rb b/spec/controllers/oauth/applications_controller_spec.rb
index 270a2fcc1d6..09f8ad4332d 100644
--- a/spec/controllers/oauth/applications_controller_spec.rb
+++ b/spec/controllers/oauth/applications_controller_spec.rb
@@ -4,42 +4,93 @@ require 'spec_helper'
describe Oauth::ApplicationsController do
let(:user) { create(:user) }
+ let(:application) { create(:oauth_application, owner: user) }
context 'project members' do
before do
sign_in(user)
end
- describe 'GET #index' do
- it 'shows list of applications' do
- get :index
-
- expect(response).to have_gitlab_http_status(200)
+ shared_examples 'redirects to login page when the user is not signed in' do
+ before do
+ sign_out(user)
end
- it 'redirects back to profile page if OAuth applications are disabled' do
- disable_user_oauth
+ it { is_expected.to redirect_to(new_user_session_path) }
+ end
+
+ describe 'GET #new' do
+ subject { get :new }
+
+ it { is_expected.to have_gitlab_http_status(:ok) }
+
+ it_behaves_like 'redirects to login page when the user is not signed in'
+ end
+
+ describe 'DELETE #destroy' do
+ subject { delete :destroy, params: { id: application.id } }
+
+ it { is_expected.to redirect_to(oauth_applications_url) }
+
+ it_behaves_like 'redirects to login page when the user is not signed in'
+ end
+
+ describe 'GET #edit' do
+ subject { get :edit, params: { id: application.id } }
+
+ it { is_expected.to have_gitlab_http_status(:ok) }
+
+ it_behaves_like 'redirects to login page when the user is not signed in'
+ end
+
+ describe 'PUT #update' do
+ subject { put :update, params: { id: application.id, doorkeeper_application: { name: 'application' } } }
+
+ it { is_expected.to redirect_to(oauth_application_url(application)) }
+
+ it_behaves_like 'redirects to login page when the user is not signed in'
+ end
+
+ describe 'GET #show' do
+ subject { get :show, params: { id: application.id } }
+
+ it { is_expected.to have_gitlab_http_status(:ok) }
+
+ it_behaves_like 'redirects to login page when the user is not signed in'
+ end
+
+ describe 'GET #index' do
+ subject { get :index }
+
+ it { is_expected.to have_gitlab_http_status(:ok) }
- get :index
+ context 'when OAuth applications are disabled' do
+ before do
+ disable_user_oauth
+ end
- expect(response).to have_gitlab_http_status(200)
+ it { is_expected.to have_gitlab_http_status(:ok) }
end
+
+ it_behaves_like 'redirects to login page when the user is not signed in'
end
describe 'POST #create' do
+ subject { post :create, params: oauth_params }
+
it 'creates an application' do
- post :create, params: oauth_params
+ subject
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(oauth_application_path(Doorkeeper::Application.last))
end
it 'redirects back to profile page if OAuth applications are disabled' do
disable_user_oauth
- post :create, params: oauth_params
+ subject
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(profile_path)
end
@@ -59,6 +110,8 @@ describe Oauth::ApplicationsController do
expect(response.body).to include 'Redirect URI is forbidden by the server'
end
end
+
+ it_behaves_like 'redirects to login page when the user is not signed in'
end
end
diff --git a/spec/controllers/oauth/authorizations_controller_spec.rb b/spec/controllers/oauth/authorizations_controller_spec.rb
index 41f7684051e..1b4bebd9707 100644
--- a/spec/controllers/oauth/authorizations_controller_spec.rb
+++ b/spec/controllers/oauth/authorizations_controller_spec.rb
@@ -23,7 +23,7 @@ describe Oauth::AuthorizationsController do
it 'returns 200 code and renders error view' do
get :new
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('doorkeeper/authorizations/error')
end
end
@@ -34,7 +34,7 @@ describe Oauth::AuthorizationsController do
it 'returns 200 code and renders view' do
get :new, params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('doorkeeper/authorizations/new')
end
@@ -45,7 +45,7 @@ describe Oauth::AuthorizationsController do
get :new, params: params
expect(request.session['user_return_to']).to be_nil
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
context 'when there is already an access token for the application' do
@@ -62,7 +62,7 @@ describe Oauth::AuthorizationsController do
get :new, params: params
expect(request.session['user_return_to']).to be_nil
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
diff --git a/spec/controllers/oauth/token_info_controller_spec.rb b/spec/controllers/oauth/token_info_controller_spec.rb
new file mode 100644
index 00000000000..35ad0dcf98c
--- /dev/null
+++ b/spec/controllers/oauth/token_info_controller_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Oauth::TokenInfoController do
+ describe '#show' do
+ context 'when the user is not authenticated' do
+ it 'responds with a 401' do
+ get :show
+
+ expect(response.status).to eq 401
+ expect(JSON.parse(response.body)).to include('error' => 'invalid_request')
+ end
+ end
+
+ context 'when the request is valid' do
+ let(:application) { create(:oauth_application, scopes: 'api') }
+ let(:access_token) do
+ create(:oauth_access_token, expires_in: 5.minutes, application: application)
+ end
+
+ it 'responds with the token info' do
+ get :show, params: { access_token: access_token.token }
+
+ expect(response.status).to eq 200
+ expect(JSON.parse(response.body)).to eq(
+ 'scope' => %w[api],
+ 'scopes' => %w[api],
+ 'created_at' => access_token.created_at.to_i,
+ 'expires_in' => access_token.expires_in,
+ 'application' => { 'uid' => application.uid },
+ 'resource_owner_id' => access_token.resource_owner_id,
+ 'expires_in_seconds' => access_token.expires_in
+ )
+ end
+ end
+
+ context 'when the doorkeeper_token is not recognised' do
+ it 'responds with a 401' do
+ get :show, params: { access_token: 'unknown_token' }
+
+ expect(response.status).to eq 401
+ expect(JSON.parse(response.body)).to include('error' => 'invalid_request')
+ end
+ end
+
+ context 'when the token is expired' do
+ let(:access_token) do
+ create(:oauth_access_token, created_at: 2.days.ago, expires_in: 10.minutes)
+ end
+
+ it 'responds with a 401' do
+ get :show, params: { access_token: access_token.token }
+
+ expect(response.status).to eq 401
+ expect(JSON.parse(response.body)).to include('error' => 'invalid_request')
+ end
+ end
+
+ context 'when the token is revoked' do
+ let(:access_token) { create(:oauth_access_token, revoked_at: 2.days.ago) }
+
+ it 'responds with a 401' do
+ get :show, params: { access_token: access_token.token }
+
+ expect(response.status).to eq 401
+ expect(JSON.parse(response.body)).to include('error' => 'invalid_request')
+ end
+ end
+ end
+end
diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb
index 8b92976252c..71cdba12147 100644
--- a/spec/controllers/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/omniauth_callbacks_controller_spec.rb
@@ -172,7 +172,7 @@ describe OmniauthCallbacksController, type: :controller, do_not_mock_admin_mode:
it 'returns 403' do
post provider
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
diff --git a/spec/controllers/passwords_controller_spec.rb b/spec/controllers/passwords_controller_spec.rb
index bf9680329ce..3ec8e347659 100644
--- a/spec/controllers/passwords_controller_spec.rb
+++ b/spec/controllers/passwords_controller_spec.rb
@@ -15,7 +15,7 @@ describe PasswordsController do
post :create
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(flash[:alert]).to eq _('Password authentication is unavailable.')
end
end
diff --git a/spec/controllers/profiles/accounts_controller_spec.rb b/spec/controllers/profiles/accounts_controller_spec.rb
index f481b5078f2..518ea4e5c48 100644
--- a/spec/controllers/profiles/accounts_controller_spec.rb
+++ b/spec/controllers/profiles/accounts_controller_spec.rb
@@ -13,7 +13,7 @@ describe Profiles::AccountsController do
it 'renders 404 if someone tries to unlink a non existent provider' do
delete :unlink, params: { provider: 'github' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
[:saml, :cas3].each do |provider|
@@ -25,7 +25,7 @@ describe Profiles::AccountsController do
delete :unlink, params: { provider: provider.to_s }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(user.reload.identities).to include(identity)
end
end
@@ -40,7 +40,7 @@ describe Profiles::AccountsController do
delete :unlink, params: { provider: provider.to_s }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(user.reload.identities).not_to include(identity)
end
end
diff --git a/spec/controllers/profiles/notifications_controller_spec.rb b/spec/controllers/profiles/notifications_controller_spec.rb
index ede68744ac6..47d6f11fecf 100644
--- a/spec/controllers/profiles/notifications_controller_spec.rb
+++ b/spec/controllers/profiles/notifications_controller_spec.rb
@@ -22,8 +22,8 @@ describe Profiles::NotificationsController do
end
context 'with groups that do not have notification preferences' do
- set(:group) { create(:group) }
- set(:subgroup) { create(:group, parent: group) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
before do
group.add_developer(user)
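
The hunk above replaces set with let_it_be from the test-prof gem, which memoizes records once per example group instead of once per example. A small standalone sketch of the semantics, assuming GitLab's factories and the test-prof recipe are loaded as in spec_helper:

require 'test_prof/recipes/rspec/let_it_be'

RSpec.describe 'group notification preferences' do
  # Created a single time before the whole group runs, then reused by every
  # example -- unlike let/let!, which builds fresh records per example.
  let_it_be(:group)    { create(:group) }
  let_it_be(:subgroup) { create(:group, parent: group) }

  it 'keeps the subgroup nested under the group' do
    expect(subgroup.parent).to eq(group)
  end
end
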
diff --git a/spec/controllers/profiles/preferences_controller_spec.rb b/spec/controllers/profiles/preferences_controller_spec.rb
index 77e7b32af25..98a9c3eaec6 100644
--- a/spec/controllers/profiles/preferences_controller_spec.rb
+++ b/spec/controllers/profiles/preferences_controller_spec.rb
@@ -47,6 +47,7 @@ describe Profiles::PreferencesController do
theme_id: '2',
first_day_of_week: '1',
preferred_language: 'jp',
+ tab_width: '5',
render_whitespace_in_code: 'true'
}.with_indifferent_access
diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb
index 265f941e146..91f3bfcfa40 100644
--- a/spec/controllers/profiles_controller_spec.rb
+++ b/spec/controllers/profiles_controller_spec.rb
@@ -87,7 +87,7 @@ describe ProfilesController, :request_store do
put :update, params: { user: { status: { message: 'Working hard!' } } }
expect(user.reload.status.message).to eq('Working hard!')
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
diff --git a/spec/controllers/projects/alerting/notifications_controller_spec.rb b/spec/controllers/projects/alerting/notifications_controller_spec.rb
new file mode 100644
index 00000000000..a56ac59215f
--- /dev/null
+++ b/spec/controllers/projects/alerting/notifications_controller_spec.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::Alerting::NotificationsController do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
+
+ describe 'POST #create' do
+ let(:service_response) { ServiceResponse.success }
+ let(:notify_service) { instance_double(Projects::Alerting::NotifyService, execute: service_response) }
+
+ around do |example|
+ ForgeryProtection.with_forgery_protection { example.run }
+ end
+
+ before do
+ allow(Projects::Alerting::NotifyService).to receive(:new).and_return(notify_service)
+ end
+
+ def make_request(body = {})
+ post :create, params: project_params, body: body.to_json, as: :json
+ end
+
+ context 'when notification service succeeds' do
+ let(:payload) do
+ {
+ title: 'Alert title',
+ hosts: 'https://gitlab.com'
+ }
+ end
+
+ let(:permitted_params) { ActionController::Parameters.new(payload).permit! }
+
+ it 'responds with ok' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'does not pass excluded parameters to the notify service' do
+ make_request(payload)
+
+ expect(Projects::Alerting::NotifyService)
+ .to have_received(:new)
+ .with(project, nil, permitted_params)
+ end
+ end
+
+ context 'when notification service fails' do
+ let(:service_response) { ServiceResponse.error(message: 'Unauthorized', http_status: 401) }
+
+ it 'responds with the service response' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'bearer token' do
+ context 'when set' do
+ it 'extracts bearer token' do
+ request.headers['HTTP_AUTHORIZATION'] = 'Bearer some token'
+
+ expect(notify_service).to receive(:execute).with('some token')
+
+ make_request
+ end
+
+ it 'passes nil when the token is not a bearer token' do
+ request.headers['HTTP_AUTHORIZATION'] = 'some token'
+
+ expect(notify_service).to receive(:execute).with(nil)
+
+ make_request
+ end
+ end
+
+ context 'when missing' do
+ it 'passes nil' do
+ expect(notify_service).to receive(:execute).with(nil)
+
+ make_request
+ end
+ end
+ end
+ end
+
+ def project_params(opts = {})
+ opts.reverse_merge(namespace_id: project.namespace, project_id: project)
+ end
+end
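
The bearer-token contexts in the new spec pin down one behaviour: the Authorization header is only forwarded to the notify service when it uses the Bearer scheme, otherwise nil is passed. A plain-Ruby illustration of that rule (not the controller's actual code):

# Illustration only; the controller's real extraction logic is not shown in this diff.
def alert_notification_token(authorization_header)
  return unless authorization_header

  scheme, token = authorization_header.split(' ', 2)
  token if scheme == 'Bearer'
end

alert_notification_token('Bearer some token') # => "some token"
alert_notification_token('some token')        # => nil
alert_notification_token(nil)                 # => nil
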
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 126bb1c9822..c59983d5138 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -138,14 +138,14 @@ describe Projects::ArtifactsController do
let(:filename) { job.artifacts_file.filename }
it 'sends the artifacts file' do
- # Notice the filename= is omitted from the disposition; this is because
- # Rails 5 will append this header in send_file
expect(controller).to receive(:send_file)
.with(
job.artifacts_file.file.path,
- hash_including(disposition: %Q(attachment; filename*=UTF-8''#{filename}))).and_call_original
+ hash_including(disposition: 'attachment', filename: filename)).and_call_original
download_artifact
+
+ expect(response.headers['Content-Disposition']).to eq(%Q(attachment; filename="#{filename}"; filename*=UTF-8''#{filename}))
end
end
@@ -156,7 +156,7 @@ describe Projects::ArtifactsController do
it 'returns 404' do
download_artifact(file_type: file_type)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -170,13 +170,13 @@ describe Projects::ArtifactsController do
end
it 'sends the codequality report' do
- # Notice the filename= is omitted from the disposition; this is because
- # Rails 5 will append this header in send_file
expect(controller).to receive(:send_file)
.with(job.job_artifacts_codequality.file.path,
- hash_including(disposition: %Q(attachment; filename*=UTF-8''#{filename}))).and_call_original
+ hash_including(disposition: 'attachment', filename: filename)).and_call_original
download_artifact(file_type: file_type)
+
+ expect(response.headers['Content-Disposition']).to eq(%Q(attachment; filename="#{filename}"; filename*=UTF-8''#{filename}))
end
end
@@ -236,7 +236,7 @@ describe Projects::ArtifactsController do
it 'renders the file view' do
get :file, params: { namespace_id: project.namespace, project_id: project, job_id: job, path: 'ci_artifacts.txt' }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -302,7 +302,7 @@ describe Projects::ArtifactsController do
it 'renders the file view' do
get :file, params: { namespace_id: private_project.namespace, project_id: private_project, job_id: job, path: 'ci_artifacts.txt' }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
@@ -317,7 +317,7 @@ describe Projects::ArtifactsController do
it 'serves the file using workhorse' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(send_data).to start_with('artifacts-entry:')
expect(params.keys).to eq(%w(Archive Entry))
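
The artifact download expectations above change in two ways: send_file now receives disposition: 'attachment' plus a separate filename:, and the resulting Content-Disposition header carries both the quoted filename= parameter and the RFC 5987 filename*= parameter. For simple ASCII filenames the asserted header value reduces to the following (illustration only; a real implementation percent-encodes the filename*= value):

# Illustration of the header shape asserted above, for plain ASCII filenames.
def expected_content_disposition(filename)
  %Q(attachment; filename="#{filename}"; filename*=UTF-8''#{filename})
end

expected_content_disposition('ci_artifacts.txt')
# => "attachment; filename=\"ci_artifacts.txt\"; filename*=UTF-8''ci_artifacts.txt"
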
diff --git a/spec/controllers/projects/autocomplete_sources_controller_spec.rb b/spec/controllers/projects/autocomplete_sources_controller_spec.rb
index fc8fe1ac4f6..d35192b2ccb 100644
--- a/spec/controllers/projects/autocomplete_sources_controller_spec.rb
+++ b/spec/controllers/projects/autocomplete_sources_controller_spec.rb
@@ -65,7 +65,7 @@ describe Projects::AutocompleteSourcesController do
get :milestones, format: :json, params: { namespace_id: group.path, project_id: project.path }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/projects/avatars_controller_spec.rb b/spec/controllers/projects/avatars_controller_spec.rb
index d463619ad0b..1d844c847d6 100644
--- a/spec/controllers/projects/avatars_controller_spec.rb
+++ b/spec/controllers/projects/avatars_controller_spec.rb
@@ -16,7 +16,7 @@ describe Projects::AvatarsController do
it 'shows 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -31,7 +31,7 @@ describe Projects::AvatarsController do
it 'sends the avatar' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.header['Content-Disposition']).to eq('inline')
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
diff --git a/spec/controllers/projects/blame_controller_spec.rb b/spec/controllers/projects/blame_controller_spec.rb
index dd7c0f45dc2..ac8394e3cd4 100644
--- a/spec/controllers/projects/blame_controller_spec.rb
+++ b/spec/controllers/projects/blame_controller_spec.rb
@@ -36,7 +36,7 @@ describe Projects::BlameController do
it 'redirects' do
expect(subject)
- .to redirect_to("/#{project.full_path}/tree/master")
+ .to redirect_to("/#{project.full_path}/-/tree/master")
end
end
diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb
index 78599935910..225538dcc45 100644
--- a/spec/controllers/projects/blob_controller_spec.rb
+++ b/spec/controllers/projects/blob_controller_spec.rb
@@ -33,7 +33,7 @@ describe Projects::BlobController do
it 'redirects' do
expect(subject)
- .to redirect_to("/#{project.full_path}/tree/master")
+ .to redirect_to("/#{project.full_path}/-/tree/master")
end
end
@@ -115,7 +115,7 @@ describe Projects::BlobController do
it 'redirects' do
expect(subject)
- .to redirect_to("/#{project.full_path}/tree/markdown/doc")
+ .to redirect_to("/#{project.full_path}/-/tree/markdown/doc")
end
end
end
@@ -232,7 +232,7 @@ describe Projects::BlobController do
end
it 'redirects to blob show' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -246,7 +246,7 @@ describe Projects::BlobController do
end
it 'redirects to blob show' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/controllers/projects/boards_controller_spec.rb b/spec/controllers/projects/boards_controller_spec.rb
index 543479d8dd5..ebfdb997974 100644
--- a/spec/controllers/projects/boards_controller_spec.rb
+++ b/spec/controllers/projects/boards_controller_spec.rb
@@ -39,7 +39,7 @@ describe Projects::BoardsController do
it 'returns a not found 404 response' do
list_boards
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.content_type).to eq 'text/html'
end
end
@@ -82,7 +82,7 @@ describe Projects::BoardsController do
it 'returns a not found 404 response' do
list_boards format: :json
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.content_type).to eq 'application/json'
end
end
@@ -94,7 +94,7 @@ describe Projects::BoardsController do
it 'returns a not found 404 response' do
list_boards
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -137,7 +137,7 @@ describe Projects::BoardsController do
it 'returns a not found 404 response' do
read_board board: board
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.content_type).to eq 'text/html'
end
end
@@ -174,7 +174,7 @@ describe Projects::BoardsController do
it 'returns a not found 404 response' do
read_board board: board, format: :json
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.content_type).to eq 'application/json'
end
end
@@ -186,7 +186,7 @@ describe Projects::BoardsController do
read_board board: another_board
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
diff --git a/spec/controllers/projects/branches_controller_spec.rb b/spec/controllers/projects/branches_controller_spec.rb
index ac39ac626c7..85d3044993e 100644
--- a/spec/controllers/projects/branches_controller_spec.rb
+++ b/spec/controllers/projects/branches_controller_spec.rb
@@ -38,7 +38,7 @@ describe Projects::BranchesController do
it 'redirects' do
expect(subject)
- .to redirect_to("/#{project.full_path}/tree/merge_branch")
+ .to redirect_to("/#{project.full_path}/-/tree/merge_branch")
end
end
@@ -48,7 +48,7 @@ describe Projects::BranchesController do
it 'redirects' do
expect(subject)
- .to redirect_to("/#{project.full_path}/tree/alert('merge');")
+ .to redirect_to("/#{project.full_path}/-/tree/alert('merge');")
end
end
@@ -93,7 +93,7 @@ describe Projects::BranchesController do
}
expect(subject)
- .to redirect_to("/#{project.full_path}/tree/1-feature-branch")
+ .to redirect_to("/#{project.full_path}/-/tree/1-feature-branch")
end
it 'posts a system note' do
@@ -217,7 +217,7 @@ describe Projects::BranchesController do
}
expect(response.location).to include(project_new_blob_path(project, branch))
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -238,7 +238,7 @@ describe Projects::BranchesController do
}
expect(response.location).to include(project_new_blob_path(project, branch))
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -289,7 +289,7 @@ describe Projects::BranchesController do
it 'returns a successful 200 response' do
create_branch name: 'my-branch', ref: 'master'
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns the created branch' do
@@ -303,7 +303,7 @@ describe Projects::BranchesController do
it 'returns an unprocessable entity 422 response' do
create_branch name: "<script>alert('merge');</script>", ref: "<script>alert('ref');</script>"
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
@@ -335,7 +335,7 @@ describe Projects::BranchesController do
project_id: project
}
- expect(response).to have_gitlab_http_status(303)
+ expect(response).to have_gitlab_http_status(:see_other)
end
end
@@ -361,28 +361,28 @@ describe Projects::BranchesController do
context "valid branch name, valid source" do
let(:branch) { "feature" }
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_gitlab_http_status(:ok) }
it { expect(response.body).to be_blank }
end
context "valid branch name with unencoded slashes" do
let(:branch) { "improve/awesome" }
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_gitlab_http_status(:ok) }
it { expect(response.body).to be_blank }
end
context "valid branch name with encoded slashes" do
let(:branch) { "improve%2Fawesome" }
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_gitlab_http_status(:ok) }
it { expect(response.body).to be_blank }
end
context "invalid branch name, valid ref" do
let(:branch) { "no-branch" }
- it { expect(response).to have_gitlab_http_status(404) }
+ it { expect(response).to have_gitlab_http_status(:not_found) }
it { expect(response.body).to be_blank }
end
end
@@ -398,7 +398,7 @@ describe Projects::BranchesController do
expect(json_response).to eql("message" => 'Branch was deleted')
end
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_gitlab_http_status(:ok) }
end
context 'valid branch name with unencoded slashes' do
@@ -408,7 +408,7 @@ describe Projects::BranchesController do
expect(json_response).to eql('message' => 'Branch was deleted')
end
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_gitlab_http_status(:ok) }
end
context "valid branch name with encoded slashes" do
@@ -418,7 +418,7 @@ describe Projects::BranchesController do
expect(json_response).to eql('message' => 'Branch was deleted')
end
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_gitlab_http_status(:ok) }
end
context 'invalid branch name, valid ref' do
@@ -428,7 +428,7 @@ describe Projects::BranchesController do
expect(json_response).to eql('message' => 'No such branch')
end
- it { expect(response).to have_gitlab_http_status(404) }
+ it { expect(response).to have_gitlab_http_status(:not_found) }
end
end
@@ -478,7 +478,7 @@ describe Projects::BranchesController do
it 'responds with status 404' do
destroy_all_merged
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -519,7 +519,7 @@ describe Projects::BranchesController do
state: 'all'
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -537,7 +537,7 @@ describe Projects::BranchesController do
state: 'all'
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -594,7 +594,7 @@ describe Projects::BranchesController do
names: %w[fix add-pdf-file branch-merged]
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq(
"fix" => { "behind" => 29, "ahead" => 2 },
"branch-merged" => { "behind" => 1, "ahead" => 0 },
@@ -612,7 +612,7 @@ describe Projects::BranchesController do
project_id: project
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to be > 1
end
@@ -629,7 +629,7 @@ describe Projects::BranchesController do
project_id: project
}
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['error']).to eq("Specify at least one and at most #{Kaminari.config.default_per_page} branch names")
end
@@ -642,7 +642,7 @@ describe Projects::BranchesController do
names: %w[fix add-pdf-file branch-merged]
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to be > 1
end
end
diff --git a/spec/controllers/projects/ci/lints_controller_spec.rb b/spec/controllers/projects/ci/lints_controller_spec.rb
index 8fb39f734b6..f45b1d7ddd8 100644
--- a/spec/controllers/projects/ci/lints_controller_spec.rb
+++ b/spec/controllers/projects/ci/lints_controller_spec.rb
@@ -39,7 +39,7 @@ describe Projects::Ci::LintsController do
end
it 'responds with 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -115,7 +115,7 @@ describe Projects::Ci::LintsController do
end
it 'responds with 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/projects/clusters/applications_controller_spec.rb b/spec/controllers/projects/clusters/applications_controller_spec.rb
index 70b34f071c8..8dcbf575627 100644
--- a/spec/controllers/projects/clusters/applications_controller_spec.rb
+++ b/spec/controllers/projects/clusters/applications_controller_spec.rb
@@ -42,7 +42,7 @@ describe Projects::Clusters::ApplicationsController do
expect(ClusterInstallAppWorker).to receive(:perform_async).with(application, anything).once
expect { subject }.to change { current_application.count }
- expect(response).to have_http_status(:no_content)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(cluster.application_helm).to be_scheduled
end
@@ -53,7 +53,7 @@ describe Projects::Clusters::ApplicationsController do
it 'returns 404' do
expect { subject }.not_to change { current_application.count }
- expect(response).to have_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -61,7 +61,7 @@ describe Projects::Clusters::ApplicationsController do
let(:application) { 'unknown-app' }
it 'returns 404' do
- is_expected.to have_http_status(:not_found)
+ is_expected.to have_gitlab_http_status(:not_found)
end
end
@@ -71,7 +71,7 @@ describe Projects::Clusters::ApplicationsController do
end
it 'returns 400' do
- is_expected.to have_http_status(:bad_request)
+ is_expected.to have_gitlab_http_status(:bad_request)
end
end
end
@@ -108,7 +108,7 @@ describe Projects::Clusters::ApplicationsController do
it "schedules an application update" do
expect(ClusterPatchAppWorker).to receive(:perform_async).with(application.name, anything).once
- is_expected.to have_http_status(:no_content)
+ is_expected.to have_gitlab_http_status(:no_content)
expect(cluster.application_knative).to be_scheduled
end
@@ -119,13 +119,13 @@ describe Projects::Clusters::ApplicationsController do
cluster.destroy!
end
- it { is_expected.to have_http_status(:not_found) }
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when application is unknown' do
let(:application_name) { 'unknown-app' }
- it { is_expected.to have_http_status(:not_found) }
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when application is already scheduled' do
@@ -133,7 +133,7 @@ describe Projects::Clusters::ApplicationsController do
application.make_scheduled!
end
- it { is_expected.to have_http_status(:bad_request) }
+ it { is_expected.to have_gitlab_http_status(:bad_request) }
end
end
@@ -170,7 +170,7 @@ describe Projects::Clusters::ApplicationsController do
it "schedules an application update" do
expect(worker_class).to receive(:perform_async).with(application.name, application.id).once
- is_expected.to have_http_status(:no_content)
+ is_expected.to have_gitlab_http_status(:no_content)
expect(cluster.application_prometheus).to be_scheduled
end
@@ -181,13 +181,13 @@ describe Projects::Clusters::ApplicationsController do
cluster.destroy!
end
- it { is_expected.to have_http_status(:not_found) }
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when application is unknown' do
let(:application_name) { 'unknown-app' }
- it { is_expected.to have_http_status(:not_found) }
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when application is already scheduled' do
@@ -195,7 +195,7 @@ describe Projects::Clusters::ApplicationsController do
application.make_scheduled!
end
- it { is_expected.to have_http_status(:bad_request) }
+ it { is_expected.to have_gitlab_http_status(:bad_request) }
end
end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index 642932e2935..c6345a2153c 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -648,7 +648,7 @@ describe Projects::ClustersController do
go(format: :json)
cluster.reload
- expect(response).to have_http_status(:no_content)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(cluster.enabled).to be_falsey
expect(cluster.name).to eq('my-new-cluster-name')
expect(cluster).not_to be_managed
@@ -671,7 +671,7 @@ describe Projects::ClustersController do
it "rejects changes" do
go(format: :json)
- expect(response).to have_http_status(:bad_request)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
diff --git a/spec/controllers/projects/commit_controller_spec.rb b/spec/controllers/projects/commit_controller_spec.rb
index 95112cfeabe..c8ddd181d10 100644
--- a/spec/controllers/projects/commit_controller_spec.rb
+++ b/spec/controllers/projects/commit_controller_spec.rb
@@ -179,7 +179,7 @@ describe Projects::CommitController do
})
expect(response).not_to be_successful
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -236,7 +236,7 @@ describe Projects::CommitController do
})
expect(response).not_to be_successful
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -322,7 +322,7 @@ describe Projects::CommitController do
end
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -334,7 +334,7 @@ describe Projects::CommitController do
end
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -345,7 +345,7 @@ describe Projects::CommitController do
end
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -393,7 +393,7 @@ describe Projects::CommitController do
end
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/projects/compare_controller_spec.rb b/spec/controllers/projects/compare_controller_spec.rb
index 9afc46c4be9..d1a4a9a0058 100644
--- a/spec/controllers/projects/compare_controller_spec.rb
+++ b/spec/controllers/projects/compare_controller_spec.rb
@@ -108,7 +108,7 @@ describe Projects::CompareController do
show_request
expect(flash[:alert]).to eq('Invalid branch name')
- expect(response).to have_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -120,7 +120,7 @@ describe Projects::CompareController do
show_request
expect(flash[:alert]).to eq('Invalid branch name')
- expect(response).to have_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
@@ -164,7 +164,7 @@ describe Projects::CompareController do
end
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -176,7 +176,7 @@ describe Projects::CompareController do
end
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -187,7 +187,7 @@ describe Projects::CompareController do
end
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -197,7 +197,7 @@ describe Projects::CompareController do
end
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -281,7 +281,7 @@ describe Projects::CompareController do
context 'when the user has access to the project' do
render_views
- let(:signature_commit) { build(:commit, project: project, safe_message: "message", sha: 'signature_commit') }
+ let(:signature_commit) { project.commit_by(oid: '0b4bc9a49b562e85de7cc9e834518ea6828729b9') }
let(:non_signature_commit) { build(:commit, project: project, safe_message: "message", sha: 'non_signature_commit') }
before do
@@ -301,7 +301,7 @@ describe Projects::CompareController do
it 'returns only the commit with a signature' do
signatures_request
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
signatures = json_response['signatures']
expect(signatures.size).to eq(1)
@@ -318,7 +318,7 @@ describe Projects::CompareController do
it 'returns a 404' do
signatures_request
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -330,7 +330,7 @@ describe Projects::CompareController do
it 'returns no signatures' do
signatures_request
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['signatures']).to be_empty
end
end
@@ -342,7 +342,7 @@ describe Projects::CompareController do
it 'returns no signatures' do
signatures_request
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['signatures']).to be_empty
end
end
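
One change in the compare controller spec above is not a mechanical status swap: signature_commit is no longer built through the commit factory with a fake SHA but loaded from the test repository by OID, because only a real repository commit can carry a verifiable GPG signature for the signatures endpoint to return. A hedged sketch of the difference; the OID is the one used in the hunk, and the signature call is my reading of the commit API rather than something this patch shows:

    # Factory build: an in-memory commit, never signed
    unsigned = build(:commit, project: project, safe_message: "message", sha: 'non_signature_commit')

    # Repository lookup: the GPG-signed commit shipped with the test repository
    signed = project.commit_by(oid: '0b4bc9a49b562e85de7cc9e834518ea6828729b9')
    signed.signature # assumed: returns the signature record for signed commits, nil otherwise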
diff --git a/spec/controllers/projects/deploy_keys_controller_spec.rb b/spec/controllers/projects/deploy_keys_controller_spec.rb
index 2c7c99eabf6..25e3e8e37a9 100644
--- a/spec/controllers/projects/deploy_keys_controller_spec.rb
+++ b/spec/controllers/projects/deploy_keys_controller_spec.rb
@@ -122,7 +122,7 @@ describe Projects::DeployKeysController do
put :enable, params: { id: deploy_key.id, namespace_id: project.namespace, project_id: project }
end.not_to change { DeployKeysProject.count }
- expect(response).to have_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(new_user_session_path)
end
end
@@ -137,7 +137,7 @@ describe Projects::DeployKeysController do
put :enable, params: { id: deploy_key.id, namespace_id: project.namespace, project_id: project }
end.not_to change { DeployKeysProject.count }
- expect(response).to have_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -152,14 +152,14 @@ describe Projects::DeployKeysController do
end.to change { DeployKeysProject.count }.by(1)
expect(DeployKeysProject.where(project_id: project.id, deploy_key_id: deploy_key.id).count).to eq(1)
- expect(response).to have_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(namespace_project_settings_repository_path(anchor: 'js-deploy-keys-settings'))
end
it 'returns 404' do
put :enable, params: { id: 0, namespace_id: project.namespace, project_id: project }
- expect(response).to have_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -174,7 +174,7 @@ describe Projects::DeployKeysController do
end.to change { DeployKeysProject.count }.by(1)
expect(DeployKeysProject.where(project_id: project.id, deploy_key_id: deploy_key.id).count).to eq(1)
- expect(response).to have_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(namespace_project_settings_repository_path(anchor: 'js-deploy-keys-settings'))
end
end
@@ -192,7 +192,7 @@ describe Projects::DeployKeysController do
it 'redirects to login' do
put :disable, params: { id: deploy_key.id, namespace_id: project.namespace, project_id: project }
- expect(response).to have_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(new_user_session_path)
expect(DeployKey.find(deploy_key.id)).to eq(deploy_key)
end
@@ -206,7 +206,7 @@ describe Projects::DeployKeysController do
it 'returns 404' do
put :disable, params: { id: deploy_key.id, namespace_id: project.namespace, project_id: project }
- expect(response).to have_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(DeployKey.find(deploy_key.id)).to eq(deploy_key)
end
end
@@ -215,7 +215,7 @@ describe Projects::DeployKeysController do
it 'returns 302' do
put :disable, params: { id: deploy_key.id, namespace_id: project.namespace, project_id: project }
- expect(response).to have_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(namespace_project_settings_repository_path(anchor: 'js-deploy-keys-settings'))
expect { DeployKey.find(deploy_key.id) }.to raise_error(ActiveRecord::RecordNotFound)
@@ -224,7 +224,7 @@ describe Projects::DeployKeysController do
it 'returns 404' do
put :disable, params: { id: 0, namespace_id: project.namespace, project_id: project }
- expect(response).to have_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -238,7 +238,7 @@ describe Projects::DeployKeysController do
put :disable, params: { id: deploy_key.id, namespace_id: project.namespace, project_id: project }
end.to change { DeployKey.count }.by(-1)
- expect(response).to have_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(namespace_project_settings_repository_path(anchor: 'js-deploy-keys-settings'))
expect { DeployKey.find(deploy_key.id) }.to raise_error(ActiveRecord::RecordNotFound)
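
The deploy keys hunks also replace Rails' stock have_http_status with have_gitlab_http_status. Both accept the same integers and symbols; the practical difference is that the GitLab matcher is written to give a more informative failure message (including the response payload) when a status assertion fails. For orientation only, a toy matcher in the same spirit -- an illustrative sketch under that assumption, not GitLab's actual implementation:

    RSpec::Matchers.define :have_gitlab_http_status do |expected|
      match do |response|
        code = expected.is_a?(Symbol) ? Rack::Utils::SYMBOL_TO_STATUS_CODE.fetch(expected) : expected

        response.status == code
      end

      failure_message do |response|
        "expected the response to have status #{expected}, got #{response.status}:\n#{response.body}"
      end
    end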
diff --git a/spec/controllers/projects/deployments_controller_spec.rb b/spec/controllers/projects/deployments_controller_spec.rb
index b360319c6b1..37dcfa78772 100644
--- a/spec/controllers/projects/deployments_controller_spec.rb
+++ b/spec/controllers/projects/deployments_controller_spec.rb
@@ -59,9 +59,9 @@ describe Projects::DeploymentsController do
end
end
- it 'returns a empty response 204 resposne' do
+ it 'returns an empty 204 response' do
get :metrics, params: deployment_params(id: deployment.to_param)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(response.body).to eq('')
end
end
@@ -100,7 +100,7 @@ describe Projects::DeploymentsController do
get :metrics, params: deployment_params(id: failed_deployment.to_param)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -129,7 +129,7 @@ describe Projects::DeploymentsController do
it 'returns an empty 204 response' do
get :additional_metrics, params: deployment_params(id: deployment.to_param, format: :json)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(response.body).to eq('')
end
end
diff --git a/spec/controllers/projects/discussions_controller_spec.rb b/spec/controllers/projects/discussions_controller_spec.rb
index d59f76c1b32..b2e4a3b7b0d 100644
--- a/spec/controllers/projects/discussions_controller_spec.rb
+++ b/spec/controllers/projects/discussions_controller_spec.rb
@@ -27,7 +27,7 @@ describe Projects::DiscussionsController do
it 'returns 404' do
get :show, params: request_params, session: { format: :json }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -39,7 +39,7 @@ describe Projects::DiscussionsController do
it 'returns status 200' do
get :show, params: request_params, session: { format: :json }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns status 404 if MR does not exist' do
@@ -47,7 +47,7 @@ describe Projects::DiscussionsController do
get :show, params: request_params, session: { format: :json }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -60,7 +60,7 @@ describe Projects::DiscussionsController do
it 'returns status 200' do
get :show, params: request_params, session: { format: :json }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -74,7 +74,7 @@ describe Projects::DiscussionsController do
it "returns status 404" do
post :resolve, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -91,7 +91,7 @@ describe Projects::DiscussionsController do
it "returns status 404" do
post :resolve, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -120,7 +120,7 @@ describe Projects::DiscussionsController do
it "returns status 200" do
post :resolve, params: request_params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it "renders discussion with serializer" do
@@ -157,7 +157,7 @@ describe Projects::DiscussionsController do
it "returns status 404" do
delete :unresolve, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -174,7 +174,7 @@ describe Projects::DiscussionsController do
it "returns status 404" do
delete :unresolve, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -188,7 +188,7 @@ describe Projects::DiscussionsController do
it "returns status 200" do
delete :unresolve, params: request_params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
context "when vue_mr_discussions cookie is present" do
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index e7ce2cb0c2e..6c63b220322 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -179,7 +179,7 @@ describe Projects::EnvironmentsController do
params[:id] = 12345
get :show, params: params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -197,7 +197,7 @@ describe Projects::EnvironmentsController do
patch_params = environment_params.merge(environment: { external_url: 'https://git.gitlab.com' })
patch :update, params: patch_params
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -208,7 +208,7 @@ describe Projects::EnvironmentsController do
patch :stop, params: environment_params(format: :json)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -221,7 +221,7 @@ describe Projects::EnvironmentsController do
patch :stop, params: environment_params(format: :json)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq(
{ 'redirect_url' =>
project_job_url(project, action) })
@@ -235,7 +235,7 @@ describe Projects::EnvironmentsController do
patch :stop, params: environment_params(format: :json)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq(
{ 'redirect_url' =>
project_environment_url(project, environment) })
@@ -278,7 +278,7 @@ describe Projects::EnvironmentsController do
it 'responds with a status code 200' do
get :terminal, params: environment_params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'loads the terminals for the environment' do
@@ -295,7 +295,7 @@ describe Projects::EnvironmentsController do
it 'responds with a status code 404' do
get :terminal, params: environment_params(id: 666)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -321,7 +321,7 @@ describe Projects::EnvironmentsController do
get :terminal_websocket_authorize, params: environment_params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.headers["Content-Type"]).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(response.body).to eq('{"workhorse":"response"}')
end
@@ -331,7 +331,7 @@ describe Projects::EnvironmentsController do
it 'returns 404' do
get :terminal_websocket_authorize, params: environment_params(id: 666)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -386,7 +386,7 @@ describe Projects::EnvironmentsController do
get :metrics, params: environment_params(format: :json)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(json_response).to eq({})
end
end
@@ -428,7 +428,7 @@ describe Projects::EnvironmentsController do
it 'returns a metrics JSON document' do
additional_metrics(window_params)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(json_response).to eq({})
end
end
@@ -489,7 +489,7 @@ describe Projects::EnvironmentsController do
end
shared_examples_for '200 response' do
- let(:expected_keys) { %w(dashboard status) }
+ let(:expected_keys) { %w(dashboard status metrics_data) }
it_behaves_like 'correctly formatted response', :ok
end
diff --git a/spec/controllers/projects/error_tracking_controller_spec.rb b/spec/controllers/projects/error_tracking_controller_spec.rb
index 588c4b05528..22826938de2 100644
--- a/spec/controllers/projects/error_tracking_controller_spec.rb
+++ b/spec/controllers/projects/error_tracking_controller_spec.rb
@@ -301,7 +301,7 @@ describe Projects::ErrorTrackingController do
context 'update result is successful' do
before do
expect(issue_update_service).to receive(:execute)
- .and_return(status: :success, updated: true)
+ .and_return(status: :success, updated: true, closed_issue_iid: 1234)
update_issue
end
diff --git a/spec/controllers/projects/forks_controller_spec.rb b/spec/controllers/projects/forks_controller_spec.rb
index e351fb2b1f6..e6a68459a84 100644
--- a/spec/controllers/projects/forks_controller_spec.rb
+++ b/spec/controllers/projects/forks_controller_spec.rb
@@ -23,7 +23,7 @@ describe Projects::ForksController do
it 'returns with 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -167,7 +167,7 @@ describe Projects::ForksController do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -205,7 +205,7 @@ describe Projects::ForksController do
it 'responds with status 302' do
subject
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(namespace_project_import_path(user.namespace, project))
end
@@ -228,7 +228,7 @@ describe Projects::ForksController do
it 'passes continue params to the redirect' do
subject
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(namespace_project_import_path(user.namespace, project, continue: continue_params))
end
end
diff --git a/spec/controllers/projects/git_http_controller_spec.rb b/spec/controllers/projects/git_http_controller_spec.rb
deleted file mode 100644
index 4df53121aaa..00000000000
--- a/spec/controllers/projects/git_http_controller_spec.rb
+++ /dev/null
@@ -1,107 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Projects::GitHttpController do
- include GitHttpHelpers
-
- let_it_be(:project) { create(:project, :public, :repository) }
- let(:project_params) do
- {
- namespace_id: project.namespace.to_param,
- project_id: project.path + '.git'
- }
- end
- let(:params) { project_params }
-
- describe 'HEAD #info_refs' do
- it 'returns 403' do
- head :info_refs, params: { namespace_id: project.namespace.to_param, project_id: project.path + '.git' }
-
- expect(response.status).to eq(403)
- end
- end
-
- describe 'GET #info_refs' do
- let(:params) { project_params.merge(service: 'git-upload-pack') }
-
- it 'returns 401 for unauthenticated requests to public repositories when http protocol is disabled' do
- stub_application_setting(enabled_git_access_protocol: 'ssh')
-
- get :info_refs, params: params
-
- expect(response.status).to eq(401)
- end
-
- context 'with authorized user' do
- let(:user) { project.owner }
-
- before do
- request.headers.merge! auth_env(user.username, user.password, nil)
- end
-
- it 'returns 200' do
- get :info_refs, params: params
-
- expect(response.status).to eq(200)
- end
-
- it 'updates the user activity' do
- expect_next_instance_of(Users::ActivityService) do |activity_service|
- expect(activity_service).to receive(:execute)
- end
-
- get :info_refs, params: params
- end
- end
-
- context 'with exceptions' do
- before do
- allow(controller).to receive(:verify_workhorse_api!).and_return(true)
- end
-
- it 'returns 503 with GRPC Unavailable' do
- allow(controller).to receive(:access_check).and_raise(GRPC::Unavailable)
-
- get :info_refs, params: params
-
- expect(response.status).to eq(503)
- end
-
- it 'returns 503 with timeout error' do
- allow(controller).to receive(:access_check).and_raise(Gitlab::GitAccess::TimeoutError)
-
- get :info_refs, params: params
-
- expect(response.status).to eq(503)
- expect(response.body).to eq 'Gitlab::GitAccess::TimeoutError'
- end
- end
- end
-
- describe 'POST #git_upload_pack' do
- before do
- allow(controller).to receive(:authenticate_user).and_return(true)
- allow(controller).to receive(:verify_workhorse_api!).and_return(true)
- allow(controller).to receive(:access_check).and_return(nil)
- end
-
- after do
- post :git_upload_pack, params: params
- end
-
- context 'on a read-only instance' do
- before do
- allow(Gitlab::Database).to receive(:read_only?).and_return(true)
- end
-
- it 'does not update project statistics' do
- expect(ProjectDailyStatisticsWorker).not_to receive(:perform_async)
- end
- end
-
- it 'updates project statistics' do
- expect(ProjectDailyStatisticsWorker).to receive(:perform_async)
- end
- end
-end
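
The whole controller spec for Projects::GitHttpController is deleted above, and this patch does not show a replacement, so whether equivalent coverage moved elsewhere is not visible from this hunk alone. Purely as a hypothetical sketch of what a request-style version of the "http protocol disabled" case could look like, with the route, factories and helpers assumed from the deleted spec rather than from anything this patch adds:

    describe 'GET /:namespace/:project.git/info/refs', type: :request do
      let(:project) { create(:project, :public, :repository) }

      it 'returns 401 when the http protocol is disabled' do
        stub_application_setting(enabled_git_access_protocol: 'ssh')

        get "/#{project.full_path}.git/info/refs", params: { service: 'git-upload-pack' }

        expect(response).to have_gitlab_http_status(:unauthorized)
      end
    end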
diff --git a/spec/controllers/projects/group_links_controller_spec.rb b/spec/controllers/projects/group_links_controller_spec.rb
index d0cb3a74b78..f8271bc8e8a 100644
--- a/spec/controllers/projects/group_links_controller_spec.rb
+++ b/spec/controllers/projects/group_links_controller_spec.rb
@@ -33,11 +33,11 @@ describe Projects::GroupLinksController do
include_context 'link project to group'
it 'responds with status 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
- context 'when user has access to group he want to link project to' do
+ context 'when user has access to group they want to link project to' do
before do
group.add_developer(user)
end
@@ -55,7 +55,7 @@ describe Projects::GroupLinksController do
end
end
- context 'when user doers not have access to group he want to link to' do
+ context 'when user does not have access to group they want to link to' do
include_context 'link project to group'
it 'renders 404' do
diff --git a/spec/controllers/projects/hooks_controller_spec.rb b/spec/controllers/projects/hooks_controller_spec.rb
index 137296b4f19..f50ef2d804c 100644
--- a/spec/controllers/projects/hooks_controller_spec.rb
+++ b/spec/controllers/projects/hooks_controller_spec.rb
@@ -42,7 +42,7 @@ describe Projects::HooksController do
post :create, params: { namespace_id: project.namespace, project_id: project, hook: hook_params }
- expect(response).to have_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(ProjectHook.all.size).to eq(1)
expect(ProjectHook.first).to have_attributes(hook_params)
end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 945a56365c8..fb4d1cf59fe 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -24,7 +24,7 @@ describe Projects::IssuesController do
get :index, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -32,7 +32,7 @@ describe Projects::IssuesController do
it 'renders the "index" template' do
get :index, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:index)
end
end
@@ -51,14 +51,14 @@ describe Projects::IssuesController do
get :index, params: { namespace_id: project.namespace, project_id: project }
expect(response).to redirect_to(project_issues_path(new_project))
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'redirects from an old issue correctly' do
get :show, params: { namespace_id: project.namespace, project_id: project, id: issue }
expect(response).to redirect_to(project_issue_path(new_project, issue))
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
@@ -78,7 +78,7 @@ describe Projects::IssuesController do
it "returns index" do
get :index, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it "returns 301 if request path doesn't match project path" do
@@ -92,7 +92,7 @@ describe Projects::IssuesController do
project.save!
get :index, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -118,7 +118,7 @@ describe Projects::IssuesController do
get :index, params: params.merge(page: last_page + 1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:issues).size).to eq(2)
end
end
@@ -227,7 +227,7 @@ describe Projects::IssuesController do
get :new, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -235,7 +235,7 @@ describe Projects::IssuesController do
it 'renders the "new" template' do
get :new, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:new)
end
end
@@ -330,7 +330,7 @@ describe Projects::IssuesController do
[issue1, issue2, issue3].map(&:reload)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(issue1.relative_position)
.to be_between(issue2.relative_position, issue3.relative_position)
end
@@ -340,7 +340,7 @@ describe Projects::IssuesController do
it 'returns an unprocessable entity 422 response for invalid move ids' do
reorder_issue(issue1, move_after_id: 99, move_before_id: 999)
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
it 'returns a not found 404 response for invalid issue id' do
@@ -348,7 +348,7 @@ describe Projects::IssuesController do
move_after_id: issue2.id,
move_before_id: issue3.id)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns an unprocessable entity 422 response for issues not in group' do
@@ -359,7 +359,7 @@ describe Projects::IssuesController do
move_before_id: issue3.id,
group_full_path: another_group.full_path)
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
@@ -415,14 +415,14 @@ describe Projects::IssuesController do
it 'updates the issue' do
subject
- expect(response).to have_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
expect(issue.reload.title).to eq('New title')
end
context 'when Akismet is enabled and the issue is identified as spam' do
before do
stub_application_setting(recaptcha_enabled: true)
- expect_next_instance_of(AkismetService) do |akismet_service|
+ expect_next_instance_of(Spam::AkismetService) do |akismet_service|
expect(akismet_service).to receive_messages(spam?: true)
end
end
@@ -443,7 +443,7 @@ describe Projects::IssuesController do
it 'updates the issue' do
subject
- expect(response).to have_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
expect(issue.reload.title).to eq('New title')
end
end
@@ -458,7 +458,7 @@ describe Projects::IssuesController do
it 'responds with 404' do
subject
- expect(response).to have_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -505,7 +505,7 @@ describe Projects::IssuesController do
it 'returns 200' do
go(id: issue.iid)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -702,7 +702,7 @@ describe Projects::IssuesController do
context 'when an issue is not identified as spam' do
before do
- expect_next_instance_of(AkismetService) do |akismet_service|
+ expect_next_instance_of(Spam::AkismetService) do |akismet_service|
expect(akismet_service).to receive_messages(spam?: false)
end
end
@@ -715,7 +715,7 @@ describe Projects::IssuesController do
context 'when an issue is identified as spam' do
context 'when captcha is not verified' do
before do
- expect_next_instance_of(AkismetService) do |akismet_service|
+ expect_next_instance_of(Spam::AkismetService) do |akismet_service|
expect(akismet_service).to receive_messages(spam?: true)
end
end
@@ -749,7 +749,7 @@ describe Projects::IssuesController do
it 'returns 200 status' do
update_issue
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -769,7 +769,7 @@ describe Projects::IssuesController do
it 'returns 200 status' do
update_issue
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -785,7 +785,7 @@ describe Projects::IssuesController do
end
it 'returns 200 status' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'accepts an issue after recaptcha is verified' do
@@ -954,7 +954,7 @@ describe Projects::IssuesController do
before do
stub_feature_flags(allow_possible_spam: false)
- expect_next_instance_of(AkismetService) do |akismet_service|
+ expect_next_instance_of(Spam::AkismetService) do |akismet_service|
expect(akismet_service).to receive_messages(spam?: false)
end
end
@@ -971,7 +971,7 @@ describe Projects::IssuesController do
end
before do
- expect_next_instance_of(AkismetService) do |akismet_service|
+ expect_next_instance_of(Spam::AkismetService) do |akismet_service|
expect(akismet_service).to receive_messages(spam?: true)
end
end
@@ -1084,19 +1084,13 @@ describe Projects::IssuesController do
it 'creates a sentry issue' do
expect { subject }.to change(SentryIssue, :count)
end
-
- it 'with existing issue it will not create an issue' do
- post_new_issue(sentry_issue_attributes: { sentry_issue_identifier: 1234567 })
-
- expect { subject }.not_to change(Issue, :count)
- end
end
end
describe 'POST #mark_as_spam' do
context 'properly submits to Akismet' do
before do
- expect_next_instance_of(AkismetService) do |akismet_service|
+ expect_next_instance_of(Spam::AkismetService) do |akismet_service|
expect(akismet_service).to receive_messages(submit_spam: true)
end
expect_next_instance_of(ApplicationSetting) do |setting|
@@ -1129,9 +1123,10 @@ describe Projects::IssuesController do
sign_in(user)
end
- it "rejects a developer to destroy an issue" do
+ it "does not delete the issue, returning :not_found" do
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
- expect(response).to have_gitlab_http_status(404)
+
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1147,14 +1142,7 @@ describe Projects::IssuesController do
it "deletes the issue" do
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: issue.iid, destroy_confirm: true }
- expect(response).to have_gitlab_http_status(302)
- expect(controller).to set_flash[:notice].to(/The issue was successfully deleted\./)
- end
-
- it "deletes the issue" do
- delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: issue.iid, destroy_confirm: true }
-
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(controller).to set_flash[:notice].to(/The issue was successfully deleted\./)
end
@@ -1163,7 +1151,7 @@ describe Projects::IssuesController do
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(controller).to set_flash[:notice].to('Destroy confirmation not provided for issue')
end
@@ -1172,7 +1160,7 @@ describe Projects::IssuesController do
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: issue.iid, format: 'json' }
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response).to eq({ 'errors' => 'Destroy confirmation not provided for issue' })
end
@@ -1206,7 +1194,7 @@ describe Projects::IssuesController do
subject
end.to change { issue.award_emoji.count }.by(1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it "removes the already awarded emoji" do
@@ -1214,7 +1202,7 @@ describe Projects::IssuesController do
expect { subject }.to change { AwardEmoji.count }.by(-1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'marks Todos on the Issue as done' do
@@ -1250,7 +1238,7 @@ describe Projects::IssuesController do
create_merge_request
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'is not available for users who cannot create merge requests' do
@@ -1258,7 +1246,7 @@ describe Projects::IssuesController do
create_merge_request
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'target_project_id is set' do
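
Besides the status symbols, the issues controller spec above follows the rename of AkismetService to the namespaced Spam::AkismetService, stubbed through expect_next_instance_of in every spam-related context. The recurring pattern, pulled out here for readability -- the names come straight from the hunks, and the surrounding before block is assumed:

    before do
      stub_application_setting(recaptcha_enabled: true)

      # Stub the next Spam::AkismetService instance the controller builds
      expect_next_instance_of(Spam::AkismetService) do |akismet_service|
        expect(akismet_service).to receive_messages(spam?: true)
      end
    end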
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index 53c40683a5b..65f57deff1b 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -1183,7 +1183,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
get_terminal_websocket(id: job.id)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.headers["Content-Type"]).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(response.body).to eq('{"workhorse":"response"}')
end
@@ -1193,7 +1193,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
it 'returns 404' do
get_terminal_websocket(id: 1234)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/projects/labels_controller_spec.rb b/spec/controllers/projects/labels_controller_spec.rb
index aee017b211a..c6098f5934d 100644
--- a/spec/controllers/projects/labels_controller_spec.rb
+++ b/spec/controllers/projects/labels_controller_spec.rb
@@ -80,7 +80,7 @@ describe Projects::LabelsController do
it 'creates labels' do
post :generate, params: { namespace_id: personal_project.namespace.to_param, project_id: personal_project }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -88,7 +88,7 @@ describe Projects::LabelsController do
it 'creates labels' do
post :generate, params: { namespace_id: project.namespace.to_param, project_id: project }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
@@ -99,7 +99,7 @@ describe Projects::LabelsController do
toggle_subscription(label)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'allows user to toggle subscription on group labels' do
@@ -107,7 +107,7 @@ describe Projects::LabelsController do
toggle_subscription(group_label)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
def toggle_subscription(label)
@@ -123,7 +123,7 @@ describe Projects::LabelsController do
it 'denies access' do
post :promote, params: { namespace_id: project.namespace.to_param, project_id: project, id: label_1.to_param }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -180,7 +180,7 @@ describe Projects::LabelsController do
it 'does not redirect' do
get :index, params: { namespace_id: project.namespace, project_id: project.to_param }
- expect(response).not_to have_gitlab_http_status(301)
+ expect(response).not_to have_gitlab_http_status(:moved_permanently)
end
end
@@ -231,13 +231,13 @@ describe Projects::LabelsController do
it 'does not 404' do
post :generate, params: { namespace_id: project.namespace, project_id: project }
- expect(response).not_to have_gitlab_http_status(404)
+ expect(response).not_to have_gitlab_http_status(:not_found)
end
it 'does not redirect to the correct casing' do
post :generate, params: { namespace_id: project.namespace, project_id: project }
- expect(response).not_to have_gitlab_http_status(301)
+ expect(response).not_to have_gitlab_http_status(:moved_permanently)
end
end
@@ -247,7 +247,7 @@ describe Projects::LabelsController do
it 'returns not found' do
post :generate, params: { namespace_id: project.namespace, project_id: project.to_param + 'old' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/projects/mattermosts_controller_spec.rb b/spec/controllers/projects/mattermosts_controller_spec.rb
index 64440ed585d..693176d0cfc 100644
--- a/spec/controllers/projects/mattermosts_controller_spec.rb
+++ b/spec/controllers/projects/mattermosts_controller_spec.rb
@@ -25,7 +25,7 @@ describe Projects::MattermostsController do
project_id: project
})
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
diff --git a/spec/controllers/projects/merge_requests/content_controller_spec.rb b/spec/controllers/projects/merge_requests/content_controller_spec.rb
index 818cf794ec6..72eedc837a4 100644
--- a/spec/controllers/projects/merge_requests/content_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/content_controller_spec.rb
@@ -69,7 +69,7 @@ describe Projects::MergeRequests::ContentController do
it 'returns 404' do
do_request
- expect(response).to have_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -77,7 +77,7 @@ describe Projects::MergeRequests::ContentController do
it 'returns 404' do
do_request(:widget)
- expect(response).to have_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/projects/merge_requests/creations_controller_spec.rb b/spec/controllers/projects/merge_requests/creations_controller_spec.rb
index 1bbb80f9904..db25ad62019 100644
--- a/spec/controllers/projects/merge_requests/creations_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/creations_controller_spec.rb
@@ -62,7 +62,7 @@ describe Projects::MergeRequests::CreationsController do
expect(assigns(:commits)).to be_an Array
expect(total).to be > 0
expect(assigns(:hidden_commit_count)).to be > 0
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to match %r(<span class="commits-count">2 commits</span>)
end
end
@@ -76,7 +76,7 @@ describe Projects::MergeRequests::CreationsController do
expect(assigns(:commits)).to be_an CommitCollection
expect(total).to be > 0
expect(assigns(:hidden_commit_count)).to eq(0)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to match %r(<span class="commits-count">#{total} commits</span>)
end
end
@@ -173,7 +173,7 @@ describe Projects::MergeRequests::CreationsController do
end
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -196,7 +196,7 @@ describe Projects::MergeRequests::CreationsController do
}
expect(assigns(:commit)).not_to be_nil
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'does not load the commit when the user cannot read the project' do
@@ -211,7 +211,7 @@ describe Projects::MergeRequests::CreationsController do
}
expect(assigns(:commit)).to be_nil
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index d7e790360e3..88c14e03fd8 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -13,7 +13,7 @@ describe Projects::MergeRequests::DiffsController do
go(diff_id: unexistent_diff_id)
expect(MergeRequestDiff.find_by(id: unexistent_diff_id)).to be_nil
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -76,7 +76,7 @@ describe Projects::MergeRequests::DiffsController do
end
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -162,7 +162,7 @@ describe Projects::MergeRequests::DiffsController do
it 'returns 404 when not a member' do
go
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 when visibility level is not enough' do
@@ -170,7 +170,7 @@ describe Projects::MergeRequests::DiffsController do
go
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -178,7 +178,7 @@ describe Projects::MergeRequests::DiffsController do
it 'returns success' do
go(diff_id: merge_request.merge_request_diff.id)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'serializes diffs metadata with expected arguments' do
@@ -203,11 +203,46 @@ describe Projects::MergeRequests::DiffsController do
end
end
+ context 'with diff_head param passed' do
+ before do
+ allow(merge_request).to receive(:diffable_merge_ref?)
+ .and_return(diffable_merge_ref)
+ end
+
+ context 'the merge request can be compared with head' do
+ let(:diffable_merge_ref) { true }
+
+ it 'compares diffs with the head' do
+ MergeRequests::MergeToRefService.new(project, merge_request.author).execute(merge_request)
+
+ expect(CompareService).to receive(:new).with(
+ project, merge_request.merge_ref_head.sha
+ ).and_call_original
+
+ go(diff_head: true)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'the merge request cannot be compared with head' do
+ let(:diffable_merge_ref) { false }
+
+ it 'compares diffs with the base' do
+ expect(CompareService).not_to receive(:new)
+
+ go(diff_head: true)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
context 'with MR regular diff params' do
it 'returns success' do
go
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'serializes diffs metadata with expected arguments' do
@@ -236,7 +271,7 @@ describe Projects::MergeRequests::DiffsController do
it 'returns success' do
go(commit_id: merge_request.diff_head_sha)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'serializes diffs metadata with expected arguments' do
@@ -305,7 +340,7 @@ describe Projects::MergeRequests::DiffsController do
end
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -316,7 +351,7 @@ describe Projects::MergeRequests::DiffsController do
end
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -329,7 +364,7 @@ describe Projects::MergeRequests::DiffsController do
end
it 'returns a 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -351,7 +386,7 @@ describe Projects::MergeRequests::DiffsController do
it 'returns success' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -390,7 +425,7 @@ describe Projects::MergeRequests::DiffsController do
it 'returns 404' do
go
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -404,7 +439,7 @@ describe Projects::MergeRequests::DiffsController do
it 'returns 404' do
go
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index d5b1bfe0ac4..da26eb94fb0 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -44,6 +44,21 @@ describe Projects::MergeRequestsController do
get :show, params: params.merge(extra_params)
end
+ context 'when merge request is unchecked' do
+ before do
+ merge_request.mark_as_unchecked!
+ end
+
+ it 'checks mergeability asynchronously' do
+ expect_next_instance_of(MergeRequests::MergeabilityCheckService) do |service|
+ expect(service).not_to receive(:execute)
+ expect(service).to receive(:async_execute)
+ end
+
+ go
+ end
+ end
+
describe 'as html' do
context 'when diff files were cleaned' do
render_views
@@ -98,7 +113,7 @@ describe Projects::MergeRequestsController do
}
expect(response).to redirect_to(project_merge_request_path(new_project, merge_request))
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'redirects from an old merge request commits correctly' do
@@ -110,7 +125,7 @@ describe Projects::MergeRequestsController do
}
expect(response).to redirect_to(commits_project_merge_request_path(new_project, merge_request))
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
@@ -214,7 +229,7 @@ describe Projects::MergeRequestsController do
get_merge_requests(last_page)
expect(assigns(:merge_requests).current_page).to eq(last_page)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'does not redirect to external sites when provided a host field' do
@@ -291,7 +306,7 @@ describe Projects::MergeRequestsController do
it 'responds with 404' do
update_merge_request(title: 'New title')
- expect(response).to have_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -366,7 +381,7 @@ describe Projects::MergeRequestsController do
end
it 'returns 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -600,7 +615,7 @@ describe Projects::MergeRequestsController do
it "denies access to users unless they're admin or project owner" do
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context "when the user is owner" do
@@ -615,7 +630,7 @@ describe Projects::MergeRequestsController do
it "deletes the merge request" do
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid, destroy_confirm: true }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(controller).to set_flash[:notice].to(/The merge request was successfully deleted\./)
end
@@ -624,7 +639,7 @@ describe Projects::MergeRequestsController do
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(controller).to set_flash[:notice].to('Destroy confirmation not provided for merge request')
end
@@ -633,7 +648,7 @@ describe Projects::MergeRequestsController do
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid, format: 'json' }
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response).to eq({ 'errors' => 'Destroy confirmation not provided for merge request' })
end
@@ -776,6 +791,21 @@ describe Projects::MergeRequestsController do
end
end
+ describe 'GET context commits' do
+ it 'returns the commits for context commits' do
+ get :context_commits,
+ params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid
+ },
+ format: 'json'
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to be_an Array
+ end
+ end
+
describe 'GET exposed_artifacts' do
let(:merge_request) do
create(:merge_request,
@@ -827,7 +857,7 @@ describe Projects::MergeRequestsController do
it 'responds with a 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
@@ -840,7 +870,7 @@ describe Projects::MergeRequestsController do
it 'responds with a 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
@@ -883,7 +913,7 @@ describe Projects::MergeRequestsController do
it 'returns exposed artifacts' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['status']).to eq('parsed')
expect(json_response['data']).to eq([{
'job_name' => 'test',
@@ -936,7 +966,7 @@ describe Projects::MergeRequestsController do
it 'returns no content' do
subject
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(response.body).to be_empty
end
end
@@ -983,7 +1013,7 @@ describe Projects::MergeRequestsController do
it 'responds with a 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
@@ -996,7 +1026,7 @@ describe Projects::MergeRequestsController do
it 'responds with a 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.body).to be_blank
end
end
diff --git a/spec/controllers/projects/milestones_controller_spec.rb b/spec/controllers/projects/milestones_controller_spec.rb
index a3f55c1eed0..6b698c6da66 100644
--- a/spec/controllers/projects/milestones_controller_spec.rb
+++ b/spec/controllers/projects/milestones_controller_spec.rb
@@ -30,14 +30,14 @@ describe Projects::MilestonesController do
it 'shows milestone page' do
view_milestone
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/html'
end
it 'returns milestone json' do
view_milestone format: :json
- expect(response).to have_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response.content_type).to eq 'application/json'
end
end
@@ -171,7 +171,7 @@ describe Projects::MilestonesController do
it 'renders 404' do
post :promote, params: { namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -190,7 +190,7 @@ describe Projects::MilestonesController do
get :labels, params: { namespace_id: group.id, project_id: project.id, id: milestone.iid }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'application/json'
expect(json_response['html']).not_to include(label.title)
@@ -201,7 +201,7 @@ describe Projects::MilestonesController do
get :labels, params: { namespace_id: group.id, project_id: project.id, id: milestone.iid }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'application/json'
expect(json_response['html']).to include(label.title)
@@ -240,12 +240,12 @@ describe Projects::MilestonesController do
post :promote, params: { namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
- context '#participants' do
+ describe '#participants' do
render_views
context "when guest user" do
@@ -263,7 +263,7 @@ describe Projects::MilestonesController do
params = { namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid, format: :json }
get :participants, params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'application/json'
expect(json_response['html']).to include(issue_assignee.name)
end
@@ -278,7 +278,7 @@ describe Projects::MilestonesController do
params = { namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid, format: :json }
get :participants, params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'application/json'
expect(json_response['html']).not_to include(issue_assignee.name)
end
diff --git a/spec/controllers/projects/mirrors_controller_spec.rb b/spec/controllers/projects/mirrors_controller_spec.rb
index e14686970a1..4362febda5c 100644
--- a/spec/controllers/projects/mirrors_controller_spec.rb
+++ b/spec/controllers/projects/mirrors_controller_spec.rb
@@ -103,7 +103,7 @@ describe Projects::MirrorsController do
it "returns an error with a 400 response for URL #{url.inspect}" do
do_get(project, url)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to eq('message' => 'Invalid URL')
end
end
@@ -115,7 +115,7 @@ describe Projects::MirrorsController do
do_get(project)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
@@ -125,7 +125,7 @@ describe Projects::MirrorsController do
do_get(project)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to eq('message' => 'An error')
end
end
@@ -139,7 +139,7 @@ describe Projects::MirrorsController do
do_get(project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq('known_hosts' => ssh_key, 'fingerprints' => [ssh_fp.stringify_keys], 'host_keys_changed' => true)
end
end
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index abc9e728cb3..7c00af7b945 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -152,7 +152,7 @@ describe Projects::NotesController do
it 'renders 404' do
get :index, params: params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -246,7 +246,7 @@ describe Projects::NotesController do
context 'the project is publicly available' do
context 'for HTML' do
it "returns status 302" do
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -254,7 +254,7 @@ describe Projects::NotesController do
let(:extra_request_params) { { format: :json } }
it "returns status 200 for json" do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -265,7 +265,7 @@ describe Projects::NotesController do
let(:extra_request_params) { { format: :json } }
it "returns status 422 for json" do
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
@@ -278,7 +278,7 @@ describe Projects::NotesController do
let(:extra_request_params) { extra }
it "returns status 404" do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -296,7 +296,7 @@ describe Projects::NotesController do
it "returns status 302 (redirect)" do
create!
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -306,7 +306,7 @@ describe Projects::NotesController do
it "returns status 200" do
create!
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -316,7 +316,7 @@ describe Projects::NotesController do
it 'returns discussion JSON when the return_discussion param is set' do
create!
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to have_key 'discussion'
expect(json_response.dig('discussion', 'notes', 0, 'note')).to eq(request_params[:note][:note])
end
@@ -330,7 +330,7 @@ describe Projects::NotesController do
it 'includes changes in commands_changes ' do
create!
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['commands_changes']).to include('emoji_award', 'time_estimate', 'spend_time')
expect(json_response['commands_changes']).not_to include('target_project', 'title')
end
@@ -349,7 +349,7 @@ describe Projects::NotesController do
it 'does not include changes in commands_changes' do
create!
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['commands_changes']).not_to include('target_project', 'title')
end
end
@@ -363,7 +363,7 @@ describe Projects::NotesController do
it "prevents a non-member user from creating a note on one of the project's merge requests" do
create!
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'when the user is a team member' do
@@ -424,7 +424,7 @@ describe Projects::NotesController do
it 'returns an error to the user' do
create!
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -436,7 +436,7 @@ describe Projects::NotesController do
it "prevents a non-member user from creating a note on one of the project's merge requests" do
create!
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'when the user is a team member' do
@@ -467,7 +467,7 @@ describe Projects::NotesController do
it "returns status 302 for html" do
create!
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -531,7 +531,7 @@ describe Projects::NotesController do
it 'returns a 404', :sidekiq_might_not_need_inline do
create!
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -540,7 +540,7 @@ describe Projects::NotesController do
it 'returns a 404', :sidekiq_might_not_need_inline do
create!
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -550,7 +550,7 @@ describe Projects::NotesController do
it 'is successful' do
create!
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'creates the note' do
@@ -574,7 +574,7 @@ describe Projects::NotesController do
expect { post :create, params: request_params }.to change { issue.notes.count }.by(1)
.and change { locked_issue.notes.count }.by(0)
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -588,7 +588,7 @@ describe Projects::NotesController do
request_params[:target_id] = 9999
post :create, params: request_params.merge(format: :json)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -600,13 +600,13 @@ describe Projects::NotesController do
it 'returns 302 status for html' do
post :create, params: request_params
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'returns 200 status for json' do
post :create, params: request_params.merge(format: :json)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'creates a new note' do
@@ -618,7 +618,7 @@ describe Projects::NotesController do
it 'returns 404 status' do
post :create, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'does not create a new note' do
@@ -671,7 +671,7 @@ describe Projects::NotesController do
}
}
expect { put :update, params: request_params }.not_to change { note.reload.note }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -695,7 +695,7 @@ describe Projects::NotesController do
it "returns status 200 for html" do
delete :destroy, params: request_params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it "deletes the note" do
@@ -712,7 +712,7 @@ describe Projects::NotesController do
it "returns status 404" do
delete :destroy, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -732,7 +732,7 @@ describe Projects::NotesController do
subject
end.to change { note.award_emoji.count }.by(1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it "removes the already awarded emoji" do
@@ -740,7 +740,7 @@ describe Projects::NotesController do
expect { subject }.to change { AwardEmoji.count }.by(-1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'marks Todos on the Noteable as done' do
@@ -766,7 +766,7 @@ describe Projects::NotesController do
it "returns status 404" do
post :resolve, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -783,7 +783,7 @@ describe Projects::NotesController do
it "returns status 404" do
post :resolve, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -812,7 +812,7 @@ describe Projects::NotesController do
it "returns status 200" do
post :resolve, params: request_params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -829,7 +829,7 @@ describe Projects::NotesController do
it "returns status 404" do
delete :unresolve, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -846,7 +846,7 @@ describe Projects::NotesController do
it "returns status 404" do
delete :unresolve, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -860,7 +860,7 @@ describe Projects::NotesController do
it "returns status 200" do
delete :unresolve, params: request_params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
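
The recurring change in the hunks above and below swaps numeric HTTP status codes for their symbolic names inside have_gitlab_http_status. Both forms assert the same response code; a minimal sketch of the mapping, assuming only Rack's standard status table rather than the matcher's internals:

require 'rack/utils'

# Symbol names resolve to the same integers the old expectations used, so
# have_gitlab_http_status(:ok) and have_gitlab_http_status(200) check the same
# thing; the symbolic form just reads better in spec output.
{ ok: 200, no_content: 204, found: 302, see_other: 303, not_modified: 304,
  forbidden: 403, not_found: 404, too_many_requests: 429 }.each do |name, code|
  raise "unexpected mapping for #{name}" unless Rack::Utils.status_code(name) == code
end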
diff --git a/spec/controllers/projects/pages_controller_spec.rb b/spec/controllers/projects/pages_controller_spec.rb
index c07619465bf..102c0b6b048 100644
--- a/spec/controllers/projects/pages_controller_spec.rb
+++ b/spec/controllers/projects/pages_controller_spec.rb
@@ -23,7 +23,7 @@ describe Projects::PagesController do
it 'returns 200 status' do
get :show, params: request_params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
context 'when the project is in a subgroup' do
@@ -33,7 +33,7 @@ describe Projects::PagesController do
it 'returns a 200 status code' do
get :show, params: request_params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -42,7 +42,7 @@ describe Projects::PagesController do
it 'returns 302 status' do
delete :destroy, params: request_params
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
context 'when user is developer' do
@@ -53,7 +53,7 @@ describe Projects::PagesController do
it 'returns 404 status' do
delete :destroy, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -67,7 +67,7 @@ describe Projects::PagesController do
it 'returns 404 status' do
get :show, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -75,7 +75,7 @@ describe Projects::PagesController do
it 'returns 404 status' do
delete :destroy, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/projects/pages_domains_controller_spec.rb b/spec/controllers/projects/pages_domains_controller_spec.rb
index ef6a193e677..ef5e831d26c 100644
--- a/spec/controllers/projects/pages_domains_controller_spec.rb
+++ b/spec/controllers/projects/pages_domains_controller_spec.rb
@@ -35,7 +35,7 @@ describe Projects::PagesDomainsController do
it "displays to the 'show' page" do
make_request
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('show')
end
@@ -47,7 +47,7 @@ describe Projects::PagesDomainsController do
it 'renders 404 page' do
make_request
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -56,7 +56,7 @@ describe Projects::PagesDomainsController do
it "displays the 'new' page" do
get(:new, params: request_params)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('new')
end
end
@@ -78,7 +78,7 @@ describe Projects::PagesDomainsController do
it "displays the 'show' page" do
get(:show, params: request_params.merge(id: pages_domain.domain))
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('show')
end
end
@@ -177,7 +177,7 @@ describe Projects::PagesDomainsController do
it 'returns a 404 response for an unknown domain' do
post :verify, params: request_params.merge(id: 'unknown-domain')
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -250,7 +250,7 @@ describe Projects::PagesDomainsController do
it 'returns 404 status' do
get(:show, params: request_params.merge(id: pages_domain.domain))
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -258,7 +258,7 @@ describe Projects::PagesDomainsController do
it 'returns 404 status' do
get :new, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -266,7 +266,7 @@ describe Projects::PagesDomainsController do
it "returns 404 status" do
post(:create, params: request_params.merge(pages_domain: pages_domain_params))
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -274,7 +274,7 @@ describe Projects::PagesDomainsController do
it "deletes the pages domain" do
delete(:destroy, params: request_params.merge(id: pages_domain.domain))
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/projects/pipeline_schedules_controller_spec.rb b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
index 60b802f1cab..72b282429e9 100644
--- a/spec/controllers/projects/pipeline_schedules_controller_spec.rb
+++ b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
@@ -396,7 +396,7 @@ describe Projects::PipelineSchedulesController do
post :play, params: { namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -407,7 +407,7 @@ describe Projects::PipelineSchedulesController do
post :play, params: { namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id }
expect(flash[:notice]).to start_with 'Successfully scheduled a pipeline to run'
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'prevents users from scheduling the same pipeline repeatedly' do
@@ -417,7 +417,7 @@ describe Projects::PipelineSchedulesController do
expect(flash.to_a.size).to eq(2)
expect(flash[:alert]).to eq _('You cannot play this scheduled pipeline at the moment. Please wait a minute.')
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -430,7 +430,7 @@ describe Projects::PipelineSchedulesController do
post :play, params: { namespace_id: project.namespace.to_param, project_id: project, id: protected_schedule.id }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -460,7 +460,7 @@ describe Projects::PipelineSchedulesController do
delete :destroy, params: { namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id }
end.to change { project.pipeline_schedules.count }.by(-1)
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index 4cc5b3cba7c..fd33f32e877 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -29,7 +29,7 @@ describe Projects::PipelinesController do
stub_feature_flags(ci_pipeline_persisted_stages: true)
end
- it 'returns serialized pipelines', :request_store do
+ it 'returns serialized pipelines' do
expect(::Gitlab::GitalyClient).to receive(:allow_ref_name_caching).and_call_original
get_pipelines_index_json
@@ -60,7 +60,6 @@ describe Projects::PipelinesController do
# There appears to be one extra query for Pipelines#has_warnings? for some reason
expect { get_pipelines_index_json }.not_to exceed_query_limit(control_count + 1)
-
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['pipelines'].count).to eq 10
end
@@ -90,11 +89,18 @@ describe Projects::PipelinesController do
end
it 'does not execute N+1 queries' do
- queries = ActiveRecord::QueryRecorder.new do
+ get_pipelines_index_json
+
+ control_count = ActiveRecord::QueryRecorder.new do
get_pipelines_index_json
- end
+ end.count
+
+ create_all_pipeline_types
- expect(queries.count).to be <= 36
+ # There appears to be one extra query for Pipelines#has_warnings? for some reason
+ expect { get_pipelines_index_json }.not_to exceed_query_limit(control_count + 1)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['pipelines'].count).to eq 10
end
end
@@ -165,7 +171,17 @@ describe Projects::PipelinesController do
def create_build(pipeline, stage, stage_idx, name, user = nil)
status = %w[created running pending success failed canceled].sample
- create(:ci_build, pipeline: pipeline, stage: stage, stage_idx: stage_idx, name: name, status: status, user: user)
+ create(
+ :ci_build,
+ :artifacts,
+ artifacts_expire_at: 2.days.from_now,
+ pipeline: pipeline,
+ stage: stage,
+ stage_idx: stage_idx,
+ name: name,
+ status: status,
+ user: user
+ )
end
end
@@ -608,7 +624,7 @@ describe Projects::PipelinesController do
describe 'GET test_report.json' do
subject(:get_test_report_json) do
- post :test_report, params: {
+ get :test_report, params: {
namespace_id: project.namespace,
project_id: project,
id: pipeline.id
@@ -676,6 +692,76 @@ describe Projects::PipelinesController do
end
end
+ describe 'GET test_report_count.json' do
+ subject(:test_reports_count_json) do
+ get :test_reports_count, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ id: pipeline.id
+ },
+ format: :json
+ end
+
+ context 'when feature is enabled' do
+ before do
+ stub_feature_flags(junit_pipeline_view: true)
+ end
+
+ context 'when pipeline does not have a test report' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ it 'renders an empty badge counter' do
+ test_reports_count_json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['total_count']).to eq(0)
+ end
+ end
+
+ context 'when pipeline has a test report' do
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
+
+ it 'renders the badge counter value' do
+ test_reports_count_json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['total_count']).to eq(4)
+ end
+ end
+
+ context 'when pipeline has corrupt test reports' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ before do
+ job = create(:ci_build, pipeline: pipeline)
+ create(:ci_job_artifact, :junit_with_corrupted_data, job: job, project: project)
+ end
+
+ it 'renders 0' do
+ test_reports_count_json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['total_count']).to eq(0)
+ end
+ end
+ end
+
+ context 'when feature is disabled' do
+ let(:pipeline) { create(:ci_empty_pipeline, project: project) }
+
+ before do
+ stub_feature_flags(junit_pipeline_view: false)
+ end
+
+ it 'renders empty response' do
+ test_reports_count_json
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.body).to be_empty
+ end
+ end
+ end
+
describe 'GET latest' do
let(:branch_main) { project.repository.branches[0] }
let(:branch_secondary) { project.repository.branches[1] }
@@ -703,7 +789,7 @@ describe Projects::PipelinesController do
it 'shows latest pipeline for the default project branch' do
get :show, params: { namespace_id: project.namespace, project_id: project, latest: true, ref: nil }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:pipeline)).to have_attributes(id: pipeline_master.id)
end
end
@@ -716,7 +802,7 @@ describe Projects::PipelinesController do
it 'shows the latest pipeline for the provided ref' do
get :show, params: { namespace_id: project.namespace, project_id: project, latest: true, ref: branch_secondary.name }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:pipeline)).to have_attributes(id: pipeline_secondary.id)
end
@@ -728,7 +814,7 @@ describe Projects::PipelinesController do
it 'shows the provided ref with the last sha/pipeline combo' do
get :show, params: { namespace_id: project.namespace, project_id: project, latest: true, ref: branch_secondary.name }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:pipeline)).to have_attributes(id: pipeline_secondary.id)
end
end
@@ -737,7 +823,7 @@ describe Projects::PipelinesController do
it 'renders a 404 if no pipeline is found for the ref' do
get :show, params: { namespace_id: project.namespace, project_id: project, ref: 'no-branch' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -754,7 +840,7 @@ describe Projects::PipelinesController do
it 'deletes pipeline and redirects' do
delete_pipeline
- expect(response).to have_gitlab_http_status(303)
+ expect(response).to have_gitlab_http_status(:see_other)
expect(Ci::Build.exists?(build.id)).to be_falsy
expect(Ci::Pipeline.exists?(pipeline.id)).to be_falsy
@@ -766,7 +852,7 @@ describe Projects::PipelinesController do
it 'fails to delete pipeline' do
delete_pipeline
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -775,7 +861,7 @@ describe Projects::PipelinesController do
it 'fails to delete pipeline' do
delete_pipeline
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
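
The rewritten N+1 guard in the pipelines spec follows a control-count pattern: hit the endpoint once to warm caches, record a baseline query count, create more records, then assert the next request stays within the baseline plus one known extra query. A condensed sketch of that shape, assuming the same spec-support helpers the diff itself uses (ActiveRecord::QueryRecorder, exceed_query_limit, and the spec-local get_pipelines_index_json / create_all_pipeline_types):

it 'does not execute N+1 queries' do
  get_pipelines_index_json                      # warm request so the baseline is stable

  control_count = ActiveRecord::QueryRecorder.new do
    get_pipelines_index_json                    # queries needed for the small data set
  end.count

  create_all_pipeline_types                     # grow the data set

  # One extra query (Pipelines#has_warnings?) is tolerated on top of the baseline.
  expect { get_pipelines_index_json }.not_to exceed_query_limit(control_count + 1)
end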
diff --git a/spec/controllers/projects/pipelines_settings_controller_spec.rb b/spec/controllers/projects/pipelines_settings_controller_spec.rb
index 16a43f62bd5..789b2104d3c 100644
--- a/spec/controllers/projects/pipelines_settings_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_settings_controller_spec.rb
@@ -17,7 +17,7 @@ describe Projects::PipelinesSettingsController do
it 'redirects with 302 status code' do
get :show, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb
index 0df19aa6e44..09420cc8556 100644
--- a/spec/controllers/projects/project_members_controller_spec.rb
+++ b/spec/controllers/projects/project_members_controller_spec.rb
@@ -11,7 +11,7 @@ describe Projects::ProjectMembersController do
it 'has the project_members address with a 200 status code' do
get :index, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
context 'when project belongs to group' do
@@ -64,7 +64,7 @@ describe Projects::ProjectMembersController do
access_level: Gitlab::Access::GUEST
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(project.users).not_to include project_user
end
end
@@ -145,7 +145,7 @@ describe Projects::ProjectMembersController do
id: 42
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -162,7 +162,7 @@ describe Projects::ProjectMembersController do
id: member
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(project.members).to include member
end
end
@@ -211,7 +211,7 @@ describe Projects::ProjectMembersController do
project_id: project
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -246,7 +246,7 @@ describe Projects::ProjectMembersController do
project_id: project
}
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -305,7 +305,7 @@ describe Projects::ProjectMembersController do
id: 42
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -322,7 +322,7 @@ describe Projects::ProjectMembersController do
id: member
}
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(project.members).not_to include member
end
end
diff --git a/spec/controllers/projects/prometheus/metrics_controller_spec.rb b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
index 157948de29d..314214ceefb 100644
--- a/spec/controllers/projects/prometheus/metrics_controller_spec.rb
+++ b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
@@ -28,7 +28,7 @@ describe Projects::Prometheus::MetricsController do
it 'returns no content response' do
get :active_common, params: project_params(format: :json)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
@@ -42,7 +42,7 @@ describe Projects::Prometheus::MetricsController do
it 'returns no content response' do
get :active_common, params: project_params(format: :json)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq(sample_response.deep_stringify_keys)
end
end
@@ -51,7 +51,7 @@ describe Projects::Prometheus::MetricsController do
it 'returns not found response' do
get :active_common, params: project_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -66,7 +66,7 @@ describe Projects::Prometheus::MetricsController do
get :active_common, params: project_params(format: :json)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -74,7 +74,7 @@ describe Projects::Prometheus::MetricsController do
it 'renders 404' do
get :active_common, params: project_params(format: :json)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index a570db12d94..8cb48dca095 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -23,7 +23,7 @@ describe Projects::RawController do
it 'delivers ASCII file' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
expect(response.header['Content-Disposition']).to eq('inline')
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
@@ -37,7 +37,7 @@ describe Projects::RawController do
it 'leaves image content disposition' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.header['Content-Disposition']).to eq('inline')
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
@@ -63,7 +63,7 @@ describe Projects::RawController do
.to change { Gitlab::GitalyClient.get_request_count }.by(0)
expect(response.body).to eq(_('You cannot access the raw file. Please wait a minute.'))
- expect(response).to have_gitlab_http_status(429)
+ expect(response).to have_gitlab_http_status(:too_many_requests)
end
it 'logs the event on auth.log' do
@@ -72,7 +72,7 @@ describe Projects::RawController do
env: :raw_blob_request_limit,
remote_ip: '0.0.0.0',
request_method: 'GET',
- path: "/#{project.full_path}/raw/#{file_path}"
+ path: "/#{project.full_path}/-/raw/#{file_path}"
}
expect(Gitlab::AuthLogger).to receive(:error).with(attributes).once
@@ -94,7 +94,7 @@ describe Projects::RawController do
request.headers['X-Gitlab-External-Storage-Token'] = token
execute_raw_requests(requests: 6, project: project, file_path: file_path)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -113,7 +113,7 @@ describe Projects::RawController do
execute_raw_requests(requests: 3, project: project, file_path: modified_path)
expect(response.body).to eq(_('You cannot access the raw file. Please wait a minute.'))
- expect(response).to have_gitlab_http_status(429)
+ expect(response).to have_gitlab_http_status(:too_many_requests)
end
end
@@ -125,7 +125,7 @@ describe Projects::RawController do
it 'does not prevent from accessing the raw file' do
execute_raw_requests(requests: 10, project: project, file_path: file_path)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -141,14 +141,14 @@ describe Projects::RawController do
execute_raw_requests(requests: 6, project: project, file_path: file_path)
expect(response.body).to eq(_('You cannot access the raw file. Please wait a minute.'))
- expect(response).to have_gitlab_http_status(429)
+ expect(response).to have_gitlab_http_status(:too_many_requests)
# Accessing upcase version of readme
file_path = "#{commit_sha}/README.md"
execute_raw_requests(requests: 1, project: project, file_path: file_path)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -166,7 +166,7 @@ describe Projects::RawController do
it 'redirects to sign in page' do
execute_raw_requests(requests: 1, project: project, file_path: file_path)
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response.location).to end_with('/users/sign_in')
end
end
@@ -176,7 +176,7 @@ describe Projects::RawController do
it 'calls the action normally' do
execute_raw_requests(requests: 1, project: project, file_path: file_path, token: user.static_object_token)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -184,7 +184,7 @@ describe Projects::RawController do
it 'redirects to sign in page' do
execute_raw_requests(requests: 1, project: project, file_path: file_path, token: 'foobar')
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response.location).to end_with('/users/sign_in')
end
end
@@ -196,7 +196,7 @@ describe Projects::RawController do
request.headers['X-Gitlab-Static-Object-Token'] = user.static_object_token
execute_raw_requests(requests: 1, project: project, file_path: file_path)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -205,7 +205,7 @@ describe Projects::RawController do
request.headers['X-Gitlab-Static-Object-Token'] = 'foobar'
execute_raw_requests(requests: 1, project: project, file_path: file_path)
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response.location).to end_with('/users/sign_in')
end
end
diff --git a/spec/controllers/projects/registry/repositories_controller_spec.rb b/spec/controllers/projects/registry/repositories_controller_spec.rb
index 192e4ce2e73..5b9c0211b39 100644
--- a/spec/controllers/projects/registry/repositories_controller_spec.rb
+++ b/spec/controllers/projects/registry/repositories_controller_spec.rb
@@ -16,7 +16,7 @@ describe Projects::Registry::RepositoriesController do
project.add_developer(user)
end
- describe 'GET index' do
+ shared_examples 'renders a list of repositories' do
context 'when root container repository exists' do
before do
create(:container_repository, :root, project: project)
@@ -35,6 +35,8 @@ describe Projects::Registry::RepositoriesController do
end
it 'successfully renders container repositories' do
+ expect(Gitlab::Tracking).not_to receive(:event)
+
go_to_index
expect(response).to have_gitlab_http_status(:ok)
@@ -43,7 +45,7 @@ describe Projects::Registry::RepositoriesController do
it 'tracks the event' do
expect(Gitlab::Tracking).to receive(:event).with(anything, 'list_repositories', {})
- go_to_index
+ go_to_index(format: :json)
end
it 'creates a root container repository' do
@@ -56,6 +58,7 @@ describe Projects::Registry::RepositoriesController do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('registry/repositories')
+ expect(response).to include_pagination_headers
end
end
@@ -82,9 +85,33 @@ describe Projects::Registry::RepositoriesController do
end
end
end
+
+ context 'with :vue_container_registry_explorer feature flag disabled' do
+ before do
+ stub_feature_flags(vue_container_registry_explorer: false)
+ stub_container_registry_tags(repository: project.full_path,
+ tags: %w[rc1 latest])
+ end
+
+ it 'json has a list of projects' do
+ go_to_index(format: :json)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('registry/repositories')
+ expect(response).not_to include_pagination_headers
+ end
+ end
+ end
+
+ describe 'GET #index' do
+ it_behaves_like 'renders a list of repositories'
+ end
+
+ describe 'GET #show' do
+ it_behaves_like 'renders a list of repositories'
end
- describe 'DELETE destroy' do
+ describe 'DELETE #destroy' do
context 'when root container repository exists' do
let!(:repository) do
create(:container_repository, :root, project: project)
@@ -113,7 +140,7 @@ describe Projects::Registry::RepositoriesController do
end
context 'when user does not have access to registry' do
- describe 'GET index' do
+ describe 'GET #index' do
it 'responds with 404' do
go_to_index
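
Turning 'GET index' into a shared example group above lets both #index and #show reuse the same repository-listing assertions. Stripped of the GitLab specifics, the RSpec mechanism is just shared_examples plus it_behaves_like; the names in this standalone sketch are illustrative, not taken from the codebase:

require 'rspec/autorun'

RSpec.describe 'repositories listing (sketch)' do
  shared_examples 'renders a list of repositories' do
    it 'responds successfully' do
      expect(perform_request).to eq(:ok)   # stand-in for the real controller assertion
    end
  end

  describe 'GET #index' do
    let(:perform_request) { :ok }          # hypothetical helper standing in for go_to_index
    it_behaves_like 'renders a list of repositories'
  end

  describe 'GET #show' do
    let(:perform_request) { :ok }
    it_behaves_like 'renders a list of repositories'
  end
end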
diff --git a/spec/controllers/projects/releases_controller_spec.rb b/spec/controllers/projects/releases_controller_spec.rb
index 750e9aabef0..a03fabad2de 100644
--- a/spec/controllers/projects/releases_controller_spec.rb
+++ b/spec/controllers/projects/releases_controller_spec.rb
@@ -127,13 +127,13 @@ describe Projects::ReleasesController do
sign_in(user)
end
- let!(:release) { create(:release, project: project) }
+ let(:release) { create(:release, project: project) }
let(:tag) { CGI.escape(release.tag) }
it_behaves_like 'successful request'
context 'when tag name contains slash' do
- let!(:release) { create(:release, project: project, tag: 'awesome/v1.0') }
+ let(:release) { create(:release, project: project, tag: 'awesome/v1.0') }
let(:tag) { CGI.escape(release.tag) }
it_behaves_like 'successful request'
@@ -144,16 +144,7 @@ describe Projects::ReleasesController do
end
end
- context 'when feature flag `release_edit_page` is disabled' do
- before do
- stub_feature_flags(release_edit_page: false)
- end
-
- it_behaves_like 'not found'
- end
-
context 'when release does not exist' do
- let!(:release) { }
let(:tag) { 'non-existent-tag' }
it_behaves_like 'not found'
@@ -166,6 +157,47 @@ describe Projects::ReleasesController do
end
end
+ describe 'GET #show' do
+ subject do
+ get :show, params: { namespace_id: project.namespace, project_id: project, tag: tag }
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ let(:release) { create(:release, project: project) }
+ let(:tag) { CGI.escape(release.tag) }
+
+ it_behaves_like 'successful request'
+
+ context 'when tag name contains slash' do
+ let(:release) { create(:release, project: project, tag: 'awesome/v1.0') }
+ let(:tag) { CGI.escape(release.tag) }
+
+ it_behaves_like 'successful request'
+
+ it 'is accessible at a URL encoded path' do
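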
+ expect(project_release_path(project, release))
+ .to eq("/#{project.namespace.path}/#{project.name}/-/releases/awesome%252Fv1.0")
+ end
+ end
+
+ context 'when feature flag `release_show_page` is disabled' do
+ before do
+ stub_feature_flags(release_show_page: false)
+ end
+
+ it_behaves_like 'not found'
+ end
+
+ context 'when release does not exist' do
+ let(:tag) { 'non-existent-tag' }
+
+ it_behaves_like 'not found'
+ end
+ end
+
describe 'GET #evidence' do
let_it_be(:tag_name) { "v1.1.0-evidence" }
let!(:release) { create(:release, :with_evidence, project: project, tag: tag_name) }
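
The releases hunks drop the retired release_edit_page flag context and add a GET #show block gated on release_show_page instead. The gating pattern is the usual GitLab spec idiom: stub_feature_flags takes a hash of flag names to booleans and forces them for the current example group, so the shared 'not found' example can assert the disabled behaviour. Sketched with the helpers the diff already relies on:

context 'when feature flag `release_show_page` is disabled' do
  before do
    # Forces Feature.enabled?(:release_show_page) to return false within this group.
    stub_feature_flags(release_show_page: false)
  end

  # Shared example defined earlier in the spec; expects a :not_found response.
  it_behaves_like 'not found'
end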
diff --git a/spec/controllers/projects/repositories_controller_spec.rb b/spec/controllers/projects/repositories_controller_spec.rb
index 084644484c5..d4a81f24d9c 100644
--- a/spec/controllers/projects/repositories_controller_spec.rb
+++ b/spec/controllers/projects/repositories_controller_spec.rb
@@ -17,6 +17,7 @@ describe Projects::RepositoriesController do
context 'as a user' do
let(:user) { create(:user) }
+ let(:archive_name) { "#{project.path}-master" }
before do
project.add_developer(user)
@@ -30,9 +31,18 @@ describe Projects::RepositoriesController do
end
it 'responds with redirect to the short name archive if fully qualified' do
- get :archive, params: { namespace_id: project.namespace, project_id: project, id: "master/#{project.path}-master" }, format: "zip"
+ get :archive, params: { namespace_id: project.namespace, project_id: project, id: "master/#{archive_name}" }, format: "zip"
expect(assigns(:ref)).to eq("master")
+ expect(assigns(:filename)).to eq(archive_name)
+ expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:")
+ end
+
+ it 'responds with redirect for a path with multiple slashes' do
+ get :archive, params: { namespace_id: project.namespace, project_id: project, id: "improve/awesome/#{archive_name}" }, format: "zip"
+
+ expect(assigns(:ref)).to eq("improve/awesome")
+ expect(assigns(:filename)).to eq(archive_name)
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:")
end
@@ -45,7 +55,7 @@ describe Projects::RepositoriesController do
it 'handles legacy queries with the ref specified as ref in params' do
get :archive, params: { namespace_id: project.namespace, project_id: project, ref: 'feature' }, format: 'zip'
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:ref)).to eq('feature')
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:")
end
@@ -53,7 +63,7 @@ describe Projects::RepositoriesController do
it 'handles legacy queries with the ref specified as id in params' do
get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'feature' }, format: 'zip'
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:ref)).to eq('feature')
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:")
end
@@ -61,7 +71,7 @@ describe Projects::RepositoriesController do
it 'prioritizes the id param over the ref param when both are specified' do
get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'feature', ref: 'feature_conflict' }, format: 'zip'
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:ref)).to eq('feature')
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:")
end
@@ -74,7 +84,15 @@ describe Projects::RepositoriesController do
it "renders Not Found" do
get :archive, params: { namespace_id: project.namespace, project_id: project, id: "master" }, format: "zip"
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context "when the request format is HTML" do
+ it "renders 404" do
+ get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'master' }, format: "html"
+
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -82,7 +100,7 @@ describe Projects::RepositoriesController do
it 'sets appropriate caching headers' do
get_archive
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.header['ETag']).to be_present
expect(response.header['Cache-Control']).to include('max-age=60, private')
end
@@ -93,7 +111,7 @@ describe Projects::RepositoriesController do
it 'sets appropriate caching headers' do
get_archive
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.header['ETag']).to be_present
expect(response.header['Cache-Control']).to include('max-age=60, public')
end
@@ -103,7 +121,7 @@ describe Projects::RepositoriesController do
it 'max-age is set to 3600 in Cache-Control header' do
get_archive('ddd0f15ae83993f5cb66a927a28673882e99100b')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.header['Cache-Control']).to include('max-age=3600')
end
end
@@ -116,7 +134,7 @@ describe Projects::RepositoriesController do
request.headers['If-None-Match'] = response.headers['ETag']
get_archive
- expect(response).to have_gitlab_http_status(304)
+ expect(response).to have_gitlab_http_status(:not_modified)
end
end
@@ -137,7 +155,7 @@ describe Projects::RepositoriesController do
it 'redirects to sign in page' do
get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'master' }, format: 'zip'
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -146,7 +164,7 @@ describe Projects::RepositoriesController do
it 'calls the action normally' do
get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'master', token: user.static_object_token }, format: 'zip'
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -154,7 +172,7 @@ describe Projects::RepositoriesController do
it 'redirects to sign in page' do
get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'master', token: 'foobar' }, format: 'zip'
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
@@ -165,7 +183,7 @@ describe Projects::RepositoriesController do
request.headers['X-Gitlab-Static-Object-Token'] = user.static_object_token
get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'master' }, format: 'zip'
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -174,7 +192,7 @@ describe Projects::RepositoriesController do
request.headers['X-Gitlab-Static-Object-Token'] = 'foobar'
get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'master' }, format: 'zip'
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
diff --git a/spec/controllers/projects/runners_controller_spec.rb b/spec/controllers/projects/runners_controller_spec.rb
index 279b4f360c5..1893180fe9b 100644
--- a/spec/controllers/projects/runners_controller_spec.rb
+++ b/spec/controllers/projects/runners_controller_spec.rb
@@ -30,7 +30,7 @@ describe Projects::RunnersController do
runner.reload
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(runner.description).to eq(new_desc)
end
end
@@ -39,7 +39,7 @@ describe Projects::RunnersController do
it 'destroys the runner' do
delete :destroy, params: params
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(Ci::Runner.find_by(id: runner.id)).to be_nil
end
end
@@ -54,7 +54,7 @@ describe Projects::RunnersController do
runner.reload
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(runner.active).to eq(true)
end
end
@@ -69,7 +69,7 @@ describe Projects::RunnersController do
runner.reload
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(runner.active).to eq(false)
end
end
diff --git a/spec/controllers/projects/serverless/functions_controller_spec.rb b/spec/controllers/projects/serverless/functions_controller_spec.rb
index 7e98ded88a9..db7533eb609 100644
--- a/spec/controllers/projects/serverless/functions_controller_spec.rb
+++ b/spec/controllers/projects/serverless/functions_controller_spec.rb
@@ -14,9 +14,11 @@ describe Projects::Serverless::FunctionsController do
let!(:deployment) { create(:deployment, :success, environment: environment, cluster: cluster) }
let(:knative_services_finder) { environment.knative_services_finder }
let(:function_description) { 'A serverless function' }
+ let(:function_name) { 'some-function-name' }
let(:knative_stub_options) do
- { namespace: namespace.namespace, name: cluster.project.name, description: function_description }
+ { namespace: namespace.namespace, name: function_name, description: function_description }
end
+ let(:knative) { create(:clusters_applications_knative, :installed, cluster: cluster) }
let(:namespace) do
create(:cluster_kubernetes_namespace,
@@ -51,7 +53,7 @@ describe Projects::Serverless::FunctionsController do
expect(json_response).to eq expected_json
end
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_gitlab_http_status(:ok) }
end
context 'when cache is ready' do
@@ -83,45 +85,120 @@ describe Projects::Serverless::FunctionsController do
expect(json_response).to eq expected_json
end
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_gitlab_http_status(:ok) }
end
context 'when functions were found' do
- let(:functions) { ["asdf"] }
+ let(:functions) { [{}, {}] }
before do
- stub_kubeclient_knative_services(namespace: namespace.namespace)
- get :index, params: params({ format: :json })
+ stub_kubeclient_knative_services(namespace: namespace.namespace, cluster_id: cluster.id, name: function_name)
end
it 'returns functions' do
+ get :index, params: params({ format: :json })
expect(json_response["functions"]).not_to be_empty
end
- it { expect(response).to have_gitlab_http_status(200) }
+ it 'filters out the functions whose cluster the user does not have permission to read' do
+ allow(controller).to receive(:can?).and_return(true)
+ expect(controller).to receive(:can?).with(user, :read_cluster, cluster).and_return(false)
+
+ get :index, params: params({ format: :json })
+
+ expect(json_response["functions"]).to be_empty
+ end
+
+ it 'returns a successful response status' do
+ get :index, params: params({ format: :json })
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'when there is serverless domain for a cluster' do
+ let!(:serverless_domain_cluster) do
+ create(:serverless_domain_cluster, clusters_applications_knative_id: knative.id)
+ end
+
+ it 'returns JSON with function details with serverless domain URL' do
+ get :index, params: params({ format: :json })
+ expect(response).to have_gitlab_http_status(:ok)
+
+ expect(json_response["functions"]).not_to be_empty
+
+ expect(json_response["functions"]).to all(
+ include(
+ 'url' => "https://#{function_name}-#{serverless_domain_cluster.uuid[0..1]}a1#{serverless_domain_cluster.uuid[2..-3]}f2#{serverless_domain_cluster.uuid[-2..-1]}#{"%x" % environment.id}-#{environment.slug}.#{serverless_domain_cluster.domain}"
+ )
+ )
+ end
+ end
+
+ context 'when there is no serverless domain for a cluster' do
+ it 'keeps function URL as it was' do
+ expect(Gitlab::Serverless::Domain).not_to receive(:new)
+
+ get :index, params: params({ format: :json })
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
end
end
end
describe 'GET #show' do
- context 'invalid data' do
- it 'has a bad function name' do
+ context 'with function that does not exist' do
+ it 'returns 404' do
get :show, params: params({ format: :json, environment_id: "*", id: "foo" })
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'with valid data', :use_clean_rails_memory_store_caching do
shared_examples 'GET #show with valid data' do
- it 'has a valid function name' do
- get :show, params: params({ format: :json, environment_id: "*", id: cluster.project.name })
- expect(response).to have_gitlab_http_status(200)
+ context 'when there is serverless domain for a cluster' do
+ let!(:serverless_domain_cluster) do
+ create(:serverless_domain_cluster, clusters_applications_knative_id: knative.id)
+ end
+
+ it 'returns JSON with function details with serverless domain URL' do
+ get :show, params: params({ format: :json, environment_id: "*", id: function_name })
+ expect(response).to have_gitlab_http_status(:ok)
+
+ expect(json_response).to include(
+ 'url' => "https://#{function_name}-#{serverless_domain_cluster.uuid[0..1]}a1#{serverless_domain_cluster.uuid[2..-3]}f2#{serverless_domain_cluster.uuid[-2..-1]}#{"%x" % environment.id}-#{environment.slug}.#{serverless_domain_cluster.domain}"
+ )
+ end
+
+ it 'returns 404 when user does not have permission to read the cluster' do
+ allow(controller).to receive(:can?).and_return(true)
+ expect(controller).to receive(:can?).with(user, :read_cluster, cluster).and_return(false)
+
+ get :show, params: params({ format: :json, environment_id: "*", id: function_name })
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when there is no serverless domain for a cluster' do
+ it 'keeps function URL as it was' do
+ get :show, params: params({ format: :json, environment_id: "*", id: function_name })
+ expect(response).to have_gitlab_http_status(:ok)
+
+ expect(json_response).to include(
+ 'url' => "http://#{function_name}.#{namespace.namespace}.example.com"
+ )
+ end
+ end
+
+ it 'returns json with function details' do
+ get :show, params: params({ format: :json, environment_id: "*", id: function_name })
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include(
- 'name' => project.name,
- 'url' => "http://#{project.name}.#{namespace.namespace}.example.com",
+ 'name' => function_name,
+ 'url' => "http://#{function_name}.#{namespace.namespace}.example.com",
'description' => function_description,
- 'podcount' => 1
+ 'podcount' => 0
)
end
end
@@ -164,7 +241,7 @@ describe Projects::Serverless::FunctionsController do
context 'invalid data' do
it 'has a bad function name' do
get :metrics, params: params({ format: :json, environment_id: "*", id: "foo" })
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
end
@@ -174,14 +251,14 @@ describe Projects::Serverless::FunctionsController do
it 'has data' do
get :index, params: params({ format: :json })
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to match({
'knative_installed' => 'checking',
'functions' => [
a_hash_including(
- 'name' => project.name,
- 'url' => "http://#{project.name}.#{namespace.namespace}.example.com",
+ 'name' => function_name,
+ 'url' => "http://#{function_name}.#{namespace.namespace}.example.com",
'description' => function_description
)
]
@@ -191,7 +268,7 @@ describe Projects::Serverless::FunctionsController do
it 'has data in html' do
get :index, params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
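
The serverless assertions above build the expected function URL from a dense interpolation over the cluster's serverless domain. Expanded with made-up sample values (the uuid, environment id, slug and domain below are illustrative; the slicing mirrors the spec's own string rather than Gitlab::Serverless::Domain internals):

function_name = 'some-function-name'
uuid          = 'abcdef12345678'         # hypothetical 14-character cluster uuid
env_id        = 9                        # hypothetical environment id
env_slug      = 'production'
domain        = 'serverless.example.com' # hypothetical serverless domain

# First two uuid characters, 'a1', the middle, 'f2', the last two,
# then the environment id in hex and the slug, all under the cluster domain.
url = "https://#{function_name}-#{uuid[0..1]}a1#{uuid[2..-3]}f2#{uuid[-2..-1]}" \
      "#{'%x' % env_id}-#{env_slug}.#{domain}"

puts url
# => https://some-function-name-aba1cdef123456f2789-production.serverless.example.com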
diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb
index 0c074714bf3..fb7cca3997b 100644
--- a/spec/controllers/projects/services_controller_spec.rb
+++ b/spec/controllers/projects/services_controller_spec.rb
@@ -191,16 +191,81 @@ describe Projects::ServicesController do
end
end
end
+
+ context 'Prometheus service' do
+ let!(:service) { create(:prometheus_service, project: project) }
+ let(:service_params) { { manual_configuration: '1', api_url: 'http://example.com' } }
+
+ context 'feature flag :settings_operations_prometheus_service is enabled' do
+ before do
+ stub_feature_flags(settings_operations_prometheus_service: true)
+ end
+
+ it 'redirects user back to edit page with alert' do
+ put :update, params: project_params.merge(service: service_params)
+
+ expect(response).to redirect_to(edit_project_service_path(project, service))
+ expected_alert = "You can now manage your Prometheus settings on the <a href=\"#{project_settings_operations_path(project)}\">Operations</a> page. Fields on this page has been deprecated."
+
+ expect(response).to set_flash.now[:alert].to(expected_alert)
+ end
+
+ it 'does not modify service' do
+ expect { put :update, params: project_params.merge(service: service_params) }.not_to change { project.prometheus_service.reload.attributes }
+ end
+ end
+
+ context 'feature flag :settings_operations_prometheus_service is disabled' do
+ before do
+ stub_feature_flags(settings_operations_prometheus_service: false)
+ end
+
+ it 'modifies service' do
+ expect { put :update, params: project_params.merge(service: service_params) }.to change { project.prometheus_service.reload.attributes }
+ end
+ end
+ end
end
describe 'GET #edit' do
- before do
- get :edit, params: project_params(id: 'jira')
+ context 'Jira service' do
+ let(:service_param) { 'jira' }
+
+ before do
+ get :edit, params: project_params(id: service_param)
+ end
+
+ context 'with approved services' do
+ it 'renders edit page' do
+ expect(response).to be_successful
+ end
+ end
end
- context 'with approved services' do
- it 'renders edit page' do
- expect(response).to be_successful
+ context 'Prometheus service' do
+ let(:service_param) { 'prometheus' }
+
+ context 'feature flag :settings_operations_prometheus_service is enabled' do
+ before do
+ stub_feature_flags(settings_operations_prometheus_service: true)
+ get :edit, params: project_params(id: service_param)
+ end
+
+ it 'renders deprecation warning notice' do
+ expected_alert = "You can now manage your Prometheus settings on the <a href=\"#{project_settings_operations_path(project)}\">Operations</a> page. Fields on this page has been deprecated."
+ expect(response).to set_flash.now[:alert].to(expected_alert)
+ end
+ end
+
+ context 'feature flag :settings_operations_prometheus_service is disabled' do
+ before do
+ stub_feature_flags(settings_operations_prometheus_service: false)
+ get :edit, params: project_params(id: service_param)
+ end
+
+ it 'does not render deprecation warning notice' do
+ expect(response).not_to set_flash.now[:alert]
+ end
end
end
end
diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
index abe31a0c86e..68260e4e101 100644
--- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
@@ -16,7 +16,7 @@ describe Projects::Settings::CiCdController do
it 'renders show with 200 status code' do
get :show, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:show)
end
@@ -106,7 +106,7 @@ describe Projects::Settings::CiCdController do
it 'redirects to the settings page' do
subject
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(flash[:toast]).to eq("Pipelines settings for '#{project.name}' were successfully updated.")
end
diff --git a/spec/controllers/projects/settings/integrations_controller_spec.rb b/spec/controllers/projects/settings/integrations_controller_spec.rb
index 93e8d03098a..5d9fe3da912 100644
--- a/spec/controllers/projects/settings/integrations_controller_spec.rb
+++ b/spec/controllers/projects/settings/integrations_controller_spec.rb
@@ -15,7 +15,7 @@ describe Projects::Settings::IntegrationsController do
it 'renders show with 200 status code' do
get :show, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:show)
end
end
diff --git a/spec/controllers/projects/settings/operations_controller_spec.rb b/spec/controllers/projects/settings/operations_controller_spec.rb
index 667a6336952..62b906e8507 100644
--- a/spec/controllers/projects/settings/operations_controller_spec.rb
+++ b/spec/controllers/projects/settings/operations_controller_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe Projects::Settings::OperationsController do
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project) }
before do
sign_in(user)
@@ -121,6 +121,74 @@ describe Projects::Settings::OperationsController do
end
end
+ context 'incident management' do
+ describe 'GET #show' do
+ context 'with existing setting' do
+ let!(:incident_management_setting) do
+ create(:project_incident_management_setting, project: project)
+ end
+
+ it 'loads existing setting' do
+ get :show, params: project_params(project)
+
+ expect(controller.helpers.project_incident_management_setting)
+ .to eq(incident_management_setting)
+ end
+ end
+
+ context 'without an existing setting' do
+ it 'builds a new setting' do
+ get :show, params: project_params(project)
+
+ expect(controller.helpers.project_incident_management_setting).to be_new_record
+ end
+ end
+ end
+
+ describe 'PATCH #update' do
+ let(:params) do
+ {
+ incident_management_setting_attributes: {
+ create_issue: 'false',
+ send_email: 'false',
+ issue_template_key: 'some-other-template'
+ }
+ }
+ end
+
+ it_behaves_like 'PATCHable'
+
+ context 'updating each incident management setting' do
+ let(:project) { create(:project) }
+ let(:new_incident_management_settings) { {} }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ shared_examples 'a gitlab tracking event' do |params, event_key|
+ it "creates a gitlab tracking event #{event_key}" do
+ new_incident_management_settings = params
+
+ expect(Gitlab::Tracking).to receive(:event)
+ .with('IncidentManagement::Settings', event_key, kind_of(Hash))
+
+ patch :update, params: project_params(project, incident_management_setting_attributes: new_incident_management_settings)
+
+ project.reload
+ end
+ end
+
+ it_behaves_like 'a gitlab tracking event', { create_issue: '1' }, 'enabled_issue_auto_creation_on_alerts'
+ it_behaves_like 'a gitlab tracking event', { create_issue: '0' }, 'disabled_issue_auto_creation_on_alerts'
+ it_behaves_like 'a gitlab tracking event', { issue_template_key: 'template' }, 'enabled_issue_template_on_alerts'
+ it_behaves_like 'a gitlab tracking event', { issue_template_key: nil }, 'disabled_issue_template_on_alerts'
+ it_behaves_like 'a gitlab tracking event', { send_email: '1' }, 'enabled_sending_emails'
+ it_behaves_like 'a gitlab tracking event', { send_email: '0' }, 'disabled_sending_emails'
+ end
+ end
+ end
+
context 'error tracking' do
describe 'GET #show' do
context 'with existing setting' do
@@ -196,6 +264,39 @@ describe Projects::Settings::OperationsController do
end
end
+ context 'prometheus integration' do
+ describe 'PATCH #update' do
+ let(:params) do
+ {
+ prometheus_integration_attributes: {
+ manual_configuration: '0',
+ api_url: 'https://gitlab.prometheus.rocks'
+ }
+ }
+ end
+
+ context 'feature flag :settings_operations_prometheus_service is enabled' do
+ before do
+ stub_feature_flags(settings_operations_prometheus_service: true)
+ end
+
+ it_behaves_like 'PATCHable'
+ end
+
+ context 'feature flag :settings_operations_prometheus_service is disabled' do
+ before do
+ stub_feature_flags(settings_operations_prometheus_service: false)
+ end
+
+ it_behaves_like 'PATCHable' do
+ let(:permitted_params) do
+ ActionController::Parameters.new(params.except(:prometheus_integration_attributes)).permit!
+ end
+ end
+ end
+ end
+ end
+
private
def project_params(project, params = {})
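
The incident-management block above drives six near-identical expectations through one parameterised shared example ('a gitlab tracking event'), passing the settings hash and the event key as block arguments. In isolation the RSpec mechanism looks like this (names here are illustrative only):

require 'rspec/autorun'

RSpec.describe 'tracking events (sketch)' do
  shared_examples 'a tracking event' do |settings, event_key|
    it "is parameterised for #{event_key}" do
      # The real spec expects Gitlab::Tracking.event(...); this sketch only
      # shows how per-example arguments reach the shared example body.
      expect(settings).to be_a(Hash)
      expect(event_key).to be_a(String)
    end
  end

  it_behaves_like 'a tracking event', { create_issue: '1' }, 'enabled_issue_auto_creation_on_alerts'
  it_behaves_like 'a tracking event', { send_email: '0' },   'disabled_sending_emails'
end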
diff --git a/spec/controllers/projects/settings/repository_controller_spec.rb b/spec/controllers/projects/settings/repository_controller_spec.rb
index 7f67f67e775..64f5b8e34ae 100644
--- a/spec/controllers/projects/settings/repository_controller_spec.rb
+++ b/spec/controllers/projects/settings/repository_controller_spec.rb
@@ -15,7 +15,7 @@ describe Projects::Settings::RepositoryController do
it 'renders show with 200 status code' do
get :show, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:show)
end
end
@@ -48,7 +48,7 @@ describe Projects::Settings::RepositoryController do
it 'creates deploy token' do
expect { create_deploy_token }.to change { DeployToken.active.count }.by(1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:show)
end
end
diff --git a/spec/controllers/projects/snippets_controller_spec.rb b/spec/controllers/projects/snippets_controller_spec.rb
index ee145a62b57..4d1537ae787 100644
--- a/spec/controllers/projects/snippets_controller_spec.rb
+++ b/spec/controllers/projects/snippets_controller_spec.rb
@@ -27,6 +27,15 @@ describe Projects::SnippetsController do
end
end
+ it 'fetches snippet counts via the snippet count service' do
+ service = double(:count_service, execute: {})
+ expect(Snippets::CountService)
+ .to receive(:new).with(nil, project: project)
+ .and_return(service)
+
+ get :index, params: { namespace_id: project.namespace, project_id: project }
+ end
+
context 'when the project snippet is private' do
let!(:project_snippet) { create(:project_snippet, :private, project: project, author: user) }
@@ -35,7 +44,7 @@ describe Projects::SnippetsController do
get :index, params: { namespace_id: project.namespace, project_id: project }
expect(assigns(:snippets)).not_to include(project_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -48,7 +57,7 @@ describe Projects::SnippetsController do
get :index, params: { namespace_id: project.namespace, project_id: project }
expect(assigns(:snippets)).to include(project_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -61,7 +70,7 @@ describe Projects::SnippetsController do
get :index, params: { namespace_id: project.namespace, project_id: project }
expect(assigns(:snippets)).to include(project_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -92,7 +101,7 @@ describe Projects::SnippetsController do
context 'when the snippet is spam' do
before do
- allow_next_instance_of(AkismetService) do |instance|
+ allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive(:spam?).and_return(true)
end
end
@@ -172,7 +181,7 @@ describe Projects::SnippetsController do
context 'when the snippet is spam' do
before do
- allow_next_instance_of(AkismetService) do |instance|
+ allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive(:spam?).and_return(true)
end
end
@@ -282,7 +291,7 @@ describe Projects::SnippetsController do
let(:snippet) { create(:project_snippet, :private, project: project, author: user) }
before do
- allow_next_instance_of(AkismetService) do |instance|
+ allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive_messages(submit_spam: true)
end
stub_application_setting(akismet_enabled: true)
@@ -318,7 +327,7 @@ describe Projects::SnippetsController do
it 'responds with status 404' do
get action, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -331,7 +340,7 @@ describe Projects::SnippetsController do
get action, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }
expect(assigns(:snippet)).to eq(project_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -344,7 +353,7 @@ describe Projects::SnippetsController do
get action, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }
expect(assigns(:snippet)).to eq(project_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -354,7 +363,7 @@ describe Projects::SnippetsController do
it 'responds with status 404' do
get action, params: { namespace_id: project.namespace, project_id: project, id: 42 }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -366,7 +375,7 @@ describe Projects::SnippetsController do
it 'responds with status 404' do
get action, params: { namespace_id: project.namespace, project_id: project, id: 42 }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -386,7 +395,7 @@ describe Projects::SnippetsController do
let(:snippet_permission) { :private }
it 'responds with status 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -395,7 +404,7 @@ describe Projects::SnippetsController do
it 'responds with status 200' do
expect(assigns(:snippet)).to eq(project_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -407,7 +416,7 @@ describe Projects::SnippetsController do
it 'responds with status 404' do
expect(assigns(:snippet)).to eq(project_snippet)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -501,7 +510,7 @@ describe Projects::SnippetsController do
it 'responds with status 404' do
delete :destroy, params: params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
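
Note (not part of this diff): the Akismet changes above only track the service moving under the `Spam::` namespace; the stubbing pattern itself is unchanged. As a rough approximation only — not GitLab's actual `allow_next_instance_of` implementation — the helper amounts to something like this in plain rspec-mocks terms:

    # Approximation: stub the next instance created via .new so the spam
    # check reports the snippet as spam.
    allow(Spam::AkismetService).to receive(:new).and_wrap_original do |original, *args|
      original.call(*args).tap do |service|
        allow(service).to receive(:spam?).and_return(true)
      end
    end
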
diff --git a/spec/controllers/projects/stages_controller_spec.rb b/spec/controllers/projects/stages_controller_spec.rb
index a91e3523fd7..c38e3d2544f 100644
--- a/spec/controllers/projects/stages_controller_spec.rb
+++ b/spec/controllers/projects/stages_controller_spec.rb
@@ -25,7 +25,7 @@ describe Projects::StagesController do
it 'returns not authorized' do
play_manual_stage!
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
diff --git a/spec/controllers/projects/tags/releases_controller_spec.rb b/spec/controllers/projects/tags/releases_controller_spec.rb
index 66eff4844c2..da87756d782 100644
--- a/spec/controllers/projects/tags/releases_controller_spec.rb
+++ b/spec/controllers/projects/tags/releases_controller_spec.rb
@@ -23,7 +23,7 @@ describe Projects::Tags::ReleasesController do
release = assigns(:release)
expect(release).not_to be_nil
expect(release).not_to be_persisted
- expect(response).to have_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'retrieves an existing release' do
@@ -32,7 +32,7 @@ describe Projects::Tags::ReleasesController do
release = assigns(:release)
expect(release).not_to be_nil
expect(release).to be_persisted
- expect(response).to have_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -42,7 +42,7 @@ describe Projects::Tags::ReleasesController do
release = project.releases.find_by(tag: tag)
expect(release.description).to eq("description updated")
- expect(response).to have_http_status(:found)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'creates a release if one does not exist' do
@@ -52,7 +52,7 @@ describe Projects::Tags::ReleasesController do
update_release(tag_without_release.name, "a new release")
end.to change { project.releases.count }.by(1)
- expect(response).to have_http_status(:found)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'sets the release name, sha, and author for a new release' do
@@ -64,7 +64,7 @@ describe Projects::Tags::ReleasesController do
expect(release.name).to eq(tag_without_release.name)
expect(release.sha).to eq(tag_without_release.target_commit.sha)
expect(release.author.id).to eq(user.id)
- expect(response).to have_http_status(:found)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'deletes release when description is empty' do
@@ -74,7 +74,7 @@ describe Projects::Tags::ReleasesController do
expect(initial_releases_count).to eq(1)
expect(project.releases.count).to eq(0)
- expect(response).to have_http_status(:found)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'does nothing when description is empty and the tag does not have a release' do
@@ -84,7 +84,7 @@ describe Projects::Tags::ReleasesController do
update_release(tag_without_release.name, "")
end.not_to change { project.releases.count }
- expect(response).to have_http_status(:found)
+ expect(response).to have_gitlab_http_status(:found)
end
end
diff --git a/spec/controllers/projects/templates_controller_spec.rb b/spec/controllers/projects/templates_controller_spec.rb
index 07b8a36fefc..b4b04878181 100644
--- a/spec/controllers/projects/templates_controller_spec.rb
+++ b/spec/controllers/projects/templates_controller_spec.rb
@@ -110,7 +110,7 @@ describe Projects::TemplatesController do
it 'returns the template names' do
get(:names, params: { namespace_id: project.namespace, template_type: template_type, project_id: project }, format: :json)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(1)
expect(json_response[0]['name']).to eq(expected_template_name)
end
@@ -121,7 +121,7 @@ describe Projects::TemplatesController do
get(:names, params: { namespace_id: project.namespace, template_type: template_type, project_id: project }, format: :json)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
diff --git a/spec/controllers/projects/todos_controller_spec.rb b/spec/controllers/projects/todos_controller_spec.rb
index c12019fed5e..b7d40d2a452 100644
--- a/spec/controllers/projects/todos_controller_spec.rb
+++ b/spec/controllers/projects/todos_controller_spec.rb
@@ -22,7 +22,7 @@ describe Projects::TodosController do
it "doesn't create todo" do
expect { post_create }.not_to change { user.todos.count }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/projects/tree_controller_spec.rb b/spec/controllers/projects/tree_controller_spec.rb
index 7c9abdf700b..96838383540 100644
--- a/spec/controllers/projects/tree_controller_spec.rb
+++ b/spec/controllers/projects/tree_controller_spec.rb
@@ -45,7 +45,7 @@ describe Projects::TreeController do
it 'redirects' do
expect(subject)
- .to redirect_to("/#{project.full_path}/tree/master")
+ .to redirect_to("/#{project.full_path}/-/tree/master")
end
end
@@ -60,7 +60,7 @@ describe Projects::TreeController do
it 'redirects' do
expect(subject)
- .to redirect_to("/#{project.full_path}/tree/empty-branch")
+ .to redirect_to("/#{project.full_path}/-/tree/empty-branch")
end
end
@@ -85,7 +85,7 @@ describe Projects::TreeController do
context "valid SHA commit ID with path" do
let(:id) { '6d39438/.gitignore' }
- it { expect(response).to have_gitlab_http_status(302) }
+ it { expect(response).to have_gitlab_http_status(:found) }
end
end
@@ -153,7 +153,7 @@ describe Projects::TreeController do
let(:id) { 'master/README.md' }
it 'redirects' do
- redirect_url = "/#{project.full_path}/blob/master/README.md"
+ redirect_url = "/#{project.full_path}/-/blob/master/README.md"
expect(subject)
.to redirect_to(redirect_url)
end
@@ -181,7 +181,7 @@ describe Projects::TreeController do
it 'redirects to the new directory' do
expect(subject)
- .to redirect_to("/#{project.full_path}/tree/#{branch_name}/#{path}")
+ .to redirect_to("/#{project.full_path}/-/tree/#{branch_name}/#{path}")
expect(flash[:notice]).to eq('The directory has been successfully created.')
end
end
@@ -192,7 +192,7 @@ describe Projects::TreeController do
it 'does not allow overwriting of existing files' do
expect(subject)
- .to redirect_to("/#{project.full_path}/tree/master")
+ .to redirect_to("/#{project.full_path}/-/tree/master")
expect(flash[:alert]).to eq('A file with this name already exists')
end
end
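
Note (not part of this diff): the redirect expectations above (and the raw-path expectation later in projects_controller_spec.rb) follow repository routes moving under the `/-/` scope. Illustrative paths only:

    # old                                    new
    # /group/project/tree/master         ->  /group/project/-/tree/master
    # /group/project/blob/master/README  ->  /group/project/-/blob/master/README
    # /group/project/raw/master/logo.png ->  /group/project/-/raw/master/logo.png
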
diff --git a/spec/controllers/projects/uploads_controller_spec.rb b/spec/controllers/projects/uploads_controller_spec.rb
index a70669e86a6..bb5415ee62c 100644
--- a/spec/controllers/projects/uploads_controller_spec.rb
+++ b/spec/controllers/projects/uploads_controller_spec.rb
@@ -49,7 +49,7 @@ describe Projects::UploadsController do
it "responds with status internal_server_error" do
post_authorize
- expect(response).to have_gitlab_http_status(500)
+ expect(response).to have_gitlab_http_status(:internal_server_error)
expect(response.body).to eq(_('Error uploading file'))
end
end
diff --git a/spec/controllers/projects/usage_ping_controller_spec.rb b/spec/controllers/projects/usage_ping_controller_spec.rb
index a9abbff160d..284db93d7a8 100644
--- a/spec/controllers/projects/usage_ping_controller_spec.rb
+++ b/spec/controllers/projects/usage_ping_controller_spec.rb
@@ -24,7 +24,7 @@ describe Projects::UsagePingController do
it 'returns 302' do
subject
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -32,7 +32,7 @@ describe Projects::UsagePingController do
it 'returns 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -57,7 +57,7 @@ describe Projects::UsagePingController do
it 'returns 404' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/projects/wikis_controller_spec.rb b/spec/controllers/projects/wikis_controller_spec.rb
index bfa555aab4c..6dd050af277 100644
--- a/spec/controllers/projects/wikis_controller_spec.rb
+++ b/spec/controllers/projects/wikis_controller_spec.rb
@@ -25,7 +25,7 @@ describe Projects::WikisController do
it 'redirects to #show and appends a `random_title` param' do
subject
- expect(response).to have_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(Rails.application.routes.recognize_path(response.redirect_url)).to include(
controller: 'projects/wikis',
action: 'show'
@@ -70,7 +70,7 @@ describe Projects::WikisController do
end
it "returns status #{expected_status}" do
- expect(response).to have_http_status(expected_status)
+ expect(response).to have_gitlab_http_status(expected_status)
end
end
@@ -103,7 +103,7 @@ describe Projects::WikisController do
subject
- expect(response).to have_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:page).title).to eq(wiki_title)
end
@@ -113,7 +113,7 @@ describe Projects::WikisController do
subject
- expect(response).to have_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
expect(flash[:notice]).to eq(_('The content of this page is not encoded in UTF-8. Edits can only be made via the Git repository.'))
end
end
@@ -204,7 +204,7 @@ describe Projects::WikisController do
it 'shows the edit page' do
subject
- expect(response).to have_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to include(s_('Wiki|Edit Page'))
end
end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 3870ef5d947..67e24841dee 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -27,7 +27,7 @@ describe ProjectsController do
get :new, params: { namespace_id: group.id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('new')
end
end
@@ -36,7 +36,7 @@ describe ProjectsController do
it 'responds with status 404' do
get :new, params: { namespace_id: group.id }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(response).not_to render_template('new')
end
end
@@ -199,7 +199,7 @@ describe ProjectsController do
it 'renders a 503' do
get :show, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(503)
+ expect(response).to have_gitlab_http_status(:service_unavailable)
end
end
@@ -304,7 +304,7 @@ describe ProjectsController do
get :show, params: { namespace_id: project.namespace, id: project }, format: :git
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(namespace_project_path)
end
end
@@ -371,7 +371,7 @@ describe ProjectsController do
id: project.path
}
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'sets the badge API endpoint' do
@@ -411,7 +411,7 @@ describe ProjectsController do
end
it 'redirects to projects path' do
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(project_path(project))
end
end
@@ -431,7 +431,7 @@ describe ProjectsController do
end
it 'returns 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -459,7 +459,7 @@ describe ProjectsController do
end
it 'redirects to projects path' do
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(project_path(project))
end
end
@@ -479,7 +479,7 @@ describe ProjectsController do
end
it 'returns 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -506,7 +506,7 @@ describe ProjectsController do
id: project.path
}
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -526,7 +526,7 @@ describe ProjectsController do
id: project.path
}
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
@@ -561,7 +561,7 @@ describe ProjectsController do
expect(assign_repository_path).to include(project.path)
end
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -577,7 +577,7 @@ describe ProjectsController do
.not_to change { project.reload.path }
expect(controller).to set_flash.now[:alert].to(s_('UpdateProject|Cannot rename project because it contains container registry tags!'))
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -595,7 +595,7 @@ describe ProjectsController do
project: params
}
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
params.each do |param, value|
expect(project.public_send(param)).to eq(value)
end
@@ -693,7 +693,7 @@ describe ProjectsController do
project.reload
expect(project.namespace).to eq(new_namespace)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
context 'when new namespace is empty' do
@@ -714,7 +714,7 @@ describe ProjectsController do
project.reload
expect(project.namespace).to eq(old_namespace)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(flash[:alert]).to eq s_('TransferProject|Please select a new namespace for your project.')
end
end
@@ -731,7 +731,7 @@ describe ProjectsController do
delete :destroy, params: { namespace_id: project.namespace, id: project }
expect { Project.find(orig_id) }.to raise_error(ActiveRecord::RecordNotFound)
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(dashboard_projects_path)
end
@@ -773,7 +773,7 @@ describe ProjectsController do
end
it 'has http status 200' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'changes the user incoming email token' do
@@ -803,7 +803,7 @@ describe ProjectsController do
end
it 'has http status 200' do
- expect(response).to have_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'changes the user incoming email token' do
@@ -896,7 +896,7 @@ describe ProjectsController do
id: project
},
format: :js)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -945,7 +945,7 @@ describe ProjectsController do
sign_in(user)
get :refs, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -968,7 +968,7 @@ describe ProjectsController do
it 'returns 404' do
post :preview_markdown, params: { namespace_id: private_project.namespace, id: private_project, text: '*Markdown* text' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1013,7 +1013,7 @@ describe ProjectsController do
end
it 'renders JSON body with image links expanded' do
- expanded_path = "/#{project_with_repo.full_path}/raw/master/files/images/logo-white.png"
+ expanded_path = "/#{project_with_repo.full_path}/-/raw/master/files/images/logo-white.png"
post :preview_markdown, params: preview_markdown_params
@@ -1034,7 +1034,7 @@ describe ProjectsController do
get :show, params: { namespace_id: public_project.namespace, id: public_project }
expect(assigns(:project)).to eq(public_project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -1073,13 +1073,13 @@ describe ProjectsController do
it 'does not 404' do
post :toggle_star, params: { namespace_id: public_project.namespace, id: public_project.path.upcase }
- expect(response).not_to have_gitlab_http_status(404)
+ expect(response).not_to have_gitlab_http_status(:not_found)
end
it 'does not redirect to the correct casing' do
post :toggle_star, params: { namespace_id: public_project.namespace, id: public_project.path.upcase }
- expect(response).not_to have_gitlab_http_status(301)
+ expect(response).not_to have_gitlab_http_status(:moved_permanently)
end
end
@@ -1089,7 +1089,7 @@ describe ProjectsController do
it 'returns not found' do
post :toggle_star, params: { namespace_id: 'foo', id: 'bar' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -1103,13 +1103,13 @@ describe ProjectsController do
it 'does not 404' do
delete :destroy, params: { namespace_id: project.namespace, id: project.path.upcase }
- expect(response).not_to have_gitlab_http_status(404)
+ expect(response).not_to have_gitlab_http_status(:not_found)
end
it 'does not redirect to the correct casing' do
delete :destroy, params: { namespace_id: project.namespace, id: project.path.upcase }
- expect(response).not_to have_gitlab_http_status(301)
+ expect(response).not_to have_gitlab_http_status(:moved_permanently)
end
end
@@ -1119,7 +1119,7 @@ describe ProjectsController do
it 'returns not found' do
delete :destroy, params: { namespace_id: 'foo', id: 'bar' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -1143,7 +1143,7 @@ describe ProjectsController do
get action, params: { namespace_id: project.namespace, id: project }
expect(flash[:alert]).to eq('This endpoint has been requested too many times. Try again later.')
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -1154,7 +1154,7 @@ describe ProjectsController do
it 'returns 302' do
get action, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -1166,7 +1166,7 @@ describe ProjectsController do
it 'returns 404' do
get action, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1183,7 +1183,7 @@ describe ProjectsController do
it 'returns 302' do
get action, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -1195,7 +1195,7 @@ describe ProjectsController do
it 'returns 404' do
get action, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1212,7 +1212,7 @@ describe ProjectsController do
it 'returns 302' do
post action, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -1224,7 +1224,7 @@ describe ProjectsController do
it 'returns 404' do
post action, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -1236,7 +1236,7 @@ describe ProjectsController do
it 'returns 302' do
post action, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
@@ -1248,7 +1248,7 @@ describe ProjectsController do
it 'returns 404' do
post action, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1285,7 +1285,7 @@ describe ProjectsController do
it 'redirects to the project page' do
get :resolve, params: { id: project.id }
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(project_path(project))
end
end
@@ -1307,14 +1307,14 @@ describe ProjectsController do
it 'gives 404 for existing project' do
get :resolve, params: { id: project.id }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
it 'gives 404 for non-existing project' do
get :resolve, params: { id: '0' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -1328,7 +1328,7 @@ describe ProjectsController do
it 'gives 404 for private project' do
get :resolve, params: { id: project.id }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 214eb35ec9d..8d79e505e5d 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -30,7 +30,7 @@ describe RegistrationsController do
it 'renders new template and sets the resource variable' do
expect(subject).to render_template(:new)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:resource)).to be_a(User)
end
end
@@ -48,7 +48,7 @@ describe RegistrationsController do
it 'renders new template and sets the resource variable' do
subject
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(new_user_session_path(anchor: 'register-pane'))
end
end
@@ -79,31 +79,29 @@ describe RegistrationsController do
stub_application_setting(send_user_confirmation_email: true)
end
- context 'when soft email confirmation is not enabled' do
+ context 'when a grace period is active for confirming the email address' do
before do
- stub_feature_flags(soft_email_confirmation: false)
- allow(User).to receive(:allow_unconfirmed_access_for).and_return 0
+ allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
end
- it 'does not authenticate the user and sends a confirmation email' do
+ it 'sends a confirmation email and redirects to the dashboard' do
post(:create, params: user_params)
expect(ActionMailer::Base.deliveries.last.to.first).to eq(user_params[:user][:email])
- expect(subject.current_user).to be_nil
+ expect(response).to redirect_to(dashboard_projects_path)
end
end
- context 'when soft email confirmation is enabled' do
+ context 'when no grace period is active for confirming the email address' do
before do
- stub_feature_flags(soft_email_confirmation: true)
- allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
+ allow(User).to receive(:allow_unconfirmed_access_for).and_return 0
end
- it 'authenticates the user and sends a confirmation email' do
+ it 'sends a confirmation email and redirects to the almost there page' do
post(:create, params: user_params)
expect(ActionMailer::Base.deliveries.last.to.first).to eq(user_params[:user][:email])
- expect(response).to redirect_to(dashboard_projects_path)
+ expect(response).to redirect_to(users_almost_there_path)
end
end
end
@@ -200,7 +198,7 @@ describe RegistrationsController do
.and_call_original
expect(Gitlab::AuthLogger).to receive(:error).with(auth_log_attributes).once
expect { post(:create, params: user_params, session: session_params) }.not_to change(User, :count)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to be_empty
end
end
@@ -415,4 +413,36 @@ describe RegistrationsController do
patch :update_registration, params: { user: { role: 'software_developer', setup_for_company: 'false' } }
end
end
+
+ describe '#welcome' do
+ subject { get :welcome }
+
+ before do
+ sign_in(create(:user))
+ end
+
+ context 'signup_flow experiment enabled' do
+ before do
+ stub_experiment_for_user(signup_flow: true)
+ end
+
+ it 'renders the devise_experimental_separate_sign_up_flow layout' do
+ expected_layout = Gitlab.ee? ? :checkout : :devise_experimental_separate_sign_up_flow
+
+ expect(subject).to render_template(expected_layout)
+ end
+ end
+
+ context 'signup_flow experiment disabled' do
+ before do
+ stub_experiment_for_user(signup_flow: false)
+ end
+
+ it 'renders the devise layout' do
+ expected_layout = Gitlab.ee? ? :checkout : :devise
+
+ expect(subject).to render_template(expected_layout)
+ end
+ end
+ end
end
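
Note (not part of this diff): the rewritten confirmation contexts above pivot on Devise's confirmable setting rather than the removed `soft_email_confirmation` feature-flag stubs. `allow_unconfirmed_access_for` is the window during which an unconfirmed user may still sign in, which is why the two contexts expect different redirects. A minimal mapping, as read from the examples above:

    # allow_unconfirmed_access_for  ->  expected post-signup behaviour
    #   2.days                      ->  signed in, redirect to dashboard_projects_path
    #   0                           ->  not signed in, redirect to users_almost_there_path
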
diff --git a/spec/controllers/repositories/git_http_controller_spec.rb b/spec/controllers/repositories/git_http_controller_spec.rb
new file mode 100644
index 00000000000..10a7b72ca89
--- /dev/null
+++ b/spec/controllers/repositories/git_http_controller_spec.rb
@@ -0,0 +1,146 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Repositories::GitHttpController do
+ include GitHttpHelpers
+
+ let_it_be(:project) { create(:project, :public, :repository) }
+
+ let(:namespace_id) { project.namespace.to_param }
+ let(:repository_id) { project.path + '.git' }
+ let(:project_params) do
+ {
+ namespace_id: namespace_id,
+ repository_id: repository_id
+ }
+ end
+ let(:params) { project_params }
+
+ describe 'HEAD #info_refs' do
+ it 'returns 403' do
+ head :info_refs, params: params
+
+ expect(response.status).to eq(403)
+ end
+ end
+
+ shared_examples 'info_refs behavior' do
+ describe 'GET #info_refs' do
+ let(:params) { project_params.merge(service: 'git-upload-pack') }
+
+ it 'returns 401 for unauthenticated requests to public repositories when http protocol is disabled' do
+ stub_application_setting(enabled_git_access_protocol: 'ssh')
+ allow(controller).to receive(:basic_auth_provided?).and_call_original
+
+ expect(controller).to receive(:http_download_allowed?).and_call_original
+
+ get :info_refs, params: params
+
+ expect(response.status).to eq(401)
+ end
+
+ context 'with authorized user' do
+ let(:user) { project.owner }
+
+ before do
+ request.headers.merge! auth_env(user.username, user.password, nil)
+ end
+
+ it 'returns 200' do
+ get :info_refs, params: params
+
+ expect(response.status).to eq(200)
+ end
+
+ it 'updates the user activity' do
+ expect_next_instance_of(Users::ActivityService) do |activity_service|
+ expect(activity_service).to receive(:execute)
+ end
+
+ get :info_refs, params: params
+ end
+ end
+
+ context 'with exceptions' do
+ before do
+ allow(controller).to receive(:verify_workhorse_api!).and_return(true)
+ end
+
+ it 'returns 503 with GRPC Unavailable' do
+ allow(controller).to receive(:access_check).and_raise(GRPC::Unavailable)
+
+ get :info_refs, params: params
+
+ expect(response.status).to eq(503)
+ end
+
+ it 'returns 503 with timeout error' do
+ allow(controller).to receive(:access_check).and_raise(Gitlab::GitAccess::TimeoutError)
+
+ get :info_refs, params: params
+
+ expect(response.status).to eq(503)
+ expect(response.body).to eq 'Gitlab::GitAccess::TimeoutError'
+ end
+ end
+ end
+ end
+
+ shared_examples 'git_upload_pack behavior' do |expected|
+ describe 'POST #git_upload_pack' do
+ before do
+ allow(controller).to receive(:authenticate_user).and_return(true)
+ allow(controller).to receive(:verify_workhorse_api!).and_return(true)
+ allow(controller).to receive(:access_check).and_return(nil)
+ end
+
+ after do
+ post :git_upload_pack, params: params
+ end
+
+ context 'on a read-only instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ end
+
+ it 'does not update project statistics' do
+ expect(ProjectDailyStatisticsWorker).not_to receive(:perform_async)
+ end
+ end
+
+ if expected
+ it 'updates project statistics' do
+ expect(ProjectDailyStatisticsWorker).to receive(:perform_async)
+ end
+ else
+ it 'does not update project statistics' do
+ expect(ProjectDailyStatisticsWorker).not_to receive(:perform_async)
+ end
+ end
+ end
+ end
+
+ shared_examples 'access checker class' do
+ let(:params) { project_params.merge(service: 'git-upload-pack') }
+
+ it 'calls the right access class checker with the right object' do
+ allow(controller).to receive(:verify_workhorse_api!).and_return(true)
+
+ access_double = double
+ expect(expected_class).to receive(:new).with(anything, expected_object, 'http', anything).and_return(access_double)
+ allow(access_double).to receive(:check).and_return(false)
+
+ get :info_refs, params: params
+ end
+ end
+
+ context 'when repository container is a project' do
+ it_behaves_like 'info_refs behavior'
+ it_behaves_like 'git_upload_pack behavior', true
+ it_behaves_like 'access checker class' do
+ let(:expected_class) { Gitlab::GitAccess }
+ let(:expected_object) { project }
+ end
+ end
+end
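
Note (not part of this diff): the new spec above passes a positional argument to a shared example (`it_behaves_like 'git_upload_pack behavior', true`), and that argument decides at definition time which example gets generated. A minimal sketch of the mechanism — worker and request names below are illustrative, not from the spec:

    RSpec.shared_examples 'statistics scheduling' do |expected|
      if expected
        it 'schedules the statistics worker' do
          expect(StatsWorker).to receive(:perform_async)
          run_request
        end
      else
        it 'does not schedule the statistics worker' do
          expect(StatsWorker).not_to receive(:perform_async)
          run_request
        end
      end
    end

    # it_behaves_like 'statistics scheduling', true   # container that tracks statistics
    # it_behaves_like 'statistics scheduling', false  # container that does not
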
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index ca7b8a4036a..19786417d76 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -21,13 +21,13 @@ describe SearchController do
it 'blocks access without a project_id' do
get action, params: params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'allows access with a project_id' do
get action, params: params.merge(project_id: create(:project, :public).id)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -42,13 +42,13 @@ describe SearchController do
it 'renders a 403 when no project is given' do
get action, params: params
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'renders a 200 when a project was set' do
get action, params: params.merge(project_id: project.id)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -57,7 +57,7 @@ describe SearchController do
it 'still allows accessing the search page' do
get :show
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -195,7 +195,7 @@ describe SearchController do
get :count, params: { search: 'hello', scope: 'projects' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq({ 'count' => '1' })
end
diff --git a/spec/controllers/sent_notifications_controller_spec.rb b/spec/controllers/sent_notifications_controller_spec.rb
index 4dd4f49dcf1..b4b867f5c66 100644
--- a/spec/controllers/sent_notifications_controller_spec.rb
+++ b/spec/controllers/sent_notifications_controller_spec.rb
@@ -30,6 +30,16 @@ describe SentNotificationsController do
let(:target_project) { project }
describe 'GET unsubscribe' do
+ shared_examples 'returns 404' do
+ it 'does not set the flash message' do
+ expect(controller).not_to set_flash[:notice]
+ end
+
+ it 'returns a 404' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
context 'when the user is not logged in' do
context 'when the force param is passed' do
before do
@@ -156,6 +166,16 @@ describe SentNotificationsController do
end
end
end
+
+ context 'when the noteable associated to the notification has been deleted' do
+ before do
+ sent_notification.noteable.destroy!
+
+ get(:unsubscribe, params: { id: sent_notification.reply_key })
+ end
+
+ it_behaves_like 'returns 404'
+ end
end
context 'when the user is logged in' do
@@ -168,17 +188,7 @@ describe SentNotificationsController do
get(:unsubscribe, params: { id: sent_notification.reply_key.reverse })
end
- it 'does not unsubscribe the user' do
- expect(issue.subscribed?(user, project)).to be_truthy
- end
-
- it 'does not set the flash message' do
- expect(controller).not_to set_flash[:notice]
- end
-
- it 'returns a 404' do
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ it_behaves_like 'returns 404'
end
context 'when the force param is passed' do
@@ -254,6 +264,16 @@ describe SentNotificationsController do
end
end
end
+
+ context 'when the noteable associated to the notification has been deleted' do
+ before do
+ sent_notification.noteable.destroy!
+
+ get(:unsubscribe, params: { id: sent_notification.reply_key })
+ end
+
+ it_behaves_like 'returns 404'
+ end
end
end
end
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index 1e47df150b4..2f597fd5cb3 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -22,7 +22,7 @@ describe SessionsController do
it 'redirects to :omniauth_authorize_path' do
get(:new)
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to('/saml')
end
end
@@ -31,7 +31,7 @@ describe SessionsController do
it 'responds with 200' do
get(:new, params: { auto_sign_in: 'false' })
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -394,8 +394,7 @@ describe SessionsController do
end
it 'warns about invalid login' do
- expect(response).to set_flash.now[:alert]
- .to /Invalid Login or password/
+ expect(response).to set_flash.now[:alert].to /Your account is locked./
end
it 'locks the user' do
@@ -405,8 +404,7 @@ describe SessionsController do
it 'keeps the user locked on future login attempts' do
post(:create, params: { user: { login: user.username, password: user.password } })
- expect(response)
- .to set_flash.now[:alert].to /Invalid Login or password/
+ expect(response).to set_flash.now[:alert].to /Your account is locked./
end
end
end
diff --git a/spec/controllers/snippets/notes_controller_spec.rb b/spec/controllers/snippets/notes_controller_spec.rb
index fd4b95ce226..b93df3555ab 100644
--- a/spec/controllers/snippets/notes_controller_spec.rb
+++ b/spec/controllers/snippets/notes_controller_spec.rb
@@ -22,7 +22,7 @@ describe Snippets::NotesController do
end
it "returns status 200" do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it "returns not empty array of notes" do
@@ -39,7 +39,7 @@ describe Snippets::NotesController do
it "returns status 404" do
get :index, params: { snippet_id: internal_snippet }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -51,7 +51,7 @@ describe Snippets::NotesController do
it "returns status 200" do
get :index, params: { snippet_id: internal_snippet }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -65,7 +65,7 @@ describe Snippets::NotesController do
it "returns status 404" do
get :index, params: { snippet_id: private_snippet }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -77,7 +77,7 @@ describe Snippets::NotesController do
it "returns status 404" do
get :index, params: { snippet_id: private_snippet }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -91,7 +91,7 @@ describe Snippets::NotesController do
it "returns status 200" do
get :index, params: { snippet_id: private_snippet }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it "returns 1 note" do
@@ -135,7 +135,7 @@ describe Snippets::NotesController do
it 'returns status 302' do
post :create, params: request_params
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'creates the note' do
@@ -158,7 +158,7 @@ describe Snippets::NotesController do
it 'returns status 302' do
post :create, params: request_params
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'creates the note' do
@@ -186,7 +186,7 @@ describe Snippets::NotesController do
it 'returns status 404' do
post :create, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'does not create the note' do
@@ -204,7 +204,7 @@ describe Snippets::NotesController do
it 'returns status 302' do
post :create, params: request_params
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'creates the note on the public snippet' do
@@ -222,7 +222,7 @@ describe Snippets::NotesController do
it 'returns status 302' do
post :create, params: request_params
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'creates the note' do
@@ -249,7 +249,7 @@ describe Snippets::NotesController do
it "returns status 200" do
delete :destroy, params: request_params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it "deletes the note" do
@@ -277,7 +277,7 @@ describe Snippets::NotesController do
it "returns status 404" do
delete :destroy, params: request_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "does not update the note" do
@@ -299,7 +299,7 @@ describe Snippets::NotesController do
it "toggles the award emoji" do
expect { subject }.to change { note.award_emoji.count }.by(1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it "removes the already awarded emoji when it exists" do
@@ -307,7 +307,7 @@ describe Snippets::NotesController do
expect { subject }.to change { AwardEmoji.count }.by(-1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb
index c8f9e4256c9..daa560649f0 100644
--- a/spec/controllers/snippets_controller_spec.rb
+++ b/spec/controllers/snippets_controller_spec.rb
@@ -51,7 +51,7 @@ describe SnippetsController do
it 'responds with status 200' do
get :new
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
context 'when user is not allowed to create a personal snippet' do
@@ -60,7 +60,7 @@ describe SnippetsController do
it 'responds with status 404' do
get :new
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -90,7 +90,7 @@ describe SnippetsController do
it 'responds with status 404' do
get :show, params: { id: other_personal_snippet.to_param }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -99,13 +99,13 @@ describe SnippetsController do
get :show, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'responds with status 404 when embeddable content is requested' do
get :show, params: { id: personal_snippet.to_param }, format: :js
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -131,13 +131,13 @@ describe SnippetsController do
get :show, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'responds with status 404 when embeddable content is requested' do
get :show, params: { id: personal_snippet.to_param }, format: :js
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -162,14 +162,14 @@ describe SnippetsController do
get :show, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'responds with status 200 when embeddable content is requested' do
get :show, params: { id: personal_snippet.to_param }, format: :js
expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -178,7 +178,7 @@ describe SnippetsController do
get :show, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -192,7 +192,7 @@ describe SnippetsController do
it 'responds with status 404' do
get :show, params: { id: 'doesntexist' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -234,7 +234,7 @@ describe SnippetsController do
create_snippet(visibility_level: Snippet::PUBLIC)
end.not_to change { Snippet.count }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -275,7 +275,7 @@ describe SnippetsController do
context 'when the snippet is spam' do
before do
- allow_next_instance_of(AkismetService) do |instance|
+ allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive(:spam?).and_return(true)
end
end
@@ -349,7 +349,7 @@ describe SnippetsController do
context 'when the snippet is spam' do
before do
- allow_next_instance_of(AkismetService) do |instance|
+ allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive(:spam?).and_return(true)
end
end
@@ -459,7 +459,7 @@ describe SnippetsController do
let(:snippet) { create(:personal_snippet, :public, author: user) }
before do
- allow_next_instance_of(AkismetService) do |instance|
+ allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive_messages(submit_spam: true)
end
stub_application_setting(akismet_enabled: true)
@@ -496,7 +496,7 @@ describe SnippetsController do
it 'responds with status 404' do
get :raw, params: { id: other_personal_snippet.to_param }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -507,7 +507,7 @@ describe SnippetsController do
it 'responds with status 200' do
expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'has expected headers' do
@@ -517,7 +517,7 @@ describe SnippetsController do
end
it "sets #{Gitlab::Workhorse::DETECT_HEADER} header" do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
end
end
@@ -544,7 +544,7 @@ describe SnippetsController do
get :raw, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -569,7 +569,7 @@ describe SnippetsController do
get :raw, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
context 'CRLF line ending' do
@@ -596,7 +596,7 @@ describe SnippetsController do
get :raw, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -610,7 +610,7 @@ describe SnippetsController do
it 'responds with status 404' do
get :raw, params: { id: 'doesntexist' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -712,7 +712,7 @@ describe SnippetsController do
it 'responds with status 404' do
delete :destroy, params: { id: snippet.to_param }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index ff15e685007..f42d0560e80 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -46,7 +46,7 @@ describe UploadsController do
it "returns 401 when the user is not logged in" do
post :create, params: { model: model, id: snippet.id }, format: :json
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "returns 404 when user can't comment on a snippet" do
@@ -55,7 +55,7 @@ describe UploadsController do
sign_in(user)
post :create, params: { model: model, id: private_snippet.id }, format: :json
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -67,7 +67,7 @@ describe UploadsController do
it "returns an error without file" do
post :create, params: { model: model, id: snippet.id }, format: :json
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
it "returns an error with invalid model" do
@@ -78,7 +78,7 @@ describe UploadsController do
it "returns 404 status when object not found" do
post :create, params: { model: model, id: 9999 }, format: :json
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'with valid image' do
@@ -129,7 +129,7 @@ describe UploadsController do
it 'returns 401 when the user has no access' do
post :create, params: { model: 'user', id: user.id }, format: :json
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
context 'when user is logged in' do
@@ -188,7 +188,7 @@ describe UploadsController do
post :create, params: { model: model, id: another_user.id, file: txt }, format: :json
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -246,7 +246,7 @@ describe UploadsController do
it "responds with status 401" do
get :show, params: { model: "user", mounted_as: "avatar", id: user.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -254,7 +254,7 @@ describe UploadsController do
it "responds with status 200" do
get :show, params: { model: "user", mounted_as: "avatar", id: user.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content publicly cached' do
@@ -271,7 +271,7 @@ describe UploadsController do
it "responds with status 200" do
get :show, params: { model: "user", mounted_as: "avatar", id: user.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content publicly cached' do
@@ -296,7 +296,7 @@ describe UploadsController do
it "responds with status 200" do
get :show, params: { model: "project", mounted_as: "avatar", id: project.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content 5 min private cached with revalidation' do
@@ -316,7 +316,7 @@ describe UploadsController do
it "responds with status 200" do
get :show, params: { model: "project", mounted_as: "avatar", id: project.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content 5 min private cached with revalidation' do
@@ -338,7 +338,7 @@ describe UploadsController do
it "responds with status 401" do
get :show, params: { model: "project", mounted_as: "avatar", id: project.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -361,7 +361,7 @@ describe UploadsController do
it "responds with status 401" do
get :show, params: { model: "project", mounted_as: "avatar", id: project.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -369,7 +369,7 @@ describe UploadsController do
it "responds with status 200" do
get :show, params: { model: "project", mounted_as: "avatar", id: project.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content 5 min private cached with revalidation' do
@@ -386,7 +386,7 @@ describe UploadsController do
it "responds with status 404" do
get :show, params: { model: "project", mounted_as: "avatar", id: project.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -401,7 +401,7 @@ describe UploadsController do
it "responds with status 200" do
get :show, params: { model: "group", mounted_as: "avatar", id: group.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content 5 min private cached with revalidation' do
@@ -421,7 +421,7 @@ describe UploadsController do
it "responds with status 200" do
get :show, params: { model: "group", mounted_as: "avatar", id: group.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content 5 min private cached with revalidation' do
@@ -457,7 +457,7 @@ describe UploadsController do
it "responds with status 401" do
get :show, params: { model: "group", mounted_as: "avatar", id: group.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -465,7 +465,7 @@ describe UploadsController do
it "responds with status 200" do
get :show, params: { model: "group", mounted_as: "avatar", id: group.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content 5 min private cached with revalidation' do
@@ -482,7 +482,7 @@ describe UploadsController do
it "responds with status 404" do
get :show, params: { model: "group", mounted_as: "avatar", id: group.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -502,7 +502,7 @@ describe UploadsController do
it "responds with status 200" do
get :show, params: { model: "note", mounted_as: "attachment", id: note.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content not cached' do
@@ -522,7 +522,7 @@ describe UploadsController do
it "responds with status 200" do
get :show, params: { model: "note", mounted_as: "attachment", id: note.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content not cached' do
@@ -544,7 +544,7 @@ describe UploadsController do
it "responds with status 401" do
get :show, params: { model: "note", mounted_as: "attachment", id: note.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -567,7 +567,7 @@ describe UploadsController do
it "responds with status 401" do
get :show, params: { model: "note", mounted_as: "attachment", id: note.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -575,7 +575,7 @@ describe UploadsController do
it "responds with status 200" do
get :show, params: { model: "note", mounted_as: "attachment", id: note.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content not cached' do
@@ -592,7 +592,7 @@ describe UploadsController do
it "responds with status 404" do
get :show, params: { model: "note", mounted_as: "attachment", id: note.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -607,7 +607,7 @@ describe UploadsController do
it 'responds with status 200' do
get :show, params: { model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content publicly cached' do
@@ -627,7 +627,7 @@ describe UploadsController do
it 'responds with status 200' do
get :show, params: { model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content publicly cached' do
@@ -648,8 +648,8 @@ describe UploadsController do
it 'successfully returns the file' do
get :show, params: { model: 'appearance', mounted_as: 'favicon', id: appearance.id, filename: 'dk.png' }
- expect(response).to have_gitlab_http_status(200)
- expect(response.header['Content-Disposition']).to end_with 'filename="dk.png"'
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.header['Content-Disposition']).to include('filename="dk.png"')
end
end
@@ -657,7 +657,7 @@ describe UploadsController do
it 'returns a 404' do
get :show, params: { model: 'appearance', mounted_as: 'favicon', id: appearance.id, filename: 'bogus.png' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/controllers/user_callouts_controller_spec.rb b/spec/controllers/user_callouts_controller_spec.rb
index 07eaff2da09..04f73749e1d 100644
--- a/spec/controllers/user_callouts_controller_spec.rb
+++ b/spec/controllers/user_callouts_controller_spec.rb
@@ -13,7 +13,7 @@ describe UserCalloutsController do
subject { post :create, params: { feature_name: feature_name }, format: :json }
context 'with valid feature name' do
- let(:feature_name) { UserCallout.feature_names.first.first }
+ let(:feature_name) { UserCallout.feature_names.each_key.first }
context 'when callout entry does not exist' do
it 'creates a callout entry with dismissed state' do
@@ -28,7 +28,7 @@ describe UserCalloutsController do
end
context 'when callout entry already exists' do
- let!(:callout) { create(:user_callout, feature_name: UserCallout.feature_names.first.first, user: user) }
+ let!(:callout) { create(:user_callout, feature_name: UserCallout.feature_names.each_key.first, user: user) }
it 'returns success' do
subject
diff --git a/spec/controllers/users_controller_spec.rb b/spec/controllers/users_controller_spec.rb
index 597d2a185b5..2af398e143d 100644
--- a/spec/controllers/users_controller_spec.rb
+++ b/spec/controllers/users_controller_spec.rb
@@ -28,7 +28,7 @@ describe UsersController do
it 'renders the show template' do
get :show, params: { username: user.username }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('show')
end
end
@@ -53,7 +53,7 @@ describe UsersController do
it 'renders show' do
get :show, params: { username: user.username }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('show')
end
end
@@ -74,7 +74,7 @@ describe UsersController do
it 'renders 404' do
get :show, params: { username: 'nonexistent' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -130,7 +130,7 @@ describe UsersController do
get :calendar, params: { username: public_user.username }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -285,7 +285,7 @@ describe UsersController do
context 'format html' do
it 'renders snippets page' do
get :snippets, params: { username: user.username }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('show')
end
end
@@ -293,7 +293,7 @@ describe UsersController do
context 'format json' do
it 'response with snippets json data' do
get :snippets, params: { username: user.username }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to have_key('html')
end
end
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 482e0fbe7ce..f2f7d6cbafc 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -55,7 +55,7 @@ describe 'Database schema' do
members: %w[source_id created_by_id],
merge_requests: %w[last_edited_by_id state_id],
namespaces: %w[owner_id parent_id],
- notes: %w[author_id commit_id noteable_id updated_by_id resolved_by_id discussion_id],
+ notes: %w[author_id commit_id noteable_id updated_by_id resolved_by_id confirmed_by_id discussion_id],
notification_settings: %w[source_id],
oauth_access_grants: %w[resource_owner_id application_id],
oauth_access_tokens: %w[resource_owner_id application_id],
diff --git a/spec/factories/alerting/alert.rb b/spec/factories/alerting/alert.rb
new file mode 100644
index 00000000000..285bb14efa2
--- /dev/null
+++ b/spec/factories/alerting/alert.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :alerting_alert, class: 'Gitlab::Alerting::Alert' do
+ project
+ payload { {} }
+
+ transient do
+ metric_id { nil }
+
+ after(:build) do |alert, evaluator|
+ unless alert.payload.key?('startsAt')
+ alert.payload['startsAt'] = Time.now.rfc3339
+ end
+
+ if metric_id = evaluator.metric_id
+ alert.payload['labels'] ||= {}
+ alert.payload['labels']['gitlab_alert_id'] = metric_id.to_s
+ end
+ end
+ end
+
+ skip_create
+ end
+end
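
Illustrative usage, not part of the diff: assuming the GitLab spec environment loads the factory above, the transient metric_id flows into the payload like this.

    # build only (skip_create): the after(:build) hook fills in startsAt and
    # copies the transient metric_id into the payload labels.
    alert = build(:alerting_alert, metric_id: 42)

    alert.payload['labels']['gitlab_alert_id'] # => "42"
    alert.payload.key?('startsAt')             # => true
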
diff --git a/spec/factories/ci/bridge.rb b/spec/factories/ci/bridge.rb
index b2e8051eb5e..bacf163896c 100644
--- a/spec/factories/ci/bridge.rb
+++ b/spec/factories/ci/bridge.rb
@@ -9,6 +9,7 @@ FactoryBot.define do
tag { false }
created_at { 'Di 29. Okt 09:50:00 CET 2013' }
status { :created }
+ scheduling_type { 'stage' }
pipeline factory: :ci_pipeline
diff --git a/spec/factories/ci/build_need.rb b/spec/factories/ci/build_need.rb
index fa72e696343..aa571ffabb7 100644
--- a/spec/factories/ci/build_need.rb
+++ b/spec/factories/ci/build_need.rb
@@ -2,7 +2,7 @@
FactoryBot.define do
factory :ci_build_need, class: 'Ci::BuildNeed' do
- build factory: :ci_build
+ build factory: :ci_build, scheduling_type: :dag
sequence(:name) { |n| "build_#{n}" }
end
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 3d65f9065bf..5127d55645c 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -11,6 +11,7 @@ FactoryBot.define do
tag { false }
add_attribute(:protected) { false }
created_at { 'Di 29. Okt 09:50:00 CET 2013' }
+ scheduling_type { 'stage' }
pending
options do
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 7347c2b87ca..590578aec9a 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -139,6 +139,16 @@ FactoryBot.define do
end
end
+ trait :lsif do
+ file_type { :lsif }
+ file_format { :raw }
+
+ after(:build) do |artifact, evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/lsif.json.gz'), 'application/octet-stream')
+ end
+ end
+
trait :correct_checksum do
after(:build) do |artifact, evaluator|
artifact.file_sha256 = Digest::SHA256.file(artifact.file.path).hexdigest
diff --git a/spec/factories/container_expiration_policies.rb b/spec/factories/container_expiration_policies.rb
index 951127a4aa7..41c3a7f8cb9 100644
--- a/spec/factories/container_expiration_policies.rb
+++ b/spec/factories/container_expiration_policies.rb
@@ -2,7 +2,18 @@
FactoryBot.define do
factory :container_expiration_policy, class: 'ContainerExpirationPolicy' do
- association :project, factory: [:project, :without_container_expiration_policy]
+ # Note: because of the project_id primary_key on
+ # container_expiration_policies, and the create_container_expiration_policy
+ # callback on Project, we need to build the project first before assigning
+ # it to a container_expiration_policy.
+ #
+ # Also, if you wish to assign an existing project to a
+ # container_expiration_policy, you will then have to destroy the project's
+ # container_expiration_policy first.
+ before(:create) do |container_expiration_policy|
+ container_expiration_policy.project = build(:project) unless container_expiration_policy.project
+ end
+
cadence { '1d' }
enabled { true }
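
Illustrative sketch, not part of the diff: following the note above, reusing an existing project means first removing the policy that the create callback already built for it (assuming the has_one association is named container_expiration_policy, as the callback suggests).

    project = create(:project)
    project.container_expiration_policy.destroy!  # drop the auto-created policy
    project.reload

    policy = create(:container_expiration_policy, project: project)
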
diff --git a/spec/factories/deploy_tokens.rb b/spec/factories/deploy_tokens.rb
index 42ed66ac191..e86d4ab8812 100644
--- a/spec/factories/deploy_tokens.rb
+++ b/spec/factories/deploy_tokens.rb
@@ -9,6 +9,7 @@ FactoryBot.define do
read_registry { true }
revoked { false }
expires_at { 5.days.from_now }
+ deploy_token_type { DeployToken.deploy_token_types[:project_type] }
trait :revoked do
revoked { true }
@@ -21,5 +22,13 @@ FactoryBot.define do
trait :expired do
expires_at { Date.today - 1.month }
end
+
+ trait :group do
+ deploy_token_type { DeployToken.deploy_token_types[:group_type] }
+ end
+
+ trait :project do
+ deploy_token_type { DeployToken.deploy_token_types[:project_type] }
+ end
end
end
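
Illustrative usage, not part of the diff: the new traits pick the token type explicitly, while the factory default stays project-scoped.

    group_token   = create(:deploy_token, :group)
    project_token = create(:deploy_token, :project, :revoked)

    group_token.deploy_token_type   # => "group_type"
    project_token.deploy_token_type # => "project_type"
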
diff --git a/spec/factories/deployment_clusters.rb b/spec/factories/deployment_clusters.rb
new file mode 100644
index 00000000000..1bdfff79aaf
--- /dev/null
+++ b/spec/factories/deployment_clusters.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :deployment_cluster, class: 'DeploymentCluster' do
+ cluster
+ deployment
+ kubernetes_namespace { 'the-namespace' }
+ end
+end
diff --git a/spec/factories/diff_position.rb b/spec/factories/diff_position.rb
new file mode 100644
index 00000000000..a43c5afdff4
--- /dev/null
+++ b/spec/factories/diff_position.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :diff_position, class: 'Gitlab::Diff::Position' do
+ skip_create # non-model factories (i.e. without #save)
+
+ transient do
+ file { 'path/to/file' }
+
+ # Allow diff to be passed as a single object.
+ diff_refs do
+ ::Gitlab::Diff::DiffRefs.new(
+ base_sha: Digest::SHA1.hexdigest(SecureRandom.hex),
+ head_sha: Digest::SHA1.hexdigest(SecureRandom.hex),
+ start_sha: Digest::SHA1.hexdigest(SecureRandom.hex)
+ )
+ end
+ end
+
+ old_path { file }
+ new_path { file }
+
+ base_sha { diff_refs&.base_sha }
+ head_sha { diff_refs&.head_sha }
+ start_sha { diff_refs&.start_sha }
+
+ initialize_with { new(attributes) }
+
+ trait :moved do
+ new_path { 'path/to/new.file' }
+ end
+
+ factory :text_diff_position do
+ position_type { 'text' }
+ old_line { 10 }
+ new_line { 10 }
+
+ trait :added do
+ old_line { nil }
+ end
+ end
+
+ factory :image_diff_position do
+ position_type { 'image' }
+ x { 1 }
+ y { 1 }
+ width { 10 }
+ height { 10 }
+ end
+ end
+end
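
Illustrative usage, not part of the diff: these skip_create factories build Gitlab::Diff::Position values entirely in memory.

    added = build(:text_diff_position, :added, file: 'app/models/user.rb', new_line: 7)
    image = build(:image_diff_position, file: 'files/images/logo.png')

    added.old_line      # => nil (the :added trait clears it)
    added.new_path      # => "app/models/user.rb"
    image.position_type # => "image"
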
diff --git a/spec/factories/environments.rb b/spec/factories/environments.rb
index 323ea2d478b..998672ebe7c 100644
--- a/spec/factories/environments.rb
+++ b/spec/factories/environments.rb
@@ -45,7 +45,7 @@ FactoryBot.define do
self.when { 'manual' }
end
- trait :auto_stopped do
+ trait :auto_stoppable do
auto_stop_at { 1.day.ago }
end
diff --git a/spec/factories/error_tracking/detailed_error.rb b/spec/factories/error_tracking/detailed_error.rb
index 07b6c53e3cd..83004ffae38 100644
--- a/spec/factories/error_tracking/detailed_error.rb
+++ b/spec/factories/error_tracking/detailed_error.rb
@@ -1,41 +1,20 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :detailed_error_tracking_error, class: 'Gitlab::ErrorTracking::DetailedError' do
- id { '1' }
- title { 'title' }
- type { 'error' }
- user_count { 1 }
- count { 2 }
- first_seen { Time.now.iso8601 }
- last_seen { Time.now.iso8601 }
- message { 'message' }
- culprit { 'culprit' }
- external_url { 'http://example.com/id' }
+ factory :detailed_error_tracking_error, parent: :error_tracking_error, class: 'Gitlab::ErrorTracking::DetailedError' do
+ gitlab_issue { 'http://gitlab.example.com/issues/1' }
external_base_url { 'http://example.com' }
- project_id { 'project1' }
- project_name { 'project name' }
- project_slug { 'project_name' }
- short_id { 'ID' }
- status { 'unresolved' }
+ first_release_last_commit { '68c914da9' }
+ last_release_last_commit { '9ad419c86' }
+ first_release_short_version { 'abc123' }
+ last_release_short_version { 'abc123' }
+ first_release_version { '12345678' }
tags do
{
level: 'error',
logger: 'rails'
}
end
- frequency do
- [
- [Time.now.to_i, 10]
- ]
- end
- gitlab_issue { 'http://gitlab.example.com/issues/1' }
- first_release_last_commit { '68c914da9' }
- last_release_last_commit { '9ad419c86' }
- first_release_short_version { 'abc123' }
- last_release_short_version { 'abc123' }
- first_release_version { '12345678' }
-
skip_create
end
end
diff --git a/spec/factories/error_tracking/error.rb b/spec/factories/error_tracking/error.rb
index 5be1f074555..e5f2e2ca9a7 100644
--- a/spec/factories/error_tracking/error.rb
+++ b/spec/factories/error_tracking/error.rb
@@ -2,13 +2,13 @@
FactoryBot.define do
factory :error_tracking_error, class: 'Gitlab::ErrorTracking::Error' do
- id { 'id' }
+ id { '1' }
title { 'title' }
type { 'error' }
user_count { 1 }
count { 2 }
- first_seen { Time.now }
- last_seen { Time.now }
+ first_seen { Time.now.iso8601 }
+ last_seen { Time.now.iso8601 }
message { 'message' }
culprit { 'culprit' }
external_url { 'http://example.com/id' }
@@ -17,7 +17,11 @@ FactoryBot.define do
project_slug { 'project_name' }
short_id { 'ID' }
status { 'unresolved' }
- frequency { [] }
+ frequency do
+ [
+ [Time.now.to_i, 10]
+ ]
+ end
skip_create
end
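
Illustrative sketch, not part of the diff: with parent: :error_tracking_error, the detailed factory now only layers its extra fields on top of the base attributes.

    detailed = build(:detailed_error_tracking_error)

    detailed.title        # => "title" (inherited from :error_tracking_error)
    detailed.frequency    # => [[<current unix time>, 10]] (also inherited)
    detailed.gitlab_issue # => "http://gitlab.example.com/issues/1"
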
diff --git a/spec/factories/group_deploy_tokens.rb b/spec/factories/group_deploy_tokens.rb
new file mode 100644
index 00000000000..9ec7d0701be
--- /dev/null
+++ b/spec/factories/group_deploy_tokens.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :group_deploy_token do
+ group
+ deploy_token
+ end
+end
diff --git a/spec/factories/incident_management/project_incident_management_settings.rb b/spec/factories/incident_management/project_incident_management_settings.rb
new file mode 100644
index 00000000000..5b6a71d87d5
--- /dev/null
+++ b/spec/factories/incident_management/project_incident_management_settings.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :project_incident_management_setting, class: 'IncidentManagement::ProjectIncidentManagementSetting' do
+ project
+ create_issue { false }
+ issue_template_key { nil }
+ send_email { false }
+ end
+end
diff --git a/spec/factories/merge_request_context_commit.rb b/spec/factories/merge_request_context_commit.rb
new file mode 100644
index 00000000000..f9bfc9af02e
--- /dev/null
+++ b/spec/factories/merge_request_context_commit.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :merge_request_context_commit do
+ association :merge_request, factory: :merge_request
+ author_name { 'test' }
+ author_email { 'test@test.com' }
+ message { '' }
+ relative_order { 0 }
+ sha { Digest::SHA1.hexdigest(SecureRandom.hex) }
+ end
+end
diff --git a/spec/factories/merge_request_context_commit_diff_file.rb b/spec/factories/merge_request_context_commit_diff_file.rb
new file mode 100644
index 00000000000..eb497166d05
--- /dev/null
+++ b/spec/factories/merge_request_context_commit_diff_file.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :merge_request_context_commit_diff_file do
+ association :merge_request_context_commit
+
+ sha { Digest::SHA1.hexdigest(SecureRandom.hex) }
+ relative_order { 0 }
+ new_file { true }
+ renamed_file { false }
+ deleted_file { false }
+ too_large { false }
+ a_mode { 0 }
+ b_mode { 100644 }
+ new_path { 'foo' }
+ old_path { 'foo' }
+ diff { '' }
+ binary { false }
+ end
+end
diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb
index 11fc5060cf0..848590efabc 100644
--- a/spec/factories/notes.rb
+++ b/spec/factories/notes.rb
@@ -29,6 +29,11 @@ FactoryBot.define do
end
end
+ factory :track_mr_picking_note, traits: [:on_merge_request, :system] do
+ association :system_note_metadata, action: 'cherry_pick'
+ commit_id { RepoHelpers.sample_commit.id }
+ end
+
factory :discussion_note_on_issue, traits: [:on_issue], class: 'DiscussionNote'
factory :discussion_note_on_commit, traits: [:on_commit], class: 'DiscussionNote'
@@ -53,24 +58,20 @@ FactoryBot.define do
end
position do
- Gitlab::Diff::Position.new(
- old_path: "files/ruby/popen.rb",
- new_path: "files/ruby/popen.rb",
- old_line: nil,
- new_line: line_number,
- diff_refs: diff_refs
- )
+ build(:text_diff_position,
+ file: "files/ruby/popen.rb",
+ old_line: nil,
+ new_line: line_number,
+ diff_refs: diff_refs)
end
trait :folded_position do
position do
- Gitlab::Diff::Position.new(
- old_path: "files/ruby/popen.rb",
- new_path: "files/ruby/popen.rb",
- old_line: 1,
- new_line: 1,
- diff_refs: diff_refs
- )
+ build(:text_diff_position,
+ file: "files/ruby/popen.rb",
+ old_line: 1,
+ new_line: 1,
+ diff_refs: diff_refs)
end
end
@@ -81,16 +82,9 @@ FactoryBot.define do
factory :image_diff_note_on_merge_request do
position do
- Gitlab::Diff::Position.new(
- old_path: "files/images/any_image.png",
- new_path: "files/images/any_image.png",
- width: 10,
- height: 10,
- x: 1,
- y: 1,
- diff_refs: diff_refs,
- position_type: "image"
- )
+ build(:image_diff_position,
+ file: "files/images/any_image.png",
+ diff_refs: diff_refs)
end
end
end
@@ -104,9 +98,8 @@ FactoryBot.define do
end
position do
- Gitlab::Diff::Position.new(
- old_path: "files/ruby/popen.rb",
- new_path: "files/ruby/popen.rb",
+ build(:text_diff_position,
+ file: "files/ruby/popen.rb",
old_line: nil,
new_line: line_number,
diff_refs: diff_refs
diff --git a/spec/factories/pages_domains.rb b/spec/factories/pages_domains.rb
index 91423832888..f914128ed3b 100644
--- a/spec/factories/pages_domains.rb
+++ b/spec/factories/pages_domains.rb
@@ -374,5 +374,15 @@ x6zG6WoibsbsJMj70nwseUnPTBQNDP+j61RJjC/r
-----END EC PRIVATE KEY-----'
end
end
+
+ trait :instance_serverless do
+ wildcard { true }
+ scope { :instance }
+ usage { :serverless }
+ end
+
+ trait :with_project do
+ association :project
+ end
end
end
diff --git a/spec/factories/project_error_tracking_settings.rb b/spec/factories/project_error_tracking_settings.rb
index 7af881f4214..e09d58d293f 100644
--- a/spec/factories/project_error_tracking_settings.rb
+++ b/spec/factories/project_error_tracking_settings.rb
@@ -3,10 +3,14 @@
FactoryBot.define do
factory :project_error_tracking_setting, class: 'ErrorTracking::ProjectErrorTrackingSetting' do
project
- api_url { 'https://gitlab.com/api/0/projects/sentry-org/sentry-project' }
+ api_url { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
enabled { true }
token { 'access_token_123' }
project_name { 'Sentry Project' }
organization_name { 'Sentry Org' }
+
+ trait :disabled do
+ enabled { false }
+ end
end
end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 490ae9e84e7..ba38e1bb312 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -37,6 +37,7 @@ FactoryBot.define do
group_runners_enabled { nil }
import_status { nil }
import_jid { nil }
+ forward_deployment_enabled { nil }
end
after(:create) do |project, evaluator|
@@ -139,12 +140,6 @@ FactoryBot.define do
end
end
- trait :without_container_expiration_policy do
- after(:build) do |project|
- project.class.skip_callback(:create, :after, :create_container_expiration_policy, raise: false)
- end
- end
-
# Build a custom repository by specifying a hash of `filename => content` in
# the transient `files` attribute. Each file will be created in its own
# commit, operating against the master branch. So, the following call:
diff --git a/spec/factories/prometheus_alert.rb b/spec/factories/prometheus_alert.rb
new file mode 100644
index 00000000000..a9fede9efca
--- /dev/null
+++ b/spec/factories/prometheus_alert.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :prometheus_alert do
+ project
+ operator { :gt }
+ threshold { 1 }
+
+ environment do |alert|
+ build(:environment, project: alert.project)
+ end
+
+ prometheus_metric do |alert|
+ build(:prometheus_metric, project: alert.project)
+ end
+ end
+end
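
Illustrative usage, not part of the diff: the lazy association blocks keep the environment and metric on the alert's own project.

    alert = create(:prometheus_alert)

    alert.environment.project == alert.project        # => true
    alert.prometheus_metric.project == alert.project  # => true
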
diff --git a/spec/factories/serverless/domain_cluster.rb b/spec/factories/serverless/domain_cluster.rb
index 5adfcacbd7f..bc32552d4c7 100644
--- a/spec/factories/serverless/domain_cluster.rb
+++ b/spec/factories/serverless/domain_cluster.rb
@@ -1,10 +1,45 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :serverless_domain_cluster, class: 'Serverless::DomainCluster' do
+ factory :serverless_domain_cluster, class: '::Serverless::DomainCluster' do
pages_domain { create(:pages_domain) }
knative { create(:clusters_applications_knative) }
creator { create(:user) }
- uuid { SecureRandom.hex(7) }
+
+ certificate do
+ '-----BEGIN CERTIFICATE-----
+MIICGzCCAYSgAwIBAgIBATANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExB0ZXN0
+LWNlcnRpZmljYXRlMB4XDTE2MDIxMjE0MzIwMFoXDTIwMDQxMjE0MzIwMFowGzEZ
+MBcGA1UEAxMQdGVzdC1jZXJ0aWZpY2F0ZTCBnzANBgkqhkiG9w0BAQEFAAOBjQAw
+gYkCgYEApL4J9L0ZxFJ1hI1LPIflAlAGvm6ZEvoT4qKU5Xf2JgU7/2geNR1qlNFa
+SvCc08Knupp5yTgmvyK/Xi09U0N82vvp4Zvr/diSc4A/RA6Mta6egLySNT438kdT
+nY2tR5feoTLwQpX0t4IMlwGQGT5h6Of2fKmDxzuwuyffcIHqLdsCAwEAAaNvMG0w
+DAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUxl9WSxBprB0z0ibJs3rXEk0+95AwCwYD
+VR0PBAQDAgXgMBEGCWCGSAGG+EIBAQQEAwIGQDAeBglghkgBhvhCAQ0EERYPeGNh
+IGNlcnRpZmljYXRlMA0GCSqGSIb3DQEBBQUAA4GBAGC4T8SlFHK0yPSa+idGLQFQ
+joZp2JHYvNlTPkRJ/J4TcXxBTJmArcQgTIuNoBtC+0A/SwdK4MfTCUY4vNWNdese
+5A4K65Nb7Oh1AdQieTBHNXXCdyFsva9/ScfQGEl7p55a52jOPs0StPd7g64uvjlg
+YHi2yesCrOvVXt+lgPTd
+-----END CERTIFICATE-----'
+ end
+
+ key do
+ '-----BEGIN PRIVATE KEY-----
+MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAKS+CfS9GcRSdYSN
+SzyH5QJQBr5umRL6E+KilOV39iYFO/9oHjUdapTRWkrwnNPCp7qaeck4Jr8iv14t
+PVNDfNr76eGb6/3YknOAP0QOjLWunoC8kjU+N/JHU52NrUeX3qEy8EKV9LeCDJcB
+kBk+Yejn9nypg8c7sLsn33CB6i3bAgMBAAECgYA2D26w80T7WZvazYr86BNMePpd
+j2mIAqx32KZHzt/lhh40J/SRtX9+Kl0Y7nBoRR5Ja9u/HkAIxNxLiUjwg9r6cpg/
+uITEF5nMt7lAk391BuI+7VOZZGbJDsq2ulPd6lO+C8Kq/PI/e4kXcIjeH6KwQsuR
+5vrXfBZ3sQfflaiN4QJBANBt8JY2LIGQF8o89qwUpRL5vbnKQ4IzZ5+TOl4RLR7O
+AQpJ81tGuINghO7aunctb6rrcKJrxmEH1whzComybrMCQQDKV49nOBudRBAIgG4K
+EnLzsRKISUHMZSJiYTYnablof8cKw1JaQduw7zgrUlLwnroSaAGX88+Jw1f5n2Lh
+Vlg5AkBDdUGnrDLtYBCDEQYZHblrkc7ZAeCllDOWjxUV+uMqlCv8A4Ey6omvY57C
+m6I8DkWVAQx8VPtozhvHjUw80rZHAkB55HWHAM3h13axKG0htCt7klhPsZHpx6MH
+EPjGlXIT+aW2XiPmK3ZlCDcWIenE+lmtbOpI159Wpk8BGXs/s/xBAkEAlAY3ymgx
+63BDJEwvOb2IaP8lDDxNsXx9XJNVvQbv5n15vNsLHbjslHfAhAbxnLQ1fLhUPqSi
+nNp/xedE1YxutQ==
+-----END PRIVATE KEY-----'
+ end
end
end
diff --git a/spec/factories/services.rb b/spec/factories/services.rb
index 5d62b3cb9c9..ffa51abf26f 100644
--- a/spec/factories/services.rb
+++ b/spec/factories/services.rb
@@ -44,6 +44,16 @@ FactoryBot.define do
end
end
+ factory :alerts_service do
+ project
+ type { 'AlertsService' }
+ active { true }
+
+ trait :inactive do
+ active { false }
+ end
+ end
+
factory :drone_ci_service do
project
active { true }
diff --git a/spec/factories/snippet_repositories.rb b/spec/factories/snippet_repositories.rb
new file mode 100644
index 00000000000..1f9e68514bb
--- /dev/null
+++ b/spec/factories/snippet_repositories.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :snippet_repository do
+ snippet
+
+ after(:build) do |snippet_repository, _|
+ snippet_repository.shard_name = snippet_repository.snippet.repository_storage
+ snippet_repository.disk_path = snippet_repository.snippet.disk_path
+ end
+ end
+end
diff --git a/spec/factories/snippets.rb b/spec/factories/snippets.rb
index 5990ed7ffb0..6fcb0319748 100644
--- a/spec/factories/snippets.rb
+++ b/spec/factories/snippets.rb
@@ -20,6 +20,21 @@ FactoryBot.define do
trait :private do
visibility_level { Snippet::PRIVATE }
end
+
+ # Test repository - https://gitlab.com/gitlab-org/gitlab-test
+ trait :repository do
+ after :create do |snippet|
+ TestEnv.copy_repo(snippet,
+ bare_repo: TestEnv.factory_repo_path_bare,
+ refs: TestEnv::BRANCH_SHA)
+ end
+ end
+
+ trait :empty_repo do
+ after(:create) do |snippet|
+ raise "Failed to create repository!" unless snippet.repository.create_if_not_exists
+ end
+ end
end
factory :project_snippet, parent: :snippet, class: :ProjectSnippet do
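
Illustrative usage, not part of the diff: the new traits give a snippet a backing repository (assuming TestEnv is set up, as in feature specs).

    snippet = create(:snippet, :repository)  # copies the gitlab-test repository
    blank   = create(:snippet, :empty_repo)  # only initialises an empty repository

    snippet.repository.exists? # => true
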
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index f83c137b758..34f6da682b6 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -23,6 +23,10 @@ FactoryBot.define do
after(:build) { |user, _| user.block! }
end
+ trait :bot do
+ bot_type { User.bot_types[:alert_bot] }
+ end
+
trait :external do
external { true }
end
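
Illustrative usage, not part of the diff: bot_type is a Rails enum, so the trait reads back as its name.

    bot = create(:user, :bot)

    bot.bot_type # => "alert_bot"
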
diff --git a/spec/factories/x509_certificate.rb b/spec/factories/x509_certificate.rb
new file mode 100644
index 00000000000..819ad0704dc
--- /dev/null
+++ b/spec/factories/x509_certificate.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :x509_certificate do
+ subject_key_identifier { 'BC:BC:BC:BC:BC:BC:BC:BC:BC:BC:BC:BC:BC:BC:BC:BC:BC:BC:BC:BC' }
+ subject { 'CN=gitlab@example.org,OU=Example,O=World' }
+
+ email { 'gitlab@example.org' }
+ serial_number { 278969561018901340486471282831158785578 }
+ x509_issuer
+ end
+end
diff --git a/spec/factories/x509_commit_signature.rb b/spec/factories/x509_commit_signature.rb
new file mode 100644
index 00000000000..a342b240690
--- /dev/null
+++ b/spec/factories/x509_commit_signature.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :x509_commit_signature do
+ commit_sha { Digest::SHA1.hexdigest(SecureRandom.hex) }
+ project
+ x509_certificate
+ verification_status { :verified }
+ end
+end
diff --git a/spec/factories/x509_issuer.rb b/spec/factories/x509_issuer.rb
new file mode 100644
index 00000000000..e003b16ad86
--- /dev/null
+++ b/spec/factories/x509_issuer.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :x509_issuer do
+ subject_key_identifier { 'AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB' }
+ subject { 'CN=PKI,OU=Example,O=World' }
+
+ crl_url { 'http://example.com/pki.crl' }
+ end
+end
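
Illustrative usage, not part of the diff: the three x509 factories chain together, so a single create builds the certificate and issuer records as well.

    signature = create(:x509_commit_signature)

    signature.x509_certificate.email               # => "gitlab@example.org"
    signature.x509_certificate.x509_issuer.crl_url # => "http://example.com/pki.crl"
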
diff --git a/spec/features/admin/admin_browses_logs_spec.rb b/spec/features/admin/admin_browses_logs_spec.rb
index 2b97362c8e9..45e860e1536 100644
--- a/spec/features/admin/admin_browses_logs_spec.rb
+++ b/spec/features/admin/admin_browses_logs_spec.rb
@@ -10,7 +10,7 @@ describe 'Admin browses logs' do
it 'shows available log files' do
visit admin_logs_path
- expect(page).to have_link 'application.log'
+ expect(page).to have_link 'application_json.log'
expect(page).to have_link 'git_json.log'
expect(page).to have_link 'test.log'
expect(page).to have_link 'sidekiq.log'
diff --git a/spec/features/admin/admin_disables_git_access_protocol_spec.rb b/spec/features/admin/admin_disables_git_access_protocol_spec.rb
index bc757d72a49..05ebb7e90d2 100644
--- a/spec/features/admin/admin_disables_git_access_protocol_spec.rb
+++ b/spec/features/admin/admin_disables_git_access_protocol_spec.rb
@@ -121,7 +121,7 @@ describe 'Admin disables Git access protocol', :js do
end
def switch_git_protocol(value)
- visit admin_application_settings_path
+ visit general_admin_application_settings_path
page.within('.as-visibility-access') do
find('#application_setting_enabled_git_access_protocol').find(:xpath, "option[#{value}]").select_option
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index 9a4889a0335..1d82650d11d 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -134,7 +134,7 @@ describe 'Admin Groups' do
end
describe 'add user into a group', :js do
- shared_context 'adds user into a group' do
+ shared_examples 'adds user into a group' do
it do
visit admin_group_path(group)
diff --git a/spec/features/admin/admin_manage_applications_spec.rb b/spec/features/admin/admin_manage_applications_spec.rb
index dd4d4b1a426..3f3d71e842c 100644
--- a/spec/features/admin/admin_manage_applications_spec.rb
+++ b/spec/features/admin/admin_manage_applications_spec.rb
@@ -21,18 +21,21 @@ RSpec.describe 'admin manage applications' do
expect(page).to have_content('Application ID')
expect(page).to have_content('Secret')
expect(page).to have_content('Trusted Y')
+ expect(page).to have_content('Confidential Y')
click_on 'Edit'
expect(page).to have_content('Edit application')
fill_in :doorkeeper_application_name, with: 'test_changed'
uncheck :doorkeeper_application_trusted
+ uncheck :doorkeeper_application_confidential
click_on 'Submit'
expect(page).to have_content('test_changed')
expect(page).to have_content('Application ID')
expect(page).to have_content('Secret')
expect(page).to have_content('Trusted N')
+ expect(page).to have_content('Confidential N')
visit admin_applications_path
page.within '.oauth-applications' do
diff --git a/spec/features/admin/admin_mode/workers_spec.rb b/spec/features/admin/admin_mode/workers_spec.rb
new file mode 100644
index 00000000000..e33c9d7e64c
--- /dev/null
+++ b/spec/features/admin/admin_mode/workers_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Test an operation that triggers background jobs requiring administrative rights
+describe 'Admin mode for workers', :do_not_mock_admin_mode, :request_store, :clean_gitlab_redis_shared_state do
+ let(:user) { create(:user) }
+ let(:user_to_delete) { create(:user) }
+
+ before do
+ add_sidekiq_middleware
+
+ sign_in(user)
+ end
+
+ context 'as a regular user' do
+ it 'cannot delete user' do
+ visit admin_user_path(user_to_delete)
+
+ expect(page).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'as an admin user' do
+ let(:user) { create(:admin) }
+
+ context 'when admin mode disabled' do
+ it 'cannot delete user', :js do
+ visit admin_user_path(user_to_delete)
+
+ expect(page).to have_content('Re-authentication required')
+ end
+ end
+
+ context 'when admin mode enabled', :delete do
+ before do
+ gitlab_enable_admin_mode_sign_in(user)
+ end
+
+ it 'can delete user', :js do
+ visit admin_user_path(user_to_delete)
+ click_button 'Delete user'
+
+ page.within '.modal-dialog' do
+ find("input[name='username']").send_keys(user_to_delete.name)
+ click_button 'Delete user'
+
+ wait_for_requests
+ end
+
+ expect(page).to have_content('The user is being deleted.')
+
+ # Perform jobs while logged out so that admin mode is only enabled in job metadata
+ execute_jobs_signed_out(user)
+
+ visit admin_user_path(user_to_delete)
+
+ expect(page).to have_title('Not Found')
+ end
+ end
+ end
+
+ def add_sidekiq_middleware
+ Sidekiq::Testing.server_middleware do |chain|
+ chain.add Gitlab::SidekiqMiddleware::AdminMode::Server
+ end
+ end
+
+ def execute_jobs_signed_out(user)
+ gitlab_sign_out
+
+ Sidekiq::Worker.drain_all
+
+ sign_in(user)
+ gitlab_enable_admin_mode_sign_in(user)
+ end
+end
diff --git a/spec/features/admin/admin_mode_spec.rb b/spec/features/admin/admin_mode_spec.rb
new file mode 100644
index 00000000000..7b8990aceef
--- /dev/null
+++ b/spec/features/admin/admin_mode_spec.rb
@@ -0,0 +1,173 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Admin mode', :clean_gitlab_redis_shared_state, :do_not_mock_admin_mode do
+ include MobileHelpers
+ include StubENV
+
+ let(:admin) { create(:admin) }
+
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+ end
+
+ context 'feature flag :user_mode_in_session is enabled', :request_store do
+ before do
+ sign_in(admin)
+ end
+
+ context 'when not in admin mode' do
+ it 'has no leave admin mode button' do
+ visit new_admin_session_path
+
+ page.within('.navbar-sub-nav') do
+ expect(page).not_to have_link(href: destroy_admin_session_path)
+ end
+ end
+
+ it 'can open pages not in admin scope' do
+ visit new_admin_session_path
+
+ page.within('.navbar-sub-nav') do
+ find_all('a', text: 'Projects').first.click
+ end
+
+ expect(page).to have_current_path(dashboard_projects_path)
+ end
+
+ it 'is necessary to provide credentials again before opening pages in admin scope' do
+ visit general_admin_application_settings_path # admin logged out because not in admin_mode
+
+ expect(page).to have_current_path(new_admin_session_path)
+ end
+
+ it 'can enter admin mode' do
+ visit new_admin_session_path
+
+ fill_in 'password', with: admin.password
+
+ click_button 'Enter Admin Mode'
+
+ expect(page).to have_current_path(admin_root_path)
+ end
+
+ context 'on a read-only instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ end
+
+ it 'can enter admin mode' do
+ visit new_admin_session_path
+
+ fill_in 'password', with: admin.password
+
+ click_button 'Enter Admin Mode'
+
+ expect(page).to have_current_path(admin_root_path)
+ end
+ end
+ end
+
+ context 'when in admin_mode' do
+ before do
+ gitlab_enable_admin_mode_sign_in(admin)
+ end
+
+ it 'contains link to leave admin mode' do
+ page.within('.navbar-sub-nav') do
+ expect(page).to have_link(href: destroy_admin_session_path)
+ end
+ end
+
+ it 'can leave admin mode using main dashboard link', :js do
+ page.within('.navbar-sub-nav') do
+ click_on 'Leave Admin Mode'
+
+ expect(page).to have_link(href: new_admin_session_path)
+ end
+ end
+
+ it 'can leave admin mode using dropdown menu on smaller screens', :js do
+ resize_screen_xs
+ visit root_dashboard_path
+
+ find('.header-more').click
+
+ page.within '.navbar-sub-nav' do
+ click_on 'Leave Admin Mode'
+
+ find('.header-more').click
+
+ expect(page).to have_link(href: new_admin_session_path)
+ end
+ end
+
+ it 'can open pages not in admin scope' do
+ page.within('.navbar-sub-nav') do
+ find_all('a', text: 'Projects').first.click
+
+ expect(page).to have_current_path(dashboard_projects_path)
+ end
+ end
+
+ context 'nav bar' do
+ it 'shows admin dashboard links on bigger screen' do
+ visit root_dashboard_path
+
+ page.within '.navbar' do
+ expect(page).to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
+ expect(page).to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
+ end
+ end
+
+ it 'relocates admin dashboard links to dropdown list on smaller screen', :js do
+ resize_screen_xs
+ visit root_dashboard_path
+
+ page.within '.navbar' do
+ expect(page).not_to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
+ expect(page).not_to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
+ end
+
+ find('.header-more').click
+
+ page.within '.navbar' do
+ expect(page).to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
+ expect(page).to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
+ end
+ end
+ end
+
+ context 'on a read-only instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ end
+
+ it 'can leave admin mode', :js do
+ page.within('.navbar-sub-nav') do
+ click_on 'Leave Admin Mode'
+
+ expect(page).to have_link(href: new_admin_session_path)
+ end
+ end
+ end
+ end
+ end
+
+ context 'feature flag :user_mode_in_session is disabled' do
+ before do
+ stub_feature_flags(user_mode_in_session: false)
+ sign_in(admin)
+ end
+
+ it 'shows no admin mode buttons in navbar' do
+ visit admin_root_path
+
+ page.within('.navbar-sub-nav') do
+ expect(page).not_to have_link(href: new_admin_session_path)
+ expect(page).not_to have_link(href: destroy_admin_session_path)
+ end
+ end
+ end
+end
diff --git a/spec/features/admin/admin_serverless_domains_spec.rb b/spec/features/admin/admin_serverless_domains_spec.rb
new file mode 100644
index 00000000000..85fe67004da
--- /dev/null
+++ b/spec/features/admin/admin_serverless_domains_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Admin Serverless Domains', :js do
+ let(:sample_domain) { build(:pages_domain) }
+
+ before do
+ allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+ sign_in(create(:admin))
+ end
+
+ it 'Add domain with certificate' do
+ visit admin_serverless_domains_path
+
+ fill_in 'pages_domain[domain]', with: 'foo.com'
+ fill_in 'pages_domain[user_provided_certificate]', with: sample_domain.certificate
+ fill_in 'pages_domain[user_provided_key]', with: sample_domain.key
+ click_button 'Add domain'
+
+ expect(current_path).to eq admin_serverless_domains_path
+
+ expect(page).to have_field('pages_domain[domain]', with: 'foo.com')
+ expect(page).to have_field('serverless_domain_dns', with: /^\*\.foo\.com CNAME /)
+ expect(page).to have_field('serverless_domain_verification', with: /^_gitlab-pages-verification-code.foo.com TXT /)
+ expect(page).not_to have_field('pages_domain[user_provided_certificate]')
+ expect(page).not_to have_field('pages_domain[user_provided_key]')
+
+ expect(page).to have_content 'Unverified'
+ expect(page).to have_content '/CN=test-certificate'
+ end
+
+ it 'Update domain certificate' do
+ visit admin_serverless_domains_path
+
+ fill_in 'pages_domain[domain]', with: 'foo.com'
+ fill_in 'pages_domain[user_provided_certificate]', with: sample_domain.certificate
+ fill_in 'pages_domain[user_provided_key]', with: sample_domain.key
+ click_button 'Add domain'
+
+ expect(current_path).to eq admin_serverless_domains_path
+
+ expect(page).not_to have_field('pages_domain[user_provided_certificate]')
+ expect(page).not_to have_field('pages_domain[user_provided_key]')
+
+ click_button 'Replace'
+
+ expect(page).to have_field('pages_domain[user_provided_certificate]')
+ expect(page).to have_field('pages_domain[user_provided_key]')
+
+ fill_in 'pages_domain[user_provided_certificate]', with: sample_domain.certificate
+ fill_in 'pages_domain[user_provided_key]', with: sample_domain.key
+
+ click_button 'Save changes'
+
+ expect(page).to have_content 'Domain was successfully updated'
+ expect(page).to have_content '/CN=test-certificate'
+ end
+end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 99a6165cfc9..9b275a8897b 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -5,11 +5,10 @@ require 'spec_helper'
describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_mock_admin_mode do
include StubENV
include TermsHelper
- include MobileHelpers
let(:admin) { create(:admin) }
- context 'feature flag :user_mode_in_session is enabled' do
+ context 'feature flag :user_mode_in_session is enabled', :request_store do
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
sign_in(admin)
@@ -228,9 +227,7 @@ describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_moc
click_link 'Slack notifications'
- page.all('input[type=checkbox]').each do |checkbox|
- expect(checkbox).to be_checked
- end
+ expect(page.all('input[type=checkbox]')).to all(be_checked)
expect(find_field('Webhook').value).to eq 'http://localhost'
expect(find_field('Username').value).to eq 'test_user'
expect(find('#service_push_channel').value).to eq '#test_channel'
@@ -329,6 +326,8 @@ describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_moc
end
it 'loads usage ping payload on click', :js do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+
expect(page).to have_button 'Preview payload'
find('.js-usage-ping-payload-trigger').click
@@ -451,86 +450,6 @@ describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_moc
expect(page).to have_link(text: 'Support', href: new_support_url)
end
end
-
- it 'Shows admin dashboard links on bigger screen' do
- visit root_dashboard_path
-
- page.within '.navbar' do
- expect(page).to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
- expect(page).to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
- end
- end
-
- it 'Relocates admin dashboard links to dropdown list on smaller screen', :js do
- resize_screen_xs
- visit root_dashboard_path
-
- page.within '.navbar' do
- expect(page).not_to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
- expect(page).not_to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
- end
-
- find('.header-more').click
-
- page.within '.navbar' do
- expect(page).to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
- expect(page).to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
- end
- end
- end
-
- context 'when in admin_mode' do
- it 'contains link to leave admin mode' do
- page.within('.navbar-sub-nav') do
- expect(page).to have_link(href: destroy_admin_session_path)
- end
- end
-
- it 'can leave admin mode' do
- page.within('.navbar-sub-nav') do
- # Select first, link is also included in mobile view list
- click_on 'Leave Admin Mode', match: :first
-
- expect(page).to have_link(href: new_admin_session_path)
- end
- end
-
- it 'can open pages not in admin scope' do
- page.within('.navbar-sub-nav') do
- find_all('a', text: 'Projects').first.click
-
- expect(page).to have_current_path(dashboard_projects_path)
- end
- end
- end
-
- context 'when not in admin mode' do
- before do
- page.within('.navbar-sub-nav') do
- # Select first, link is also included in mobile view list
- click_on 'Leave Admin Mode', match: :first
- end
- end
-
- it 'has no leave admin mode button' do
- page.within('.navbar-sub-nav') do
- expect(page).not_to have_link(href: destroy_admin_session_path)
- end
- end
-
- it 'is necessary to provide credentials again before opening admin settings' do
- visit admin_application_settings_path # admin logged out because not in admin_mode
-
- expect(page).to have_current_path(new_admin_session_path)
- end
-
- it 'can open pages not in admin scope' do
- page.within('.navbar-sub-nav') do
- find_all('a', text: 'Projects').first.click
- end
-
- expect(page).to have_current_path(dashboard_projects_path)
- end
end
end
@@ -541,18 +460,11 @@ describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_moc
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
sign_in(admin)
- visit admin_application_settings_path
+ visit general_admin_application_settings_path
end
it 'loads admin settings page without redirect for reauthentication' do
- expect(current_path).to eq admin_application_settings_path
- end
-
- it 'shows no admin mode buttons in navbar' do
- page.within('.navbar-sub-nav') do
- expect(page).not_to have_link(href: new_admin_session_path)
- expect(page).not_to have_link(href: destroy_admin_session_path)
- end
+ expect(current_path).to eq general_admin_application_settings_path
end
end
diff --git a/spec/features/admin/admin_uses_repository_checks_spec.rb b/spec/features/admin/admin_uses_repository_checks_spec.rb
index 3e8197588ed..954773e766d 100644
--- a/spec/features/admin/admin_uses_repository_checks_spec.rb
+++ b/spec/features/admin/admin_uses_repository_checks_spec.rb
@@ -2,46 +2,64 @@
require 'spec_helper'
-describe 'Admin uses repository checks' do
+describe 'Admin uses repository checks', :request_store, :clean_gitlab_redis_shared_state, :do_not_mock_admin_mode do
include StubENV
+ let(:admin) { create(:admin) }
+
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
- sign_in(create(:admin))
+ sign_in(admin)
end
- it 'to trigger a single check' do
- project = create(:project)
- visit_admin_project_page(project)
+ context 'when admin mode is disabled' do
+ it 'admin project page requires admin mode' do
+ project = create(:project)
+ visit_admin_project_page(project)
- page.within('.repository-check') do
- click_button 'Trigger repository check'
+ expect(page).not_to have_css('.repository-check')
+ expect(page).to have_content('Enter Admin Mode')
end
-
- expect(page).to have_content('Repository check was triggered')
end
- it 'to see a single failed repository check', :js do
- project = create(:project)
- project.update_columns(
- last_repository_check_failed: true,
- last_repository_check_at: Time.now
- )
- visit_admin_project_page(project)
+ context 'when admin mode is enabled' do
+ before do
+ gitlab_enable_admin_mode_sign_in(admin)
+ end
+
+ it 'to trigger a single check', :js do
+ project = create(:project)
+ visit_admin_project_page(project)
+
+ page.within('.repository-check') do
+ click_button 'Trigger repository check'
+ end
- page.within('.alert') do
- expect(page.text).to match(/Last repository check \(just now\) failed/)
+ expect(page).to have_content('Repository check was triggered')
end
- end
- it 'to clear all repository checks', :js do
- visit repository_admin_application_settings_path
+ it 'to see a single failed repository check', :js do
+ project = create(:project)
+ project.update_columns(
+ last_repository_check_failed: true,
+ last_repository_check_at: Time.now
+ )
+ visit_admin_project_page(project)
+
+ page.within('.alert') do
+ expect(page.text).to match(/Last repository check \(just now\) failed/)
+ end
+ end
- expect(RepositoryCheck::ClearWorker).to receive(:perform_async)
+ it 'to clear all repository checks', :js do
+ visit repository_admin_application_settings_path
- accept_confirm { find(:link, 'Clear all repository checks').send_keys(:return) }
+ expect(RepositoryCheck::ClearWorker).to receive(:perform_async)
- expect(page).to have_content('Started asynchronous removal of all repository check states.')
+ accept_confirm { find(:link, 'Clear all repository checks').send_keys(:return) }
+
+ expect(page).to have_content('Started asynchronous removal of all repository check states.')
+ end
end
def visit_admin_project_page(project)
diff --git a/spec/features/broadcast_messages_spec.rb b/spec/features/broadcast_messages_spec.rb
new file mode 100644
index 00000000000..43fbf1010c9
--- /dev/null
+++ b/spec/features/broadcast_messages_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Broadcast Messages' do
+ let!(:broadcast_message) { create(:broadcast_message, broadcast_type: 'notification', message: 'SampleMessage') }
+
+ it 'shows broadcast message' do
+ visit root_path
+
+ expect(page).to have_content 'SampleMessage'
+ end
+
+ it 'hides broadcast message after dismiss', :js do
+ visit root_path
+
+ find('.js-dismiss-current-broadcast-notification').click
+
+ expect(page).not_to have_content 'SampleMessage'
+ end
+
+ it 'broadcast message is still hidden after refresh', :js do
+ visit root_path
+
+ find('.js-dismiss-current-broadcast-notification').click
+ visit root_path
+
+ expect(page).not_to have_content 'SampleMessage'
+ end
+end
diff --git a/spec/features/calendar_spec.rb b/spec/features/calendar_spec.rb
index bac5c9f568e..acdc38038aa 100644
--- a/spec/features/calendar_spec.rb
+++ b/spec/features/calendar_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe 'Contributions Calendar', :js do
+ include MobileHelpers
+
let(:user) { create(:user) }
let(:contributed_project) { create(:project, :public, :repository) }
let(:issue_note) { create(:note, project: contributed_project) }
@@ -122,15 +124,15 @@ describe 'Contributions Calendar', :js do
end
end
- describe 'calendar daily activities' do
- shared_context 'visit user page' do
- before do
- visit user.username
- page.find('.js-overview-tab a').click
- wait_for_requests
- end
+ shared_context 'visit user page' do
+ before do
+ visit user.username
+ page.find('.js-overview-tab a').click
+ wait_for_requests
end
+ end
+ describe 'calendar daily activities' do
shared_examples 'a day with activity' do |contribution_count:|
include_context 'visit user page'
@@ -199,4 +201,22 @@ describe 'Contributions Calendar', :js do
end
end
end
+
+ describe 'on smaller screens' do
+ shared_examples 'hidden activity calendar' do
+ include_context 'visit user page'
+
+ it 'hides the activity calendar' do
+ expect(find('#js-overview')).not_to have_css('.js-contrib-calendar')
+ end
+ end
+
+ context 'size xs' do
+ before do
+ resize_screen_xs
+ end
+
+ it_behaves_like 'hidden activity calendar'
+ end
+ end
end
diff --git a/spec/features/clusters/installing_applications_shared_examples.rb b/spec/features/clusters/installing_applications_shared_examples.rb
index 20648ed3d46..ff44ce46213 100644
--- a/spec/features/clusters/installing_applications_shared_examples.rb
+++ b/spec/features/clusters/installing_applications_shared_examples.rb
@@ -187,6 +187,8 @@ shared_examples "installing applications on a cluster" do
page.within('.js-cluster-application-row-elastic_stack') do
click_button 'Install'
end
+
+ wait_for_requests
end
it 'shows status transition' do
diff --git a/spec/features/container_registry_spec.rb b/spec/features/container_registry_spec.rb
index 28b68e699e8..881cad1864b 100644
--- a/spec/features/container_registry_spec.rb
+++ b/spec/features/container_registry_spec.rb
@@ -15,6 +15,7 @@ describe 'Container Registry', :js do
project.add_developer(user)
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: [])
+ stub_feature_flags(vue_container_registry_explorer: false)
end
it 'has a page title set' do
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index 0cafdb4e982..4a20d1b7d60 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'Cycle Analytics', :js do
+describe 'Value Stream Analytics', :js do
let(:user) { create(:user) }
let(:guest) { create(:user) }
let(:project) { create(:project, :repository) }
@@ -23,7 +23,7 @@ describe 'Cycle Analytics', :js do
end
it 'shows introductory message' do
- expect(page).to have_content('Introducing Cycle Analytics')
+ expect(page).to have_content('Introducing Value Stream Analytics')
end
it 'shows pipeline summary' do
@@ -38,11 +38,8 @@ describe 'Cycle Analytics', :js do
end
end
- context "when there's cycle analytics data" do
+ context "when there's value stream analytics data" do
before do
- allow_next_instance_of(Gitlab::ReferenceExtractor) do |instance|
- allow(instance).to receive(:issues).and_return([issue])
- end
project.add_maintainer(user)
@build = create_cycle(user, project, issue, mr, milestone, pipeline)
@@ -101,9 +98,6 @@ describe 'Cycle Analytics', :js do
project.add_developer(user)
project.add_guest(guest)
- allow_next_instance_of(Gitlab::ReferenceExtractor) do |instance|
- allow(instance).to receive(:issues).and_return([issue])
- end
create_cycle(user, project, issue, mr, milestone, pipeline)
deploy_master(user, project)
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 73f759f8a54..9bd2e85e3b8 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -152,61 +152,6 @@ describe 'Dashboard Projects' do
end
end
- describe 'with a pipeline', :clean_gitlab_redis_shared_state do
- let(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit.sha, ref: project.default_branch) }
-
- before do
- # Since the cache isn't updated when a new pipeline is created
- # we need the pipeline to advance in the pipeline since the cache was created
- # by visiting the login page.
- pipeline.succeed
- end
-
- it 'shows that the last pipeline passed' do
- visit dashboard_projects_path
-
- page.within('.controls') do
- expect(page).to have_xpath("//a[@href='#{pipelines_project_commit_path(project, project.commit, ref: pipeline.ref)}']")
- expect(page).to have_css('.ci-status-link')
- expect(page).to have_css('.ci-status-icon-success')
- expect(page).to have_link('Pipeline: passed')
- end
- end
-
- shared_examples 'hidden pipeline status' do
- it 'does not show the pipeline status' do
- visit dashboard_projects_path
-
- page.within('.controls') do
- expect(page).not_to have_xpath("//a[@href='#{pipelines_project_commit_path(project, project.commit, ref: pipeline.ref)}']")
- expect(page).not_to have_css('.ci-status-link')
- expect(page).not_to have_css('.ci-status-icon-success')
- expect(page).not_to have_link('Pipeline: passed')
- end
- end
- end
-
- context 'guest user of project and project has private pipelines' do
- let(:guest_user) { create(:user) }
-
- before do
- project.update(public_builds: false)
- project.add_guest(guest_user)
- sign_in(guest_user)
- end
-
- it_behaves_like 'hidden pipeline status'
- end
-
- context 'when dashboard_pipeline_status is disabled' do
- before do
- stub_feature_flags(dashboard_pipeline_status: false)
- end
-
- it_behaves_like 'hidden pipeline status'
- end
- end
-
context 'last push widget', :use_clean_rails_memory_store_caching do
before do
event = create(:push_event, project: project, author: user)
diff --git a/spec/features/dashboard/shortcuts_spec.rb b/spec/features/dashboard/shortcuts_spec.rb
index 3a47475da2b..cf74b2cc8ce 100644
--- a/spec/features/dashboard/shortcuts_spec.rb
+++ b/spec/features/dashboard/shortcuts_spec.rb
@@ -51,7 +51,7 @@ describe 'Dashboard shortcuts', :js do
find('body').send_keys([:shift, 'P'])
find('.nothing-here-block')
- expect(page).to have_content('Explore public groups to find projects to contribute to.')
+ expect(page).to have_content("This user doesn't have any personal projects")
end
end
diff --git a/spec/features/dashboard/snippets_spec.rb b/spec/features/dashboard/snippets_spec.rb
index 94dc8601abb..db5e56bdde0 100644
--- a/spec/features/dashboard/snippets_spec.rb
+++ b/spec/features/dashboard/snippets_spec.rb
@@ -32,7 +32,7 @@ describe 'Dashboard snippets' do
it 'shows the empty state when there are no snippets' do
element = page.find('.row.empty-state')
- expect(element).to have_content("Snippets are small pieces of code or notes that you want to keep.")
+ expect(element).to have_content("Code snippets")
expect(element.find('.svg-content img')['src']).to have_content('illustrations/snippets_empty')
end
@@ -40,6 +40,11 @@ describe 'Dashboard snippets' do
parent_element = page.find('.row.empty-state')
expect(parent_element).to have_link('New snippet')
end
+
+ it 'shows documentation button in main comment area' do
+ parent_element = page.find('.row.empty-state')
+ expect(parent_element).to have_link('Documentation', href: help_page_path('user/snippets.md'))
+ end
end
context 'filtering by visibility' do
@@ -59,6 +64,10 @@ describe 'Dashboard snippets' do
visit dashboard_snippets_path
end
+ it_behaves_like 'tabs with counts' do
+ let_it_be(:counts) { { all: '3', public: '1', private: '1', internal: '1' } }
+ end
+
it 'contains all snippets of logged user' do
expect(page).to have_selector('.snippet-row', count: 3)
diff --git a/spec/features/error_tracking/user_sees_error_details_spec.rb b/spec/features/error_tracking/user_sees_error_details_spec.rb
new file mode 100644
index 00000000000..6f72c44c689
--- /dev/null
+++ b/spec/features/error_tracking/user_sees_error_details_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'View error details page', :js, :use_clean_rails_memory_store_caching, :sidekiq_inline do
+ include_context 'sentry error tracking context feature'
+
+ context 'with current user as project owner' do
+ before do
+ sign_in(project.owner)
+
+ visit details_project_error_tracking_index_path(project, issue_id: issue_id)
+ end
+
+ it_behaves_like 'error tracking show page'
+ end
+
+ context 'with current user as project guest' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ project.add_guest(user)
+ sign_in(user)
+
+ visit details_project_error_tracking_index_path(project, issue_id: issue_id)
+ end
+
+ it 'renders not found' do
+ expect(page).to have_content('Page Not Found')
+ end
+ end
+end
diff --git a/spec/features/error_tracking/user_sees_error_index_spec.rb b/spec/features/error_tracking/user_sees_error_index_spec.rb
new file mode 100644
index 00000000000..842e4a2e8b5
--- /dev/null
+++ b/spec/features/error_tracking/user_sees_error_index_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'View error index page', :js, :use_clean_rails_memory_store_caching, :sidekiq_inline do
+ include_context 'sentry error tracking context feature'
+
+ let_it_be(:issues_response_body) { fixture_file('sentry/issues_sample_response.json') }
+ let_it_be(:issues_response) { JSON.parse(issues_response_body) }
+ let(:issues_api_url) { "#{sentry_api_urls.issues_url}?limit=20&query=is:unresolved" }
+
+ before do
+ stub_request(:get, issues_api_url).with(
+ headers: { 'Authorization' => 'Bearer access_token_123' }
+ ).to_return(status: 200, body: issues_response_body, headers: { 'Content-Type' => 'application/json' })
+ end
+
+ context 'with current user as project owner' do
+ before do
+ sign_in(project.owner)
+
+ visit project_error_tracking_index_path(project)
+ end
+
+ it_behaves_like 'error tracking index page'
+ end
+
+ # A bug caused the detail link to be broken for all users but the project owner
+ context 'with current user as project maintainer' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+
+ visit project_error_tracking_index_path(project)
+ end
+
+ it_behaves_like 'error tracking index page'
+ end
+
+ context 'with error tracking settings disabled' do
+ before do
+ project_error_tracking_settings.update(enabled: false)
+ sign_in(project.owner)
+
+ visit project_error_tracking_index_path(project)
+ end
+
+ it 'renders call to action' do
+ expect(page).to have_content('Enable error tracking')
+ end
+ end
+
+ context 'with current user as project guest' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ project.add_guest(user)
+ sign_in(user)
+
+ visit project_error_tracking_index_path(project)
+ end
+
+ it 'renders not found' do
+ expect(page).to have_content('Page Not Found')
+ end
+ end
+end
diff --git a/spec/features/graphiql_spec.rb b/spec/features/graphiql_spec.rb
new file mode 100644
index 00000000000..329758113ab
--- /dev/null
+++ b/spec/features/graphiql_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'GraphiQL' do
+ context 'without relative_url_root' do
+ before do
+ visit '/-/graphql-explorer'
+ end
+
+ it 'has the correct graphQLEndpoint' do
+ expect(page.body).to include('var graphQLEndpoint = "/api/graphql";')
+ end
+ end
+
+ context 'with relative_url_root' do
+ before do
+ stub_config_setting(relative_url_root: '/gitlab/root')
+ Rails.application.reload_routes!
+
+ visit '/-/graphql-explorer'
+ end
+
+ after do
+ Rails.application.reload_routes!
+ end
+
+ it 'has the correct graphQLEndpoint' do
+ expect(page.body).to include('var graphQLEndpoint = "/gitlab/root/api/graphql";')
+ end
+ end
+end
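# The two examples above assert that the GraphiQL page embeds an endpoint that
# honours relative_url_root. A self-contained, hypothetical sketch of how that
# expected value is derived (illustrative only; the helper name is made up and
# is not part of the GitLab codebase):
def expected_graphql_endpoint(relative_url_root = nil)
  "#{relative_url_root.to_s.chomp('/')}/api/graphql"
end

expected_graphql_endpoint                 # => "/api/graphql"
expected_graphql_endpoint('/gitlab/root') # => "/gitlab/root/api/graphql"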
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
new file mode 100644
index 00000000000..8c16dcec42f
--- /dev/null
+++ b/spec/features/groups/navbar_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Group navbar' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+
+ let(:analytics_nav_item) do
+ {
+ nav_item: _('Analytics'),
+ nav_sub_items: [
+ _('Contribution Analytics')
+ ]
+ }
+ end
+
+ let(:structure) do
+ [
+ {
+ nav_item: _('Group overview'),
+ nav_sub_items: [
+ _('Details'),
+ _('Activity')
+ ]
+ },
+ {
+ nav_item: _('Issues'),
+ nav_sub_items: [
+ _('List'),
+ _('Board'),
+ _('Labels'),
+ _('Milestones')
+ ]
+ },
+ {
+ nav_item: _('Merge Requests'),
+ nav_sub_items: []
+ },
+ {
+ nav_item: _('Kubernetes'),
+ nav_sub_items: []
+ },
+ (analytics_nav_item if Gitlab.ee?),
+ {
+ nav_item: _('Members'),
+ nav_sub_items: []
+ }
+ ]
+ end
+
+ it_behaves_like 'verified navigation bar' do
+ before do
+ group.add_maintainer(user)
+ sign_in(user)
+
+ visit group_path(group)
+ end
+ end
+
+ if Gitlab.ee?
+ context 'when productivity analytics is available' do
+ before do
+ stub_licensed_features(productivity_analytics: true)
+
+ analytics_nav_item[:nav_sub_items] << _('Productivity Analytics')
+
+ group.add_maintainer(user)
+ sign_in(user)
+
+ visit group_path(group)
+ end
+
+ it_behaves_like 'verified navigation bar'
+ end
+ end
+end
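# The 'verified navigation bar' shared example consumes the `structure` array
# defined above. Its real definition lives elsewhere in the suite; the body
# below is only a hypothetical sketch of the general shape, not the actual
# shared example:
RSpec.shared_examples 'verified navigation bar' do
  it 'renders the expected nav items and sub items' do
    structure.compact.each do |item|
      expect(page).to have_content(item[:nav_item])

      item[:nav_sub_items].each do |sub_item|
        expect(page).to have_content(sub_item)
      end
    end
  end
end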
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index e958ebb1275..d2e65c02e37 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -82,8 +82,8 @@ describe 'Group' do
expect(page).to have_selector('#group_create_chat_team')
end
- it 'checks the checkbox by default' do
- expect(find('#group_create_chat_team')['checked']).to eq(true)
+ it 'unchecks the checkbox by default' do
+ expect(find('#group_create_chat_team')['checked']).to eq(false)
end
it 'updates the team URL on graph path update', :js do
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index 2a1980346e9..7884a16c118 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -153,24 +153,6 @@ describe 'Invites' do
context 'email confirmation enabled' do
let(:send_email_confirmation) { true }
- context 'when soft email confirmation is not enabled' do
- before do
- stub_feature_flags(soft_email_confirmation: false)
- allow(User).to receive(:allow_unconfirmed_access_for).and_return 0
- end
-
- it 'signs up and redirects to root page with all the project/groups invitation automatically accepted' do
- fill_in_sign_up_form(new_user)
- confirm_email(new_user)
- fill_in_sign_in_form(new_user)
-
- expect(current_path).to eq(root_path)
- expect(page).to have_content(project.full_name)
- visit group_path(group)
- expect(page).to have_content(group.full_name)
- end
- end
-
context 'when soft email confirmation is enabled' do
before do
allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
@@ -187,7 +169,7 @@ describe 'Invites' do
end
end
- it "doesn't accept invitations until the user confirms his email" do
+ it "doesn't accept invitations until the user confirms their email" do
fill_in_sign_up_form(new_user)
sign_in(owner)
@@ -198,32 +180,14 @@ describe 'Invites' do
context 'the user sign-up using a different email address' do
let(:invite_email) { build_stubbed(:user).email }
- context 'when soft email confirmation is not enabled' do
- before do
- stub_feature_flags(soft_email_confirmation: false)
- allow(User).to receive(:allow_unconfirmed_access_for).and_return 0
- end
-
- it 'signs up and redirects to the invitation page' do
- fill_in_sign_up_form(new_user)
- confirm_email(new_user)
- fill_in_sign_in_form(new_user)
-
- expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
- end
+ before do
+ allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
end
- context 'when soft email confirmation is enabled' do
- before do
- stub_feature_flags(soft_email_confirmation: true)
- allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
- end
-
- it 'signs up and redirects to the invitation page' do
- fill_in_sign_up_form(new_user)
+ it 'signs up and redirects to the invitation page' do
+ fill_in_sign_up_form(new_user)
- expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
- end
+ expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
end
end
end
diff --git a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
index 7d3df711555..a3742af31de 100644
--- a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
@@ -11,7 +11,7 @@ describe 'Resolving all open threads in a merge request from an issue', :js do
def resolve_all_discussions_link_selector
text = "Resolve all threads in new issue"
url = new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid)
- %Q{a[data-original-title="#{text}"][href="#{url}"]}
+ %Q{a[title="#{text}"][href="#{url}"]}
end
describe 'as a user with access to the project' do
diff --git a/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
index 8f4083533b3..1c17b39c03a 100644
--- a/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
@@ -11,7 +11,7 @@ describe 'Resolve an open thread in a merge request by creating an issue', :js d
def resolve_discussion_selector
title = 'Resolve this thread in a new issue'
url = new_project_issue_path(project, discussion_to_resolve: discussion.id, merge_request_to_resolve_discussions_of: merge_request.iid)
- "a[data-original-title=\"#{title}\"][href=\"#{url}\"]"
+ "a[title=\"#{title}\"][href=\"#{url}\"]"
end
describe 'As a user with access to the project' do
diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb
index c99c205d5da..ee5773f1484 100644
--- a/spec/features/issues/filtered_search/filter_issues_spec.rb
+++ b/spec/features/issues/filtered_search/filter_issues_spec.rb
@@ -192,7 +192,7 @@ describe 'Filter issues', :js do
end
it 'filters issues by label containing special characters' do
- special_label = create(:label, project: project, title: '!@#{$%^&*()-+[]<>?/:{}|\}')
+ special_label = create(:label, project: project, title: '!@#$%^&*()-+[]<>?/:{}|\\')
special_issue = create(:issue, title: "Issue with special character label", project: project)
special_issue.labels << special_label
@@ -204,7 +204,7 @@ describe 'Filter issues', :js do
end
it 'filters issues by label not containing special characters' do
- special_label = create(:label, project: project, title: '!@#{$%^&*()-+[]<>?/:{}|\}')
+ special_label = create(:label, project: project, title: '!@#$%^&*()-+[]<>?/:{}|\\')
special_issue = create(:issue, title: "Issue with special character label", project: project)
special_issue.labels << special_label
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index 0ff3809a915..b7ec445a7aa 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -282,6 +282,32 @@ describe 'GFM autocomplete', :js do
end
end
+ context 'assignees' do
+ let(:issue_assignee) { create(:issue, project: project) }
+
+ before do
+ issue_assignee.update(assignees: [user])
+
+ visit project_issue_path(project, issue_assignee)
+
+ wait_for_requests
+ end
+
+ it 'lists users who are currently not assigned to the issue when using /assign' do
+ note = find('#note-body')
+ page.within '.timeline-content-form' do
+ note.native.send_keys('/as')
+ end
+
+ find('.atwho-view li', text: '/assign')
+ note.native.send_keys(:tab)
+
+ wait_for_requests
+
+ expect(find('#at-view-users .atwho-view-ul')).not_to have_content(user.username)
+ end
+ end
+
context 'labels' do
it 'opens autocomplete menu for Labels when field starts with text with item escaping HTML characters' do
create(:label, project: project, title: label_xss_title)
diff --git a/spec/features/issues/issue_detail_spec.rb b/spec/features/issues/issue_detail_spec.rb
index a1b53718577..0d24b02a64c 100644
--- a/spec/features/issues/issue_detail_spec.rb
+++ b/spec/features/issues/issue_detail_spec.rb
@@ -23,16 +23,18 @@ describe 'Issue Detail', :js do
context 'when issue description has xss snippet' do
before do
issue.update!(description: '![xss" onload=alert(1);//](a)')
+
sign_in(user)
visit project_issue_path(project, issue)
- wait_for_requests
end
it 'encodes the description to prevent xss issues' do
page.within('.issuable-details .detail-page-description') do
+ image = find('img.js-lazy-loaded')
+
expect(page).to have_selector('img', count: 1)
- expect(find('img')['onerror']).to be_nil
- expect(find('img')['src']).to end_with('/a')
+ expect(image['onerror']).to be_nil
+ expect(image['src']).to end_with('/a')
end
end
end
diff --git a/spec/features/issues/move_spec.rb b/spec/features/issues/move_spec.rb
index 7126707affd..831bcf8931e 100644
--- a/spec/features/issues/move_spec.rb
+++ b/spec/features/issues/move_spec.rb
@@ -32,7 +32,7 @@ describe 'issue move to another project' do
let(:new_project) { create(:project) }
let(:new_project_search) { create(:project) }
let(:text) { "Text with #{mr.to_reference}" }
- let(:cross_reference) { old_project.to_reference(new_project) }
+ let(:cross_reference) { old_project.to_reference_base(new_project) }
before do
old_project.add_reporter(user)
diff --git a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
index 8322a6afa04..7eecfd1ccf4 100644
--- a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
+++ b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
@@ -187,7 +187,7 @@ describe 'User creates branch and merge request on issue page', :js do
let(:branch_name) { "#{issue.iid}-foo" }
before do
- project.repository.create_branch(branch_name, 'master')
+ project.repository.create_branch(branch_name)
visit project_issue_path(project, issue)
end
@@ -287,7 +287,7 @@ describe 'User creates branch and merge request on issue page', :js do
expect(source_message).to have_text('Source is not available')
# JavaScript gets refs starting with `mas` (entered above) and places the first match.
- # User sees `mas` in black color (the part he entered) and the `ter` in gray color (a hint).
+ # User sees `mas` in black color (the part they entered) and the `ter` in gray color (a hint).
# Since hinting is implemented via text selection and rspec/capybara doesn't have matchers for it,
# we just check the whole source name.
expect(input_source.value).to eq(project.default_branch)
diff --git a/spec/features/markdown/copy_as_gfm_spec.rb b/spec/features/markdown/copy_as_gfm_spec.rb
index 84221f5555a..f85acc28645 100644
--- a/spec/features/markdown/copy_as_gfm_spec.rb
+++ b/spec/features/markdown/copy_as_gfm_spec.rb
@@ -172,18 +172,36 @@ describe 'Copy as GFM', :js do
'![Image](https://example.com/image.png)'
)
+ verify_media_with_partial_path(
+ '![Image](/uploads/a123/image.png)',
+
+ project_media_uri(@project, '/uploads/a123/image.png')
+ )
+
verify(
'VideoLinkFilter',
'![Video](https://example.com/video.mp4)'
)
+ verify_media_with_partial_path(
+ '![Video](/uploads/a123/video.mp4)',
+
+ project_media_uri(@project, '/uploads/a123/video.mp4')
+ )
+
verify(
'AudioLinkFilter',
'![Audio](https://example.com/audio.wav)'
)
+ verify_media_with_partial_path(
+ '![Audio](/uploads/a123/audio.wav)',
+
+ project_media_uri(@project, '/uploads/a123/audio.wav')
+ )
+
verify(
'MathFilter: math as converted from GFM to HTML',
@@ -624,7 +642,7 @@ describe 'Copy as GFM', :js do
GFM
# table with empty heading
- <<~GFM,
+ <<~GFM
| | x | y |
|--|---|---|
| a | 1 | 0 |
@@ -647,6 +665,16 @@ describe 'Copy as GFM', :js do
end
end
+ def project_media_uri(project, media_path)
+ "#{project_path(project)}#{media_path}"
+ end
+
+ def verify_media_with_partial_path(gfm, media_uri)
+ html = gfm_to_html(gfm)
+ output_gfm = html_to_gfm(html)
+ expect(output_gfm).to include(media_uri)
+ end
+
# Fake a `current_user` helper
def current_user
@feat.user
@@ -784,7 +812,7 @@ describe 'Copy as GFM', :js do
verify(
'.line[id="LC9"], .line[id="LC10"]',
- <<~GFM,
+ <<~GFM
```ruby
raise RuntimeError, "System commands must be given as an array of strings"
end
@@ -826,7 +854,7 @@ describe 'Copy as GFM', :js do
verify(
'.line[id="LC27"], .line[id="LC28"]',
- <<~GFM,
+ <<~GFM
```json
"bio": null,
"skype": "",
diff --git a/spec/features/markdown/mermaid_spec.rb b/spec/features/markdown/mermaid_spec.rb
index 4520d1bb2da..542caccb18d 100644
--- a/spec/features/markdown/mermaid_spec.rb
+++ b/spec/features/markdown/mermaid_spec.rb
@@ -68,4 +68,34 @@ describe 'Mermaid rendering', :js do
expect(page).to have_selector('pre.mermaid')
end
end
+
+ it 'correctly sizes mermaid diagram inside <details> block', :js do
+ description = <<~MERMAID
+ <details>
+ <summary>Click to show diagram</summary>
+
+ ```mermaid
+ graph TD;
+ A-->B;
+ A-->C;
+ B-->D;
+ C-->D;
+ ```
+
+ </details>
+ MERMAID
+
+ project = create(:project, :public)
+ issue = create(:issue, project: project, description: description)
+
+ visit project_issue_path(project, issue)
+
+ page.within('.description') do
+ page.find('summary').click
+ svg = page.find('svg.mermaid')
+
+ expect(svg[:width].to_i).to be_within(5).of(120)
+ expect(svg[:height].to_i).to be_within(5).of(220)
+ end
+ end
end
diff --git a/spec/features/markdown/metrics_spec.rb b/spec/features/markdown/metrics_spec.rb
index e7fec41fae3..69e93268b57 100644
--- a/spec/features/markdown/metrics_spec.rb
+++ b/spec/features/markdown/metrics_spec.rb
@@ -93,10 +93,20 @@ describe 'Metrics rendering', :js, :use_clean_rails_memory_store_caching, :sidek
# Ensure we identify urls with the appropriate host.
# Configure host to include port in app:
Gitlab.config.gitlab[:url] = root_url.chomp('/')
+
+ clear_host_from_memoized_variables
end
def restore_host
default_url_options[:host] = @original_default_host
Gitlab.config.gitlab[:url] = @original_gitlab_url
+
+ clear_host_from_memoized_variables
+ end
+
+ def clear_host_from_memoized_variables
+ [:metrics_regex, :grafana_regex].each do |method_name|
+ Gitlab::Metrics::Dashboard::Url.clear_memoization(method_name)
+ end
end
end
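# clear_host_from_memoized_variables above exists because the URL regexes are
# memoized from the configured host, so stubbing a different host must also
# reset them. A minimal, stand-alone sketch of that memoize-and-clear pattern
# in plain Ruby (not GitLab's actual implementation; names are illustrative):
class UrlMatcher
  def initialize(host)
    @host = host
  end

  def metrics_regex
    # Built once and cached for the current host
    @metrics_regex ||= %r{\Ahttps?://#{Regexp.escape(@host)}/.+/metrics\z}
  end

  def clear_memoization(name)
    # Drop the cached value so the next call rebuilds it
    instance_variable_set("@#{name}", nil)
  end
end

matcher = UrlMatcher.new('gitlab.example.com')
matcher.metrics_regex
matcher.clear_memoization(:metrics_regex) # forces a rebuild after the host changes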
diff --git a/spec/features/merge_request/maintainer_edits_fork_spec.rb b/spec/features/merge_request/maintainer_edits_fork_spec.rb
index 4f2c5fc73d8..17ff494a6fa 100644
--- a/spec/features/merge_request/maintainer_edits_fork_spec.rb
+++ b/spec/features/merge_request/maintainer_edits_fork_spec.rb
@@ -20,7 +20,7 @@ describe 'a maintainer edits files on a source-branch of an MR from a fork', :js
end
before do
- stub_feature_flags(web_ide_default: false, single_mr_diff_view: false)
+ stub_feature_flags(web_ide_default: false, single_mr_diff_view: false, code_navigation: false)
target_project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/merge_request/user_creates_image_diff_notes_spec.rb b/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
index e0724a04ea3..e1b139c1b3b 100644
--- a/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
@@ -48,29 +48,11 @@ describe 'Merge request > User creates image diff notes', :js do
let(:commit) { project.commit('2f63565e7aac07bcdadb654e253078b727143ec4') }
let(:note1_position) do
- Gitlab::Diff::Position.new(
- old_path: path,
- new_path: path,
- width: 100,
- height: 100,
- x: 10,
- y: 10,
- position_type: "image",
- diff_refs: commit.diff_refs
- )
+ build(:image_diff_position, file: path, diff_refs: commit.diff_refs)
end
let(:note2_position) do
- Gitlab::Diff::Position.new(
- old_path: path,
- new_path: path,
- width: 100,
- height: 100,
- x: 20,
- y: 20,
- position_type: "image",
- diff_refs: commit.diff_refs
- )
+ build(:image_diff_position, file: path, diff_refs: commit.diff_refs)
end
let!(:note1) { create(:diff_note_on_commit, commit_id: commit.id, project: project, position: note1_position, note: 'my note 1') }
@@ -93,16 +75,7 @@ describe 'Merge request > User creates image diff notes', :js do
%w(inline parallel).each do |view|
context "#{view} view" do
let(:position) do
- Gitlab::Diff::Position.new(
- old_path: path,
- new_path: path,
- width: 100,
- height: 100,
- x: 1,
- y: 1,
- position_type: "image",
- diff_refs: merge_request.diff_refs
- )
+ build(:image_diff_position, file: path, diff_refs: merge_request.diff_refs)
end
let!(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: position) }
@@ -167,16 +140,7 @@ describe 'Merge request > User creates image diff notes', :js do
let(:path) { "files/images/ee_repo_logo.png" }
let(:position) do
- Gitlab::Diff::Position.new(
- old_path: path,
- new_path: path,
- width: 100,
- height: 100,
- x: 50,
- y: 50,
- position_type: "image",
- diff_refs: merge_request.diff_refs
- )
+ build(:image_diff_position, file: path, diff_refs: merge_request.diff_refs)
end
before do
diff --git a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
index 59c20f4ec6b..e6b77e28281 100644
--- a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
+++ b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
@@ -16,7 +16,7 @@ describe 'Merge request > User edits assignees sidebar', :js do
# DOM finders to simplify and improve readability
let(:sidebar_assignee_block) { page.find('.js-issuable-sidebar .assignee') }
let(:sidebar_assignee_avatar_link) { sidebar_assignee_block.find_all('a').find { |a| a['href'].include? assignee.username } }
- let(:sidebar_assignee_tooltip) { sidebar_assignee_avatar_link['data-original-title'] || '' }
+ let(:sidebar_assignee_tooltip) { sidebar_assignee_avatar_link['title'] || '' }
let(:sidebar_assignee_dropdown_item) { sidebar_assignee_block.find(".dropdown-menu li[data-user-id=\"#{assignee.id}\"]") }
let(:sidebar_assignee_dropdown_tooltip) { sidebar_assignee_dropdown_item.find('a')['data-title'] || '' }
diff --git a/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
new file mode 100644
index 00000000000..92d90926c0a
--- /dev/null
+++ b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Batch diffs', :js do
+ include MergeRequestDiffHelpers
+ include RepoHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'empty-branch') }
+
+ before do
+ stub_feature_flags(single_mr_diff_view: true)
+ stub_feature_flags(diffs_batch_load: true)
+
+ sign_in(project.owner)
+
+ visit diffs_project_merge_request_path(merge_request.project, merge_request)
+ wait_for_requests
+
+ # Add discussion to first line of first file
+ click_diff_line(find('.diff-file.file-holder:first-of-type tr.line_holder.new:first-of-type'))
+ page.within('.js-discussion-note-form') do
+ fill_in('note_note', with: 'First Line Comment')
+ click_button('Comment')
+ end
+
+ # Add discussion to first line of last file
+ click_diff_line(find('.diff-file.file-holder:last-of-type tr.line_holder.new:first-of-type'))
+ page.within('.js-discussion-note-form') do
+ fill_in('note_note', with: 'Last Line Comment')
+ click_button('Comment')
+ end
+
+ wait_for_requests
+ end
+
+ it 'assigns discussions to diff files across multiple batch pages' do
+ # Reload so we know the discussions are persisting across batch loads
+ visit page.current_url
+
+ # Wait for JS to settle
+ wait_for_requests
+
+ expect(page).to have_selector('.diff-files-holder .file-holder', count: 39)
+
+ # Confirm discussions are applied to the appropriate files (which should span multiple batch pages)
+ page.within('.diff-file.file-holder:first-of-type .notes .timeline-entry .note .note-text') do
+ expect(page).to have_content('First Line Comment')
+ end
+
+ page.within('.diff-file.file-holder:last-of-type .notes .timeline-entry .note .note-text') do
+ expect(page).to have_content('Last Line Comment')
+ end
+ end
+
+ context 'when user visits a URL with a link directly to a discussion' do
+ context 'which is in the first batched page of diffs' do
+ it 'scrolls to the correct discussion' do
+ page.within('.diff-file.file-holder:first-of-type') do
+ click_link('just now')
+ end
+
+ visit page.current_url
+
+ wait_for_requests
+
+ # Confirm scrolled to correct UI element
+ expect(page.find('.diff-file.file-holder:first-of-type .discussion-notes .timeline-entry li.note[id]').obscured?).to be_falsey
+ expect(page.find('.diff-file.file-holder:last-of-type .discussion-notes .timeline-entry li.note[id]').obscured?).to be_truthy
+ end
+ end
+
+ context 'which is in at least page 2 of the batched pages of diffs' do
+ it 'scrolls to the correct discussion' do
+ page.within('.diff-file.file-holder:last-of-type') do
+ click_link('just now')
+ end
+
+ visit page.current_url
+
+ wait_for_requests
+
+ # Confirm scrolled to correct UI element
+ expect(page.find('.diff-file.file-holder:first-of-type .discussion-notes .timeline-entry li.note[id]').obscured?).to be_truthy
+ expect(page.find('.diff-file.file-holder:last-of-type .discussion-notes .timeline-entry li.note[id]').obscured?).to be_falsey
+ end
+ end
+ end
+
+ context 'when user switches view styles' do
+ before do
+ find('.js-show-diff-settings').click
+ click_button 'Side-by-side'
+
+ wait_for_requests
+ end
+
+ it 'has the correct discussions applied to files across batched pages' do
+ expect(page).to have_selector('.diff-files-holder .file-holder', count: 39)
+
+ page.within('.diff-file.file-holder:first-of-type .notes .timeline-entry .note .note-text') do
+ expect(page).to have_content('First Line Comment')
+ end
+
+ page.within('.diff-file.file-holder:last-of-type .notes .timeline-entry .note .note-text') do
+ expect(page).to have_content('Last Line Comment')
+ end
+ end
+ end
+end
diff --git a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
index e40276f74e4..a37fc120b86 100644
--- a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
@@ -152,7 +152,7 @@ describe 'Merge request > User merges when pipeline succeeds', :js do
before do
merge_request.update(
merge_user: merge_request.author,
- merge_error: 'Something went wrong'
+ merge_error: 'Something went wrong.'
)
refresh
end
@@ -162,7 +162,7 @@ describe 'Merge request > User merges when pipeline succeeds', :js do
wait_for_requests
page.within('.mr-section-container') do
- expect(page).to have_content('Merge failed: Something went wrong')
+ expect(page).to have_content('Merge failed: Something went wrong. Please try again.')
end
end
end
@@ -171,7 +171,7 @@ describe 'Merge request > User merges when pipeline succeeds', :js do
before do
merge_request.update(
merge_user: merge_request.author,
- merge_error: 'Something went wrong'
+ merge_error: 'Something went wrong.'
)
refresh
end
@@ -181,7 +181,7 @@ describe 'Merge request > User merges when pipeline succeeds', :js do
wait_for_requests
page.within('.mr-section-container') do
- expect(page).to have_content('Merge failed: Something went wrong')
+ expect(page).to have_content('Merge failed: Something went wrong. Please try again.')
end
end
end
diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb
index 8b16760606c..103d126891e 100644
--- a/spec/features/merge_request/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb
@@ -46,7 +46,7 @@ describe 'Merge request > User posts diff notes', :js do
end
context 'with an old line on the left and a new line on the right' do
- it 'allows commenting on the left side' do
+ it 'allows commenting on the left side', quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/199050' do
should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_9_9"]').find(:xpath, '..'), 'left')
end
@@ -56,7 +56,7 @@ describe 'Merge request > User posts diff notes', :js do
end
context 'with an unchanged line on the left and an unchanged line on the right' do
- it 'allows commenting on the left side' do
+ it 'allows commenting on the left side', quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/196826' do
should_allow_commenting(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_7_7"]', match: :first).find(:xpath, '..'), 'left')
end
diff --git a/spec/features/merge_request/user_resolves_conflicts_spec.rb b/spec/features/merge_request/user_resolves_conflicts_spec.rb
index ce85e81868d..5fc65f020d3 100644
--- a/spec/features/merge_request/user_resolves_conflicts_spec.rb
+++ b/spec/features/merge_request/user_resolves_conflicts_spec.rb
@@ -169,8 +169,8 @@ describe 'Merge request > User resolves conflicts', :js do
context "with malicious branch name" do
let(:bad_branch_name) { "malicious-branch-{{toString.constructor('alert(/xss/)')()}}" }
- let(:branch) { project.repository.create_branch(bad_branch_name, 'conflict-resolvable') }
- let(:merge_request) { create_merge_request(branch.name) }
+ let!(:branch) { project.repository.create_branch(bad_branch_name, 'conflict-resolvable') }
+ let(:merge_request) { create_merge_request(bad_branch_name) }
before do
visit project_merge_request_path(project, merge_request)
diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
index eb86b1e33af..38a31d3bbd9 100644
--- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
+++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
@@ -10,13 +10,9 @@ describe 'Merge request > User resolves diff notes and threads', :js do
let!(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, note: "| Markdown | Table |\n|-------|---------|\n| first | second |") }
let(:path) { "files/ruby/popen.rb" }
let(:position) do
- Gitlab::Diff::Position.new(
- old_path: path,
- new_path: path,
- old_line: nil,
- new_line: 9,
- diff_refs: merge_request.diff_refs
- )
+ build(:text_diff_position,
+ file: path, old_line: nil, new_line: 9,
+ diff_refs: merge_request.diff_refs)
end
before do
@@ -368,16 +364,6 @@ describe 'Merge request > User resolves diff notes and threads', :js do
end
end
- it 'shows jump to next discussion button on all discussions' do
- wait_for_requests
-
- all_discussion_replies = page.all('.discussion-reply-holder')
-
- expect(all_discussion_replies.count).to eq(2)
- expect(all_discussion_replies.first.all('.discussion-next-btn').count).to eq(1)
- expect(all_discussion_replies.last.all('.discussion-next-btn').count).to eq(1)
- end
-
it 'displays next thread even if hidden' do
page.all('.note-discussion', count: 2).each do |discussion|
page.within discussion do
diff --git a/spec/features/merge_request/user_resolves_outdated_diff_discussions_spec.rb b/spec/features/merge_request/user_resolves_outdated_diff_discussions_spec.rb
index b40c11f0d47..9f7c97e510c 100644
--- a/spec/features/merge_request/user_resolves_outdated_diff_discussions_spec.rb
+++ b/spec/features/merge_request/user_resolves_outdated_diff_discussions_spec.rb
@@ -13,20 +13,16 @@ describe 'Merge request > User resolves outdated diff discussions', :js do
let(:current_diff_refs) { merge_request.diff_refs }
let(:outdated_position) do
- Gitlab::Diff::Position.new(
- old_path: 'files/csv/Book1.csv',
- new_path: 'files/csv/Book1.csv',
- old_line: nil,
+ build(:text_diff_position, :added,
+ file: 'files/csv/Book1.csv',
new_line: 9,
diff_refs: outdated_diff_refs
)
end
let(:current_position) do
- Gitlab::Diff::Position.new(
- old_path: 'files/csv/Book1.csv',
- new_path: 'files/csv/Book1.csv',
- old_line: nil,
+ build(:text_diff_position, :added,
+ file: 'files/csv/Book1.csv',
new_line: 1,
diff_refs: current_diff_refs
)
diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
index 3e77b9e75d6..c482d783bab 100644
--- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
@@ -10,10 +10,8 @@ describe 'Merge request > User sees avatars on diff notes', :js do
let(:merge_request) { create(:merge_request_with_diffs, source_project: project, author: user, title: 'Bug NS-04') }
let(:path) { 'files/ruby/popen.rb' }
let(:position) do
- Gitlab::Diff::Position.new(
- old_path: path,
- new_path: path,
- old_line: nil,
+ build(:text_diff_position, :added,
+ file: path,
new_line: 9,
diff_refs: merge_request.diff_refs
)
diff --git a/spec/features/merge_request/user_sees_deployment_widget_spec.rb b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
index 99c9e9dc501..9670bd798bf 100644
--- a/spec/features/merge_request/user_sees_deployment_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
@@ -29,7 +29,7 @@ describe 'Merge request > User sees deployment widget', :js do
wait_for_requests
expect(page).to have_content("Deployed to #{environment.name}")
- expect(find('.js-deploy-time')['data-original-title']).to eq(deployment.created_at.to_time.in_time_zone.to_s(:medium))
+ expect(find('.js-deploy-time')['title']).to eq(deployment.created_at.to_time.in_time_zone.to_s(:medium))
end
context 'when a user created a new merge request with the same SHA' do
@@ -96,7 +96,7 @@ describe 'Merge request > User sees deployment widget', :js do
visit project_merge_request_path(project, merge_request)
wait_for_requests
- expect(page).to have_content("Canceled deploy to #{environment.name}")
+ expect(page).to have_content("Canceled deployment to #{environment.name}")
expect(page).not_to have_css('.js-deploy-time')
end
end
diff --git a/spec/features/merge_request/user_sees_diff_spec.rb b/spec/features/merge_request/user_sees_diff_spec.rb
index 2d91d09a486..2ef4a18f78d 100644
--- a/spec/features/merge_request/user_sees_diff_spec.rb
+++ b/spec/features/merge_request/user_sees_diff_spec.rb
@@ -76,7 +76,7 @@ describe 'Merge request > User sees diff', :js do
end
context 'as user who needs to fork' do
- it 'shows fork/cancel confirmation', :sidekiq_might_not_need_inline do
+ it 'shows fork/cancel confirmation', :sidekiq_might_not_need_inline, quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/196749' do
sign_in(user)
visit diffs_project_merge_request_path(project, merge_request)
diff --git a/spec/features/merge_request/user_sees_discussions_spec.rb b/spec/features/merge_request/user_sees_discussions_spec.rb
index 319eee0c55a..b4afd8c6332 100644
--- a/spec/features/merge_request/user_sees_discussions_spec.rb
+++ b/spec/features/merge_request/user_sees_discussions_spec.rb
@@ -18,10 +18,8 @@ describe 'Merge request > User sees threads', :js do
let!(:outdated_discussion) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: outdated_position).to_discussion }
let!(:active_discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion }
let(:outdated_position) do
- Gitlab::Diff::Position.new(
- old_path: "files/ruby/popen.rb",
- new_path: "files/ruby/popen.rb",
- old_line: nil,
+ build(:text_diff_position, :added,
+ file: "files/ruby/popen.rb",
new_line: 9,
diff_refs: outdated_diff_refs
)
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index 098f41f120d..94f57cdda74 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -19,7 +19,7 @@ describe 'Merge request > User sees merge widget', :js do
sign_in(user)
end
- context 'new merge request' do
+ context 'new merge request', :sidekiq_might_not_need_inline do
before do
visit project_new_merge_request_path(
project,
@@ -604,7 +604,7 @@ describe 'Merge request > User sees merge widget', :js do
click_button 'addTest'
expect(page).to have_content('6.66')
- expect(page).to have_content(sample_java_failed_message.gsub!(/\s+/, ' ').strip)
+ expect(page).to have_content(sample_java_failed_message.gsub(/\s+/, ' ').strip)
end
end
end
@@ -649,7 +649,7 @@ describe 'Merge request > User sees merge widget', :js do
click_button 'Test#sum when a is 1 and b is 3 returns summary'
expect(page).to have_content('2.22')
- expect(page).to have_content(sample_rspec_failed_message.gsub!(/\s+/, ' ').strip)
+ expect(page).to have_content(sample_rspec_failed_message.gsub(/\s+/, ' ').strip)
end
end
end
@@ -699,6 +699,137 @@ describe 'Merge request > User sees merge widget', :js do
end
end
+ context 'when a new error exists' do
+ let(:base_reports) do
+ Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
+ reports.get_suite('junit').add_test_case(create_test_case_java_success)
+ end
+ end
+
+ let(:head_reports) do
+ Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
+ reports.get_suite('junit').add_test_case(create_test_case_java_error)
+ end
+ end
+
+ it 'shows test reports summary which includes the new error' do
+ within(".js-reports-container") do
+ click_button 'Expand'
+
+ expect(page).to have_content('Test summary contained 1 failed/error test result out of 2 total tests')
+ within(".js-report-section-container") do
+ expect(page).to have_content('rspec found no changed test results out of 1 total test')
+ expect(page).to have_content('junit found 1 failed/error test result out of 1 total test')
+ expect(page).to have_content('New')
+ expect(page).to have_content('addTest')
+ end
+ end
+ end
+
+ context 'when user clicks the new error' do
+ it 'shows the test report detail' do
+ within(".js-reports-container") do
+ click_button 'Expand'
+
+ within(".js-report-section-container") do
+ click_button 'addTest'
+
+ expect(page).to have_content('8.88')
+ end
+ end
+ end
+ end
+ end
+
+ context 'when an existing error exists' do
+ let(:base_reports) do
+ Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ reports.get_suite('rspec').add_test_case(create_test_case_rspec_error)
+ reports.get_suite('junit').add_test_case(create_test_case_java_success)
+ end
+ end
+
+ let(:head_reports) do
+ Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ reports.get_suite('rspec').add_test_case(create_test_case_rspec_error)
+ reports.get_suite('junit').add_test_case(create_test_case_java_success)
+ end
+ end
+
+ it 'shows test reports summary which includes the existing error' do
+ within(".js-reports-container") do
+ click_button 'Expand'
+
+ expect(page).to have_content('Test summary contained 1 failed/error test result out of 2 total tests')
+ within(".js-report-section-container") do
+ expect(page).to have_content('rspec found 1 failed/error test result out of 1 total test')
+ expect(page).to have_content('junit found no changed test results out of 1 total test')
+ expect(page).not_to have_content('New')
+ expect(page).to have_content('Test#sum when a is 4 and b is 4 returns summary')
+ end
+ end
+ end
+
+ context 'when user clicks the existing error' do
+ it 'shows test report detail of it' do
+ within(".js-reports-container") do
+ click_button 'Expand'
+
+ within(".js-report-section-container") do
+ click_button 'Test#sum when a is 4 and b is 4 returns summary'
+
+ expect(page).to have_content('4.44')
+ end
+ end
+ end
+ end
+ end
+
+ context 'when a resolved error exists' do
+ let(:base_reports) do
+ Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
+ reports.get_suite('junit').add_test_case(create_test_case_java_error)
+ end
+ end
+
+ let(:head_reports) do
+ Gitlab::Ci::Reports::TestReports.new.tap do |reports|
+ reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
+ reports.get_suite('junit').add_test_case(create_test_case_java_success)
+ end
+ end
+
+ it 'shows test reports summary which includes the resolved error' do
+ within(".js-reports-container") do
+ click_button 'Expand'
+
+ expect(page).to have_content('Test summary contained 1 fixed test result out of 2 total tests')
+ within(".js-report-section-container") do
+ expect(page).to have_content('rspec found no changed test results out of 1 total test')
+ expect(page).to have_content('junit found 1 fixed test result out of 1 total test')
+ expect(page).to have_content('addTest')
+ end
+ end
+ end
+
+ context 'when user clicks the resolved error' do
+ it 'shows test report detail of it' do
+ within(".js-reports-container") do
+ click_button 'Expand'
+
+ within(".js-report-section-container") do
+ click_button 'addTest'
+
+ expect(page).to have_content('5.55')
+ end
+ end
+ end
+ end
+ end
+
context 'properly truncates the report' do
let(:base_reports) do
Gitlab::Ci::Reports::TestReports.new.tap do |reports|
diff --git a/spec/features/merge_request/user_sees_versions_spec.rb b/spec/features/merge_request/user_sees_versions_spec.rb
index cab86f3fd94..5b43fe407eb 100644
--- a/spec/features/merge_request/user_sees_versions_spec.rb
+++ b/spec/features/merge_request/user_sees_versions_spec.rb
@@ -50,7 +50,7 @@ describe 'Merge request > User sees versions', :js do
expect(page).to have_content 'latest version'
end
- expect(page).to have_content '8 Files'
+ expect(page).to have_content '8 files'
end
it_behaves_like 'allows commenting',
@@ -84,12 +84,10 @@ describe 'Merge request > User sees versions', :js do
end
it 'shows comments that were last relevant at that version' do
- expect(page).to have_content '5 Files'
+ expect(page).to have_content '5 files'
- position = Gitlab::Diff::Position.new(
- old_path: ".gitmodules",
- new_path: ".gitmodules",
- old_line: nil,
+ position = build(:text_diff_position, :added,
+ file: ".gitmodules",
new_line: 4,
diff_refs: merge_request_diff1.diff_refs
)
@@ -128,19 +126,16 @@ describe 'Merge request > User sees versions', :js do
diff_id: merge_request_diff3.id,
start_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9'
)
- expect(page).to have_content '4 Files'
+ expect(page).to have_content '4 files'
- additions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group svg.ic-file-addition')
- .ancestor('.diff-stats-group').text
- deletions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group svg.ic-file-deletion')
- .ancestor('.diff-stats-group').text
+ additions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group .js-file-addition-line').text
+ deletions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group .js-file-deletion-line').text
expect(additions_content).to eq '15'
expect(deletions_content).to eq '6'
- position = Gitlab::Diff::Position.new(
- old_path: ".gitmodules",
- new_path: ".gitmodules",
+ position = build(:text_diff_position,
+ file: ".gitmodules",
old_line: 4,
new_line: 4,
diff_refs: merge_request_diff3.compare_with(merge_request_diff1.head_commit_sha).diff_refs
@@ -156,12 +151,10 @@ describe 'Merge request > User sees versions', :js do
end
it 'show diff between new and old version' do
- additions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group svg.ic-file-addition')
- .ancestor('.diff-stats-group').text
- deletions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group svg.ic-file-deletion')
- .ancestor('.diff-stats-group').text
+ additions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group .js-file-addition-line').text
+ deletions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group .js-file-deletion-line').text
- expect(page).to have_content '4 Files'
+ expect(page).to have_content '4 files'
expect(additions_content).to eq '15'
expect(deletions_content).to eq '6'
end
@@ -171,7 +164,7 @@ describe 'Merge request > User sees versions', :js do
page.within '.mr-version-dropdown' do
expect(page).to have_content 'latest version'
end
- expect(page).to have_content '8 Files'
+ expect(page).to have_content '8 files'
end
it_behaves_like 'allows commenting',
@@ -197,7 +190,7 @@ describe 'Merge request > User sees versions', :js do
find('.btn-default').click
click_link 'version 1'
end
- expect(page).to have_content '0 Files'
+ expect(page).to have_content '0 files'
end
end
@@ -223,7 +216,7 @@ describe 'Merge request > User sees versions', :js do
expect(page).to have_content 'version 1'
end
- expect(page).to have_content '0 Files'
+ expect(page).to have_content '0 files'
end
end
diff --git a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
index 95cb0a2dee3..62e0e4d76ed 100644
--- a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
+++ b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
@@ -112,7 +112,7 @@ describe 'User comments on a diff', :js do
changes = sample_compare(expanded_changes).changes.last(expanded_changes.size)
page.within("[id='#{hash}']") do
- find("button[data-original-title='Show full file']").click
+ find("button[title='Show full file']").click
wait_for_requests
click_diff_line(find("[id='#{changes.first[:line_code]}']"))
diff --git a/spec/features/merge_requests/user_squashes_merge_request_spec.rb b/spec/features/merge_requests/user_squashes_merge_request_spec.rb
index a9b96c5bbf5..ce5ed76dc7a 100644
--- a/spec/features/merge_requests/user_squashes_merge_request_spec.rb
+++ b/spec/features/merge_requests/user_squashes_merge_request_spec.rb
@@ -6,6 +6,7 @@ describe 'User squashes a merge request', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:source_branch) { 'csv' }
+ let(:protected_source_branch) { false }
let!(:original_head) { project.repository.commit('master') }
@@ -40,7 +41,7 @@ describe 'User squashes a merge request', :js do
def accept_mr
expect(page).to have_button('Merge')
- uncheck 'Delete source branch'
+ uncheck 'Delete source branch' unless protected_source_branch
click_on 'Merge'
end
@@ -56,18 +57,38 @@ describe 'User squashes a merge request', :js do
end
context 'when the MR has only one commit' do
+ let(:source_branch) { 'master' }
+ let(:target_branch) { 'branch-merged' }
+ let(:protected_source_branch) { true }
+ let(:source_sha) { project.commit(source_branch).sha }
+ let(:target_sha) { project.commit(target_branch).sha }
+
before do
- merge_request = create(:merge_request, source_project: project, target_project: project, source_branch: 'master', target_branch: 'branch-merged')
+ merge_request = create(:merge_request, source_project: project, target_project: project, source_branch: source_branch, target_branch: target_branch, squash: true)
visit project_merge_request_path(project, merge_request)
end
- it 'does not show the squash checkbox' do
+ it 'accepts the merge request without issuing a squash request', :sidekiq_inline do
+ expect_next_instance_of(Gitlab::GitalyClient::OperationService) do |instance|
+ expect(instance).not_to receive(:user_squash)
+ end
+
+ expect(project.repository.ancestor?(source_branch, target_branch)).to be_falsey
expect(page).not_to have_field('squash')
+
+ accept_mr
+
+ expect(page).to have_content('Merged')
+
+ latest_target_commits = project.repository.commits_between(source_sha, target_sha).map(&:raw)
+
+ expect(latest_target_commits.count).to eq(1)
+ expect(project.repository.ancestor?(source_branch, target_branch)).to be_truthy
end
end
- context 'when squash is enabled on merge request creation' do
+ context 'when squash is enabled on merge request creation', :sidekiq_might_not_need_inline do
before do
visit project_new_merge_request_path(project, merge_request: { target_branch: 'master', source_branch: source_branch })
check 'merge_request[squash]'
@@ -97,7 +118,7 @@ describe 'User squashes a merge request', :js do
end
end
- context 'when squash is not enabled on merge request creation' do
+ context 'when squash is not enabled on merge request creation', :sidekiq_might_not_need_inline do
before do
visit project_new_merge_request_path(project, merge_request: { target_branch: 'master', source_branch: source_branch })
click_on 'Submit merge request'
diff --git a/spec/features/profiles/password_spec.rb b/spec/features/profiles/password_spec.rb
index 8c0c426f689..9dc96080732 100644
--- a/spec/features/profiles/password_spec.rb
+++ b/spec/features/profiles/password_spec.rb
@@ -63,7 +63,7 @@ describe 'Profile > Password' do
visit edit_profile_password_path
- expect(page).to have_gitlab_http_status(404)
+ expect(page).to have_gitlab_http_status(:not_found)
end
end
@@ -73,7 +73,7 @@ describe 'Profile > Password' do
it 'renders 404' do
visit edit_profile_password_path
- expect(page).to have_gitlab_http_status(404)
+ expect(page).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/features/profiles/user_edit_preferences_spec.rb b/spec/features/profiles/user_edit_preferences_spec.rb
index 2d2da222998..6e61536d5ff 100644
--- a/spec/features/profiles/user_edit_preferences_spec.rb
+++ b/spec/features/profiles/user_edit_preferences_spec.rb
@@ -29,4 +29,31 @@ describe 'User edit preferences profile' do
expect(field).not_to be_checked
end
+
+ describe 'User changes tab width to acceptable value' do
+ it 'shows success message' do
+ fill_in 'Tab width', with: 9
+ click_button 'Save changes'
+
+ expect(page).to have_content('Preferences saved.')
+ end
+
+ it 'saves the value' do
+ tab_width_field = page.find_field('Tab width')
+
+ expect do
+ tab_width_field.fill_in with: 6
+ click_button 'Save changes'
+ end.to change { tab_width_field.value }
+ end
+ end
+
+ describe 'User changes tab width to unacceptable value' do
+ it 'shows error message' do
+ fill_in 'Tab width', with: -1
+ click_button 'Save changes'
+
+ expect(page).to have_content('Failed to save preferences')
+ end
+ end
end
diff --git a/spec/features/profiles/user_manages_applications_spec.rb b/spec/features/profiles/user_manages_applications_spec.rb
index 7a961855c92..668c4e8c784 100644
--- a/spec/features/profiles/user_manages_applications_spec.rb
+++ b/spec/features/profiles/user_manages_applications_spec.rb
@@ -20,16 +20,19 @@ describe 'User manages applications' do
expect(page).to have_content 'Application: test'
expect(page).to have_content 'Application ID'
expect(page).to have_content 'Secret'
+ expect(page).to have_content 'Confidential Yes'
click_on 'Edit'
expect(page).to have_content 'Edit application'
fill_in :doorkeeper_application_name, with: 'test_changed'
+ uncheck :doorkeeper_application_confidential
click_on 'Save application'
expect(page).to have_content 'test_changed'
expect(page).to have_content 'Application ID'
expect(page).to have_content 'Secret'
+ expect(page).to have_content 'Confidential No'
visit applications_profile_path
diff --git a/spec/features/projects/actve_tabs_spec.rb b/spec/features/projects/active_tabs_spec.rb
index 56f587f23ee..41c0e583815 100644
--- a/spec/features/projects/actve_tabs_spec.rb
+++ b/spec/features/projects/active_tabs_spec.rb
@@ -7,6 +7,8 @@ describe 'Project active tab' do
let(:project) { create(:project, :repository) }
before do
+ stub_feature_flags(analytics_pages_under_project_analytics_sidebar: { enabled: false, thing: project })
+
project.add_maintainer(user)
sign_in(user)
end
@@ -17,21 +19,6 @@ describe 'Project active tab' do
end
end
- shared_examples 'page has active tab' do |title|
- it "activates #{title} tab" do
- expect(page).to have_selector('.sidebar-top-level-items > li.active', count: 1)
- expect(find('.sidebar-top-level-items > li.active')).to have_content(title)
- end
- end
-
- shared_examples 'page has active sub tab' do |title|
- it "activates #{title} sub tab" do
- expect(page).to have_selector('.sidebar-sub-level-items > li.active:not(.fly-out-top-item)', count: 1)
- expect(find('.sidebar-sub-level-items > li.active:not(.fly-out-top-item)'))
- .to have_content(title)
- end
- end
-
context 'on project Home' do
before do
visit project_path(project)
@@ -136,4 +123,35 @@ describe 'Project active tab' do
it_behaves_like 'page has active sub tab', 'Repository'
end
end
+
+ context 'when `analytics_pages_under_project_analytics_sidebar` feature flag is enabled' do
+ before do
+ stub_feature_flags(analytics_pages_under_project_analytics_sidebar: { enabled: true, thing: project })
+ end
+
+ context 'on project Analytics' do
+ before do
+ visit charts_project_graph_path(project, 'master')
+ end
+
+ context 'on project Analytics/Repository Analytics' do
+ it_behaves_like 'page has active tab', _('Analytics')
+ it_behaves_like 'page has active sub tab', _('Repository Analytics')
+ end
+
+ context 'on project Analytics/Cycle Analytics' do
+ before do
+ click_tab(_('CI / CD Analytics'))
+ end
+
+ it_behaves_like 'page has active tab', _('Analytics')
+ it_behaves_like 'page has active sub tab', _('CI / CD Analytics')
+ end
+ end
+ end
end
diff --git a/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb b/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb
index 254ebfb839a..fb70076fcf1 100644
--- a/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb
+++ b/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb
@@ -9,7 +9,7 @@ describe "User downloads artifacts" do
shared_examples "downloading" do
it "downloads the zip" do
- expect(page.response_headers["Content-Disposition"]).to eq(%Q{attachment; filename*=UTF-8''#{job.artifacts_file.filename}; filename="#{job.artifacts_file.filename}"})
+ expect(page.response_headers['Content-Disposition']).to eq(%Q{attachment; filename="#{job.artifacts_file.filename}"; filename*=UTF-8''#{job.artifacts_file.filename}})
expect(page.response_headers['Content-Transfer-Encoding']).to eq("binary")
expect(page.response_headers['Content-Type']).to eq("application/zip")
expect(page.source.b).to eq(job.artifacts_file.file.read.b)
diff --git a/spec/features/projects/badges/coverage_spec.rb b/spec/features/projects/badges/coverage_spec.rb
index dd51eac9be1..d17588bb7b4 100644
--- a/spec/features/projects/badges/coverage_spec.rb
+++ b/spec/features/projects/badges/coverage_spec.rb
@@ -54,7 +54,7 @@ describe 'test coverage badge' do
it 'user requests test coverage badge image' do
show_test_coverage_badge
- expect(page).to have_gitlab_http_status(404)
+ expect(page).to have_gitlab_http_status(:not_found)
end
end
diff --git a/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb b/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
index 1fc490ecbfe..e54137b9492 100644
--- a/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
+++ b/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
@@ -44,6 +44,17 @@ describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js do
expect(find('.js-data-file-blob-permalink-url')['href']).to eq(get_absolute_url(project_blob_path(project, tree_join(sha, path), anchor: ending_fragment)))
end
+ it 'changes fragment hash if icon inside line number link is clicked' do
+ ending_fragment = "L7"
+
+ visit_blob
+
+ find("##{ending_fragment}").hover
+ find("##{ending_fragment} i").click
+
+ expect(find('.js-data-file-blob-permalink-url')['href']).to eq(get_absolute_url(project_blob_path(project, tree_join(sha, path), anchor: ending_fragment)))
+ end
+
it 'with initial fragment hash, changes fragment hash if line number clicked' do
fragment = "L1"
ending_fragment = "L5"
@@ -83,6 +94,17 @@ describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js do
expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: ending_fragment)))
end
+ it 'changes fragment hash if icon inside line number link is clicked' do
+ ending_fragment = "L7"
+
+ visit_blob
+
+ find("##{ending_fragment}").hover
+ find("##{ending_fragment} i").click
+
+ expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: ending_fragment)))
+ end
+
it 'with initial fragment hash, changes fragment hash if line number clicked' do
fragment = "L1"
ending_fragment = "L5"
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index 5d86e4125df..0ff3e45c956 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -13,6 +13,10 @@ describe 'File blob', :js do
wait_for_requests
end
+ before do
+ stub_feature_flags(code_navigation: false)
+ end
+
context 'Ruby file' do
before do
visit_blob('files/ruby/popen.rb')
@@ -449,7 +453,7 @@ describe 'File blob', :js do
end
end
- context '.gitlab-ci.yml' do
+ describe '.gitlab-ci.yml' do
before do
project.add_maintainer(project.creator)
@@ -477,7 +481,7 @@ describe 'File blob', :js do
end
end
- context '.gitlab/route-map.yml' do
+ describe '.gitlab/route-map.yml' do
before do
project.add_maintainer(project.creator)
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index a1d6a8896c7..5d62b2f87bb 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -69,6 +69,8 @@ describe 'Editing file blob', :js do
context 'from blob file path' do
before do
+ stub_feature_flags(code_navigation: false)
+
visit project_blob_path(project, tree_join(branch, file_path))
end
diff --git a/spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb b/spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb
index b90129d6176..30878b7fb64 100644
--- a/spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb
+++ b/spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb
@@ -8,6 +8,7 @@ describe 'User creates blob in new project', :js do
shared_examples 'creating a file' do
before do
+ stub_feature_flags(code_navigation: false)
sign_in(user)
visit project_path(project)
end
diff --git a/spec/features/projects/branches/user_deletes_branch_spec.rb b/spec/features/projects/branches/user_deletes_branch_spec.rb
index 1f053b69646..ad63a75a149 100644
--- a/spec/features/projects/branches/user_deletes_branch_spec.rb
+++ b/spec/features/projects/branches/user_deletes_branch_spec.rb
@@ -4,7 +4,7 @@ require "spec_helper"
describe "User deletes branch", :js do
set(:user) { create(:user) }
- set(:project) { create(:project, :repository) }
+ let(:project) { create(:project, :repository) }
before do
project.add_developer(user)
@@ -20,6 +20,8 @@ describe "User deletes branch", :js do
accept_alert { find(".btn-remove").click }
end
+ wait_for_requests
+
expect(page).to have_css(".js-branch-improve\\/awesome", visible: :hidden)
end
end
diff --git a/spec/features/projects/clusters/eks_spec.rb b/spec/features/projects/clusters/eks_spec.rb
index bb0072fc8dd..a856376cb4b 100644
--- a/spec/features/projects/clusters/eks_spec.rb
+++ b/spec/features/projects/clusters/eks_spec.rb
@@ -30,6 +30,10 @@ describe 'AWS EKS Cluster', :js do
it 'user sees a form to create an EKS cluster' do
expect(page).to have_content('Create new cluster on EKS')
end
+
+ it 'highlights Amazon EKS logo' do
+ expect(page).to have_css('.js-create-aws-cluster-button.active')
+ end
end
end
end
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index 4bc0aef0cd4..0143461eadb 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -14,6 +14,11 @@ describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
allow(Projects::ClustersController).to receive(:STATUS_POLLING_INTERVAL) { 100 }
end
+ def submit_form
+ execute_script('document.querySelector(".js-gke-cluster-creation-submit").removeAttribute("disabled")')
+ execute_script('document.querySelector(".js-gke-cluster-creation-submit").click()')
+ end
+
context 'when user has signed with Google' do
let(:project_id) { 'test-project-1234' }
@@ -33,8 +38,12 @@ describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
click_link 'Google GKE'
end
+ it 'highlights Google GKE logo' do
+ expect(page).to have_css('.js-create-gcp-cluster-button.active')
+ end
+
context 'when user filled form with valid parameters' do
- subject { click_button 'Create Kubernetes cluster' }
+ subject { submit_form }
before do
allow_any_instance_of(GoogleApi::CloudPlatform::Client)
@@ -47,8 +56,8 @@ describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
allow(WaitForClusterCreationWorker).to receive(:perform_in).and_return(nil)
- execute_script('document.querySelector(".js-gke-cluster-creation-submit").removeAttribute("disabled")')
- sleep 2 # wait for ajax
+ expect(page).to have_css('.js-gcp-project-id-dropdown')
+
execute_script('document.querySelector(".js-gcp-project-id-dropdown input").setAttribute("type", "text")')
execute_script('document.querySelector(".js-gcp-zone-dropdown input").setAttribute("type", "text")')
execute_script('document.querySelector(".js-gcp-machine-type-dropdown input").setAttribute("type", "text")')
@@ -86,8 +95,7 @@ describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
context 'when user filled form with invalid parameters' do
before do
- execute_script('document.querySelector(".js-gke-cluster-creation-submit").removeAttribute("disabled")')
- click_button 'Create Kubernetes cluster'
+ submit_form
end
it 'user sees a validation error' do
diff --git a/spec/features/projects/environments/environment_metrics_spec.rb b/spec/features/projects/environments/environment_metrics_spec.rb
index d34db5e15cc..150df66bdd7 100644
--- a/spec/features/projects/environments/environment_metrics_spec.rb
+++ b/spec/features/projects/environments/environment_metrics_spec.rb
@@ -30,9 +30,9 @@ describe 'Environment > Metrics' do
click_link('See metrics')
expect(page).to have_metrics_path(environment)
- expect(page).to have_css('div.js-environments-dropdown')
+ expect(page).to have_css('[data-qa-selector="environments_dropdown"]')
- within('div.js-environments-dropdown') do
+ within('[data-qa-selector="environments_dropdown"]') do
# Click on the dropdown
click_on(environment.name)
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 01687674309..12412e87a7b 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -13,7 +13,7 @@ describe 'Environments page', :js do
end
def stop_button_selector
- %q{button[data-original-title="Stop environment"]}
+ %q{button[title="Stop environment"]}
end
describe 'page tabs' do
diff --git a/spec/features/projects/files/edit_file_soft_wrap_spec.rb b/spec/features/projects/files/edit_file_soft_wrap_spec.rb
index df6bc6883a9..53b1a522a8e 100644
--- a/spec/features/projects/files/edit_file_soft_wrap_spec.rb
+++ b/spec/features/projects/files/edit_file_soft_wrap_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'Projects > Files > User uses soft wrap whilst editing file', :js do
+describe 'Projects > Files > User uses soft wrap while editing file', :js do
before do
project = create(:project, :repository)
user = project.owner
diff --git a/spec/features/projects/files/template_type_dropdown_spec.rb b/spec/features/projects/files/template_type_dropdown_spec.rb
index ba52a7e7deb..03b4b9b4517 100644
--- a/spec/features/projects/files/template_type_dropdown_spec.rb
+++ b/spec/features/projects/files/template_type_dropdown_spec.rb
@@ -75,6 +75,11 @@ describe 'Projects > Files > Template type dropdown selector', :js do
check_type_selector_toggle_text('.gitignore')
end
+ it 'sets the toggle text when selecting the template type' do
+ select_template_type('.gitignore')
+ check_type_selector_toggle_text('.gitignore')
+ end
+
it 'selects every template type correctly' do
try_selecting_all_types
end
diff --git a/spec/features/projects/files/user_browses_files_spec.rb b/spec/features/projects/files/user_browses_files_spec.rb
index b8efabb0cab..5364bc10b2f 100644
--- a/spec/features/projects/files/user_browses_files_spec.rb
+++ b/spec/features/projects/files/user_browses_files_spec.rb
@@ -171,6 +171,31 @@ describe "User browses files" do
end
end
+ context "when browsing a `improve/awesome` branch", :js do
+ before do
+ visit(project_tree_path(project, "improve/awesome"))
+ end
+
+ it "shows files from a repository" do
+ expect(page).to have_content("VERSION")
+ .and have_content(".gitignore")
+ .and have_content("LICENSE")
+ end
+ end
+
+ context "when browsing a `test-#` branch", :js do
+ before do
+ project.repository.create_branch('test-#', project.repository.root_ref)
+ visit(project_tree_path(project, "test-#"))
+ end
+
+ it "shows files from a repository" do
+ expect(page).to have_content("VERSION")
+ .and have_content(".gitignore")
+ .and have_content("LICENSE")
+ end
+ end
+
context "when browsing a specific ref", :js do
let(:ref) { project_tree_path(project, "6d39438") }
diff --git a/spec/features/projects/files/user_creates_directory_spec.rb b/spec/features/projects/files/user_creates_directory_spec.rb
index b8765066217..4291f0a74f8 100644
--- a/spec/features/projects/files/user_creates_directory_spec.rb
+++ b/spec/features/projects/files/user_creates_directory_spec.rb
@@ -16,6 +16,8 @@ describe 'Projects > Files > User creates a directory', :js do
project.add_developer(user)
sign_in(user)
visit project_tree_path(project, 'master')
+
+ wait_for_requests
end
context 'with default target branch' do
@@ -43,6 +45,25 @@ describe 'Projects > Files > User creates a directory', :js do
end
end
+ context 'inside sub-folder' do
+ it 'creates new directory' do
+ click_link 'files'
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('files')
+ end
+
+ first('.add-to-tree').click
+ click_link('New directory')
+
+ fill_in(:dir_name, with: 'new_directory')
+ click_button('Create directory')
+
+ expect(page).to have_content('files')
+ expect(page).to have_content('new_directory')
+ end
+ end
+
context 'with a new target branch' do
before do
first('.add-to-tree').click
diff --git a/spec/features/projects/files/user_creates_files_spec.rb b/spec/features/projects/files/user_creates_files_spec.rb
index eb9a4d8cb09..2d4f22e299e 100644
--- a/spec/features/projects/files/user_creates_files_spec.rb
+++ b/spec/features/projects/files/user_creates_files_spec.rb
@@ -14,7 +14,7 @@ describe 'Projects > Files > User creates files', :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(web_ide_default: false)
+ stub_feature_flags(web_ide_default: false, code_navigation: false)
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/projects/files/user_deletes_files_spec.rb b/spec/features/projects/files/user_deletes_files_spec.rb
index 0f543e47631..5e36407d9cb 100644
--- a/spec/features/projects/files/user_deletes_files_spec.rb
+++ b/spec/features/projects/files/user_deletes_files_spec.rb
@@ -14,6 +14,8 @@ describe 'Projects > Files > User deletes files', :js do
let(:user) { create(:user) }
before do
+ stub_feature_flags(code_navigation: false)
+
sign_in(user)
end
diff --git a/spec/features/projects/files/user_replaces_files_spec.rb b/spec/features/projects/files/user_replaces_files_spec.rb
index 4c54bbdcd67..e1eefdcc40f 100644
--- a/spec/features/projects/files/user_replaces_files_spec.rb
+++ b/spec/features/projects/files/user_replaces_files_spec.rb
@@ -16,6 +16,8 @@ describe 'Projects > Files > User replaces files', :js do
let(:user) { create(:user) }
before do
+ stub_feature_flags(code_navigation: false)
+
sign_in(user)
end
diff --git a/spec/features/projects/graph_spec.rb b/spec/features/projects/graph_spec.rb
index 5dabaf20952..6b2a9a6b852 100644
--- a/spec/features/projects/graph_spec.rb
+++ b/spec/features/projects/graph_spec.rb
@@ -22,20 +22,12 @@ describe 'Project Graph', :js do
end
end
- shared_examples 'page should have languages graphs' do
- it 'renders languages' do
- expect(page).to have_content(/Ruby 66.* %/)
- expect(page).to have_content(/JavaScript 22.* %/)
- end
- end
-
context 'commits graph' do
before do
visit commits_project_graph_path(project, 'master')
end
it_behaves_like 'page should have commits graphs'
- it_behaves_like 'page should have languages graphs'
end
context 'languages graph' do
@@ -44,7 +36,6 @@ describe 'Project Graph', :js do
end
it_behaves_like 'page should have commits graphs'
- it_behaves_like 'page should have languages graphs'
end
context 'charts graph' do
@@ -53,14 +44,13 @@ describe 'Project Graph', :js do
end
it_behaves_like 'page should have commits graphs'
- it_behaves_like 'page should have languages graphs'
end
context 'chart graph with HTML escaped branch name' do
let(:branch_name) { '<h1>evil</h1>' }
before do
- project.repository.create_branch(branch_name, 'master')
+ project.repository.create_branch(branch_name)
visit charts_project_graph_path(project, branch_name)
end
@@ -85,7 +75,7 @@ describe 'Project Graph', :js do
expect(page).to have_content 'Pipelines for last week'
expect(page).to have_content 'Pipelines for last month'
expect(page).to have_content 'Pipelines for last year'
- expect(page).to have_content 'Commit duration in minutes for last 30 commits'
+ expect(page).to have_content 'Duration for the last 30 commits'
end
end
end
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index f9ff076a416..a17793bc6d6 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -346,7 +346,7 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do
artifact_request = requests.find { |req| req.url.match(%r{artifacts/download}) }
- expect(artifact_request.response_headers["Content-Disposition"]).to eq(%Q{attachment; filename*=UTF-8''#{job.artifacts_file.filename}; filename="#{job.artifacts_file.filename}"})
+ expect(artifact_request.response_headers['Content-Disposition']).to eq(%Q{attachment; filename="#{job.artifacts_file.filename}"; filename*=UTF-8''#{job.artifacts_file.filename}})
expect(artifact_request.response_headers['Content-Transfer-Encoding']).to eq("binary")
expect(artifact_request.response_headers['Content-Type']).to eq("image/gif")
expect(artifact_request.body).to eq(job.artifacts_file.file.read.b)
diff --git a/spec/features/projects/members/group_member_cannot_request_access_to_his_group_project_spec.rb b/spec/features/projects/members/group_member_cannot_request_access_to_his_group_project_spec.rb
index 1f4d555c6ae..cf9441bcd55 100644
--- a/spec/features/projects/members/group_member_cannot_request_access_to_his_group_project_spec.rb
+++ b/spec/features/projects/members/group_member_cannot_request_access_to_his_group_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'Projects > Members > Group member cannot request access to his group project' do
+describe 'Projects > Members > Group member cannot request access to their group project' do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project) { create(:project, namespace: group) }
diff --git a/spec/features/projects/members/member_cannot_request_access_to_his_project_spec.rb b/spec/features/projects/members/member_cannot_request_access_to_his_project_spec.rb
index 606444b36a2..e2b57472b2e 100644
--- a/spec/features/projects/members/member_cannot_request_access_to_his_project_spec.rb
+++ b/spec/features/projects/members/member_cannot_request_access_to_his_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'Projects > Members > Member cannot request access to his project' do
+describe 'Projects > Members > Member cannot request access to their project' do
let(:member) { create(:user) }
let(:project) { create(:project) }
diff --git a/spec/features/projects/members/owner_cannot_request_access_to_his_project_spec.rb b/spec/features/projects/members/owner_cannot_request_access_to_his_project_spec.rb
index 2fb76da36ad..5643a29b4e4 100644
--- a/spec/features/projects/members/owner_cannot_request_access_to_his_project_spec.rb
+++ b/spec/features/projects/members/owner_cannot_request_access_to_his_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'Projects > Members > Owner cannot request access to his project' do
+describe 'Projects > Members > Owner cannot request access to their own project' do
let(:project) { create(:project) }
before do
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
new file mode 100644
index 00000000000..bcb05e1c718
--- /dev/null
+++ b/spec/features/projects/navbar_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Project navbar' do
+ it_behaves_like 'verified navigation bar' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+
+ let(:structure) do
+ [
+ {
+ nav_item: _('Project overview'),
+ nav_sub_items: [
+ _('Details'),
+ _('Activity'),
+ _('Releases')
+ ]
+ },
+ {
+ nav_item: _('Repository'),
+ nav_sub_items: [
+ _('Files'),
+ _('Commits'),
+ _('Branches'),
+ _('Tags'),
+ _('Contributors'),
+ _('Graph'),
+ _('Compare'),
+ (_('Locked Files') if Gitlab.ee?)
+ ]
+ },
+ {
+ nav_item: _('Issues'),
+ nav_sub_items: [
+ _('List'),
+ _('Boards'),
+ _('Labels'),
+ _('Milestones')
+ ]
+ },
+ {
+ nav_item: _('Merge Requests'),
+ nav_sub_items: []
+ },
+ {
+ nav_item: _('CI / CD'),
+ nav_sub_items: [
+ _('Pipelines'),
+ _('Jobs'),
+ _('Artifacts'),
+ _('Schedules')
+ ]
+ },
+ {
+ nav_item: _('Operations'),
+ nav_sub_items: [
+ _('Metrics'),
+ _('Environments'),
+ _('Error Tracking'),
+ _('Serverless'),
+ _('Kubernetes')
+ ]
+ },
+ {
+ nav_item: _('Analytics'),
+ nav_sub_items: [
+ _('CI / CD Analytics'),
+ (_('Code Review') if Gitlab.ee?),
+ _('Repository Analytics'),
+ _('Value Stream Analytics')
+ ]
+ },
+ {
+ nav_item: _('Wiki'),
+ nav_sub_items: []
+ },
+ {
+ nav_item: _('Snippets'),
+ nav_sub_items: []
+ },
+ {
+ nav_item: _('Settings'),
+ nav_sub_items: [
+ _('General'),
+ _('Members'),
+ _('Integrations'),
+ _('Repository'),
+ _('CI / CD'),
+ _('Operations'),
+ (_('Audit Events') if Gitlab.ee?)
+ ].compact
+ }
+ ]
+ end
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+
+ visit project_path(project)
+ end
+ end
+end
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 198af65c361..63c0695fe95 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -99,12 +99,74 @@ describe 'Pipeline', :js do
end
end
- it 'shows links to the related merge requests' do
- visit_pipeline
+ describe 'related merge requests' do
+ context 'when there are no related merge requests' do
+ it 'shows a "no related merge requests" message' do
+ visit_pipeline
+
+ within '.related-merge-request-info' do
+ expect(page).to have_content('No related merge requests found.')
+ end
+ end
+ end
+
+ context 'when there is one related merge request' do
+ before do
+ create(:merge_request,
+ source_project: project,
+ source_branch: pipeline.ref)
+ end
+
+ it 'shows a link to the merge request' do
+ visit_pipeline
- within '.related-merge-request-info' do
- pipeline.all_merge_requests.map do |merge_request|
- expect(page).to have_link(project_merge_request_path(project, merge_request))
+ within '.related-merge-requests' do
+ expect(page).to have_content('1 related merge request: ')
+ expect(page).to have_selector('.js-truncated-mr-list')
+ expect(page).to have_link('!1 My title 1')
+
+ expect(page).not_to have_selector('.js-full-mr-list')
+ expect(page).not_to have_selector('.text-expander')
+ end
+ end
+ end
+
+ context 'when there are two related merge requests' do
+ before do
+ create(:merge_request,
+ source_project: project,
+ source_branch: pipeline.ref,
+ target_branch: 'feature-1')
+
+ create(:merge_request,
+ source_project: project,
+ source_branch: pipeline.ref,
+ target_branch: 'feature-2')
+ end
+
+ it 'links to the most recent related merge request' do
+ visit_pipeline
+
+ within '.related-merge-requests' do
+ expect(page).to have_content('2 related merge requests: ')
+ expect(page).to have_link('!2 My title 3')
+ expect(page).to have_selector('.text-expander')
+ expect(page).to have_selector('.js-full-mr-list', visible: false)
+ end
+ end
+
+ it 'expands to show links to all related merge requests' do
+ visit_pipeline
+
+ within '.related-merge-requests' do
+ find('.text-expander').click
+
+ expect(page).to have_selector('.js-full-mr-list', visible: true)
+
+ pipeline.all_merge_requests.map do |merge_request|
+ expect(page).to have_link(href: project_merge_request_path(project, merge_request))
+ end
+ end
end
end
end
@@ -217,7 +279,7 @@ describe 'Pipeline', :js do
it 'includes the failure reason' do
page.within('#ci-badge-test') do
build_link = page.find('.js-pipeline-graph-job-link')
- expect(build_link['data-original-title']).to eq('test - failed - (unknown failure)')
+ expect(build_link['title']).to eq('test - failed - (unknown failure)')
end
end
end
@@ -294,6 +356,32 @@ describe 'Pipeline', :js do
end
end
+ context 'test tabs' do
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
+
+ before do
+ visit_pipeline
+ wait_for_requests
+ end
+
+ it 'shows badge counter in Tests tab' do
+ expect(pipeline.test_reports.total_count).to eq(4)
+ expect(page.find('.js-test-report-badge-counter').text).to eq(pipeline.test_reports.total_count.to_s)
+ end
+
+ it 'does not call test_report.json endpoint by default', :js do
+ expect(page).to have_selector('.js-no-tests-to-show', visible: :all)
+ end
+
+ it 'does call test_report.json endpoint when tab is selected', :js do
+ find('.js-tests-tab-link').click
+ wait_for_requests
+
+ expect(page).to have_content('Test suites')
+ expect(page).to have_selector('.js-tests-detail', visible: :all)
+ end
+ end
+
context 'retrying jobs' do
before do
visit_pipeline
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index b4c9eb7ebec..7634100347e 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -270,7 +270,7 @@ describe 'Pipelines', :js do
it 'contains badge with tooltip which contains error' do
expect(pipeline).to have_yaml_errors
expect(page).to have_selector(
- %Q{span[data-original-title="#{pipeline.yaml_errors}"]})
+ %Q{span[title="#{pipeline.yaml_errors}"]})
end
it 'contains badge that indicates failure reason' do
@@ -280,7 +280,7 @@ describe 'Pipelines', :js do
it 'contains badge with tooltip which contains failure reason' do
expect(pipeline.failure_reason?).to eq true
expect(page).to have_selector(
- %Q{span[data-original-title="#{pipeline.present.failure_reason}"]})
+ %Q{span[title="#{pipeline.present.failure_reason}"]})
end
end
@@ -569,7 +569,7 @@ describe 'Pipelines', :js do
within('.js-builds-dropdown-list') do
build_element = page.find('.mini-pipeline-graph-dropdown-item')
- expect(build_element['data-original-title']).to eq('build - failed - (unknown failure)')
+ expect(build_element['title']).to eq('build - failed - (unknown failure)')
end
end
end
diff --git a/spec/features/projects/serverless/functions_spec.rb b/spec/features/projects/serverless/functions_spec.rb
index c661ceb8eda..e494a0e9626 100644
--- a/spec/features/projects/serverless/functions_spec.rb
+++ b/spec/features/projects/serverless/functions_spec.rb
@@ -16,7 +16,12 @@ describe 'Functions', :js do
shared_examples "it's missing knative installation" do
before do
+ functions_finder = Projects::Serverless::FunctionsFinder.new(project)
visit project_serverless_functions_path(project)
+ allow(Projects::Serverless::FunctionsFinder)
+ .to receive(:new)
+ .and_return(functions_finder)
+ synchronous_reactive_cache(functions_finder)
end
it 'sees an empty state require Knative installation' do
diff --git a/spec/features/projects/services/user_activates_alerts_spec.rb b/spec/features/projects/services/user_activates_alerts_spec.rb
new file mode 100644
index 00000000000..47de7fab859
--- /dev/null
+++ b/spec/features/projects/services/user_activates_alerts_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'User activates Alerts', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:service_name) { 'alerts' }
+ let(:service_title) { 'Alerts endpoint' }
+
+ before do
+ sign_in(user)
+ project.add_maintainer(user)
+ end
+
+ context 'when service is deactivated' do
+ it 'activates service' do
+ visit_project_services
+
+ expect(page).to have_link(service_title)
+ click_link(service_title)
+
+ expect(page).not_to have_active_service
+
+ click_activate_service
+ wait_for_requests
+
+ expect(page).to have_active_service
+ end
+ end
+
+ context 'when service is activated' do
+ before do
+ visit_alerts_service
+ click_activate_service
+ end
+
+ it 're-generates key' do
+ expect(reset_key.value).to be_blank
+
+ click_reset_key
+ click_confirm_reset_key
+ wait_for_requests
+
+ expect(reset_key.value).to be_present
+ end
+ end
+
+ private
+
+ def visit_project_services
+ visit(project_settings_integrations_path(project))
+ end
+
+ def visit_alerts_service
+ visit(edit_project_service_path(project, service_name))
+ end
+
+ def click_activate_service
+ find('#activated').click
+ end
+
+ def click_reset_key
+ click_button('Reset key')
+ end
+
+ def click_confirm_reset_key
+ within '.modal-content' do
+ click_reset_key
+ end
+ end
+
+ def reset_key
+ find_field('Authorization key')
+ end
+
+ def have_active_service
+ have_selector('.js-service-active-status[data-value="true"]')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_issue_tracker_spec.rb b/spec/features/projects/services/user_activates_issue_tracker_spec.rb
index 5f3bb794b48..0b0a3362043 100644
--- a/spec/features/projects/services/user_activates_issue_tracker_spec.rb
+++ b/spec/features/projects/services/user_activates_issue_tracker_spec.rb
@@ -8,16 +8,15 @@ describe 'User activates issue tracker', :js do
let(:url) { 'http://tracker.example.com' }
- def fill_short_form(active = true)
- check 'Active' if active
+ def fill_short_form(disabled: false)
+ uncheck 'Active' if disabled
fill_in 'service_project_url', with: url
fill_in 'service_issues_url', with: "#{url}/:id"
end
- def fill_full_form(active = true)
- fill_short_form(active)
- check 'Active' if active
+ def fill_full_form(disabled: false)
+ fill_short_form(disabled: disabled)
fill_in 'service_new_issue_url', with: url
end
@@ -86,14 +85,14 @@ describe 'User activates issue tracker', :js do
end
end
- describe 'user sets the service but keeps it disabled' do
+ describe 'user disables the service' do
before do
click_link(tracker)
if skip_new_issue_url
- fill_short_form(false)
+ fill_short_form(disabled: true)
else
- fill_full_form(false)
+ fill_full_form(disabled: true)
end
click_button('Save changes')
diff --git a/spec/features/projects/services/user_activates_jira_spec.rb b/spec/features/projects/services/user_activates_jira_spec.rb
index 7847b7d5177..557615f8872 100644
--- a/spec/features/projects/services/user_activates_jira_spec.rb
+++ b/spec/features/projects/services/user_activates_jira_spec.rb
@@ -9,8 +9,8 @@ describe 'User activates Jira', :js do
let(:url) { 'http://jira.example.com' }
let(:test_url) { 'http://jira.example.com/rest/api/2/serverInfo' }
- def fill_form(active = true)
- check 'Active' if active
+ def fill_form(disabled: false)
+ uncheck 'Active' if disabled
fill_in 'service_url', with: url
fill_in 'service_username', with: 'username'
@@ -83,10 +83,10 @@ describe 'User activates Jira', :js do
end
end
- describe 'user sets Jira Service but keeps it disabled' do
+ describe 'user disables the Jira Service' do
before do
click_link('Jira')
- fill_form(false)
+ fill_form(disabled: true)
click_button('Save changes')
end
diff --git a/spec/features/projects/services/user_activates_prometheus_spec.rb b/spec/features/projects/services/user_activates_prometheus_spec.rb
index a83d3e2e8be..ab372d532aa 100644
--- a/spec/features/projects/services/user_activates_prometheus_spec.rb
+++ b/spec/features/projects/services/user_activates_prometheus_spec.rb
@@ -15,11 +15,12 @@ describe 'User activates Prometheus' do
click_link('Prometheus')
end
- it 'activates service' do
+ it 'does not activate service and informs about deprecation' do
check('Active')
fill_in('API URL', with: 'http://prometheus.example.com')
click_button('Save changes')
- expect(page).to have_content('Prometheus activated.')
+ expect(page).not_to have_content('Prometheus activated.')
+ expect(page).to have_content('Fields on this page has been deprecated.')
end
end
diff --git a/spec/features/projects/services/user_activates_youtrack_spec.rb b/spec/features/projects/services/user_activates_youtrack_spec.rb
index 8fdeddfdfb4..2f6aad1d736 100644
--- a/spec/features/projects/services/user_activates_youtrack_spec.rb
+++ b/spec/features/projects/services/user_activates_youtrack_spec.rb
@@ -8,8 +8,8 @@ describe 'User activates issue tracker', :js do
let(:url) { 'http://tracker.example.com' }
- def fill_form(active = true)
- check 'Active' if active
+ def fill_form(disabled: false)
+ uncheck 'Active' if disabled
fill_in 'service_project_url', with: url
fill_in 'service_issues_url', with: "#{url}/:id"
@@ -67,10 +67,10 @@ describe 'User activates issue tracker', :js do
end
end
- describe 'user sets the service but keeps it disabled' do
+ describe 'user disables the service' do
before do
click_link(tracker)
- fill_form(false)
+ fill_form(disabled: true)
click_button('Save changes')
end
diff --git a/spec/features/projects/settings/operations_settings_spec.rb b/spec/features/projects/settings/operations_settings_spec.rb
index 9bbeb0eb260..d57401471ff 100644
--- a/spec/features/projects/settings/operations_settings_spec.rb
+++ b/spec/features/projects/settings/operations_settings_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe 'Projects > Settings > For a forked project', :js do
let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
+ let(:project) { create(:project, :repository, create_templates: :issue) }
let(:role) { :maintainer }
before do
@@ -22,6 +22,54 @@ describe 'Projects > Settings > For a forked project', :js do
end
describe 'Settings > Operations' do
+ describe 'Incidents' do
+ let(:create_issue) { 'Create an issue. Issues are created for each alert triggered.' }
+ let(:send_email) { 'Send a separate email notification to Developers.' }
+
+ before do
+ create(:project_incident_management_setting, send_email: true, project: project)
+ visit project_settings_operations_path(project)
+
+ wait_for_requests
+ click_expand_incident_management_button
+ end
+
+ it 'renders form for incident management' do
+ expect(page).to have_selector('h4', text: 'Incidents')
+ end
+
+ it 'sets correct default values' do
+ expect(find_field(create_issue)).not_to be_checked
+ expect(find_field(send_email)).to be_checked
+ end
+
+ it 'updates form values' do
+ check(create_issue)
+ template_select = find_field('Issue template')
+ template_select.find(:xpath, 'option[2]').select_option
+ uncheck(send_email)
+
+ save_form
+ click_expand_incident_management_button
+
+ expect(find_field(create_issue)).to be_checked
+ expect(page).to have_select('Issue template', selected: 'bug')
+ expect(find_field(send_email)).not_to be_checked
+ end
+
+ def click_expand_incident_management_button
+ within '.qa-incident-management-settings' do
+ click_button('Expand')
+ end
+ end
+
+ def save_form
+ page.within "#edit_project_#{project.id}" do
+ click_on 'Save changes'
+ end
+ end
+ end
+
context 'error tracking settings form' do
let(:sentry_list_projects_url) { 'http://sentry.example.com/api/0/projects/' }
@@ -61,7 +109,7 @@ describe 'Projects > Settings > For a forked project', :js do
within('div#project-dropdown') do
click_button('Select project')
- click_button('Sentry | Internal')
+ click_button('Sentry | internal')
end
click_button('Save changes')
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
index 86da866a927..fc1a85c3efe 100644
--- a/spec/features/projects/settings/registry_settings_spec.rb
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -4,34 +4,62 @@ require 'spec_helper'
describe 'Project > Settings > CI/CD > Container registry tag expiration policy', :js do
let(:user) { create(:user) }
- let(:project) { create(:project, namespace: user.namespace) }
+ let(:project) { create(:project, namespace: user.namespace, container_registry_enabled: container_registry_enabled) }
+ let(:container_registry_enabled) { true }
+
+ before do
+ sign_in(user)
+ stub_container_registry_config(enabled: true)
+ end
context 'as owner' do
before do
- sign_in(user)
visit project_settings_ci_cd_path(project)
end
- it 'section is available' do
+ it 'shows available section' do
settings_block = find('#js-registry-policies')
expect(settings_block).to have_text 'Container Registry tag expiration policy'
end
- it 'Save expiration policy submit the form', :js do
+ it 'saves the expiration policy when the form is submitted' do
within '#js-registry-policies' do
within '.card-body' do
- click_button(class: 'gl-toggle')
- select('7 days until tags are automatically removed', from: 'expiration-policy-interval')
- select('Every day', from: 'expiration-policy-schedule')
- select('50 tags per image name', from: 'expiration-policy-latest')
- fill_in('expiration-policy-name-matching', with: '*-production')
+ find('.gl-toggle-wrapper button:not(.is-disabled)').click
+ select('7 days until tags are automatically removed', from: 'Expiration interval:')
+ select('Every day', from: 'Expiration schedule:')
+ select('50 tags per image name', from: 'Number of tags to retain:')
+ fill_in('Docker tags with names matching this regex pattern will expire:', with: '*-production')
end
submit_button = find('.card-footer .btn.btn-success')
expect(submit_button).not_to be_disabled
submit_button.click
end
- flash_text = find('.flash-text')
- expect(flash_text).to have_content('Expiration policy successfully saved.')
+ toast = find('.gl-toast')
+ expect(toast).to have_content('Expiration policy successfully saved.')
+ end
+ end
+
+ context 'when registry is disabled' do
+ before do
+ stub_container_registry_config(enabled: false)
+ visit project_settings_ci_cd_path(project)
+ end
+
+ it 'does not exist' do
+ expect(page).not_to have_selector('#js-registry-policies')
+ end
+ end
+
+ context 'when container registry is disabled on project' do
+ let(:container_registry_enabled) { false }
+
+ before do
+ visit project_settings_ci_cd_path(project)
+ end
+
+ it 'does not exist' do
+ expect(page).not_to have_selector('#js-registry-policies')
end
end
end
diff --git a/spec/features/projects/show/user_sees_collaboration_links_spec.rb b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
index ff133b58f89..63fcec4f9b3 100644
--- a/spec/features/projects/show/user_sees_collaboration_links_spec.rb
+++ b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
@@ -3,66 +3,96 @@
require 'spec_helper'
describe 'Projects > Show > Collaboration links', :js do
- let(:project) { create(:project, :repository) }
+ using RSpec::Parameterized::TableSyntax
+
+ let(:project) { create(:project, :repository, :public) }
let(:user) { create(:user) }
before do
- project.add_developer(user)
sign_in(user)
end
- it 'shows all the expected links' do
- visit project_path(project)
+ context 'with developer user' do
+ before do
+ project.add_developer(user)
+ end
- # The navigation bar
- page.within('.header-new') do
- find('.qa-new-menu-toggle').click
+ it 'shows all the expected links' do
+ visit project_path(project)
- aggregate_failures 'dropdown links in the navigation bar' do
- expect(page).to have_link('New issue')
- expect(page).to have_link('New merge request')
- expect(page).to have_link('New snippet', href: new_project_snippet_path(project))
- end
+ # The navigation bar
+ page.within('.header-new') do
+ find('.qa-new-menu-toggle').click
- find('.qa-new-menu-toggle').click
- end
+ aggregate_failures 'dropdown links in the navigation bar' do
+ expect(page).to have_link('New issue')
+ expect(page).to have_link('New merge request')
+ expect(page).to have_link('New snippet', href: new_project_snippet_path(project))
+ end
- # The dropdown above the tree
- page.within('.repo-breadcrumb') do
- find('.qa-add-to-tree').click
+ find('.qa-new-menu-toggle').click
+ end
- aggregate_failures 'dropdown links above the repo tree' do
- expect(page).to have_link('New file')
- expect(page).to have_link('Upload file')
- expect(page).to have_link('New directory')
- expect(page).to have_link('New branch')
- expect(page).to have_link('New tag')
+ # The dropdown above the tree
+ page.within('.repo-breadcrumb') do
+ find('.qa-add-to-tree').click
+
+ aggregate_failures 'dropdown links above the repo tree' do
+ expect(page).to have_link('New file')
+ expect(page).to have_link('Upload file')
+ expect(page).to have_link('New directory')
+ expect(page).to have_link('New branch')
+ expect(page).to have_link('New tag')
+ end
end
+
+ # The Web IDE
+ expect(page).to have_link('Web IDE')
end
- # The Web IDE
- expect(page).to have_link('Web IDE')
- end
+ it 'hides the links when the project is archived' do
+ project.update!(archived: true)
- it 'hides the links when the project is archived' do
- project.update!(archived: true)
+ visit project_path(project)
- visit project_path(project)
+ page.within('.header-new') do
+ find('.qa-new-menu-toggle').click
- page.within('.header-new') do
- find('.qa-new-menu-toggle').click
+ aggregate_failures 'dropdown links' do
+ expect(page).not_to have_link('New issue')
+ expect(page).not_to have_link('New merge request')
+ expect(page).not_to have_link('New snippet', href: new_project_snippet_path(project))
+ end
- aggregate_failures 'dropdown links' do
- expect(page).not_to have_link('New issue')
- expect(page).not_to have_link('New merge request')
- expect(page).not_to have_link('New snippet', href: new_project_snippet_path(project))
+ find('.qa-new-menu-toggle').click
end
- find('.qa-new-menu-toggle').click
+ expect(page).not_to have_selector('.qa-add-to-tree')
+
+ expect(page).not_to have_link('Web IDE')
end
+ end
- expect(page).not_to have_selector('.qa-add-to-tree')
+ context "Web IDE link" do
+ where(:merge_requests_access_level, :user_level, :expect_ide_link) do
+ ::ProjectFeature::DISABLED | :guest | false
+ ::ProjectFeature::DISABLED | :developer | true
+ ::ProjectFeature::PRIVATE | :guest | false
+ ::ProjectFeature::PRIVATE | :developer | true
+ ::ProjectFeature::ENABLED | :guest | true
+ ::ProjectFeature::ENABLED | :developer | true
+ end
- expect(page).not_to have_link('Web IDE')
+ with_them do
+ before do
+ project.project_feature.update!({ merge_requests_access_level: merge_requests_access_level })
+ project.add_user(user, user_level)
+ visit project_path(project)
+ end
+
+ it "updates Web IDE link" do
+ expect(page.has_link?('Web IDE')).to be(expect_ide_link)
+ end
+ end
end
end
diff --git a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
index 41c3c6b5770..8e20facda15 100644
--- a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
+++ b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
@@ -33,7 +33,7 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
expect(page).not_to have_link('Enable Auto DevOps')
expect(page).not_to have_link('Auto DevOps enabled')
expect(page).not_to have_link('Add Kubernetes cluster')
- expect(page).not_to have_link('Kubernetes configured')
+ expect(page).not_to have_link('Kubernetes')
end
end
end
@@ -100,7 +100,7 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
it 'no Kubernetes cluster button if can not manage clusters' do
page.within('.project-buttons') do
expect(page).not_to have_link('Add Kubernetes cluster')
- expect(page).not_to have_link('Kubernetes configured')
+ expect(page).not_to have_link('Kubernetes')
end
end
end
@@ -308,7 +308,7 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
visit project_path(project)
page.within('.project-buttons') do
- expect(page).to have_link('Kubernetes configured', href: project_cluster_path(project, cluster))
+ expect(page).to have_link('Kubernetes', href: project_cluster_path(project, cluster))
end
end
end
diff --git a/spec/features/projects/snippets/create_snippet_spec.rb b/spec/features/projects/snippets/create_snippet_spec.rb
index 94af023e804..5a425fb5d27 100644
--- a/spec/features/projects/snippets/create_snippet_spec.rb
+++ b/spec/features/projects/snippets/create_snippet_spec.rb
@@ -8,9 +8,17 @@ describe 'Projects > Snippets > Create Snippet', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
+ def description_field
+ find('.js-description-input input,textarea')
+ end
+
def fill_form
fill_in 'project_snippet_title', with: 'My Snippet Title'
+
+ # Click placeholder first to expand full description field
+ description_field.click
fill_in 'project_snippet_description', with: 'My Snippet **Description**'
+
page.within('.file-editor') do
find('.ace_text-input', visible: false).send_keys('Hello World!')
end
@@ -27,6 +35,18 @@ describe 'Projects > Snippets > Create Snippet', :js do
click_on('New snippet')
end
+ it 'shows collapsible description input' do
+ collapsed = description_field
+
+ expect(page).not_to have_field('project_snippet_description')
+ expect(collapsed).to be_visible
+
+ collapsed.click
+
+ expect(page).to have_field('project_snippet_description')
+ expect(collapsed).not_to be_visible
+ end
+
it 'creates a new snippet' do
fill_form
click_button('Create snippet')
diff --git a/spec/features/projects/snippets/user_views_snippets_spec.rb b/spec/features/projects/snippets/user_views_snippets_spec.rb
index 5739c9510a8..22910029ee5 100644
--- a/spec/features/projects/snippets/user_views_snippets_spec.rb
+++ b/spec/features/projects/snippets/user_views_snippets_spec.rb
@@ -3,32 +3,107 @@
require 'spec_helper'
describe 'Projects > Snippets > User views snippets' do
- let(:project) { create(:project) }
- let!(:project_snippet) { create(:project_snippet, project: project, author: user) }
- let!(:snippet) { create(:snippet, author: user) }
- let(:snippets) { [project_snippet, snippet] } # Used by the shared examples
+ let_it_be(:project) { create(:project) }
let(:user) { create(:user) }
- before do
- project.add_maintainer(user)
- sign_in(user)
-
+ def visit_project_snippets
visit(project_snippets_path(project))
end
- context 'pagination' do
+ context 'snippets list' do
+ let!(:project_snippet) { create(:project_snippet, project: project, author: user) }
+ let!(:snippet) { create(:snippet, author: user) }
+ let(:snippets) { [project_snippet, snippet] } # Used by the shared examples
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ context 'pagination' do
+ before do
+ create(:project_snippet, project: project, author: user)
+ allow(Snippet).to receive(:default_per_page).and_return(1)
+
+ visit_project_snippets
+ end
+
+ it_behaves_like 'paginated snippets'
+ end
+
+ context 'filtering by visibility' do
+ before do
+ visit_project_snippets
+ end
+
+ it_behaves_like 'tabs with counts' do
+ let_it_be(:counts) { { all: '1', public: '0', private: '1', internal: '0' } }
+ end
+ end
+
+ it 'shows snippets' do
+ visit_project_snippets
+
+ expect(page).to have_link(project_snippet.title, href: project_snippet_path(project, project_snippet))
+ expect(page).not_to have_content(snippet.title)
+ end
+ end
+
+ context 'when current user is a guest' do
before do
- create(:project_snippet, project: project, author: user)
- allow(Snippet).to receive(:default_per_page).and_return(1)
+ project.add_guest(user)
+ sign_in(user)
+ end
- visit project_snippets_path(project)
+ context 'when snippets list is empty' do
+ it 'hides New Snippet button' do
+ visit_project_snippets
+
+ page.within(find('.empty-state')) do
+ expect(page).not_to have_link('New snippet')
+ end
+ end
end
- it_behaves_like 'paginated snippets'
+ context 'when project has snippets' do
+ let!(:project_snippet) { create(:project_snippet, project: project, author: user) }
+
+ it 'hides New Snippet button' do
+ visit_project_snippets
+
+ page.within(find('.top-area')) do
+ expect(page).not_to have_link('New snippet')
+ end
+ end
+ end
end
- it 'shows snippets' do
- expect(page).to have_link(project_snippet.title, href: project_snippet_path(project, project_snippet))
- expect(page).not_to have_content(snippet.title)
+ context 'when current user is not a guest' do
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+ context 'when snippets list is empty' do
+ it 'shows New Snippet button' do
+ visit_project_snippets
+
+ page.within(find('.empty-state')) do
+ expect(page).to have_link('New snippet')
+ end
+ end
+ end
+
+ context 'when project has snippets' do
+ let!(:project_snippet) { create(:project_snippet, project: project, author: user) }
+
+ it 'shows New Snippet button' do
+ visit_project_snippets
+
+ page.within(find('.top-area')) do
+ expect(page).to have_link('New snippet')
+ end
+ end
+ end
end
end
diff --git a/spec/features/projects/tags/user_views_tags_spec.rb b/spec/features/projects/tags/user_views_tags_spec.rb
index bc570f502bf..7b49b0d0f65 100644
--- a/spec/features/projects/tags/user_views_tags_spec.rb
+++ b/spec/features/projects/tags/user_views_tags_spec.rb
@@ -7,7 +7,7 @@ describe 'User views tags', :feature do
it do
visit project_tags_path(project, format: :atom)
- expect(page).to have_gitlab_http_status(200)
+ expect(page).to have_gitlab_http_status(:ok)
end
end
diff --git a/spec/features/projects/tree/create_directory_spec.rb b/spec/features/projects/tree/create_directory_spec.rb
index 7e0ee861b18..829b01832df 100644
--- a/spec/features/projects/tree/create_directory_spec.rb
+++ b/spec/features/projects/tree/create_directory_spec.rb
@@ -46,6 +46,13 @@ describe 'Multi-file editor new directory', :js do
find('.js-ide-commit-mode').click
+ # Compact mode depends on the size of the window. If it is shorter than MAX_WINDOW_HEIGHT_COMPACT
+ # (as it is with CHROME_HEADLESS=0), this initial commit button will exist. Otherwise, if it is
+ # taller (as it is by default with Chrome headless), the button will not exist.
+ if page.has_css?('.qa-begin-commit-button')
+ find('.qa-begin-commit-button').click
+ end
+
fill_in('commit-message', with: 'commit message ide')
find(:css, ".js-ide-commit-new-mr input").set(false)
diff --git a/spec/features/projects/tree/create_file_spec.rb b/spec/features/projects/tree/create_file_spec.rb
index eba33168006..58ff623c9ae 100644
--- a/spec/features/projects/tree/create_file_spec.rb
+++ b/spec/features/projects/tree/create_file_spec.rb
@@ -36,6 +36,13 @@ describe 'Multi-file editor new file', :js do
find('.js-ide-commit-mode').click
+ # Compact mode depends on the size of the window. If it is shorter than MAX_WINDOW_HEIGHT_COMPACT
+ # (as it is with CHROME_HEADLESS=0), this initial commit button will exist. Otherwise, if it is
+ # taller (as it is by default with Chrome headless), the button will not exist.
+ if page.has_css?('.qa-begin-commit-button')
+ find('.qa-begin-commit-button').click
+ end
+
fill_in('commit-message', with: 'commit message ide')
find(:css, ".js-ide-commit-new-mr input").set(false)
diff --git a/spec/features/projects/user_sees_user_popover_spec.rb b/spec/features/projects/user_sees_user_popover_spec.rb
new file mode 100644
index 00000000000..adbf9073d59
--- /dev/null
+++ b/spec/features/projects/user_sees_user_popover_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'User sees user popover', :js do
+ set(:project) { create(:project, :repository) }
+
+ let(:user) { project.creator }
+ let(:merge_request) do
+ create(:merge_request, source_project: project, target_project: project)
+ end
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ subject { page }
+
+ describe 'hovering over a user link in a merge request' do
+ before do
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'displays user popover' do
+ popover_selector = '.user-popover'
+
+ find('.js-user-link').hover
+
+ expect(page).to have_css(popover_selector, visible: true)
+
+ page.within(popover_selector) do
+ expect(page).to have_content(user.name)
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/user_uses_shortcuts_spec.rb b/spec/features/projects/user_uses_shortcuts_spec.rb
index ff24730acef..beed1c07e51 100644
--- a/spec/features/projects/user_uses_shortcuts_spec.rb
+++ b/spec/features/projects/user_uses_shortcuts_spec.rb
@@ -7,6 +7,8 @@ describe 'User uses shortcuts', :js do
let(:user) { create(:user) }
before do
+ stub_feature_flags(analytics_pages_under_project_analytics_sidebar: { enabled: false, thing: project })
+
project.add_maintainer(user)
sign_in(user)
@@ -15,6 +17,59 @@ describe 'User uses shortcuts', :js do
wait_for_requests
end
+ context 'disabling shortcuts' do
+ before do
+ page.evaluate_script("localStorage.removeItem('shortcutsDisabled')")
+ end
+
+ it 'can disable shortcuts from help menu' do
+ open_modal_shortcut_keys
+ click_toggle_button
+ close_modal
+
+ open_modal_shortcut_keys
+
+ # modal-shortcuts still in the DOM, but hidden
+ expect(find('#modal-shortcuts', visible: false)).not_to be_visible
+
+ page.refresh
+ open_modal_shortcut_keys
+
+ # after reload, shortcuts modal doesn't exist at all until we add it
+ expect(page).not_to have_selector('#modal-shortcuts')
+ end
+
+ it 're-enables shortcuts' do
+ open_modal_shortcut_keys
+ click_toggle_button
+ close_modal
+
+ open_modal_from_help_menu
+ click_toggle_button
+ close_modal
+
+ open_modal_shortcut_keys
+ expect(find('#modal-shortcuts')).to be_visible
+ end
+
+ def open_modal_shortcut_keys
+ find('body').native.send_key('?')
+ end
+
+ def open_modal_from_help_menu
+ find('.header-help-dropdown-toggle').click
+ find('button', text: 'Keyboard shortcuts').click
+ end
+
+ def click_toggle_button
+ find('.js-toggle-shortcuts .gl-toggle').click
+ end
+
+ def close_modal
+ find('.modal button[aria-label="Close"]').click
+ end
+ end
+
context 'when navigating to the Project pages' do
it 'redirects to the details page' do
visit project_issues_path(project)
@@ -156,4 +211,18 @@ describe 'User uses shortcuts', :js do
expect(page).to have_active_navigation('Wiki')
end
end
+
+ context 'when `analytics_pages_under_project_analytics_sidebar` feature flag is enabled' do
+ before do
+ stub_feature_flags(analytics_pages_under_project_analytics_sidebar: { enabled: true, thing: project })
+ end
+
+ it 'redirects to the repository charts page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('d')
+
+ expect(page).to have_active_navigation(_('Analytics'))
+ expect(page).to have_active_sub_navigation(_('Repository Analytics'))
+ end
+ end
end
diff --git a/spec/features/projects/view_on_env_spec.rb b/spec/features/projects/view_on_env_spec.rb
index 8b25565c08a..845c7b89a71 100644
--- a/spec/features/projects/view_on_env_spec.rb
+++ b/spec/features/projects/view_on_env_spec.rb
@@ -65,7 +65,7 @@ describe 'View on environment', :js do
within '.diffs' do
text = 'View on feature.review.example.com'
url = 'http://feature.review.example.com/ruby/feature'
- expect(page).to have_selector("a[data-original-title='#{text}'][href='#{url}']")
+ expect(page).to have_selector("a[title='#{text}'][href='#{url}']")
end
end
end
diff --git a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
index 3f3711f9eb8..d3a0c9b790b 100644
--- a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
@@ -83,15 +83,15 @@ describe 'User updates wiki page' do
end
it 'updates the commit message as the title is changed', :js do
- fill_in(:wiki_title, with: 'Wiki title')
+ fill_in(:wiki_title, with: '& < > \ \ { } &')
- expect(page).to have_field('wiki[message]', with: 'Update Wiki title')
+ expect(page).to have_field('wiki[message]', with: 'Update & < > \ \ { } &')
end
- it 'does not allow XSS', :js do
- fill_in(:wiki_title, with: '<script>')
+ it 'correctly escapes the commit message entities', :js do
+ fill_in(:wiki_title, with: 'Wiki title')
- expect(page).to have_field('wiki[message]', with: 'Update &lt;script&gt;')
+ expect(page).to have_field('wiki[message]', with: 'Update Wiki title')
end
it 'shows a validation error message' do
diff --git a/spec/features/projects/wiki/user_views_wiki_page_spec.rb b/spec/features/projects/wiki/user_views_wiki_page_spec.rb
index 77e725e7f11..c7856342fb2 100644
--- a/spec/features/projects/wiki/user_views_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_views_wiki_page_spec.rb
@@ -129,6 +129,18 @@ describe 'User views a wiki page' do
end
end
+ context 'when a page has XSS in its message' do
+ before do
+ wiki_page.update(message: '<script>alert(true)<script>', content: 'XSS update')
+ end
+
+ it 'safely displays the message' do
+ visit(project_wiki_history_path(project, wiki_page))
+
+ expect(page).to have_content('<script>alert(true)<script>')
+ end
+ end
+
context 'when page has invalid content encoding' do
let(:content) { (+'whatever').force_encoding('ISO-8859-1') }
diff --git a/spec/features/security/group/internal_access_spec.rb b/spec/features/security/group/internal_access_spec.rb
index 8dd15789cd1..114bc1a1f0c 100644
--- a/spec/features/security/group/internal_access_spec.rb
+++ b/spec/features/security/group/internal_access_spec.rb
@@ -36,7 +36,7 @@ describe 'Internal Group access' do
it { is_expected.to be_denied_for(:visitor) }
end
- describe 'GET /groups/:path/issues' do
+ describe 'GET /groups/:path/-/issues' do
subject { issues_group_path(group) }
it { is_expected.to be_allowed_for(:admin) }
@@ -51,7 +51,7 @@ describe 'Internal Group access' do
it { is_expected.to be_denied_for(:visitor) }
end
- describe 'GET /groups/:path/merge_requests' do
+ describe 'GET /groups/:path/-/merge_requests' do
let(:project) { create(:project, :internal, :repository, group: group) }
subject { merge_requests_group_path(group) }
@@ -68,7 +68,7 @@ describe 'Internal Group access' do
it { is_expected.to be_denied_for(:visitor) }
end
- describe 'GET /groups/:path/group_members' do
+ describe 'GET /groups/:path/-/group_members' do
subject { group_group_members_path(group) }
it { is_expected.to be_allowed_for(:admin) }
@@ -83,7 +83,7 @@ describe 'Internal Group access' do
it { is_expected.to be_denied_for(:visitor) }
end
- describe 'GET /groups/:path/edit' do
+ describe 'GET /groups/:path/-/edit' do
subject { edit_group_path(group) }
it { is_expected.to be_allowed_for(:admin) }
diff --git a/spec/features/security/group/private_access_spec.rb b/spec/features/security/group/private_access_spec.rb
index 0720302b03b..3362b9a9e9e 100644
--- a/spec/features/security/group/private_access_spec.rb
+++ b/spec/features/security/group/private_access_spec.rb
@@ -36,7 +36,7 @@ describe 'Private Group access' do
it { is_expected.to be_denied_for(:visitor) }
end
- describe 'GET /groups/:path/issues' do
+ describe 'GET /groups/:path/-/issues' do
subject { issues_group_path(group) }
it { is_expected.to be_allowed_for(:admin) }
@@ -51,7 +51,7 @@ describe 'Private Group access' do
it { is_expected.to be_denied_for(:visitor) }
end
- describe 'GET /groups/:path/merge_requests' do
+ describe 'GET /groups/:path/-/merge_requests' do
let(:project) { create(:project, :private, :repository, group: group) }
subject { merge_requests_group_path(group) }
@@ -68,7 +68,7 @@ describe 'Private Group access' do
it { is_expected.to be_denied_for(:visitor) }
end
- describe 'GET /groups/:path/group_members' do
+ describe 'GET /groups/:path/-/group_members' do
subject { group_group_members_path(group) }
it { is_expected.to be_allowed_for(:admin) }
@@ -83,7 +83,7 @@ describe 'Private Group access' do
it { is_expected.to be_denied_for(:visitor) }
end
- describe 'GET /groups/:path/edit' do
+ describe 'GET /groups/:path/-/edit' do
subject { edit_group_path(group) }
it { is_expected.to be_allowed_for(:admin) }
diff --git a/spec/features/security/group/public_access_spec.rb b/spec/features/security/group/public_access_spec.rb
index 0a18a0ff812..bf05f276cc6 100644
--- a/spec/features/security/group/public_access_spec.rb
+++ b/spec/features/security/group/public_access_spec.rb
@@ -36,7 +36,7 @@ describe 'Public Group access' do
it { is_expected.to be_allowed_for(:visitor) }
end
- describe 'GET /groups/:path/issues' do
+ describe 'GET /groups/:path/-/issues' do
subject { issues_group_path(group) }
it { is_expected.to be_allowed_for(:admin) }
@@ -51,7 +51,7 @@ describe 'Public Group access' do
it { is_expected.to be_allowed_for(:visitor) }
end
- describe 'GET /groups/:path/merge_requests' do
+ describe 'GET /groups/:path/-/merge_requests' do
let(:project) { create(:project, :public, :repository, group: group) }
subject { merge_requests_group_path(group) }
@@ -68,7 +68,7 @@ describe 'Public Group access' do
it { is_expected.to be_allowed_for(:visitor) }
end
- describe 'GET /groups/:path/group_members' do
+ describe 'GET /groups/:path/-/group_members' do
subject { group_group_members_path(group) }
it { is_expected.to be_allowed_for(:admin) }
@@ -83,7 +83,7 @@ describe 'Public Group access' do
it { is_expected.to be_allowed_for(:visitor) }
end
- describe 'GET /groups/:path/edit' do
+ describe 'GET /groups/:path/-/edit' do
subject { edit_group_path(group) }
it { is_expected.to be_allowed_for(:admin) }
diff --git a/spec/features/security/project/internal_access_spec.rb b/spec/features/security/project/internal_access_spec.rb
index 5c74b566ef0..ed1dbe15d65 100644
--- a/spec/features/security/project/internal_access_spec.rb
+++ b/spec/features/security/project/internal_access_spec.rb
@@ -29,7 +29,7 @@ describe "Internal Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/tree/master" do
+ describe "GET /:project_path/-/tree/master" do
subject { project_tree_path(project, project.repository.root_ref) }
it { is_expected.to be_allowed_for(:admin) }
@@ -43,7 +43,7 @@ describe "Internal Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/commits/master" do
+ describe "GET /:project_path/-/commits/master" do
subject { project_commits_path(project, project.repository.root_ref, limit: 1) }
it { is_expected.to be_allowed_for(:admin) }
@@ -57,7 +57,7 @@ describe "Internal Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/commit/:sha" do
+ describe "GET /:project_path/-/commit/:sha" do
subject { project_commit_path(project, project.repository.commit) }
it { is_expected.to be_allowed_for(:admin) }
@@ -71,7 +71,7 @@ describe "Internal Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/compare" do
+ describe "GET /:project_path/-/compare" do
subject { project_compare_index_path(project) }
it { is_expected.to be_allowed_for(:admin) }
@@ -127,7 +127,7 @@ describe "Internal Project Access" do
it { is_expected.to be_denied_for(:external) }
end
- describe "GET /:project_path/blob" do
+ describe "GET /:project_path/-/blob" do
let(:commit) { project.repository.commit }
subject { project_blob_path(project, File.join(commit.id, '.gitignore')) }
@@ -229,7 +229,7 @@ describe "Internal Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/merge_requests" do
+ describe "GET /:project_path/-/merge_requests" do
subject { project_merge_requests_path(project) }
it { is_expected.to be_allowed_for(:admin) }
@@ -243,7 +243,7 @@ describe "Internal Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/merge_requests/new" do
+ describe "GET /:project_path/-/merge_requests/new" do
subject { project_new_merge_request_path(project) }
it { is_expected.to be_allowed_for(:admin) }
@@ -257,7 +257,7 @@ describe "Internal Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/branches" do
+ describe "GET /:project_path/-/branches" do
subject { project_branches_path(project) }
before do
@@ -278,7 +278,7 @@ describe "Internal Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/tags" do
+ describe "GET /:project_path/-/tags" do
subject { project_tags_path(project) }
before do
diff --git a/spec/features/security/project/private_access_spec.rb b/spec/features/security/project/private_access_spec.rb
index 2404b7cc69d..97e6b3bd4ff 100644
--- a/spec/features/security/project/private_access_spec.rb
+++ b/spec/features/security/project/private_access_spec.rb
@@ -29,7 +29,7 @@ describe "Private Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/tree/master" do
+ describe "GET /:project_path/-/tree/master" do
subject { project_tree_path(project, project.repository.root_ref) }
it { is_expected.to be_allowed_for(:admin) }
@@ -43,7 +43,7 @@ describe "Private Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/commits/master" do
+ describe "GET /:project_path/-/commits/master" do
subject { project_commits_path(project, project.repository.root_ref, limit: 1) }
it { is_expected.to be_allowed_for(:admin) }
@@ -57,7 +57,7 @@ describe "Private Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/commit/:sha" do
+ describe "GET /:project_path/-/commit/:sha" do
subject { project_commit_path(project, project.repository.commit) }
it { is_expected.to be_allowed_for(:admin) }
@@ -71,7 +71,7 @@ describe "Private Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/compare" do
+ describe "GET /:project_path/-/compare" do
subject { project_compare_index_path(project) }
it { is_expected.to be_allowed_for(:admin) }
@@ -127,7 +127,7 @@ describe "Private Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/blob" do
+ describe "GET /:project_path/-/blob" do
let(:commit) { project.repository.commit }
subject { project_blob_path(project, File.join(commit.id, '.gitignore')) }
@@ -215,7 +215,7 @@ describe "Private Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/merge_requests" do
+ describe "GET /:project_path/-/merge_requests" do
subject { project_merge_requests_path(project) }
it { is_expected.to be_allowed_for(:admin) }
@@ -229,7 +229,7 @@ describe "Private Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/branches" do
+ describe "GET /:project_path/-/branches" do
subject { project_branches_path(project) }
before do
@@ -250,7 +250,7 @@ describe "Private Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/tags" do
+ describe "GET /:project_path/-/tags" do
subject { project_tags_path(project) }
before do
diff --git a/spec/features/security/project/public_access_spec.rb b/spec/features/security/project/public_access_spec.rb
index 8e07af61c8b..24bbb8d9b9e 100644
--- a/spec/features/security/project/public_access_spec.rb
+++ b/spec/features/security/project/public_access_spec.rb
@@ -29,7 +29,7 @@ describe "Public Project Access" do
it { is_expected.to be_allowed_for(:visitor) }
end
- describe "GET /:project_path/tree/master" do
+ describe "GET /:project_path/-/tree/master" do
subject { project_tree_path(project, project.repository.root_ref) }
it { is_expected.to be_allowed_for(:admin) }
@@ -43,7 +43,7 @@ describe "Public Project Access" do
it { is_expected.to be_allowed_for(:visitor) }
end
- describe "GET /:project_path/commits/master" do
+ describe "GET /:project_path/-/commits/master" do
subject { project_commits_path(project, project.repository.root_ref, limit: 1) }
it { is_expected.to be_allowed_for(:admin) }
@@ -57,7 +57,7 @@ describe "Public Project Access" do
it { is_expected.to be_allowed_for(:visitor) }
end
- describe "GET /:project_path/commit/:sha" do
+ describe "GET /:project_path/-/commit/:sha" do
subject { project_commit_path(project, project.repository.commit) }
it { is_expected.to be_allowed_for(:admin) }
@@ -71,7 +71,7 @@ describe "Public Project Access" do
it { is_expected.to be_allowed_for(:visitor) }
end
- describe "GET /:project_path/compare" do
+ describe "GET /:project_path/-/compare" do
subject { project_compare_index_path(project) }
it { is_expected.to be_allowed_for(:admin) }
@@ -345,7 +345,7 @@ describe "Public Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/blob" do
+ describe "GET /:project_path/-/blob" do
let(:commit) { project.repository.commit }
subject { project_blob_path(project, File.join(commit.id, '.gitignore')) }
@@ -446,7 +446,7 @@ describe "Public Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/merge_requests" do
+ describe "GET /:project_path/-/merge_requests" do
subject { project_merge_requests_path(project) }
it { is_expected.to be_allowed_for(:admin) }
@@ -460,7 +460,7 @@ describe "Public Project Access" do
it { is_expected.to be_allowed_for(:visitor) }
end
- describe "GET /:project_path/merge_requests/new" do
+ describe "GET /:project_path/-/merge_requests/new" do
subject { project_new_merge_request_path(project) }
it { is_expected.to be_allowed_for(:admin) }
@@ -474,7 +474,7 @@ describe "Public Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/branches" do
+ describe "GET /:project_path/-/branches" do
subject { project_branches_path(project) }
before do
@@ -495,7 +495,7 @@ describe "Public Project Access" do
it { is_expected.to be_allowed_for(:visitor) }
end
- describe "GET /:project_path/tags" do
+ describe "GET /:project_path/-/tags" do
subject { project_tags_path(project) }
before do
diff --git a/spec/features/signed_commits_spec.rb b/spec/features/signed_commits_spec.rb
index f56bd055224..3c7a31ac11b 100644
--- a/spec/features/signed_commits_spec.rb
+++ b/spec/features/signed_commits_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe 'GPG signed commits' do
let(:project) { create(:project, :public, :repository) }
- it 'changes from unverified to verified when the user changes his email to match the gpg key', :sidekiq_might_not_need_inline do
+ it 'changes from unverified to verified when the user changes their email to match the gpg key', :sidekiq_might_not_need_inline do
ref = GpgHelpers::SIGNED_AND_AUTHORED_SHA
user = create(:user, email: 'unrelated.user@example.org')
@@ -15,10 +15,9 @@ describe 'GPG signed commits' do
visit project_commit_path(project, ref)
- expect(page).to have_button 'Unverified'
- expect(page).not_to have_button 'Verified'
+ expect(page).to have_selector('.gpg-status-box', text: 'Unverified')
- # user changes his email which makes the gpg key verified
+ # user changes their email which makes the gpg key verified
perform_enqueued_jobs do
user.skip_reconfirmation!
user.update!(email: GpgHelpers::User1.emails.first)
@@ -26,8 +25,7 @@ describe 'GPG signed commits' do
visit project_commit_path(project, ref)
- expect(page).not_to have_button 'Unverified'
- expect(page).to have_button 'Verified'
+ expect(page).to have_selector('.gpg-status-box', text: 'Verified')
end
it 'changes from unverified to verified when the user adds the missing gpg key', :sidekiq_might_not_need_inline do
@@ -36,8 +34,7 @@ describe 'GPG signed commits' do
visit project_commit_path(project, ref)
- expect(page).to have_button 'Unverified'
- expect(page).not_to have_button 'Verified'
+ expect(page).to have_selector('.gpg-status-box', text: 'Unverified')
# user adds the gpg key which makes the signature valid
perform_enqueued_jobs do
@@ -46,8 +43,7 @@ describe 'GPG signed commits' do
visit project_commit_path(project, ref)
- expect(page).not_to have_button 'Unverified'
- expect(page).to have_button 'Verified'
+ expect(page).to have_selector('.gpg-status-box', text: 'Verified')
end
context 'shows popover badges', :js do
@@ -77,7 +73,7 @@ describe 'GPG signed commits' do
it 'unverified signature' do
visit project_commit_path(project, GpgHelpers::SIGNED_COMMIT_SHA)
- click_on 'Unverified'
+ page.find('.gpg-status-box', text: 'Unverified').click
within '.popover' do
expect(page).to have_content 'This commit was signed with an unverified signature.'
@@ -90,7 +86,7 @@ describe 'GPG signed commits' do
visit project_commit_path(project, GpgHelpers::DIFFERING_EMAIL_SHA)
- click_on 'Unverified'
+ page.find('.gpg-status-box', text: 'Unverified').click
within '.popover' do
expect(page).to have_content 'This commit was signed with a verified signature, but the committer email is not verified to belong to the same user.'
@@ -105,7 +101,7 @@ describe 'GPG signed commits' do
visit project_commit_path(project, GpgHelpers::SIGNED_COMMIT_SHA)
- click_on 'Unverified'
+ page.find('.gpg-status-box', text: 'Unverified').click
within '.popover' do
expect(page).to have_content "This commit was signed with a different user's verified signature."
@@ -120,7 +116,7 @@ describe 'GPG signed commits' do
visit project_commit_path(project, GpgHelpers::SIGNED_AND_AUTHORED_SHA)
- click_on 'Verified'
+ page.find('.gpg-status-box', text: 'Verified').click
within '.popover' do
expect(page).to have_content 'This commit was signed with a verified signature and the committer email is verified to belong to the same user.'
@@ -136,13 +132,13 @@ describe 'GPG signed commits' do
visit project_commit_path(project, GpgHelpers::SIGNED_AND_AUTHORED_SHA)
# wait for the signature to get generated
- expect(page).to have_button 'Verified'
+ expect(page).to have_selector('.gpg-status-box', text: 'Verified')
user_1.destroy!
refresh
- click_on 'Verified'
+ page.find('.gpg-status-box', text: 'Verified').click
within '.popover' do
expect(page).to have_content 'This commit was signed with a verified signature and the committer email is verified to belong to the same user.'
@@ -160,9 +156,9 @@ describe 'GPG signed commits' do
end
it 'displays commit signature' do
- expect(page).to have_button 'Unverified'
+ expect(page).to have_selector('.gpg-status-box', text: 'Unverified')
- click_on 'Unverified'
+ page.find('.gpg-status-box', text: 'Unverified').click
within '.popover' do
expect(page).to have_content 'This commit was signed with an unverified signature'
diff --git a/spec/features/snippets/spam_snippets_spec.rb b/spec/features/snippets/spam_snippets_spec.rb
index 0c3ca6f17c8..dac36ba2b28 100644
--- a/spec/features/snippets/spam_snippets_spec.rb
+++ b/spec/features/snippets/spam_snippets_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
describe 'User creates snippet', :js do
let(:user) { create(:user) }
+ def description_field
+ find('.js-description-input input,textarea')
+ end
+
before do
stub_feature_flags(allow_possible_spam: false)
stub_feature_flags(snippets_vue: false)
@@ -22,7 +26,11 @@ describe 'User creates snippet', :js do
visit new_snippet_path
fill_in 'personal_snippet_title', with: 'My Snippet Title'
+
+ # Click placeholder first to expand full description field
+ description_field.click
fill_in 'personal_snippet_description', with: 'My Snippet **Description**'
+
find('#personal_snippet_visibility_level_20').set(true)
page.within('.file-editor') do
find('.ace_text-input', visible: false).send_keys 'Hello World!'
diff --git a/spec/features/snippets/user_creates_snippet_spec.rb b/spec/features/snippets/user_creates_snippet_spec.rb
index b373264bbe4..eb55613b954 100644
--- a/spec/features/snippets/user_creates_snippet_spec.rb
+++ b/spec/features/snippets/user_creates_snippet_spec.rb
@@ -13,9 +13,17 @@ describe 'User creates snippet', :js do
visit new_snippet_path
end
+ def description_field
+ find('.js-description-input input,textarea')
+ end
+
def fill_form
fill_in 'personal_snippet_title', with: 'My Snippet Title'
+
+ # Click placeholder first to expand full description field
+ description_field.click
fill_in 'personal_snippet_description', with: 'My Snippet **Description**'
+
page.within('.file-editor') do
find('.ace_text-input', visible: false).send_keys 'Hello World!'
end
@@ -36,6 +44,8 @@ describe 'User creates snippet', :js do
end
it 'previews a snippet with file' do
+ # Click placeholder first to expand full description field
+ description_field.click
fill_in 'personal_snippet_description', with: 'My Snippet'
dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
find('.js-md-preview-button').click
diff --git a/spec/features/task_lists_spec.rb b/spec/features/task_lists_spec.rb
index bcd894a0d20..24a183017c9 100644
--- a/spec/features/task_lists_spec.rb
+++ b/spec/features/task_lists_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe 'Task Lists' do
include Warden::Test::Helpers
- let(:project) { create(:project, :repository) }
+ let(:project) { create(:project, :public, :repository) }
let(:user) { create(:user) }
let(:user2) { create(:user) }
@@ -122,6 +122,7 @@ describe 'Task Lists' do
it 'provides a summary on Issues#index' do
visit project_issues_path(project)
+
expect(page).to have_content("2 of 6 tasks completed")
end
end
@@ -191,6 +192,7 @@ describe 'Task Lists' do
it 'is only editable by author', :js do
visit_issue(project, issue)
+
expect(page).to have_selector('.js-task-list-container')
gitlab_sign_out
@@ -237,52 +239,70 @@ describe 'Task Lists' do
visit project_merge_request_path(project, merge)
end
- describe 'multiple tasks' do
- let(:project) { create(:project, :repository) }
- let!(:merge) { create(:merge_request, :simple, description: markdown, author: user, source_project: project) }
-
- it 'renders for description' do
+ shared_examples 'multiple tasks' do
+ it 'renders for description', :js do
visit_merge_request(project, merge)
+ wait_for_requests
expect(page).to have_selector('ul.task-list', count: 1)
expect(page).to have_selector('li.task-list-item', count: 6)
expect(page).to have_selector('ul input[checked]', count: 2)
end
- it 'contains the required selectors' do
+ it 'contains the required selectors', :js do
visit_merge_request(project, merge)
+ wait_for_requests
container = '.detail-page-description .description.js-task-list-container'
expect(page).to have_selector(container)
expect(page).to have_selector("#{container} .md .task-list .task-list-item .task-list-item-checkbox")
- expect(page).to have_selector("#{container} .js-task-list-field")
+ expect(page).to have_selector("#{container} .js-task-list-field", visible: false)
expect(page).to have_selector('form.js-issuable-update')
- expect(page).to have_selector('a.btn-close')
end
- it 'is only editable by author' do
+ it 'is only editable by author', :js do
visit_merge_request(project, merge)
+ wait_for_requests
+
expect(page).to have_selector('.js-task-list-container')
+ expect(page).to have_selector('li.task-list-item.enabled', count: 6)
logout(:user)
-
login_as(user2)
visit current_path
+ wait_for_requests
+
expect(page).not_to have_selector('.js-task-list-container')
+ expect(page).to have_selector('li.task-list-item.enabled', count: 0)
+ expect(page).to have_selector('li.task-list-item input[disabled]', count: 6)
end
+ end
+
+ context 'when merge request is open' do
+ let!(:merge) { create(:merge_request, :simple, description: markdown, author: user, source_project: project) }
+
+ it_behaves_like 'multiple tasks'
it 'provides a summary on MergeRequests#index' do
visit project_merge_requests_path(project)
+
expect(page).to have_content("2 of 6 tasks completed")
end
end
+ context 'when merge request is closed' do
+ let!(:merge) { create(:merge_request, :closed, :simple, description: markdown, author: user, source_project: project) }
+
+ it_behaves_like 'multiple tasks'
+ end
+
describe 'single incomplete task' do
let!(:merge) { create(:merge_request, :simple, description: singleIncompleteMarkdown, author: user, source_project: project) }
- it 'renders for description' do
+ it 'renders for description', :js do
visit_merge_request(project, merge)
+ wait_for_requests
expect(page).to have_selector('ul.task-list', count: 1)
expect(page).to have_selector('li.task-list-item', count: 1)
@@ -291,6 +311,7 @@ describe 'Task Lists' do
it 'provides a summary on MergeRequests#index' do
visit project_merge_requests_path(project)
+
expect(page).to have_content("0 of 1 task completed")
end
end
@@ -298,8 +319,9 @@ describe 'Task Lists' do
describe 'single complete task' do
let!(:merge) { create(:merge_request, :simple, description: singleCompleteMarkdown, author: user, source_project: project) }
- it 'renders for description' do
+ it 'renders for description', :js do
visit_merge_request(project, merge)
+ wait_for_requests
expect(page).to have_selector('ul.task-list', count: 1)
expect(page).to have_selector('li.task-list-item', count: 1)
@@ -308,6 +330,7 @@ describe 'Task Lists' do
it 'provides a summary on MergeRequests#index' do
visit project_merge_requests_path(project)
+
expect(page).to have_content("1 of 1 task completed")
end
end
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index 5f4f92e547c..0bef61a4854 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -152,7 +152,7 @@ describe 'Login' do
end
end
- describe 'with two-factor authentication' do
+ describe 'with two-factor authentication', :js do
def enter_code(code)
fill_in 'user_otp_attempt', with: code
click_button 'Verify code'
@@ -797,7 +797,6 @@ describe 'Login' do
before do
stub_application_setting(send_user_confirmation_email: true)
- stub_feature_flags(soft_email_confirmation: true)
allow(User).to receive(:allow_unconfirmed_access_for).and_return grace_period
end
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index 30f298b1fc3..8d5c0657fa5 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -129,63 +129,29 @@ shared_examples 'Signup' do
stub_application_setting(send_user_confirmation_email: true)
end
- context 'when soft email confirmation is not enabled' do
- before do
- stub_feature_flags(soft_email_confirmation: false)
- end
-
- it 'creates the user account and sends a confirmation email' do
- visit new_user_registration_path
-
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
-
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- fill_in 'new_user_email_confirmation', with: new_user.email
- end
-
- fill_in 'new_user_password', with: new_user.password
-
- expect { click_button 'Register' }.to change { User.count }.by(1)
+ it 'creates the user account and sends a confirmation email' do
+ visit new_user_registration_path
- expect(current_path).to eq users_almost_there_path
- expect(page).to have_content('Please check your email to confirm your account')
- end
- end
+ fill_in 'new_user_username', with: new_user.username
+ fill_in 'new_user_email', with: new_user.email
- context 'when soft email confirmation is enabled' do
- before do
- stub_feature_flags(soft_email_confirmation: true)
+ if Gitlab::Experimentation.enabled?(:signup_flow)
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ else
+ fill_in 'new_user_name', with: new_user.name
+ fill_in 'new_user_email_confirmation', with: new_user.email
end
- it 'creates the user account and sends a confirmation email' do
- visit new_user_registration_path
-
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
-
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- fill_in 'new_user_email_confirmation', with: new_user.email
- end
-
- fill_in 'new_user_password', with: new_user.password
+ fill_in 'new_user_password', with: new_user.password
- expect { click_button 'Register' }.to change { User.count }.by(1)
+ expect { click_button 'Register' }.to change { User.count }.by(1)
- if Gitlab::Experimentation.enabled?(:signup_flow)
- expect(current_path).to eq users_sign_up_welcome_path
- else
- expect(current_path).to eq dashboard_projects_path
- expect(page).to have_content("Please check your email (#{new_user.email}) to verify that you own this address and unlock the power of CI/CD.")
- end
+ if Gitlab::Experimentation.enabled?(:signup_flow)
+ expect(current_path).to eq users_sign_up_welcome_path
+ else
+ expect(current_path).to eq dashboard_projects_path
+ expect(page).to have_content("Please check your email (#{new_user.email}) to verify that you own this address and unlock the power of CI/CD.")
end
end
end
@@ -360,7 +326,7 @@ shared_examples 'Signup' do
InvisibleCaptcha.timestamp_enabled = true
stub_application_setting(recaptcha_enabled: true)
allow_next_instance_of(RegistrationsController) do |instance|
- allow(instance).to receive(:verify_recaptcha).and_return(false)
+ allow(instance).to receive(:verify_recaptcha).and_return(true)
end
end
@@ -368,28 +334,53 @@ shared_examples 'Signup' do
InvisibleCaptcha.timestamp_enabled = false
end
- it 'prevents from signing up' do
- visit new_user_registration_path
+ context 'when reCAPTCHA detects malicious behaviour' do
+ before do
+ allow_next_instance_of(RegistrationsController) do |instance|
+ allow(instance).to receive(:verify_recaptcha).and_return(false)
+ end
+ end
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
+ it 'prevents from signing up' do
+ visit new_user_registration_path
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- fill_in 'new_user_email_confirmation', with: new_user.email
+ fill_in 'new_user_username', with: new_user.username
+ fill_in 'new_user_email', with: new_user.email
+
+ if Gitlab::Experimentation.enabled?(:signup_flow)
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ else
+ fill_in 'new_user_name', with: new_user.name
+ fill_in 'new_user_email_confirmation', with: new_user.email
+ end
+
+ fill_in 'new_user_password', with: new_user.password
+
+ expect { click_button 'Register' }.not_to change { User.count }
+ expect(page).to have_content('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
end
+ end
- fill_in 'new_user_password', with: new_user.password
+ context 'when invisible captcha detects malicious behaviour' do
+ it 'prevents from signing up' do
+ visit new_user_registration_path
- expect { click_button 'Register' }.not_to change { User.count }
+ fill_in 'new_user_username', with: new_user.username
+ fill_in 'new_user_email', with: new_user.email
- if Gitlab::Experimentation.enabled?(:signup_flow)
+ if Gitlab::Experimentation.enabled?(:signup_flow)
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ else
+ fill_in 'new_user_name', with: new_user.name
+ fill_in 'new_user_email_confirmation', with: new_user.email
+ end
+
+ fill_in 'new_user_password', with: new_user.password
+
+ expect { click_button 'Register' }.not_to change { User.count }
expect(page).to have_content('That was a bit too quick! Please resubmit.')
- else
- expect(page).to have_content('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
end
end
end
@@ -445,8 +436,8 @@ end
describe 'With experimental flow' do
before do
- stub_experiment(signup_flow: true)
- stub_experiment_for_user(signup_flow: true)
+ stub_experiment(signup_flow: true, paid_signup_flow: false)
+ stub_experiment_for_user(signup_flow: true, paid_signup_flow: false)
end
it_behaves_like 'Signup'
diff --git a/spec/finders/award_emojis_finder_spec.rb b/spec/finders/award_emojis_finder_spec.rb
index ccac475daad..bdfd2a9a3f4 100644
--- a/spec/finders/award_emojis_finder_spec.rb
+++ b/spec/finders/award_emojis_finder_spec.rb
@@ -3,14 +3,14 @@
require 'spec_helper'
describe AwardEmojisFinder do
- set(:issue_1) { create(:issue) }
- set(:issue_1_thumbsup) { create(:award_emoji, name: 'thumbsup', awardable: issue_1) }
- set(:issue_1_thumbsdown) { create(:award_emoji, name: 'thumbsdown', awardable: issue_1) }
+ let_it_be(:issue_1) { create(:issue) }
+ let_it_be(:issue_1_thumbsup) { create(:award_emoji, name: 'thumbsup', awardable: issue_1) }
+ let_it_be(:issue_1_thumbsdown) { create(:award_emoji, name: 'thumbsdown', awardable: issue_1) }
# Create a matching set of emoji for a second issue.
# These should never appear in our finder results
- set(:issue_2) { create(:issue) }
- set(:issue_2_thumbsup) { create(:award_emoji, name: 'thumbsup', awardable: issue_2) }
- set(:issue_2_thumbsdown) { create(:award_emoji, name: 'thumbsdown', awardable: issue_2) }
+ let_it_be(:issue_2) { create(:issue) }
+ let_it_be(:issue_2_thumbsup) { create(:award_emoji, name: 'thumbsup', awardable: issue_2) }
+ let_it_be(:issue_2_thumbsdown) { create(:award_emoji, name: 'thumbsdown', awardable: issue_2) }
describe 'param validation' do
it 'raises an error if `name` is invalid' do
diff --git a/spec/finders/clusters_finder_spec.rb b/spec/finders/clusters_finder_spec.rb
index 5dde616f679..3bad88573f7 100644
--- a/spec/finders/clusters_finder_spec.rb
+++ b/spec/finders/clusters_finder_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe ClustersFinder do
let(:project) { create(:project) }
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
describe '#execute' do
let(:enabled_cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
diff --git a/spec/finders/concerns/finder_with_cross_project_access_spec.rb b/spec/finders/concerns/finder_with_cross_project_access_spec.rb
index 6ba98b79176..f3365309b05 100644
--- a/spec/finders/concerns/finder_with_cross_project_access_spec.rb
+++ b/spec/finders/concerns/finder_with_cross_project_access_spec.rb
@@ -128,7 +128,7 @@ describe FinderWithCrossProjectAccess do
end
end
- context '.finder_model' do
+ describe '.finder_model' do
it 'is set correctly' do
expect(finder_class.finder_model).to eq(Project)
end
diff --git a/spec/finders/context_commits_finder_spec.rb b/spec/finders/context_commits_finder_spec.rb
new file mode 100644
index 00000000000..13cfa32ecfc
--- /dev/null
+++ b/spec/finders/context_commits_finder_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ContextCommitsFinder do
+ describe "#execute" do
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request) }
+ let(:commit) { create(:commit, id: '6d394385cf567f80a8fd85055db1ab4c5295806f') }
+
+ it 'filters commits by valid sha/commit message' do
+ params = { search: commit.id }
+
+ commits = described_class.new(project, merge_request, params).execute
+
+ expect(commits.length).to eq(1)
+ expect(commits[0].id).to eq(commit.id)
+ end
+
+ it 'returns nothing when searched by invalid sha/commit message' do
+ params = { search: 'zzz' }
+
+ commits = described_class.new(project, merge_request, params).execute
+
+ expect(commits).to be_empty
+ end
+ end
+end
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index 056795a50d0..ff52e1b9d5f 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -29,26 +29,31 @@ describe IssuesFinder do
end
context 'filter by username' do
- set(:user3) { create(:user) }
+ let_it_be(:user3) { create(:user) }
before do
project2.add_developer(user3)
- issue3.assignees = [user2, user3]
+ issue2.assignees = [user2]
+ issue3.assignees = [user3]
end
it_behaves_like 'assignee username filter' do
- let(:params) { { assignee_username: [user2.username, user3.username] } }
- let(:expected_issuables) { [issue3] }
+ let(:params) { { assignee_username: [user2.username] } }
+ let(:expected_issuables) { [issue2] }
end
it_behaves_like 'assignee NOT username filter' do
- let(:params) { { not: { assignee_username: [user2.username, user3.username] } } }
- let(:expected_issuables) { [issue1, issue2, issue4] }
+ before do
+ issue2.assignees = [user2]
+ end
+
+ let(:params) { { not: { assignee_username: [user.username, user2.username] } } }
+ let(:expected_issuables) { [issue3, issue4] }
end
end
it_behaves_like 'no assignee filter' do
- set(:user3) { create(:user) }
+ let_it_be(:user3) { create(:user) }
let(:expected_issuables) { [issue4] }
end
@@ -395,8 +400,8 @@ describe IssuesFinder do
context 'using NOT' do
let(:params) { { not: { label_name: [label.title, label2.title].join(',') } } }
- it 'returns issues that do not have ALL labels provided' do
- expect(issues).to contain_exactly(issue1, issue3, issue4)
+ it 'returns issues that do not have any of the labels provided' do
+ expect(issues).to contain_exactly(issue1, issue4)
end
end
end
@@ -417,8 +422,8 @@ describe IssuesFinder do
context 'using NOT' do
let(:params) { { not: { label_name: [label.title, label2.title].join(',') } } }
- it 'returns issues that do not have ALL labels provided' do
- expect(issues).to contain_exactly(issue1, issue3, issue4)
+ it 'returns issues that do not have ANY ONE of the labels provided' do
+ expect(issues).to contain_exactly(issue1, issue4)
end
end
end
@@ -674,7 +679,7 @@ describe IssuesFinder do
end
context 'filtering by confidential' do
- set(:confidential_issue) { create(:issue, project: project1, confidential: true) }
+ let_it_be(:confidential_issue) { create(:issue, project: project1, confidential: true) }
context 'no filtering' do
it 'returns all issues' do
@@ -772,7 +777,7 @@ describe IssuesFinder do
it 'returns the number of rows for the default state' do
finder = described_class.new(user)
- expect(finder.row_count).to eq(5)
+ expect(finder.row_count).to eq(4)
end
it 'returns the number of rows for a given state' do
@@ -785,10 +790,10 @@ describe IssuesFinder do
describe '#with_confidentiality_access_check' do
let(:guest) { create(:user) }
- set(:authorized_user) { create(:user) }
- set(:project) { create(:project, namespace: authorized_user.namespace) }
- set(:public_issue) { create(:issue, project: project) }
- set(:confidential_issue) { create(:issue, project: project, confidential: true) }
+ let_it_be(:authorized_user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: authorized_user.namespace) }
+ let_it_be(:public_issue) { create(:issue, project: project) }
+ let_it_be(:confidential_issue) { create(:issue, project: project, confidential: true) }
context 'when no project filter is given' do
let(:params) { {} }
diff --git a/spec/finders/jobs_finder_spec.rb b/spec/finders/jobs_finder_spec.rb
index 675d170b90e..01f9ec03c79 100644
--- a/spec/finders/jobs_finder_spec.rb
+++ b/spec/finders/jobs_finder_spec.rb
@@ -3,13 +3,13 @@
require 'spec_helper'
describe JobsFinder, '#execute' do
- set(:user) { create(:user) }
- set(:admin) { create(:user, :admin) }
- set(:project) { create(:project, :private, public_builds: false) }
- set(:pipeline) { create(:ci_pipeline, project: project) }
- set(:job_1) { create(:ci_build) }
- set(:job_2) { create(:ci_build, :running) }
- set(:job_3) { create(:ci_build, :success, pipeline: pipeline) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:user, :admin) }
+ let_it_be(:project) { create(:project, :private, public_builds: false) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:job_1) { create(:ci_build) }
+ let_it_be(:job_2) { create(:ci_build, :running) }
+ let_it_be(:job_3) { create(:ci_build, :success, pipeline: pipeline) }
let(:params) { {} }
diff --git a/spec/finders/keys_finder_spec.rb b/spec/finders/keys_finder_spec.rb
index 7605d066ddf..bae4a542484 100644
--- a/spec/finders/keys_finder_spec.rb
+++ b/spec/finders/keys_finder_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe KeysFinder do
- subject { described_class.new(user, params).execute }
+ subject { described_class.new(params).execute }
let(:user) { create(:user) }
let(:params) { {} }
@@ -20,159 +20,149 @@ describe KeysFinder do
let!(:key_2) { create(:personal_key, last_used_at: nil, user: user) }
let!(:key_3) { create(:personal_key, last_used_at: 2.days.ago) }
- context 'with a regular user' do
- it 'raises GitLabAccessDeniedError' do
- expect { subject }.to raise_error(KeysFinder::GitLabAccessDeniedError)
- end
- end
+ context 'key_type' do
+ let!(:deploy_key) { create(:deploy_key) }
- context 'with an admin user' do
- let(:user) {create(:admin)}
+ context 'when `key_type` is `ssh`' do
+ before do
+ params[:key_type] = 'ssh'
+ end
+
+ it 'returns only SSH keys' do
+ expect(subject).to contain_exactly(key_1, key_2, key_3)
+ end
+ end
- context 'key_type' do
- let!(:deploy_key) { create(:deploy_key) }
+ context 'when `key_type` is not specified' do
+ it 'returns all types of keys' do
+ expect(subject).to contain_exactly(key_1, key_2, key_3, deploy_key)
+ end
+ end
+ end
- context 'when `key_type` is `ssh`' do
+ context 'fingerprint' do
+ context 'with invalid fingerprint' do
+ context 'with invalid MD5 fingerprint' do
before do
- params[:key_type] = 'ssh'
+ params[:fingerprint] = '11:11:11:11'
end
- it 'returns only SSH keys' do
- expect(subject).to contain_exactly(key_1, key_2, key_3)
+ it 'raises InvalidFingerprint' do
+ expect { subject }.to raise_error(KeysFinder::InvalidFingerprint)
end
end
- context 'when `key_type` is not specified' do
- it 'returns all types of keys' do
- expect(subject).to contain_exactly(key_1, key_2, key_3, deploy_key)
+ context 'with invalid SHA fingerprint' do
+ before do
+ params[:fingerprint] = 'nUhzNyftwAAKs7HufskYTte2g'
+ end
+
+ it 'raises InvalidFingerprint' do
+ expect { subject }.to raise_error(KeysFinder::InvalidFingerprint)
end
end
end
- context 'fingerprint' do
- context 'with invalid fingerprint' do
- context 'with invalid MD5 fingerprint' do
+ context 'with valid fingerprints' do
+ let!(:deploy_key) do
+ create(:deploy_key,
+ user: user,
+ key: 'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt1017k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0=',
+ fingerprint: '8a:4a:12:92:0b:50:47:02:d4:5a:8e:a9:44:4e:08:b4',
+ fingerprint_sha256: '4DPHOVNh53i9dHb5PpY2vjfyf5qniTx1/pBFPoZLDdk')
+ end
+
+ context 'personal key with valid MD5 params' do
+ context 'with an existent fingerprint' do
before do
- params[:fingerprint] = '11:11:11:11'
+ params[:fingerprint] = 'ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1'
end
- it 'raises InvalidFingerprint' do
- expect { subject }.to raise_error(KeysFinder::InvalidFingerprint)
+ it 'returns the key' do
+ expect(subject).to eq(key_1)
+ expect(subject.user).to eq(user)
end
end
- context 'with invalid SHA fingerprint' do
+ context 'deploy key with an existent fingerprint' do
before do
- params[:fingerprint] = 'nUhzNyftwAAKs7HufskYTte2g'
+ params[:fingerprint] = '8a:4a:12:92:0b:50:47:02:d4:5a:8e:a9:44:4e:08:b4'
end
- it 'raises InvalidFingerprint' do
- expect { subject }.to raise_error(KeysFinder::InvalidFingerprint)
+ it 'returns the key' do
+ expect(subject).to eq(deploy_key)
+ expect(subject.user).to eq(user)
end
end
- end
-
- context 'with valid fingerprints' do
- let!(:deploy_key) do
- create(:deploy_key,
- user: user,
- key: 'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt1017k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0=',
- fingerprint: '8a:4a:12:92:0b:50:47:02:d4:5a:8e:a9:44:4e:08:b4',
- fingerprint_sha256: '4DPHOVNh53i9dHb5PpY2vjfyf5qniTx1/pBFPoZLDdk')
- end
-
- context 'personal key with valid MD5 params' do
- context 'with an existent fingerprint' do
- before do
- params[:fingerprint] = 'ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1'
- end
- it 'returns the key' do
- expect(subject).to eq(key_1)
- expect(subject.user).to eq(user)
- end
+ context 'with a non-existent fingerprint' do
+ before do
+ params[:fingerprint] = 'bb:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d2'
end
- context 'deploy key with an existent fingerprint' do
- before do
- params[:fingerprint] = '8a:4a:12:92:0b:50:47:02:d4:5a:8e:a9:44:4e:08:b4'
- end
-
- it 'returns the key' do
- expect(subject).to eq(deploy_key)
- expect(subject.user).to eq(user)
- end
+ it 'returns nil' do
+ expect(subject).to be_nil
end
+ end
+ end
- context 'with a non-existent fingerprint' do
- before do
- params[:fingerprint] = 'bb:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d2'
- end
+ context 'personal key with valid SHA256 params' do
+ context 'with an existent fingerprint' do
+ before do
+ params[:fingerprint] = 'SHA256:nUhzNyftwADy8AH3wFY31tAKs7HufskYTte2aXo/lCg'
+ end
- it 'returns nil' do
- expect(subject).to be_nil
- end
+ it 'returns key' do
+ expect(subject).to eq(key_1)
+ expect(subject.user).to eq(user)
end
end
- context 'personal key with valid SHA256 params' do
- context 'with an existent fingerprint' do
- before do
- params[:fingerprint] = 'SHA256:nUhzNyftwADy8AH3wFY31tAKs7HufskYTte2aXo/lCg'
- end
-
- it 'returns key' do
- expect(subject).to eq(key_1)
- expect(subject.user).to eq(user)
- end
+ context 'deploy key with an existent fingerprint' do
+ before do
+ params[:fingerprint] = 'SHA256:4DPHOVNh53i9dHb5PpY2vjfyf5qniTx1/pBFPoZLDdk'
end
- context 'deploy key with an existent fingerprint' do
- before do
- params[:fingerprint] = 'SHA256:4DPHOVNh53i9dHb5PpY2vjfyf5qniTx1/pBFPoZLDdk'
- end
-
- it 'returns key' do
- expect(subject).to eq(deploy_key)
- expect(subject.user).to eq(user)
- end
+ it 'returns key' do
+ expect(subject).to eq(deploy_key)
+ expect(subject.user).to eq(user)
end
+ end
- context 'with a non-existent fingerprint' do
- before do
- params[:fingerprint] = 'SHA256:xTjuFqftwADy8AH3wFY31tAKs7HufskYTte2aXi/mNp'
- end
+ context 'with a non-existent fingerprint' do
+ before do
+ params[:fingerprint] = 'SHA256:xTjuFqftwADy8AH3wFY31tAKs7HufskYTte2aXi/mNp'
+ end
- it 'returns nil' do
- expect(subject).to be_nil
- end
+ it 'returns nil' do
+ expect(subject).to be_nil
end
end
end
end
+ end
- context 'user' do
- context 'without user' do
- it 'contains ssh_keys of all users in the system' do
- expect(subject).to contain_exactly(key_1, key_2, key_3)
- end
+ context 'user' do
+ context 'without user' do
+ it 'contains ssh_keys of all users in the system' do
+ expect(subject).to contain_exactly(key_1, key_2, key_3)
end
+ end
- context 'with user' do
- before do
- params[:user] = user
- end
+ context 'with user' do
+ before do
+ params[:users] = user
+ end
- it 'contains ssh_keys of only the specified users' do
- expect(subject).to contain_exactly(key_1, key_2)
- end
+ it 'contains ssh_keys of only the specified users' do
+ expect(subject).to contain_exactly(key_1, key_2)
end
end
+ end
- context 'sort order' do
- it 'sorts in last_used_at_desc order' do
- expect(subject).to eq([key_3, key_1, key_2])
- end
+ context 'sort order' do
+ it 'sorts in last_used_at_desc order' do
+ expect(subject).to eq([key_3, key_1, key_2])
end
end
end
diff --git a/spec/finders/members_finder_spec.rb b/spec/finders/members_finder_spec.rb
index 2bc37606a25..f6df727f7db 100644
--- a/spec/finders/members_finder_spec.rb
+++ b/spec/finders/members_finder_spec.rb
@@ -3,13 +3,13 @@
require 'spec_helper'
describe MembersFinder, '#execute' do
- set(:group) { create(:group) }
- set(:nested_group) { create(:group, parent: group) }
- set(:project) { create(:project, namespace: nested_group) }
- set(:user1) { create(:user) }
- set(:user2) { create(:user) }
- set(:user3) { create(:user) }
- set(:user4) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:nested_group) { create(:group, parent: group) }
+ let_it_be(:project, reload: true) { create(:project, namespace: nested_group) }
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:user3) { create(:user) }
+ let_it_be(:user4) { create(:user) }
it 'returns members for project and parent groups' do
nested_group.request_access(user1)
@@ -75,6 +75,15 @@ describe MembersFinder, '#execute' do
expect(result).to contain_exactly(member2, member3)
end
+ it 'returns only inherited members of a personal project' do
+ project = create(:project, namespace: user1.namespace)
+ member = project.members.first
+
+ result = described_class.new(project, user1).execute(include_relations: [:inherited])
+
+ expect(result).to contain_exactly(member)
+ end
+
it 'returns the members.access_level when the user is invited', :nested_groups do
member_invite = create(:project_member, :invited, project: project, invite_email: create(:user).email)
member1 = group.add_maintainer(user2)
@@ -96,13 +105,33 @@ describe MembersFinder, '#execute' do
expect(result.first.access_level).to eq(Gitlab::Access::DEVELOPER)
end
+ it 'returns searched members if requested' do
+ project.add_maintainer(user2)
+ project.add_maintainer(user3)
+ member3 = project.add_maintainer(user4)
+
+ result = described_class.new(project, user2).execute(params: { search: user4.name })
+
+ expect(result).to contain_exactly(member3)
+ end
+
+ it 'returns members sorted by id_desc' do
+ member1 = project.add_maintainer(user2)
+ member2 = project.add_maintainer(user3)
+ member3 = project.add_maintainer(user4)
+
+ result = described_class.new(project, user2).execute(params: { sort: 'id_desc' })
+
+ expect(result).to eq([member3, member2, member1])
+ end
+
context 'when include_invited_groups_members == true' do
subject { described_class.new(project, user2).execute(include_relations: [:inherited, :direct, :invited_groups_members]) }
- set(:linked_group) { create(:group, :public) }
- set(:nested_linked_group) { create(:group, parent: linked_group) }
- set(:linked_group_member) { linked_group.add_guest(user1) }
- set(:nested_linked_group_member) { nested_linked_group.add_guest(user2) }
+ let_it_be(:linked_group) { create(:group, :public) }
+ let_it_be(:nested_linked_group) { create(:group, parent: linked_group) }
+ let_it_be(:linked_group_member) { linked_group.add_guest(user1) }
+ let_it_be(:nested_linked_group_member) { nested_linked_group.add_guest(user2) }
it 'includes all the invited_groups members including members inherited from ancestor groups' do
create(:project_group_link, project: project, group: nested_linked_group)
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 849387b72bd..42211f7ac9d 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -214,13 +214,13 @@ describe MergeRequestsFinder do
merge_request3.assignees = [user2, user3]
end
- set(:user3) { create(:user) }
+ let_it_be(:user3) { create(:user) }
let(:params) { { assignee_username: [user2.username, user3.username] } }
let(:expected_issuables) { [merge_request3] }
end
it_behaves_like 'no assignee filter' do
- set(:user3) { create(:user) }
+ let_it_be(:user3) { create(:user) }
let(:expected_issuables) { [merge_request4, merge_request5] }
end
diff --git a/spec/finders/milestones_finder_spec.rb b/spec/finders/milestones_finder_spec.rb
index 3545ff35ed8..3402eb39b3b 100644
--- a/spec/finders/milestones_finder_spec.rb
+++ b/spec/finders/milestones_finder_spec.rb
@@ -3,13 +3,14 @@
require 'spec_helper'
describe MilestonesFinder do
+ let(:now) { Time.now }
let(:group) { create(:group) }
let(:project_1) { create(:project, namespace: group) }
let(:project_2) { create(:project, namespace: group) }
- let!(:milestone_1) { create(:milestone, group: group, title: 'one test', due_date: Date.today) }
- let!(:milestone_2) { create(:milestone, group: group) }
- let!(:milestone_3) { create(:milestone, project: project_1, state: 'active', due_date: Date.tomorrow) }
- let!(:milestone_4) { create(:milestone, project: project_2, state: 'active') }
+ let!(:milestone_1) { create(:milestone, group: group, title: 'one test', start_date: now - 1.day, due_date: now) }
+ let!(:milestone_2) { create(:milestone, group: group, start_date: now + 1.day, due_date: now + 2.days) }
+ let!(:milestone_3) { create(:milestone, project: project_1, state: 'active', start_date: now + 2.days, due_date: now + 3.days) }
+ let!(:milestone_4) { create(:milestone, project: project_2, state: 'active', start_date: now + 4.days, due_date: now + 5.days) }
it 'returns milestones for projects' do
result = described_class.new(project_ids: [project_1.id, project_2.id], state: 'all').execute
@@ -33,8 +34,11 @@ describe MilestonesFinder do
end
it 'orders milestones by due date' do
- expect(result.first).to eq(milestone_1)
- expect(result.second).to eq(milestone_3)
+ milestone = create(:milestone, group: group, due_date: now - 2.days)
+
+ expect(result.first).to eq(milestone)
+ expect(result.second).to eq(milestone_1)
+ expect(result.third).to eq(milestone_2)
end
end
@@ -77,6 +81,34 @@ describe MilestonesFinder do
expect(result.to_a).to contain_exactly(milestone_1)
end
+
+ context 'by timeframe' do
+ it 'returns milestones with start_date and due_date between timeframe' do
+ params.merge!(start_date: now - 1.day, end_date: now + 3.days)
+
+ milestones = described_class.new(params).execute
+
+ expect(milestones).to match_array([milestone_1, milestone_2, milestone_3])
+ end
+
+ it 'returns milestones which starts before the timeframe' do
+ milestone = create(:milestone, project: project_2, start_date: now - 5.days)
+ params.merge!(start_date: now - 3.days, end_date: now - 2.days)
+
+ milestones = described_class.new(params).execute
+
+ expect(milestones).to match_array([milestone])
+ end
+
+ it 'returns milestones which ends after the timeframe' do
+ milestone = create(:milestone, project: project_2, due_date: now + 6.days)
+ params.merge!(start_date: now + 6.days, end_date: now + 7.days)
+
+ milestones = described_class.new(params).execute
+
+ expect(milestones).to match_array([milestone])
+ end
+ end
end
describe '#find_by' do
diff --git a/spec/finders/projects/prometheus/alerts_finder_spec.rb b/spec/finders/projects/prometheus/alerts_finder_spec.rb
new file mode 100644
index 00000000000..bb59e77cca8
--- /dev/null
+++ b/spec/finders/projects/prometheus/alerts_finder_spec.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::Prometheus::AlertsFinder do
+ let(:finder) { described_class.new(params) }
+ let(:params) { {} }
+
+ describe 'with params' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:other_project) { create(:project) }
+ let_it_be(:other_env) { create(:environment, project: other_project) }
+ let_it_be(:production) { create(:environment, project: project) }
+ let_it_be(:staging) { create(:environment, project: project) }
+ let_it_be(:alert) { create_alert(project, production) }
+ let_it_be(:alert2) { create_alert(project, production) }
+ let_it_be(:stg_alert) { create_alert(project, staging) }
+ let_it_be(:other_alert) { create_alert(other_project, other_env) }
+
+ describe '#execute' do
+ subject { finder.execute }
+
+ context 'with project' do
+ before do
+ params[:project] = project
+ end
+
+ it { is_expected.to eq([alert, alert2, stg_alert]) }
+
+ context 'with matching metric' do
+ before do
+ params[:metric] = alert.prometheus_metric
+ end
+
+ it { is_expected.to eq([alert]) }
+ end
+
+ context 'with matching metric id' do
+ before do
+ params[:metric] = alert.prometheus_metric_id
+ end
+
+ it { is_expected.to eq([alert]) }
+ end
+
+ context 'with project non-specific metric' do
+ before do
+ params[:metric] = other_alert.prometheus_metric
+ end
+
+ it { is_expected.to be_empty }
+ end
+ end
+
+ context 'with environment' do
+ before do
+ params[:environment] = production
+ end
+
+ it { is_expected.to eq([alert, alert2]) }
+
+ context 'with matching metric' do
+ before do
+ params[:metric] = alert.prometheus_metric
+ end
+
+ it { is_expected.to eq([alert]) }
+ end
+
+ context 'with environment non-specific metric' do
+ before do
+ params[:metric] = stg_alert.prometheus_metric
+ end
+
+ it { is_expected.to be_empty }
+ end
+ end
+
+ context 'with matching project and environment' do
+ before do
+ params[:project] = project
+ params[:environment] = production
+ end
+
+ it { is_expected.to eq([alert, alert2]) }
+
+ context 'with matching metric' do
+ before do
+ params[:metric] = alert.prometheus_metric
+ end
+
+ it { is_expected.to eq([alert]) }
+ end
+
+ context 'with environment non-specific metric' do
+ before do
+ params[:metric] = stg_alert.prometheus_metric
+ end
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'with matching id' do
+ before do
+ params[:id] = alert.id
+ end
+
+ it { is_expected.to eq([alert]) }
+ end
+
+ context 'with a nil id' do
+ before do
+ params[:id] = nil
+ end
+
+ it { is_expected.to eq([alert, alert2]) }
+ end
+ end
+
+ context 'with non-matching project-environment pair' do
+ before do
+ params[:project] = project
+ params[:environment] = other_env
+ end
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'with id' do
+ before do
+ params[:id] = alert.id
+ end
+
+ it { is_expected.to eq([alert]) }
+ end
+
+ context 'with multiple ids' do
+ before do
+ params[:id] = [alert.id, other_alert.id]
+ end
+
+ it { is_expected.to eq([alert, other_alert]) }
+ end
+
+ context 'with non-matching id' do
+ before do
+ params[:id] = -5
+ end
+
+ it { is_expected.to be_empty }
+ end
+ end
+
+ private
+
+ def create_alert(project, environment)
+ create(:prometheus_alert, project: project, environment: environment)
+ end
+ end
+
+ describe 'without params' do
+ subject { finder }
+
+ it 'raises an error' do
+ expect { subject }
+ .to raise_error(ArgumentError, 'Please provide one or more of the following params: :project, :environment, :id')
+ end
+ end
+end
diff --git a/spec/finders/projects/serverless/functions_finder_spec.rb b/spec/finders/projects/serverless/functions_finder_spec.rb
index d5644daebab..4e9f3d371ce 100644
--- a/spec/finders/projects/serverless/functions_finder_spec.rb
+++ b/spec/finders/projects/serverless/functions_finder_spec.rb
@@ -26,9 +26,69 @@ describe Projects::Serverless::FunctionsFinder do
project.add_maintainer(user)
end
- describe '#installed' do
- it 'when reactive_caching is still fetching data' do
- expect(described_class.new(project).knative_installed).to eq 'checking'
+ describe '#knative_installed' do
+ context 'when environment does not exist yet' do
+ shared_examples 'before first deployment' do
+ let(:service) { cluster.platform_kubernetes }
+ let(:deployment) { nil }
+
+ it 'returns true if Knative is installed on cluster' do
+ stub_kubeclient_discover_knative_found(service.api_url)
+ function_finder = described_class.new(project)
+ synchronous_reactive_cache(function_finder)
+
+ expect(function_finder.knative_installed).to be true
+ end
+
+ it 'returns false if Knative is not installed on cluster' do
+ stub_kubeclient_discover_knative_not_found(service.api_url)
+ function_finder = described_class.new(project)
+ synchronous_reactive_cache(function_finder)
+
+ expect(function_finder.knative_installed).to be false
+ end
+ end
+ context 'when project level cluster is present and enabled' do
+ it_behaves_like 'before first deployment' do
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp, enabled: true) }
+ let(:project) { cluster.project }
+ end
+ end
+
+ context 'when group level cluster is present and enabled' do
+ it_behaves_like 'before first deployment' do
+ let(:cluster) { create(:cluster, :group, :provided_by_gcp, enabled: true) }
+ let(:project) { create(:project, group: cluster.groups.first) }
+ end
+ end
+
+ context 'when instance level cluster is present and enabled' do
+ it_behaves_like 'before first deployment' do
+ let(:project) { create(:project) }
+ let(:cluster) { create(:cluster, :instance, :provided_by_gcp, enabled: true) }
+ end
+ end
+
+ context 'when project level cluster is present, but disabled' do
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp, enabled: false) }
+ let(:project) { cluster.project }
+ let(:service) { cluster.platform_kubernetes }
+ let(:deployment) { nil }
+
+ it 'returns false even if Knative is installed on cluster' do
+ stub_kubeclient_discover_knative_found(service.api_url)
+ function_finder = described_class.new(project)
+ synchronous_reactive_cache(function_finder)
+
+ expect(function_finder.knative_installed).to be false
+ end
+ end
+ end
+
+ context 'when reactive_caching is still fetching data' do
+ it 'returns "checking"' do
+ expect(described_class.new(project).knative_installed).to eq 'checking'
+ end
end
context 'when reactive_caching has finished' do
@@ -93,8 +153,8 @@ describe Projects::Serverless::FunctionsFinder do
*knative_services_finder.cache_args)
result = finder.service(cluster.environment_scope, cluster.project.name)
- expect(result).not_to be_empty
- expect(result["metadata"]["name"]).to be_eql(cluster.project.name)
+ expect(result).to be_present
+ expect(result.name).to be_eql(cluster.project.name)
end
it 'has metrics', :use_clean_rails_memory_store_caching do
diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb
index bf97e3cf7db..6a04ca0eb67 100644
--- a/spec/finders/projects_finder_spec.rb
+++ b/spec/finders/projects_finder_spec.rb
@@ -28,210 +28,227 @@ describe ProjectsFinder, :do_not_mock_admin_mode do
let(:params) { {} }
let(:current_user) { user }
let(:project_ids_relation) { nil }
- let(:finder) { described_class.new(params: params, current_user: current_user, project_ids_relation: project_ids_relation) }
+ let(:use_cte) { true }
+ let(:finder) { described_class.new(params: params.merge(use_cte: use_cte), current_user: current_user, project_ids_relation: project_ids_relation) }
subject { finder.execute }
- describe 'without a user' do
- let(:current_user) { nil }
+ shared_examples 'ProjectFinder#execute examples' do
+ describe 'without a user' do
+ let(:current_user) { nil }
- it { is_expected.to eq([public_project]) }
- end
-
- describe 'with a user' do
- describe 'without private projects' do
- it { is_expected.to match_array([public_project, internal_project]) }
+ it { is_expected.to eq([public_project]) }
end
- describe 'with private projects' do
- before do
- private_project.add_maintainer(user)
+ describe 'with a user' do
+ describe 'without private projects' do
+ it { is_expected.to match_array([public_project, internal_project]) }
end
- it { is_expected.to match_array([public_project, internal_project, private_project]) }
+ describe 'with private projects' do
+ before do
+ private_project.add_maintainer(user)
+ end
+
+ it { is_expected.to match_array([public_project, internal_project, private_project]) }
+ end
end
- end
- describe 'with project_ids_relation' do
- let(:project_ids_relation) { Project.where(id: internal_project.id) }
+ describe 'with project_ids_relation' do
+ let(:project_ids_relation) { Project.where(id: internal_project.id) }
- it { is_expected.to eq([internal_project]) }
- end
+ it { is_expected.to eq([internal_project]) }
+ end
- describe 'with id_after' do
- context 'only returns projects with a project id greater than given' do
- let(:params) { { id_after: internal_project.id }}
+ describe 'with id_after' do
+ context 'only returns projects with a project id greater than given' do
+ let(:params) { { id_after: internal_project.id }}
- it { is_expected.to eq([public_project]) }
+ it { is_expected.to eq([public_project]) }
+ end
end
- end
- describe 'with id_before' do
- context 'only returns projects with a project id less than given' do
- let(:params) { { id_before: public_project.id }}
+ describe 'with id_before' do
+ context 'only returns projects with a project id less than given' do
+ let(:params) { { id_before: public_project.id }}
- it { is_expected.to eq([internal_project]) }
+ it { is_expected.to eq([internal_project]) }
+ end
end
- end
- describe 'with both id_before and id_after' do
- context 'only returns projects with a project id less than given' do
- let!(:projects) { create_list(:project, 5, :public) }
- let(:params) { { id_after: projects.first.id, id_before: projects.last.id }}
+ describe 'with both id_before and id_after' do
+ context 'only returns projects with ids between the given id_after and id_before' do
+ let!(:projects) { create_list(:project, 5, :public) }
+ let(:params) { { id_after: projects.first.id, id_before: projects.last.id }}
- it { is_expected.to contain_exactly(*projects[1..-2]) }
+ it { is_expected.to contain_exactly(*projects[1..-2]) }
+ end
end
- end
- describe 'filter by visibility_level' do
- before do
- private_project.add_maintainer(user)
- end
+ describe 'filter by visibility_level' do
+ before do
+ private_project.add_maintainer(user)
+ end
- context 'private' do
- let(:params) { { visibility_level: Gitlab::VisibilityLevel::PRIVATE } }
+ context 'private' do
+ let(:params) { { visibility_level: Gitlab::VisibilityLevel::PRIVATE } }
- it { is_expected.to eq([private_project]) }
- end
+ it { is_expected.to eq([private_project]) }
+ end
- context 'internal' do
- let(:params) { { visibility_level: Gitlab::VisibilityLevel::INTERNAL } }
+ context 'internal' do
+ let(:params) { { visibility_level: Gitlab::VisibilityLevel::INTERNAL } }
- it { is_expected.to eq([internal_project]) }
+ it { is_expected.to eq([internal_project]) }
+ end
+
+ context 'public' do
+ let(:params) { { visibility_level: Gitlab::VisibilityLevel::PUBLIC } }
+
+ it { is_expected.to eq([public_project]) }
+ end
end
- context 'public' do
- let(:params) { { visibility_level: Gitlab::VisibilityLevel::PUBLIC } }
+ describe 'filter by tags' do
+ before do
+ public_project.tag_list.add('foo')
+ public_project.save!
+ end
+
+ let(:params) { { tag: 'foo' } }
it { is_expected.to eq([public_project]) }
end
- end
- describe 'filter by tags' do
- before do
- public_project.tag_list.add('foo')
- public_project.save!
+ describe 'filter by personal' do
+ let!(:personal_project) { create(:project, namespace: user.namespace) }
+ let(:params) { { personal: true } }
+
+ it { is_expected.to eq([personal_project]) }
end
- let(:params) { { tag: 'foo' } }
+ describe 'filter by search' do
+ let(:params) { { search: 'C' } }
- it { is_expected.to eq([public_project]) }
- end
+ it { is_expected.to eq([public_project]) }
+ end
- describe 'filter by personal' do
- let!(:personal_project) { create(:project, namespace: user.namespace) }
- let(:params) { { personal: true } }
+ describe 'filter by name for backward compatibility' do
+ let(:params) { { name: 'C' } }
- it { is_expected.to eq([personal_project]) }
- end
+ it { is_expected.to eq([public_project]) }
+ end
- describe 'filter by search' do
- let(:params) { { search: 'C' } }
+ describe 'filter by archived' do
+ let!(:archived_project) { create(:project, :public, :archived, name: 'E', path: 'E') }
- it { is_expected.to eq([public_project]) }
- end
+ context 'non_archived=true' do
+ let(:params) { { non_archived: true } }
- describe 'filter by name for backward compatibility' do
- let(:params) { { name: 'C' } }
+ it { is_expected.to match_array([public_project, internal_project]) }
+ end
- it { is_expected.to eq([public_project]) }
- end
+ context 'non_archived=false' do
+ let(:params) { { non_archived: false } }
- describe 'filter by archived' do
- let!(:archived_project) { create(:project, :public, :archived, name: 'E', path: 'E') }
+ it { is_expected.to match_array([public_project, internal_project, archived_project]) }
+ end
- context 'non_archived=true' do
- let(:params) { { non_archived: true } }
+ describe 'filter by archived only' do
+ let(:params) { { archived: 'only' } }
- it { is_expected.to match_array([public_project, internal_project]) }
- end
+ it { is_expected.to eq([archived_project]) }
+ end
- context 'non_archived=false' do
- let(:params) { { non_archived: false } }
+ describe 'filter by archived for backward compatibility' do
+ let(:params) { { archived: false } }
- it { is_expected.to match_array([public_project, internal_project, archived_project]) }
+ it { is_expected.to match_array([public_project, internal_project]) }
+ end
end
- describe 'filter by archived only' do
- let(:params) { { archived: 'only' } }
+ describe 'filter by trending' do
+ let!(:trending_project) { create(:trending_project, project: public_project) }
+ let(:params) { { trending: true } }
- it { is_expected.to eq([archived_project]) }
+ it { is_expected.to eq([public_project]) }
end
- describe 'filter by archived for backward compatibility' do
- let(:params) { { archived: false } }
+ describe 'filter by owned' do
+ let(:params) { { owned: true } }
+ let!(:owned_project) { create(:project, :private, namespace: current_user.namespace) }
- it { is_expected.to match_array([public_project, internal_project]) }
+ it { is_expected.to eq([owned_project]) }
end
- end
- describe 'filter by trending' do
- let!(:trending_project) { create(:trending_project, project: public_project) }
- let(:params) { { trending: true } }
-
- it { is_expected.to eq([public_project]) }
- end
+ describe 'filter by non_public' do
+ let(:params) { { non_public: true } }
- describe 'filter by owned' do
- let(:params) { { owned: true } }
- let!(:owned_project) { create(:project, :private, namespace: current_user.namespace) }
+ before do
+ private_project.add_developer(current_user)
+ end
- it { is_expected.to eq([owned_project]) }
- end
+ it { is_expected.to eq([private_project]) }
+ end
- describe 'filter by non_public' do
- let(:params) { { non_public: true } }
+ describe 'filter by starred' do
+ let(:params) { { starred: true } }
- before do
- private_project.add_developer(current_user)
- end
+ before do
+ current_user.toggle_star(public_project)
+ end
- it { is_expected.to eq([private_project]) }
- end
+ it { is_expected.to eq([public_project]) }
- describe 'filter by starred' do
- let(:params) { { starred: true } }
+ it 'returns only projects the user has access to' do
+ current_user.toggle_star(private_project)
- before do
- current_user.toggle_star(public_project)
+ is_expected.to eq([public_project])
+ expect(subject.count).to eq(1)
+ expect(subject.limit(1000).count).to eq(1)
+ end
end
- it { is_expected.to eq([public_project]) }
+ describe 'filter by without_deleted' do
+ let(:params) { { without_deleted: true } }
+ let!(:pending_delete_project) { create(:project, :public, pending_delete: true) }
- it 'returns only projects the user has access to' do
- current_user.toggle_star(private_project)
+ it { is_expected.to match_array([public_project, internal_project]) }
+ end
- is_expected.to eq([public_project])
+ describe 'sorting' do
+ let(:params) { { sort: 'name_asc' } }
+
+ it { is_expected.to eq([internal_project, public_project]) }
end
- end
- describe 'filter by without_deleted' do
- let(:params) { { without_deleted: true } }
- let!(:pending_delete_project) { create(:project, :public, pending_delete: true) }
+ describe 'with admin user' do
+ let(:user) { create(:admin) }
- it { is_expected.to match_array([public_project, internal_project]) }
- end
+ context 'admin mode enabled' do
+ before do
+ enable_admin_mode!(current_user)
+ end
- describe 'sorting' do
- let(:params) { { sort: 'name_asc' } }
+ it { is_expected.to match_array([public_project, internal_project, private_project, shared_project]) }
+ end
- it { is_expected.to eq([internal_project, public_project]) }
+ context 'admin mode disabled' do
+ it { is_expected.to match_array([public_project, internal_project]) }
+ end
+ end
end
- describe 'with admin user' do
- let(:user) { create(:admin) }
+ describe 'without CTE flag enabled' do
+ let(:use_cte) { false }
- context 'admin mode enabled' do
- before do
- enable_admin_mode!(current_user)
- end
+ it_behaves_like 'ProjectFinder#execute examples'
+ end
- it { is_expected.to match_array([public_project, internal_project, private_project, shared_project]) }
- end
+ describe 'with CTE flag enabled' do
+ let(:use_cte) { true }
- context 'admin mode disabled' do
- it { is_expected.to match_array([public_project, internal_project]) }
- end
+ it_behaves_like 'ProjectFinder#execute examples'
end
end
end
diff --git a/spec/finders/protected_branches_finder_spec.rb b/spec/finders/protected_branches_finder_spec.rb
new file mode 100644
index 00000000000..e6a2cf4577c
--- /dev/null
+++ b/spec/finders/protected_branches_finder_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ProtectedBranchesFinder do
+ let(:project) { create(:project) }
+ let!(:protected_branch) { create(:protected_branch, project: project) }
+ let!(:another_protected_branch) { create(:protected_branch, project: project) }
+ let!(:other_protected_branch) { create(:protected_branch) }
+ let(:params) { {} }
+
+ describe '#execute' do
+ subject { described_class.new(project, params).execute }
+
+ it 'returns all protected branches of the project by default' do
+ expect(subject).to match_array([protected_branch, another_protected_branch])
+ end
+
+ context 'when search param is present' do
+ let(:params) { { search: protected_branch.name } }
+
+ it 'filters by search param' do
+ expect(subject).to eq([protected_branch])
+ end
+ end
+
+ context 'when there are more protected branches than the limit' do
+ before do
+ stub_const("#{described_class}::LIMIT", 1)
+ end
+
+ it 'returns a limited number of protected branches of the project' do
+ expect(subject).to eq([another_protected_branch])
+ end
+ end
+ end
+end
diff --git a/spec/finders/resource_label_event_finder_spec.rb b/spec/finders/resource_label_event_finder_spec.rb
index c894387100d..5068ea19d63 100644
--- a/spec/finders/resource_label_event_finder_spec.rb
+++ b/spec/finders/resource_label_event_finder_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe ResourceLabelEventFinder do
- set(:user) { create(:user) }
- set(:issue_project) { create(:project) }
- set(:issue) { create(:issue, project: issue_project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue_project) { create(:project) }
+ let_it_be(:issue) { create(:issue, project: issue_project) }
describe '#execute' do
subject { described_class.new(user, issue).execute }
diff --git a/spec/fixtures/api/graphql/recursive-introspection.graphql b/spec/fixtures/api/graphql/recursive-introspection.graphql
index db970bb14b6..8346fbdde74 100644
--- a/spec/fixtures/api/graphql/recursive-introspection.graphql
+++ b/spec/fixtures/api/graphql/recursive-introspection.graphql
@@ -2,51 +2,11 @@ query allSchemaTypes {
__schema {
types {
fields {
- type{
+ type {
fields {
type {
fields {
- type {
- fields {
- type {
- fields {
- type {
- fields {
- type {
- fields {
- type {
- fields {
- type {
- fields {
- type {
- fields {
- type {
- fields {
- type {
- fields {
- type {
- fields {
- name
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
+ name
}
}
}
@@ -54,4 +14,4 @@ query allSchemaTypes {
}
}
}
-} \ No newline at end of file
+}
diff --git a/spec/fixtures/api/graphql/recursive-query-edges-node.graphql b/spec/fixtures/api/graphql/recursive-query-edges-node.graphql
new file mode 100644
index 00000000000..de790b1a6c7
--- /dev/null
+++ b/spec/fixtures/api/graphql/recursive-query-edges-node.graphql
@@ -0,0 +1,23 @@
+{
+ project(fullPath: "gitlab-org/gitlab") {
+ group {
+ projects {
+ edges {
+ node {
+ group {
+ projects {
+ edges {
+ node {
+ group {
+ description
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/spec/fixtures/api/graphql/recursive-query-nodes.graphql b/spec/fixtures/api/graphql/recursive-query-nodes.graphql
new file mode 100644
index 00000000000..c8d93f74f86
--- /dev/null
+++ b/spec/fixtures/api/graphql/recursive-query-nodes.graphql
@@ -0,0 +1,19 @@
+{
+ project(fullPath: "gitlab-org/gitlab") {
+ group {
+ projects {
+ nodes {
+ group {
+ projects {
+ nodes {
+ group {
+ description
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/spec/fixtures/api/graphql/recursive-query.graphql b/spec/fixtures/api/graphql/recursive-query.graphql
deleted file mode 100644
index d1616c4de6e..00000000000
--- a/spec/fixtures/api/graphql/recursive-query.graphql
+++ /dev/null
@@ -1,47 +0,0 @@
-{
- project(fullPath: "gitlab-org/gitlab-ce") {
- group {
- projects {
- edges {
- node {
- group {
- projects {
- edges {
- node {
- group {
- projects {
- edges {
- node {
- group {
- projects {
- edges {
- node {
- group {
- projects {
- edges {
- node {
- group {
- description
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
- }
-}
diff --git a/spec/fixtures/api/schemas/deployment.json b/spec/fixtures/api/schemas/deployment.json
index 0cfeadfe548..ac37dd084d3 100644
--- a/spec/fixtures/api/schemas/deployment.json
+++ b/spec/fixtures/api/schemas/deployment.json
@@ -50,7 +50,7 @@
"cluster": {
"oneOf": [
{ "type": "null" },
- { "$ref": "cluster_basic.json" }
+ { "$ref": "deployment_cluster.json" }
]
},
"manual_actions": {
diff --git a/spec/fixtures/api/schemas/cluster_basic.json b/spec/fixtures/api/schemas/deployment_cluster.json
index 6f0e77997f0..86497f98dcb 100644
--- a/spec/fixtures/api/schemas/cluster_basic.json
+++ b/spec/fixtures/api/schemas/deployment_cluster.json
@@ -10,6 +10,12 @@
{ "type": "null" },
{ "type": "string" }
]
+ },
+ "kubernetes_namespace": {
+ "oneOf": [
+ { "type": "null" },
+ { "type": "string" }
+ ]
}
},
"additionalProperties": false
diff --git a/spec/fixtures/api/schemas/entities/test_reports_comparer.json b/spec/fixtures/api/schemas/entities/test_reports_comparer.json
index d7880801c01..03812527f71 100644
--- a/spec/fixtures/api/schemas/entities/test_reports_comparer.json
+++ b/spec/fixtures/api/schemas/entities/test_reports_comparer.json
@@ -12,11 +12,13 @@
"properties": {
"total": { "type": "integer" },
"resolved": { "type": "integer" },
+ "errored": { "type": "integer" },
"failed": { "type": "integer" }
},
"required": [
"total",
"resolved",
+ "errored",
"failed"
]
},
diff --git a/spec/fixtures/api/schemas/entities/test_suite_comparer.json b/spec/fixtures/api/schemas/entities/test_suite_comparer.json
index d63fea1f0db..ecb331ae013 100644
--- a/spec/fixtures/api/schemas/entities/test_suite_comparer.json
+++ b/spec/fixtures/api/schemas/entities/test_suite_comparer.json
@@ -16,17 +16,17 @@
"properties": {
"total": { "type": "integer" },
"resolved": { "type": "integer" },
+ "errored": { "type": "integer" },
"failed": { "type": "integer" }
},
- "required": [
- "total",
- "resolved",
- "failed"
- ]
+ "required": ["total", "resolved", "errored", "failed"]
},
"new_failures": { "type": "array", "items": { "$ref": "test_case.json" } },
"resolved_failures": { "type": "array", "items": { "$ref": "test_case.json" } },
- "existing_failures": { "type": "array", "items": { "$ref": "test_case.json" } }
+ "existing_failures": { "type": "array", "items": { "$ref": "test_case.json" } },
+ "new_errors": { "type": "array", "items": { "$ref": "test_case.json" } },
+ "resolved_errors": { "type": "array", "items": { "$ref": "test_case.json" } },
+ "existing_errors": { "type": "array", "items": { "$ref": "test_case.json" } }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/error_tracking/update_issue.json b/spec/fixtures/api/schemas/error_tracking/update_issue.json
index 72514ce647d..75f2c1152d9 100644
--- a/spec/fixtures/api/schemas/error_tracking/update_issue.json
+++ b/spec/fixtures/api/schemas/error_tracking/update_issue.json
@@ -6,9 +6,15 @@
"properties" : {
"result": {
"type": "object",
+ "required" : [
+ "status",
+ "updated",
+ "closed_issue_iid"
+ ],
"properties": {
"status": { "type": "string" },
- "updated": { "type": "boolean" }
+ "updated": { "type": "boolean" },
+ "closed_issue_iid": { "type": ["integer", "null"] }
}
}
},
diff --git a/spec/fixtures/api/schemas/job/job_details.json b/spec/fixtures/api/schemas/job/job_details.json
index cdf7b049ab6..ae05ecea9ef 100644
--- a/spec/fixtures/api/schemas/job/job_details.json
+++ b/spec/fixtures/api/schemas/job/job_details.json
@@ -15,6 +15,12 @@
"terminal_path": { "type": "string" },
"trigger": { "$ref": "trigger.json" },
"deployment_status": { "$ref": "deployment_status.json" },
+ "deployment_cluster": {
+ "oneOf": [
+ { "$ref": "../deployment_cluster.json" },
+ { "type": "null" }
+ ]
+ },
"runner": { "$ref": "runner.json" },
"runners": { "$ref": "runners.json" },
"has_trace": { "type": "boolean" },
diff --git a/spec/fixtures/api/schemas/public_api/v4/members.json b/spec/fixtures/api/schemas/public_api/v4/members.json
new file mode 100644
index 00000000000..38ad64ad061
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/members.json
@@ -0,0 +1,22 @@
+{
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties" : {
+ "id": { "type": "integer" },
+ "name": { "type": "string" },
+ "username": { "type": "string" },
+ "state": { "type": "string" },
+ "avatar_url": { "type": ["string", "null"] },
+ "web_url": { "type": ["string", "null"] },
+ "access_level": { "type": "integer" },
+ "expires_at": { "type": ["date", "null"] },
+ "is_using_seat": { "type": "boolean" }
+ },
+ "required": [
+ "id", "name", "username", "state",
+ "web_url", "access_level", "expires_at"
+ ],
+ "additionalProperties": false
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/membership.json b/spec/fixtures/api/schemas/public_api/v4/membership.json
new file mode 100644
index 00000000000..3412076f84a
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/membership.json
@@ -0,0 +1,10 @@
+{
+ "type": "object",
+ "properties" : {
+ "source_id": { "type": "integer" },
+ "source_name": { "type": "string" },
+ "source_type": { "type": "string" },
+ "access_level": { "type": "integer" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/memberships.json b/spec/fixtures/api/schemas/public_api/v4/memberships.json
new file mode 100644
index 00000000000..54c98b9cb99
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/memberships.json
@@ -0,0 +1,4 @@
+{
+ "type": "array",
+ "items": { "$ref": "membership.json" }
+}
diff --git a/spec/fixtures/api/schemas/variable.json b/spec/fixtures/api/schemas/variable.json
index 305071a6b3f..c49d7d8c5ea 100644
--- a/spec/fixtures/api/schemas/variable.json
+++ b/spec/fixtures/api/schemas/variable.json
@@ -13,6 +13,7 @@
"value": { "type": "string" },
"masked": { "type": "boolean" },
"protected": { "type": "boolean" },
+ "variable_type": { "type": "string" },
"environment_scope": { "type": "string", "optional": true }
},
"additionalProperties": false
diff --git a/spec/fixtures/authentication/adfs_saml_response.xml b/spec/fixtures/authentication/adfs_saml_response.xml
new file mode 100644
index 00000000000..1056d73638c
--- /dev/null
+++ b/spec/fixtures/authentication/adfs_saml_response.xml
@@ -0,0 +1,58 @@
+<?xml version="1.0"?>
+<samlp:Response xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol" ID="jVFQbyEpSfUwqhZtJtarIaGoshwuAQMDwLoiMhzJXsv" Version="2.0" IssueInstant="2020-01-23T06:12:41.896Z" Destination="https://gitlab-example.com/users/auth/saml/callback" Consent="urn:oasis:names:tc:SAML:2.0:consent:unspecified" InResponseTo="feooghajnhofcmogakmlhpkohnmikicnfhdnjlc">
+ <Issuer xmlns="urn:oasis:names:tc:SAML:2.0:assertion">https://example.com/adfs/services/trust</Issuer>
+ <samlp:Status>
+ <samlp:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success"/>
+ </samlp:Status>
+ <Assertion xmlns="urn:oasis:names:tc:SAML:2.0:assertion" ID="emmCjammnYdAbMWDuMAJeZvQIMBayeeYqqwvQoDclKE" IssueInstant="2020-01-23T06:12:41.896Z" Version="2.0">
+ <Issuer>https://example.com/adfs/services/trust</Issuer>
+ <ds:Signature xmlns:ds="http://www.w3.org/2000/09/xmldsig#">
+ <ds:SignedInfo>
+ <ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>
+ <ds:SignatureMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"/>
+ <ds:Reference URI="#jVFQbyEpSfUwqhZtJtarIaGoshwuAQMDwLoiMhzJXsv">
+ <ds:Transforms>
+ <ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"/>
+ <ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>
+ </ds:Transforms>
+ <ds:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha256"/>
+ <ds:DigestValue>uHuSry39P16Yh7srS32xESmj4Lw</ds:DigestValue>
+ </ds:Reference>
+ </ds:SignedInfo>
+ <ds:SignatureValue>fdghdfggfd=</ds:SignatureValue>
+ <KeyInfo xmlns="http://www.w3.org/2000/09/xmldsig#">
+ <ds:X509Data>
+ <ds:X509Certificate>dfghjkl</ds:X509Certificate>
+ </ds:X509Data>
+ </KeyInfo>
+ </ds:Signature>
+ <Subject>
+ <NameID Format="urn:oasis:names:tc:SAML:2.0:nameid-format:transient">example@example.com</NameID>
+ <SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer">
+ <SubjectConfirmationData InResponseTo="cfeooghajnhofcmogakmlhpkohnmikicnfhdnjlc" NotOnOrAfter="2020-01-23T06:17:41.896Z" Recipient="https://gitlab-example.com/users/auth/saml/callback"/>
+ </SubjectConfirmation>
+ </Subject>
+ <Conditions NotBefore="2020-01-23T06:10:41.818Z" NotOnOrAfter="2020-01-23T07:10:41.818Z">
+ <AudienceRestriction>
+ <Audience>https://gitlab-example.com</Audience>
+ </AudienceRestriction>
+ </Conditions>
+ <AttributeStatement>
+ <Attribute Name="http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress">
+ <AttributeValue>example@example.com</AttributeValue>
+ </Attribute>
+ <Attribute Name="http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name">
+ <AttributeValue>Example User</AttributeValue>
+ </Attribute>
+ <Attribute Name="http://schemas.xmlsoap.org/claims/Group">
+ <AttributeValue>Group 1</AttributeValue>
+ <AttributeValue>Another Group</AttributeValue>
+ </Attribute>
+ </AttributeStatement>
+ <AuthnStatement AuthnInstant="2020-01-23T06:12:41.756Z" SessionIndex="perdkjfskdjfksdiertusfsdfsddeurtherukjdfgkdffg">
+ <AuthnContext>
+ <AuthnContextClassRef>urn:federation:authentication:windows</AuthnContextClassRef>
+ </AuthnContext>
+ </AuthnStatement>
+ </Assertion>
+</samlp:Response>
diff --git a/spec/fixtures/config/mail_room_disabled.yml b/spec/fixtures/config/mail_room_disabled.yml
index 97f8cff051f..538f2a35f81 100644
--- a/spec/fixtures/config/mail_room_disabled.yml
+++ b/spec/fixtures/config/mail_room_disabled.yml
@@ -9,3 +9,14 @@ test:
ssl: true
start_tls: false
mailbox: "inbox"
+
+ service_desk_email:
+ enabled: false
+ address: "gitlab-incoming+%{key}@gmail.com"
+ user: "gitlab-incoming@gmail.com"
+ password: "[REDACTED]"
+ host: "imap.gmail.com"
+ port: 993
+ ssl: true
+ start_tls: false
+ mailbox: "inbox"
diff --git a/spec/fixtures/config/mail_room_enabled.yml b/spec/fixtures/config/mail_room_enabled.yml
index 9c94649244d..e1f4c2f44de 100644
--- a/spec/fixtures/config/mail_room_enabled.yml
+++ b/spec/fixtures/config/mail_room_enabled.yml
@@ -9,3 +9,14 @@ test:
ssl: true
start_tls: false
mailbox: "inbox"
+
+ service_desk_email:
+ enabled: true
+ address: "gitlab-incoming+%{key}@gmail.com"
+ user: "gitlab-incoming@gmail.com"
+ password: "[REDACTED]"
+ host: "imap.gmail.com"
+ port: 993
+ ssl: true
+ start_tls: false
+ mailbox: "inbox"
diff --git a/spec/fixtures/emails/.gitattributes b/spec/fixtures/emails/.gitattributes
new file mode 100644
index 00000000000..6c6ba98a0dd
--- /dev/null
+++ b/spec/fixtures/emails/.gitattributes
@@ -0,0 +1,2 @@
+# Do not mangle line endings or signature will be invalid
+valid_reply_signed_smime.eml eol=crlf \ No newline at end of file
diff --git a/spec/fixtures/emails/envelope_to_header_with_angle_brackets.eml b/spec/fixtures/emails/envelope_to_header_with_angle_brackets.eml
new file mode 100644
index 00000000000..809e885666c
--- /dev/null
+++ b/spec/fixtures/emails/envelope_to_header_with_angle_brackets.eml
@@ -0,0 +1,32 @@
+Return-Path: <jake@example.com>
+Received: from myserver.example.com ([unix socket]) by myserver (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
+Received: from mail.example.com (mail.example.com [IPv6:2607:f8b0:4001:c03::234]) by myserver.example.com (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@example.com>; Thu, 13 Jun 2013 17:03:50 -0400
+Received: by myserver.example.com with SMTP id f4so21977375iea.25 for <incoming+gitlabhq/gitlabhq@appmail.example.com>; Thu, 13 Jun 2013 14:03:48 -0700
+Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
+From: "jake@example.com" <jake@example.com>
+To: "support@example.com" <support@example.com>
+Subject: Insert hilarious subject line here
+Date: Tue, 26 Nov 2019 14:22:41 +0000
+Message-ID: <7e2296f83dbf4de388cbf5f56f52c11f@EXDAG29-1.EXCHANGE.INT>
+Accept-Language: de-DE, en-US
+Content-Language: de-DE
+X-MS-Has-Attach:
+X-MS-TNEF-Correlator:
+x-ms-exchange-transport-fromentityheader: Hosted
+x-originating-ip: [62.96.54.178]
+Content-Type: multipart/alternative;
+ boundary="_000_7e2296f83dbf4de388cbf5f56f52c11fEXDAG291EXCHANGEINT_"
+MIME-Version: 1.0
+Envelope-To: <incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com>
+
+--_000_7e2296f83dbf4de388cbf5f56f52c11fEXDAG291EXCHANGEINT_
+Content-Type: text/plain; charset="iso-8859-1"
+Content-Transfer-Encoding: quoted-printable
+
+
+
+--_000_7e2296f83dbf4de388cbf5f56f52c11fEXDAG291EXCHANGEINT_
+Content-Type: text/html; charset="iso-8859-1"
+Content-Transfer-Encoding: quoted-printable
+
+Look, a message with some alternate headers! We should really support them.
diff --git a/spec/fixtures/emails/valid_reply_signed_smime.eml b/spec/fixtures/emails/valid_reply_signed_smime.eml
new file mode 100644
index 00000000000..0c5e2c439ae
--- /dev/null
+++ b/spec/fixtures/emails/valid_reply_signed_smime.eml
@@ -0,0 +1,294 @@
+User-Agent: Microsoft-MacOutlook/10.22.0.200209
+Date: Mon, 17 Feb 2020 22:56:47 +0100
+Subject: Re: htmltest | test issue (#1)
+From: "Louzan Martinez, Diego (ext) (SI BP R&D ZG)"
+ <diego.louzan.ext@siemens.com>
+To: Administrator / htmltest
+ <dlouzan.dummy+c034670b1623e617e15a3df64223d363@gmail.com>
+Message-ID: <012E37D9-2A3F-4AC8-B79A-871F42914D86@siemens.com>
+Thread-Topic: htmltest | test issue (#1)
+References: <reply-c034670b1623e617e15a3df64223d363@169.254.169.254>
+ <issue_451@169.254.169.254>
+ <note_1797@169.254.169.254>
+In-Reply-To: <note_1797@169.254.169.254>
+Content-type: multipart/signed;
+ protocol="application/pkcs7-signature";
+ micalg=sha256;
+ boundary="B_3664825007_1904734766"
+MIME-Version: 1.0
+
+--B_3664825007_1904734766
+Content-type: multipart/mixed;
+ boundary="B_3664825007_384940722"
+
+
+--B_3664825007_384940722
+Content-type: multipart/alternative;
+ boundary="B_3664825007_1519466360"
+
+
+--B_3664825007_1519466360
+Content-type: text/plain;
+ charset="UTF-8"
+Content-transfer-encoding: quoted-printable
+
+Me too, with an attachment
+
+=20
+
+From: Administrator <dlouzan.dummy@gmail.com>
+Reply to: Administrator / htmltest <dlouzan.dummy+c034670b1623e617e15a3df64=
+223d363@gmail.com>
+Date: Monday, 17 February 2020 at 22:55
+To: "Louzan Martinez, Diego (ext) (SOP IT STG XS)" <diego.louzan.ext@siemen=
+s.com>
+Subject: Re: htmltest | test issue (#1)
+
+=20
+
+Administrator commented:=20
+
+I pity the foo !!!
+
+=E2=80=94=20
+Reply to this email directly or view it on GitLab.=20
+You're receiving this email because of your account on 169.254.169.254. If =
+you'd like to receive fewer emails, you can unsubscribe from this thread or =
+adjust your notification settings.=20
+
+
+--B_3664825007_1519466360
+Content-type: text/html;
+ charset="UTF-8"
+Content-transfer-encoding: quoted-printable
+
+<html xmlns:o=3D"urn:schemas-microsoft-com:office:office" xmlns:w=3D"urn:schema=
+s-microsoft-com:office:word" xmlns:m=3D"http://schemas.microsoft.com/office/20=
+04/12/omml" xmlns=3D"http://www.w3.org/TR/REC-html40"><head><meta http-equiv=3DC=
+ontent-Type content=3D"text/html; charset=3Dutf-8"><meta name=3DGenerator content=3D=
+"Microsoft Word 15 (filtered medium)"><title>GitLab</title><style><!--
+/* Font Definitions */
+@font-face
+ {font-family:"Cambria Math";
+ panose-1:2 4 5 3 5 4 6 3 2 4;}
+@font-face
+ {font-family:Calibri;
+ panose-1:2 15 5 2 2 2 4 3 2 4;}
+/* Style Definitions */
+p.MsoNormal, li.MsoNormal, div.MsoNormal
+ {margin:0cm;
+ margin-bottom:.0001pt;
+ font-size:11.0pt;
+ font-family:"Calibri",sans-serif;}
+a:link, span.MsoHyperlink
+ {mso-style-priority:99;
+ color:blue;
+ text-decoration:underline;}
+span.EmailStyle19
+ {mso-style-type:personal-reply;
+ font-family:"Calibri",sans-serif;
+ color:windowtext;}
+.MsoChpDefault
+ {mso-style-type:export-only;
+ font-size:10.0pt;}
+@page WordSection1
+ {size:612.0pt 792.0pt;
+ margin:72.0pt 72.0pt 72.0pt 72.0pt;}
+div.WordSection1
+ {page:WordSection1;}
+--></style></head><body lang=3Den-ES link=3Dblue vlink=3Dpurple><div class=3DWordSe=
+ction1><p class=3DMsoNormal><span lang=3DEN-US style=3D'mso-fareast-language:EN-US=
+'>Me too, with an attachment<o:p></o:p></span></p><p class=3DMsoNormal><span s=
+tyle=3D'mso-fareast-language:EN-US'><o:p>&nbsp;</o:p></span></p><div style=3D'bo=
+rder:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0cm 0cm 0cm'><p class=
+=3DMsoNormal><b><span style=3D'font-size:12.0pt;color:black'>From: </span></b><s=
+pan style=3D'font-size:12.0pt;color:black'>Administrator &lt;dlouzan.dummy@gma=
+il.com&gt;<br><b>Reply to: </b>Administrator / htmltest &lt;dlouzan.dummy+c0=
+34670b1623e617e15a3df64223d363@gmail.com&gt;<br><b>Date: </b>Monday, 17 Febr=
+uary 2020 at 22:55<br><b>To: </b>&quot;Louzan Martinez, Diego (ext) (SOP IT =
+STG XS)&quot; &lt;diego.louzan.ext@siemens.com&gt;<br><b>Subject: </b>Re: ht=
+mltest | test issue (#1)<o:p></o:p></span></p></div><div><p class=3DMsoNormal>=
+<o:p>&nbsp;</o:p></p></div><div><p><span style=3D'color:#777777'><a href=3D"http=
+://localhost:3000/root">Administrator</a> commented: <o:p></o:p></span></p><=
+div><p>I pity the foo !!!<o:p></o:p></p></div></div><div style=3D'margin-top:7=
+.5pt'><p><span style=3D'font-size:12.0pt;color:#777777'>=E2=80=94 <br>Reply to this =
+email directly or <a href=3D"http://localhost:3000/root/htmltest/issues/1#note=
+_1797">view it on GitLab</a>. <br>You're receiving this email because of you=
+r account on 169.254.169.254. If you'd like to receive fewer emails, you can=
+ <a href=3D"http://localhost:3000/sent_notifications/c034670b1623e617e15a3df64=
+223d363/unsubscribe">unsubscribe</a> from this thread or adjust your notific=
+ation settings. <o:p></o:p></span></p></div></div></body></html>
+
+--B_3664825007_1519466360--
+
+
+--B_3664825007_384940722
+Content-type: image/png; name="gitlab_logo.png";
+ x-mac-creator="4F50494D";
+ x-mac-type="504E4766"
+Content-disposition: attachment;
+ filename="gitlab_logo.png"
+Content-transfer-encoding: base64
+
+
+iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAIAAABMXPacAAAABnRSTlMA/wD/AP83WBt9AAAN
+1UlEQVR4AexcZXPjSBTcXxOTvMy7xxfGZWaGaJmZmZmZmZmZmdnMzB7JNwv1qs6VOJY0tuWU
+p/rz5PW0q0f99JQakcxK6eItQGZlBMgIkFkZATICZFZGgIwAmZURICMAshitiybrexXblk5D
+NnOk2i3G6bCvmYcJWuaMCevVohPAsWGx6h/Zd/wrd2xbWf0EcB3YqsqmfnK0LZseYZCIBEBW
+E/5p4Mp+wtCvJWO3Vqufv8dtHNoZCOo6ZYd1ahEJ4LtzRZ1fC+pTF9T1P7hZnQQIvHqiKW0I
+BFU5lPfiCREJYFs5C4r7Cfu6BdVJAOeutVEErfPGRRhGFAIgu1Xft0VUfYaBbRmXI1ItFuvz
+Gkd0jyKo65oXNupEIYD//g11QZ2o+tRF9QJP7lUPAYJvX2haNIkmmKv0Xj0rCgHsa+dDWRgA
+x+al1eT5Z9+mCglaF02KsGyKBWCcdsOA1hXWZ6A7MB5X2vtPwG8a07tCgvoehchsSLEA/sd3
+sNtUWJ+mpEHgxaN0FyD08Y2mVbMKCarzavluXkyxAI5NS3AplcG5fVXa+8+h7TEI4kSWSgEY
+t9NQ3j5GfcZhXRivJ439JxgwT+gfg6C+dymymlMmQOD5Q01xgxj1acoaBV8/S2P/+fJe2+b3
+GATV+bV9d6+lTADc88FFxIZz9/r0FcB9fE+VBO2r56RGAMYL7ZFYMI3qwfp9aek/oZB5Snks
+dtD4cthSIEDw1VNNaaMq69O0bBp8/yot/Uf1Wdv+zyoJqgvr+h/eSoEAzl3roIjYcB3Yko4C
+eE4fxK31eAja1y9MogDQHhnZPU4BTGP74jiTZv6DwpYZw+MkaBgEja9kCRB89xLaI1VC27p5
+6NPb9BIgrP2m6/hP1eyg8fX0XlIFcO3fHE9lAPeRnWnmP+ePqbIV8RN0bF6WHAGgPdKHkwDm
+iQPZUDB9XoAhy5zRnAga6Y78Gl81SLVHYkPb9o/Q149p4z96ja5LDieCmpKG0PhKuACuwzvi
+rwze1LtP7EsXAbyXT6lylFw5OnesTrQA0B4ZwLU4DPPUIWw4lA4PQIx1wQQeBI3Du7JeT8IF
+CH35AO0RTtC2/yus/hIR/UImva5bPg+CmrLGwTfPEi6A+/heiCfckK3wnD0sfgF818+rc2ty
+ogZw7tmQWAHYMG6P0FzLAlhmjoggJG7/YW1LpvImaBrVk2vjqwb39shfvOvTdfo3rFOJ2n8s
+Jn3PYn7soPGVQAE8Zw6B//BBNp5nOi5q/7l9GSbM+AFPMCZKAGiPCIF13liYZxLhsq2YJZCg
+aVxfNhggLgC0R/7lXxzMMxm0IvUfu0Xfp0wAO2h8vUuIAJ4L0B7hD3UOnmc6I04BYMJMINxH
+d5EVANojY/jWRH6eifyCCTPBME8aBI0vYgKEDbg9kkukPphnEtWCCTPhgMYXSQG8V05De0Qg
+1Hk1YZ5JFAsmzArrCWUHja+T+4kKwLLWhRPJFAfzTCJbjo2LCRI0T8ONrzAJAaA90r2AYH36
+3iUwz5TiBRNmg9sTJKjt8HdY/ZWYAL4bvNsjMeaZropHgMDzB5ri+gQJQuOLiACsbSm0R4jB
+vmqOiPxn6wriBC2zRkYQIiAAfIBHFnr4kE9kH+CRAIcP+Wpw/QCPBGCe6aYYP8AjBfiQj78A
+0B75W5YIiORDPufOtQkiaJkLH/LxFYB1W22j2xjL5MaWSsIoU9iGt/LfuYQbAKnEvau2cZ0S
+RNBKFzE2vTABtNfDKxqEh8jC5VLyoBWmdnVVubXUeamBKremsXXdULkiIezwoS2uy349I0gA
+5uFctD0LzaFQuQSVZxEGneXoitM1vGBIAeydlYgGakQxk0Lbspg7EyIsy1eAgJ051RLtyEJb
+ZWiyAg0mX6W/P6XJU6Tq9NW5Cl9fCtGkeeGDmqBAW+Tfj+5YXsRr4CkAq7+N9tT+vsvOLLRB
+gcbIiWsQLpdhu1T9nRoBDKXK0GAZ+d/+KBlap8CH9v3odilY1QWeAjBPFuEtMH5psJJCw6Sk
+XUji6FozVS5k61STvP8MlaLlFNopgaNj7k3lJUDQyZxp82MLgAQtpAhXTKfMhdQ5Ci95/5Gg
+eRTaIf3fuZ0oivhMnAVgjffR3rq/tgBsl6EZFHEXMpSlwIX0JeT8B6x/Kr54ZdGHtlvJaq5w
+FoB5tvx/u4ARbZaj8UQvZFpi71wzBf7TkZD/wOmPlaONv6w/CsyDWRwFCLmZcx2iNwIN1lJo
+pIygC/n6UfiBJNn+04eo/wyXodUUnH4UmFOlEb+VgwCs6THaVz96IwC+YZZSaCixCzmUdBfS
+F2P/kRM7/SEStBgu3oqwpxaru8lBAObFmkr2AkghnaWjC1k7EPQfyffMtV0a+8SYR/PjFiDs
+ZS50jb3dr3Q2RfBlAC7Ul8K2kCT/yVZ4euMATMj6J/7KXLHBnG6Fg21cArCW52h/w9jbEU9n
++IFEX6pMjgC6YmVwkJxQ5pKj9XDxxsSe2qzhbnwCvNpY9XagwSoK3z9EXMjWMSku9LfM2h78
+h3Dmig3myZI4BAj7mYs9q9yLfDqjs7x9kuFC6my5pxcJ/6GjM1eVYM62iwRdVQjA2t6gA405
+CEAuneHHEhyOEu4/RRQR/4HMxQF767LGh1UJ8GY7t00hnU0QfCHTEmuiXQi/pWoH/iMsc20C
+6+cA5vmqmAIgP3OlP8dNIZ0phKYzOsvTR6nmMP/La2ZNuP+MgMzFGcz5zpGQq1IBWOsrdLA5
+530hnS0TkM7AhYqVCfSfQuw/ClKZiw/2N2QN9ysVgHm5Hu2EW4UHpGiusHRGS3BEgkhM3H/M
+bbH/SAVlrlmQuXiCebygcgHOdeSxI5l0Bi7UG7uQPEH+4+oJ/kMoc/HAiaJKBYh+/uF3GWwU
+lM7wIwp+UEmEANoCKjBQQThz8cBuZeUCHPqdx46E0xktsbQj6kLgP214+Q9krhX8rT/qYbRy
+C7oxXOjukM4W8U1ndBZ+UFFly8n7Tw++/oOJzIfMJRTMpd6VCsBanqFjuWQ0wDfVTIq/CxVS
+IvKfaZC5BOPwn6z+Tswgpr+DTpaS+WNb+KYzWkrWhfBWptY18bAUn4t3HM5cckHWDzieD+8m
+Y7ajXd+Ym6PQLorAZbCOYzoDF+qpxKZB0H+c3fEFwCtzraEInP4uOXOtnHV8iPuVZNiLexI8
+QhmpdBYcqNCScyFNPhUYoOCeuaRoCYmLd39j9uW6SMjNdS6IZY0PfiQDgRVI0Tzu6YyWmtsI
+diHwn1ZK7v4jQbMFZS54D/P9ZSTL8B1P9xmZBzN+zcfxxjbZ997hYG4u5OpByoXkzm5KRHO0
+/kmCM9du5ffBUI9W8CdKTJD9fBQd/VdoOhvLLZ0FsAsVUAT8J4/y9+foP6MFZ67Df7Dv90aQ
+n8AHGvCegLncD+2U8ddgNdd0JjW3FuxCf+PZU+w/XP7uMGGZa6eUudCNNT9NwL+rCTq+T2vt
+ayAonQ2RcHCh7sJdSI5nTxGd8MwFKff79IPfkrB/WcYiVn0ZnSxJTjrDjy7afEqY/yjw7Cmi
+k5K5juex/7V3Dz5yhVEUwP+cce2GjWu7cW3btm03qm27QRXVtt2ZbO8op/r2vp7qS+a+uHHP
+5r7z252ze2N7UUrZZxMB0FBw6GxQUJ1JdXlEXSHcn3oB7g/MFSPN5a75fyEAQGG5QIHUWe9I
+wCskBYa4Qrg/rfADSNZces1Poeb/swAoKEBnM4Lq7H372B32Ct2RAUxb3B/KXHzN/wcBcFCA
+zor92sQVIic01eTzprg/pLn0mn/Hgz/mKVC4moECobMgV4gd8snnTfWM5fTL/G1ZlK75HgTA
+QUGu7eJAOhNG6RMaboDXKWOuhTAXUfM9CICGAnTGD/m4AR7MNQunn6j5HgTAQgEv5CnQGTHk
+IwZ4MNfE+C80iE2o+Z4GgBTSUOgFKKg6G41vl5JDPmKANyKAuVDzO6HmexAAAQVSZxjy1cMV
+ogd4OP0yc1uimgs1Hx9n8zIAHgp4GSwQnUWZCQ0xwBNzzYO5yJrvfwCAwmmBQklGZ8SQDwM8
+t7mm4cVL1HzvA+ChEE5OcOoMc2JqgAdzjcU3O4ma70EAPBQup/a3cUEBOhse168QMcCDuSLB
+aj7xu329CICHAnTWHzrThnz6AA//+30VcxE1388AeChAZz0jxJAPAzynuYia738AxPPqRgYK
+sWJ1Fv7xCgmvlAHMtwM8mGsSzKXW/AIIQIUCdKYP+fQBnkzYVkQcNb8ian5hBQAoNMPX5nc6
+Gwyd6UM+DPB0cyk1vwACUKAAnfWJ6kO+YgZ4vcRcePHqNb9gAlCggJfBTPyaLveQzzHA6wZz
+OWu+BaBAATpThnx3McBzmctR8y0ABQrQmXvIhwGe21zrSqfOjUfNtwB0KEBnUegsN+SLOQd4
+MJde8y0ARwqAQj6DudBZZsiXcA5gekSSs2EureZbAAoUquKFPDWns++HfBjgwVyo+RfmoeZb
+ADQUcjobk9HZN0M+DPBgLtT8I0TNtwDcUFiW0dm3Qz7cn4E5c2Vq/gCm5lsAChSgs+wVwgAP
+5krX/LV8zbcAFCisjiRnxpI9wrkhX3qAlxCsibnYD+1YAAQUJkQ/dozL8ZEBzIf28eTYaHJt
+Ga7mWwAEFPalNtdNDo89bphIfwBdzLWhBlnzLQD+JwoH+7/qVvFlpwqpPT34mm8B8M/n15+P
+Lf90cGHRpxf4RwvAHt8DsMcCsADssQAsAHssAAvAni8AV5380akCdgAAAABJRU5ErkJggg==
+--B_3664825007_384940722--
+
+--B_3664825007_1904734766
+Content-type: application/pkcs7-signature; name="smime.p7s"
+Content-transfer-encoding: base64
+Content-disposition: attachment;
+ filename="smime.p7s"
+
+MIIRpwYJKoZIhvcNAQcCoIIRmDCCEZQCAQExDzANBglghkgBZQMEAgEFADALBgkqhkiG9w0B
+BwGggg8VMIIHojCCBYqgAwIBAgIEZ5a6PTANBgkqhkiG9w0BAQsFADCBtjELMAkGA1UEBhMC
+REUxDzANBgNVBAgMBkJheWVybjERMA8GA1UEBwwITXVlbmNoZW4xEDAOBgNVBAoMB1NpZW1l
+bnMxETAPBgNVBAUTCFpaWlpaWkE2MR0wGwYDVQQLDBRTaWVtZW5zIFRydXN0IENlbnRlcjE/
+MD0GA1UEAww2U2llbWVucyBJc3N1aW5nIENBIE1lZGl1bSBTdHJlbmd0aCBBdXRoZW50aWNh
+dGlvbiAyMDE2MB4XDTE5MTEyMTE0NDQ0N1oXDTIwMTEyMTE0NDQ0N1owdzERMA8GA1UEBRMI
+WjAwM0gwOFQxDjAMBgNVBCoMBURpZWdvMRgwFgYDVQQEDA9Mb3V6YW4gTWFydGluZXoxGDAW
+BgNVBAoMD1NpZW1lbnMtUGFydG5lcjEeMBwGA1UEAwwVTG91emFuIE1hcnRpbmV6IERpZWdv
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuInpNaC7NRYD+0pOpHDz2pk9xmPt
+JGj860SF6Nmn6Eu9EMYKEDfneC6z5QcH+mPS2d0VWgqVVGbRXSPsxJtbi9TCWjQUZdHglEZK
+z9zxoFDh2dvW5/+TOT5Jf78FXyqak0YtY6+oMjQ/i9RUqPL7sIlyXLrBYrILzQ9Afo+7bXZg
+v3ypp6xtqAV2ctHzQWFi0onJzxLVYguiVb7fFF9rBEMvSZonuw5tvOwJIhbe5FDFOrDcfbyU
+ofZ/wikIZ+A+CE5GryXuuQmGxJaC2QqOkRAWQDzLDx9nG+rKiEs5OvlfEZC7EV1PyjZ93coM
+faCVdlAgcFZ5fvd37CjyjKl+1QIDAQABo4IC9DCCAvAwggEEBggrBgEFBQcBAQSB9zCB9DAy
+BggrBgEFBQcwAoYmaHR0cDovL2FoLnNpZW1lbnMuY29tL3BraT9aWlpaWlpBNi5jcnQwQQYI
+KwYBBQUHMAKGNWxkYXA6Ly9hbC5zaWVtZW5zLm5ldC9DTj1aWlpaWlpBNixMPVBLST9jQUNl
+cnRpZmljYXRlMEkGCCsGAQUFBzAChj1sZGFwOi8vYWwuc2llbWVucy5jb20vQ049WlpaWlpa
+QTYsbz1UcnVzdGNlbnRlcj9jQUNlcnRpZmljYXRlMDAGCCsGAQUFBzABhiRodHRwOi8vb2Nz
+cC5wa2ktc2VydmljZXMuc2llbWVucy5jb20wHwYDVR0jBBgwFoAU+BVdRwxsd3tyxAIXkWii
+tvdqCUQwDAYDVR0TAQH/BAIwADBFBgNVHSAEPjA8MDoGDSsGAQQBoWkHAgIEAQMwKTAnBggr
+BgEFBQcCARYbaHR0cDovL3d3dy5zaWVtZW5zLmNvbS9wa2kvMIHKBgNVHR8EgcIwgb8wgbyg
+gbmggbaGJmh0dHA6Ly9jaC5zaWVtZW5zLmNvbS9wa2k/WlpaWlpaQTYuY3JshkFsZGFwOi8v
+Y2wuc2llbWVucy5uZXQvQ049WlpaWlpaQTYsTD1QS0k/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
+TGlzdIZJbGRhcDovL2NsLnNpZW1lbnMuY29tL0NOPVpaWlpaWkE2LG89VHJ1c3RjZW50ZXI/
+Y2VydGlmaWNhdGVSZXZvY2F0aW9uTGlzdDAdBgNVHSUEFjAUBggrBgEFBQcDAgYIKwYBBQUH
+AwQwDgYDVR0PAQH/BAQDAgeAMFUGA1UdEQROMEygLAYKKwYBBAGCNxQCA6AeDBxkaWVnby5s
+b3V6YW4uZXh0QHNpZW1lbnMuY29tgRxkaWVnby5sb3V6YW4uZXh0QHNpZW1lbnMuY29tMB0G
+A1UdDgQWBBQj8k8aqZey68w8ALYKGJSGMt5hZDANBgkqhkiG9w0BAQsFAAOCAgEAFDHqxpb1
+R9cB4noC9vx09bkNbmXCpVfl3XCQUmAWTznC0nwEssTTjo0PWuIV4C3jnsp0MRUeHZ6lsyhZ
+OzS1ETwYgvj6wzjb8RF3wgn7N/JOvFGaErMz5HZpKOfzGiNpW6/Rmd4hsRDjAwOVQOXUTqc/
+0Bj3FMoLRCSWSnTp5HdyvrY2xOKHfTrTjzmcLdFaKE2F5n7+dBkwCKVfzut8CqfVq/I7ks4m
+D1IHk93/P6l9U34R2FHPt6zRTNZcWmDirRSlMH4L18CnfiNPuDN/PtRYlt3Vng5EdYN0VCg2
+NM/uees0U4ingCb0NFjg66uQ/tjfPQk55MN4Wpls4N6TkMoTCWLiqZzYTGdmVQexzroL6940
+tmMr8LoN3TpPf0OdvdKEpyH7fzsx5QlmQyywIWec6X+Fx6+l0g91VJnPEtqACpfZIBZtviHl
+gfX298w+SsvBK8C48Pqs8Ijh7tLrCxx7VMLVHZqwWWPK53ga+CDWmjoSQPxi+CPZF7kao6N5
+4GrJWwSHlHh6WzTbLyLvTJZZ775Utp4W8s8xMUsQJ413iYzEaC8FcSeNjSk5UiDDiHrKmzpM
+tbApD3pUXStblUMKYGTG1Mj9BcEBFkCdoGlw/ulszIrKFfOyRNDG3Ay+Dj/oMjoKsJphu3px
+wyft82rTer7UW/I7o0h0DAG4lkMwggdrMIIFU6ADAgECAgR5nlqfMA0GCSqGSIb3DQEBCwUA
+MIGeMQswCQYDVQQGEwJERTEPMA0GA1UECAwGQmF5ZXJuMREwDwYDVQQHDAhNdWVuY2hlbjEQ
+MA4GA1UECgwHU2llbWVuczERMA8GA1UEBRMIWlpaWlpaQTMxHTAbBgNVBAsMFFNpZW1lbnMg
+VHJ1c3QgQ2VudGVyMScwJQYDVQQDDB5TaWVtZW5zIElzc3VpbmcgQ0EgRUUgRW5jIDIwMTYw
+HhcNMTkwOTI3MDgwMTM5WhcNMjAwOTI3MDgwMTM3WjB3MREwDwYDVQQFEwhaMDAzSDA4VDEO
+MAwGA1UEKgwFRGllZ28xGDAWBgNVBAQMD0xvdXphbiBNYXJ0aW5lejEYMBYGA1UECgwPU2ll
+bWVucy1QYXJ0bmVyMR4wHAYDVQQDDBVMb3V6YW4gTWFydGluZXogRGllZ28wggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCyby5qKzZIrGYWRqxnaAyMt/a/uc0uMk0F3MjwxvPM
+vh5DllUpqx0l8ZDakDjPhlEXTeoL4DHNgmh+CDCs76CppM3cNG/1W1Ajo/L2iwMoXaxYuQ/F
+q7ED+02KEkWX2DDVVG3fhrUGP20QAq77xPDptmVWZnUnuobZBNYkC49Xfl9HJvkJL8P0+Jqb
+Eae7p4roiEr7wNkGriwrVXgA3oPNF/W+OuI76JTNTajS/6PAK/GeqIvLjfuBXpdBZTY031nE
+Cztca8vI1jUjQzVhS+0dWpvpfhkVumbvOnid8DI9lapYsX8dpZFsa3ya+T3tjUdGSOOKi0kg
+lWf/XYyyfhmDAgMBAAGjggLVMIIC0TAdBgNVHQ4EFgQUprhTCDwNLfPImpSfWdq+QvPTo9Mw
+JwYDVR0RBCAwHoEcZGllZ28ubG91emFuLmV4dEBzaWVtZW5zLmNvbTAOBgNVHQ8BAf8EBAMC
+BDAwLAYDVR0lBCUwIwYIKwYBBQUHAwQGCisGAQQBgjcKAwQGCysGAQQBgjcKAwQBMIHKBgNV
+HR8EgcIwgb8wgbyggbmggbaGJmh0dHA6Ly9jaC5zaWVtZW5zLmNvbS9wa2k/WlpaWlpaQTMu
+Y3JshkFsZGFwOi8vY2wuc2llbWVucy5uZXQvQ049WlpaWlpaQTMsTD1QS0k/Y2VydGlmaWNh
+dGVSZXZvY2F0aW9uTGlzdIZJbGRhcDovL2NsLnNpZW1lbnMuY29tL0NOPVpaWlpaWkEzLG89
+VHJ1c3RjZW50ZXI/Y2VydGlmaWNhdGVSZXZvY2F0aW9uTGlzdDBFBgNVHSAEPjA8MDoGDSsG
+AQQBoWkHAgIEAQMwKTAnBggrBgEFBQcCARYbaHR0cDovL3d3dy5zaWVtZW5zLmNvbS9wa2kv
+MAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUoassbqB68NPCTeof8R4hivwMre8wggEEBggr
+BgEFBQcBAQSB9zCB9DAyBggrBgEFBQcwAoYmaHR0cDovL2FoLnNpZW1lbnMuY29tL3BraT9a
+WlpaWlpBMy5jcnQwQQYIKwYBBQUHMAKGNWxkYXA6Ly9hbC5zaWVtZW5zLm5ldC9DTj1aWlpa
+WlpBMyxMPVBLST9jQUNlcnRpZmljYXRlMEkGCCsGAQUFBzAChj1sZGFwOi8vYWwuc2llbWVu
+cy5jb20vQ049WlpaWlpaQTMsbz1UcnVzdGNlbnRlcj9jQUNlcnRpZmljYXRlMDAGCCsGAQUF
+BzABhiRodHRwOi8vb2NzcC5wa2ktc2VydmljZXMuc2llbWVucy5jb20wDQYJKoZIhvcNAQEL
+BQADggIBAF98ZMNg28LgkwdjOdvOGbC1QitsWjZTyotmQESF0nClDLUhb0O5675vVixntbrf
+eB8xy1+KRiadk40GnAIJ0YzmNl4Tav6hPYv9VBWe5olsWG7C4qB3Q/SwhvW/e+owxv1cBra8
+R3oRudiN81eTZQHyNghRephVqQG/dpPYqydoANfIhEpHa79QlpaCAeYl4896AZOS8HYbkDFs
+hLdv7sEHtl79YuSWI1wBjbJl70c0Sb4wLRgCPuHyQj2Uw/vQ5xJlEvBDZAIXXe1TP/nqiuY6
+7nweJbbeqfFE6ZP3kCe+mEIWGSaO0iThZyLGer8fHs1XiEmhhPgvC7P7KodzpXU6+hX+ZzbD
+DxEjFfetV5sh0aNSXG9xx4hZmS9bpImBGR8MvZ7cgxqItvLtY2xvfUbYW244d4RcWesaCDq3
+ZEIo6uCIzOzJAwjUdLIac+lLV0rxiHmb7O3cQ19kjpWDB31hmfrus/TKJ55pBKVWBX5m/mFv
+K8Ep5USpGrNS0EzOP7I1kQZv2VsvAhSxk/m5FMLpDy8T0O8YgbLypTXoeJFWCF6RduSjVsaZ
+lkAtTQYud683pjyOMxJXaQUYGU1PmEYSOonMkVsT9aBcxYkXLp+Ln/+8G0OCYu7dRdwnj+Ut
+7yR/ltxtgDcaFApCb0qBTKbgbqZk1fASmkOp+kbdYmoUMYICVjCCAlICAQEwgb8wgbYxCzAJ
+BgNVBAYTAkRFMQ8wDQYDVQQIDAZCYXllcm4xETAPBgNVBAcMCE11ZW5jaGVuMRAwDgYDVQQK
+DAdTaWVtZW5zMREwDwYDVQQFEwhaWlpaWlpBNjEdMBsGA1UECwwUU2llbWVucyBUcnVzdCBD
+ZW50ZXIxPzA9BgNVBAMMNlNpZW1lbnMgSXNzdWluZyBDQSBNZWRpdW0gU3RyZW5ndGggQXV0
+aGVudGljYXRpb24gMjAxNgIEZ5a6PTANBglghkgBZQMEAgEFAKBpMC8GCSqGSIb3DQEJBDEi
+BCAOR58AbNfSrI+vtMs+dgAQtn3IVZ3RjYC5hz3j9k+6TTAYBgkqhkiG9w0BCQMxCwYJKoZI
+hvcNAQcBMBwGCSqGSIb3DQEJBTEPFw0yMDAyMTcyMTU2NDdaMA0GCSqGSIb3DQEBAQUABIIB
+AHLSBcFHhNHPevbwqvA2ecuVb/aKnj45CFF6l8esP1H5DRm1ee5qMKuIS84NFuFC9RUENNhW
+DBzsB+BVGz64o1f8QgIklYVrIJ4JZ0q1abNG7NbkVKWIpS3CQo//YWShUTYg+JpKx4YbahGR
+sP5zbufbU4eagrrqBChjPTLy+njdjwCNu0XPykBTKOOf6BMjnS33AYjHJyh83JOY7rw3IDLx
+8POQH4g5EMRpl9354s0rEkIezMt7pfUAsqY3QnQ8hvlE4KTikPQ+tvLMK1l/ffcLAP8BdBNI
+YA3ikb3qCoGNSLKieYzNnBPhNOIJELUtEEaljAFZYMQzMKCbI4JdiDs=
+
+--B_3664825007_1904734766--
diff --git a/spec/fixtures/group_export.tar.gz b/spec/fixtures/group_export.tar.gz
index 83e360d7cc2..d76c6ddba25 100644
--- a/spec/fixtures/group_export.tar.gz
+++ b/spec/fixtures/group_export.tar.gz
Binary files differ
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/project.json b/spec/fixtures/lib/gitlab/import_export/complex/project.json
index 7d784fbd54f..4d6211a1251 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/complex/project.json
@@ -2398,7 +2398,7 @@
"requested_at": null,
"user": {
"id": 16,
- "email": "maritza_schoen@block.ca",
+ "email": "bernard_willms@gitlabexample.com",
"username": "bernard_willms"
}
},
@@ -2418,7 +2418,7 @@
"requested_at": null,
"user": {
"id": 6,
- "email": "shaina@koelpindenesik.com",
+ "email": "saul_will@gitlabexample.com",
"username": "saul_will"
}
},
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/complex/group.json b/spec/fixtures/lib/gitlab/import_export/group_exports/complex/group.json
index b48386a3fb9..504daae8abd 100644
--- a/spec/fixtures/lib/gitlab/import_export/group_exports/complex/group.json
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/complex/group.json
@@ -1,7 +1,7 @@
{
"name": "ymg09t5704clnxnqfgaj2h098gz4r7gyx4wc3fzmlqj1en24zf",
"path": "ymg09t5704clnxnqfgaj2h098gz4r7gyx4wc3fzmlqj1en24zf",
- "owner_id": null,
+ "owner_id": 123,
"created_at": "2019-11-20 17:01:53 UTC",
"updated_at": "2019-11-20 17:05:44 UTC",
"description": "Group Description",
@@ -18,7 +18,7 @@
"ldap_sync_last_successful_update_at": null,
"ldap_sync_last_sync_at": null,
"lfs_enabled": null,
- "parent_id": null,
+ "parent_id": 7,
"shared_runners_minutes_limit": null,
"repository_size_limit": null,
"require_two_factor_authentication": false,
@@ -33,6 +33,8 @@
"extra_shared_runners_minutes_limit": null,
"last_ci_minutes_notification_at": null,
"last_ci_minutes_usage_notification_level": null,
+ "runners_token": "token",
+ "runners_token_encrypted": "encrypted",
"subgroup_creation_level": 1,
"emails_disabled": null,
"max_pages_size": null,
@@ -260,25 +262,101 @@
],
"boards": [
{
- "id": 56,
+ "id": 173,
"project_id": null,
- "created_at": "2019-11-20T17:27:16.808Z",
- "updated_at": "2019-11-20T17:27:16.808Z",
- "name": "Development",
+ "created_at": "2020-02-11T14:35:51.561Z",
+ "updated_at": "2020-02-11T14:35:51.561Z",
+ "name": "first board",
"milestone_id": null,
"group_id": 4351,
"weight": null,
- "labels": []
- },
- {
- "id": 57,
- "project_id": null,
- "created_at": "2019-11-20T17:27:41.118Z",
- "updated_at": "2019-11-20T17:27:41.118Z",
- "name": "Board!",
- "milestone_id": 7638,
- "group_id": 4351,
- "weight": null,
+ "lists": [
+ {
+ "id": 189,
+ "board_id": 173,
+ "label_id": 271,
+ "list_type": "label",
+ "position": 0,
+ "created_at": "2020-02-11T14:35:57.131Z",
+ "updated_at": "2020-02-11T14:35:57.131Z",
+ "user_id": null,
+ "milestone_id": null,
+ "max_issue_count": 0,
+ "max_issue_weight": 0,
+ "label": {
+ "id": 271,
+ "title": "TSL",
+ "color": "#58796f",
+ "project_id": null,
+ "created_at": "2019-11-20T17:02:20.541Z",
+ "updated_at": "2020-02-06T15:44:52.048Z",
+ "template": false,
+ "description": null,
+ "group_id": 4351,
+ "type": "GroupLabel",
+ "priorities": []
+ },
+ "board": {
+ "id": 173,
+ "project_id": null,
+ "created_at": "2020-02-11T14:35:51.561Z",
+ "updated_at": "2020-02-11T14:35:51.561Z",
+ "name": "hi",
+ "milestone_id": null,
+ "group_id": 4351,
+ "weight": null
+ }
+ },
+ {
+ "id": 190,
+ "board_id": 173,
+ "label_id": 272,
+ "list_type": "label",
+ "position": 1,
+ "created_at": "2020-02-11T14:35:57.868Z",
+ "updated_at": "2020-02-11T14:35:57.868Z",
+ "user_id": null,
+ "milestone_id": null,
+ "max_issue_count": 0,
+ "max_issue_weight": 0,
+ "label": {
+ "id": 272,
+ "title": "Sosync",
+ "color": "#110320",
+ "project_id": null,
+ "created_at": "2019-11-20T17:02:20.532Z",
+ "updated_at": "2020-02-06T15:44:52.057Z",
+ "template": false,
+ "description": null,
+ "group_id": 4351,
+ "type": "GroupLabel",
+ "priorities": []
+ },
+ "board": {
+ "id": 173,
+ "project_id": null,
+ "created_at": "2020-02-11T14:35:51.561Z",
+ "updated_at": "2020-02-11T14:35:51.561Z",
+ "name": "hi",
+ "milestone_id": null,
+ "group_id": 4351,
+ "weight": null
+ }
+ },
+ {
+ "id": 188,
+ "board_id": 173,
+ "label_id": null,
+ "list_type": "closed",
+ "position": null,
+ "created_at": "2020-02-11T14:35:51.593Z",
+ "updated_at": "2020-02-11T14:35:51.593Z",
+ "user_id": null,
+ "milestone_id": null,
+ "max_issue_count": 0,
+ "max_issue_weight": 0
+ }
+ ],
"labels": []
}
],
@@ -348,7 +426,7 @@
"override": false,
"user": {
"id": 206,
- "email": "margaret.bergnaum@reynolds.us",
+ "email": "gwendolyn_robel@gitlabexample.com",
"username": "gwendolyn_robel"
}
},
@@ -394,7 +472,7 @@
"override": false,
"user": {
"id": 1624,
- "email": "nakesha.herzog@powlowski.com",
+ "email": "adriene.mcclure@gitlabexample.com",
"username": "adriene.mcclure"
}
},
@@ -482,7 +560,18 @@
"type": null,
"author": {
"name": "Administrator"
- }
+ },
+ "award_emoji": [
+ {
+ "id": 12,
+ "name": "drum",
+ "user_id": 1,
+ "awardable_type": "Note",
+ "awardable_id": 44170,
+ "created_at": "2019-11-05T15:32:21.287Z",
+ "updated_at": "2019-11-05T15:32:21.287Z"
+ }
+ ]
},
{
"id": 44168,
@@ -574,6 +663,17 @@
"name": "Administrator"
}
}
+ ],
+ "award_emoji": [
+ {
+ "id": 12,
+ "name": "thumbsup",
+ "user_id": 1,
+ "awardable_type": "Epic",
+ "awardable_id": 13622,
+ "created_at": "2019-11-05T15:37:21.287Z",
+ "updated_at": "2019-11-05T15:37:21.287Z"
+ }
]
},
{
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/internal/group.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/internal/group.json
new file mode 100644
index 00000000000..f747088f87e
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/internal/group.json
@@ -0,0 +1,166 @@
+{
+ "id": 283,
+ "name": "internal",
+ "path": "internal",
+ "owner_id": null,
+ "created_at": "2020-02-12T16:56:34.924Z",
+ "updated_at": "2020-02-12T16:56:38.710Z",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 10,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": null,
+ "shared_runners_minutes_limit": null,
+ "repository_size_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "plan_id": null,
+ "project_creation_level": 2,
+ "trial_ends_on": null,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "extra_shared_runners_minutes_limit": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_pages_size": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null,
+ "children": [
+ {
+ "id": 284,
+ "name": "public",
+ "path": "public",
+ "owner_id": null,
+ "created_at": "2020-02-12T17:33:00.575Z",
+ "updated_at": "2020-02-12T17:33:00.575Z",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 20,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 283,
+ "shared_runners_minutes_limit": null,
+ "repository_size_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "plan_id": null,
+ "project_creation_level": 2,
+ "trial_ends_on": null,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "extra_shared_runners_minutes_limit": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_pages_size": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null
+ },
+ {
+ "id": 285,
+ "name": "internal",
+ "path": "internal",
+ "owner_id": null,
+ "created_at": "2020-02-12T17:33:00.575Z",
+ "updated_at": "2020-02-12T17:33:00.575Z",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 10,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 283,
+ "shared_runners_minutes_limit": null,
+ "repository_size_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "plan_id": null,
+ "project_creation_level": 2,
+ "trial_ends_on": null,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "extra_shared_runners_minutes_limit": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_pages_size": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null
+ },
+ {
+ "id": 286,
+ "name": "private",
+ "path": "private",
+ "owner_id": null,
+ "created_at": "2020-02-12T17:33:00.575Z",
+ "updated_at": "2020-02-12T17:33:00.575Z",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 0,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 283,
+ "shared_runners_minutes_limit": null,
+ "repository_size_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "plan_id": null,
+ "project_creation_level": 2,
+ "trial_ends_on": null,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "extra_shared_runners_minutes_limit": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_pages_size": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null
+ }
+ ]
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/private/group.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/private/group.json
new file mode 100644
index 00000000000..1328e596fa5
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/private/group.json
@@ -0,0 +1,166 @@
+{
+ "id": 283,
+ "name": "private",
+ "path": "private",
+ "owner_id": null,
+ "created_at": "2020-02-12T16:56:34.924Z",
+ "updated_at": "2020-02-12T16:56:38.710Z",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 0,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": null,
+ "shared_runners_minutes_limit": null,
+ "repository_size_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "plan_id": null,
+ "project_creation_level": 2,
+ "trial_ends_on": null,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "extra_shared_runners_minutes_limit": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_pages_size": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null,
+ "children": [
+ {
+ "id": 284,
+ "name": "public",
+ "path": "public",
+ "owner_id": null,
+ "created_at": "2020-02-12T17:33:00.575Z",
+ "updated_at": "2020-02-12T17:33:00.575Z",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 20,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 283,
+ "shared_runners_minutes_limit": null,
+ "repository_size_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "plan_id": null,
+ "project_creation_level": 2,
+ "trial_ends_on": null,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "extra_shared_runners_minutes_limit": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_pages_size": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null
+ },
+ {
+ "id": 285,
+ "name": "internal",
+ "path": "internal",
+ "owner_id": null,
+ "created_at": "2020-02-12T17:33:00.575Z",
+ "updated_at": "2020-02-12T17:33:00.575Z",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 10,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 283,
+ "shared_runners_minutes_limit": null,
+ "repository_size_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "plan_id": null,
+ "project_creation_level": 2,
+ "trial_ends_on": null,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "extra_shared_runners_minutes_limit": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_pages_size": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null
+ },
+ {
+ "id": 286,
+ "name": "private",
+ "path": "private",
+ "owner_id": null,
+ "created_at": "2020-02-12T17:33:00.575Z",
+ "updated_at": "2020-02-12T17:33:00.575Z",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 0,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 283,
+ "shared_runners_minutes_limit": null,
+ "repository_size_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "plan_id": null,
+ "project_creation_level": 2,
+ "trial_ends_on": null,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "extra_shared_runners_minutes_limit": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_pages_size": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null
+ }
+ ]
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/public/group.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/public/group.json
new file mode 100644
index 00000000000..29020e92004
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/public/group.json
@@ -0,0 +1,166 @@
+{
+ "id": 283,
+ "name": "public",
+ "path": "public",
+ "owner_id": null,
+ "created_at": "2020-02-12T16:56:34.924Z",
+ "updated_at": "2020-02-12T16:56:38.710Z",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 20,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": null,
+ "shared_runners_minutes_limit": null,
+ "repository_size_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "plan_id": null,
+ "project_creation_level": 2,
+ "trial_ends_on": null,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "extra_shared_runners_minutes_limit": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_pages_size": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null,
+ "children": [
+ {
+ "id": 284,
+ "name": "public",
+ "path": "public",
+ "owner_id": null,
+ "created_at": "2020-02-12T17:33:00.575Z",
+ "updated_at": "2020-02-12T17:33:00.575Z",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 20,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 283,
+ "shared_runners_minutes_limit": null,
+ "repository_size_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "plan_id": null,
+ "project_creation_level": 2,
+ "trial_ends_on": null,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "extra_shared_runners_minutes_limit": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_pages_size": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null
+ },
+ {
+ "id": 285,
+ "name": "internal",
+ "path": "internal",
+ "owner_id": null,
+ "created_at": "2020-02-12T17:33:00.575Z",
+ "updated_at": "2020-02-12T17:33:00.575Z",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 10,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 283,
+ "shared_runners_minutes_limit": null,
+ "repository_size_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "plan_id": null,
+ "project_creation_level": 2,
+ "trial_ends_on": null,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "extra_shared_runners_minutes_limit": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_pages_size": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null
+ },
+ {
+ "id": 286,
+ "name": "private",
+ "path": "private",
+ "owner_id": null,
+ "created_at": "2020-02-12T17:33:00.575Z",
+ "updated_at": "2020-02-12T17:33:00.575Z",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 0,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 283,
+ "shared_runners_minutes_limit": null,
+ "repository_size_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "plan_id": null,
+ "project_creation_level": 2,
+ "trial_ends_on": null,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "extra_shared_runners_minutes_limit": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_pages_size": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null
+ }
+ ]
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/with_duplicates.json b/spec/fixtures/lib/gitlab/import_export/with_duplicates.json
new file mode 100644
index 00000000000..ed2e1821dd3
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/with_duplicates.json
@@ -0,0 +1,43 @@
+{
+ "simple": 42,
+ "duped_hash_with_id": {
+ "id": 0,
+ "v1": 1
+ },
+ "duped_hash_no_id": {
+ "v1": 1
+ },
+ "duped_array": [
+ "v2"
+ ],
+ "array": [
+ {
+ "duped_hash_with_id": {
+ "id": 0,
+ "v1": 1
+ }
+ },
+ {
+ "duped_array": [
+ "v2"
+ ]
+ },
+ {
+ "duped_hash_no_id": {
+ "v1": 1
+ }
+ }
+ ],
+ "nested": {
+ "duped_hash_with_id": {
+ "id": 0,
+ "v1": 1
+ },
+ "duped_array": [
+ "v2"
+ ],
+ "array": [
+ "don't touch"
+ ]
+ }
+}
\ No newline at end of file
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json
index ac40f2dcd13..038f5ac5d4e 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json
@@ -16,7 +16,8 @@
"label": { "type": "string" },
"track": { "type": "string" },
"prometheus_endpoint_path": { "type": "string" },
- "metric_id": { "type": "number" }
+ "metric_id": { "type": "number" },
+ "edit_path": { "type": ["string", "null"] }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json
index f4afb4cbffc..d16fcd40359 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json
@@ -10,7 +10,8 @@
"panels": {
"type": "array",
"items": { "$ref": "panels.json" }
- }
+ },
+ "has_custom_metrics": { "type": "boolean" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/lsif.json.gz b/spec/fixtures/lsif.json.gz
new file mode 100644
index 00000000000..3f74588cd7e
--- /dev/null
+++ b/spec/fixtures/lsif.json.gz
Binary files differ
diff --git a/spec/fixtures/sentry/issue_link_sample_response.json b/spec/fixtures/sentry/global_integration_link_sample_response.json
index f7f3220e83d..f7f3220e83d 100644
--- a/spec/fixtures/sentry/issue_link_sample_response.json
+++ b/spec/fixtures/sentry/global_integration_link_sample_response.json
diff --git a/spec/fixtures/sentry/issue_sample_response.json b/spec/fixtures/sentry/issue_sample_response.json
index a320a21de34..43d55f584b8 100644
--- a/spec/fixtures/sentry/issue_sample_response.json
+++ b/spec/fixtures/sentry/issue_sample_response.json
@@ -38,7 +38,7 @@
},
"firstSeen": "2018-11-06T21:19:55Z",
"hasSeen": false,
- "id": "503504",
+ "id": "11",
"isBookmarked": false,
"isPublic": false,
"isSubscribed": true,
@@ -72,232 +72,64 @@
"shortId": "PUMP-STATION-1",
"stats": {
"24h": [
- [
- 1541451600.0,
- 557
- ],
- [
- 1541455200.0,
- 473
- ],
- [
- 1541458800.0,
- 914
- ],
- [
- 1541462400.0,
- 991
- ],
- [
- 1541466000.0,
- 925
- ],
- [
- 1541469600.0,
- 881
- ],
- [
- 1541473200.0,
- 182
- ],
- [
- 1541476800.0,
- 490
- ],
- [
- 1541480400.0,
- 820
- ],
- [
- 1541484000.0,
- 322
- ],
- [
- 1541487600.0,
- 836
- ],
- [
- 1541491200.0,
- 565
- ],
- [
- 1541494800.0,
- 758
- ],
- [
- 1541498400.0,
- 880
- ],
- [
- 1541502000.0,
- 677
- ],
- [
- 1541505600.0,
- 381
- ],
- [
- 1541509200.0,
- 814
- ],
- [
- 1541512800.0,
- 329
- ],
- [
- 1541516400.0,
- 446
- ],
- [
- 1541520000.0,
- 731
- ],
- [
- 1541523600.0,
- 111
- ],
- [
- 1541527200.0,
- 926
- ],
- [
- 1541530800.0,
- 772
- ],
- [
- 1541534400.0,
- 400
- ],
- [
- 1541538000.0,
- 943
- ]
+ [1541451600.0, 557],
+ [1541455200.0, 473],
+ [1541458800.0, 914],
+ [1541462400.0, 991],
+ [1541466000.0, 925],
+ [1541469600.0, 881],
+ [1541473200.0, 182],
+ [1541476800.0, 490],
+ [1541480400.0, 820],
+ [1541484000.0, 322],
+ [1541487600.0, 836],
+ [1541491200.0, 565],
+ [1541494800.0, 758],
+ [1541498400.0, 880],
+ [1541502000.0, 677],
+ [1541505600.0, 381],
+ [1541509200.0, 814],
+ [1541512800.0, 329],
+ [1541516400.0, 446],
+ [1541520000.0, 731],
+ [1541523600.0, 111],
+ [1541527200.0, 926],
+ [1541530800.0, 772],
+ [1541534400.0, 400],
+ [1541538000.0, 943]
],
"30d": [
- [
- 1538870400.0,
- 565
- ],
- [
- 1538956800.0,
- 12862
- ],
- [
- 1539043200.0,
- 15617
- ],
- [
- 1539129600.0,
- 10809
- ],
- [
- 1539216000.0,
- 15065
- ],
- [
- 1539302400.0,
- 12927
- ],
- [
- 1539388800.0,
- 12994
- ],
- [
- 1539475200.0,
- 13139
- ],
- [
- 1539561600.0,
- 11838
- ],
- [
- 1539648000.0,
- 12088
- ],
- [
- 1539734400.0,
- 12338
- ],
- [
- 1539820800.0,
- 12768
- ],
- [
- 1539907200.0,
- 12816
- ],
- [
- 1539993600.0,
- 15356
- ],
- [
- 1540080000.0,
- 10910
- ],
- [
- 1540166400.0,
- 12306
- ],
- [
- 1540252800.0,
- 12912
- ],
- [
- 1540339200.0,
- 14700
- ],
- [
- 1540425600.0,
- 11890
- ],
- [
- 1540512000.0,
- 11684
- ],
- [
- 1540598400.0,
- 13510
- ],
- [
- 1540684800.0,
- 12625
- ],
- [
- 1540771200.0,
- 12811
- ],
- [
- 1540857600.0,
- 13180
- ],
- [
- 1540944000.0,
- 14651
- ],
- [
- 1541030400.0,
- 14161
- ],
- [
- 1541116800.0,
- 12612
- ],
- [
- 1541203200.0,
- 14316
- ],
- [
- 1541289600.0,
- 14742
- ],
- [
- 1541376000.0,
- 12505
- ],
- [
- 1541462400.0,
- 14180
- ]
+ [1538870400.0, 565],
+ [1538956800.0, 12862],
+ [1539043200.0, 15617],
+ [1539129600.0, 10809],
+ [1539216000.0, 15065],
+ [1539302400.0, 12927],
+ [1539388800.0, 12994],
+ [1539475200.0, 13139],
+ [1539561600.0, 11838],
+ [1539648000.0, 12088],
+ [1539734400.0, 12338],
+ [1539820800.0, 12768],
+ [1539907200.0, 12816],
+ [1539993600.0, 15356],
+ [1540080000.0, 10910],
+ [1540166400.0, 12306],
+ [1540252800.0, 12912],
+ [1540339200.0, 14700],
+ [1540425600.0, 11890],
+ [1540512000.0, 11684],
+ [1540598400.0, 13510],
+ [1540684800.0, 12625],
+ [1540771200.0, 12811],
+ [1540857600.0, 13180],
+ [1540944000.0, 14651],
+ [1541030400.0, 14161],
+ [1541116800.0, 12612],
+ [1541203200.0, 14316],
+ [1541289600.0, 14742],
+ [1541376000.0, 12505],
+ [1541462400.0, 14180]
]
},
"status": "unresolved",
diff --git a/spec/fixtures/sentry/plugin_link_sample_response.json b/spec/fixtures/sentry/plugin_link_sample_response.json
new file mode 100644
index 00000000000..2e07e412844
--- /dev/null
+++ b/spec/fixtures/sentry/plugin_link_sample_response.json
@@ -0,0 +1,6 @@
+{
+ "message": "Successfully linked issue.",
+ "link": "https://gitlab.com/test/tanuki-inc/issues/3",
+ "id": 3,
+ "label": "GL-3"
+}
diff --git a/spec/fixtures/valid.po b/spec/fixtures/valid.po
index 28826f05595..bb2dfa419bb 100644
--- a/spec/fixtures/valid.po
+++ b/spec/fixtures/valid.po
@@ -267,11 +267,11 @@ msgstr "Eventos de notificaciones personalizadas"
msgid "Custom notification levels are the same as participating levels. With custom notification levels you will also receive notifications for select events. To find out more, check out %{notification_link}."
msgstr "Los niveles de notificación personalizados son los mismos que los niveles participantes. Con los niveles de notificación personalizados, también recibirá notificaciones para eventos seleccionados. Para obtener más información, consulte %{notification_link}."
-msgid "Cycle Analytics"
-msgstr "Cycle Analytics"
+msgid "Value Stream Analytics"
+msgstr "Value Stream Analytics"
-msgid "Cycle Analytics gives an overview of how much time it takes to go from idea to production in your project."
-msgstr "Cycle Analytics ofrece una visión general de cuánto tiempo tarda en pasar de idea a producción en su proyecto."
+msgid "Value Stream Analytics gives an overview of how much time it takes to go from idea to production in your project."
+msgstr "Value Stream Analytics ofrece una visión general de cuánto tiempo tarda en pasar de idea a producción en su proyecto."
msgid "CycleAnalyticsStage|Code"
msgstr "Código"
@@ -412,8 +412,8 @@ msgstr "Importar repositorio"
msgid "Interval Pattern"
msgstr "Patrón de intervalo"
-msgid "Introducing Cycle Analytics"
-msgstr "Introducción a Cycle Analytics"
+msgid "Introducing Value Stream Analytics"
+msgstr "Introducción a Value Stream Analytics"
msgid "Jobs for last month"
msgstr "Trabajos del mes pasado"
diff --git a/spec/frontend/.eslintrc.yml b/spec/frontend/.eslintrc.yml
index e78a38d31f5..c8aacca5ef2 100644
--- a/spec/frontend/.eslintrc.yml
+++ b/spec/frontend/.eslintrc.yml
@@ -6,6 +6,12 @@ plugins:
extends:
- 'plugin:jest/recommended'
settings:
+ # We have to teach eslint-plugin-import what node modules we use
+ # otherwise there is an error when it tries to resolve them
+ import/core-modules:
+ - events
+ - fs
+ - path
import/resolver:
jest:
jestConfigFile: 'jest.config.js'
diff --git a/spec/frontend/alerts_service_settings/components/__snapshots__/alerts_service_form_spec.js.snap b/spec/frontend/alerts_service_settings/components/__snapshots__/alerts_service_form_spec.js.snap
new file mode 100644
index 00000000000..36ec0badade
--- /dev/null
+++ b/spec/frontend/alerts_service_settings/components/__snapshots__/alerts_service_form_spec.js.snap
@@ -0,0 +1,9 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`AlertsServiceForm with default values renders "authorization-key" input 1`] = `"<gl-form-input-stub id=\\"authorization-key\\" readonly=\\"true\\" value=\\"abcedfg123\\"></gl-form-input-stub>"`;
+
+exports[`AlertsServiceForm with default values renders "url" input 1`] = `"<gl-form-input-stub id=\\"url\\" readonly=\\"true\\" value=\\"https://gitlab.com/endpoint-url\\"></gl-form-input-stub>"`;
+
+exports[`AlertsServiceForm with default values renders toggle button 1`] = `"<toggle-button-stub id=\\"activated\\"></toggle-button-stub>"`;
+
+exports[`AlertsServiceForm with default values shows description and "Learn More" link 1`] = `"Each alert source must be authorized using the following URL and authorization key. <a href=\\"https://docs.gitlab.com/ee/user/project/integrations/generic_alerts.md\\" target=\\"_blank\\" rel=\\"noopener noreferrer\\">Learn more</a> about configuring this endpoint to receive alerts."`;
diff --git a/spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js b/spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js
new file mode 100644
index 00000000000..b7a008c78d0
--- /dev/null
+++ b/spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js
@@ -0,0 +1,168 @@
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import { shallowMount } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
+import AlertsServiceForm from '~/alerts_service_settings/components/alerts_service_form.vue';
+import ToggleButton from '~/vue_shared/components/toggle_button.vue';
+import createFlash from '~/flash';
+
+jest.mock('~/flash');
+
+const defaultProps = {
+ initialAuthorizationKey: 'abcedfg123',
+ formPath: 'http://invalid',
+ url: 'https://gitlab.com/endpoint-url',
+ learnMoreUrl: 'https://docs.gitlab.com/ee/user/project/integrations/generic_alerts.md',
+ initialActivated: false,
+};
+
+describe('AlertsServiceForm', () => {
+ let wrapper;
+ let mockAxios;
+
+ const createComponent = (props = defaultProps, { methods } = {}) => {
+ wrapper = shallowMount(AlertsServiceForm, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ methods,
+ });
+ };
+
+ const findUrl = () => wrapper.find('#url');
+ const findAuthorizationKey = () => wrapper.find('#authorization-key');
+ const findDescription = () => wrapper.find('p');
+ const findActiveStatusIcon = val =>
+ document.querySelector(`.js-service-active-status[data-value=${val.toString()}]`);
+
+ beforeEach(() => {
+ mockAxios = new MockAdapter(axios);
+ setFixtures(`
+ <div>
+ <span class="js-service-active-status fa fa-circle" data-value="true"></span>
+ <span class="js-service-active-status fa fa-power-off" data-value="false"></span>
+ </div>`);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ mockAxios.restore();
+ });
+
+ describe('with default values', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders "url" input', () => {
+ expect(findUrl().html()).toMatchSnapshot();
+ });
+
+ it('renders "authorization-key" input', () => {
+ expect(findAuthorizationKey().html()).toMatchSnapshot();
+ });
+
+ it('renders toggle button', () => {
+ expect(wrapper.find(ToggleButton).html()).toMatchSnapshot();
+ });
+
+ it('shows description and "Learn More" link', () => {
+ expect(findDescription().element.innerHTML).toMatchSnapshot();
+ });
+ });
+
+ describe('reset key', () => {
+ it('triggers resetKey method', () => {
+ const resetKey = jest.fn();
+ const methods = { resetKey };
+ createComponent(defaultProps, { methods });
+
+ wrapper.find(GlModal).vm.$emit('ok');
+
+ expect(resetKey).toHaveBeenCalled();
+ });
+
+ it('updates the authorization key on success', () => {
+ const formPath = 'some/path';
+ mockAxios.onPut(formPath, { service: { token: '' } }).replyOnce(200, { token: 'newToken' });
+
+ createComponent({ formPath });
+
+ return wrapper.vm.resetKey().then(() => {
+ expect(findAuthorizationKey().attributes('value')).toBe('newToken');
+ });
+ });
+
+ it('shows flash message on error', () => {
+ const formPath = 'some/path';
+ mockAxios.onPut(formPath).replyOnce(404);
+
+ createComponent({ formPath });
+
+ return wrapper.vm.resetKey().then(() => {
+ expect(findAuthorizationKey().attributes('value')).toBe(
+ defaultProps.initialAuthorizationKey,
+ );
+ expect(createFlash).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('activate toggle', () => {
+ it('triggers toggleActivated method', () => {
+ const toggleActivated = jest.fn();
+ const methods = { toggleActivated };
+ createComponent(defaultProps, { methods });
+
+ wrapper.find(ToggleButton).vm.$emit('change', true);
+
+ expect(toggleActivated).toHaveBeenCalled();
+ });
+
+ describe('successfully completes', () => {
+ describe.each`
+ initialActivated | value
+ ${false} | ${true}
+ ${true} | ${false}
+ `(
+ 'when initialActivated=$initialActivated and value=$value',
+ ({ initialActivated, value }) => {
+ beforeEach(() => {
+ const formPath = 'some/path';
+ mockAxios
+ .onPut(formPath, { service: { active: value } })
+ .replyOnce(200, { active: value });
+ createComponent({ initialActivated, formPath });
+
+ return wrapper.vm.toggleActivated(value);
+ });
+
+ it(`updates toggle button value to ${value}`, () => {
+ expect(wrapper.find(ToggleButton).props('value')).toBe(value);
+ });
+
+ it('updates visible status icons', () => {
+ expect(findActiveStatusIcon(!value)).toHaveClass('d-none');
+ expect(findActiveStatusIcon(value)).not.toHaveClass('d-none');
+ });
+ },
+ );
+ });
+
+ describe('error is encountered', () => {
+ beforeEach(() => {
+ const formPath = 'some/path';
+ mockAxios.onPut(formPath).replyOnce(500);
+ });
+
+ it('restores previous value', () => {
+ createComponent({ initialActivated: false });
+
+ return wrapper.vm.toggleActivated(true).then(() => {
+ expect(wrapper.find(ToggleButton).props('value')).toBe(false);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
new file mode 100644
index 00000000000..7382a3a4cf7
--- /dev/null
+++ b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
@@ -0,0 +1,35 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Blob Header Filepath rendering matches the snapshot 1`] = `
+<div
+ class="file-header-content d-flex align-items-center lh-100"
+>
+
+ <file-icon-stub
+ aria-hidden="true"
+ cssclasses="mr-2"
+ filename="dummy.md"
+ size="18"
+ />
+
+ <strong
+ class="file-title-name qa-file-title-name mr-1 js-blob-header-filepath"
+ >
+ dummy.md
+ </strong>
+
+ <small
+ class="mr-2"
+ >
+ a lot
+ </small>
+
+ <clipboard-button-stub
+ cssclass="btn-clipboard btn-transparent lh-100 position-static"
+ gfm="\`dummy.md\`"
+ text="dummy.md"
+ title="Copy file path"
+ tooltipplacement="top"
+ />
+</div>
+`;
diff --git a/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap
new file mode 100644
index 00000000000..2878ad492a4
--- /dev/null
+++ b/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap
@@ -0,0 +1,24 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Blob Header Default Actions rendering matches the snapshot 1`] = `
+<div
+ class="js-file-title file-title-flex-parent"
+>
+ <blob-filepath-stub
+ blob="[object Object]"
+ />
+
+ <div
+ class="file-actions d-none d-sm-block"
+ >
+ <viewer-switcher-stub
+ value="simple"
+ />
+
+ <default-actions-stub
+ activeviewer="simple"
+ rawpath="/flightjs/flight/snippets/51/raw"
+ />
+ </div>
+</div>
+`;
diff --git a/spec/frontend/blob/components/blob_content_error_spec.js b/spec/frontend/blob/components/blob_content_error_spec.js
new file mode 100644
index 00000000000..58a9ee761df
--- /dev/null
+++ b/spec/frontend/blob/components/blob_content_error_spec.js
@@ -0,0 +1,27 @@
+import { shallowMount } from '@vue/test-utils';
+import BlobContentError from '~/blob/components/blob_content_error.vue';
+
+describe('Blob Content Error component', () => {
+ let wrapper;
+ const viewerError = '<h1 id="error">Foo Error</h1>';
+
+ function createComponent() {
+ wrapper = shallowMount(BlobContentError, {
+ propsData: {
+ viewerError,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the passed error without transformations', () => {
+ expect(wrapper.html()).toContain(viewerError);
+ });
+});
diff --git a/spec/frontend/blob/components/blob_content_spec.js b/spec/frontend/blob/components/blob_content_spec.js
new file mode 100644
index 00000000000..6a130c9c43d
--- /dev/null
+++ b/spec/frontend/blob/components/blob_content_spec.js
@@ -0,0 +1,70 @@
+import { shallowMount } from '@vue/test-utils';
+import BlobContent from '~/blob/components/blob_content.vue';
+import BlobContentError from '~/blob/components/blob_content_error.vue';
+import {
+ RichViewerMock,
+ SimpleViewerMock,
+ RichBlobContentMock,
+ SimpleBlobContentMock,
+} from './mock_data';
+import { GlLoadingIcon } from '@gitlab/ui';
+import { RichViewer, SimpleViewer } from '~/vue_shared/components/blob_viewers';
+
+describe('Blob Content component', () => {
+ let wrapper;
+
+ function createComponent(propsData = {}, activeViewer = SimpleViewerMock) {
+ wrapper = shallowMount(BlobContent, {
+ propsData: {
+ loading: false,
+ activeViewer,
+ ...propsData,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('rendering', () => {
+ it('renders loader if `loading: true`', () => {
+ createComponent({ loading: true });
+ expect(wrapper.contains(GlLoadingIcon)).toBe(true);
+ expect(wrapper.contains(BlobContentError)).toBe(false);
+ expect(wrapper.contains(RichViewer)).toBe(false);
+ expect(wrapper.contains(SimpleViewer)).toBe(false);
+ });
+
+ it('renders error if there is any in the viewer', () => {
+ const renderError = 'Oops';
+ const viewer = Object.assign({}, SimpleViewerMock, { renderError });
+ createComponent({}, viewer);
+ expect(wrapper.contains(GlLoadingIcon)).toBe(false);
+ expect(wrapper.contains(BlobContentError)).toBe(true);
+ expect(wrapper.contains(RichViewer)).toBe(false);
+ expect(wrapper.contains(SimpleViewer)).toBe(false);
+ });
+
+ it.each`
+ type | mock | viewer
+ ${'simple'} | ${SimpleViewerMock} | ${SimpleViewer}
+ ${'rich'} | ${RichViewerMock} | ${RichViewer}
+ `(
+ 'renders $type viewer when activeViewer is $type and no loading or error detected',
+ ({ mock, viewer }) => {
+ createComponent({}, mock);
+ expect(wrapper.contains(viewer)).toBe(true);
+ },
+ );
+
+ it.each`
+ content | mock | viewer
+ ${SimpleBlobContentMock.plainData} | ${SimpleViewerMock} | ${SimpleViewer}
+ ${RichBlobContentMock.richData} | ${RichViewerMock} | ${RichViewer}
+ `('renders correct content that is passed to the component', ({ content, mock, viewer }) => {
+ createComponent({ content }, mock);
+ expect(wrapper.find(viewer).html()).toContain(content);
+ });
+ });
+});
diff --git a/spec/frontend/blob/components/blob_embeddable_spec.js b/spec/frontend/blob/components/blob_embeddable_spec.js
new file mode 100644
index 00000000000..b2fe71f1401
--- /dev/null
+++ b/spec/frontend/blob/components/blob_embeddable_spec.js
@@ -0,0 +1,35 @@
+import { shallowMount } from '@vue/test-utils';
+import BlobEmbeddable from '~/blob/components/blob_embeddable.vue';
+import { GlFormInputGroup } from '@gitlab/ui';
+
+describe('Blob Embeddable', () => {
+ let wrapper;
+ const url = 'https://foo.bar';
+
+ function createComponent() {
+ wrapper = shallowMount(BlobEmbeddable, {
+ propsData: {
+ url,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders gl-form-input-group component', () => {
+ expect(wrapper.find(GlFormInputGroup).exists()).toBe(true);
+ });
+
+ it('makes up optionValues based on the url prop', () => {
+ expect(wrapper.vm.optionValues).toEqual([
+ { name: 'Embed', value: expect.stringContaining(`${url}.js`) },
+ { name: 'Share', value: url },
+ ]);
+ });
+});
diff --git a/spec/frontend/blob/components/blob_header_default_actions_spec.js b/spec/frontend/blob/components/blob_header_default_actions_spec.js
new file mode 100644
index 00000000000..39d627e71c5
--- /dev/null
+++ b/spec/frontend/blob/components/blob_header_default_actions_spec.js
@@ -0,0 +1,70 @@
+import { mount } from '@vue/test-utils';
+import BlobHeaderActions from '~/blob/components/blob_header_default_actions.vue';
+import {
+ BTN_COPY_CONTENTS_TITLE,
+ BTN_DOWNLOAD_TITLE,
+ BTN_RAW_TITLE,
+ RICH_BLOB_VIEWER,
+} from '~/blob/components/constants';
+import { GlButtonGroup, GlButton } from '@gitlab/ui';
+import { Blob } from './mock_data';
+
+describe('Blob Header Default Actions', () => {
+ let wrapper;
+ let btnGroup;
+ let buttons;
+ const hrefPrefix = 'http://localhost';
+
+ function createComponent(propsData = {}) {
+ wrapper = mount(BlobHeaderActions, {
+ propsData: {
+ rawPath: Blob.rawPath,
+ ...propsData,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ btnGroup = wrapper.find(GlButtonGroup);
+ buttons = wrapper.findAll(GlButton);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('renders', () => {
+ it('gl-button-group component', () => {
+ expect(btnGroup.exists()).toBe(true);
+ });
+
+ it('exactly 3 buttons with predefined actions', () => {
+ expect(buttons.length).toBe(3);
+ [BTN_COPY_CONTENTS_TITLE, BTN_RAW_TITLE, BTN_DOWNLOAD_TITLE].forEach((title, i) => {
+ expect(buttons.at(i).vm.$el.title).toBe(title);
+ });
+ });
+
+ it('correct href attribute on RAW button', () => {
+ expect(buttons.at(1).vm.$el.href).toBe(`${hrefPrefix}${Blob.rawPath}`);
+ });
+
+ it('correct href attribute on Download button', () => {
+ expect(buttons.at(2).vm.$el.href).toBe(`${hrefPrefix}${Blob.rawPath}?inline=false`);
+ });
+
+ it('does not render "Copy file contents" button as disables if the viewer is Simple', () => {
+ expect(buttons.at(0).attributes('disabled')).toBeUndefined();
+ });
+
+ it('renders "Copy file contents" button as disables if the viewer is Rich', () => {
+ createComponent({
+ activeViewer: RICH_BLOB_VIEWER,
+ });
+ buttons = wrapper.findAll(GlButton);
+
+ expect(buttons.at(0).attributes('disabled')).toBeTruthy();
+ });
+ });
+});
diff --git a/spec/frontend/blob/components/blob_header_filepath_spec.js b/spec/frontend/blob/components/blob_header_filepath_spec.js
new file mode 100644
index 00000000000..d029ba2a7a4
--- /dev/null
+++ b/spec/frontend/blob/components/blob_header_filepath_spec.js
@@ -0,0 +1,90 @@
+import { shallowMount } from '@vue/test-utils';
+import BlobHeaderFilepath from '~/blob/components/blob_header_filepath.vue';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import { Blob as MockBlob } from './mock_data';
+import { numberToHumanSize } from '~/lib/utils/number_utils';
+
+const mockHumanReadableSize = 'a lot';
+jest.mock('~/lib/utils/number_utils', () => ({
+ numberToHumanSize: jest.fn(() => mockHumanReadableSize),
+}));
+
+describe('Blob Header Filepath', () => {
+ let wrapper;
+
+ function createComponent(blobProps = {}, options = {}) {
+ wrapper = shallowMount(BlobHeaderFilepath, {
+ propsData: {
+ blob: Object.assign({}, MockBlob, blobProps),
+ },
+ ...options,
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('rendering', () => {
+ it('matches the snapshot', () => {
+ createComponent();
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('renders regular name', () => {
+ createComponent();
+ expect(
+ wrapper
+ .find('.js-blob-header-filepath')
+ .text()
+ .trim(),
+ ).toBe(MockBlob.name);
+ });
+
+ it('does not fail if the name is empty', () => {
+ const emptyName = '';
+ createComponent({ name: emptyName });
+ expect(wrapper.find('.js-blob-header-filepath').exists()).toBe(false);
+ });
+
+ it('renders copy-to-clipboard icon that copies path of the Blob', () => {
+ createComponent();
+ const btn = wrapper.find(ClipboardButton);
+ expect(btn.exists()).toBe(true);
+ expect(btn.vm.text).toBe(MockBlob.path);
+ });
+
+ it('renders filesize in a human-friendly format', () => {
+ createComponent();
+ expect(numberToHumanSize).toHaveBeenCalled();
+ expect(wrapper.vm.blobSize).toBe(mockHumanReadableSize);
+ });
+
+ it('renders a slot and prepends its contents to the existing one', () => {
+ const slotContent = 'Foo Bar';
+ createComponent(
+ {},
+ {
+ scopedSlots: {
+ filepathPrepend: `<span>${slotContent}</span>`,
+ },
+ },
+ );
+
+ expect(wrapper.text()).toContain(slotContent);
+ expect(
+ wrapper
+ .text()
+ .trim()
+ .substring(0, slotContent.length),
+ ).toBe(slotContent);
+ });
+ });
+
+ describe('functionality', () => {
+ it('sets gfm value correctly on the clipboard-button', () => {
+ createComponent();
+ expect(wrapper.vm.gfmCopyText).toBe('`dummy.md`');
+ });
+ });
+});
diff --git a/spec/frontend/blob/components/blob_header_spec.js b/spec/frontend/blob/components/blob_header_spec.js
new file mode 100644
index 00000000000..d410ef10fc9
--- /dev/null
+++ b/spec/frontend/blob/components/blob_header_spec.js
@@ -0,0 +1,139 @@
+import { shallowMount, mount } from '@vue/test-utils';
+import BlobHeader from '~/blob/components/blob_header.vue';
+import ViewerSwitcher from '~/blob/components/blob_header_viewer_switcher.vue';
+import DefaultActions from '~/blob/components/blob_header_default_actions.vue';
+import BlobFilepath from '~/blob/components/blob_header_filepath.vue';
+
+import { Blob } from './mock_data';
+
+describe('Blob Header Default Actions', () => {
+ let wrapper;
+
+ function createComponent(blobProps = {}, options = {}, propsData = {}, shouldMount = false) {
+ const method = shouldMount ? mount : shallowMount;
+ wrapper = method.call(this, BlobHeader, {
+ propsData: {
+ blob: Object.assign({}, Blob, blobProps),
+ ...propsData,
+ },
+ ...options,
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('rendering', () => {
+ const slots = {
+ prepend: 'Foo Prepend',
+ actions: 'Actions Bar',
+ };
+
+ it('matches the snapshot', () => {
+ createComponent();
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('renders all components', () => {
+ createComponent();
+ expect(wrapper.find(ViewerSwitcher).exists()).toBe(true);
+ expect(wrapper.find(DefaultActions).exists()).toBe(true);
+ expect(wrapper.find(BlobFilepath).exists()).toBe(true);
+ });
+
+ it('does not render viewer switcher if the blob has only the simple viewer', () => {
+ createComponent({
+ richViewer: null,
+ });
+ expect(wrapper.find(ViewerSwitcher).exists()).toBe(false);
+ });
+
+ it('does not render viewer switcher if a corresponding prop is passed', () => {
+ createComponent(
+ {},
+ {},
+ {
+ hideViewerSwitcher: true,
+ },
+ );
+ expect(wrapper.find(ViewerSwitcher).exists()).toBe(false);
+ });
+
+ it('does not render default actions if corresponding prop is passed', () => {
+ createComponent(
+ {},
+ {},
+ {
+ hideDefaultActions: true,
+ },
+ );
+ expect(wrapper.find(DefaultActions).exists()).toBe(false);
+ });
+
+ Object.keys(slots).forEach(slot => {
+ it('renders the slots', () => {
+ const slotContent = slots[slot];
+ createComponent(
+ {},
+ {
+ scopedSlots: {
+ [slot]: `<span>${slotContent}</span>`,
+ },
+ },
+ {},
+ true,
+ );
+ expect(wrapper.text()).toContain(slotContent);
+ });
+ });
+ });
+
+ describe('functionality', () => {
+ const newViewer = 'Foo Bar';
+ const activeViewerType = 'Alpha Beta';
+
+ const factory = (hideViewerSwitcher = false) => {
+ createComponent(
+ {},
+ {},
+ {
+ activeViewerType,
+ hideViewerSwitcher,
+ },
+ );
+ };
+
+ it('by default sets viewer data based on activeViewerType', () => {
+ factory();
+ expect(wrapper.vm.viewer).toBe(activeViewerType);
+ });
+
+ it('sets viewer to null if the viewer switcher should be hidden', () => {
+ factory(true);
+ expect(wrapper.vm.viewer).toBe(null);
+ });
+
+ it('watches the changes in viewer data and emits event when the change is registered', () => {
+ factory();
+ jest.spyOn(wrapper.vm, '$emit');
+ wrapper.vm.viewer = newViewer;
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.$emit).toHaveBeenCalledWith('viewer-changed', newViewer);
+ });
+ });
+
+ it('does not emit event if the switcher is not rendered', () => {
+ factory(true);
+
+ expect(wrapper.vm.showViewerSwitcher).toBe(false);
+ jest.spyOn(wrapper.vm, '$emit');
+ wrapper.vm.viewer = newViewer;
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.$emit).not.toHaveBeenCalled();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/blob/components/blob_header_viewer_switcher_spec.js b/spec/frontend/blob/components/blob_header_viewer_switcher_spec.js
new file mode 100644
index 00000000000..f1a7ac8b21a
--- /dev/null
+++ b/spec/frontend/blob/components/blob_header_viewer_switcher_spec.js
@@ -0,0 +1,97 @@
+import { mount } from '@vue/test-utils';
+import BlobHeaderViewerSwitcher from '~/blob/components/blob_header_viewer_switcher.vue';
+import {
+ RICH_BLOB_VIEWER,
+ RICH_BLOB_VIEWER_TITLE,
+ SIMPLE_BLOB_VIEWER,
+ SIMPLE_BLOB_VIEWER_TITLE,
+} from '~/blob/components/constants';
+import { GlButtonGroup, GlButton } from '@gitlab/ui';
+
+describe('Blob Header Viewer Switcher', () => {
+ let wrapper;
+
+ function createComponent(propsData = {}) {
+ wrapper = mount(BlobHeaderViewerSwitcher, {
+ propsData,
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('initialization', () => {
+ it('is initialized with simple viewer as active', () => {
+ createComponent();
+ expect(wrapper.vm.value).toBe(SIMPLE_BLOB_VIEWER);
+ });
+ });
+
+ describe('rendering', () => {
+ let btnGroup;
+ let buttons;
+
+ beforeEach(() => {
+ createComponent();
+ btnGroup = wrapper.find(GlButtonGroup);
+ buttons = wrapper.findAll(GlButton);
+ });
+
+ it('renders gl-button-group component', () => {
+ expect(btnGroup.exists()).toBe(true);
+ });
+
+ it('renders exactly 2 buttons with predefined actions', () => {
+ expect(buttons.length).toBe(2);
+ [SIMPLE_BLOB_VIEWER_TITLE, RICH_BLOB_VIEWER_TITLE].forEach((title, i) => {
+ expect(buttons.at(i).attributes('title')).toBe(title);
+ });
+ });
+ });
+
+ describe('viewer changes', () => {
+ let buttons;
+ let simpleBtn;
+ let richBtn;
+
+ function factory(propsData = {}) {
+ createComponent(propsData);
+ buttons = wrapper.findAll(GlButton);
+ simpleBtn = buttons.at(0);
+ richBtn = buttons.at(1);
+
+ jest.spyOn(wrapper.vm, '$emit');
+ }
+
+ it('does not switch the viewer if the selected one is already active', () => {
+ factory();
+ expect(wrapper.vm.value).toBe(SIMPLE_BLOB_VIEWER);
+ simpleBtn.vm.$emit('click');
+ expect(wrapper.vm.value).toBe(SIMPLE_BLOB_VIEWER);
+ expect(wrapper.vm.$emit).not.toHaveBeenCalled();
+ });
+
+ it('emits an event when a Rich Viewer button is clicked', () => {
+ factory();
+ expect(wrapper.vm.value).toBe(SIMPLE_BLOB_VIEWER);
+
+ richBtn.vm.$emit('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.$emit).toHaveBeenCalledWith('input', RICH_BLOB_VIEWER);
+ });
+ });
+
+ it('emits an event when a Simple Viewer button is clicked', () => {
+ factory({
+ value: RICH_BLOB_VIEWER,
+ });
+ simpleBtn.vm.$emit('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.$emit).toHaveBeenCalledWith('input', SIMPLE_BLOB_VIEWER);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/blob/components/mock_data.js b/spec/frontend/blob/components/mock_data.js
new file mode 100644
index 00000000000..bfcca14324f
--- /dev/null
+++ b/spec/frontend/blob/components/mock_data.js
@@ -0,0 +1,43 @@
+import { SIMPLE_BLOB_VIEWER, RICH_BLOB_VIEWER } from '~/blob/components/constants';
+
+export const SimpleViewerMock = {
+ collapsed: false,
+ loadingPartialName: 'loading',
+ renderError: null,
+ tooLarge: false,
+ type: SIMPLE_BLOB_VIEWER,
+ fileType: 'text',
+};
+
+export const RichViewerMock = {
+ collapsed: false,
+ loadingPartialName: 'loading',
+ renderError: null,
+ tooLarge: false,
+ type: RICH_BLOB_VIEWER,
+ fileType: 'markdown',
+};
+
+export const Blob = {
+ binary: false,
+ name: 'dummy.md',
+ path: 'dummy.md',
+ rawPath: '/flightjs/flight/snippets/51/raw',
+ size: 75,
+ simpleViewer: {
+ ...SimpleViewerMock,
+ },
+ richViewer: {
+ ...RichViewerMock,
+ },
+};
+
+export const RichBlobContentMock = {
+ richData: '<h1>Rich</h1>',
+};
+
+export const SimpleBlobContentMock = {
+ plainData: 'Plain',
+};
+
+export default {};
diff --git a/spec/frontend/boards/boards_store_spec.js b/spec/frontend/boards/boards_store_spec.js
index bf3d81d3117..2dc9039bc9d 100644
--- a/spec/frontend/boards/boards_store_spec.js
+++ b/spec/frontend/boards/boards_store_spec.js
@@ -6,7 +6,7 @@ import eventHub from '~/boards/eventhub';
import { listObj, listObjDuplicate } from './mock_data';
import ListIssue from '~/boards/models/issue';
-import '~/boards/models/list';
+import List from '~/boards/models/list';
jest.mock('js-cookie');
@@ -190,6 +190,30 @@ describe('boardsStore', () => {
});
});
+ describe('saveList', () => {
+ let list;
+
+ beforeEach(() => {
+ list = new List(listObj);
+ setupDefaultResponses();
+ });
+
+ it('makes a request to save a list', () => {
+ const expectedResponse = expect.objectContaining({ issues: [createTestIssue()] });
+ const expectedListValue = {
+ id: listObj.id,
+ position: listObj.position,
+ type: listObj.list_type,
+ label: listObj.label,
+ };
+ expect(list.id).toBe(listObj.id);
+ expect(list.position).toBe(listObj.position);
+ expect(list).toMatchObject(expectedListValue);
+
+ return expect(boardsStore.saveList(list)).resolves.toEqual(expectedResponse);
+ });
+ });
+
describe('getIssuesForList', () => {
const id = 'TOO-MUCH';
const url = `${endpoints.listsEndpoint}/${id}/issues?id=${id}`;
diff --git a/spec/javascripts/boards/components/issue_card_inner_scoped_label_spec.js b/spec/frontend/boards/components/issue_card_inner_scoped_label_spec.js
index 6ac51ebdb2d..7389cb14ecb 100644
--- a/spec/javascripts/boards/components/issue_card_inner_scoped_label_spec.js
+++ b/spec/frontend/boards/components/issue_card_inner_scoped_label_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import mountComponent from 'helpers/vue_mount_component_helper';
import IssueCardInnerScopedLabel from '~/boards/components/issue_card_inner_scoped_label.vue';
describe('IssueCardInnerScopedLabel Component', () => {
diff --git a/spec/javascripts/boards/components/issue_due_date_spec.js b/spec/frontend/boards/components/issue_due_date_spec.js
index 68e26b68f04..68e26b68f04 100644
--- a/spec/javascripts/boards/components/issue_due_date_spec.js
+++ b/spec/frontend/boards/components/issue_due_date_spec.js
diff --git a/spec/frontend/boards/issue_card_spec.js b/spec/frontend/boards/issue_card_spec.js
index df55a106945..1fd2b417aba 100644
--- a/spec/frontend/boards/issue_card_spec.js
+++ b/spec/frontend/boards/issue_card_spec.js
@@ -1,6 +1,6 @@
/* global ListAssignee, ListLabel, ListIssue */
import { mount } from '@vue/test-utils';
-import _ from 'underscore';
+import { range } from 'lodash';
import '~/boards/models/label';
import '~/boards/models/assignee';
import '~/boards/models/issue';
@@ -66,7 +66,11 @@ describe('Issue card component', () => {
});
it('does not render confidential icon', () => {
- expect(wrapper.find('.fa-eye-flash').exists()).toBe(false);
+ expect(wrapper.find('.confidential-icon').exists()).toBe(false);
+ });
+
+ it('does not render blocked icon', () => {
+ expect(wrapper.find('.issue-blocked-icon').exists()).toBe(false);
});
it('renders confidential icon', done => {
@@ -97,6 +101,9 @@ describe('Issue card component', () => {
issue: {
...wrapper.props('issue'),
assignees: [user],
+ updateData(newData) {
+ Object.assign(this, newData);
+ },
},
});
@@ -118,6 +125,28 @@ describe('Issue card component', () => {
it('renders avatar', () => {
expect(wrapper.find('.board-card-assignee img').exists()).toBe(true);
});
+
+ it('renders the avatar using avatar_url property', done => {
+ wrapper.props('issue').updateData({
+ ...wrapper.props('issue'),
+ assignees: [
+ {
+ id: '1',
+ name: 'test',
+ state: 'active',
+ username: 'test_name',
+ avatar_url: 'test_image_from_avatar_url',
+ },
+ ],
+ });
+
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('.board-card-assignee img').attributes('src')).toBe(
+ 'test_image_from_avatar_url?width=24',
+ );
+ done();
+ });
+ });
});
describe('assignee default avatar', () => {
@@ -222,7 +251,7 @@ describe('Issue card component', () => {
it('renders 99+ avatar counter', done => {
const assignees = [
...wrapper.props('issue').assignees,
- ..._.range(5, 103).map(
+ ...range(5, 103).map(
i =>
new ListAssignee({
id: i,
@@ -299,4 +328,20 @@ describe('Issue card component', () => {
.catch(done.fail);
});
});
+
+ describe('blocked', () => {
+ beforeEach(done => {
+ wrapper.setProps({
+ issue: {
+ ...wrapper.props('issue'),
+ blocked: true,
+ },
+ });
+ wrapper.vm.$nextTick(done);
+ });
+
+ it('renders blocked icon if issue is blocked', () => {
+ expect(wrapper.find('.issue-blocked-icon').exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
index 8f406c62824..a35348d86ea 100644
--- a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
@@ -3,7 +3,7 @@
exports[`Remove cluster confirmation modal renders splitbutton with modal included 1`] = `
<div>
<div
- class="dropdown btn-group b-dropdown gl-dropdown"
+ class="dropdown b-dropdown gl-dropdown btn-group"
>
<button
class="btn btn-danger"
@@ -31,7 +31,9 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
role="menu"
tabindex="-1"
>
- <li>
+ <li
+ role="presentation"
+ >
<button
class="dropdown-item is-active"
role="menuitem"
@@ -47,14 +49,18 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
</button>
</li>
- <li>
+ <li
+ role="presentation"
+ >
<hr
aria-orientation="horizontal"
class="dropdown-divider"
role="separator"
/>
</li>
- <li>
+ <li
+ role="presentation"
+ >
<button
class="dropdown-item"
role="menuitem"
diff --git a/spec/frontend/clusters/components/applications_spec.js b/spec/frontend/clusters/components/applications_spec.js
index 01e9b04dcd7..c3336edfe59 100644
--- a/spec/frontend/clusters/components/applications_spec.js
+++ b/spec/frontend/clusters/components/applications_spec.js
@@ -14,9 +14,6 @@ describe('Applications', () => {
beforeEach(() => {
Applications = Vue.extend(applications);
-
- gon.features = gon.features || {};
- gon.features.enableClusterApplicationElasticStack = true;
});
afterEach(() => {
diff --git a/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
new file mode 100644
index 00000000000..dda6d68018e
--- /dev/null
+++ b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
@@ -0,0 +1,39 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Code navigation popover component renders popover 1`] = `
+<div
+ class="popover code-navigation-popover popover-font-size-normal gl-popover bs-popover-bottom show"
+ style="left: 0px; top: 0px;"
+>
+ <div
+ class="arrow"
+ style="left: 0px;"
+ />
+
+ <div
+ class="border-bottom"
+ >
+ <pre
+ class="border-0 bg-transparent m-0 code highlight"
+ >
+ console.log
+ </pre>
+ </div>
+
+ <div
+ class="popover-body"
+ >
+ <gl-button-stub
+ class="w-100"
+ href="http://test.com"
+ size="md"
+ target="_blank"
+ variant="default"
+ >
+
+ Go to definition
+
+ </gl-button-stub>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/code_navigation/components/app_spec.js b/spec/frontend/code_navigation/components/app_spec.js
new file mode 100644
index 00000000000..cfdc0dcc6cc
--- /dev/null
+++ b/spec/frontend/code_navigation/components/app_spec.js
@@ -0,0 +1,64 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import createState from '~/code_navigation/store/state';
+import App from '~/code_navigation/components/app.vue';
+import Popover from '~/code_navigation/components/popover.vue';
+
+const localVue = createLocalVue();
+const fetchData = jest.fn();
+const showDefinition = jest.fn();
+let wrapper;
+
+localVue.use(Vuex);
+
+function factory(initialState = {}) {
+ const store = new Vuex.Store({
+ state: {
+ ...createState(),
+ ...initialState,
+ },
+ actions: {
+ fetchData,
+ showDefinition,
+ },
+ });
+
+ wrapper = shallowMount(App, { store, localVue });
+}
+
+describe('Code navigation app component', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('fetches data on mount', () => {
+ factory();
+
+ expect(fetchData).toHaveBeenCalled();
+ });
+
+ it('hides popover when no definition set', () => {
+ factory();
+
+ expect(wrapper.find(Popover).exists()).toBe(false);
+ });
+
+ it('renders popover when definition set', () => {
+ factory({
+ currentDefinition: { hover: 'console' },
+ currentDefinitionPosition: { x: 0 },
+ });
+
+ expect(wrapper.find(Popover).exists()).toBe(true);
+ });
+
+ it('calls showDefinition when clicking blob viewer', () => {
+ setFixtures('<div class="blob-viewer"></div>');
+
+ factory();
+
+ document.querySelector('.blob-viewer').click();
+
+ expect(showDefinition).toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/code_navigation/components/popover_spec.js b/spec/frontend/code_navigation/components/popover_spec.js
new file mode 100644
index 00000000000..ad05504a224
--- /dev/null
+++ b/spec/frontend/code_navigation/components/popover_spec.js
@@ -0,0 +1,58 @@
+import { shallowMount } from '@vue/test-utils';
+import Popover from '~/code_navigation/components/popover.vue';
+
+const MOCK_CODE_DATA = Object.freeze({
+ hover: [
+ {
+ language: 'javascript',
+ value: 'console.log',
+ },
+ ],
+ definition_url: 'http://test.com',
+});
+
+const MOCK_DOCS_DATA = Object.freeze({
+ hover: [
+ {
+ language: null,
+ value: 'console.log',
+ },
+ ],
+ definition_url: 'http://test.com',
+});
+
+let wrapper;
+
+function factory(position, data) {
+ wrapper = shallowMount(Popover, { propsData: { position, data } });
+}
+
+describe('Code navigation popover component', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders popover', () => {
+ factory({ x: 0, y: 0, height: 0 }, MOCK_CODE_DATA);
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('code output', () => {
+ it('renders code output', () => {
+ factory({ x: 0, y: 0, height: 0 }, MOCK_CODE_DATA);
+
+ expect(wrapper.find({ ref: 'code-output' }).exists()).toBe(true);
+ expect(wrapper.find({ ref: 'doc-output' }).exists()).toBe(false);
+ });
+ });
+
+ describe('documentation output', () => {
+ it('renders documentation output', () => {
+ factory({ x: 0, y: 0, height: 0 }, MOCK_DOCS_DATA);
+
+ expect(wrapper.find({ ref: 'code-output' }).exists()).toBe(false);
+ expect(wrapper.find({ ref: 'doc-output' }).exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/code_navigation/store/actions_spec.js b/spec/frontend/code_navigation/store/actions_spec.js
new file mode 100644
index 00000000000..2230e0880bb
--- /dev/null
+++ b/spec/frontend/code_navigation/store/actions_spec.js
@@ -0,0 +1,212 @@
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import actions from '~/code_navigation/store/actions';
+import axios from '~/lib/utils/axios_utils';
+import { setCurrentHoverElement, addInteractionClass } from '~/code_navigation/utils';
+
+jest.mock('~/code_navigation/utils');
+
+describe('Code navigation actions', () => {
+ describe('setInitialData', () => {
+ it('commits SET_INITIAL_DATA', done => {
+ testAction(
+ actions.setInitialData,
+ { projectPath: 'test' },
+ {},
+ [{ type: 'SET_INITIAL_DATA', payload: { projectPath: 'test' } }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('requestDataError', () => {
+ it('commits REQUEST_DATA_ERROR', () =>
+ testAction(actions.requestDataError, null, {}, [{ type: 'REQUEST_DATA_ERROR' }], []));
+ });
+
+ describe('fetchData', () => {
+ let mock;
+ const state = {
+ projectPath: 'gitlab-org/gitlab',
+ commitId: '123',
+ blobPath: 'index',
+ };
+ const apiUrl = '/api/1/projects/gitlab-org%2Fgitlab/commits/123/lsif/info';
+
+ beforeEach(() => {
+ window.gon = { api_version: '1' };
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+ beforeEach(() => {
+ mock.onGet(apiUrl).replyOnce(200, [
+ {
+ start_line: 0,
+ start_char: 0,
+ hover: { value: '123' },
+ },
+ {
+ start_line: 1,
+ start_char: 0,
+ hover: null,
+ },
+ ]);
+ });
+
+ it('commits REQUEST_DATA_SUCCESS with normalized data', done => {
+ testAction(
+ actions.fetchData,
+ null,
+ state,
+ [
+ { type: 'REQUEST_DATA' },
+ {
+ type: 'REQUEST_DATA_SUCCESS',
+ payload: { '0:0': { start_line: 0, start_char: 0, hover: { value: '123' } } },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+
+ it('calls addInteractionClass with data', done => {
+ testAction(
+ actions.fetchData,
+ null,
+ state,
+ [
+ { type: 'REQUEST_DATA' },
+ {
+ type: 'REQUEST_DATA_SUCCESS',
+ payload: { '0:0': { start_line: 0, start_char: 0, hover: { value: '123' } } },
+ },
+ ],
+ [],
+ )
+ .then(() => {
+ expect(addInteractionClass).toHaveBeenCalledWith({
+ start_line: 0,
+ start_char: 0,
+ hover: { value: '123' },
+ });
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onGet(apiUrl).replyOnce(500);
+ });
+
+ it('dispatches requestDataError', done => {
+ testAction(
+ actions.fetchData,
+ null,
+ state,
+ [{ type: 'REQUEST_DATA' }],
+ [{ type: 'requestDataError' }],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('showDefinition', () => {
+ let target;
+
+ beforeEach(() => {
+ target = document.createElement('div');
+ });
+
+ it('returns early when no data exists', done => {
+ testAction(actions.showDefinition, { target }, {}, [], [], done);
+ });
+
+ it('commits SET_CURRENT_DEFINITION when target is not a code navigation element', done => {
+ testAction(
+ actions.showDefinition,
+ { target },
+ { data: {} },
+ [
+ {
+ type: 'SET_CURRENT_DEFINITION',
+ payload: { definition: undefined, position: undefined },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+
+ it('commits SET_CURRENT_DEFINITION with LSIF data', done => {
+ target.classList.add('js-code-navigation');
+ target.setAttribute('data-line-index', '0');
+ target.setAttribute('data-char-index', '0');
+
+ testAction(
+ actions.showDefinition,
+ { target },
+ { data: { '0:0': { hover: 'test' } } },
+ [
+ {
+ type: 'SET_CURRENT_DEFINITION',
+ payload: { definition: { hover: 'test' }, position: { height: 0, x: 0, y: 0 } },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+
+ it('adds hll class to target element', () => {
+ target.classList.add('js-code-navigation');
+ target.setAttribute('data-line-index', '0');
+ target.setAttribute('data-char-index', '0');
+
+ return testAction(
+ actions.showDefinition,
+ { target },
+ { data: { '0:0': { hover: 'test' } } },
+ [
+ {
+ type: 'SET_CURRENT_DEFINITION',
+ payload: { definition: { hover: 'test' }, position: { height: 0, x: 0, y: 0 } },
+ },
+ ],
+ [],
+ ).then(() => {
+ expect(target.classList).toContain('hll');
+ });
+ });
+
+ it('caches current target element', () => {
+ target.classList.add('js-code-navigation');
+ target.setAttribute('data-line-index', '0');
+ target.setAttribute('data-char-index', '0');
+
+ return testAction(
+ actions.showDefinition,
+ { target },
+ { data: { '0:0': { hover: 'test' } } },
+ [
+ {
+ type: 'SET_CURRENT_DEFINITION',
+ payload: { definition: { hover: 'test' }, position: { height: 0, x: 0, y: 0 } },
+ },
+ ],
+ [],
+ ).then(() => {
+ expect(setCurrentHoverElement).toHaveBeenCalledWith(target);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/code_navigation/store/mutations_spec.js b/spec/frontend/code_navigation/store/mutations_spec.js
new file mode 100644
index 00000000000..117a2ed2f14
--- /dev/null
+++ b/spec/frontend/code_navigation/store/mutations_spec.js
@@ -0,0 +1,63 @@
+import mutations from '~/code_navigation/store/mutations';
+import createState from '~/code_navigation/store/state';
+
+let state;
+
+describe('Code navigation mutations', () => {
+ beforeEach(() => {
+ state = createState();
+ });
+
+ describe('SET_INITIAL_DATA', () => {
+ it('sets initial data', () => {
+ mutations.SET_INITIAL_DATA(state, {
+ projectPath: 'test',
+ commitId: '123',
+ blobPath: 'index.js',
+ });
+
+ expect(state.projectPath).toBe('test');
+ expect(state.commitId).toBe('123');
+ expect(state.blobPath).toBe('index.js');
+ });
+ });
+
+ describe('REQUEST_DATA', () => {
+ it('sets loading true', () => {
+ mutations.REQUEST_DATA(state);
+
+ expect(state.loading).toBe(true);
+ });
+ });
+
+ describe('REQUEST_DATA_SUCCESS', () => {
+ it('sets loading false', () => {
+ mutations.REQUEST_DATA_SUCCESS(state, ['test']);
+
+ expect(state.loading).toBe(false);
+ });
+
+ it('sets data', () => {
+ mutations.REQUEST_DATA_SUCCESS(state, ['test']);
+
+ expect(state.data).toEqual(['test']);
+ });
+ });
+
+ describe('REQUEST_DATA_ERROR', () => {
+ it('sets loading false', () => {
+ mutations.REQUEST_DATA_ERROR(state);
+
+ expect(state.loading).toBe(false);
+ });
+ });
+
+ describe('SET_CURRENT_DEFINITION', () => {
+ it('sets current definition and position', () => {
+ mutations.SET_CURRENT_DEFINITION(state, { definition: 'test', position: { x: 0 } });
+
+ expect(state.currentDefinition).toBe('test');
+ expect(state.currentDefinitionPosition).toEqual({ x: 0 });
+ });
+ });
+});
diff --git a/spec/frontend/code_navigation/utils/index_spec.js b/spec/frontend/code_navigation/utils/index_spec.js
new file mode 100644
index 00000000000..458cc536635
--- /dev/null
+++ b/spec/frontend/code_navigation/utils/index_spec.js
@@ -0,0 +1,58 @@
+import {
+ cachedData,
+ getCurrentHoverElement,
+ setCurrentHoverElement,
+ addInteractionClass,
+} from '~/code_navigation/utils';
+
+afterEach(() => {
+ if (cachedData.has('current')) {
+ cachedData.delete('current');
+ }
+});
+
+describe('getCurrentHoverElement', () => {
+ it.each`
+ value
+ ${'test'}
+ ${undefined}
+  `('returns the cached current key when value is $value', ({ value }) => {
+ if (value) {
+ cachedData.set('current', value);
+ }
+
+ expect(getCurrentHoverElement()).toEqual(value);
+ });
+});
+
+describe('setCurrentHoverElement', () => {
+ it('sets cached current key', () => {
+ setCurrentHoverElement('test');
+
+ expect(getCurrentHoverElement()).toEqual('test');
+ });
+});
+
+describe('addInteractionClass', () => {
+ beforeEach(() => {
+ setFixtures(
+ '<div id="LC1"><span>console</span><span>.</span><span>log</span></div><div id="LC2"><span>function</span></div>',
+ );
+ });
+
+ it.each`
+ line | char | index
+ ${0} | ${0} | ${0}
+ ${0} | ${8} | ${2}
+ ${1} | ${0} | ${0}
+ `(
+    'sets code navigation attributes for line $line and character $char',
+ ({ line, char, index }) => {
+ addInteractionClass({ start_line: line, start_char: char });
+
+ expect(document.querySelectorAll(`#LC${line + 1} span`)[index].classList).toContain(
+ 'js-code-navigation',
+ );
+ },
+ );
+});
diff --git a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
index 184d0321dc1..a4a0b98de1b 100644
--- a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
+++ b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
@@ -22,6 +22,7 @@ exports[`Contributors charts should render charts when loading completed and the
legendmaxtext="Max"
option="[object Object]"
thresholds=""
+ width="0"
/>
</div>
@@ -29,7 +30,7 @@ exports[`Contributors charts should render charts when loading completed and the
class="row"
>
<div
- class="col-6"
+ class="col-lg-6 col-12"
>
<h4>
John
@@ -39,15 +40,18 @@ exports[`Contributors charts should render charts when loading completed and the
2 commits (jawnnypoo@gmail.com)
</p>
- <glareachart-stub
- data="[object Object]"
- height="216"
- includelegendavgmax="true"
- legendaveragetext="Avg"
- legendmaxtext="Max"
- option="[object Object]"
- thresholds=""
- />
+ <div>
+ <glareachart-stub
+ data="[object Object]"
+ height="216"
+ includelegendavgmax="true"
+ legendaveragetext="Avg"
+ legendmaxtext="Max"
+ option="[object Object]"
+ thresholds=""
+ width="0"
+ />
+ </div>
</div>
</div>
</div>
diff --git a/spec/frontend/contributors/component/contributors_spec.js b/spec/frontend/contributors/component/contributors_spec.js
index 3e4924ed906..24816e4e8ac 100644
--- a/spec/frontend/contributors/component/contributors_spec.js
+++ b/spec/frontend/contributors/component/contributors_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { createStore } from '~/contributors/stores';
import axios from '~/lib/utils/axios_utils';
@@ -22,7 +22,7 @@ function factory() {
mock.onGet().reply(200, chartData);
store = createStore();
- wrapper = shallowMount(Component, {
+ wrapper = mount(Component, {
propsData: {
endpoint,
branch,
diff --git a/spec/frontend/create_cluster/gke_cluster/components/gke_submit_button_spec.js b/spec/frontend/create_cluster/gke_cluster/components/gke_submit_button_spec.js
new file mode 100644
index 00000000000..9401ba83ef4
--- /dev/null
+++ b/spec/frontend/create_cluster/gke_cluster/components/gke_submit_button_spec.js
@@ -0,0 +1,53 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import GkeSubmitButton from '~/create_cluster/gke_cluster/components/gke_submit_button.vue';
+
+const localVue = createLocalVue();
+
+localVue.use(Vuex);
+
+describe('GkeSubmitButton', () => {
+ let wrapper;
+ let store;
+ let hasValidData;
+
+ const buildStore = () =>
+ new Vuex.Store({
+ getters: {
+ hasValidData,
+ },
+ });
+
+ const buildWrapper = () =>
+ shallowMount(GkeSubmitButton, {
+ store,
+ localVue,
+ });
+
+ const bootstrap = () => {
+ store = buildStore();
+ wrapper = buildWrapper();
+ };
+
+ beforeEach(() => {
+ hasValidData = jest.fn();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('is disabled when hasValidData is false', () => {
+ hasValidData.mockReturnValueOnce(false);
+ bootstrap();
+
+ expect(wrapper.attributes('disabled')).toBe('disabled');
+ });
+
+ it('is not disabled when hasValidData is true', () => {
+ hasValidData.mockReturnValueOnce(true);
+ bootstrap();
+
+ expect(wrapper.attributes('disabled')).toBeFalsy();
+ });
+});
diff --git a/spec/frontend/diffs/components/compare_versions_spec.js b/spec/frontend/diffs/components/compare_versions_spec.js
index 5f919408459..ff92a12eaf6 100644
--- a/spec/frontend/diffs/components/compare_versions_spec.js
+++ b/spec/frontend/diffs/components/compare_versions_spec.js
@@ -49,8 +49,7 @@ describe('CompareVersions', () => {
expect(treeListBtn.exists()).toBe(true);
expect(treeListBtn.attributes('title')).toBe('Hide file browser');
- expect(treeListBtn.findAll(Icon).length).not.toBe(0);
- expect(treeListBtn.find(Icon).props('name')).toBe('collapse-left');
+ expect(treeListBtn.find(Icon).props('name')).toBe('file-tree');
});
it('should render comparison dropdowns with correct values', () => {
diff --git a/spec/frontend/diffs/components/diff_file_row_spec.js b/spec/frontend/diffs/components/diff_file_row_spec.js
new file mode 100644
index 00000000000..9b7a16d0cb5
--- /dev/null
+++ b/spec/frontend/diffs/components/diff_file_row_spec.js
@@ -0,0 +1,74 @@
+import { shallowMount } from '@vue/test-utils';
+import DiffFileRow from '~/diffs/components/diff_file_row.vue';
+import FileRow from '~/vue_shared/components/file_row.vue';
+import FileRowStats from '~/diffs/components/file_row_stats.vue';
+import ChangedFileIcon from '~/vue_shared/components/changed_file_icon.vue';
+
+describe('Diff File Row component', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(DiffFileRow, {
+ propsData: { ...props },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders file row component', () => {
+ const sharedProps = {
+ level: 4,
+ file: {},
+ };
+
+ const diffFileRowProps = {
+ hideFileStats: false,
+ };
+
+ createComponent({
+ ...sharedProps,
+ ...diffFileRowProps,
+ });
+
+ expect(wrapper.find(FileRow).props()).toEqual(
+ expect.objectContaining({
+ ...sharedProps,
+ }),
+ );
+ });
+
+ it('renders ChangedFileIcon component', () => {
+ createComponent({
+ level: 4,
+ file: {},
+ hideFileStats: false,
+ });
+
+ expect(wrapper.find(ChangedFileIcon).props()).toEqual(
+ expect.objectContaining({
+ file: {},
+ size: 16,
+ }),
+ );
+ });
+
+ describe('FileRowStats components', () => {
+ it.each`
+ type | hideFileStats | value | desc
+ ${'blob'} | ${false} | ${true} | ${'is shown if file type is blob'}
+      ${'tree'} | ${false}       | ${false} | ${'is hidden if file type is not blob'}
+ ${'blob'} | ${true} | ${false} | ${'is hidden if hideFileStats is true'}
+ `('$desc', ({ type, value, hideFileStats }) => {
+ createComponent({
+ level: 4,
+ file: {
+ type,
+ },
+ hideFileStats,
+ });
+ expect(wrapper.find(FileRowStats).exists()).toEqual(value);
+ });
+ });
+});
diff --git a/spec/frontend/diffs/components/diff_stats_spec.js b/spec/frontend/diffs/components/diff_stats_spec.js
index 4482abf18c1..5956b478019 100644
--- a/spec/frontend/diffs/components/diff_stats_spec.js
+++ b/spec/frontend/diffs/components/diff_stats_spec.js
@@ -1,13 +1,26 @@
import { shallowMount } from '@vue/test-utils';
-import Icon from '~/vue_shared/components/icon.vue';
import DiffStats from '~/diffs/components/diff_stats.vue';
+import Icon from '~/vue_shared/components/icon.vue';
describe('diff_stats', () => {
- it('does not render a group if diffFileLengths is not passed in', () => {
+  it('does not render the files changed group if diffFilesLength is not provided', () => {
+ const wrapper = shallowMount(DiffStats, {
+ propsData: {
+ addedLines: 1,
+ removedLines: 2,
+ },
+ });
+ const groups = wrapper.findAll('.diff-stats-group');
+
+ expect(groups.length).toBe(2);
+ });
+
+  it('does not render the files changed group if diffFilesLength is not a number', () => {
const wrapper = shallowMount(DiffStats, {
propsData: {
addedLines: 1,
removedLines: 2,
+ diffFilesLength: Number.NaN,
},
});
const groups = wrapper.findAll('.diff-stats-group');
@@ -24,18 +37,18 @@ describe('diff_stats', () => {
},
});
+ const findFileLine = name => wrapper.find(name);
const findIcon = name =>
wrapper
.findAll(Icon)
.filter(c => c.attributes('name') === name)
.at(0).element.parentNode;
-
- const additions = findIcon('file-addition');
- const deletions = findIcon('file-deletion');
+ const additions = findFileLine('.js-file-addition-line');
+ const deletions = findFileLine('.js-file-deletion-line');
const filesChanged = findIcon('doc-code');
- expect(additions.textContent).toContain('100');
- expect(deletions.textContent).toContain('200');
+ expect(additions.text()).toBe('100');
+ expect(deletions.text()).toBe('200');
expect(filesChanged.textContent).toContain('300');
});
});
diff --git a/spec/frontend/diffs/components/diff_table_cell_spec.js b/spec/frontend/diffs/components/diff_table_cell_spec.js
new file mode 100644
index 00000000000..1af0746f3bd
--- /dev/null
+++ b/spec/frontend/diffs/components/diff_table_cell_spec.js
@@ -0,0 +1,213 @@
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import Vuex from 'vuex';
+import DiffTableCell from '~/diffs/components/diff_table_cell.vue';
+import DiffGutterAvatars from '~/diffs/components/diff_gutter_avatars.vue';
+import { LINE_POSITION_RIGHT } from '~/diffs/constants';
+import { createStore } from '~/mr_notes/stores';
+import { TEST_HOST } from 'helpers/test_constants';
+import discussionsMockData from '../mock_data/diff_discussions';
+import diffFileMockData from '../mock_data/diff_file';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const TEST_USER_ID = 'abc123';
+const TEST_USER = { id: TEST_USER_ID };
+const TEST_LINE_NUMBER = 1;
+const TEST_LINE_CODE = 'LC_42';
+const TEST_FILE_HASH = diffFileMockData.file_hash;
+
+describe('DiffTableCell', () => {
+ let wrapper;
+ let line;
+ let store;
+
+ beforeEach(() => {
+ store = createStore();
+ store.state.notes.userData = TEST_USER;
+
+ line = {
+ line_code: TEST_LINE_CODE,
+ type: 'new',
+ old_line: null,
+ new_line: 1,
+ discussions: [{ ...discussionsMockData }],
+ discussionsExpanded: true,
+ text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
+ rich_text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
+ meta_data: null,
+ };
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const setWindowLocation = value => {
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value,
+ });
+ };
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(DiffTableCell, {
+ localVue,
+ store,
+ propsData: {
+ line,
+ fileHash: TEST_FILE_HASH,
+ contextLinesPath: '/context/lines/path',
+ isHighlighted: false,
+ ...props,
+ },
+ });
+ };
+
+ const findTd = () => wrapper.find({ ref: 'td' });
+ const findNoteButton = () => wrapper.find({ ref: 'addDiffNoteButton' });
+ const findLineNumber = () => wrapper.find({ ref: 'lineNumberRef' });
+ const findAvatars = () => wrapper.find(DiffGutterAvatars);
+
+ describe('td', () => {
+ it('highlights when isHighlighted true', () => {
+ createComponent({ isHighlighted: true });
+
+ expect(findTd().classes()).toContain('hll');
+ });
+
+ it('does not highlight when isHighlighted false', () => {
+ createComponent({ isHighlighted: false });
+
+ expect(findTd().classes()).not.toContain('hll');
+ });
+ });
+
+ describe('comment button', () => {
+ it.each`
+ showCommentButton | userData | query | expectation
+ ${true} | ${TEST_USER} | ${'diff_head=false'} | ${true}
+ ${true} | ${TEST_USER} | ${'diff_head=true'} | ${false}
+ ${false} | ${TEST_USER} | ${'bogus'} | ${false}
+ ${true} | ${null} | ${''} | ${false}
+ `(
+ 'exists is $expectation - with showCommentButton ($showCommentButton) userData ($userData) query ($query)',
+ ({ showCommentButton, userData, query, expectation }) => {
+ store.state.notes.userData = userData;
+ setWindowLocation({ href: `${TEST_HOST}?${query}` });
+ createComponent({ showCommentButton });
+
+ expect(findNoteButton().exists()).toBe(expectation);
+ },
+ );
+
+ it.each`
+ isHover | otherProps | discussions | expectation
+ ${true} | ${{}} | ${[]} | ${true}
+ ${false} | ${{}} | ${[]} | ${false}
+ ${true} | ${{ line: { ...line, type: 'match' } }} | ${[]} | ${false}
+ ${true} | ${{ line: { ...line, type: 'context' } }} | ${[]} | ${false}
+ ${true} | ${{ line: { ...line, type: 'old-nonewline' } }} | ${[]} | ${false}
+ ${true} | ${{}} | ${[{}]} | ${false}
+ `(
+ 'visible is $expectation - with isHover ($isHover), discussions ($discussions), otherProps ($otherProps)',
+ ({ isHover, otherProps, discussions, expectation }) => {
+ line.discussions = discussions;
+ createComponent({
+ showCommentButton: true,
+ isHover,
+ ...otherProps,
+ });
+
+ expect(findNoteButton().isVisible()).toBe(expectation);
+ },
+ );
+ });
+
+ describe('line number', () => {
+ describe('without lineNumber prop', () => {
+ it('does not render', () => {
+ createComponent({ lineType: 'old' });
+
+ expect(findLineNumber().exists()).toBe(false);
+ });
+ });
+
+ describe('with lineNumber prop', () => {
+ describe.each`
+ lineProps | expectedHref | expectedClickArg
+ ${{ line_code: TEST_LINE_CODE }} | ${`#${TEST_LINE_CODE}`} | ${TEST_LINE_CODE}
+ ${{ line_code: undefined }} | ${'#'} | ${undefined}
+ ${{ line_code: undefined, left: { line_code: TEST_LINE_CODE } }} | ${'#'} | ${TEST_LINE_CODE}
+ ${{ line_code: undefined, right: { line_code: TEST_LINE_CODE } }} | ${'#'} | ${TEST_LINE_CODE}
+ `('with line ($lineProps)', ({ lineProps, expectedHref, expectedClickArg }) => {
+ beforeEach(() => {
+ jest.spyOn(store, 'dispatch').mockImplementation();
+ Object.assign(line, lineProps);
+ createComponent({ lineNumber: TEST_LINE_NUMBER });
+ });
+
+ it('renders', () => {
+ expect(findLineNumber().exists()).toBe(true);
+ expect(findLineNumber().attributes()).toEqual({
+ href: expectedHref,
+ 'data-linenumber': TEST_LINE_NUMBER.toString(),
+ });
+ });
+
+ it('on click, dispatches setHighlightedRow', () => {
+ expect(store.dispatch).not.toHaveBeenCalled();
+
+ findLineNumber().trigger('click');
+
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/setHighlightedRow', expectedClickArg);
+ });
+ });
+ });
+ });
+
+ describe('diff-gutter-avatars', () => {
+ describe('with showCommentButton', () => {
+ beforeEach(() => {
+ jest.spyOn(store, 'dispatch').mockImplementation();
+
+ createComponent({ showCommentButton: true });
+ });
+
+ it('renders', () => {
+ expect(findAvatars().props()).toEqual({
+ discussions: line.discussions,
+ discussionsExpanded: line.discussionsExpanded,
+ });
+ });
+
+ it('toggles line discussion', () => {
+ expect(store.dispatch).not.toHaveBeenCalled();
+
+ findAvatars().vm.$emit('toggleLineDiscussions');
+
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/toggleLineDiscussions', {
+ lineCode: TEST_LINE_CODE,
+ fileHash: TEST_FILE_HASH,
+ expanded: !line.discussionsExpanded,
+ });
+ });
+ });
+
+ it.each`
+ props | lineProps | expectation
+ ${{ showCommentButton: true }} | ${{}} | ${true}
+ ${{ showCommentButton: false }} | ${{}} | ${false}
+ ${{ showCommentButton: true, linePosition: LINE_POSITION_RIGHT }} | ${{ type: null }} | ${false}
+ ${{ showCommentButton: true }} | ${{ discussions: [] }} | ${false}
+ `(
+ 'exists is $expectation - with props ($props), line ($lineProps)',
+ ({ props, lineProps, expectation }) => {
+ Object.assign(line, lineProps);
+ createComponent(props);
+
+ expect(findAvatars().exists()).toBe(expectation);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/diffs/mock_data/diff_file.js b/spec/frontend/diffs/mock_data/diff_file.js
new file mode 100644
index 00000000000..27428197c1c
--- /dev/null
+++ b/spec/frontend/diffs/mock_data/diff_file.js
@@ -0,0 +1,244 @@
+export default {
+ submodule: false,
+ submodule_link: null,
+ blob: {
+ id: '9e10516ca50788acf18c518a231914a21e5f16f7',
+ path: 'CHANGELOG',
+ name: 'CHANGELOG',
+ mode: '100644',
+ readable_text: true,
+ icon: 'file-text-o',
+ },
+ blob_path: 'CHANGELOG',
+ blob_name: 'CHANGELOG',
+ blob_icon: '<i aria-hidden="true" data-hidden="true" class="fa fa-file-text-o fa-fw"></i>',
+ file_hash: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a',
+ file_path: 'CHANGELOG',
+ new_file: false,
+ deleted_file: false,
+ renamed_file: false,
+ old_path: 'CHANGELOG',
+ new_path: 'CHANGELOG',
+ mode_changed: false,
+ a_mode: '100644',
+ b_mode: '100644',
+ text: true,
+ viewer: {
+ name: 'text',
+ error: null,
+ collapsed: false,
+ },
+ added_lines: 2,
+ removed_lines: 0,
+ diff_refs: {
+ base_sha: 'e63f41fe459e62e1228fcef60d7189127aeba95a',
+ start_sha: 'd9eaefe5a676b820c57ff18cf5b68316025f7962',
+ head_sha: 'c48ee0d1bf3b30453f5b32250ce03134beaa6d13',
+ },
+ content_sha: 'c48ee0d1bf3b30453f5b32250ce03134beaa6d13',
+ stored_externally: null,
+ external_storage: null,
+ old_path_html: 'CHANGELOG',
+ new_path_html: 'CHANGELOG',
+ edit_path: '/gitlab-org/gitlab-test/edit/spooky-stuff/CHANGELOG',
+ view_path: '/gitlab-org/gitlab-test/blob/spooky-stuff/CHANGELOG',
+ replaced_view_path: null,
+ collapsed: false,
+ renderIt: false,
+ too_large: false,
+ context_lines_path:
+ '/gitlab-org/gitlab-test/blob/c48ee0d1bf3b30453f5b32250ce03134beaa6d13/CHANGELOG/diff',
+ highlighted_diff_lines: [
+ {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_1',
+ type: 'new',
+ old_line: null,
+ new_line: 1,
+ discussions: [],
+ text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
+ rich_text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
+ meta_data: null,
+ },
+ {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2',
+ type: 'new',
+ old_line: null,
+ new_line: 2,
+ discussions: [],
+ text: '+<span id="LC2" class="line" lang="plaintext"></span>\n',
+ rich_text: '+<span id="LC2" class="line" lang="plaintext"></span>\n',
+ meta_data: null,
+ },
+ {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3',
+ type: null,
+ old_line: 1,
+ new_line: 3,
+ discussions: [],
+ text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
+ rich_text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
+ meta_data: null,
+ },
+ {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4',
+ type: null,
+ old_line: 2,
+ new_line: 4,
+ discussions: [],
+ text: ' <span id="LC4" class="line" lang="plaintext"></span>\n',
+ rich_text: ' <span id="LC4" class="line" lang="plaintext"></span>\n',
+ meta_data: null,
+ },
+ {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5',
+ type: null,
+ old_line: 3,
+ new_line: 5,
+ discussions: [],
+ text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
+ rich_text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
+ meta_data: null,
+ },
+ {
+ line_code: null,
+ type: 'match',
+ old_line: null,
+ new_line: null,
+ discussions: [],
+ text: '',
+ rich_text: '',
+ meta_data: {
+ old_pos: 3,
+ new_pos: 5,
+ },
+ },
+ ],
+ parallel_diff_lines: [
+ {
+ left: {
+ type: 'empty-cell',
+ },
+ right: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_1',
+ type: 'new',
+ old_line: null,
+ new_line: 1,
+ discussions: [],
+ text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
+ rich_text: '<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
+ meta_data: null,
+ },
+ },
+ {
+ left: {
+ type: 'empty-cell',
+ },
+ right: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2',
+ type: 'new',
+ old_line: null,
+ new_line: 2,
+ discussions: [],
+ text: '+<span id="LC2" class="line" lang="plaintext"></span>\n',
+ rich_text: '<span id="LC2" class="line" lang="plaintext"></span>\n',
+ meta_data: null,
+ },
+ },
+ {
+ left: {
+        line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3',
+ type: null,
+ old_line: 1,
+ new_line: 3,
+ discussions: [],
+ text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
+ rich_text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
+ meta_data: null,
+ },
+ right: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3',
+ type: null,
+ old_line: 1,
+ new_line: 3,
+ discussions: [],
+ text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
+ rich_text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
+ meta_data: null,
+ },
+ },
+ {
+ left: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4',
+ type: null,
+ old_line: 2,
+ new_line: 4,
+ discussions: [],
+ text: ' <span id="LC4" class="line" lang="plaintext"></span>\n',
+ rich_text: '<span id="LC4" class="line" lang="plaintext"></span>\n',
+ meta_data: null,
+ },
+ right: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4',
+ type: null,
+ old_line: 2,
+ new_line: 4,
+ discussions: [],
+ text: ' <span id="LC4" class="line" lang="plaintext"></span>\n',
+ rich_text: '<span id="LC4" class="line" lang="plaintext"></span>\n',
+ meta_data: null,
+ },
+ },
+ {
+ left: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5',
+ type: null,
+ old_line: 3,
+ new_line: 5,
+ discussions: [],
+ text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
+ rich_text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
+ meta_data: null,
+ },
+ right: {
+ line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5',
+ type: null,
+ old_line: 3,
+ new_line: 5,
+ discussions: [],
+ text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
+ rich_text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
+ meta_data: null,
+ },
+ },
+ {
+ left: {
+ line_code: null,
+ type: 'match',
+ old_line: null,
+ new_line: null,
+ discussions: [],
+ text: '',
+ rich_text: '',
+ meta_data: {
+ old_pos: 3,
+ new_pos: 5,
+ },
+ },
+ right: {
+ line_code: null,
+ type: 'match',
+ old_line: null,
+ new_line: null,
+ discussions: [],
+ text: '',
+ rich_text: '',
+ meta_data: {
+ old_pos: 3,
+ new_pos: 5,
+ },
+ },
+ },
+ ],
+ discussions: [],
+ renderingLines: false,
+};
diff --git a/spec/frontend/diffs/mock_data/merge_request_diffs.js b/spec/frontend/diffs/mock_data/merge_request_diffs.js
index 4bbef146336..f98374d26bd 100644
--- a/spec/frontend/diffs/mock_data/merge_request_diffs.js
+++ b/spec/frontend/diffs/mock_data/merge_request_diffs.js
@@ -8,7 +8,7 @@ export default [
short_commit_sha: 'de7a8f7f',
version_path: '/gnuwget/wget2/merge_requests/6/diffs?diff_id=37',
compare_path:
- '/gnuwget/wget2/merge_requests/6/diffs?diff_id=37&start_sha=de7a8f7f20c3ea2e0bef3ba01cfd41c21f6b4995',
+ '/gnuwget/wget2/-/merge_requests/6/diffs?diff_id=37&start_sha=de7a8f7f20c3ea2e0bef3ba01cfd41c21f6b4995',
},
{
base_version_path: '/gnuwget/wget2/merge_requests/6/diffs?diff_id=36',
@@ -19,7 +19,7 @@ export default [
short_commit_sha: 'e78fc18f',
version_path: '/gnuwget/wget2/merge_requests/6/diffs?diff_id=36',
compare_path:
- '/gnuwget/wget2/merge_requests/6/diffs?diff_id=37&start_sha=e78fc18fa37acb2185c59ca94d4a964464feb50e',
+ '/gnuwget/wget2/-/merge_requests/6/diffs?diff_id=37&start_sha=e78fc18fa37acb2185c59ca94d4a964464feb50e',
},
{
base_version_path: '/gnuwget/wget2/merge_requests/6/diffs?diff_id=35',
@@ -30,7 +30,7 @@ export default [
short_commit_sha: '48da7e7e',
version_path: '/gnuwget/wget2/merge_requests/6/diffs?diff_id=35',
compare_path:
- '/gnuwget/wget2/merge_requests/6/diffs?diff_id=37&start_sha=48da7e7e9a99d41c852578bd9cb541ca4d864b3e',
+ '/gnuwget/wget2/-/merge_requests/6/diffs?diff_id=37&start_sha=48da7e7e9a99d41c852578bd9cb541ca4d864b3e',
},
{
base_version_path: '/gnuwget/wget2/merge_requests/6/diffs?diff_id=20',
@@ -41,6 +41,6 @@ export default [
short_commit_sha: '47bac2ed',
version_path: '/gnuwget/wget2/merge_requests/6/diffs?diff_id=20',
compare_path:
- '/gnuwget/wget2/merge_requests/6/diffs?diff_id=37&start_sha=47bac2ed972c5bee344c1cea159a22cd7f711dc0',
+ '/gnuwget/wget2/-/merge_requests/6/diffs?diff_id=37&start_sha=47bac2ed972c5bee344c1cea159a22cd7f711dc0',
},
];
diff --git a/spec/frontend/environments/emtpy_state_spec.js b/spec/frontend/environments/emtpy_state_spec.js
new file mode 100644
index 00000000000..ed90c13f1e1
--- /dev/null
+++ b/spec/frontend/environments/emtpy_state_spec.js
@@ -0,0 +1,40 @@
+import { shallowMount } from '@vue/test-utils';
+import EmptyState from '~/environments/components/empty_state.vue';
+
+describe('environments empty state', () => {
+ let vm;
+
+ beforeEach(() => {
+ vm = shallowMount(EmptyState, {
+ propsData: {
+ newPath: 'foo',
+ canCreateEnvironment: true,
+ helpPath: 'bar',
+ },
+ });
+ });
+
+ afterEach(() => {
+ vm.destroy();
+ });
+
+ it('renders the empty state', () => {
+ expect(vm.find('.js-blank-state-title').text()).toEqual(
+ "You don't have any environments right now",
+ );
+ });
+
+ it('renders the new environment button', () => {
+ expect(vm.find('.js-new-environment-button').attributes('href')).toEqual('foo');
+ });
+
+ describe('without permission', () => {
+ beforeEach(() => {
+ vm.setProps({ canCreateEnvironment: false });
+ });
+
+ it('does not render the new environment button', () => {
+ expect(vm.find('.js-new-environment-button').exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/environments/enable_review_app_button_spec.js b/spec/frontend/environments/enable_review_app_button_spec.js
new file mode 100644
index 00000000000..5549a1737fc
--- /dev/null
+++ b/spec/frontend/environments/enable_review_app_button_spec.js
@@ -0,0 +1,31 @@
+import { shallowMount, mount } from '@vue/test-utils';
+import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
+import EnableReviewAppButton from '~/environments/components/enable_review_app_button.vue';
+
+describe('Enable Review App Button', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('renders button with text', () => {
+ beforeEach(() => {
+ wrapper = mount(EnableReviewAppButton);
+ });
+
+    it('renders the Enable review app text', () => {
+ expect(wrapper.text()).toBe('Enable review app');
+ });
+ });
+
+ describe('renders the modal', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(EnableReviewAppButton);
+ });
+
+ it('renders the copyToClipboard button', () => {
+ expect(wrapper.find(ModalCopyButton).exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/environments/environment_actions_spec.js b/spec/frontend/environments/environment_actions_spec.js
new file mode 100644
index 00000000000..4c06e19cec0
--- /dev/null
+++ b/spec/frontend/environments/environment_actions_spec.js
@@ -0,0 +1,124 @@
+import { shallowMount } from '@vue/test-utils';
+import { TEST_HOST } from 'helpers/test_constants';
+import eventHub from '~/environments/event_hub';
+import EnvironmentActions from '~/environments/components/environment_actions.vue';
+import Icon from '~/vue_shared/components/icon.vue';
+import { GlLoadingIcon } from '@gitlab/ui';
+
+describe('EnvironmentActions Component', () => {
+ let vm;
+
+ beforeEach(() => {
+ vm = shallowMount(EnvironmentActions, { propsData: { actions: [] } });
+ });
+
+ afterEach(() => {
+ vm.destroy();
+ });
+
+ it('should render a dropdown button with 2 icons', () => {
+ expect(vm.find('.dropdown-new').findAll(Icon).length).toBe(2);
+ });
+
+ it('should render a dropdown button with aria-label description', () => {
+ expect(vm.find('.dropdown-new').attributes('aria-label')).toEqual('Deploy to...');
+ });
+
+ describe('is loading', () => {
+ beforeEach(() => {
+ vm.setData({ isLoading: true });
+ });
+
+ it('should render a dropdown button with a loading icon', () => {
+ expect(vm.findAll(GlLoadingIcon).length).toBe(1);
+ });
+ });
+
+ describe('manual actions', () => {
+ const actions = [
+ {
+ name: 'bar',
+ play_path: 'https://gitlab.com/play',
+ },
+ {
+ name: 'foo',
+ play_path: '#',
+ },
+ {
+ name: 'foo bar',
+ play_path: 'url',
+ playable: false,
+ },
+ ];
+
+ beforeEach(() => {
+ vm.setProps({ actions });
+ });
+
+ it('should render a dropdown with the provided list of actions', () => {
+ expect(vm.findAll('.dropdown-menu li').length).toEqual(actions.length);
+ });
+
+ it("should render a disabled action when it's not playable", () => {
+ expect(vm.find('.dropdown-menu li:last-child button').attributes('disabled')).toEqual(
+ 'disabled',
+ );
+
+ expect(vm.find('.dropdown-menu li:last-child button').classes('disabled')).toBe(true);
+ });
+ });
+
+ describe('scheduled jobs', () => {
+ const scheduledJobAction = {
+ name: 'scheduled action',
+ playPath: `${TEST_HOST}/scheduled/job/action`,
+ playable: true,
+ scheduledAt: '2063-04-05T00:42:00Z',
+ };
+ const expiredJobAction = {
+ name: 'expired action',
+ playPath: `${TEST_HOST}/expired/job/action`,
+ playable: true,
+ scheduledAt: '2018-10-05T08:23:00Z',
+ };
+ const findDropdownItem = action => {
+ const buttons = vm.findAll('.dropdown-menu li button');
+ return buttons.filter(button => button.text().startsWith(action.name)).at(0);
+ };
+
+ beforeEach(() => {
+ jest.spyOn(Date, 'now').mockImplementation(() => new Date('2063-04-04T00:42:00Z').getTime());
+ vm.setProps({ actions: [scheduledJobAction, expiredJobAction] });
+ });
+
+ it('emits postAction event after confirming', () => {
+ const emitSpy = jest.fn();
+ eventHub.$on('postAction', emitSpy);
+ jest.spyOn(window, 'confirm').mockImplementation(() => true);
+
+ findDropdownItem(scheduledJobAction).trigger('click');
+
+ expect(window.confirm).toHaveBeenCalled();
+ expect(emitSpy).toHaveBeenCalledWith({ endpoint: scheduledJobAction.playPath });
+ });
+
+ it('does not emit postAction event if confirmation is cancelled', () => {
+ const emitSpy = jest.fn();
+ eventHub.$on('postAction', emitSpy);
+ jest.spyOn(window, 'confirm').mockImplementation(() => false);
+
+ findDropdownItem(scheduledJobAction).trigger('click');
+
+ expect(window.confirm).toHaveBeenCalled();
+ expect(emitSpy).not.toHaveBeenCalled();
+ });
+
+ it('displays the remaining time in the dropdown', () => {
+ expect(findDropdownItem(scheduledJobAction).text()).toContain('24:00:00');
+ });
+
+ it('displays 00:00:00 for expired jobs in the dropdown', () => {
+ expect(findDropdownItem(expiredJobAction).text()).toContain('00:00:00');
+ });
+ });
+});
diff --git a/spec/frontend/environments/environment_external_url_spec.js b/spec/frontend/environments/environment_external_url_spec.js
new file mode 100644
index 00000000000..9997ea94941
--- /dev/null
+++ b/spec/frontend/environments/environment_external_url_spec.js
@@ -0,0 +1,16 @@
+import { shallowMount } from '@vue/test-utils';
+import ExternalUrlComp from '~/environments/components/environment_external_url.vue';
+
+describe('External URL Component', () => {
+ let wrapper;
+ const externalUrl = 'https://gitlab.com';
+
+ beforeEach(() => {
+ wrapper = shallowMount(ExternalUrlComp, { propsData: { externalUrl } });
+ });
+
+ it('should link to the provided externalUrl prop', () => {
+ expect(wrapper.attributes('href')).toEqual(externalUrl);
+ expect(wrapper.find('a').exists()).toBe(true);
+ });
+});
diff --git a/spec/javascripts/environments/environments_store_spec.js b/spec/frontend/environments/environments_store_spec.js
index 8abdbcbbe54..8abdbcbbe54 100644
--- a/spec/javascripts/environments/environments_store_spec.js
+++ b/spec/frontend/environments/environments_store_spec.js
diff --git a/spec/frontend/environments/folder/environments_folder_view_spec.js b/spec/frontend/environments/folder/environments_folder_view_spec.js
new file mode 100644
index 00000000000..740225ddd9d
--- /dev/null
+++ b/spec/frontend/environments/folder/environments_folder_view_spec.js
@@ -0,0 +1,180 @@
+import { mount } from '@vue/test-utils';
+import axios from '~/lib/utils/axios_utils';
+import MockAdapter from 'axios-mock-adapter';
+import EnvironmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue';
+import EnvironmentTable from '~/environments/components/environments_table.vue';
+import { environmentsList } from '../mock_data';
+import { removeBreakLine, removeWhitespace } from 'helpers/text_helper';
+import { GlPagination } from '@gitlab/ui';
+
+describe('Environments Folder View', () => {
+ let mock;
+ let wrapper;
+
+ const mockData = {
+ endpoint: 'environments.json',
+ folderName: 'review',
+ canReadEnvironment: true,
+ cssContainerClass: 'container',
+ canaryDeploymentFeatureId: 'canary_deployment',
+ showCanaryDeploymentCallout: true,
+ userCalloutsPath: '/callouts',
+ lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
+ helpCanaryDeploymentsPath: 'help/canary-deployments',
+ };
+
+ const mockEnvironments = environmentList => {
+ mock.onGet(mockData.endpoint).reply(
+ 200,
+ {
+ environments: environmentList,
+ stopped_count: 1,
+ available_count: 0,
+ },
+ {
+ 'X-nExt-pAge': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '2',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '20',
+ 'X-Total-Pages': '10',
+ },
+ );
+ };
+
+ const createWrapper = () => {
+ wrapper = mount(EnvironmentsFolderViewComponent, { propsData: mockData });
+ };
+
+ const findEnvironmentsTabAvailable = () => wrapper.find('.js-environments-tab-available');
+
+ const findEnvironmentsTabStopped = () => wrapper.find('.js-environments-tab-stopped');
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ wrapper.destroy();
+ });
+
+ describe('successful request', () => {
+ beforeEach(() => {
+ mockEnvironments(environmentsList);
+ createWrapper();
+ return axios.waitForAll();
+ });
+
+ it('should render a table with environments', () => {
+ const table = wrapper.find(EnvironmentTable);
+
+ expect(table.exists()).toBe(true);
+ expect(table.find('.environment-name').text()).toEqual(environmentsList[0].name);
+ });
+
+ it('should render available tab with count', () => {
+ const tabTable = findEnvironmentsTabAvailable();
+
+ expect(tabTable.text()).toContain('Available');
+ expect(tabTable.find('.badge').text()).toContain('0');
+ });
+
+ it('should render stopped tab with count', () => {
+ const tabTable = findEnvironmentsTabStopped();
+
+ expect(tabTable.text()).toContain('Stopped');
+ expect(tabTable.find('.badge').text()).toContain('1');
+ });
+
+ it('should render parent folder name', () => {
+ expect(removeBreakLine(removeWhitespace(wrapper.find('.js-folder-name').text()))).toContain(
+ 'Environments / review',
+ );
+ });
+
+ describe('pagination', () => {
+ it('should render pagination', () => {
+ expect(wrapper.find(GlPagination).exists()).toBe(true);
+ });
+
+ it('should make an API request when changing page', () => {
+ jest.spyOn(wrapper.vm, 'updateContent').mockImplementation(() => {});
+ wrapper.find('.gl-pagination .page-item:nth-last-of-type(2) .page-link').trigger('click');
+ expect(wrapper.vm.updateContent).toHaveBeenCalledWith({
+ scope: wrapper.vm.scope,
+ page: '10',
+ });
+ });
+
+ it('should make an API request when using tabs', () => {
+ jest.spyOn(wrapper.vm, 'updateContent').mockImplementation(() => {});
+ findEnvironmentsTabStopped().trigger('click');
+ expect(wrapper.vm.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
+ });
+ });
+ });
+
+  describe('unsuccessful request', () => {
+ beforeEach(() => {
+ mock.onGet(mockData.endpoint).reply(500, { environments: [] });
+ createWrapper();
+ return axios.waitForAll();
+ });
+
+ it('should not render a table', () => {
+ expect(wrapper.find(EnvironmentTable).exists()).toBe(false);
+ });
+
+ it('should render available tab with count 0', () => {
+ const tabTable = findEnvironmentsTabAvailable();
+
+ expect(tabTable.text()).toContain('Available');
+ expect(tabTable.find('.badge').text()).toContain('0');
+ });
+
+ it('should render stopped tab with count 0', () => {
+ const tabTable = findEnvironmentsTabStopped();
+
+ expect(tabTable.text()).toContain('Stopped');
+ expect(tabTable.find('.badge').text()).toContain('0');
+ });
+ });
+
+ describe('methods', () => {
+ beforeEach(() => {
+ mockEnvironments([]);
+ createWrapper();
+ jest.spyOn(window.history, 'pushState').mockImplementation(() => {});
+ return axios.waitForAll();
+ });
+
+ describe('updateContent', () => {
+ it('should set given parameters', () =>
+ wrapper.vm.updateContent({ scope: 'stopped', page: '4' }).then(() => {
+ expect(wrapper.vm.page).toEqual('4');
+ expect(wrapper.vm.scope).toEqual('stopped');
+ expect(wrapper.vm.requestData.page).toEqual('4');
+ }));
+ });
+
+ describe('onChangeTab', () => {
+ it('should set page to 1', () => {
+ jest.spyOn(wrapper.vm, 'updateContent').mockImplementation(() => {});
+ wrapper.vm.onChangeTab('stopped');
+ expect(wrapper.vm.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
+ });
+ });
+
+ describe('onChangePage', () => {
+ it('should update page and keep scope', () => {
+ jest.spyOn(wrapper.vm, 'updateContent').mockImplementation(() => {});
+ wrapper.vm.onChangePage(4);
+ expect(wrapper.vm.updateContent).toHaveBeenCalledWith({
+ scope: wrapper.vm.scope,
+ page: '4',
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/environments/mock_data.js b/spec/frontend/environments/mock_data.js
index a2b581578d2..77c5dad0bbf 100644
--- a/spec/frontend/environments/mock_data.js
+++ b/spec/frontend/environments/mock_data.js
@@ -1,3 +1,31 @@
+const devProps = {
+ id: 7,
+ name: 'DEV',
+ state: 'available',
+ external_url: null,
+ environment_type: null,
+ last_deployment: null,
+ has_stop_action: false,
+ environment_path: '/root/review-app/environments/7',
+ stop_path: '/root/review-app/environments/7/stop',
+ created_at: '2017-01-31T10:53:46.894Z',
+ updated_at: '2017-01-31T10:53:46.894Z',
+};
+
+const buildProps = {
+ id: 12,
+ name: 'build/update-README',
+ state: 'available',
+ external_url: null,
+ environment_type: 'build',
+ last_deployment: null,
+ has_stop_action: false,
+ environment_path: '/root/review-app/environments/12',
+ stop_path: '/root/review-app/environments/12/stop',
+ created_at: '2017-02-01T19:42:18.400Z',
+ updated_at: '2017-02-01T19:42:18.400Z',
+};
+
const environment = {
name: 'production',
size: 1,
@@ -66,6 +94,18 @@ const environment = {
auto_stop_at: null,
};
+const environmentsList = [
+ {
+ size: 1,
+ ...devProps,
+ },
+ {
+ folderName: 'build',
+ size: 5,
+ ...buildProps,
+ },
+];
+
const folder = {
name: 'review',
folderName: 'review',
@@ -78,6 +118,23 @@ const folder = {
},
};
+const serverData = [
+ {
+ name: 'DEV',
+ size: 1,
+ latest: {
+ ...devProps,
+ },
+ },
+ {
+ name: 'build',
+ size: 5,
+ latest: {
+ ...buildProps,
+ },
+ },
+];
+
const tableData = {
name: {
title: 'Environment',
@@ -108,4 +165,4 @@ const tableData = {
},
};
-export { environment, folder, tableData };
+export { environment, environmentsList, folder, serverData, tableData };
diff --git a/spec/frontend/error_tracking/components/error_details_spec.js b/spec/frontend/error_tracking/components/error_details_spec.js
index 35014b00dd8..94bf0189c91 100644
--- a/spec/frontend/error_tracking/components/error_details_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_spec.js
@@ -1,9 +1,15 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
-import { GlLoadingIcon, GlLink, GlBadge, GlFormInput } from '@gitlab/ui';
+import { __ } from '~/locale';
+import { GlLoadingIcon, GlLink, GlBadge, GlFormInput, GlAlert, GlSprintf } from '@gitlab/ui';
import LoadingButton from '~/vue_shared/components/loading_button.vue';
import Stacktrace from '~/error_tracking/components/stacktrace.vue';
import ErrorDetails from '~/error_tracking/components/error_details.vue';
+import {
+ severityLevel,
+ severityLevelVariant,
+ errorStatus,
+} from '~/error_tracking/components/constants';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -22,7 +28,7 @@ describe('ErrorDetails', () => {
function mountComponent() {
wrapper = shallowMount(ErrorDetails, {
- stubs: { LoadingButton },
+ stubs: { LoadingButton, GlSprintf },
localVue,
store,
mocks,
@@ -31,14 +37,13 @@ describe('ErrorDetails', () => {
projectPath: '/root/gitlab-test',
listPath: '/error_tracking',
issueUpdatePath: '/123',
- issueDetailsPath: '/123/details',
issueStackTracePath: '/stacktrace',
projectIssuesPath: '/test-project/issues/',
csrfToken: 'fakeToken',
},
});
wrapper.setData({
- GQLerror: {
+ error: {
id: 'gid://gitlab/Gitlab::ErrorTracking::DetailedError/129381',
sentryId: 129381,
title: 'Issue title',
@@ -53,8 +58,9 @@ describe('ErrorDetails', () => {
beforeEach(() => {
actions = {
- startPollingDetails: () => {},
startPollingStacktrace: () => {},
+ updateIgnoreStatus: jest.fn(),
+ updateResolveStatus: jest.fn().mockResolvedValue({ closed_issue_iid: 1 }),
};
getters = {
@@ -63,8 +69,6 @@ describe('ErrorDetails', () => {
};
const state = {
- error: {},
- loading: true,
stacktraceData: {},
loadingStacktrace: true,
};
@@ -85,7 +89,7 @@ describe('ErrorDetails', () => {
$apollo: {
query,
queries: {
- GQLerror: {
+ error: {
loading: true,
stopPolling: jest.fn(),
},
@@ -114,9 +118,7 @@ describe('ErrorDetails', () => {
describe('Error details', () => {
beforeEach(() => {
- store.state.details.loading = false;
- store.state.details.error.id = 1;
- mocks.$apollo.queries.GQLerror.loading = false;
+ mocks.$apollo.queries.error.loading = false;
mountComponent();
});
@@ -130,26 +132,60 @@ describe('ErrorDetails', () => {
describe('Badges', () => {
it('should show language and error level badges', () => {
- store.state.details.error.tags = { level: 'error', logger: 'ruby' };
- mountComponent();
+ wrapper.setData({
+ error: {
+ tags: { level: 'error', logger: 'ruby' },
+ },
+ });
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.findAll(GlBadge).length).toBe(2);
});
});
it('should NOT show the badge if the tag is not present', () => {
- store.state.details.error.tags = { level: 'error' };
- mountComponent();
+ wrapper.setData({
+ error: {
+ tags: { level: 'error' },
+ },
+ });
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.findAll(GlBadge).length).toBe(1);
});
});
+
+ it.each(Object.keys(severityLevel))(
+ 'should set correct severity level variant for %s badge',
+ level => {
+ wrapper.setData({
+ error: {
+ tags: { level: severityLevel[level] },
+ },
+ });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(GlBadge).attributes('variant')).toEqual(
+ severityLevelVariant[severityLevel[level]],
+ );
+ });
+ },
+ );
+
+    it('should fall back to the ERROR severityLevelVariant when severityLevel is unknown', () => {
+ wrapper.setData({
+ error: {
+ tags: { level: 'someNewErrorLevel' },
+ },
+ });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(GlBadge).attributes('variant')).toEqual(
+ severityLevelVariant[severityLevel.ERROR],
+ );
+ });
+ });
});
describe('Stacktrace', () => {
it('should show stacktrace', () => {
store.state.details.loadingStacktrace = false;
- mountComponent();
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
expect(wrapper.find(Stacktrace).exists()).toBe(true);
@@ -159,9 +195,10 @@ describe('ErrorDetails', () => {
it('should NOT show stacktrace if no entries', () => {
store.state.details.loadingStacktrace = false;
store.getters = { 'details/sentryUrl': () => 'sentry.io', 'details/stacktrace': () => [] };
- mountComponent();
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.find(Stacktrace).exists()).toBe(false);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.find(Stacktrace).exists()).toBe(false);
+ });
});
});
@@ -195,20 +232,123 @@ describe('ErrorDetails', () => {
});
});
+ describe('Status update', () => {
+ const findUpdateIgnoreStatusButton = () =>
+ wrapper.find('[data-qa-selector="update_ignore_status_button"]');
+ const findUpdateResolveStatusButton = () =>
+ wrapper.find('[data-qa-selector="update_resolve_status_button"]');
+
+ afterEach(() => {
+ actions.updateIgnoreStatus.mockClear();
+ actions.updateResolveStatus.mockClear();
+ });
+
+ describe('when error is unresolved', () => {
+ beforeEach(() => {
+ store.state.details.errorStatus = errorStatus.UNRESOLVED;
+ mountComponent();
+ });
+
+ it('displays Ignore and Resolve buttons', () => {
+ expect(findUpdateIgnoreStatusButton().text()).toBe(__('Ignore'));
+ expect(findUpdateResolveStatusButton().text()).toBe(__('Resolve'));
+ });
+
+ it('marks error as ignored when ignore button is clicked', () => {
+ findUpdateIgnoreStatusButton().trigger('click');
+ expect(actions.updateIgnoreStatus.mock.calls[0][1]).toEqual(
+ expect.objectContaining({ status: errorStatus.IGNORED }),
+ );
+ });
+
+ it('marks error as resolved when resolve button is clicked', () => {
+ findUpdateResolveStatusButton().trigger('click');
+ expect(actions.updateResolveStatus.mock.calls[0][1]).toEqual(
+ expect.objectContaining({ status: errorStatus.RESOLVED }),
+ );
+ });
+ });
+
+ describe('when error is ignored', () => {
+ beforeEach(() => {
+ store.state.details.errorStatus = errorStatus.IGNORED;
+ mountComponent();
+ });
+
+ it('displays Undo Ignore and Resolve buttons', () => {
+ expect(findUpdateIgnoreStatusButton().text()).toBe(__('Undo ignore'));
+ expect(findUpdateResolveStatusButton().text()).toBe(__('Resolve'));
+ });
+
+ it('marks error as unresolved when ignore button is clicked', () => {
+ findUpdateIgnoreStatusButton().trigger('click');
+ expect(actions.updateIgnoreStatus.mock.calls[0][1]).toEqual(
+ expect.objectContaining({ status: errorStatus.UNRESOLVED }),
+ );
+ });
+
+ it('marks error as resolved when resolve button is clicked', () => {
+ findUpdateResolveStatusButton().trigger('click');
+ expect(actions.updateResolveStatus.mock.calls[0][1]).toEqual(
+ expect.objectContaining({ status: errorStatus.RESOLVED }),
+ );
+ });
+ });
+
+ describe('when error is resolved', () => {
+ beforeEach(() => {
+ store.state.details.errorStatus = errorStatus.RESOLVED;
+ mountComponent();
+ });
+
+ it('displays Ignore and Unresolve buttons', () => {
+ expect(findUpdateIgnoreStatusButton().text()).toBe(__('Ignore'));
+ expect(findUpdateResolveStatusButton().text()).toBe(__('Unresolve'));
+ });
+
+ it('marks error as ignored when ignore button is clicked', () => {
+ findUpdateIgnoreStatusButton().trigger('click');
+ expect(actions.updateIgnoreStatus.mock.calls[0][1]).toEqual(
+ expect.objectContaining({ status: errorStatus.IGNORED }),
+ );
+ });
+
+ it('marks error as unresolved when unresolve button is clicked', () => {
+ findUpdateResolveStatusButton().trigger('click');
+ expect(actions.updateResolveStatus.mock.calls[0][1]).toEqual(
+ expect.objectContaining({ status: errorStatus.UNRESOLVED }),
+ );
+ });
+
+ it('should show alert with closed issueId', () => {
+ const findAlert = () => wrapper.find(GlAlert);
+ const closedIssueId = 123;
+ wrapper.setData({
+ isAlertVisible: true,
+ closedIssueId,
+ });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toContain(`#${closedIssueId}`);
+ });
+ });
+ });
+ });
+
describe('GitLab issue link', () => {
- const gitlabIssue = 'https://gitlab.example.com/issues/1';
- const findGitLabLink = () => wrapper.find(`[href="${gitlabIssue}"]`);
+ const gitlabIssuePath = 'https://gitlab.example.com/issues/1';
+ const findGitLabLink = () => wrapper.find(`[href="${gitlabIssuePath}"]`);
const findCreateIssueButton = () => wrapper.find('[data-qa-selector="create_issue_button"]');
const findViewIssueButton = () => wrapper.find('[data-qa-selector="view_issue_button"]');
describe('is present', () => {
beforeEach(() => {
- store.state.details.loading = false;
- store.state.details.error = {
- id: 1,
- gitlab_issue: gitlabIssue,
- };
- mountComponent();
+ wrapper.setData({
+ error: {
+ gitlabIssuePath,
+ },
+ });
});
it('should display the View issue button', () => {
@@ -226,12 +366,11 @@ describe('ErrorDetails', () => {
describe('is not present', () => {
beforeEach(() => {
- store.state.details.loading = false;
- store.state.details.error = {
- id: 1,
- gitlab_issue: null,
- };
- mountComponent();
+ wrapper.setData({
+ error: {
+ gitlabIssuePath: null,
+ },
+ });
});
it('should not display the View issue button', () => {
@@ -255,9 +394,9 @@ describe('ErrorDetails', () => {
const findGitLabCommitLink = () => wrapper.find(`[href$="${gitlabCommitPath}"]`);
it('should display a link', () => {
- mocks.$apollo.queries.GQLerror.loading = false;
+ mocks.$apollo.queries.error.loading = false;
wrapper.setData({
- GQLerror: {
+ error: {
gitlabCommit,
gitlabCommitPath,
},
@@ -268,9 +407,9 @@ describe('ErrorDetails', () => {
});
it('should not display a link', () => {
- mocks.$apollo.queries.GQLerror.loading = false;
+ mocks.$apollo.queries.error.loading = false;
wrapper.setData({
- GQLerror: {
+ error: {
gitlabCommit: null,
},
});
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index 310cd676ca1..b632b461eb9 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -62,6 +62,7 @@ describe('ErrorTrackingList', () => {
sortByField: jest.fn(),
fetchPaginatedResults: jest.fn(),
updateStatus: jest.fn(),
+ removeIgnoredResolvedErrors: jest.fn(),
};
const state = {
@@ -221,6 +222,8 @@ describe('ErrorTrackingList', () => {
});
describe('When the ignore button on an error is clicked', () => {
+ const ignoreErrorButton = () => wrapper.find({ ref: 'ignoreError' });
+
beforeEach(() => {
store.state.list.loading = false;
store.state.list.errors = errorsList;
@@ -235,20 +238,30 @@ describe('ErrorTrackingList', () => {
});
it('sends the "ignored" status and error ID', () => {
- wrapper.find({ ref: 'ignoreError' }).trigger('click');
+ ignoreErrorButton().trigger('click');
expect(actions.updateStatus).toHaveBeenCalledWith(
expect.anything(),
{
- endpoint: '/project/test/-/error_tracking/3.json',
- redirectUrl: '/error_tracking',
+ endpoint: `/project/test/-/error_tracking/${errorsList[0].id}.json`,
status: 'ignored',
},
undefined,
);
});
+
+ it('calls an action to remove the item from the list', () => {
+ ignoreErrorButton().trigger('click');
+ expect(actions.removeIgnoredResolvedErrors).toHaveBeenCalledWith(
+ expect.anything(),
+ '1',
+ undefined,
+ );
+ });
});
describe('When the resolve button on an error is clicked', () => {
+ const resolveErrorButton = () => wrapper.find({ ref: 'resolveError' });
+
beforeEach(() => {
store.state.list.loading = false;
store.state.list.errors = errorsList;
@@ -263,17 +276,25 @@ describe('ErrorTrackingList', () => {
});
it('sends "resolved" status and error ID', () => {
- wrapper.find({ ref: 'resolveError' }).trigger('click');
+ resolveErrorButton().trigger('click');
expect(actions.updateStatus).toHaveBeenCalledWith(
expect.anything(),
{
- endpoint: '/project/test/-/error_tracking/3.json',
- redirectUrl: '/error_tracking',
+ endpoint: `/project/test/-/error_tracking/${errorsList[0].id}.json`,
status: 'resolved',
},
undefined,
);
});
+
+ it('calls an action to remove the item from the list', () => {
+ resolveErrorButton().trigger('click');
+ expect(actions.removeIgnoredResolvedErrors).toHaveBeenCalledWith(
+ expect.anything(),
+ '1',
+ undefined,
+ );
+ });
});
describe('When error tracking is disabled and user is not allowed to enable it', () => {
diff --git a/spec/frontend/error_tracking/store/actions_spec.js b/spec/frontend/error_tracking/store/actions_spec.js
index 8bc53d94345..e4a895902b3 100644
--- a/spec/frontend/error_tracking/store/actions_spec.js
+++ b/spec/frontend/error_tracking/store/actions_spec.js
@@ -10,6 +10,8 @@ jest.mock('~/flash.js');
jest.mock('~/lib/utils/url_utility');
let mock;
+const commit = jest.fn();
+const dispatch = jest.fn().mockResolvedValue();
describe('Sentry common store actions', () => {
beforeEach(() => {
@@ -20,26 +22,22 @@ describe('Sentry common store actions', () => {
mock.restore();
createFlash.mockClear();
});
+ const endpoint = '123/stacktrace';
+ const redirectUrl = '/list';
+ const status = 'resolved';
+ const params = { endpoint, redirectUrl, status };
describe('updateStatus', () => {
- const endpoint = '123/stacktrace';
- const redirectUrl = '/list';
- const status = 'resolved';
-
it('should handle successful status update', done => {
mock.onPut().reply(200, {});
testAction(
actions.updateStatus,
- { endpoint, redirectUrl, status },
+ params,
{},
[
{
- payload: true,
- type: types.SET_UPDATING_RESOLVE_STATUS,
- },
- {
- payload: false,
- type: 'SET_UPDATING_RESOLVE_STATUS',
+ payload: 'resolved',
+ type: types.SET_ERROR_STATUS,
},
],
[],
@@ -52,27 +50,29 @@ describe('Sentry common store actions', () => {
it('should handle unsuccessful status update', done => {
mock.onPut().reply(400, {});
- testAction(
- actions.updateStatus,
- { endpoint, redirectUrl, status },
- {},
- [
- {
- payload: true,
- type: types.SET_UPDATING_RESOLVE_STATUS,
- },
- {
- payload: false,
- type: types.SET_UPDATING_RESOLVE_STATUS,
- },
- ],
- [],
- () => {
- expect(visitUrl).not.toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledTimes(1);
- done();
- },
- );
+ testAction(actions.updateStatus, params, {}, [], [], () => {
+ expect(visitUrl).not.toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ done();
+ });
});
});
+
+ describe('updateResolveStatus', () => {
+ it('handles status update', () =>
+ actions.updateResolveStatus({ commit, dispatch }, params).then(() => {
+ expect(commit).toHaveBeenCalledWith(types.SET_UPDATING_RESOLVE_STATUS, true);
+ expect(commit).toHaveBeenCalledWith(types.SET_UPDATING_RESOLVE_STATUS, false);
+ expect(dispatch).toHaveBeenCalledWith('updateStatus', params);
+ }));
+ });
+
+ describe('updateIgnoreStatus', () => {
+ it('handles status update', () =>
+ actions.updateIgnoreStatus({ commit, dispatch }, params).then(() => {
+ expect(commit).toHaveBeenCalledWith(types.SET_UPDATING_IGNORE_STATUS, true);
+ expect(commit).toHaveBeenCalledWith(types.SET_UPDATING_IGNORE_STATUS, false);
+ expect(dispatch).toHaveBeenCalledWith('updateStatus', params);
+ }));
+ });
});
diff --git a/spec/frontend/error_tracking/store/details/actions_spec.js b/spec/frontend/error_tracking/store/details/actions_spec.js
index 129760bb705..6802300b0f5 100644
--- a/spec/frontend/error_tracking/store/details/actions_spec.js
+++ b/spec/frontend/error_tracking/store/details/actions_spec.js
@@ -4,64 +4,33 @@ import axios from '~/lib/utils/axios_utils';
import createFlash from '~/flash';
import * as actions from '~/error_tracking/store/details/actions';
import * as types from '~/error_tracking/store/details/mutation_types';
+import Poll from '~/lib/utils/poll';
+
+let mockedAdapter;
+let mockedRestart;
jest.mock('~/flash.js');
jest.mock('~/lib/utils/url_utility');
-let mock;
-
describe('Sentry error details store actions', () => {
beforeEach(() => {
- mock = new MockAdapter(axios);
+ mockedAdapter = new MockAdapter(axios);
});
afterEach(() => {
- mock.restore();
+ mockedAdapter.restore();
createFlash.mockClear();
- });
-
- describe('startPollingDetails', () => {
- const endpoint = '123/details';
- it('should commit SET_ERROR with received response', done => {
- const payload = { error: { id: 1 } };
- mock.onGet().reply(200, payload);
- testAction(
- actions.startPollingDetails,
- { endpoint },
- {},
- [
- { type: types.SET_ERROR, payload: payload.error },
- { type: types.SET_LOADING, payload: false },
- ],
- [],
- () => {
- done();
- },
- );
- });
-
- it('should show flash on API error', done => {
- mock.onGet().reply(400);
-
- testAction(
- actions.startPollingDetails,
- { endpoint },
- {},
- [{ type: types.SET_LOADING, payload: false }],
- [],
- () => {
- expect(createFlash).toHaveBeenCalledTimes(1);
- done();
- },
- );
- });
+ if (mockedRestart) {
+ mockedRestart.mockRestore();
+ mockedRestart = null;
+ }
});
describe('startPollingStacktrace', () => {
const endpoint = '123/stacktrace';
it('should commit SET_ERROR with received response', done => {
const payload = { error: [1, 2, 3] };
- mock.onGet().reply(200, payload);
+ mockedAdapter.onGet().reply(200, payload);
testAction(
actions.startPollingStacktrace,
{ endpoint },
@@ -78,7 +47,7 @@ describe('Sentry error details store actions', () => {
});
it('should show flash on API error', done => {
- mock.onGet().reply(400);
+ mockedAdapter.onGet().reply(400);
testAction(
actions.startPollingStacktrace,
@@ -92,5 +61,16 @@ describe('Sentry error details store actions', () => {
},
);
});
+
+ it('should not restart polling when receiving an empty 204 response', done => {
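+    // Spy on Poll#restart so the callback can assert that a 204 (empty) response does not restart polling.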
+ mockedRestart = jest.spyOn(Poll.prototype, 'restart');
+ mockedAdapter.onGet().reply(204);
+
+ testAction(actions.startPollingStacktrace, { endpoint }, {}, [], [], () => {
+ expect(mockedRestart).toHaveBeenCalledTimes(0);
+ done();
+ });
+ });
});
});
diff --git a/spec/frontend/error_tracking/store/list/mutation_spec.js b/spec/frontend/error_tracking/store/list/mutation_spec.js
index 44a75b6aa1f..65f11aeeda1 100644
--- a/spec/frontend/error_tracking/store/list/mutation_spec.js
+++ b/spec/frontend/error_tracking/store/list/mutation_spec.js
@@ -5,6 +5,7 @@ import * as types from '~/error_tracking/store/list/mutation_types';
const ADD_RECENT_SEARCH = mutations[types.ADD_RECENT_SEARCH];
const CLEAR_RECENT_SEARCHES = mutations[types.CLEAR_RECENT_SEARCHES];
const LOAD_RECENT_SEARCHES = mutations[types.LOAD_RECENT_SEARCHES];
+const REMOVE_IGNORED_RESOLVED_ERRORS = mutations[types.REMOVE_IGNORED_RESOLVED_ERRORS];
describe('Error tracking mutations', () => {
describe('SET_ERRORS', () => {
@@ -114,5 +115,29 @@ describe('Error tracking mutations', () => {
expect(localStorage.getItem).toHaveBeenCalledWith('recent-searches/project/errors.json');
});
});
+
+ describe('REMOVE_IGNORED_RESOLVED_ERRORS', () => {
+ it('removes ignored or resolved errors from list', () => {
+ state.errors = [
+ {
+ id: 1,
+ status: 'unresolved',
+ },
+ {
+ id: 2,
+ status: 'ignored',
+ },
+ {
+ id: 3,
+ status: 'unresolved',
+ },
+ ];
+      const ignoredError = state.errors[1].id;
+
+      REMOVE_IGNORED_RESOLVED_ERRORS(state, ignoredError);
+
+      expect(state.errors.map(({ id }) => id)).not.toContain(ignoredError);
+ });
+ });
});
});
diff --git a/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js b/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
index 3ce105f27e4..d924f895da8 100644
--- a/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
+++ b/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { pick, clone } from 'lodash';
import Vuex from 'vuex';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
@@ -15,7 +15,7 @@ describe('error tracking settings project dropdown', () => {
wrapper = shallowMount(ProjectDropdown, {
localVue,
propsData: {
- ..._.pick(
+ ...pick(
defaultProps,
'dropdownLabel',
'invalidProjectLabel',
@@ -65,7 +65,7 @@ describe('error tracking settings project dropdown', () => {
describe('populated project list', () => {
beforeEach(() => {
- wrapper.setProps({ projects: _.clone(projectList), hasProjects: true });
+ wrapper.setProps({ projects: clone(projectList), hasProjects: true });
return wrapper.vm.$nextTick();
});
@@ -82,10 +82,10 @@ describe('error tracking settings project dropdown', () => {
});
describe('selected project', () => {
- const selectedProject = _.clone(projectList[0]);
+ const selectedProject = clone(projectList[0]);
beforeEach(() => {
- wrapper.setProps({ projects: _.clone(projectList), selectedProject, hasProjects: true });
+ wrapper.setProps({ projects: clone(projectList), selectedProject, hasProjects: true });
return wrapper.vm.$nextTick();
});
@@ -98,7 +98,7 @@ describe('error tracking settings project dropdown', () => {
describe('invalid project selected', () => {
beforeEach(() => {
wrapper.setProps({
- projects: _.clone(projectList),
+ projects: clone(projectList),
selectedProject: staleProject,
isProjectInvalid: true,
});
diff --git a/spec/frontend/error_tracking_settings/store/getters_spec.js b/spec/frontend/error_tracking_settings/store/getters_spec.js
index 2c5ff084b8a..b135fdee40b 100644
--- a/spec/frontend/error_tracking_settings/store/getters_spec.js
+++ b/spec/frontend/error_tracking_settings/store/getters_spec.js
@@ -47,7 +47,7 @@ describe('Error Tracking Settings - Getters', () => {
it('should display correctly when a project is selected', () => {
[state.selectedProject] = projectList;
- expect(getters.dropdownLabel(state, mockGetters)).toEqual('organizationName | name');
+ expect(getters.dropdownLabel(state, mockGetters)).toEqual('organizationName | slug');
});
it('should display correctly when no project is selected', () => {
diff --git a/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js b/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js
index 2543fb8768b..c0851096d8e 100644
--- a/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js
+++ b/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js
@@ -1,201 +1,125 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
import eventHub from '~/filtered_search/event_hub';
import RecentSearchesDropdownContent from '~/filtered_search/components/recent_searches_dropdown_content.vue';
import IssuableFilteredSearchTokenKeys from '~/filtered_search/issuable_filtered_search_token_keys';
-const createComponent = propsData => {
- const Component = Vue.extend(RecentSearchesDropdownContent);
-
- return new Component({
- el: document.createElement('div'),
- propsData,
- });
-};
-
-// Remove all the newlines and whitespace from the formatted markup
-const trimMarkupWhitespace = text => text.replace(/(\n|\s)+/gm, ' ').trim();
-
-describe('RecentSearchesDropdownContent', () => {
- const propsDataWithoutItems = {
- items: [],
- allowedKeys: IssuableFilteredSearchTokenKeys.getKeys(),
- };
- const propsDataWithItems = {
- items: ['foo', 'author:@root label:~foo bar'],
- allowedKeys: IssuableFilteredSearchTokenKeys.getKeys(),
+describe('Recent Searches Dropdown Content', () => {
+ let wrapper;
+
+ const findLocalStorageNote = () => wrapper.find({ ref: 'localStorageNote' });
+ const findDropdownItems = () => wrapper.findAll({ ref: 'dropdownItem' });
+ const findDropdownNote = () => wrapper.find({ ref: 'dropdownNote' });
+
+ const createComponent = props => {
+ wrapper = shallowMount(RecentSearchesDropdownContent, {
+ propsData: {
+ allowedKeys: IssuableFilteredSearchTokenKeys.getKeys(),
+ items: [],
+ isLocalStorageAvailable: false,
+ ...props,
+ },
+ });
};
- let vm;
afterEach(() => {
- if (vm) {
- vm.$destroy();
- }
+ wrapper.destroy();
+ wrapper = null;
});
- describe('with no items', () => {
- let el;
-
+ describe('when local storage is not available', () => {
beforeEach(() => {
- vm = createComponent(propsDataWithoutItems);
- el = vm.$el;
+ createComponent();
});
- it('should render empty state', () => {
- expect(el.querySelector('.dropdown-info-note')).toBeDefined();
-
- const items = el.querySelectorAll('.filtered-search-history-dropdown-item');
-
- expect(items.length).toEqual(propsDataWithoutItems.items.length);
+ it('renders a note about enabling local storage', () => {
+ expect(findLocalStorageNote().exists()).toBe(true);
});
- });
-
- describe('with items', () => {
- let el;
- beforeEach(() => {
- vm = createComponent(propsDataWithItems);
- el = vm.$el;
+ it('does not render dropdown items', () => {
+ expect(findDropdownItems().exists()).toBe(false);
});
- it('should render clear recent searches button', () => {
- expect(el.querySelector('.filtered-search-history-clear-button')).toBeDefined();
+ it('does not render dropdownNote', () => {
+ expect(findDropdownNote().exists()).toBe(false);
});
+ });
- it('should render recent search items', () => {
- const items = el.querySelectorAll('.filtered-search-history-dropdown-item');
-
- expect(items.length).toEqual(propsDataWithItems.items.length);
+ describe('when localStorage is available and items array is not empty', () => {
+ let onRecentSearchesItemSelectedSpy;
+ let onRequestClearRecentSearchesSpy;
- expect(
- trimMarkupWhitespace(
- items[0].querySelector('.filtered-search-history-dropdown-search-token').textContent,
- ),
- ).toEqual('foo');
-
- const item1Tokens = items[1].querySelectorAll('.filtered-search-history-dropdown-token');
-
- expect(item1Tokens.length).toEqual(2);
- expect(item1Tokens[0].querySelector('.name').textContent).toEqual('author:');
- expect(item1Tokens[0].querySelector('.value').textContent).toEqual('@root');
- expect(item1Tokens[1].querySelector('.name').textContent).toEqual('label:');
- expect(item1Tokens[1].querySelector('.value').textContent).toEqual('~foo');
- expect(
- trimMarkupWhitespace(
- items[1].querySelector('.filtered-search-history-dropdown-search-token').textContent,
- ),
- ).toEqual('bar');
+ beforeAll(() => {
+ onRecentSearchesItemSelectedSpy = jest.fn();
+ onRequestClearRecentSearchesSpy = jest.fn();
+ eventHub.$on('recentSearchesItemSelected', onRecentSearchesItemSelectedSpy);
+ eventHub.$on('requestClearRecentSearches', onRequestClearRecentSearchesSpy);
});
- });
-
- describe('if isLocalStorageAvailable is `false`', () => {
- let el;
beforeEach(() => {
- const props = Object.assign({ isLocalStorageAvailable: false }, propsDataWithItems);
-
- vm = createComponent(props);
- el = vm.$el;
+ createComponent({
+ items: ['foo', 'author:@root label:~foo bar'],
+ isLocalStorageAvailable: true,
+ });
});
- it('should render an info note', () => {
- const note = el.querySelector('.dropdown-info-note');
- const items = el.querySelectorAll('.filtered-search-history-dropdown-item');
+ afterAll(() => {
+ eventHub.$off('recentSearchesItemSelected', onRecentSearchesItemSelectedSpy);
+      eventHub.$off('requestClearRecentSearches', onRequestClearRecentSearchesSpy);
+ });
- expect(note).toBeDefined();
- expect(note.innerText.trim()).toBe('This feature requires local storage to be enabled');
- expect(items.length).toEqual(propsDataWithoutItems.items.length);
+ it('does not render a note about enabling local storage', () => {
+ expect(findLocalStorageNote().exists()).toBe(false);
});
- });
- describe('computed', () => {
- describe('processedItems', () => {
- it('with items', () => {
- vm = createComponent(propsDataWithItems);
- const { processedItems } = vm;
-
- expect(processedItems.length).toEqual(2);
-
- expect(processedItems[0].text).toEqual(propsDataWithItems.items[0]);
- expect(processedItems[0].tokens).toEqual([]);
- expect(processedItems[0].searchToken).toEqual('foo');
-
- expect(processedItems[1].text).toEqual(propsDataWithItems.items[1]);
- expect(processedItems[1].tokens.length).toEqual(2);
- expect(processedItems[1].tokens[0].prefix).toEqual('author:');
- expect(processedItems[1].tokens[0].suffix).toEqual('@root');
- expect(processedItems[1].tokens[1].prefix).toEqual('label:');
- expect(processedItems[1].tokens[1].suffix).toEqual('~foo');
- expect(processedItems[1].searchToken).toEqual('bar');
- });
+ it('does not render dropdownNote', () => {
+ expect(findDropdownNote().exists()).toBe(false);
+ });
- it('with no items', () => {
- vm = createComponent(propsDataWithoutItems);
- const { processedItems } = vm;
+    it('renders the correct number of dropdown items', () => {
+ expect(findDropdownItems()).toHaveLength(2);
+ });
- expect(processedItems.length).toEqual(0);
- });
+    it('renders second dropdown item with 2 tokens', () => {
+ expect(
+ findDropdownItems()
+ .at(1)
+ .findAll('.js-dropdown-token'),
+ ).toHaveLength(2);
});
- describe('hasItems', () => {
- it('with items', () => {
- vm = createComponent(propsDataWithItems);
- const { hasItems } = vm;
+ it('emits recentSearchesItemSelected on dropdown item click', () => {
+ findDropdownItems()
+ .at(0)
+ .find('.js-dropdown-button')
+ .trigger('click');
- expect(hasItems).toEqual(true);
- });
+ expect(onRecentSearchesItemSelectedSpy).toHaveBeenCalledWith('foo');
+ });
- it('with no items', () => {
- vm = createComponent(propsDataWithoutItems);
- const { hasItems } = vm;
+    it('emits requestClearRecentSearches on Clear recent searches button click', () => {
+ wrapper.find({ ref: 'clearButton' }).trigger('click');
- expect(hasItems).toEqual(false);
- });
+ expect(onRequestClearRecentSearchesSpy).toHaveBeenCalled();
});
});
- describe('methods', () => {
- describe('onItemActivated', () => {
- let onRecentSearchesItemSelectedSpy;
-
- beforeEach(() => {
- onRecentSearchesItemSelectedSpy = jest.fn();
- eventHub.$on('recentSearchesItemSelected', onRecentSearchesItemSelectedSpy);
-
- vm = createComponent(propsDataWithItems);
- });
-
- afterEach(() => {
- eventHub.$off('recentSearchesItemSelected', onRecentSearchesItemSelectedSpy);
- });
-
- it('emits event', () => {
- expect(onRecentSearchesItemSelectedSpy).not.toHaveBeenCalled();
- vm.onItemActivated('something');
-
- expect(onRecentSearchesItemSelectedSpy).toHaveBeenCalledWith('something');
+  describe('when local storage is available and items array is empty', () => {
+ beforeEach(() => {
+ createComponent({
+ isLocalStorageAvailable: true,
});
});
- describe('onRequestClearRecentSearches', () => {
- let onRequestClearRecentSearchesSpy;
-
- beforeEach(() => {
- onRequestClearRecentSearchesSpy = jest.fn();
- eventHub.$on('requestClearRecentSearches', onRequestClearRecentSearchesSpy);
-
- vm = createComponent(propsDataWithItems);
- });
-
- afterEach(() => {
- eventHub.$off('requestClearRecentSearches', onRequestClearRecentSearchesSpy);
- });
+ it('does not render a note about enabling local storage', () => {
+ expect(findLocalStorageNote().exists()).toBe(false);
+ });
- it('emits event', () => {
- expect(onRequestClearRecentSearchesSpy).not.toHaveBeenCalled();
- vm.onRequestClearRecentSearches({ stopPropagation: () => {} });
+ it('does not render dropdown items', () => {
+ expect(findDropdownItems().exists()).toBe(false);
+ });
- expect(onRequestClearRecentSearchesSpy).toHaveBeenCalled();
- });
+ it('renders dropdown note', () => {
+ expect(findDropdownNote().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/fixtures/merge_requests.rb b/spec/frontend/fixtures/merge_requests.rb
index f20c0aa3540..be11423ec41 100644
--- a/spec/frontend/fixtures/merge_requests.rb
+++ b/spec/frontend/fixtures/merge_requests.rb
@@ -10,20 +10,24 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont
let(:project) { create(:project, :repository, namespace: namespace, path: 'merge-requests-project') }
# rubocop: disable Layout/TrailingWhitespace
+ let(:description) do
+ <<~MARKDOWN.strip_heredoc
+ - [ ] Task List Item
+ - [ ]
+ - [ ] Task List Item 2
+ MARKDOWN
+ end
+ # rubocop: enable Layout/TrailingWhitespace
+
let(:merge_request) do
create(
:merge_request,
:with_diffs,
source_project: project,
target_project: project,
- description: <<~MARKDOWN.strip_heredoc
- - [ ] Task List Item
- - [ ]
- - [ ] Task List Item 2
- MARKDOWN
+ description: description
)
end
- # rubocop: enable Layout/TrailingWhitespace
let(:merged_merge_request) { create(:merge_request, :merged, source_project: project, target_project: project) }
let(:pipeline) do
@@ -36,10 +40,8 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont
end
let(:path) { "files/ruby/popen.rb" }
let(:position) do
- Gitlab::Diff::Position.new(
- old_path: path,
- new_path: path,
- old_line: nil,
+ build(:text_diff_position, :added,
+ file: path,
new_line: 14,
diff_refs: merge_request.diff_refs
)
@@ -112,14 +114,8 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont
let(:merge_request2) { create(:merge_request_with_diffs, :with_image_diffs, source_project: project, title: "Added images") }
let(:image_path) { "files/images/ee_repo_logo.png" }
let(:image_position) do
- Gitlab::Diff::Position.new(
- old_path: image_path,
- new_path: image_path,
- width: 100,
- height: 100,
- x: 1,
- y: 1,
- position_type: "image",
+ build(:image_diff_position,
+ file: image_path,
diff_refs: merge_request2.diff_refs
)
end
@@ -130,6 +126,15 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont
end
end
+ context 'with mentions' do
+ let(:group) { create(:group) }
+ let(:description) { "@#{group.full_path} @all @#{admin.username}" }
+
+ it 'merge_requests/merge_request_with_mentions.html' do
+ render_merge_request(merge_request)
+ end
+ end
+
private
def render_discussions_json(merge_request)
diff --git a/spec/frontend/fixtures/merge_requests_diffs.rb b/spec/frontend/fixtures/merge_requests_diffs.rb
index 9493cba03bb..7997ee79a01 100644
--- a/spec/frontend/fixtures/merge_requests_diffs.rb
+++ b/spec/frontend/fixtures/merge_requests_diffs.rb
@@ -12,10 +12,8 @@ describe Projects::MergeRequests::DiffsController, '(JavaScript fixtures)', type
let(:path) { "files/ruby/popen.rb" }
let(:selected_commit) { merge_request.all_commits[0] }
let(:position) do
- Gitlab::Diff::Position.new(
- old_path: path,
- new_path: path,
- old_line: nil,
+ build(:text_diff_position, :added,
+ file: path,
new_line: 14,
diff_refs: merge_request.diff_refs
)
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index 99869c46f3f..f58615000ee 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -4,7 +4,7 @@ import $ from 'jquery';
import GfmAutoComplete, { membersBeforeSave } from 'ee_else_ce/gfm_auto_complete';
import 'jquery.caret';
-import 'at.js';
+import '@gitlab/at.js';
import { TEST_HOST } from 'helpers/test_constants';
import { getJSONFixture } from 'helpers/fixtures';
diff --git a/spec/frontend/gl_field_errors_spec.js b/spec/frontend/gl_field_errors_spec.js
index 4653f519f65..078659bf429 100644
--- a/spec/frontend/gl_field_errors_spec.js
+++ b/spec/frontend/gl_field_errors_spec.js
@@ -1,5 +1,3 @@
-/* eslint-disable arrow-body-style */
-
import $ from 'jquery';
import GlFieldErrors from '~/gl_field_errors';
diff --git a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
index 5c784c8000f..3d56bef4b33 100644
--- a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
+++ b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
@@ -18,6 +18,8 @@ exports[`grafana integration component default state to match the default snapsh
<gl-button-stub
class="js-settings-toggle"
+ size="md"
+ variant="secondary"
>
Expand
</gl-button-stub>
@@ -89,6 +91,7 @@ exports[`grafana integration component default state to match the default snapsh
</gl-form-group-stub>
<gl-button-stub
+ size="md"
variant="success"
>
diff --git a/spec/frontend/graphql_shared/utils_spec.js b/spec/frontend/graphql_shared/utils_spec.js
new file mode 100644
index 00000000000..52386bf6ede
--- /dev/null
+++ b/spec/frontend/graphql_shared/utils_spec.js
@@ -0,0 +1,42 @@
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+
+describe('getIdFromGraphQLId', () => {
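+  // Each case maps a GraphQL global ID string to the numeric ID getIdFromGraphQLId should extract; malformed or incomplete gids return null.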
+ [
+ {
+ input: '',
+ output: null,
+ },
+ {
+ input: null,
+ output: null,
+ },
+ {
+ input: 'gid://',
+ output: null,
+ },
+ {
+ input: 'gid://gitlab/',
+ output: null,
+ },
+ {
+ input: 'gid://gitlab/Environments',
+ output: null,
+ },
+ {
+ input: 'gid://gitlab/Environments/',
+ output: null,
+ },
+ {
+ input: 'gid://gitlab/Environments/123',
+ output: 123,
+ },
+ {
+ input: 'gid://gitlab/DesignManagement::Version/2',
+ output: 2,
+ },
+ ].forEach(({ input, output }) => {
+ it(`getIdFromGraphQLId returns ${output} when passed ${input}`, () => {
+ expect(getIdFromGraphQLId(input)).toBe(output);
+ });
+ });
+});
diff --git a/spec/frontend/helpers/dom_shims/element_scroll_into_view.js b/spec/frontend/helpers/dom_shims/element_scroll_into_view.js
new file mode 100644
index 00000000000..a7262d04db0
--- /dev/null
+++ b/spec/frontend/helpers/dom_shims/element_scroll_into_view.js
@@ -0,0 +1 @@
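+// jsdom does not implement Element#scrollIntoView, so stub it for specs that call it.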
+Element.prototype.scrollIntoView = jest.fn();
diff --git a/spec/frontend/helpers/dom_shims/index.js b/spec/frontend/helpers/dom_shims/index.js
index 1fc5130cefc..63850b62ff7 100644
--- a/spec/frontend/helpers/dom_shims/index.js
+++ b/spec/frontend/helpers/dom_shims/index.js
@@ -1,2 +1,4 @@
+import './element_scroll_into_view';
import './get_client_rects';
import './inner_text';
+import './window_scroll_to';
diff --git a/spec/frontend/helpers/dom_shims/window_scroll_to.js b/spec/frontend/helpers/dom_shims/window_scroll_to.js
new file mode 100644
index 00000000000..20ae1910bf3
--- /dev/null
+++ b/spec/frontend/helpers/dom_shims/window_scroll_to.js
@@ -0,0 +1 @@
+window.scrollTo = jest.fn();
diff --git a/spec/frontend/ide/components/branches/item_spec.js b/spec/frontend/ide/components/branches/item_spec.js
index 1db3750609b..138443b715e 100644
--- a/spec/frontend/ide/components/branches/item_spec.js
+++ b/spec/frontend/ide/components/branches/item_spec.js
@@ -1,8 +1,8 @@
-import Vue from 'vue';
-import mountCompontent from 'helpers/vue_mount_component_helper';
+import { shallowMount } from '@vue/test-utils';
import router from '~/ide/ide_router';
import Item from '~/ide/components/branches/item.vue';
-import { getTimeago } from '~/lib/utils/datetime_utility';
+import Icon from '~/vue_shared/components/icon.vue';
+import Timeago from '~/vue_shared/components/time_ago_tooltip.vue';
import { projectData } from '../../mock_data';
const TEST_BRANCH = {
@@ -12,45 +12,45 @@ const TEST_BRANCH = {
const TEST_PROJECT_ID = projectData.name_with_namespace;
describe('IDE branch item', () => {
- const Component = Vue.extend(Item);
- let vm;
-
- beforeEach(() => {
- vm = mountCompontent(Component, {
- item: { ...TEST_BRANCH },
- projectId: TEST_PROJECT_ID,
- isActive: false,
+ let wrapper;
+
+ function createComponent(props = {}) {
+ wrapper = shallowMount(Item, {
+ propsData: {
+ item: { ...TEST_BRANCH },
+ projectId: TEST_PROJECT_ID,
+ isActive: false,
+ ...props,
+ },
});
- });
+ }
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- it('renders branch name and timeago', () => {
- const timeText = getTimeago().format(TEST_BRANCH.committedDate);
-
- expect(vm.$el.textContent).toContain(TEST_BRANCH.name);
- expect(vm.$el.querySelector('time')).toHaveText(timeText);
- expect(vm.$el.querySelector('.ic-mobile-issue-close')).toBe(null);
- });
+ describe('if not active', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+ it('renders branch name and timeago', () => {
+ expect(wrapper.text()).toContain(TEST_BRANCH.name);
+ expect(wrapper.find(Timeago).props('time')).toBe(TEST_BRANCH.committedDate);
+ expect(wrapper.find(Icon).exists()).toBe(false);
+ });
- it('renders link to branch', () => {
- const expectedHref = router.resolve(`/project/${TEST_PROJECT_ID}/edit/${TEST_BRANCH.name}`)
- .href;
+ it('renders link to branch', () => {
+ const expectedHref = router.resolve(`/project/${TEST_PROJECT_ID}/edit/${TEST_BRANCH.name}`)
+ .href;
- expect(vm.$el.textContent).toMatch('a');
- expect(vm.$el).toHaveAttr('href', expectedHref);
+ expect(wrapper.text()).toMatch('a');
+ expect(wrapper.attributes('href')).toBe(expectedHref);
+ });
});
- it('renders icon if isActive', done => {
- vm.isActive = true;
+  it('renders icon if active', () => {
+ createComponent({ isActive: true });
- vm.$nextTick()
- .then(() => {
- expect(vm.$el.querySelector('.ic-mobile-issue-close')).not.toBe(null);
- })
- .then(done)
- .catch(done.fail);
+ expect(wrapper.find(Icon).exists()).toBe(true);
});
});
diff --git a/spec/frontend/ide/components/error_message_spec.js b/spec/frontend/ide/components/error_message_spec.js
index 1de496ba3f8..3a4dcc5873d 100644
--- a/spec/frontend/ide/components/error_message_spec.js
+++ b/spec/frontend/ide/components/error_message_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import ErrorMessage from '~/ide/components/error_message.vue';
@@ -15,7 +15,7 @@ describe('IDE error message component', () => {
actions: { setErrorMessage: setErrorMessageMock },
});
- wrapper = shallowMount(ErrorMessage, {
+ wrapper = mount(ErrorMessage, {
propsData: {
message: {
text: 'some text',
@@ -38,15 +38,18 @@ describe('IDE error message component', () => {
wrapper = null;
});
+ const findDismissButton = () => wrapper.find('button[aria-label=Dismiss]');
+ const findActionButton = () => wrapper.find('button.gl-alert-action');
+
it('renders error message', () => {
const text = 'error message';
createComponent({ text });
expect(wrapper.text()).toContain(text);
});
- it('clears error message on click', () => {
+ it('clears error message on dismiss click', () => {
createComponent();
- wrapper.trigger('click');
+ findDismissButton().trigger('click');
expect(setErrorMessageMock).toHaveBeenCalledWith(expect.any(Object), null, undefined);
});
@@ -68,29 +71,27 @@ describe('IDE error message component', () => {
});
it('renders action button', () => {
- const button = wrapper.find('button');
+ const button = findActionButton();
expect(button.exists()).toBe(true);
expect(button.text()).toContain(message.actionText);
});
- it('does not clear error message on click', () => {
- wrapper.trigger('click');
-
- expect(setErrorMessageMock).not.toHaveBeenCalled();
+ it('does not show dismiss button', () => {
+ expect(findDismissButton().exists()).toBe(false);
});
it('dispatches action', () => {
- wrapper.find('button').trigger('click');
+ findActionButton().trigger('click');
expect(actionMock).toHaveBeenCalledWith(message.actionPayload);
});
it('does not dispatch action when already loading', () => {
- wrapper.find('button').trigger('click');
+ findActionButton().trigger('click');
actionMock.mockReset();
return wrapper.vm.$nextTick(() => {
- wrapper.find('button').trigger('click');
+ findActionButton().trigger('click');
return wrapper.vm.$nextTick().then(() => {
expect(actionMock).not.toHaveBeenCalled();
@@ -106,7 +107,7 @@ describe('IDE error message component', () => {
resolveAction = resolve;
}),
);
- wrapper.find('button').trigger('click');
+ findActionButton().trigger('click');
return wrapper.vm.$nextTick(() => {
expect(wrapper.find(GlLoadingIcon).isVisible()).toBe(true);
@@ -115,7 +116,7 @@ describe('IDE error message component', () => {
});
it('hides loading icon when operation finishes', () => {
- wrapper.find('button').trigger('click');
+ findActionButton().trigger('click');
return actionMock()
.then(() => wrapper.vm.$nextTick())
.then(() => {
diff --git a/spec/frontend/ide/components/ide_file_row_spec.js b/spec/frontend/ide/components/ide_file_row_spec.js
new file mode 100644
index 00000000000..c00ad7c8365
--- /dev/null
+++ b/spec/frontend/ide/components/ide_file_row_spec.js
@@ -0,0 +1,117 @@
+import { createLocalVue, mount } from '@vue/test-utils';
+import Vuex from 'vuex';
+import IdeFileRow from '~/ide/components/ide_file_row.vue';
+import FileRow from '~/vue_shared/components/file_row.vue';
+import FileRowExtra from '~/ide/components/file_row_extra.vue';
+import { createStore } from '~/ide/stores';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
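+// Attributes that IdeFileRow does not declare as props are expected to fall through to the rendered FileRow (asserted in the 'renders file row component' spec).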
+const TEST_EXTRA_PROPS = {
+ testattribute: 'abc',
+};
+
+const defaultComponentProps = (type = 'tree') => ({
+ level: 4,
+ file: {
+ type,
+ name: 'js',
+ },
+});
+
+describe('Ide File Row component', () => {
+ let wrapper;
+
+ const createComponent = (props = {}, options = {}) => {
+ wrapper = mount(IdeFileRow, {
+ propsData: {
+ ...defaultComponentProps(),
+ ...props,
+ },
+ store: createStore(),
+ localVue,
+ ...options,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findFileRowExtra = () => wrapper.find(FileRowExtra);
+ const findFileRow = () => wrapper.find(FileRow);
+ const hasDropdownOpen = () => findFileRowExtra().props('dropdownOpen');
+
+  it('forwards listeners to the FileRow component', () => {
+ const toggleTreeOpen = jest.fn();
+ createComponent(
+ {},
+ {
+ listeners: {
+ toggleTreeOpen,
+ },
+ },
+ );
+
+ findFileRow().vm.$emit('toggleTreeOpen');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(toggleTreeOpen).toHaveBeenCalled();
+ });
+ });
+
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent(TEST_EXTRA_PROPS);
+ });
+
+ it('renders file row component', () => {
+ const fileRow = findFileRow();
+
+ expect(fileRow.props()).toEqual(expect.objectContaining(defaultComponentProps()));
+ expect(fileRow.attributes()).toEqual(expect.objectContaining(TEST_EXTRA_PROPS));
+ });
+
+ it('renders file row extra', () => {
+ const extra = findFileRowExtra();
+
+ expect(extra.exists()).toBe(true);
+ expect(extra.props()).toEqual({
+ file: defaultComponentProps().file,
+ dropdownOpen: false,
+ });
+ });
+ });
+
+ describe('with open dropdown', () => {
+ beforeEach(() => {
+ createComponent();
+
+ findFileRowExtra().vm.$emit('toggle', true);
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('shows open dropdown', () => {
+ expect(hasDropdownOpen()).toBe(true);
+ });
+
+ it('hides dropdown when mouseleave', () => {
+ findFileRow().vm.$emit('mouseleave');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(hasDropdownOpen()).toEqual(false);
+ });
+ });
+
+ it('hides dropdown on toggle', () => {
+ findFileRowExtra().vm.$emit('toggle', false);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(hasDropdownOpen()).toEqual(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ide/components/ide_status_list_spec.js b/spec/frontend/ide/components/ide_status_list_spec.js
index 2762adfb57d..99c27ca30fb 100644
--- a/spec/frontend/ide/components/ide_status_list_spec.js
+++ b/spec/frontend/ide/components/ide_status_list_spec.js
@@ -1,6 +1,6 @@
import Vuex from 'vuex';
import { createLocalVue, shallowMount } from '@vue/test-utils';
-import IdeStatusList from '~/ide/components/ide_status_list';
+import IdeStatusList from '~/ide/components/ide_status_list.vue';
const TEST_FILE = {
name: 'lorem.md',
diff --git a/spec/frontend/ide/components/ide_status_mr_spec.js b/spec/frontend/ide/components/ide_status_mr_spec.js
new file mode 100644
index 00000000000..4d9ad61201d
--- /dev/null
+++ b/spec/frontend/ide/components/ide_status_mr_spec.js
@@ -0,0 +1,59 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon, GlLink } from '@gitlab/ui';
+import { TEST_HOST } from 'helpers/test_constants';
+import IdeStatusMr from '~/ide/components/ide_status_mr.vue';
+
+const TEST_TEXT = '!9001';
+const TEST_URL = `${TEST_HOST}merge-requests/9001`;
+
+describe('ide/components/ide_status_mr', () => {
+ let wrapper;
+
+ const createComponent = props => {
+ wrapper = shallowMount(IdeStatusMr, {
+ propsData: props,
+ });
+ };
+ const findIcon = () => wrapper.find(GlIcon);
+ const findLink = () => wrapper.find(GlLink);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when mounted', () => {
+ beforeEach(() => {
+ createComponent({
+ text: TEST_TEXT,
+ url: TEST_URL,
+ });
+ });
+
+ it('renders icon', () => {
+ const icon = findIcon();
+
+ expect(icon.exists()).toBe(true);
+ expect(icon.props()).toEqual(
+ expect.objectContaining({
+ name: 'merge-request',
+ }),
+ );
+ });
+
+ it('renders link', () => {
+ const link = findLink();
+
+ expect(link.exists()).toBe(true);
+ expect(link.attributes()).toEqual(
+ expect.objectContaining({
+ href: TEST_URL,
+ }),
+ );
+ expect(link.text()).toEqual(TEST_TEXT);
+ });
+
+ it('renders text', () => {
+ expect(wrapper.text()).toBe(`Merge request ${TEST_TEXT}`);
+ });
+ });
+});
diff --git a/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap b/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
index 43e606eac6e..db5175c3f7b 100644
--- a/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
+++ b/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
@@ -14,7 +14,7 @@ exports[`IDE pipeline stage renders stage details & icon 1`] = `
/>
<strong
- class="prepend-left-8 ide-stage-title"
+ class="prepend-left-8 text-truncate"
data-container="body"
data-original-title=""
title=""
@@ -42,7 +42,7 @@ exports[`IDE pipeline stage renders stage details & icon 1`] = `
</div>
<div
- class="card-body"
+ class="card-body p-0"
>
<item-stub
job="[object Object]"
diff --git a/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js b/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
index 096851a5401..b8dbca97ade 100644
--- a/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
+++ b/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
@@ -1,59 +1,50 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import Icon from '~/vue_shared/components/icon.vue';
import ScrollButton from '~/ide/components/jobs/detail/scroll_button.vue';
-import mountComponent from '../../../../helpers/vue_mount_component_helper';
describe('IDE job log scroll button', () => {
- const Component = Vue.extend(ScrollButton);
- let vm;
-
- beforeEach(() => {
- vm = mountComponent(Component, {
- direction: 'up',
- disabled: false,
+ let wrapper;
+
+ const createComponent = props => {
+ wrapper = shallowMount(ScrollButton, {
+ propsData: {
+ direction: 'up',
+ disabled: false,
+ ...props,
+ },
});
- });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- describe('iconName', () => {
- ['up', 'down'].forEach(direction => {
- it(`returns icon name for ${direction}`, () => {
- vm.direction = direction;
+ describe.each`
+ direction | icon | title
+ ${'up'} | ${'scroll_up'} | ${'Scroll to top'}
+ ${'down'} | ${'scroll_down'} | ${'Scroll to bottom'}
+ `('for $direction direction', ({ direction, icon, title }) => {
+ beforeEach(() => createComponent({ direction }));
- expect(vm.iconName).toBe(`scroll_${direction}`);
- });
+ it('returns proper icon name', () => {
+ expect(wrapper.find(Icon).props('name')).toBe(icon);
});
- });
- describe('tooltipTitle', () => {
- it('returns title for up', () => {
- expect(vm.tooltipTitle).toBe('Scroll to top');
- });
-
- it('returns title for down', () => {
- vm.direction = 'down';
-
- expect(vm.tooltipTitle).toBe('Scroll to bottom');
+ it('returns proper title', () => {
+ expect(wrapper.attributes('data-original-title')).toBe(title);
});
});
it('emits click event on click', () => {
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
-
- vm.$el.querySelector('.btn-scroll').click();
+ createComponent();
- expect(vm.$emit).toHaveBeenCalledWith('click');
+ wrapper.find('button').trigger('click');
+ expect(wrapper.emitted().click).toBeDefined();
});
- it('disables button when disabled is true', done => {
- vm.disabled = true;
+ it('disables button when disabled is true', () => {
+ createComponent({ disabled: true });
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('.btn-scroll').hasAttribute('disabled')).toBe(true);
-
- done();
- });
+ expect(wrapper.find('button').attributes('disabled')).toBe('disabled');
});
});
diff --git a/spec/frontend/ide/components/panes/right_spec.js b/spec/frontend/ide/components/panes/right_spec.js
index 7e408be96fc..84b2d440b60 100644
--- a/spec/frontend/ide/components/panes/right_spec.js
+++ b/spec/frontend/ide/components/panes/right_spec.js
@@ -75,28 +75,6 @@ describe('ide/components/panes/right.vue', () => {
});
});
- describe('merge request tab', () => {
- it('is shown if there is a currentMergeRequestId', () => {
- store.state.currentMergeRequestId = 1;
-
- createComponent();
-
- expect(wrapper.find(CollapsibleSidebar).props('extensionTabs')).toEqual(
- expect.arrayContaining([
- expect.objectContaining({
- show: true,
- title: 'Merge Request',
- views: expect.arrayContaining([
- expect.objectContaining({
- name: rightSidebarViews.mergeRequestInfo.name,
- }),
- ]),
- }),
- ]),
- );
- });
- });
-
describe('clientside live preview tab', () => {
it('is shown if there is a packageJson and clientsidePreviewEnabled', () => {
Vue.set(store.state.entries, 'package.json', {
diff --git a/spec/frontend/ide/components/preview/navigator_spec.js b/spec/frontend/ide/components/preview/navigator_spec.js
index 650bb7660a4..aa15f391e77 100644
--- a/spec/frontend/ide/components/preview/navigator_spec.js
+++ b/spec/frontend/ide/components/preview/navigator_spec.js
@@ -1,167 +1,177 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { shallowMount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
+import { GlLoadingIcon } from '@gitlab/ui';
import ClientsideNavigator from '~/ide/components/preview/navigator.vue';
+import { listen } from 'codesandbox-api';
+
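+// Mock codesandbox-api so the specs can capture the listen() handler and fire urlchange/done events manually; the mocked return value stands in for the unsubscribe function.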
+jest.mock('codesandbox-api', () => ({
+ listen: jest.fn().mockReturnValue(jest.fn()),
+}));
describe('IDE clientside preview navigator', () => {
- let vm;
- let Component;
+ let wrapper;
let manager;
+ let listenHandler;
- beforeAll(() => {
- Component = Vue.extend(ClientsideNavigator);
- });
+ const findBackButton = () => wrapper.findAll('button').at(0);
+ const findForwardButton = () => wrapper.findAll('button').at(1);
+ const findRefreshButton = () => wrapper.findAll('button').at(2);
beforeEach(() => {
+ listen.mockClear();
manager = { bundlerURL: TEST_HOST, iframe: { src: '' } };
- vm = mountComponent(Component, { manager });
+ wrapper = shallowMount(ClientsideNavigator, { propsData: { manager } });
+ [[listenHandler]] = listen.mock.calls;
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders readonly URL bar', () => {
- expect(vm.$el.querySelector('input[readonly]').value).toBe('/');
+ listenHandler({ type: 'urlchange', url: manager.bundlerURL });
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('input[readonly]').element.value).toBe('/');
+ });
});
- it('disables back button when navigationStack is empty', () => {
- expect(vm.$el.querySelector('.ide-navigator-btn')).toHaveAttr('disabled');
- expect(vm.$el.querySelector('.ide-navigator-btn').classList).toContain('disabled-content');
+ it('renders loading icon by default', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
});
- it('disables forward button when forwardNavigationStack is empty', () => {
- vm.forwardNavigationStack = [];
-
- expect(vm.$el.querySelectorAll('.ide-navigator-btn')[1]).toHaveAttr('disabled');
- expect(vm.$el.querySelectorAll('.ide-navigator-btn')[1].classList).toContain(
- 'disabled-content',
- );
+ it('removes loading icon when done event is fired', () => {
+ listenHandler({ type: 'done' });
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ });
});
- it('calls back method when clicking back button', done => {
- vm.navigationStack.push('/test');
- vm.navigationStack.push('/test2');
- jest.spyOn(vm, 'back').mockReturnValue();
-
- vm.$nextTick(() => {
- vm.$el.querySelector('.ide-navigator-btn').click();
-
- expect(vm.back).toHaveBeenCalled();
-
- done();
+ it('does not count visiting same url multiple times', () => {
+ listenHandler({ type: 'done' });
+    listenHandler({ type: 'urlchange', url: `${TEST_HOST}/url1` });
+    listenHandler({ type: 'urlchange', url: `${TEST_HOST}/url1` });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findBackButton().attributes('disabled')).toBe('disabled');
});
});
- it('calls forward method when clicking forward button', done => {
- vm.forwardNavigationStack.push('/test');
- jest.spyOn(vm, 'forward').mockReturnValue();
+ it('unsubscribes from listen on destroy', () => {
+ const unsubscribeFn = listen();
- vm.$nextTick(() => {
- vm.$el.querySelectorAll('.ide-navigator-btn')[1].click();
-
- expect(vm.forward).toHaveBeenCalled();
-
- done();
- });
+ wrapper.destroy();
+ expect(unsubscribeFn).toHaveBeenCalled();
});
- describe('onUrlChange', () => {
- it('updates the path', () => {
- vm.onUrlChange({ url: `${TEST_HOST}/url` });
-
- expect(vm.path).toBe('/url');
+ describe('back button', () => {
+ beforeEach(() => {
+ listenHandler({ type: 'done' });
+ listenHandler({ type: 'urlchange', url: TEST_HOST });
+ return wrapper.vm.$nextTick();
});
- it('sets currentBrowsingIndex 0 if not already set', () => {
- vm.onUrlChange({ url: `${TEST_HOST}/url` });
-
- expect(vm.currentBrowsingIndex).toBe(0);
+ it('is disabled by default', () => {
+ expect(findBackButton().attributes('disabled')).toBe('disabled');
});
- it('increases currentBrowsingIndex if path doesnt match', () => {
- vm.onUrlChange({ url: `${TEST_HOST}/url` });
-
- vm.onUrlChange({ url: `${TEST_HOST}/url2` });
-
- expect(vm.currentBrowsingIndex).toBe(1);
+ it('is enabled when there is previous entry', () => {
+ listenHandler({ type: 'urlchange', url: `${TEST_HOST}/url1` });
+ return wrapper.vm.$nextTick().then(() => {
+ findBackButton().trigger('click');
+ expect(findBackButton().attributes('disabled')).toBeFalsy();
+ });
});
- it('does not increase currentBrowsingIndex if path matches', () => {
- vm.onUrlChange({ url: `${TEST_HOST}/url` });
-
- vm.onUrlChange({ url: `${TEST_HOST}/url` });
-
- expect(vm.currentBrowsingIndex).toBe(0);
+ it('is disabled when there is no previous entry', () => {
+ listenHandler({ type: 'urlchange', url: `${TEST_HOST}/url1` });
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findBackButton().trigger('click');
+
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findBackButton().attributes('disabled')).toBe('disabled');
+ });
});
- it('pushes path into navigation stack', () => {
- vm.onUrlChange({ url: `${TEST_HOST}/url` });
+ it('updates manager iframe src', () => {
+ listenHandler({ type: 'urlchange', url: `${TEST_HOST}/url1` });
+ listenHandler({ type: 'urlchange', url: `${TEST_HOST}/url2` });
+ return wrapper.vm.$nextTick().then(() => {
+ findBackButton().trigger('click');
- expect(vm.navigationStack).toEqual(['/url']);
+ expect(manager.iframe.src).toBe(`${TEST_HOST}/url1`);
+ });
});
});
- describe('back', () => {
+ describe('forward button', () => {
beforeEach(() => {
- vm.path = '/test2';
- vm.currentBrowsingIndex = 1;
- vm.navigationStack.push('/test');
- vm.navigationStack.push('/test2');
-
- jest.spyOn(vm, 'visitPath').mockReturnValue();
-
- vm.back();
+ listenHandler({ type: 'done' });
+ listenHandler({ type: 'urlchange', url: TEST_HOST });
+ return wrapper.vm.$nextTick();
});
- it('visits the last entry in navigationStack', () => {
- expect(vm.visitPath).toHaveBeenCalledWith('/test');
+ it('is disabled by default', () => {
+ expect(findForwardButton().attributes('disabled')).toBe('disabled');
});
- it('adds last entry to forwardNavigationStack', () => {
- expect(vm.forwardNavigationStack).toEqual(['/test2']);
+ it('is enabled when there is next entry', () => {
+ listenHandler({ type: 'urlchange', url: `${TEST_HOST}/url1` });
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findBackButton().trigger('click');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findForwardButton().attributes('disabled')).toBeFalsy();
+ });
});
- it('clears navigation stack if currentBrowsingIndex is 1', () => {
- expect(vm.navigationStack).toEqual([]);
+ it('is disabled when there is no next entry', () => {
+ listenHandler({ type: 'urlchange', url: `${TEST_HOST}/url1` });
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findBackButton().trigger('click');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ findForwardButton().trigger('click');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findForwardButton().attributes('disabled')).toBe('disabled');
+ });
});
- it('sets currentBrowsingIndex to null is currentBrowsingIndex is 1', () => {
- expect(vm.currentBrowsingIndex).toBe(null);
- });
- });
-
- describe('forward', () => {
- it('calls visitPath with first entry in forwardNavigationStack', () => {
- jest.spyOn(vm, 'visitPath').mockReturnValue();
-
- vm.forwardNavigationStack.push('/test');
- vm.forwardNavigationStack.push('/test2');
+ it('updates manager iframe src', () => {
+ listenHandler({ type: 'urlchange', url: `${TEST_HOST}/url1` });
+ listenHandler({ type: 'urlchange', url: `${TEST_HOST}/url2` });
+ return wrapper.vm.$nextTick().then(() => {
+ findBackButton().trigger('click');
- vm.forward();
-
- expect(vm.visitPath).toHaveBeenCalledWith('/test');
+ expect(manager.iframe.src).toBe(`${TEST_HOST}/url1`);
+ });
});
});
- describe('refresh', () => {
- it('calls refresh with current path', () => {
- jest.spyOn(vm, 'visitPath').mockReturnValue();
-
- vm.path = '/test';
-
- vm.refresh();
-
- expect(vm.visitPath).toHaveBeenCalledWith('/test');
+ describe('refresh button', () => {
+ const url = `${TEST_HOST}/some_url`;
+ beforeEach(() => {
+ listenHandler({ type: 'done' });
+ listenHandler({ type: 'urlchange', url });
+ return wrapper.vm.$nextTick();
});
- });
- describe('visitPath', () => {
- it('updates iframe src with passed in path', () => {
- vm.visitPath('/testpath');
+ it('calls refresh with current path', () => {
+ manager.iframe.src = 'something-other';
+ findRefreshButton().trigger('click');
- expect(manager.iframe.src).toBe(`${TEST_HOST}/testpath`);
+ expect(manager.iframe.src).toBe(url);
});
});
});
diff --git a/spec/frontend/ide/ide_router_extension_spec.js b/spec/frontend/ide/ide_router_extension_spec.js
new file mode 100644
index 00000000000..3e29ecc4a90
--- /dev/null
+++ b/spec/frontend/ide/ide_router_extension_spec.js
@@ -0,0 +1,48 @@
+import VueRouter from 'vue-router';
+import IdeRouter from '~/ide/ide_router_extension';
+
+jest.mock('vue-router');
+
+describe('IDE overrides of VueRouter', () => {
+ const paths = branch => [
+ `${branch}`,
+ `/${branch}`,
+ `/${branch}/-/`,
+ `/edit/${branch}`,
+ `/edit/${branch}/-/`,
+ `/blob/${branch}`,
+ `/blob/${branch}/-/`,
+ `/blob/${branch}/-/src/merge_requests/2`,
+ `/blob/${branch}/-/src/blob/`,
+ `/tree/${branch}/-/src/blob/`,
+ `/tree/${branch}/-/src/tree/`,
+ ];
+ let router;
+
+ beforeEach(() => {
+ VueRouter.mockClear();
+ router = new IdeRouter({
+ mode: 'history',
+ });
+ });
+
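+  // A '#' in a branch name must be encoded as '%23' before the path reaches VueRouter, otherwise it would be parsed as a URL hash.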
+ it.each`
+ path | expected
+ ${'#-test'} | ${'%23-test'}
+ ${'#test'} | ${'%23test'}
+ ${'test#'} | ${'test%23'}
+ ${'test-#'} | ${'test-%23'}
+ ${'test-#-hash'} | ${'test-%23-hash'}
+ ${'test/hash#123'} | ${'test/hash%23123'}
+ `('finds project path when route is $path', ({ path, expected }) => {
+ paths(path).forEach(route => {
+ const expectedPath = route.replace(path, expected);
+
+ router.push(route);
+ expect(VueRouter.prototype.push).toHaveBeenCalledWith(expectedPath, undefined, undefined);
+
+ router.resolve(route);
+ expect(VueRouter.prototype.resolve).toHaveBeenCalledWith(expectedPath, undefined, undefined);
+ });
+ });
+});
diff --git a/spec/frontend/ide/mock_data.js b/spec/frontend/ide/mock_data.js
index 80eb15fe5a6..472516b6a2c 100644
--- a/spec/frontend/ide/mock_data.js
+++ b/spec/frontend/ide/mock_data.js
@@ -18,6 +18,7 @@ export const projectData = {
},
mergeRequests: {},
merge_requests_enabled: true,
+ userPermissions: {},
default_branch: 'master',
};
@@ -165,7 +166,11 @@ export const mergeRequests = [
iid: 1,
title: 'Test merge request',
project_id: 1,
- web_url: `${TEST_HOST}/namespace/project-path/merge_requests/1`,
+ web_url: `${TEST_HOST}/namespace/project-path/-/merge_requests/1`,
+ references: {
+ short: '!1',
+ full: 'namespace/project-path!1',
+ },
},
];
diff --git a/spec/frontend/ide/services/index_spec.js b/spec/frontend/ide/services/index_spec.js
index d676d7539b6..55f174f4663 100644
--- a/spec/frontend/ide/services/index_spec.js
+++ b/spec/frontend/ide/services/index_spec.js
@@ -2,11 +2,17 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import services from '~/ide/services';
import Api from '~/api';
+import gqClient from '~/ide/services/gql';
import { escapeFileUrl } from '~/lib/utils/url_utility';
+import getUserPermissions from '~/ide/queries/getUserPermissions.query.graphql';
+import { projectData } from '../mock_data';
jest.mock('~/api');
+jest.mock('~/ide/services/gql');
-const TEST_PROJECT_ID = 'alice/wonderland';
+const TEST_NAMESPACE = 'alice';
+const TEST_PROJECT = 'wonderland';
+const TEST_PROJECT_ID = `${TEST_NAMESPACE}/${TEST_PROJECT}`;
const TEST_BRANCH = 'master-patch-123';
const TEST_COMMIT_SHA = '123456789';
const TEST_FILE_PATH = 'README2.md';
@@ -97,7 +103,7 @@ describe('IDE services', () => {
mock
.onGet(
- `${relativeUrlRoot}/${TEST_PROJECT_ID}/raw/${TEST_COMMIT_SHA}/${escapeFileUrl(
+ `${relativeUrlRoot}/${TEST_PROJECT_ID}/-/raw/${TEST_COMMIT_SHA}/${escapeFileUrl(
filePath,
)}`,
)
@@ -111,4 +117,27 @@ describe('IDE services', () => {
},
);
});
+
+ describe('getProjectData', () => {
+ it('combines gql and API requests', () => {
+ const gqlProjectData = {
+ userPermissions: {
+ bogus: true,
+ },
+ };
+ Api.project.mockReturnValue(Promise.resolve({ data: { ...projectData } }));
+ gqClient.query.mockReturnValue(Promise.resolve({ data: { project: gqlProjectData } }));
+
+ return services.getProjectData(TEST_NAMESPACE, TEST_PROJECT).then(response => {
+ expect(response).toEqual({ data: { ...projectData, ...gqlProjectData } });
+ expect(Api.project).toHaveBeenCalledWith(TEST_PROJECT_ID);
+ expect(gqClient.query).toHaveBeenCalledWith({
+ query: getUserPermissions,
+ variables: {
+ projectPath: TEST_PROJECT_ID,
+ },
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/ide/stores/actions/file_spec.js b/spec/frontend/ide/stores/actions/file_spec.js
index a8e48f0b85e..6df963b0d55 100644
--- a/spec/frontend/ide/stores/actions/file_spec.js
+++ b/spec/frontend/ide/stores/actions/file_spec.js
@@ -251,7 +251,7 @@ describe('IDE store file actions', () => {
describe('success', () => {
beforeEach(() => {
- mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).replyOnce(
+ mock.onGet(`${RELATIVE_URL_ROOT}/test/test/-/7297abc/${localFile.path}`).replyOnce(
200,
{
blame_path: 'blame_path',
@@ -273,7 +273,7 @@ describe('IDE store file actions', () => {
.dispatch('getFileData', { path: localFile.path })
.then(() => {
expect(service.getFileData).toHaveBeenCalledWith(
- `${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`,
+ `${RELATIVE_URL_ROOT}/test/test/-/7297abc/${localFile.path}`,
);
done();
@@ -345,7 +345,7 @@ describe('IDE store file actions', () => {
localFile.path = 'new-shiny-file';
store.state.entries[localFile.path] = localFile;
- mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/old-dull-file`).replyOnce(
+ mock.onGet(`${RELATIVE_URL_ROOT}/test/test/-/7297abc/old-dull-file`).replyOnce(
200,
{
blame_path: 'blame_path',
@@ -376,7 +376,7 @@ describe('IDE store file actions', () => {
describe('error', () => {
beforeEach(() => {
- mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).networkError();
+ mock.onGet(`${RELATIVE_URL_ROOT}/test/test/-/7297abc/${localFile.path}`).networkError();
});
it('dispatches error action', () => {
@@ -389,7 +389,7 @@ describe('IDE store file actions', () => {
)
.then(() => {
expect(dispatch).toHaveBeenCalledWith('setErrorMessage', {
- text: 'An error occurred whilst loading the file.',
+ text: 'An error occurred while loading the file.',
action: expect.any(Function),
actionText: 'Please try again',
actionPayload: {
@@ -500,7 +500,7 @@ describe('IDE store file actions', () => {
)
.catch(() => {
expect(dispatch).toHaveBeenCalledWith('setErrorMessage', {
- text: 'An error occurred whilst loading the file content.',
+ text: 'An error occurred while loading the file content.',
action: expect.any(Function),
actionText: 'Please try again',
actionPayload: {
@@ -534,27 +534,21 @@ describe('IDE store file actions', () => {
.catch(done.fail);
});
- it('adds a newline to the end of the file if it doesnt already exist', done => {
- callAction('content')
- .then(() => {
- expect(tmpFile.content).toBe('content\n');
-
- done();
+ it('adds file into stagedFiles array', done => {
+ store
+ .dispatch('changeFileContent', {
+ path: tmpFile.path,
+ content: 'content',
})
- .catch(done.fail);
- });
-
- it('adds file into changedFiles array', done => {
- callAction()
.then(() => {
- expect(store.state.changedFiles.length).toBe(1);
+ expect(store.state.stagedFiles.length).toBe(1);
done();
})
.catch(done.fail);
});
- it('adds file not more than once into changedFiles array', done => {
+ it('adds file not more than once into stagedFiles array', done => {
store
.dispatch('changeFileContent', {
path: tmpFile.path,
@@ -567,7 +561,7 @@ describe('IDE store file actions', () => {
}),
)
.then(() => {
- expect(store.state.changedFiles.length).toBe(1);
+ expect(store.state.stagedFiles.length).toBe(1);
done();
})
@@ -594,52 +588,6 @@ describe('IDE store file actions', () => {
.catch(done.fail);
});
- describe('when `gon.feature.stageAllByDefault` is true', () => {
- const originalGonFeatures = Object.assign({}, gon.features);
-
- beforeAll(() => {
- gon.features = { stageAllByDefault: true };
- });
-
- afterAll(() => {
- gon.features = originalGonFeatures;
- });
-
- it('adds file into stagedFiles array', done => {
- store
- .dispatch('changeFileContent', {
- path: tmpFile.path,
- content: 'content',
- })
- .then(() => {
- expect(store.state.stagedFiles.length).toBe(1);
-
- done();
- })
- .catch(done.fail);
- });
-
- it('adds file not more than once into stagedFiles array', done => {
- store
- .dispatch('changeFileContent', {
- path: tmpFile.path,
- content: 'content',
- })
- .then(() =>
- store.dispatch('changeFileContent', {
- path: tmpFile.path,
- content: 'content 123',
- }),
- )
- .then(() => {
- expect(store.state.stagedFiles.length).toBe(1);
-
- done();
- })
- .catch(done.fail);
- });
- });
-
it('bursts unused seal', done => {
store
.dispatch('changeFileContent', {
diff --git a/spec/frontend/ide/stores/getters_spec.js b/spec/frontend/ide/stores/getters_spec.js
index 21c5e886738..011be95c1d2 100644
--- a/spec/frontend/ide/stores/getters_spec.js
+++ b/spec/frontend/ide/stores/getters_spec.js
@@ -2,6 +2,8 @@ import * as getters from '~/ide/stores/getters';
import { createStore } from '~/ide/stores';
import { file } from '../helpers';
+const TEST_PROJECT_ID = 'test_project';
+
describe('IDE store getters', () => {
let localState;
let localStore;
@@ -398,4 +400,38 @@ describe('IDE store getters', () => {
},
);
});
+
+ describe('findProjectPermissions', () => {
+ it('returns false if project not found', () => {
+ expect(localStore.getters.findProjectPermissions(TEST_PROJECT_ID)).toEqual({});
+ });
+
+ it('finds permission in given project', () => {
+ const userPermissions = {
+ readMergeRequest: true,
+ createMergeRequestsIn: false,
+ };
+
+ localState.projects[TEST_PROJECT_ID] = { userPermissions };
+
+ expect(localStore.getters.findProjectPermissions(TEST_PROJECT_ID)).toBe(userPermissions);
+ });
+ });
+
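+  // Each permission getter reads its corresponding key from the current project's userPermissions.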
+ describe.each`
+ getterName | permissionKey
+ ${'canReadMergeRequests'} | ${'readMergeRequest'}
+ ${'canCreateMergeRequests'} | ${'createMergeRequestIn'}
+ `('$getterName', ({ getterName, permissionKey }) => {
+ it.each([true, false])('finds permission for current project (%s)', val => {
+ localState.projects[TEST_PROJECT_ID] = {
+ userPermissions: {
+ [permissionKey]: val,
+ },
+ };
+ localState.currentProjectId = TEST_PROJECT_ID;
+
+ expect(localStore.getters[getterName]).toBe(val);
+ });
+ });
});
diff --git a/spec/frontend/ide/stores/integration_spec.js b/spec/frontend/ide/stores/integration_spec.js
index 443de18f288..f95f036f572 100644
--- a/spec/frontend/ide/stores/integration_spec.js
+++ b/spec/frontend/ide/stores/integration_spec.js
@@ -61,19 +61,14 @@ describe('IDE store integration', () => {
store.dispatch('createTempEntry', { name: TEST_PATH, type: 'blob' });
});
- it('has changed and staged', () => {
- expect(store.state.changedFiles).toEqual([
- expect.objectContaining({
- path: TEST_PATH,
- tempFile: true,
- deleted: false,
- }),
- ]);
-
+ it('is added to staged as modified', () => {
expect(store.state.stagedFiles).toEqual([
expect.objectContaining({
path: TEST_PATH,
- deleted: true,
+ deleted: false,
+ staged: true,
+ changed: true,
+ tempFile: false,
}),
]);
});
diff --git a/spec/frontend/ide/stores/modules/file_templates/getters_spec.js b/spec/frontend/ide/stores/modules/file_templates/getters_spec.js
index 17cb457881f..5855496a330 100644
--- a/spec/frontend/ide/stores/modules/file_templates/getters_spec.js
+++ b/spec/frontend/ide/stores/modules/file_templates/getters_spec.js
@@ -1,5 +1,5 @@
import createState from '~/ide/stores/state';
-import { activityBarViews } from '~/ide/constants';
+import { leftSidebarViews } from '~/ide/constants';
import * as getters from '~/ide/stores/modules/file_templates/getters';
describe('IDE file templates getters', () => {
@@ -17,7 +17,7 @@ describe('IDE file templates getters', () => {
});
it('returns true if template is found and currentActivityView is edit', () => {
- rootState.currentActivityView = activityBarViews.edit;
+ rootState.currentActivityView = leftSidebarViews.edit.name;
expect(
getters.showFileTemplatesBar(
@@ -31,7 +31,7 @@ describe('IDE file templates getters', () => {
});
it('returns false if template is found and currentActivityView is not edit', () => {
- rootState.currentActivityView = activityBarViews.commit;
+ rootState.currentActivityView = leftSidebarViews.commit.name;
expect(
getters.showFileTemplatesBar(
diff --git a/spec/frontend/ide/stores/modules/pipelines/actions_spec.js b/spec/frontend/ide/stores/modules/pipelines/actions_spec.js
index b08d1cd01da..71918e7e2c2 100644
--- a/spec/frontend/ide/stores/modules/pipelines/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/pipelines/actions_spec.js
@@ -78,7 +78,7 @@ describe('IDE pipelines actions', () => {
{
type: 'setErrorMessage',
payload: {
- text: 'An error occurred whilst fetching the latest pipeline.',
+ text: 'An error occurred while fetching the latest pipeline.',
action: expect.any(Function),
actionText: 'Please try again',
actionPayload: null,
@@ -207,7 +207,7 @@ describe('IDE pipelines actions', () => {
{
type: 'setErrorMessage',
payload: {
- text: 'An error occurred whilst loading the pipelines jobs.',
+ text: 'An error occurred while loading the pipelines jobs.',
action: expect.anything(),
actionText: 'Please try again',
actionPayload: { id: 1 },
@@ -341,7 +341,7 @@ describe('IDE pipelines actions', () => {
{
type: 'setErrorMessage',
payload: {
- text: 'An error occurred whilst fetching the job trace.',
+ text: 'An error occurred while fetching the job trace.',
action: expect.any(Function),
actionText: 'Please try again',
actionPayload: null,
diff --git a/spec/frontend/ide/stores/mutations/file_spec.js b/spec/frontend/ide/stores/mutations/file_spec.js
index cd308ee9991..9b96b910fcb 100644
--- a/spec/frontend/ide/stores/mutations/file_spec.js
+++ b/spec/frontend/ide/stores/mutations/file_spec.js
@@ -356,6 +356,14 @@ describe('IDE store file mutations', () => {
expect(localState.changedFiles.length).toBe(1);
});
+
+ it('bursts unused seal', () => {
+ expect(localState.unusedSeal).toBe(true);
+
+ mutations.ADD_FILE_TO_CHANGED(localState, localFile.path);
+
+ expect(localState.unusedSeal).toBe(false);
+ });
});
describe('REMOVE_FILE_FROM_CHANGED', () => {
@@ -366,6 +374,14 @@ describe('IDE store file mutations', () => {
expect(localState.changedFiles.length).toBe(0);
});
+
+ it('bursts unused seal', () => {
+ expect(localState.unusedSeal).toBe(true);
+
+ mutations.REMOVE_FILE_FROM_CHANGED(localState, localFile.path);
+
+ expect(localState.unusedSeal).toBe(false);
+ });
});
describe.each`
@@ -517,6 +533,19 @@ describe('IDE store file mutations', () => {
},
);
+ describe('STAGE_CHANGE', () => {
+ it('bursts unused seal', () => {
+ expect(localState.unusedSeal).toBe(true);
+
+ mutations.STAGE_CHANGE(localState, {
+ path: localFile.path,
+ diffInfo: localStore.getters.getDiffInfo(localFile.path),
+ });
+
+ expect(localState.unusedSeal).toBe(false);
+ });
+ });
+
describe('TOGGLE_FILE_CHANGED', () => {
it('updates file changed status', () => {
mutations.TOGGLE_FILE_CHANGED(localState, {
diff --git a/spec/frontend/ide/stores/mutations_spec.js b/spec/frontend/ide/stores/mutations_spec.js
index eb89c92914a..9fe75d596fb 100644
--- a/spec/frontend/ide/stores/mutations_spec.js
+++ b/spec/frontend/ide/stores/mutations_spec.js
@@ -196,16 +196,6 @@ describe('Multi-file store mutations', () => {
});
});
- describe('BURST_UNUSED_SEAL', () => {
- it('updates unusedSeal', () => {
- expect(localState.unusedSeal).toBe(true);
-
- mutations.BURST_UNUSED_SEAL(localState);
-
- expect(localState.unusedSeal).toBe(false);
- });
- });
-
describe('SET_ERROR_MESSAGE', () => {
it('updates error message', () => {
mutations.SET_ERROR_MESSAGE(localState, 'error');
@@ -297,6 +287,16 @@ describe('Multi-file store mutations', () => {
expect(localState.changedFiles).toEqual([]);
});
+
+ it('bursts unused seal', () => {
+ localState.entries.test = file('test');
+
+ expect(localState.unusedSeal).toBe(true);
+
+ mutations.DELETE_ENTRY(localState, 'test');
+
+ expect(localState.unusedSeal).toBe(false);
+ });
});
describe('UPDATE_FILE_AFTER_COMMIT', () => {
diff --git a/spec/frontend/import_projects/components/import_projects_table_spec.js b/spec/frontend/import_projects/components/import_projects_table_spec.js
index deffe22ea77..8f60823ee72 100644
--- a/spec/frontend/import_projects/components/import_projects_table_spec.js
+++ b/spec/frontend/import_projects/components/import_projects_table_spec.js
@@ -58,7 +58,7 @@ describe('ImportProjectsTable', () => {
vm.$destroy();
});
- it('renders a loading icon whilst repos are loading', () =>
+ it('renders a loading icon while repos are loading', () =>
vm.$nextTick().then(() => {
expect(vm.$el.querySelector('.js-loading-button-icon')).not.toBeNull();
}));
diff --git a/spec/frontend/issuables_list/components/issuable_spec.js b/spec/frontend/issuables_list/components/issuable_spec.js
index 81f6b60ae25..980def06078 100644
--- a/spec/frontend/issuables_list/components/issuable_spec.js
+++ b/spec/frontend/issuables_list/components/issuable_spec.js
@@ -122,6 +122,10 @@ describe('Issuable component', () => {
expect(finder().exists()).toBe(false);
});
+ it('shows relative reference path', () => {
+ expect(wrapper.find('.js-ref-path').text()).toBe(issuable.references.relative);
+ });
+
it('does not have closed text', () => {
expect(wrapper.text()).not.toContain(TEXT_CLOSED);
});
diff --git a/spec/frontend/issuables_list/issuable_list_test_data.js b/spec/frontend/issuables_list/issuable_list_test_data.js
index 617780fd736..19d8ee7f71a 100644
--- a/spec/frontend/issuables_list/issuable_list_test_data.js
+++ b/spec/frontend/issuables_list/issuable_list_test_data.js
@@ -26,6 +26,9 @@ export const simpleIssue = {
web_url: 'http://localhost:3001/h5bp/html5-boilerplate/issues/31',
has_tasks: false,
weight: null,
+ references: {
+ relative: 'html-boilerplate#45',
+ },
};
export const testLabels = [
diff --git a/spec/frontend/issue_show/mock_data.js b/spec/frontend/issue_show/mock_data.js
new file mode 100644
index 00000000000..ff01a004186
--- /dev/null
+++ b/spec/frontend/issue_show/mock_data.js
@@ -0,0 +1,23 @@
+export const initialRequest = {
+ title: '<p>this is a title</p>',
+ title_text: 'this is a title',
+ description: '<p>this is a description!</p>',
+ description_text: 'this is a description',
+ task_status: '2 of 4 completed',
+ updated_at: '2015-05-15T12:31:04.428Z',
+ updated_by_name: 'Some User',
+ updated_by_path: '/some_user',
+ lock_version: 1,
+};
+
+export const secondRequest = {
+ title: '<p>2</p>',
+ title_text: '2',
+ description: '<p>42</p>',
+ description_text: '42',
+ task_status: '0 of 0 completed',
+ updated_at: '2016-05-15T12:31:04.428Z',
+ updated_by_name: 'Other User',
+ updated_by_path: '/other_user',
+ lock_version: 2,
+};
diff --git a/spec/javascripts/jobs/components/job_app_spec.js b/spec/frontend/jobs/components/job_app_spec.js
index 0fcd6080106..8fa289bbe4d 100644
--- a/spec/javascripts/jobs/components/job_app_spec.js
+++ b/spec/frontend/jobs/components/job_app_spec.js
@@ -1,43 +1,57 @@
-import Vue from 'vue';
+import Vuex from 'vuex';
+import { mount, createLocalVue } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import { waitForMutation } from 'spec/helpers/vue_test_utils_helper';
+import { getJSONFixture } from 'helpers/fixtures';
import axios from '~/lib/utils/axios_utils';
-import jobApp from '~/jobs/components/job_app.vue';
+import JobApp from '~/jobs/components/job_app.vue';
import createStore from '~/jobs/store';
-import * as types from '~/jobs/store/mutation_types';
-import { resetStore } from '../store/helpers';
import job from '../mock_data';
-describe('Job App ', () => {
+describe('Job App', () => {
+ const localVue = createLocalVue();
+ localVue.use(Vuex);
+
const delayedJobFixture = getJSONFixture('jobs/delayed.json');
- const Component = Vue.extend(jobApp);
+
let store;
- let vm;
+ let wrapper;
let mock;
- const props = {
+ const initSettings = {
endpoint: `${gl.TEST_HOST}jobs/123.json`,
+ pagePath: `${gl.TEST_HOST}jobs/123`,
+ logState:
+ 'eyJvZmZzZXQiOjE3NDUxLCJuX29wZW5fdGFncyI6MCwiZmdfY29sb3IiOm51bGwsImJnX2NvbG9yIjpudWxsLCJzdHlsZV9tYXNrIjowfQ%3D%3D',
+ };
+
+ const props = {
runnerHelpUrl: 'help/runner',
deploymentHelpUrl: 'help/deployment',
runnerSettingsUrl: 'settings/ci-cd/runners',
variablesSettingsUrl: 'settings/ci-cd/variables',
terminalPath: 'jobs/123/terminal',
- pagePath: `${gl.TEST_HOST}jobs/123`,
projectPath: 'user-name/project-name',
subscriptionsMoreMinutesUrl: 'https://customers.gitlab.com/buy_pipeline_minutes',
- logState:
- 'eyJvZmZzZXQiOjE3NDUxLCJuX29wZW5fdGFncyI6MCwiZmdfY29sb3IiOm51bGwsImJnX2NvbG9yIjpudWxsLCJzdHlsZV9tYXNrIjowfQ%3D%3D',
};
- const waitForJobReceived = () => waitForMutation(store, types.RECEIVE_JOB_SUCCESS);
+ const createComponent = () => {
+ wrapper = mount(JobApp, { propsData: { ...props }, store });
+ };
+
const setupAndMount = ({ jobData = {}, traceData = {} } = {}) => {
- mock.onGet(props.endpoint).replyOnce(200, { ...job, ...jobData });
- mock.onGet(`${props.pagePath}/trace.json`).reply(200, traceData);
+ mock.onGet(initSettings.endpoint).replyOnce(200, { ...job, ...jobData });
+ mock.onGet(`${initSettings.pagePath}/trace.json`).reply(200, traceData);
- vm = mountComponentWithStore(Component, { props, store });
+ const asyncInit = store.dispatch('init', initSettings);
- return waitForJobReceived();
+ createComponent();
+
+ return asyncInit
+ .then(() => {
+ jest.runOnlyPendingTimers();
+ })
+ .then(() => axios.waitForAll())
+ .then(() => wrapper.vm.$nextTick());
};
beforeEach(() => {
@@ -46,95 +60,81 @@ describe('Job App ', () => {
});
afterEach(() => {
- resetStore(store);
- vm.$destroy();
+ wrapper.destroy();
mock.restore();
});
describe('while loading', () => {
beforeEach(() => {
- setupAndMount();
+ store.state.isLoading = true;
+ createComponent();
});
it('renders loading icon', () => {
- expect(vm.$el.querySelector('.js-job-loading')).not.toBeNull();
- expect(vm.$el.querySelector('.js-job-sidebar')).toBeNull();
- expect(vm.$el.querySelector('.js-job-content')).toBeNull();
+ expect(wrapper.find('.js-job-loading').exists()).toBe(true);
+ expect(wrapper.find('.js-job-sidebar').exists()).toBe(false);
+ expect(wrapper.find('.js-job-content').exists()).toBe(false);
});
});
describe('with successful request', () => {
describe('Header section', () => {
describe('job callout message', () => {
- it('should not render the reason when reason is absent', done => {
- setupAndMount()
- .then(() => {
- expect(vm.shouldRenderCalloutMessage).toBe(false);
- })
- .then(done)
- .catch(done.fail);
- });
+ it('should not render the reason when reason is absent', () =>
+ setupAndMount().then(() => {
+ expect(wrapper.vm.shouldRenderCalloutMessage).toBe(false);
+ }));
- it('should render the reason when reason is present', done => {
+ it('should render the reason when reason is present', () =>
setupAndMount({
jobData: {
callout_message: 'There is an unknown failure, please try again',
},
- })
- .then(() => {
- expect(vm.shouldRenderCalloutMessage).toBe(true);
- })
- .then(done)
- .catch(done.fail);
- });
+ }).then(() => {
+ expect(wrapper.vm.shouldRenderCalloutMessage).toBe(true);
+ }));
});
describe('triggered job', () => {
- beforeEach(done => {
+ beforeEach(() => {
const aYearAgo = new Date();
aYearAgo.setFullYear(aYearAgo.getFullYear() - 1);
- setupAndMount({ jobData: { started: aYearAgo.toISOString() } })
- .then(done)
- .catch(done.fail);
+ return setupAndMount({ jobData: { started: aYearAgo.toISOString() } });
});
it('should render provided job information', () => {
expect(
- vm.$el
- .querySelector('.header-main-content')
- .textContent.replace(/\s+/g, ' ')
+ wrapper
+ .find('.header-main-content')
+ .text()
+ .replace(/\s+/g, ' ')
.trim(),
).toContain('passed Job #4757 triggered 1 year ago by Root');
});
it('should render new issue link', () => {
- expect(vm.$el.querySelector('.js-new-issue').getAttribute('href')).toEqual(
- job.new_issue_path,
- );
+ expect(wrapper.find('.js-new-issue').attributes('href')).toEqual(job.new_issue_path);
});
});
describe('created job', () => {
- it('should render created key', done => {
- setupAndMount()
- .then(() => {
- expect(
- vm.$el
- .querySelector('.header-main-content')
- .textContent.replace(/\s+/g, ' ')
- .trim(),
- ).toContain('passed Job #4757 created 3 weeks ago by Root');
- })
- .then(done)
- .catch(done.fail);
- });
+ it('should render created key', () =>
+ setupAndMount().then(() => {
+ expect(
+ wrapper
+ .find('.header-main-content')
+ .text()
+ .replace(/\s+/g, ' ')
+ .trim(),
+ ).toContain('passed Job #4757 created 3 weeks ago by Root');
+ }));
});
});
describe('stuck block', () => {
describe('without active runners available', () => {
- it('renders stuck block when there are no runners', done => {
+ it('renders stuck block when there are no runners', () =>
setupAndMount({
jobData: {
status: {
@@ -151,20 +151,14 @@ describe('Job App ', () => {
},
tags: [],
},
- })
- .then(() => {
- expect(vm.$el.querySelector('.js-job-stuck')).not.toBeNull();
- expect(
- vm.$el.querySelector('.js-job-stuck .js-stuck-no-active-runner'),
- ).not.toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
+ }).then(() => {
+ expect(wrapper.find('.js-job-stuck').exists()).toBe(true);
+ expect(wrapper.find('.js-job-stuck .js-stuck-no-active-runner').exists()).toBe(true);
+ }));
});
describe('when available runners can not run specified tag', () => {
- it('renders tags in stuck block when there are no runners', done => {
+ it('renders tags in stuck block when there are no runners', () =>
setupAndMount({
jobData: {
status: {
@@ -180,18 +174,14 @@ describe('Job App ', () => {
online: false,
},
},
- })
- .then(() => {
- expect(vm.$el.querySelector('.js-job-stuck').textContent).toContain(job.tags[0]);
- expect(vm.$el.querySelector('.js-job-stuck .js-stuck-with-tags')).not.toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
+ }).then(() => {
+ expect(wrapper.find('.js-job-stuck').text()).toContain(job.tags[0]);
+ expect(wrapper.find('.js-job-stuck .js-stuck-with-tags').exists()).toBe(true);
+ }));
});
describe('when runners are offline and build has tags', () => {
- it('renders message about job being stuck because of no runners with the specified tags', done => {
+ it('renders message about job being stuck because of no runners with the specified tags', () =>
setupAndMount({
jobData: {
status: {
@@ -207,32 +197,24 @@ describe('Job App ', () => {
online: true,
},
},
- })
- .then(() => {
- expect(vm.$el.querySelector('.js-job-stuck').textContent).toContain(job.tags[0]);
- expect(vm.$el.querySelector('.js-job-stuck .js-stuck-with-tags')).not.toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
+ }).then(() => {
+ expect(wrapper.find('.js-job-stuck').text()).toContain(job.tags[0]);
+ expect(wrapper.find('.js-job-stuck .js-stuck-with-tags').exists()).toBe(true);
+ }));
});
- it('does not renders stuck block when there are no runners', done => {
+ it('does not render stuck block when runners are available', () =>
setupAndMount({
jobData: {
runners: { available: true },
},
- })
- .then(() => {
- expect(vm.$el.querySelector('.js-job-stuck')).toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
+ }).then(() => {
+ expect(wrapper.find('.js-job-stuck').exists()).toBe(false);
+ }));
});
describe('unmet prerequisites block', () => {
- it('renders unmet prerequisites block when there is an unmet prerequisites failure', done => {
+ it('renders unmet prerequisites block when there is an unmet prerequisites failure', () =>
setupAndMount({
jobData: {
status: {
@@ -255,17 +237,13 @@ describe('Job App ', () => {
},
tags: [],
},
- })
- .then(() => {
- expect(vm.$el.querySelector('.js-job-failed')).not.toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
+ }).then(() => {
+ expect(wrapper.find('.js-job-failed').exists()).toBe(true);
+ }));
});
describe('environments block', () => {
- it('renders environment block when job has environment', done => {
+ it('renders environment block when job has environment', () =>
setupAndMount({
jobData: {
deployment_status: {
@@ -275,26 +253,18 @@ describe('Job App ', () => {
},
},
},
- })
- .then(() => {
- expect(vm.$el.querySelector('.js-job-environment')).not.toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('does not render environment block when job has environment', done => {
- setupAndMount()
- .then(() => {
- expect(vm.$el.querySelector('.js-job-environment')).toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
+ }).then(() => {
+ expect(wrapper.find('.js-job-environment').exists()).toBe(true);
+ }));
+
+ it('does not render environment block when job has no environment', () =>
+ setupAndMount().then(() => {
+ expect(wrapper.find('.js-job-environment').exists()).toBe(false);
+ }));
});
describe('erased block', () => {
- it('renders erased block when `erased` is true', done => {
+ it('renders erased block when `erased` is true', () =>
setupAndMount({
jobData: {
erased_by: {
@@ -303,30 +273,22 @@ describe('Job App ', () => {
},
erased_at: '2016-11-07T11:11:16.525Z',
},
- })
- .then(() => {
- expect(vm.$el.querySelector('.js-job-erased-block')).not.toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
+ }).then(() => {
+ expect(wrapper.find('.js-job-erased-block').exists()).toBe(true);
+ }));
- it('does not render erased block when `erased` is false', done => {
+ it('does not render erased block when `erased` is false', () =>
setupAndMount({
jobData: {
erased_at: null,
},
- })
- .then(() => {
- expect(vm.$el.querySelector('.js-job-erased-block')).toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
+ }).then(() => {
+ expect(wrapper.find('.js-job-erased-block').exists()).toBe(false);
+ }));
});
describe('empty states block', () => {
- it('renders empty state when job does not have trace and is not running', done => {
+ it('renders empty state when job does not have trace and is not running', () =>
setupAndMount({
jobData: {
has_trace: false,
@@ -349,15 +311,11 @@ describe('Job App ', () => {
},
},
},
- })
- .then(() => {
- expect(vm.$el.querySelector('.js-job-empty-state')).not.toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
+ }).then(() => {
+ expect(wrapper.find('.js-job-empty-state').exists()).toBe(true);
+ }));
- it('does not render empty state when job does not have trace but it is running', done => {
+ it('does not render empty state when job does not have trace but it is running', () =>
setupAndMount({
jobData: {
has_trace: false,
@@ -369,39 +327,29 @@ describe('Job App ', () => {
details_path: 'path',
},
},
- })
- .then(() => {
- expect(vm.$el.querySelector('.js-job-empty-state')).toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
+ }).then(() => {
+ expect(wrapper.find('.js-job-empty-state').exists()).toBe(false);
+ }));
- it('does not render empty state when job has trace but it is not running', done => {
- setupAndMount({ jobData: { has_trace: true } })
- .then(() => {
- expect(vm.$el.querySelector('.js-job-empty-state')).toBeNull();
- })
- .then(done)
- .catch(done.fail);
- done();
- });
+ it('does not render empty state when job has trace but it is not running', () =>
+ setupAndMount({ jobData: { has_trace: true } }).then(() => {
+ expect(wrapper.find('.js-job-empty-state').exists()).toBe(false);
+ }));
- it('displays remaining time for a delayed job', done => {
+ it('displays remaining time for a delayed job', () => {
const oneHourInMilliseconds = 3600000;
- spyOn(Date, 'now').and.callFake(
- () => new Date(delayedJobFixture.scheduled_at).getTime() - oneHourInMilliseconds,
- );
- setupAndMount({ jobData: delayedJobFixture })
- .then(() => {
- expect(vm.$el.querySelector('.js-job-empty-state')).not.toBeNull();
+ jest
+ .spyOn(Date, 'now')
+ .mockImplementation(
+ () => new Date(delayedJobFixture.scheduled_at).getTime() - oneHourInMilliseconds,
+ );
+ return setupAndMount({ jobData: delayedJobFixture }).then(() => {
+ expect(wrapper.find('.js-job-empty-state').exists()).toBe(true);
- const title = vm.$el.querySelector('.js-job-empty-state-title');
+ const title = wrapper.find('.js-job-empty-state-title').text();
- expect(title).toContainText('01:00:00');
- })
- .then(done)
- .catch(done.fail);
+ expect(title).toEqual('This is a delayed job to run in 01:00:00');
+ });
});
});
@@ -420,8 +368,11 @@ describe('Job App ', () => {
},
})
.then(() => {
- vm.$el.querySelectorAll('.blocks-container > *').forEach(block => {
- expect(block.textContent.trim()).not.toBe('');
+ const blocks = wrapper.findAll('.blocks-container > *').wrappers;
+ expect(blocks.length).toBeGreaterThan(0);
+
+ blocks.forEach(block => {
+ expect(block.text().trim()).not.toBe('');
});
})
.then(done)
@@ -431,32 +382,24 @@ describe('Job App ', () => {
});
describe('archived job', () => {
- beforeEach(done => {
- setupAndMount({ jobData: { archived: true } })
- .then(done)
- .catch(done.fail);
- });
+ beforeEach(() => setupAndMount({ jobData: { archived: true } }));
it('renders warning about job being archived', () => {
- expect(vm.$el.querySelector('.js-archived-job ')).not.toBeNull();
+ expect(wrapper.find('.js-archived-job ').exists()).toBe(true);
});
});
describe('non-archived job', () => {
- beforeEach(done => {
- setupAndMount()
- .then(done)
- .catch(done.fail);
- });
+ beforeEach(() => setupAndMount());
it('does not render warning about job being archived', () => {
- expect(vm.$el.querySelector('.js-archived-job ')).toBeNull();
+ expect(wrapper.find('.js-archived-job ').exists()).toBe(false);
});
});
describe('trace output', () => {
describe('with append flag', () => {
- it('appends the log content to the existing one', done => {
+ it('appends the log content to the existing one', () =>
setupAndMount({
traceData: {
html: '<span>More<span>',
@@ -467,20 +410,22 @@ describe('Job App ', () => {
},
})
.then(() => {
- vm.$store.state.trace = 'Update';
+ store.state.trace = 'Update';
- return vm.$nextTick();
+ return wrapper.vm.$nextTick();
})
.then(() => {
- expect(vm.$el.querySelector('.js-build-trace').textContent.trim()).toContain('Update');
- })
- .then(done)
- .catch(done.fail);
- });
+ expect(
+ wrapper
+ .find('.js-build-trace')
+ .text()
+ .trim(),
+ ).toEqual('Update');
+ }));
});
describe('without append flag', () => {
- it('replaces the trace', done => {
+ it('replaces the trace', () =>
setupAndMount({
traceData: {
html: '<span>Different<span>',
@@ -488,24 +433,19 @@ describe('Job App ', () => {
append: false,
complete: true,
},
- })
- .then(() => {
- expect(vm.$el.querySelector('.js-build-trace').textContent.trim()).not.toContain(
- 'Update',
- );
-
- expect(vm.$el.querySelector('.js-build-trace').textContent.trim()).toContain(
- 'Different',
- );
- })
- .then(done)
- .catch(done.fail);
- });
+ }).then(() => {
+ expect(
+ wrapper
+ .find('.js-build-trace')
+ .text()
+ .trim(),
+ ).toEqual('Different');
+ }));
});
describe('truncated information', () => {
describe('when size is less than total', () => {
- it('shows information about truncated log', done => {
+ it('shows information about truncated log', () => {
mock.onGet(`${initSettings.pagePath}/trace.json`).reply(200, {
html: '<span>Update</span>',
status: 'success',
@@ -515,7 +455,7 @@ describe('Job App ', () => {
complete: true,
});
- setupAndMount({
+ return setupAndMount({
traceData: {
html: '<span>Update</span>',
status: 'success',
@@ -524,19 +464,19 @@ describe('Job App ', () => {
total: 100,
complete: true,
},
- })
- .then(() => {
- expect(vm.$el.querySelector('.js-truncated-info').textContent.trim()).toContain(
- '50 bytes',
- );
- })
- .then(done)
- .catch(done.fail);
+ }).then(() => {
+ expect(
+ wrapper
+ .find('.js-truncated-info')
+ .text()
+ .trim(),
+ ).toContain('Showing last 50 bytes');
+ });
});
});
describe('when size is equal to total', () => {
- it('does not show the truncated information', done => {
+ it('does not show the truncated information', () =>
setupAndMount({
traceData: {
html: '<span>Update</span>',
@@ -546,20 +486,19 @@ describe('Job App ', () => {
total: 100,
complete: true,
},
- })
- .then(() => {
- expect(vm.$el.querySelector('.js-truncated-info').textContent.trim()).not.toContain(
- '50 bytes',
- );
- })
- .then(done)
- .catch(done.fail);
- });
+ }).then(() => {
+ expect(
+ wrapper
+ .find('.js-truncated-info')
+ .text()
+ .trim(),
+ ).toEqual('');
+ }));
});
});
describe('trace controls', () => {
- beforeEach(done => {
+ beforeEach(() =>
setupAndMount({
traceData: {
html: '<span>Update</span>',
@@ -569,22 +508,20 @@ describe('Job App ', () => {
total: 100,
complete: true,
},
- })
- .then(done)
- .catch(done.fail);
- });
+ }),
+ );
it('should render scroll buttons', () => {
- expect(vm.$el.querySelector('.js-scroll-top')).not.toBeNull();
- expect(vm.$el.querySelector('.js-scroll-bottom')).not.toBeNull();
+ expect(wrapper.find('.js-scroll-top').exists()).toBe(true);
+ expect(wrapper.find('.js-scroll-bottom').exists()).toBe(true);
});
it('should render link to raw output', () => {
- expect(vm.$el.querySelector('.js-raw-link-controller')).not.toBeNull();
+ expect(wrapper.find('.js-raw-link-controller').exists()).toBe(true);
});
it('should render link to erase job', () => {
- expect(vm.$el.querySelector('.js-erase-link')).not.toBeNull();
+ expect(wrapper.find('.js-erase-link').exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/jobs/components/log/mock_data.js b/spec/frontend/jobs/components/log/mock_data.js
index 587818045eb..cdf5a3e10b1 100644
--- a/spec/frontend/jobs/components/log/mock_data.js
+++ b/spec/frontend/jobs/components/log/mock_data.js
@@ -34,7 +34,7 @@ export const utilsMockData = [
content: [
{
text:
- 'Using Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.6.5-golang-1.12-git-2.24-lfs-2.9-chrome-73.0-node-12.x-yarn-1.16-postgresql-9.6-graphicsmagick-1.3.33',
+ 'Using Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.6.5-golang-1.12-git-2.24-lfs-2.9-chrome-73.0-node-12.x-yarn-1.16-postgresql-9.6-graphicsmagick-1.3.34',
},
],
section: 'prepare-executor',
diff --git a/spec/frontend/jobs/mock_data.js b/spec/frontend/jobs/mock_data.js
new file mode 100644
index 00000000000..3d40e94d219
--- /dev/null
+++ b/spec/frontend/jobs/mock_data.js
@@ -0,0 +1,1191 @@
+import { TEST_HOST } from 'spec/test_constants';
+
+const threeWeeksAgo = new Date();
+threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
+
+export const stages = [
+ {
+ name: 'build',
+ title: 'build: running',
+ groups: [
+ {
+ name: 'build:linux',
+ size: 1,
+ status: {
+ icon: 'status_pending',
+ text: 'pending',
+ label: 'pending',
+ group: 'pending',
+ tooltip: 'pending',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/1180',
+ illustration: {
+ image: 'illustrations/pending_job_empty.svg',
+ size: 'svg-430',
+ title: 'This job has not started yet',
+ content: 'This job is in pending state and is waiting to be picked by a runner',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
+ action: {
+ icon: 'cancel',
+ title: 'Cancel',
+ path: '/gitlab-org/gitlab-shell/-/jobs/1180/cancel',
+ method: 'post',
+ },
+ },
+ jobs: [
+ {
+ id: 1180,
+ name: 'build:linux',
+ started: false,
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/1180',
+ cancel_path: '/gitlab-org/gitlab-shell/-/jobs/1180/cancel',
+ playable: false,
+ created_at: '2018-09-28T11:09:57.229Z',
+ updated_at: '2018-09-28T11:09:57.503Z',
+ status: {
+ icon: 'status_pending',
+ text: 'pending',
+ label: 'pending',
+ group: 'pending',
+ tooltip: 'pending',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/1180',
+ illustration: {
+ image: 'illustrations/pending_job_empty.svg',
+ size: 'svg-430',
+ title: 'This job has not started yet',
+ content: 'This job is in pending state and is waiting to be picked by a runner',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
+ action: {
+ icon: 'cancel',
+ title: 'Cancel',
+ path: '/gitlab-org/gitlab-shell/-/jobs/1180/cancel',
+ method: 'post',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'build:osx',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/444',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/444/retry',
+ method: 'post',
+ },
+ },
+ jobs: [
+ {
+ id: 444,
+ name: 'build:osx',
+ started: '2018-05-18T05:32:20.655Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/444',
+ retry_path: '/gitlab-org/gitlab-shell/-/jobs/444/retry',
+ playable: false,
+ created_at: '2018-05-18T15:32:54.364Z',
+ updated_at: '2018-05-18T15:32:54.364Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/444',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/444/retry',
+ method: 'post',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_running',
+ text: 'running',
+ label: 'running',
+ group: 'running',
+ tooltip: 'running',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/pipelines/27#build',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_running-9c635b2419a8e1ec991c993061b89cc5aefc0743bb238ecd0c381e7741a70e8c.png',
+ },
+ path: '/gitlab-org/gitlab-shell/pipelines/27#build',
+ dropdown_path: '/gitlab-org/gitlab-shell/pipelines/27/stage.json?stage=build',
+ },
+ {
+ name: 'test',
+ title: 'test: passed with warnings',
+ groups: [
+ {
+ name: 'jenkins',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: null,
+ group: 'success',
+ tooltip: null,
+ has_details: false,
+ details_path: null,
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ jobs: [
+ {
+ id: 459,
+ name: 'jenkins',
+ started: '2018-05-18T09:32:20.658Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/459',
+ playable: false,
+ created_at: '2018-05-18T15:32:55.330Z',
+ updated_at: '2018-05-18T15:32:55.330Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: null,
+ group: 'success',
+ tooltip: null,
+ has_details: false,
+ details_path: null,
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ },
+ ],
+ },
+ {
+ name: 'rspec:linux',
+ size: 3,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: false,
+ details_path: null,
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ jobs: [
+ {
+ id: 445,
+ name: 'rspec:linux 0 3',
+ started: '2018-05-18T07:32:20.655Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/445',
+ retry_path: '/gitlab-org/gitlab-shell/-/jobs/445/retry',
+ playable: false,
+ created_at: '2018-05-18T15:32:54.425Z',
+ updated_at: '2018-05-18T15:32:54.425Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/445',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/445/retry',
+ method: 'post',
+ },
+ },
+ },
+ {
+ id: 446,
+ name: 'rspec:linux 1 3',
+ started: '2018-05-18T07:32:20.655Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/446',
+ retry_path: '/gitlab-org/gitlab-shell/-/jobs/446/retry',
+ playable: false,
+ created_at: '2018-05-18T15:32:54.506Z',
+ updated_at: '2018-05-18T15:32:54.506Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/446',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/446/retry',
+ method: 'post',
+ },
+ },
+ },
+ {
+ id: 447,
+ name: 'rspec:linux 2 3',
+ started: '2018-05-18T07:32:20.656Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/447',
+ retry_path: '/gitlab-org/gitlab-shell/-/jobs/447/retry',
+ playable: false,
+ created_at: '2018-05-18T15:32:54.572Z',
+ updated_at: '2018-05-18T15:32:54.572Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/447',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/447/retry',
+ method: 'post',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'rspec:osx',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/452',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/452/retry',
+ method: 'post',
+ },
+ },
+ jobs: [
+ {
+ id: 452,
+ name: 'rspec:osx',
+ started: '2018-05-18T07:32:20.657Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/452',
+ retry_path: '/gitlab-org/gitlab-shell/-/jobs/452/retry',
+ playable: false,
+ created_at: '2018-05-18T15:32:54.920Z',
+ updated_at: '2018-05-18T15:32:54.920Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/452',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/452/retry',
+ method: 'post',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'rspec:windows',
+ size: 3,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: false,
+ details_path: null,
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ jobs: [
+ {
+ id: 448,
+ name: 'rspec:windows 0 3',
+ started: '2018-05-18T07:32:20.656Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/448',
+ retry_path: '/gitlab-org/gitlab-shell/-/jobs/448/retry',
+ playable: false,
+ created_at: '2018-05-18T15:32:54.639Z',
+ updated_at: '2018-05-18T15:32:54.639Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/448',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/448/retry',
+ method: 'post',
+ },
+ },
+ },
+ {
+ id: 449,
+ name: 'rspec:windows 1 3',
+ started: '2018-05-18T07:32:20.656Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/449',
+ retry_path: '/gitlab-org/gitlab-shell/-/jobs/449/retry',
+ playable: false,
+ created_at: '2018-05-18T15:32:54.703Z',
+ updated_at: '2018-05-18T15:32:54.703Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/449',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/449/retry',
+ method: 'post',
+ },
+ },
+ },
+ {
+ id: 451,
+ name: 'rspec:windows 2 3',
+ started: '2018-05-18T07:32:20.657Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/451',
+ retry_path: '/gitlab-org/gitlab-shell/-/jobs/451/retry',
+ playable: false,
+ created_at: '2018-05-18T15:32:54.853Z',
+ updated_at: '2018-05-18T15:32:54.853Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/451',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/451/retry',
+ method: 'post',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'spinach:linux',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/453',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/453/retry',
+ method: 'post',
+ },
+ },
+ jobs: [
+ {
+ id: 453,
+ name: 'spinach:linux',
+ started: '2018-05-18T07:32:20.657Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/453',
+ retry_path: '/gitlab-org/gitlab-shell/-/jobs/453/retry',
+ playable: false,
+ created_at: '2018-05-18T15:32:54.993Z',
+ updated_at: '2018-05-18T15:32:54.993Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/453',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/453/retry',
+ method: 'post',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'spinach:osx',
+ size: 1,
+ status: {
+ icon: 'status_warning',
+ text: 'failed',
+ label: 'failed (allowed to fail)',
+ group: 'failed-with-warnings',
+ tooltip: 'failed - (unknown failure) (allowed to fail)',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/454',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/454/retry',
+ method: 'post',
+ },
+ },
+ jobs: [
+ {
+ id: 454,
+ name: 'spinach:osx',
+ started: '2018-05-18T07:32:20.657Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/454',
+ retry_path: '/gitlab-org/gitlab-shell/-/jobs/454/retry',
+ playable: false,
+ created_at: '2018-05-18T15:32:55.053Z',
+ updated_at: '2018-05-18T15:32:55.053Z',
+ status: {
+ icon: 'status_warning',
+ text: 'failed',
+ label: 'failed (allowed to fail)',
+ group: 'failed-with-warnings',
+ tooltip: 'failed - (unknown failure) (allowed to fail)',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/454',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/454/retry',
+ method: 'post',
+ },
+ },
+ callout_message: 'There is an unknown failure, please try again',
+ recoverable: true,
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_warning',
+ text: 'passed',
+ label: 'passed with warnings',
+ group: 'success-with-warnings',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/pipelines/27#test',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/gitlab-org/gitlab-shell/pipelines/27#test',
+ dropdown_path: '/gitlab-org/gitlab-shell/pipelines/27/stage.json?stage=test',
+ },
+ {
+ name: 'deploy',
+ title: 'deploy: running',
+ groups: [
+ {
+ name: 'production',
+ size: 1,
+ status: {
+ icon: 'status_created',
+ text: 'created',
+ label: 'created',
+ group: 'created',
+ tooltip: 'created',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/457',
+ illustration: {
+ image: 'illustrations/job_not_triggered.svg',
+ size: 'svg-306',
+ title: 'This job has not been triggered yet',
+ content:
+ 'This job depends on upstream jobs that need to succeed in order for this job to be triggered',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_created-4b975aa976d24e5a3ea7cd9a5713e6ce2cd9afd08b910415e96675de35f64955.png',
+ action: {
+ icon: 'cancel',
+ title: 'Cancel',
+ path: '/gitlab-org/gitlab-shell/-/jobs/457/cancel',
+ method: 'post',
+ },
+ },
+ jobs: [
+ {
+ id: 457,
+ name: 'production',
+ started: false,
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/457',
+ cancel_path: '/gitlab-org/gitlab-shell/-/jobs/457/cancel',
+ playable: false,
+ created_at: '2018-05-18T15:32:55.259Z',
+ updated_at: '2018-09-28T11:09:57.454Z',
+ status: {
+ icon: 'status_created',
+ text: 'created',
+ label: 'created',
+ group: 'created',
+ tooltip: 'created',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/457',
+ illustration: {
+ image: 'illustrations/job_not_triggered.svg',
+ size: 'svg-306',
+ title: 'This job has not been triggered yet',
+ content:
+ 'This job depends on upstream jobs that need to succeed in order for this job to be triggered',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_created-4b975aa976d24e5a3ea7cd9a5713e6ce2cd9afd08b910415e96675de35f64955.png',
+ action: {
+ icon: 'cancel',
+ title: 'Cancel',
+ path: '/gitlab-org/gitlab-shell/-/jobs/457/cancel',
+ method: 'post',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'staging',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/455',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/455/retry',
+ method: 'post',
+ },
+ },
+ jobs: [
+ {
+ id: 455,
+ name: 'staging',
+ started: '2018-05-18T09:32:20.658Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/455',
+ retry_path: '/gitlab-org/gitlab-shell/-/jobs/455/retry',
+ playable: false,
+ created_at: '2018-05-18T15:32:55.119Z',
+ updated_at: '2018-05-18T15:32:55.119Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/455',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/455/retry',
+ method: 'post',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'stop staging',
+ size: 1,
+ status: {
+ icon: 'status_created',
+ text: 'created',
+ label: 'created',
+ group: 'created',
+ tooltip: 'created',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/456',
+ illustration: {
+ image: 'illustrations/job_not_triggered.svg',
+ size: 'svg-306',
+ title: 'This job has not been triggered yet',
+ content:
+ 'This job depends on upstream jobs that need to succeed in order for this job to be triggered',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_created-4b975aa976d24e5a3ea7cd9a5713e6ce2cd9afd08b910415e96675de35f64955.png',
+ action: {
+ icon: 'cancel',
+ title: 'Cancel',
+ path: '/gitlab-org/gitlab-shell/-/jobs/456/cancel',
+ method: 'post',
+ },
+ },
+ jobs: [
+ {
+ id: 456,
+ name: 'stop staging',
+ started: false,
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/456',
+ cancel_path: '/gitlab-org/gitlab-shell/-/jobs/456/cancel',
+ playable: false,
+ created_at: '2018-05-18T15:32:55.205Z',
+ updated_at: '2018-09-28T11:09:57.396Z',
+ status: {
+ icon: 'status_created',
+ text: 'created',
+ label: 'created',
+ group: 'created',
+ tooltip: 'created',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/456',
+ illustration: {
+ image: 'illustrations/job_not_triggered.svg',
+ size: 'svg-306',
+ title: 'This job has not been triggered yet',
+ content:
+ 'This job depends on upstream jobs that need to succeed in order for this job to be triggered',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_created-4b975aa976d24e5a3ea7cd9a5713e6ce2cd9afd08b910415e96675de35f64955.png',
+ action: {
+ icon: 'cancel',
+ title: 'Cancel',
+ path: '/gitlab-org/gitlab-shell/-/jobs/456/cancel',
+ method: 'post',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_running',
+ text: 'running',
+ label: 'running',
+ group: 'running',
+ tooltip: 'running',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/pipelines/27#deploy',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_running-9c635b2419a8e1ec991c993061b89cc5aefc0743bb238ecd0c381e7741a70e8c.png',
+ },
+ path: '/gitlab-org/gitlab-shell/pipelines/27#deploy',
+ dropdown_path: '/gitlab-org/gitlab-shell/pipelines/27/stage.json?stage=deploy',
+ },
+ {
+ name: 'notify',
+ title: 'notify: manual action',
+ groups: [
+ {
+ name: 'slack',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/458',
+ illustration: {
+ image: 'illustrations/manual_action.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-org/gitlab-shell/-/jobs/458/play',
+ method: 'post',
+ },
+ },
+ jobs: [
+ {
+ id: 458,
+ name: 'slack',
+ started: null,
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/458',
+ play_path: '/gitlab-org/gitlab-shell/-/jobs/458/play',
+ playable: true,
+ created_at: '2018-05-18T15:32:55.303Z',
+ updated_at: '2018-05-18T15:34:08.535Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/458',
+ illustration: {
+ image: 'illustrations/manual_action.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-org/gitlab-shell/-/jobs/458/play',
+ method: 'post',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/pipelines/27#notify',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ },
+ path: '/gitlab-org/gitlab-shell/pipelines/27#notify',
+ dropdown_path: '/gitlab-org/gitlab-shell/pipelines/27/stage.json?stage=notify',
+ },
+];
+
+export default {
+ id: 4757,
+ name: 'test',
+ build_path: '/root/ci-mock/-/jobs/4757',
+ retry_path: '/root/ci-mock/-/jobs/4757/retry',
+ cancel_path: '/root/ci-mock/-/jobs/4757/cancel',
+ new_issue_path: '/root/ci-mock/issues/new',
+ playable: false,
+ created_at: threeWeeksAgo.toISOString(),
+ updated_at: threeWeeksAgo.toISOString(),
+ finished_at: threeWeeksAgo.toISOString(),
+ queued: 9.54,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ has_details: true,
+ details_path: `${TEST_HOST}/root/ci-mock/-/jobs/4757`,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/root/ci-mock/-/jobs/4757/retry',
+ method: 'post',
+ },
+ },
+ coverage: 20,
+ erased_at: threeWeeksAgo.toISOString(),
+ erased: false,
+ duration: 6.785563,
+ tags: ['tag'],
+ user: {
+ name: 'Root',
+ username: 'root',
+ id: 1,
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ web_url: 'http://localhost:3000/root',
+ },
+ erase_path: '/root/ci-mock/-/jobs/4757/erase',
+ artifacts: [null],
+ runner: {
+ id: 1,
+ description: 'local ci runner',
+ edit_path: '/root/ci-mock/runners/1/edit',
+ },
+ pipeline: {
+ id: 140,
+ user: {
+ name: 'Root',
+ username: 'root',
+ id: 1,
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ web_url: 'http://localhost:3000/root',
+ },
+ active: false,
+ coverage: null,
+ source: 'unknown',
+ created_at: '2017-05-24T09:59:58.634Z',
+ updated_at: '2017-06-01T17:32:00.062Z',
+ path: '/root/ci-mock/pipelines/140',
+ flags: {
+ latest: true,
+ stuck: false,
+ yaml_errors: false,
+ retryable: false,
+ cancelable: false,
+ },
+ details: {
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ has_details: true,
+ details_path: '/root/ci-mock/pipelines/140',
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
+ },
+ duration: 6,
+ finished_at: '2017-06-01T17:32:00.042Z',
+ stages: [
+ {
+ dropdown_path: '/jashkenas/underscore/pipelines/16/stage.json?stage=build',
+ name: 'build',
+ path: '/jashkenas/underscore/pipelines/16#build',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ },
+ title: 'build: passed',
+ },
+ {
+ dropdown_path: '/jashkenas/underscore/pipelines/16/stage.json?stage=test',
+ name: 'test',
+ path: '/jashkenas/underscore/pipelines/16#test',
+ status: {
+ icon: 'status_warning',
+ text: 'passed',
+ label: 'passed with warnings',
+ group: 'success-with-warnings',
+ },
+ title: 'test: passed with warnings',
+ },
+ ],
+ },
+ ref: {
+ name: 'abc',
+ path: '/root/ci-mock/commits/abc',
+ tag: false,
+ branch: true,
+ },
+ commit: {
+ id: 'c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
+ short_id: 'c5864777',
+ title: 'Add new file',
+ created_at: '2017-05-24T10:59:52.000+01:00',
+ parent_ids: ['798e5f902592192afaba73f4668ae30e56eae492'],
+ message: 'Add new file',
+ author_name: 'Root',
+ author_email: 'admin@example.com',
+ authored_date: '2017-05-24T10:59:52.000+01:00',
+ committer_name: 'Root',
+ committer_email: 'admin@example.com',
+ committed_date: '2017-05-24T10:59:52.000+01:00',
+ author: {
+ name: 'Root',
+ username: 'root',
+ id: 1,
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ web_url: 'http://localhost:3000/root',
+ },
+ author_gravatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ commit_url:
+ 'http://localhost:3000/root/ci-mock/commit/c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
+ commit_path: '/root/ci-mock/commit/c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
+ },
+ },
+ metadata: {
+ timeout_human_readable: '1m 40s',
+ timeout_source: 'runner',
+ },
+ merge_request: {
+ iid: 2,
+ path: '/root/ci-mock/merge_requests/2',
+ },
+ raw_path: '/root/ci-mock/builds/4757/raw',
+ has_trace: true,
+};
+
+export const jobsInStage = {
+ name: 'build',
+ title: 'build: running',
+ latest_statuses: [
+ {
+ id: 1180,
+ name: 'build:linux',
+ started: false,
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/1180',
+ cancel_path: '/gitlab-org/gitlab-shell/-/jobs/1180/cancel',
+ playable: false,
+ created_at: '2018-09-28T11:09:57.229Z',
+ updated_at: '2018-09-28T11:09:57.503Z',
+ status: {
+ icon: 'status_pending',
+ text: 'pending',
+ label: 'pending',
+ group: 'pending',
+ tooltip: 'pending',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/1180',
+ illustration: {
+ image: 'illustrations/pending_job_empty.svg',
+ size: 'svg-430',
+ title: 'This job has not started yet',
+ content: 'This job is in pending state and is waiting to be picked by a runner',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
+ action: {
+ icon: 'cancel',
+ title: 'Cancel',
+ path: '/gitlab-org/gitlab-shell/-/jobs/1180/cancel',
+ method: 'post',
+ },
+ },
+ },
+ {
+ id: 444,
+ name: 'build:osx',
+ started: '2018-05-18T05:32:20.655Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/444',
+ retry_path: '/gitlab-org/gitlab-shell/-/jobs/444/retry',
+ playable: false,
+ created_at: '2018-05-18T15:32:54.364Z',
+ updated_at: '2018-05-18T15:32:54.364Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/444',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/444/retry',
+ method: 'post',
+ },
+ },
+ },
+ ],
+ retried: [
+ {
+ id: 443,
+ name: 'build:linux',
+ started: '2018-05-18T06:32:20.655Z',
+ build_path: '/gitlab-org/gitlab-shell/-/jobs/443',
+ retry_path: '/gitlab-org/gitlab-shell/-/jobs/443/retry',
+ playable: false,
+ created_at: '2018-05-18T15:32:54.296Z',
+ updated_at: '2018-05-18T15:32:54.296Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed (retried)',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/-/jobs/443',
+ illustration: {
+ image: 'illustrations/skipped-job_empty.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-shell/-/jobs/443/retry',
+ method: 'post',
+ },
+ },
+ },
+ ],
+ status: {
+ icon: 'status_running',
+ text: 'running',
+ label: 'running',
+ group: 'running',
+ tooltip: 'running',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-shell/pipelines/27#build',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_running-9c635b2419a8e1ec991c993061b89cc5aefc0743bb238ecd0c381e7741a70e8c.png',
+ },
+ path: '/gitlab-org/gitlab-shell/pipelines/27#build',
+ dropdown_path: '/gitlab-org/gitlab-shell/pipelines/27/stage.json?stage=build',
+};
diff --git a/spec/frontend/jobs/store/mutations_spec.js b/spec/frontend/jobs/store/mutations_spec.js
index d1ab152330e..d77690ffac0 100644
--- a/spec/frontend/jobs/store/mutations_spec.js
+++ b/spec/frontend/jobs/store/mutations_spec.js
@@ -157,17 +157,21 @@ describe('Jobs Store Mutations', () => {
});
});
- describe('STOP_POLLING_TRACE', () => {
- it('sets isTraceComplete to true', () => {
- mutations[types.STOP_POLLING_TRACE](stateCopy);
+ describe('SET_TRACE_TIMEOUT', () => {
+ it('sets the traceTimeout id', () => {
+ const id = 7;
- expect(stateCopy.isTraceComplete).toEqual(true);
+ expect(stateCopy.traceTimeout).not.toEqual(id);
+
+ mutations[types.SET_TRACE_TIMEOUT](stateCopy, id);
+
+ expect(stateCopy.traceTimeout).toEqual(id);
});
});
- describe('RECEIVE_TRACE_ERROR', () => {
- it('resets trace state and sets error to true', () => {
- mutations[types.RECEIVE_TRACE_ERROR](stateCopy);
+ describe('STOP_POLLING_TRACE', () => {
+ it('sets isTraceComplete to true', () => {
+ mutations[types.STOP_POLLING_TRACE](stateCopy);
expect(stateCopy.isTraceComplete).toEqual(true);
});
diff --git a/spec/frontend/lib/utils/datetime_range_spec.js b/spec/frontend/lib/utils/datetime_range_spec.js
new file mode 100644
index 00000000000..8b1f284615d
--- /dev/null
+++ b/spec/frontend/lib/utils/datetime_range_spec.js
@@ -0,0 +1,382 @@
+import _ from 'lodash';
+import {
+ getRangeType,
+ convertToFixedRange,
+ isEqualTimeRanges,
+ findTimeRange,
+ timeRangeToParams,
+ timeRangeFromParams,
+} from '~/lib/utils/datetime_range';
+
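+// Fixed "now" timestamp (2020-01-23T20:00:00.000Z); Date.now() is mocked to return this in the convertToFixedRange tests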
+const MOCK_NOW = Date.UTC(2020, 0, 23, 20);
+
+const MOCK_NOW_ISO_STRING = new Date(MOCK_NOW).toISOString();
+
+const mockFixedRange = {
+ label: 'January 2020',
+ start: '2020-01-01T00:00:00.000Z',
+ end: '2020-01-31T23:59:00.000Z',
+};
+
+const mockAnchoredRange = {
+ label: 'First two minutes of 2020',
+ anchor: '2020-01-01T00:00:00.000Z',
+ direction: 'after',
+ duration: {
+ seconds: 60 * 2,
+ },
+};
+
+const mockRollingRange = {
+ label: 'Next 2 minutes',
+ direction: 'after',
+ duration: {
+ seconds: 60 * 2,
+ },
+};
+
+const mockOpenRange = {
+ label: '2020 so far',
+ anchor: '2020-01-01T00:00:00.000Z',
+ direction: 'after',
+};
+
+describe('Date time range utils', () => {
+ describe('getRangeType', () => {
+ it('correctly infers the range type from the input object', () => {
+ const rangeTypes = {
+ fixed: [{ start: MOCK_NOW_ISO_STRING, end: MOCK_NOW_ISO_STRING }],
+ anchored: [{ anchor: MOCK_NOW_ISO_STRING, duration: { seconds: 0 } }],
+ rolling: [{ duration: { seconds: 0 } }],
+ open: [{ anchor: MOCK_NOW_ISO_STRING }],
+ invalid: [
+ {},
+ { start: MOCK_NOW_ISO_STRING },
+ { end: MOCK_NOW_ISO_STRING },
+ { start: 'NOT_A_DATE', end: 'NOT_A_DATE' },
+ { duration: { seconds: 'NOT_A_NUMBER' } },
+ { duration: { seconds: Infinity } },
+ { duration: { minutes: 20 } },
+ { anchor: MOCK_NOW_ISO_STRING, duration: { seconds: 'NOT_A_NUMBER' } },
+ { anchor: MOCK_NOW_ISO_STRING, duration: { seconds: Infinity } },
+ { junk: 'exists' },
+ ],
+ };
+
+ Object.entries(rangeTypes).forEach(([type, examples]) => {
+ examples.forEach(example => expect(getRangeType(example)).toEqual(type));
+ });
+ });
+ });
+
+ describe('convertToFixedRange', () => {
+ beforeEach(() => {
+ jest.spyOn(Date, 'now').mockImplementation(() => MOCK_NOW);
+ });
+
+ afterEach(() => {
+ Date.now.mockRestore();
+ });
+
+ describe('When a fixed range is input', () => {
+ it('converts a fixed range to an equal fixed range', () => {
+ expect(convertToFixedRange(mockFixedRange)).toEqual({
+ start: mockFixedRange.start,
+ end: mockFixedRange.end,
+ });
+ });
+
+ it('throws an error when fixed range does not contain an end time', () => {
+ const aFixedRangeMissingEnd = _.omit(mockFixedRange, 'end');
+
+ expect(() => convertToFixedRange(aFixedRangeMissingEnd)).toThrow();
+ });
+
+ it('throws an error when fixed range does not contain a start time', () => {
+ const aFixedRangeMissingStart = _.omit(mockFixedRange, 'start');
+
+ expect(() => convertToFixedRange(aFixedRangeMissingStart)).toThrow();
+ });
+
+ it('throws an error when the dates cannot be parsed', () => {
+ const wrongStart = { ...mockFixedRange, start: 'I_CANNOT_BE_PARSED' };
+ const wrongEnd = { ...mockFixedRange, end: 'I_CANNOT_BE_PARSED' };
+
+ expect(() => convertToFixedRange(wrongStart)).toThrow();
+ expect(() => convertToFixedRange(wrongEnd)).toThrow();
+ });
+ });
+
+ describe('When an anchored range is input', () => {
+ it('converts to a fixed range', () => {
+ expect(convertToFixedRange(mockAnchoredRange)).toEqual({
+ start: '2020-01-01T00:00:00.000Z',
+ end: '2020-01-01T00:02:00.000Z',
+ });
+ });
+
+ it('converts to a fixed range with a `before` direction', () => {
+ expect(convertToFixedRange({ ...mockAnchoredRange, direction: 'before' })).toEqual({
+ start: '2019-12-31T23:58:00.000Z',
+ end: '2020-01-01T00:00:00.000Z',
+ });
+ });
+
+ it('converts to a fixed range without an explicit direction, defaulting to `before`', () => {
+ const defaultDirectionRange = _.omit(mockAnchoredRange, 'direction');
+
+ expect(convertToFixedRange(defaultDirectionRange)).toEqual({
+ start: '2019-12-31T23:58:00.000Z',
+ end: '2020-01-01T00:00:00.000Z',
+ });
+ });
+
+ it('throws an error when the anchor cannot be parsed', () => {
+ const wrongAnchor = { ...mockAnchoredRange, anchor: 'I_CANNOT_BE_PARSED' };
+
+ expect(() => convertToFixedRange(wrongAnchor)).toThrow();
+ });
+ });
+
+ describe('when a rolling range is input', () => {
+ it('converts to a fixed range', () => {
+ expect(convertToFixedRange(mockRollingRange)).toEqual({
+ start: '2020-01-23T20:00:00.000Z',
+ end: '2020-01-23T20:02:00.000Z',
+ });
+ });
+
+ it('converts to a fixed range with an implicit `before` direction', () => {
+ const noDirection = _.omit(mockRollingRange, 'direction');
+
+ expect(convertToFixedRange(noDirection)).toEqual({
+ start: '2020-01-23T19:58:00.000Z',
+ end: '2020-01-23T20:00:00.000Z',
+ });
+ });
+
+ it('throws an error when the duration is not in the right format', () => {
+ const wrongDuration = { ...mockRollingRange, duration: { minutes: 20 } };
+
+ expect(() => convertToFixedRange(wrongDuration)).toThrow();
+ });
+
+ it('throws an error when the anchor is not valid', () => {
+ const wrongAnchor = { ...mockRollingRange, anchor: 'CAN_T_PARSE_THIS' };
+
+ expect(() => convertToFixedRange(wrongAnchor)).toThrow();
+ });
+ });
+
+ describe('when an open range is input', () => {
+ it('converts to a fixed range with an `after` direction', () => {
+ expect(convertToFixedRange(mockOpenRange)).toEqual({
+ start: '2020-01-01T00:00:00.000Z',
+ end: '2020-01-23T20:00:00.000Z',
+ });
+ });
+
+ it('converts to a fixed range with the explicit `before` direction', () => {
+ const beforeOpenRange = { ...mockOpenRange, direction: 'before' };
+
+ expect(convertToFixedRange(beforeOpenRange)).toEqual({
+ start: '1970-01-01T00:00:00.000Z',
+ end: '2020-01-01T00:00:00.000Z',
+ });
+ });
+
+ it('converts to a fixed range with the implicit `before` direction', () => {
+ const noDirectionOpenRange = _.omit(mockOpenRange, 'direction');
+
+ expect(convertToFixedRange(noDirectionOpenRange)).toEqual({
+ start: '1970-01-01T00:00:00.000Z',
+ end: '2020-01-01T00:00:00.000Z',
+ });
+ });
+
+ it('throws an error when the anchor cannot be parsed', () => {
+ const wrongAnchor = { ...mockOpenRange, anchor: 'CAN_T_PARSE_THIS' };
+
+ expect(() => convertToFixedRange(wrongAnchor)).toThrow();
+ });
+ });
+ });
+
+ describe('isEqualTimeRanges', () => {
+ it('only compares the relevant properties', () => {
+ expect(
+ isEqualTimeRanges(
+ {
+ ...mockFixedRange,
+ label: 'A label',
+ default: true,
+ },
+ {
+ ...mockFixedRange,
+ label: 'Another label',
+ default: false,
+ anotherKey: 'anotherValue',
+ },
+ ),
+ ).toBe(true);
+
+ expect(
+ isEqualTimeRanges(
+ {
+ ...mockAnchoredRange,
+ label: 'A label',
+ default: true,
+ },
+ {
+ ...mockAnchoredRange,
+ anotherKey: 'anotherValue',
+ },
+ ),
+ ).toBe(true);
+ });
+ });
+
+ describe('findTimeRange', () => {
+ const timeRanges = [
+ {
+ label: 'Before 2020',
+ anchor: '2020-01-01T00:00:00.000Z',
+ },
+ {
+ label: 'Last 30 minutes',
+ duration: { seconds: 60 * 30 },
+ },
+ {
+ label: 'In 2019',
+ start: '2019-01-01T00:00:00.000Z',
+ end: '2019-12-31T12:59:59.999Z',
+ },
+ {
+ label: 'Next 2 minutes',
+ direction: 'after',
+ duration: {
+ seconds: 60 * 2,
+ },
+ },
+ ];
+
+ it('finds a time range', () => {
+ const tr0 = {
+ anchor: '2020-01-01T00:00:00.000Z',
+ };
+ expect(findTimeRange(tr0, timeRanges)).toBe(timeRanges[0]);
+
+ const tr1 = {
+ duration: { seconds: 60 * 30 },
+ };
+ expect(findTimeRange(tr1, timeRanges)).toBe(timeRanges[1]);
+
+ const tr1Direction = {
+ direction: 'before',
+ duration: {
+ seconds: 60 * 30,
+ },
+ };
+ expect(findTimeRange(tr1Direction, timeRanges)).toBe(timeRanges[1]);
+
+ const tr2 = {
+ someOtherLabel: 'Added arbitrarily',
+ start: '2019-01-01T00:00:00.000Z',
+ end: '2019-12-31T12:59:59.999Z',
+ };
+ expect(findTimeRange(tr2, timeRanges)).toBe(timeRanges[2]);
+
+ const tr3 = {
+ direction: 'after',
+ duration: {
+ seconds: 60 * 2,
+ },
+ };
+ expect(findTimeRange(tr3, timeRanges)).toBe(timeRanges[3]);
+ });
+
+ it('does not find a missing time range', () => {
+ const nonExistent = {
+ direction: 'before',
+ duration: {
+ seconds: 200,
+ },
+ };
+ expect(findTimeRange(nonExistent, timeRanges)).toBeUndefined();
+ });
+ });
+
+ describe('conversion to/from params', () => {
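+ // Note: duration_seconds is a string in the param mocks below, as values read from a URL query string would be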
+ const mockFixedParams = {
+ start: '2020-01-01T00:00:00.000Z',
+ end: '2020-01-31T23:59:00.000Z',
+ };
+
+ const mockAnchoredParams = {
+ anchor: '2020-01-01T00:00:00.000Z',
+ direction: 'after',
+ duration_seconds: '120',
+ };
+
+ const mockRollingParams = {
+ direction: 'after',
+ duration_seconds: '120',
+ };
+
+ describe('timeRangeToParams', () => {
+ it('converts fixed ranges to params', () => {
+ expect(timeRangeToParams(mockFixedRange)).toEqual(mockFixedParams);
+ });
+
+ it('converts anchored ranges to params', () => {
+ expect(timeRangeToParams(mockAnchoredRange)).toEqual(mockAnchoredParams);
+ });
+
+ it('converts rolling ranges to params', () => {
+ expect(timeRangeToParams(mockRollingRange)).toEqual(mockRollingParams);
+ });
+ });
+
+ describe('timeRangeFromParams', () => {
+ it('converts fixed ranges from params', () => {
+ const params = { ...mockFixedParams, other_param: 'other_value' };
+ const expectedRange = _.omit(mockFixedRange, 'label');
+
+ expect(timeRangeFromParams(params)).toEqual(expectedRange);
+ });
+
+ it('converts rolling ranges from params', () => {
+ const expectedRange = _.omit(mockRollingRange, 'label');
+
+ expect(timeRangeFromParams(mockRollingParams)).toEqual(expectedRange);
+ });
+
+ it('converts rolling ranges from params', () => {
+ const params = { ...mockRollingParams, other_param: 'other_value' };
+ const expectedRange = _.omit(mockRollingRange, 'label');
+
+ expect(timeRangeFromParams(params)).toEqual(expectedRange);
+ });
+
+ it('converts rolling ranges from params with a default direction', () => {
+ const params = {
+ ...mockRollingParams,
+ direction: 'before',
+ other_param: 'other_value',
+ };
+ const expectedRange = _.omit(mockRollingRange, 'label', 'direction');
+
+ expect(timeRangeFromParams(params)).toEqual(expectedRange);
+ });
+
+ it('converts to null when there are no relevant params', () => {
+ const range = {
+ useless_param_1: 'value1',
+ useless_param_2: 'value2',
+ };
+
+ expect(timeRangeFromParams(range)).toBe(null);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index 048736d75f6..d0abf2c03a9 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -1,5 +1,20 @@
import * as urlUtils from '~/lib/utils/url_utility';
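+// 40-character hex SHAs (valid) and malformed lookalikes (invalid) used by the urlContainsSha and getShaFromUrl tests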
+const shas = {
+ valid: [
+ 'ad9be38573f9ee4c4daec22673478c2dd1d81cd8',
+ '76e07a692f65a2f4fd72f107a3e83908bea9b7eb',
+ '9dd8f215b1e8605b1d59eaf9df1178081cda0aaf',
+ 'f2e0be58c4091b033203bae1cc0302febd54117d',
+ ],
+ invalid: [
+ 'zd9be38573f9ee4c4daec22673478c2dd1d81cd8',
+ ':6e07a692f65a2f4fd72f107a3e83908bea9b7eb',
+ '-dd8f215b1e8605b1d59eaf9df1178081cda0aaf',
+ ' 2e0be58c4091b033203bae1cc0302febd54117d',
+ ],
+};
+
const setWindowLocation = value => {
Object.defineProperty(window, 'location', {
writable: true,
@@ -13,6 +28,12 @@ describe('URL utility', () => {
gon.relative_url_root = '';
});
+ it('escapes special characters', () => {
+ expect(urlUtils.webIDEUrl('/gitlab-org/gitlab-#-foss/merge_requests/1')).toBe(
+ '/-/ide/project/gitlab-org/gitlab-%23-foss/merge_requests/1',
+ );
+ });
+
describe('without relative_url_root', () => {
it('returns IDE path with route', () => {
expect(urlUtils.webIDEUrl('/gitlab-org/gitlab-foss/merge_requests/1')).toBe(
@@ -154,6 +175,44 @@ describe('URL utility', () => {
});
});
+ describe('urlContainsSha', () => {
+ it('returns true when there is a valid 40-character SHA1 hash in the URL', () => {
+ shas.valid.forEach(sha => {
+ expect(
+ urlUtils.urlContainsSha({ url: `http://urlstuff/${sha}/moreurlstuff` }),
+ ).toBeTruthy();
+ });
+ });
+
+ it('returns false when there is not a valid 40-character SHA1 hash in the URL', () => {
+ shas.invalid.forEach(str => {
+ expect(urlUtils.urlContainsSha({ url: `http://urlstuff/${str}/moreurlstuff` })).toBeFalsy();
+ });
+ });
+ });
+
+ describe('getShaFromUrl', () => {
+ let validUrls = [];
+ let invalidUrls = [];
+
+ beforeAll(() => {
+ validUrls = shas.valid.map(sha => `http://urlstuff/${sha}/moreurlstuff`);
+ invalidUrls = shas.invalid.map(str => `http://urlstuff/${str}/moreurlstuff`);
+ });
+
+ it('returns the valid 40-character SHA1 hash from the URL', () => {
+ validUrls.forEach((url, idx) => {
+ expect(urlUtils.getShaFromUrl({ url })).toBe(shas.valid[idx]);
+ });
+ });
+
+ it('returns null from a URL with no valid 40-character SHA1 hash', () => {
+ invalidUrls.forEach(url => {
+ expect(urlUtils.getShaFromUrl({ url })).toBeNull();
+ });
+ });
+ });
+
describe('setUrlFragment', () => {
it('should set fragment when url has no fragment', () => {
const url = urlUtils.setUrlFragment('/home/feature', 'usage');
@@ -174,6 +233,44 @@ describe('URL utility', () => {
});
});
+ describe('updateHistory', () => {
+ const state = { key: 'prop' };
+ const title = 'TITLE';
+ const url = 'URL';
+ const win = {
+ history: {
+ pushState: jest.fn(),
+ replaceState: jest.fn(),
+ },
+ };
+
+ beforeEach(() => {
+ win.history.pushState.mockReset();
+ win.history.replaceState.mockReset();
+ });
+
+ it('should call replaceState if the replace option is true', () => {
+ urlUtils.updateHistory({ state, title, url, replace: true, win });
+
+ expect(win.history.replaceState).toHaveBeenCalledWith(state, title, url);
+ expect(win.history.pushState).not.toHaveBeenCalled();
+ });
+
+ it('should call pushState if the replace option is missing', () => {
+ urlUtils.updateHistory({ state, title, url, win });
+
+ expect(win.history.replaceState).not.toHaveBeenCalled();
+ expect(win.history.pushState).toHaveBeenCalledWith(state, title, url);
+ });
+
+ it('should call pushState if the replace option is false', () => {
+ urlUtils.updateHistory({ state, title, url, replace: false, win });
+
+ expect(win.history.replaceState).not.toHaveBeenCalled();
+ expect(win.history.pushState).toHaveBeenCalledWith(state, title, url);
+ });
+ });
+
describe('getBaseURL', () => {
beforeEach(() => {
setWindowLocation({
@@ -331,6 +428,22 @@ describe('URL utility', () => {
});
});
+ describe('urlIsDifferent', () => {
+ beforeEach(() => {
+ setWindowLocation('current');
+ });
+
+ it('should compare against the window location if no compare value is provided', () => {
+ expect(urlUtils.urlIsDifferent('different')).toBeTruthy();
+ expect(urlUtils.urlIsDifferent('current')).toBeFalsy();
+ });
+
+ it('should use the provided compare value', () => {
+ expect(urlUtils.urlIsDifferent('different', 'current')).toBeTruthy();
+ expect(urlUtils.urlIsDifferent('current', 'current')).toBeFalsy();
+ });
+ });
+
describe('setUrlParams', () => {
it('adds new params as query string', () => {
const url = 'https://gitlab.com/test';
diff --git a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
new file mode 100644
index 00000000000..c705270343b
--- /dev/null
+++ b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
@@ -0,0 +1,102 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Dashboard template matches the default snapshot 1`] = `
+<div
+ class="prometheus-graphs"
+ data-qa-selector="prometheus_graphs"
+>
+ <div
+ class="prometheus-graphs-header gl-p-3 pb-0 border-bottom bg-gray-light"
+ >
+ <div
+ class="row"
+ >
+ <gl-form-group-stub
+ class="col-sm-12 col-md-6 col-lg-2"
+ label="Dashboard"
+ label-for="monitor-dashboards-dropdown"
+ label-size="sm"
+ >
+ <dashboards-dropdown-stub
+ class="mb-0 d-flex"
+ defaultbranch="master"
+ id="monitor-dashboards-dropdown"
+ selecteddashboard="[object Object]"
+ toggle-class="dropdown-menu-toggle"
+ />
+ </gl-form-group-stub>
+
+ <gl-form-group-stub
+ class="col-sm-6 col-md-6 col-lg-2"
+ label="Environment"
+ label-for="monitor-environments-dropdown"
+ label-size="sm"
+ >
+ <gl-dropdown-stub
+ class="mb-0 d-flex"
+ data-qa-selector="environments_dropdown"
+ id="monitor-environments-dropdown"
+ menu-class="monitor-environment-dropdown-menu"
+ text="production"
+ toggle-class="dropdown-menu-toggle"
+ >
+ <div
+ class="d-flex flex-column overflow-hidden"
+ >
+ <gl-dropdown-header-stub
+ class="monitor-environment-dropdown-header text-center"
+ >
+ Environment
+ </gl-dropdown-header-stub>
+
+ <gl-dropdown-divider-stub />
+
+ <gl-search-box-by-type-stub
+ class="m-2"
+ value=""
+ />
+
+ <div
+ class="flex-fill overflow-auto"
+ />
+
+ <div
+ class="text-secondary no-matches-message"
+ >
+
+ No matching results
+
+ </div>
+ </div>
+ </gl-dropdown-stub>
+ </gl-form-group-stub>
+
+ <gl-form-group-stub
+ class="col-sm-6 col-md-6 col-lg-4"
+ label="Show last"
+ label-for="monitor-time-window-dropdown"
+ label-size="sm"
+ >
+ <date-time-picker-stub
+ options="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
+ value="[object Object]"
+ />
+ </gl-form-group-stub>
+
+ <!---->
+ </div>
+ </div>
+
+ <empty-state-stub
+ clusterspath="/path/to/clusters"
+ documentationpath="/path/to/docs"
+ emptygettingstartedsvgpath="/path/to/getting-started.svg"
+ emptyloadingsvgpath="/path/to/loading.svg"
+ emptynodatasmallsvgpath="/path/to/no-data-small.svg"
+ emptynodatasvgpath="/path/to/no-data.svg"
+ emptyunabletoconnectsvgpath="/path/to/unable-to-connect.svg"
+ selectedstate="gettingStarted"
+ settingspath="/path/to/settings"
+ />
+</div>
+`;
diff --git a/spec/frontend/monitoring/components/charts/single_stat_spec.js b/spec/frontend/monitoring/components/charts/single_stat_spec.js
index 2410dae112b..1aa7ba867b4 100644
--- a/spec/frontend/monitoring/components/charts/single_stat_spec.js
+++ b/spec/frontend/monitoring/components/charts/single_stat_spec.js
@@ -18,9 +18,53 @@ describe('Single Stat Chart component', () => {
});
describe('computed', () => {
- describe('engineeringNotation', () => {
+ describe('statValue', () => {
it('should interpolate the value and unit props', () => {
- expect(singleStatChart.vm.engineeringNotation).toBe('91MB');
+ expect(singleStatChart.vm.statValue).toBe('91MB');
+ });
+
+ it('should change the value representation to a percentile one', () => {
+ singleStatChart.setProps({
+ graphData: {
+ ...graphDataPrometheusQuery,
+ max_value: 120,
+ },
+ });
+
+ expect(singleStatChart.vm.statValue).toContain('75.8');
+ });
+
+ it('should display NaN for non-numeric max_value values', () => {
+ singleStatChart.setProps({
+ graphData: {
+ ...graphDataPrometheusQuery,
+ max_value: 'not a number',
+ },
+ });
+
+ expect(singleStatChart.vm.statValue).toContain('NaN');
+ });
+
+ it('should display NaN for missing query values', () => {
+ singleStatChart.setProps({
+ graphData: {
+ ...graphDataPrometheusQuery,
+ metrics: [
+ {
+ ...graphDataPrometheusQuery.metrics[0],
+ result: [
+ {
+ ...graphDataPrometheusQuery.metrics[0].result[0],
+ value: [''],
+ },
+ ],
+ },
+ ],
+ max_value: 120,
+ },
+ });
+
+ expect(singleStatChart.vm.statValue).toContain('NaN');
});
});
});
diff --git a/spec/frontend/monitoring/components/charts/stacked_column_spec.js b/spec/frontend/monitoring/components/charts/stacked_column_spec.js
new file mode 100644
index 00000000000..abb89ac15ef
--- /dev/null
+++ b/spec/frontend/monitoring/components/charts/stacked_column_spec.js
@@ -0,0 +1,45 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlStackedColumnChart } from '@gitlab/ui/dist/charts';
+import StackedColumnChart from '~/monitoring/components/charts/stacked_column.vue';
+import { stackedColumnMockedData } from '../../mock_data';
+
+jest.mock('~/lib/utils/icon_utils', () => ({
+ getSvgIconPathContent: jest.fn().mockResolvedValue('mockSvgPathContent'),
+}));
+
+describe('Stacked column chart component', () => {
+ let wrapper;
+ const glStackedColumnChart = () => wrapper.find(GlStackedColumnChart);
+
+ beforeEach(() => {
+ wrapper = shallowMount(StackedColumnChart, {
+ propsData: {
+ graphData: stackedColumnMockedData,
+ },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('with graphData present', () => {
+ it('is a Vue instance', () => {
+ expect(glStackedColumnChart().exists()).toBe(true);
+ });
+
+ it('should contain the same number of elements in the seriesNames computed prop as the graphData metrics prop', () =>
+ wrapper.vm
+ .$nextTick()
+ .then(() => expect(wrapper.vm.seriesNames).toHaveLength(stackedColumnMockedData.metrics.length)));
+
+ it('should contain the same number of elements in the groupBy computed prop as the graphData result prop', () =>
+ wrapper.vm
+ .$nextTick()
+ .then(() =>
+ expect(wrapper.vm.groupBy).toHaveLength(
+ stackedColumnMockedData.metrics[0].result[0].values.length,
+ ),
+ ));
+ });
+});
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index d9960b3d18e..a911b925b66 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -3,12 +3,13 @@ import { setTestTimeout } from 'helpers/timeout';
import { GlLink } from '@gitlab/ui';
import { GlAreaChart, GlLineChart, GlChartSeriesLabel } from '@gitlab/ui/dist/charts';
import { shallowWrapperContainsSlotText } from 'helpers/vue_test_utils_helper';
+import { chartColorValues } from '~/monitoring/constants';
import { createStore } from '~/monitoring/stores';
import TimeSeries from '~/monitoring/components/charts/time_series.vue';
import * as types from '~/monitoring/stores/mutation_types';
import {
deploymentData,
- metricsGroupsAPIResponse,
+ metricsDashboardPayload,
mockedQueryResultPayload,
mockProjectDir,
mockHost,
@@ -18,6 +19,15 @@ import * as iconUtils from '~/lib/utils/icon_utils';
const mockWidgets = 'mockWidgets';
const mockSvgPathContent = 'mockSvgPathContent';
+
+jest.mock('lodash/throttle', () =>
+ // this throttle mock executes immediately
+ jest.fn(func => {
+ // eslint-disable-next-line no-param-reassign
+ func.cancel = jest.fn();
+ return func;
+ }),
+);
jest.mock('~/lib/utils/icon_utils', () => ({
getSvgIconPathContent: jest.fn().mockImplementation(() => Promise.resolve(mockSvgPathContent)),
}));
@@ -34,7 +44,7 @@ describe('Time series component', () => {
store.commit(
`monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
- metricsGroupsAPIResponse,
+ metricsDashboardPayload,
);
store.commit(`monitoringDashboard/${types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS}`, deploymentData);
@@ -65,6 +75,8 @@ describe('Time series component', () => {
describe('general functions', () => {
let timeSeriesChart;
+ const findChart = () => timeSeriesChart.find({ ref: 'chart' });
+
beforeEach(done => {
timeSeriesChart = makeTimeSeriesChart(mockGraphData, 'area-chart');
timeSeriesChart.vm.$nextTick(done);
@@ -94,6 +106,54 @@ describe('Time series component', () => {
});
});
+ describe('events', () => {
+ describe('datazoom', () => {
+ let eChartMock;
+ let startValue;
+ let endValue;
+
+ beforeEach(done => {
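+ // Minimal ECharts instance stub: 'on' stores handlers so the test can trigger 'datazoom' directly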
+ eChartMock = {
+ handlers: {},
+ getOption: () => ({
+ dataZoom: [
+ {
+ startValue,
+ endValue,
+ },
+ ],
+ }),
+ off: jest.fn(eChartEvent => {
+ delete eChartMock.handlers[eChartEvent];
+ }),
+ on: jest.fn((eChartEvent, fn) => {
+ eChartMock.handlers[eChartEvent] = fn;
+ }),
+ };
+
+ timeSeriesChart = makeTimeSeriesChart(mockGraphData);
+ timeSeriesChart.vm.$nextTick(() => {
+ findChart().vm.$emit('created', eChartMock);
+ done();
+ });
+ });
+
+ it('handles datazoom event from chart', () => {
+ startValue = 1577836800000; // 2020-01-01T00:00:00.000Z
+ endValue = 1577840400000; // 2020-01-01T01:00:00.000Z
+ eChartMock.handlers.datazoom();
+
+ expect(timeSeriesChart.emitted('datazoom')).toHaveLength(1);
+ expect(timeSeriesChart.emitted('datazoom')[0]).toEqual([
+ {
+ start: new Date(startValue).toISOString(),
+ end: new Date(endValue).toISOString(),
+ },
+ ]);
+ });
+ });
+ });
+
describe('methods', () => {
describe('formatTooltipText', () => {
let mockDate;
@@ -226,6 +286,8 @@ describe('Time series component', () => {
});
describe('computed', () => {
+ const getChartOptions = () => findChart().props('option');
+
describe('chartData', () => {
let chartData;
const seriesData = () => chartData[0];
@@ -254,6 +316,10 @@ describe('Time series component', () => {
it('formats line width correctly', () => {
expect(chartData[0].lineStyle.width).toBe(2);
});
+
+ it('formats line color correctly', () => {
+ expect(chartData[0].lineStyle.color).toBe(chartColorValues[0]);
+ });
});
describe('chartOptions', () => {
@@ -270,7 +336,7 @@ describe('Time series component', () => {
});
return timeSeriesChart.vm.$nextTick().then(() => {
- expect(timeSeriesChart.vm.chartOptions).toEqual(expect.objectContaining(mockOption));
+ expect(getChartOptions()).toEqual(expect.objectContaining(mockOption));
});
});
@@ -286,42 +352,106 @@ describe('Time series component', () => {
});
return timeSeriesChart.vm.$nextTick().then(() => {
- const optionSeries = timeSeriesChart.vm.chartOptions.series;
+ const optionSeries = getChartOptions().series;
expect(optionSeries.length).toEqual(2);
expect(optionSeries[0].name).toEqual(mockSeriesName);
});
});
+
+ it('additional y axis data', () => {
+ const mockCustomYAxisOption = {
+ name: 'Custom y axis label',
+ axisLabel: {
+ formatter: jest.fn(),
+ },
+ };
+
+ timeSeriesChart.setProps({
+ option: {
+ yAxis: mockCustomYAxisOption,
+ },
+ });
+
+ return timeSeriesChart.vm.$nextTick().then(() => {
+ const { yAxis } = getChartOptions();
+
+ expect(yAxis[0]).toMatchObject(mockCustomYAxisOption);
+ });
+ });
+
+ it('additional x axis data', () => {
+ const mockCustomXAxisOption = {
+ name: 'Custom x axis label',
+ };
+
+ timeSeriesChart.setProps({
+ option: {
+ xAxis: mockCustomXAxisOption,
+ },
+ });
+
+ return timeSeriesChart.vm.$nextTick().then(() => {
+ const { xAxis } = getChartOptions();
+
+ expect(xAxis).toMatchObject(mockCustomXAxisOption);
+ });
+ });
});
describe('yAxis formatter', () => {
- let format;
+ let dataFormatter;
+ let deploymentFormatter;
beforeEach(() => {
- format = timeSeriesChart.vm.chartOptions.yAxis.axisLabel.formatter;
+ dataFormatter = getChartOptions().yAxis[0].axisLabel.formatter;
+ deploymentFormatter = getChartOptions().yAxis[1].axisLabel.formatter;
});
it('rounds to 3 decimal places', () => {
- expect(format(0.88888)).toBe('0.889');
+ expect(dataFormatter(0.88888)).toBe('0.889');
+ });
+
+ it('sets the deployment formatter, which is required to display a tooltip', () => {
+ expect(deploymentFormatter).toEqual(expect.any(Function));
});
});
});
- describe('scatterSeries', () => {
+ describe('deploymentSeries', () => {
it('utilizes deployment data', () => {
- expect(timeSeriesChart.vm.scatterSeries.data).toEqual([
- ['2019-07-16T10:14:25.589Z', 0],
- ['2019-07-16T11:14:25.589Z', 0],
- ['2019-07-16T12:14:25.589Z', 0],
+ expect(timeSeriesChart.vm.deploymentSeries.yAxisIndex).toBe(1); // same as deployment y axis
+ expect(timeSeriesChart.vm.deploymentSeries.data).toEqual([
+ ['2019-07-16T10:14:25.589Z', expect.any(Number)],
+ ['2019-07-16T11:14:25.589Z', expect.any(Number)],
+ ['2019-07-16T12:14:25.589Z', expect.any(Number)],
]);
- expect(timeSeriesChart.vm.scatterSeries.symbolSize).toBe(14);
+ expect(timeSeriesChart.vm.deploymentSeries.symbolSize).toBe(14);
});
});
describe('yAxisLabel', () => {
+ it('y axis is configured correctly', () => {
+ const { yAxis } = getChartOptions();
+
+ expect(yAxis).toHaveLength(2);
+
+ const [dataAxis, deploymentAxis] = yAxis;
+
+ expect(dataAxis.boundaryGap).toHaveLength(2);
+ expect(dataAxis.scale).toBe(true);
+
+ expect(deploymentAxis.show).toBe(false);
+ expect(deploymentAxis.min).toEqual(expect.any(Number));
+ expect(deploymentAxis.max).toEqual(expect.any(Number));
+ expect(deploymentAxis.min).toBeLessThan(deploymentAxis.max);
+ });
+
it('constructs a label for the chart y-axis', () => {
- expect(timeSeriesChart.vm.yAxisLabel).toBe('Memory Used per Pod');
+ const { yAxis } = getChartOptions();
+
+ expect(yAxis[0].name).toBe('Memory Used per Pod');
});
});
});
@@ -346,7 +476,7 @@ describe('Time series component', () => {
glChartComponents.forEach(dynamicComponent => {
describe(`GitLab UI: ${dynamicComponent.chartType}`, () => {
let timeSeriesAreaChart;
- const findChart = () => timeSeriesAreaChart.find(dynamicComponent.component);
+ const findChartComponent = () => timeSeriesAreaChart.find(dynamicComponent.component);
beforeEach(done => {
timeSeriesAreaChart = makeTimeSeriesChart(mockGraphData, dynamicComponent.chartType);
@@ -358,12 +488,12 @@ describe('Time series component', () => {
});
it('is a Vue instance', () => {
- expect(findChart().exists()).toBe(true);
- expect(findChart().isVueInstance()).toBe(true);
+ expect(findChartComponent().exists()).toBe(true);
+ expect(findChartComponent().isVueInstance()).toBe(true);
});
it('receives data properties needed for proper chart render', () => {
- const props = findChart().props();
+ const props = findChartComponent().props();
expect(props.data).toBe(timeSeriesAreaChart.vm.chartData);
expect(props.option).toBe(timeSeriesAreaChart.vm.chartOptions);
@@ -376,16 +506,16 @@ describe('Time series component', () => {
timeSeriesAreaChart.vm.tooltip.title = mockTitle;
timeSeriesAreaChart.vm.$nextTick(() => {
- expect(shallowWrapperContainsSlotText(findChart(), 'tooltipTitle', mockTitle)).toBe(
- true,
- );
+ expect(
+ shallowWrapperContainsSlotText(findChartComponent(), 'tooltipTitle', mockTitle),
+ ).toBe(true);
done();
});
});
describe('when tooltip is showing deployment data', () => {
const mockSha = 'mockSha';
- const commitUrl = `${mockProjectDir}/commit/${mockSha}`;
+ const commitUrl = `${mockProjectDir}/-/commit/${mockSha}`;
beforeEach(done => {
timeSeriesAreaChart.vm.tooltip.isDeployment = true;
@@ -393,9 +523,9 @@ describe('Time series component', () => {
});
it('uses deployment title', () => {
- expect(shallowWrapperContainsSlotText(findChart(), 'tooltipTitle', 'Deployed')).toBe(
- true,
- );
+ expect(
+ shallowWrapperContainsSlotText(findChartComponent(), 'tooltipTitle', 'Deployed'),
+ ).toBe(true);
});
it('renders clickable commit sha in tooltip content', done => {
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index 85408d57dde..15c82242262 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -1,5 +1,5 @@
import { shallowMount, createLocalVue, mount } from '@vue/test-utils';
-import { GlDropdownItem, GlButton, GlToast } from '@gitlab/ui';
+import { GlDropdownItem, GlButton } from '@gitlab/ui';
import VueDraggable from 'vuedraggable';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
@@ -7,16 +7,16 @@ import statusCodes from '~/lib/utils/http_status';
import { metricStates } from '~/monitoring/constants';
import Dashboard from '~/monitoring/components/dashboard.vue';
+import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
-import DateTimePicker from '~/monitoring/components/date_time_picker/date_time_picker.vue';
import GroupEmptyState from '~/monitoring/components/group_empty_state.vue';
+import PanelType from 'ee_else_ce/monitoring/components/panel_type.vue';
import { createStore } from '~/monitoring/stores';
import * as types from '~/monitoring/stores/mutation_types';
import { setupComponentStore, propsData } from '../init_utils';
import {
- metricsGroupsAPIResponse,
+ metricsDashboardPayload,
mockedQueryResultPayload,
- mockApiEndpoint,
environmentData,
dashboardGitResponse,
} from '../mock_data';
@@ -29,10 +29,19 @@ describe('Dashboard', () => {
let wrapper;
let mock;
+ const findEnvironmentsDropdown = () => wrapper.find({ ref: 'monitorEnvironmentsDropdown' });
+ const findAllEnvironmentsDropdownItems = () => findEnvironmentsDropdown().findAll(GlDropdownItem);
+ const setSearchTerm = searchTerm => {
+ wrapper.vm.$store.commit(`monitoringDashboard/${types.SET_ENVIRONMENTS_FILTER}`, searchTerm);
+ };
+
const createShallowWrapper = (props = {}, options = {}) => {
wrapper = shallowMount(Dashboard, {
localVue,
propsData: { ...propsData, ...props },
+ methods: {
+ fetchData: jest.fn(),
+ },
store,
...options,
});
@@ -42,6 +51,9 @@ describe('Dashboard', () => {
wrapper = mount(Dashboard, {
localVue,
propsData: { ...propsData, ...props },
+ methods: {
+ fetchData: jest.fn(),
+ },
store,
...options,
});
@@ -55,63 +67,66 @@ describe('Dashboard', () => {
afterEach(() => {
if (wrapper) {
wrapper.destroy();
+ wrapper = null;
}
mock.restore();
});
describe('no metrics are available yet', () => {
beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(statusCodes.OK, metricsGroupsAPIResponse);
-
+ jest.spyOn(store, 'dispatch');
createShallowWrapper();
});
- afterEach(() => {
- wrapper.destroy();
+ it('shows the environment selector', () => {
+ expect(findEnvironmentsDropdown().exists()).toBe(true);
});
- it('shows the environment selector', () => {
- expect(wrapper.vm.$el.querySelector('.js-environments-dropdown')).toBeTruthy();
+ it('sets endpoints: logs path', () => {
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setEndpoints',
+ expect.objectContaining({ logsPath: propsData.logsPath }),
+ );
});
});
describe('no data found', () => {
- beforeEach(done => {
- mock.onGet(mockApiEndpoint).reply(statusCodes.OK, metricsGroupsAPIResponse);
-
+ beforeEach(() => {
createShallowWrapper();
- wrapper.vm.$nextTick(done);
- });
-
- afterEach(() => {
- wrapper.destroy();
+ return wrapper.vm.$nextTick();
});
it('shows the environment selector dropdown', () => {
- expect(wrapper.vm.$el.querySelector('.js-environments-dropdown')).toBeTruthy();
+ expect(findEnvironmentsDropdown().exists()).toBe(true);
});
});
describe('request information to the server', () => {
- beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
- });
+ it('calls to set time range and fetch data', () => {
+ jest.spyOn(store, 'dispatch');
- it('shows up a loading state', done => {
- createShallowWrapper({ hasMetrics: true });
+ createShallowWrapper({ hasMetrics: true }, { methods: {} });
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(wrapper.vm.emptyState).toEqual('loading');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setTimeRange',
+ expect.any(Object),
+ );
- done();
- })
- .catch(done.fail);
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
+ });
+ });
+
+ it('shows up a loading state', () => {
+ createShallowWrapper({ hasMetrics: true }, { methods: {} });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.emptyState).toEqual('loading');
+ });
});
- it('hides the group panels when showPanels is false', done => {
+ it('hides the group panels when showPanels is false', () => {
createMountedWrapper(
{ hasMetrics: true, showPanels: false },
{ stubs: ['graph-group', 'panel-type'] },
@@ -119,21 +134,13 @@ describe('Dashboard', () => {
setupComponentStore(wrapper);
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(wrapper.vm.showEmptyState).toEqual(false);
- expect(wrapper.vm.$el.querySelector('.prometheus-panel')).toEqual(null);
- // TODO: The last expectation doesn't belong here, it belongs in a `group_group_spec.js` file
- // Issue: https://gitlab.com/gitlab-org/gitlab/issues/118780
- // expect(wrapper.vm.$el.querySelector('.prometheus-graph-group')).toBeTruthy();
-
- done();
- })
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.showEmptyState).toEqual(false);
+ expect(wrapper.findAll('.prometheus-panel')).toHaveLength(0);
+ });
});
- it('fetches the metrics data with proper time window', done => {
+ it('fetches the metrics data with proper time window', () => {
jest.spyOn(store, 'dispatch');
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
@@ -143,119 +150,75 @@ describe('Dashboard', () => {
environmentData,
);
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(store.dispatch).toHaveBeenCalled();
-
- done();
- })
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.dispatch).toHaveBeenCalled();
+ });
});
});
describe('when all requests have been committed by the store', () => {
beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(statusCodes.OK, metricsGroupsAPIResponse);
-
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
setupComponentStore(wrapper);
- });
- afterEach(() => {
- wrapper.destroy();
+ return wrapper.vm.$nextTick();
});
- it('renders the environments dropdown with a number of environments', done => {
- wrapper.vm
- .$nextTick()
- .then(() => {
- const environmentDropdownItems = wrapper
- .find('.js-environments-dropdown')
- .findAll(GlDropdownItem);
-
- expect(wrapper.vm.environments.length).toEqual(environmentData.length);
- expect(environmentDropdownItems.length).toEqual(wrapper.vm.environments.length);
-
- environmentDropdownItems.wrappers.forEach((itemWrapper, index) => {
- const anchorEl = itemWrapper.find('a');
- if (anchorEl.exists() && environmentData[index].metrics_path) {
- const href = anchorEl.attributes('href');
- expect(href).toBe(environmentData[index].metrics_path);
- }
- });
+ it('renders the environments dropdown with a number of environments', () => {
+ expect(findAllEnvironmentsDropdownItems().length).toEqual(environmentData.length);
- done();
- })
- .catch(done.fail);
+ findAllEnvironmentsDropdownItems().wrappers.forEach((itemWrapper, index) => {
+ const anchorEl = itemWrapper.find('a');
+ if (anchorEl.exists() && environmentData[index].metrics_path) {
+ const href = anchorEl.attributes('href');
+ expect(href).toBe(environmentData[index].metrics_path);
+ }
+ });
});
- it('renders the environments dropdown with a single active element', done => {
- wrapper.vm
- .$nextTick()
- .then(() => {
- const environmentDropdownItems = wrapper
- .find('.js-environments-dropdown')
- .findAll(GlDropdownItem);
- const activeItem = environmentDropdownItems.wrappers.filter(itemWrapper =>
- itemWrapper.find('.active').exists(),
- );
+ it('renders the environments dropdown with a single active element', () => {
+ const activeItem = findAllEnvironmentsDropdownItems().wrappers.filter(itemWrapper =>
+ itemWrapper.find('.active').exists(),
+ );
- expect(activeItem.length).toBe(1);
- done();
- })
- .catch(done.fail);
+ expect(activeItem.length).toBe(1);
});
});
- it('hides the environments dropdown list when there is no environments', done => {
+ it('hides the environments dropdown list when there are no environments', () => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
wrapper.vm.$store.commit(
`monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
- metricsGroupsAPIResponse,
+ metricsDashboardPayload,
);
wrapper.vm.$store.commit(
`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
mockedQueryResultPayload,
);
- wrapper.vm
- .$nextTick()
- .then(() => {
- const environmentDropdownItems = wrapper
- .find('.js-environments-dropdown')
- .findAll(GlDropdownItem);
-
- expect(environmentDropdownItems.length).toEqual(0);
- done();
- })
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findAllEnvironmentsDropdownItems()).toHaveLength(0);
+ });
});
- it('renders the datetimepicker dropdown', done => {
+ it('renders the datetimepicker dropdown', () => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
setupComponentStore(wrapper);
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(wrapper.find(DateTimePicker).exists()).toBe(true);
- done();
- })
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DateTimePicker).exists()).toBe(true);
+ });
});
describe('when one of the metrics is missing', () => {
- beforeEach(done => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
-
+ beforeEach(() => {
createShallowWrapper({ hasMetrics: true });
setupComponentStore(wrapper);
- wrapper.vm.$nextTick(done);
+ return wrapper.vm.$nextTick();
});
it('shows a group empty area', () => {
@@ -275,6 +238,82 @@ describe('Dashboard', () => {
});
});
+ describe('searchable environments dropdown', () => {
+ beforeEach(() => {
+ createMountedWrapper(
+ { hasMetrics: true },
+ {
+ attachToDocument: true,
+ stubs: ['graph-group', 'panel-type'],
+ },
+ );
+
+ setupComponentStore(wrapper);
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('renders a search input', () => {
+ expect(wrapper.find({ ref: 'monitorEnvironmentsDropdownSearch' }).exists()).toBe(true);
+ });
+
+ it('renders dropdown items', () => {
+ findAllEnvironmentsDropdownItems().wrappers.forEach((itemWrapper, index) => {
+ const anchorEl = itemWrapper.find('a');
+ if (anchorEl.exists()) {
+ expect(anchorEl.text()).toBe(environmentData[index].name);
+ }
+ });
+ });
+
+ it('filters rendered dropdown items', () => {
+ const searchTerm = 'production';
+ const resultEnvs = environmentData.filter(({ name }) => name.indexOf(searchTerm) !== -1);
+ setSearchTerm(searchTerm);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findAllEnvironmentsDropdownItems().length).toEqual(resultEnvs.length);
+ });
+ });
+
+ it('does not filter dropdown items if search term is empty string', () => {
+ const searchTerm = '';
+ setSearchTerm(searchTerm);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findAllEnvironmentsDropdownItems().length).toEqual(environmentData.length);
+ });
+ });
+
+ it("shows error message if search term doesn't match", () => {
+ const searchTerm = 'does-not-exist';
+ setSearchTerm(searchTerm);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'monitorEnvironmentsDropdownMsg' }).isVisible()).toBe(true);
+ });
+ });
+
+ it('shows loading element when environments fetch is still loading', () => {
+ wrapper.vm.$store.commit(`monitoringDashboard/${types.REQUEST_ENVIRONMENTS_DATA}`);
+
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(wrapper.find({ ref: 'monitorEnvironmentsDropdownLoading' }).exists()).toBe(true);
+ })
+ .then(() => {
+ wrapper.vm.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
+ environmentData,
+ );
+ })
+ .then(() => {
+ expect(wrapper.find({ ref: 'monitorEnvironmentsDropdownLoading' }).exists()).toBe(false);
+ });
+ });
+ });
+
describe('drag and drop function', () => {
const findDraggables = () => wrapper.findAll(VueDraggable);
const findEnabledDraggables = () => findDraggables().filter(f => !f.attributes('disabled'));
@@ -282,19 +321,11 @@ describe('Dashboard', () => {
const findRearrangeButton = () => wrapper.find('.js-rearrange-button');
beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(statusCodes.OK, metricsGroupsAPIResponse);
- });
-
- beforeEach(done => {
createShallowWrapper({ hasMetrics: true });
setupComponentStore(wrapper);
- wrapper.vm.$nextTick(done);
- });
-
- afterEach(() => {
- wrapper.destroy();
+ return wrapper.vm.$nextTick();
});
it('wraps vuedraggable', () => {
@@ -308,9 +339,9 @@ describe('Dashboard', () => {
});
describe('when rearrange is enabled', () => {
- beforeEach(done => {
+ beforeEach(() => {
wrapper.setProps({ rearrangePanelsAvailable: true });
- wrapper.vm.$nextTick(done);
+ return wrapper.vm.$nextTick();
});
it('displays rearrange button', () => {
@@ -323,9 +354,9 @@ describe('Dashboard', () => {
.at(0)
.find('.js-draggable-remove');
- beforeEach(done => {
+ beforeEach(() => {
findRearrangeButton().vm.$emit('click');
- wrapper.vm.$nextTick(done);
+ return wrapper.vm.$nextTick();
});
it('it enables draggables', () => {
@@ -333,9 +364,9 @@ describe('Dashboard', () => {
expect(findEnabledDraggables()).toEqual(findDraggables());
});
- it('metrics can be swapped', done => {
+ it('metrics can be swapped', () => {
const firstDraggable = findDraggables().at(0);
- const mockMetrics = [...metricsGroupsAPIResponse.panel_groups[1].panels];
+ const mockMetrics = [...metricsDashboardPayload.panel_groups[1].panels];
const firstTitle = mockMetrics[0].title;
const secondTitle = mockMetrics[1].title;
@@ -344,33 +375,30 @@ describe('Dashboard', () => {
[mockMetrics[0], mockMetrics[1]] = [mockMetrics[1], mockMetrics[0]];
firstDraggable.vm.$emit('input', mockMetrics);
- wrapper.vm.$nextTick(() => {
+ return wrapper.vm.$nextTick(() => {
const { panels } = wrapper.vm.dashboard.panel_groups[1];
expect(panels[1].title).toEqual(firstTitle);
expect(panels[0].title).toEqual(secondTitle);
- done();
});
});
- it('shows a remove button, which removes a panel', done => {
+ it('shows a remove button, which removes a panel', () => {
expect(findFirstDraggableRemoveButton().isEmpty()).toBe(false);
expect(findDraggablePanels().length).toEqual(expectedPanelCount);
findFirstDraggableRemoveButton().trigger('click');
- wrapper.vm.$nextTick(() => {
+ return wrapper.vm.$nextTick(() => {
expect(findDraggablePanels().length).toEqual(expectedPanelCount - 1);
- done();
});
});
- it('it disables draggables when clicked again', done => {
+ it('disables draggables when clicked again', () => {
findRearrangeButton().vm.$emit('click');
- wrapper.vm.$nextTick(() => {
+ return wrapper.vm.$nextTick(() => {
expect(findRearrangeButton().attributes('pressed')).toBeFalsy();
expect(findEnabledDraggables().length).toBe(0);
- done();
});
});
});
@@ -378,17 +406,17 @@ describe('Dashboard', () => {
});
describe('cluster health', () => {
- beforeEach(done => {
+ beforeEach(() => {
mock.onGet(propsData.metricsEndpoint).reply(statusCodes.OK, JSON.stringify({}));
- createShallowWrapper({ hasMetrics: true });
+ createShallowWrapper({ hasMetrics: true, showHeader: false });
// all_dashboards is not defined in health dashboards
wrapper.vm.$store.commit(`monitoringDashboard/${types.SET_ALL_DASHBOARDS}`, undefined);
- wrapper.vm.$nextTick(done);
+ return wrapper.vm.$nextTick();
});
- afterEach(() => {
- wrapper.destroy();
+ it('hides dashboard header by default', () => {
+ expect(wrapper.find({ ref: 'prometheusGraphsHeader' }).exists()).toEqual(false);
});
it('renders correctly', () => {
@@ -400,71 +428,53 @@ describe('Dashboard', () => {
describe('dashboard edit link', () => {
const findEditLink = () => wrapper.find('.js-edit-link');
- beforeEach(done => {
- mock.onGet(mockApiEndpoint).reply(statusCodes.OK, metricsGroupsAPIResponse);
-
+ beforeEach(() => {
createShallowWrapper({ hasMetrics: true });
wrapper.vm.$store.commit(
`monitoringDashboard/${types.SET_ALL_DASHBOARDS}`,
dashboardGitResponse,
);
- wrapper.vm.$nextTick(done);
- });
-
- afterEach(() => {
- wrapper.destroy();
+ return wrapper.vm.$nextTick();
});
it('is not present for the default dashboard', () => {
expect(findEditLink().exists()).toBe(false);
});
- it('is present for a custom dashboard, and links to its edit_path', done => {
+ it('is present for a custom dashboard, and links to its edit_path', () => {
const dashboard = dashboardGitResponse[1]; // non-default dashboard
const currentDashboard = dashboard.path;
wrapper.setProps({ currentDashboard });
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(findEditLink().exists()).toBe(true);
- expect(findEditLink().attributes('href')).toBe(dashboard.project_blob_path);
- done();
- })
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findEditLink().exists()).toBe(true);
+ expect(findEditLink().attributes('href')).toBe(dashboard.project_blob_path);
+ });
});
});
describe('Dashboard dropdown', () => {
beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
-
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
wrapper.vm.$store.commit(
`monitoringDashboard/${types.SET_ALL_DASHBOARDS}`,
dashboardGitResponse,
);
+
+ return wrapper.vm.$nextTick();
});
- it('shows the dashboard dropdown', done => {
- wrapper.vm
- .$nextTick()
- .then(() => {
- const dashboardDropdown = wrapper.find(DashboardsDropdown);
+ it('shows the dashboard dropdown', () => {
+ const dashboardDropdown = wrapper.find(DashboardsDropdown);
- expect(dashboardDropdown.exists()).toBe(true);
- done();
- })
- .catch(done.fail);
+ expect(dashboardDropdown.exists()).toBe(true);
});
});
describe('external dashboard link', () => {
beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
-
createMountedWrapper(
{
hasMetrics: true,
@@ -474,80 +484,59 @@ describe('Dashboard', () => {
},
{ stubs: ['graph-group', 'panel-type'] },
);
+
+ return wrapper.vm.$nextTick();
});
- it('shows the link', done => {
- wrapper.vm
- .$nextTick()
- .then(() => {
- const externalDashboardButton = wrapper.find('.js-external-dashboard-link');
+ it('shows the link', () => {
+ const externalDashboardButton = wrapper.find('.js-external-dashboard-link');
- expect(externalDashboardButton.exists()).toBe(true);
- expect(externalDashboardButton.is(GlButton)).toBe(true);
- expect(externalDashboardButton.text()).toContain('View full dashboard');
- done();
- })
- .catch(done.fail);
+ expect(externalDashboardButton.exists()).toBe(true);
+ expect(externalDashboardButton.is(GlButton)).toBe(true);
+ expect(externalDashboardButton.text()).toContain('View full dashboard');
});
});
- // https://gitlab.com/gitlab-org/gitlab-ce/issues/66922
- // eslint-disable-next-line jest/no-disabled-tests
- describe.skip('link to chart', () => {
+ describe('Clipboard text in panels', () => {
const currentDashboard = 'TEST_DASHBOARD';
- localVue.use(GlToast);
- const link = () => wrapper.find('.js-chart-link');
- const clipboardText = () => link().element.dataset.clipboardText;
- beforeEach(done => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
+ const getClipboardTextAt = i =>
+ wrapper
+ .findAll(PanelType)
+ .at(i)
+ .props('clipboardText');
+ beforeEach(() => {
createShallowWrapper({ hasMetrics: true, currentDashboard });
- setTimeout(done);
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
+ setupComponentStore(wrapper);
- it('adds a copy button to the dropdown', () => {
- expect(link().text()).toContain('Generate link to chart');
+ return wrapper.vm.$nextTick();
});
it('contains a link to the dashboard', () => {
- expect(clipboardText()).toContain(`dashboard=${currentDashboard}`);
- expect(clipboardText()).toContain(`group=`);
- expect(clipboardText()).toContain(`title=`);
- expect(clipboardText()).toContain(`y_label=`);
+ expect(getClipboardTextAt(0)).toContain(`dashboard=${currentDashboard}`);
+ expect(getClipboardTextAt(0)).toContain(`group=`);
+ expect(getClipboardTextAt(0)).toContain(`title=`);
+ expect(getClipboardTextAt(0)).toContain(`y_label=`);
});
- it('undefined parameter is stripped', done => {
+ it('strips the undefined parameter', () => {
wrapper.setProps({ currentDashboard: undefined });
- wrapper.vm.$nextTick(() => {
- expect(clipboardText()).not.toContain(`dashboard=`);
- expect(clipboardText()).toContain(`y_label=`);
- done();
+ return wrapper.vm.$nextTick(() => {
+ expect(getClipboardTextAt(0)).not.toContain(`dashboard=`);
+ expect(getClipboardTextAt(0)).toContain(`y_label=`);
});
});
- it('null parameter is stripped', done => {
+ it('null parameter is stripped', () => {
wrapper.setProps({ currentDashboard: null });
- wrapper.vm.$nextTick(() => {
- expect(clipboardText()).not.toContain(`dashboard=`);
- expect(clipboardText()).toContain(`y_label=`);
- done();
+ return wrapper.vm.$nextTick(() => {
+ expect(getClipboardTextAt(0)).not.toContain(`dashboard=`);
+ expect(getClipboardTextAt(0)).toContain(`y_label=`);
});
});
-
- it('creates a toast when clicked', () => {
- jest.spyOn(wrapper.vm.$toast, 'show').and.stub();
-
- link().vm.$emit('click');
-
- expect(wrapper.vm.$toast.show).toHaveBeenCalled();
- });
});
});
diff --git a/spec/frontend/monitoring/components/dashboard_template_spec.js b/spec/frontend/monitoring/components/dashboard_template_spec.js
new file mode 100644
index 00000000000..38523ab82bc
--- /dev/null
+++ b/spec/frontend/monitoring/components/dashboard_template_spec.js
@@ -0,0 +1,39 @@
+import { shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import Dashboard from '~/monitoring/components/dashboard.vue';
+import { createStore } from '~/monitoring/stores';
+import { propsData } from '../init_utils';
+
+jest.mock('~/lib/utils/url_utility');
+
+describe('Dashboard template', () => {
+ let wrapper;
+ let store;
+ let mock;
+
+ beforeEach(() => {
+ store = createStore();
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ mock.restore();
+ });
+
+ it('matches the default snapshot', () => {
+ wrapper = shallowMount(Dashboard, {
+ propsData: { ...propsData },
+ methods: {
+ fetchData: jest.fn(),
+ },
+ store,
+ });
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/monitoring/components/dashboard_time_url_spec.js b/spec/frontend/monitoring/components/dashboard_time_url_spec.js
deleted file mode 100644
index 2da377eb79f..00000000000
--- a/spec/frontend/monitoring/components/dashboard_time_url_spec.js
+++ /dev/null
@@ -1,51 +0,0 @@
-import { mount } from '@vue/test-utils';
-import createFlash from '~/flash';
-import MockAdapter from 'axios-mock-adapter';
-import Dashboard from '~/monitoring/components/dashboard.vue';
-import { createStore } from '~/monitoring/stores';
-import { propsData } from '../init_utils';
-import axios from '~/lib/utils/axios_utils';
-
-jest.mock('~/flash');
-
-jest.mock('~/lib/utils/url_utility', () => ({
- getParameterValues: jest.fn().mockReturnValue('<script>alert("XSS")</script>'),
-}));
-
-describe('dashboard invalid url parameters', () => {
- let store;
- let wrapper;
- let mock;
-
- const createMountedWrapper = (props = {}, options = {}) => {
- wrapper = mount(Dashboard, {
- propsData: { ...propsData, ...props },
- store,
- ...options,
- });
- };
-
- beforeEach(() => {
- store = createStore();
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- if (wrapper) {
- wrapper.destroy();
- }
- mock.restore();
- });
-
- it('shows an error message if invalid url parameters are passed', done => {
- createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
-
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(createFlash).toHaveBeenCalled();
- done();
- })
- .catch(done.fail);
- });
-});
diff --git a/spec/frontend/monitoring/components/dashboard_time_window_spec.js b/spec/frontend/monitoring/components/dashboard_time_window_spec.js
deleted file mode 100644
index 4acc2d75b73..00000000000
--- a/spec/frontend/monitoring/components/dashboard_time_window_spec.js
+++ /dev/null
@@ -1,68 +0,0 @@
-import { mount } from '@vue/test-utils';
-import { GlDropdownItem } from '@gitlab/ui';
-import MockAdapter from 'axios-mock-adapter';
-import axios from '~/lib/utils/axios_utils';
-import statusCodes from '~/lib/utils/http_status';
-import Dashboard from '~/monitoring/components/dashboard.vue';
-import { createStore } from '~/monitoring/stores';
-import { propsData, setupComponentStore } from '../init_utils';
-import { metricsGroupsAPIResponse, mockApiEndpoint } from '../mock_data';
-
-jest.mock('~/lib/utils/url_utility', () => ({
- getParameterValues: jest.fn().mockImplementation(param => {
- if (param === 'start') return ['2019-10-01T18:27:47.000Z'];
- if (param === 'end') return ['2019-10-01T18:57:47.000Z'];
- return [];
- }),
- mergeUrlParams: jest.fn().mockReturnValue('#'),
-}));
-
-describe('dashboard time window', () => {
- let store;
- let wrapper;
- let mock;
-
- const createComponentWrapperMounted = (props = {}, options = {}) => {
- wrapper = mount(Dashboard, {
- propsData: { ...propsData, ...props },
- store,
- ...options,
- });
- };
-
- beforeEach(() => {
- store = createStore();
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- if (wrapper) {
- wrapper.destroy();
- }
- mock.restore();
- });
-
- it('shows an error message if invalid url parameters are passed', done => {
- mock.onGet(mockApiEndpoint).reply(statusCodes.OK, metricsGroupsAPIResponse);
-
- createComponentWrapperMounted({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
-
- setupComponentStore(wrapper);
-
- wrapper.vm
- .$nextTick()
- .then(() => {
- const timeWindowDropdownItems = wrapper
- .find('.js-time-window-dropdown')
- .findAll(GlDropdownItem);
- const activeItem = timeWindowDropdownItems.wrappers.filter(itemWrapper =>
- itemWrapper.find('.active').exists(),
- );
-
- expect(activeItem.length).toBe(1);
-
- done();
- })
- .catch(done.fail);
- });
-});
diff --git a/spec/frontend/monitoring/components/dashboard_url_time_spec.js b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
new file mode 100644
index 00000000000..161c64dd74b
--- /dev/null
+++ b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
@@ -0,0 +1,140 @@
+import { mount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import createFlash from '~/flash';
+import { queryToObject, redirectTo, removeParams, mergeUrlParams } from '~/lib/utils/url_utility';
+import axios from '~/lib/utils/axios_utils';
+import { mockProjectDir } from '../mock_data';
+
+import Dashboard from '~/monitoring/components/dashboard.vue';
+import { createStore } from '~/monitoring/stores';
+import { defaultTimeRange } from '~/monitoring/constants';
+import { propsData } from '../init_utils';
+
+jest.mock('~/flash');
+jest.mock('~/lib/utils/url_utility');
+
+describe('dashboard invalid url parameters', () => {
+ let store;
+ let wrapper;
+ let mock;
+
+ const createMountedWrapper = (props = { hasMetrics: true }, options = {}) => {
+ wrapper = mount(Dashboard, {
+ propsData: { ...propsData, ...props },
+ store,
+ stubs: ['graph-group', 'panel-type'],
+ ...options,
+ });
+ };
+
+ const findDateTimePicker = () => wrapper.find({ ref: 'dateTimePicker' });
+
+ beforeEach(() => {
+ store = createStore();
+ jest.spyOn(store, 'dispatch');
+
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ mock.restore();
+ queryToObject.mockReset();
+ });
+
+ it('passes default url parameters to the time range picker', () => {
+ queryToObject.mockReturnValue({});
+
+ createMountedWrapper();
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDateTimePicker().props('value')).toEqual(defaultTimeRange);
+
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setTimeRange',
+ expect.any(Object),
+ );
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
+ });
+ });
+
+ it('passes a fixed time range in the URL to the time range picker', () => {
+ const params = {
+ start: '2019-01-01T00:00:00.000Z',
+ end: '2019-01-10T00:00:00.000Z',
+ };
+
+ queryToObject.mockReturnValue(params);
+
+ createMountedWrapper();
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDateTimePicker().props('value')).toEqual(params);
+
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/setTimeRange', params);
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
+ });
+ });
+
+ it('passes a rolling time range in the URL to the time range picker', () => {
+ queryToObject.mockReturnValue({
+ duration_seconds: '120',
+ });
+
+ createMountedWrapper();
+
+ return wrapper.vm.$nextTick().then(() => {
+ const expectedTimeRange = {
+ duration: { seconds: 60 * 2 },
+ };
+
+ expect(findDateTimePicker().props('value')).toMatchObject(expectedTimeRange);
+
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setTimeRange',
+ expectedTimeRange,
+ );
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
+ });
+ });
+
+ it('shows an error message and loads a default time range if invalid url parameters are passed', () => {
+ queryToObject.mockReturnValue({
+ start: '<script>alert("XSS")</script>',
+ end: '<script>alert("XSS")</script>',
+ });
+
+ createMountedWrapper();
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(createFlash).toHaveBeenCalled();
+
+ expect(findDateTimePicker().props('value')).toEqual(defaultTimeRange);
+
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setTimeRange',
+ defaultTimeRange,
+ );
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
+ });
+ });
+
+ it('redirects to a different time range', () => {
+ const toUrl = `${mockProjectDir}/-/environments/1/metrics`;
+ removeParams.mockReturnValueOnce(toUrl);
+
+ createMountedWrapper();
+
+ return wrapper.vm.$nextTick().then(() => {
+ findDateTimePicker().vm.$emit('input', {
+ duration: { seconds: 120 },
+ });
+
+ // redirects to the dashboard URL with the new time range parameters
+ expect(mergeUrlParams).toHaveBeenCalledWith({ duration_seconds: '120' }, toUrl);
+ expect(redirectTo).toHaveBeenCalledTimes(1);
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/components/dashboards_dropdown_spec.js b/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
index 6af5ab4ba75..0bcfabe6415 100644
--- a/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
+++ b/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
@@ -35,13 +35,17 @@ describe('DashboardsDropdown', () => {
const findItems = () => wrapper.findAll(GlDropdownItem);
const findItemAt = i => wrapper.findAll(GlDropdownItem).at(i);
+ const findSearchInput = () => wrapper.find({ ref: 'monitorDashboardsDropdownSearch' });
+ const findNoItemsMsg = () => wrapper.find({ ref: 'monitorDashboardsDropdownMsg' });
+ const setSearchTerm = searchTerm => wrapper.setData({ searchTerm });
describe('when it receives dashboards data', () => {
beforeEach(() => {
wrapper = createComponent();
});
+
it('displays an item for each dashboard', () => {
- expect(wrapper.findAll(GlDropdownItem).length).toEqual(dashboardGitResponse.length);
+ expect(findItems().length).toEqual(dashboardGitResponse.length);
});
it('displays items with the dashboard display name', () => {
@@ -49,6 +53,32 @@ describe('DashboardsDropdown', () => {
expect(findItemAt(1).text()).toBe(dashboardGitResponse[1].display_name);
expect(findItemAt(2).text()).toBe(dashboardGitResponse[2].display_name);
});
+
+ it('displays a search input', () => {
+ expect(findSearchInput().isVisible()).toBe(true);
+ });
+
+ it('hides the "no items found" message by default', () => {
+ expect(findNoItemsMsg().isVisible()).toBe(false);
+ });
+
+ it('filters dropdown items when the searched item exists in the list', () => {
+ const searchTerm = 'Default';
+ setSearchTerm(searchTerm);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findItems()).toHaveLength(1);
+ });
+ });
+
+ it('shows the "no items found" message when the searched item does not exist in the list', () => {
+ const searchTerm = 'does-not-exist';
+ setSearchTerm(searchTerm);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findNoItemsMsg().isVisible()).toBe(true);
+ });
+ });
});
describe('when a system dashboard is selected', () => {
@@ -101,20 +131,17 @@ describe('DashboardsDropdown', () => {
expect(findModal().contains(DuplicateDashboardForm)).toBe(true);
});
- it('saves a new dashboard', done => {
+ it('saves a new dashboard', () => {
findModal().vm.$emit('ok', okEvent);
- waitForPromises()
- .then(() => {
- expect(okEvent.preventDefault).toHaveBeenCalled();
-
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.vm.$refs.duplicateDashboardModal.hide).toHaveBeenCalled();
- expect(wrapper.emitted().selectDashboard).toBeTruthy();
- expect(findAlert().exists()).toBe(false);
- done();
- })
- .catch(done.fail);
+ return waitForPromises().then(() => {
+ expect(okEvent.preventDefault).toHaveBeenCalled();
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.vm.$refs.duplicateDashboardModal.hide).toHaveBeenCalled();
+ expect(wrapper.emitted().selectDashboard).toBeTruthy();
+ expect(findAlert().exists()).toBe(false);
+ });
});
  describe('when a new dashboard is saved successfully', () => {
@@ -137,52 +164,42 @@ describe('DashboardsDropdown', () => {
findModal().vm.$emit('ok', okEvent);
};
- it('to the default branch, redirects to the new dashboard', done => {
+ it('to the default branch, redirects to the new dashboard', () => {
submitForm({
branch: defaultBranch,
});
- waitForPromises()
- .then(() => {
- expect(wrapper.emitted().selectDashboard[0][0]).toEqual(newDashboard);
- done();
- })
- .catch(done.fail);
+ return waitForPromises().then(() => {
+ expect(wrapper.emitted().selectDashboard[0][0]).toEqual(newDashboard);
+ });
});
- it('to a new branch refreshes in the current dashboard', done => {
+ it('to a new branch, refreshes the current dashboard', () => {
submitForm({
branch: 'another-branch',
});
- waitForPromises()
- .then(() => {
- expect(wrapper.emitted().selectDashboard[0][0]).toEqual(dashboardGitResponse[0]);
- done();
- })
- .catch(done.fail);
+ return waitForPromises().then(() => {
+ expect(wrapper.emitted().selectDashboard[0][0]).toEqual(dashboardGitResponse[0]);
+ });
});
});
- it('handles error when a new dashboard is not saved', done => {
+ it('handles error when a new dashboard is not saved', () => {
const errMsg = 'An error occurred';
duplicateDashboardAction.mockRejectedValueOnce(errMsg);
findModal().vm.$emit('ok', okEvent);
- waitForPromises()
- .then(() => {
- expect(okEvent.preventDefault).toHaveBeenCalled();
-
- expect(findAlert().exists()).toBe(true);
- expect(findAlert().text()).toBe(errMsg);
+ return waitForPromises().then(() => {
+ expect(okEvent.preventDefault).toHaveBeenCalled();
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.vm.$refs.duplicateDashboardModal.hide).not.toHaveBeenCalled();
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe(errMsg);
- done();
- })
- .catch(done.fail);
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.vm.$refs.duplicateDashboardModal.hide).not.toHaveBeenCalled();
+ });
});
it('id is correct, as the value of modal directive binding matches modal id', () => {
@@ -224,7 +241,7 @@ describe('DashboardsDropdown', () => {
it('displays an item for each dashboard', () => {
const item = wrapper.findAll({ ref: 'duplicateDashboardItem' });
- expect(findItems().length).toEqual(dashboardGitResponse.length);
+ expect(findItems()).toHaveLength(dashboardGitResponse.length);
expect(item.length).toBe(0);
});
diff --git a/spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js b/spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js
deleted file mode 100644
index 180e41861f4..00000000000
--- a/spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js
+++ /dev/null
@@ -1,170 +0,0 @@
-import { mount } from '@vue/test-utils';
-import DateTimePicker from '~/monitoring/components/date_time_picker/date_time_picker.vue';
-import { timeWindows } from '~/monitoring/constants';
-
-const timeWindowsCount = Object.keys(timeWindows).length;
-const start = '2019-10-10T07:00:00.000Z';
-const end = '2019-10-13T07:00:00.000Z';
-const selectedTimeWindowText = `3 days`;
-
-describe('DateTimePicker', () => {
- let dateTimePicker;
-
- const dropdownToggle = () => dateTimePicker.find('.dropdown-toggle');
- const dropdownMenu = () => dateTimePicker.find('.dropdown-menu');
- const applyButtonElement = () => dateTimePicker.find('button[variant="success"]').element;
- const cancelButtonElement = () => dateTimePicker.find('button.btn-secondary').element;
- const fillInputAndBlur = (input, val) => {
- dateTimePicker.find(input).setValue(val);
- return dateTimePicker.vm.$nextTick().then(() => {
- dateTimePicker.find(input).trigger('blur');
- return dateTimePicker.vm.$nextTick();
- });
- };
-
- const createComponent = props => {
- dateTimePicker = mount(DateTimePicker, {
- propsData: {
- timeWindows,
- start,
- end,
- ...props,
- },
- });
- };
-
- afterEach(() => {
- dateTimePicker.destroy();
- });
-
- it('renders dropdown toggle button with selected text', done => {
- createComponent();
- dateTimePicker.vm.$nextTick(() => {
- expect(dropdownToggle().text()).toBe(selectedTimeWindowText);
- done();
- });
- });
-
- it('renders dropdown with 2 custom time range inputs', () => {
- createComponent();
- dateTimePicker.vm.$nextTick(() => {
- expect(dateTimePicker.findAll('input').length).toBe(2);
- });
- });
-
- it('renders dropdown without a selectedTimeWindow set', done => {
- createComponent({
- selectedTimeWindow: {},
- });
- dateTimePicker.vm.$nextTick(() => {
- expect(dateTimePicker.findAll('input').length).toBe(2);
- done();
- });
- });
-
- it('renders inputs with h/m/s truncated if its all 0s', done => {
- createComponent({
- start: '2019-10-10T00:00:00.000Z',
- end: '2019-10-14T00:10:00.000Z',
- });
- dateTimePicker.vm.$nextTick(() => {
- expect(dateTimePicker.find('#custom-time-from').element.value).toBe('2019-10-10');
- expect(dateTimePicker.find('#custom-time-to').element.value).toBe('2019-10-14 00:10:00');
- done();
- });
- });
-
- it(`renders dropdown with ${timeWindowsCount} items in quick range`, done => {
- createComponent();
- dropdownToggle().trigger('click');
- dateTimePicker.vm.$nextTick(() => {
- expect(dateTimePicker.findAll('.dropdown-item').length).toBe(timeWindowsCount);
- done();
- });
- });
-
- it(`renders dropdown with correct quick range item selected`, done => {
- createComponent();
- dropdownToggle().trigger('click');
- dateTimePicker.vm.$nextTick(() => {
- expect(dateTimePicker.find('.dropdown-item.active').text()).toBe(selectedTimeWindowText);
-
- expect(dateTimePicker.find('.dropdown-item.active svg').isVisible()).toBe(true);
- done();
- });
- });
-
- it('renders a disabled apply button on wrong input', () => {
- createComponent({
- start: 'invalid-input-date',
- });
-
- expect(applyButtonElement().getAttribute('disabled')).toBe('disabled');
- });
-
- it('displays inline error message if custom time range inputs are invalid', done => {
- createComponent();
- fillInputAndBlur('#custom-time-from', '2019-10-01abc')
- .then(() => fillInputAndBlur('#custom-time-to', '2019-10-10abc'))
- .then(() => {
- expect(dateTimePicker.findAll('.invalid-feedback').length).toBe(2);
- done();
- })
- .catch(done);
- });
-
- it('keeps apply button disabled with invalid custom time range inputs', done => {
- createComponent();
- fillInputAndBlur('#custom-time-from', '2019-10-01abc')
- .then(() => fillInputAndBlur('#custom-time-to', '2019-09-19'))
- .then(() => {
- expect(applyButtonElement().getAttribute('disabled')).toBe('disabled');
- done();
- })
- .catch(done);
- });
-
- it('enables apply button with valid custom time range inputs', done => {
- createComponent();
- fillInputAndBlur('#custom-time-from', '2019-10-01')
- .then(() => fillInputAndBlur('#custom-time-to', '2019-10-19'))
- .then(() => {
- expect(applyButtonElement().getAttribute('disabled')).toBeNull();
- done();
- })
- .catch(done.fail);
- });
-
- it('emits dates in an object when apply is clicked', done => {
- createComponent();
- fillInputAndBlur('#custom-time-from', '2019-10-01')
- .then(() => fillInputAndBlur('#custom-time-to', '2019-10-19'))
- .then(() => {
- applyButtonElement().click();
-
- expect(dateTimePicker.emitted().apply).toHaveLength(1);
- expect(dateTimePicker.emitted().apply[0]).toEqual([
- {
- end: '2019-10-19T00:00:00Z',
- start: '2019-10-01T00:00:00Z',
- },
- ]);
- done();
- })
- .catch(done.fail);
- });
-
- it('hides the popover with cancel button', done => {
- createComponent();
- dropdownToggle().trigger('click');
-
- dateTimePicker.vm.$nextTick(() => {
- cancelButtonElement().click();
-
- dateTimePicker.vm.$nextTick(() => {
- expect(dropdownMenu().classes('show')).toBe(false);
- done();
- });
- });
- });
-});
diff --git a/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js b/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
index 75a488b5c7b..10fd58f749d 100644
--- a/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
+++ b/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
@@ -44,30 +44,27 @@ describe('DuplicateDashboardForm', () => {
describe('validates the file name', () => {
const findInvalidFeedback = () => findByRef('fileNameFormGroup').find('.invalid-feedback');
- it('when is empty', done => {
+ it('when is empty', () => {
setValue('fileName', '');
- wrapper.vm.$nextTick(() => {
+ return wrapper.vm.$nextTick(() => {
expect(findByRef('fileNameFormGroup').is('.is-valid')).toBe(true);
expect(findInvalidFeedback().exists()).toBe(false);
- done();
});
});
- it('when is valid', done => {
+ it('when is valid', () => {
setValue('fileName', 'my_dashboard.yml');
- wrapper.vm.$nextTick(() => {
+ return wrapper.vm.$nextTick(() => {
expect(findByRef('fileNameFormGroup').is('.is-valid')).toBe(true);
expect(findInvalidFeedback().exists()).toBe(false);
- done();
});
});
- it('when is not valid', done => {
+ it('when is not valid', () => {
setValue('fileName', 'my_dashboard.exe');
- wrapper.vm.$nextTick(() => {
+ return wrapper.vm.$nextTick(() => {
expect(findByRef('fileNameFormGroup').is('.is-invalid')).toBe(true);
expect(findInvalidFeedback().text()).toBeTruthy();
- done();
});
});
});
@@ -124,30 +121,26 @@ describe('DuplicateDashboardForm', () => {
});
});
- it('when a `default` branch option is set, branch input is invisible and ignored', done => {
+ it('when a `default` branch option is set, branch input is invisible and ignored', () => {
setChecked(wrapper.vm.$options.radioVals.DEFAULT);
setValue('branchName', 'a-new-branch');
expect(lastChange()).resolves.toMatchObject({
branch: defaultBranch,
});
- wrapper.vm.$nextTick(() => {
+
+ return wrapper.vm.$nextTick(() => {
expect(findByRef('branchName').isVisible()).toBe(false);
- done();
});
});
- it('when `new` branch option is chosen, focuses on the branch name input', done => {
+ it('when `new` branch option is chosen, focuses on the branch name input', () => {
setChecked(wrapper.vm.$options.radioVals.NEW);
- wrapper.vm
- .$nextTick()
- .then(() => {
- wrapper.find('form').trigger('change');
- expect(findByRef('branchName').is(':focus')).toBe(true);
- })
- .then(done)
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ wrapper.find('form').trigger('change');
+ expect(findByRef('branchName').is(':focus')).toBe(true);
+ });
});
});
});
diff --git a/spec/frontend/monitoring/components/graph_group_spec.js b/spec/frontend/monitoring/components/graph_group_spec.js
index 983785d0ecc..28a6af64394 100644
--- a/spec/frontend/monitoring/components/graph_group_spec.js
+++ b/spec/frontend/monitoring/components/graph_group_spec.js
@@ -32,25 +32,23 @@ describe('Graph group component', () => {
expect(findCaretIcon().props('name')).toBe('angle-down');
});
- it('should show the angle-right caret icon when the user collapses the group', done => {
+ it('should show the angle-right caret icon when the user collapses the group', () => {
wrapper.vm.collapse();
- wrapper.vm.$nextTick(() => {
+ return wrapper.vm.$nextTick(() => {
expect(findContent().isVisible()).toBe(false);
expect(findCaretIcon().props('name')).toBe('angle-right');
- done();
});
});
- it('should show the open the group when collapseGroup is set to true', done => {
+ it('should show the group as open when collapseGroup is set to true', () => {
wrapper.setProps({
collapseGroup: true,
});
- wrapper.vm.$nextTick(() => {
+ return wrapper.vm.$nextTick(() => {
expect(findContent().isVisible()).toBe(true);
expect(findCaretIcon().props('name')).toBe('angle-down');
- done();
});
});
@@ -102,13 +100,12 @@ describe('Graph group component', () => {
expect(findCaretIcon().exists()).toBe(false);
});
- it('should show the panel content when clicked', done => {
+ it('should show the panel content when clicked', () => {
wrapper.vm.collapse();
- wrapper.vm.$nextTick(() => {
+ return wrapper.vm.$nextTick(() => {
expect(findContent().isVisible()).toBe(true);
expect(findCaretIcon().exists()).toBe(false);
- done();
});
});
});
diff --git a/spec/frontend/monitoring/components/panel_type_spec.js b/spec/frontend/monitoring/components/panel_type_spec.js
new file mode 100644
index 00000000000..0d79babf386
--- /dev/null
+++ b/spec/frontend/monitoring/components/panel_type_spec.js
@@ -0,0 +1,277 @@
+import { shallowMount } from '@vue/test-utils';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import { setTestTimeout } from 'helpers/timeout';
+import invalidUrl from '~/lib/utils/invalid_url';
+import axios from '~/lib/utils/axios_utils';
+
+import PanelType from '~/monitoring/components/panel_type.vue';
+import EmptyChart from '~/monitoring/components/charts/empty_chart.vue';
+import TimeSeriesChart from '~/monitoring/components/charts/time_series.vue';
+import AnomalyChart from '~/monitoring/components/charts/anomaly.vue';
+import { anomalyMockGraphData, graphDataPrometheusQueryRange } from 'jest/monitoring/mock_data';
+import { createStore } from '~/monitoring/stores';
+
+global.IS_EE = true;
+global.URL.createObjectURL = jest.fn();
+
+const mocks = {
+ $toast: {
+ show: jest.fn(),
+ },
+};
+
+describe('Panel Type component', () => {
+ let axiosMock;
+ let store;
+ let state;
+ let wrapper;
+
+ const exampleText = 'example_text';
+
+ const findCopyLink = () => wrapper.find({ ref: 'copyChartLink' });
+
+ const createWrapper = props => {
+ wrapper = shallowMount(PanelType, {
+ propsData: {
+ ...props,
+ },
+ store,
+ mocks,
+ });
+ };
+
+ beforeEach(() => {
+ setTestTimeout(1000);
+
+ store = createStore();
+ state = store.state.monitoringDashboard;
+
+ axiosMock = new AxiosMockAdapter(axios);
+ });
+
+ afterEach(() => {
+ axiosMock.reset();
+ });
+
+ describe('When no graphData is available', () => {
+ let glEmptyChart;
+ // Deep clone object before modifying
+ const graphDataNoResult = JSON.parse(JSON.stringify(graphDataPrometheusQueryRange));
+ graphDataNoResult.metrics[0].result = [];
+
+ beforeEach(() => {
+ createWrapper({
+ graphData: graphDataNoResult,
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Empty Chart component', () => {
+ beforeEach(() => {
+ glEmptyChart = wrapper.find(EmptyChart);
+ });
+
+ it('is a Vue instance', () => {
+ expect(glEmptyChart.isVueInstance()).toBe(true);
+ });
+
+ it('receives a graph title', () => {
+ const props = glEmptyChart.props();
+
+ expect(props.graphTitle).toBe(wrapper.vm.graphData.title);
+ });
+ });
+ });
+
+ describe('when graph data is available', () => {
+ beforeEach(() => {
+ createWrapper({
+ graphData: graphDataPrometheusQueryRange,
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('sets no clipboard copy link on dropdown by default', () => {
+ expect(findCopyLink().exists()).toBe(false);
+ });
+
+ describe('Time Series Chart panel type', () => {
+ it('is rendered', () => {
+ expect(wrapper.find(TimeSeriesChart).isVueInstance()).toBe(true);
+ expect(wrapper.find(TimeSeriesChart).exists()).toBe(true);
+ });
+
+ it('includes a default group id', () => {
+ expect(wrapper.vm.groupId).toBe('panel-type-chart');
+ });
+ });
+
+ describe('Anomaly Chart panel type', () => {
+ beforeEach(() => {
+ wrapper.setProps({
+ graphData: anomalyMockGraphData,
+ });
+ return wrapper.vm.$nextTick();
+ });
+
+ it('is rendered with an anomaly chart', () => {
+ expect(wrapper.find(AnomalyChart).isVueInstance()).toBe(true);
+ expect(wrapper.find(AnomalyChart).exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('View Logs dropdown item', () => {
+ const mockLogsPath = '/path/to/logs';
+ const mockTimeRange = { duration: { seconds: 120 } };
+
+ const findTimeChart = () => wrapper.find({ ref: 'timeChart' });
+ const findViewLogsLink = () => wrapper.find({ ref: 'viewLogsLink' });
+
+ beforeEach(() => {
+ createWrapper({
+ graphData: graphDataPrometheusQueryRange,
+ });
+ return wrapper.vm.$nextTick();
+ });
+
+ it('is not present by default', () =>
+ wrapper.vm.$nextTick(() => {
+ expect(findViewLogsLink().exists()).toBe(false);
+ }));
+
+ it('is not present if a time range is not set', () => {
+ state.logsPath = mockLogsPath;
+ state.timeRange = null;
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findViewLogsLink().exists()).toBe(false);
+ });
+ });
+
+ it('is not present if the logs path is default', () => {
+ state.logsPath = invalidUrl;
+ state.timeRange = mockTimeRange;
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findViewLogsLink().exists()).toBe(false);
+ });
+ });
+
+ it('is not present if the logs path is not set', () => {
+ state.logsPath = null;
+ state.timeRange = mockTimeRange;
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findViewLogsLink().exists()).toBe(false);
+ });
+ });
+
+ it('is present when the logs path and a time range are present', () => {
+ state.logsPath = mockLogsPath;
+ state.timeRange = mockTimeRange;
+
+ return wrapper.vm.$nextTick(() => {
+ const href = `${mockLogsPath}?duration_seconds=${mockTimeRange.duration.seconds}`;
+ expect(findViewLogsLink().attributes('href')).toMatch(href);
+ });
+ });
+
+ it('is overridden when a datazoom event is received', () => {
+ state.logsPath = mockLogsPath;
+ state.timeRange = mockTimeRange;
+
+ const zoomedTimeRange = {
+ start: '2020-01-01T00:00:00.000Z',
+ end: '2020-01-01T01:00:00.000Z',
+ };
+
+ findTimeChart().vm.$emit('datazoom', zoomedTimeRange);
+
+ return wrapper.vm.$nextTick(() => {
+ const start = encodeURIComponent(zoomedTimeRange.start);
+ const end = encodeURIComponent(zoomedTimeRange.end);
+ expect(findViewLogsLink().attributes('href')).toMatch(
+ `${mockLogsPath}?start=${start}&end=${end}`,
+ );
+ });
+ });
+ });
+
+ describe('when clipboard data is available', () => {
+ const clipboardText = 'A value to copy.';
+
+ beforeEach(() => {
+ createWrapper({
+ clipboardText,
+ graphData: graphDataPrometheusQueryRange,
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('sets clipboard text on the dropdown', () => {
+ expect(findCopyLink().exists()).toBe(true);
+ expect(findCopyLink().element.dataset.clipboardText).toBe(clipboardText);
+ });
+
+ it('adds a copy button to the dropdown', () => {
+ expect(findCopyLink().text()).toContain('Generate link to chart');
+ });
+
+ it('opens a toast on click', () => {
+ findCopyLink().vm.$emit('click');
+
+ expect(wrapper.vm.$toast.show).toHaveBeenCalled();
+ });
+ });
+
+ describe('when downloading metrics data as CSV', () => {
+ beforeEach(() => {
+ graphDataPrometheusQueryRange.y_label = 'metric';
+ wrapper = shallowMount(PanelType, {
+ propsData: {
+ clipboardText: exampleText,
+ graphData: graphDataPrometheusQueryRange,
+ },
+ store,
+ });
+ return wrapper.vm.$nextTick();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('csvText', () => {
+ it('converts metrics data from json to csv', () => {
+ const header = `timestamp,${graphDataPrometheusQueryRange.y_label}`;
+ const data = graphDataPrometheusQueryRange.metrics[0].result[0].values;
+ const firstRow = `${data[0][0]},${data[0][1]}`;
+ const secondRow = `${data[1][0]},${data[1][1]}`;
+
+ expect(wrapper.vm.csvText).toBe(`${header}\r\n${firstRow}\r\n${secondRow}\r\n`);
+ });
+ });
+
+ describe('downloadCsv', () => {
+ it('produces a link with a Blob', () => {
+ expect(global.URL.createObjectURL).toHaveBeenLastCalledWith(expect.any(Blob));
+ expect(global.URL.createObjectURL).toHaveBeenLastCalledWith(
+ expect.objectContaining({
+ size: wrapper.vm.csvText.length,
+ type: 'text/plain',
+ }),
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/embed/embed_spec.js b/spec/frontend/monitoring/embed/embed_spec.js
index 831ab1ed157..3bb70a02bd9 100644
--- a/spec/frontend/monitoring/embed/embed_spec.js
+++ b/spec/frontend/monitoring/embed/embed_spec.js
@@ -26,10 +26,11 @@ describe('Embed', () => {
beforeEach(() => {
actions = {
- setFeatureFlags: () => {},
- setShowErrorBanner: () => {},
- setEndpoints: () => {},
- fetchMetricsData: () => {},
+ setFeatureFlags: jest.fn(),
+ setShowErrorBanner: jest.fn(),
+ setEndpoints: jest.fn(),
+ setTimeRange: jest.fn(),
+ fetchDashboard: jest.fn(),
};
metricsWithDataGetter = jest.fn();
@@ -76,6 +77,18 @@ describe('Embed', () => {
mountComponent();
});
+ it('calls actions to fetch data', () => {
+ const expectedTimeRangePayload = expect.objectContaining({
+ start: expect.any(String),
+ end: expect.any(String),
+ });
+
+ expect(actions.setTimeRange).toHaveBeenCalledTimes(1);
+ expect(actions.setTimeRange.mock.calls[0][1]).toEqual(expectedTimeRangePayload);
+
+ expect(actions.fetchDashboard).toHaveBeenCalled();
+ });
+
it('shows a chart when metrics are present', () => {
expect(wrapper.find('.metrics-embed').exists()).toBe(true);
expect(wrapper.find(PanelType).exists()).toBe(true);
diff --git a/spec/frontend/monitoring/init_utils.js b/spec/frontend/monitoring/init_utils.js
index 5f229cb6ee5..36c654ba7b3 100644
--- a/spec/frontend/monitoring/init_utils.js
+++ b/spec/frontend/monitoring/init_utils.js
@@ -1,6 +1,6 @@
import * as types from '~/monitoring/stores/mutation_types';
import {
- metricsGroupsAPIResponse,
+ metricsDashboardPayload,
mockedEmptyResult,
mockedQueryResultPayload,
mockedQueryResultPayloadCoresTotal,
@@ -15,6 +15,7 @@ export const propsData = {
clustersPath: '/path/to/clusters',
tagsPath: '/path/to/tags',
projectPath: '/path/to/project',
+ logsPath: '/path/to/logs',
defaultBranch: 'master',
metricsEndpoint: mockApiEndpoint,
deploymentsEndpoint: null,
@@ -23,7 +24,6 @@ export const propsData = {
emptyNoDataSvgPath: '/path/to/no-data.svg',
emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
- environmentsEndpoint: '/root/hello-prometheus/environments/35',
currentEnvironmentName: 'production',
customMetricsAvailable: false,
customMetricsPath: '',
@@ -33,7 +33,7 @@ export const propsData = {
export const setupComponentStore = wrapper => {
wrapper.vm.$store.commit(
`monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
- metricsGroupsAPIResponse,
+ metricsDashboardPayload,
);
// Load 3 panels to the dashboard, one with an empty result
diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js
index 8ed0e232775..4d83933f2b8 100644
--- a/spec/frontend/monitoring/mock_data.js
+++ b/spec/frontend/monitoring/mock_data.js
@@ -169,7 +169,7 @@ export const deploymentData = [
iid: 3,
sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
commitUrl:
- 'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
+ 'http://test.host/frontend-fixtures/environments-project/-/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
ref: {
name: 'master',
},
@@ -183,7 +183,7 @@ export const deploymentData = [
iid: 2,
sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
commitUrl:
- 'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
+ 'http://test.host/frontend-fixtures/environments-project/-/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
ref: {
name: 'master',
},
@@ -197,7 +197,7 @@ export const deploymentData = [
iid: 1,
sha: '6511e58faafaa7ad2228990ec57f19d66f7db7c2',
commitUrl:
- 'http://test.host/frontend-fixtures/environments-project/commit/6511e58faafaa7ad2228990ec57f19d66f7db7c2',
+ 'http://test.host/frontend-fixtures/environments-project/-/commit/6511e58faafaa7ad2228990ec57f19d66f7db7c2',
ref: {
name: 'update2-readme',
},
@@ -331,84 +331,17 @@ export const mockedQueryResultPayloadCoresTotal = {
],
};
-export const metricsGroupsAPIResponse = {
- dashboard: 'Environment metrics',
- panel_groups: [
- {
- group: 'Response metrics (NGINX Ingress VTS)',
- priority: 10,
- panels: [
- {
- metrics: [
- {
- id: 'response_metrics_nginx_ingress_throughput_status_code',
- label: 'Status Code',
- metric_id: 1,
- prometheus_endpoint_path:
- '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=sum%28rate%28nginx_upstream_responses_total%7Bupstream%3D~%22%25%7Bkube_namespace%7D-%25%7Bci_environment_slug%7D-.%2A%22%7D%5B2m%5D%29%29+by+%28status_code%29',
- query_range:
- 'sum(rate(nginx_upstream_responses_total{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"}[2m])) by (status_code)',
- unit: 'req / sec',
- },
- ],
- title: 'Throughput',
- type: 'area-chart',
- weight: 1,
- y_label: 'Requests / Sec',
- },
- ],
- },
- {
- group: 'System metrics (Kubernetes)',
- priority: 5,
- panels: [
- {
- title: 'Memory Usage (Pod average)',
- type: 'area-chart',
- y_label: 'Memory Used per Pod',
- weight: 2,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_average',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
- label: 'Pod average',
- unit: 'MB',
- metric_id: 17,
- prometheus_endpoint_path:
- '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
- appearance: {
- line: {
- width: 2,
- },
- },
- },
- ],
- },
- {
- title: 'Core Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Cores',
- weight: 3,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_cores_total',
- query_range:
- 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
- label: 'Total',
- unit: 'cores',
- metric_id: 13,
- },
- ],
- },
- ],
- },
- ],
-};
+const extraEnvironmentData = new Array(15).fill(null).map((_, idx) => ({
+ id: `gid://gitlab/Environments/${150 + idx}`,
+ name: `no-deployment/noop-branch-${idx}`,
+ state: 'available',
+ created_at: '2018-07-04T18:39:41.702Z',
+ updated_at: '2018-07-04T18:44:54.010Z',
+}));
export const environmentData = [
{
- id: 34,
+ id: 'gid://gitlab/Environments/34',
name: 'production',
state: 'available',
external_url: 'http://root-autodevops-deploy.my-fake-domain.com',
@@ -426,7 +359,7 @@ export const environmentData = [
},
},
{
- id: 35,
+ id: 'gid://gitlab/Environments/35',
name: 'review/noop-branch',
state: 'available',
external_url: 'http://root-autodevops-deploy-review-noop-branc-die93w.my-fake-domain.com',
@@ -443,14 +376,7 @@ export const environmentData = [
id: 128,
},
},
- {
- id: 36,
- name: 'no-deployment/noop-branch',
- state: 'available',
- created_at: '2018-07-04T18:39:41.702Z',
- updated_at: '2018-07-04T18:44:54.010Z',
- },
-];
+].concat(extraEnvironmentData);
export const metricsDashboardResponse = {
dashboard: {
@@ -517,6 +443,90 @@ export const metricsDashboardResponse = {
status: 'success',
};
+export const metricsDashboardPayload = {
+ dashboard: 'Environment metrics',
+ panel_groups: [
+ {
+ group: 'Response metrics (NGINX Ingress VTS)',
+ priority: 10,
+ panels: [
+ {
+ metrics: [
+ {
+ id: 'response_metrics_nginx_ingress_throughput_status_code',
+ label: 'Status Code',
+ metric_id: 1,
+ prometheus_endpoint_path:
+ '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=sum%28rate%28nginx_upstream_responses_total%7Bupstream%3D~%22%25%7Bkube_namespace%7D-%25%7Bci_environment_slug%7D-.%2A%22%7D%5B2m%5D%29%29+by+%28status_code%29',
+ query_range:
+ 'sum(rate(nginx_upstream_responses_total{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"}[2m])) by (status_code)',
+ unit: 'req / sec',
+ },
+ ],
+ title: 'Throughput',
+ type: 'area-chart',
+ weight: 1,
+ y_label: 'Requests / Sec',
+ },
+ ],
+ },
+ {
+ group: 'System metrics (Kubernetes)',
+ priority: 5,
+ panels: [
+ {
+ title: 'Memory Usage (Pod average)',
+ type: 'area-chart',
+ y_label: 'Memory Used per Pod',
+ weight: 2,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_memory_average',
+ query_range:
+ 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
+ label: 'Pod average',
+ unit: 'MB',
+ metric_id: 17,
+ prometheus_endpoint_path:
+ '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
+ appearance: {
+ line: {
+ width: 2,
+ },
+ },
+ },
+ ],
+ },
+ {
+ title: 'Core Usage (Total)',
+ type: 'area-chart',
+ y_label: 'Total Cores',
+ weight: 3,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_cores_total',
+ query_range:
+ 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
+ label: 'Total',
+ unit: 'cores',
+ metric_id: 13,
+ },
+ ],
+ },
+ ],
+ },
+ ],
+};
+
+const customDashboardsData = new Array(30).fill(null).map((_, idx) => ({
+ default: false,
+ display_name: `Custom Dashboard ${idx}`,
+ can_edit: true,
+ system_dashboard: false,
+ project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_${idx}.yml`,
+ path: `.gitlab/dashboards/dashboard_${idx}.yml`,
+}));
+
export const dashboardGitResponse = [
{
default: true,
@@ -526,22 +536,7 @@ export const dashboardGitResponse = [
project_blob_path: null,
path: 'config/prometheus/common_metrics.yml',
},
- {
- default: false,
- display_name: 'Custom Dashboard 1',
- can_edit: true,
- system_dashboard: false,
- project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_1.yml`,
- path: '.gitlab/dashboards/dashboard_1.yml',
- },
- {
- default: false,
- display_name: 'Custom Dashboard 2',
- can_edit: true,
- system_dashboard: false,
- project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_2.yml`,
- path: '.gitlab/dashboards/dashboard_2.yml',
- },
+ ...customDashboardsData,
];
export const graphDataPrometheusQuery = {
@@ -670,3 +665,50 @@ export const graphDataPrometheusQueryRangeMultiTrack = {
},
],
};
+
+export const stackedColumnMockedData = {
+ title: 'memories',
+ type: 'stacked-column',
+ x_label: 'x label',
+ y_label: 'y label',
+ metrics: [
+ {
+ label: 'memory_1024',
+ unit: 'count',
+ series_name: 'group 1',
+ prometheus_endpoint_path:
+ '/root/autodevops-deploy-6/-/environments/24/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
+ metric_id: 'undefined_metric_of_ages_1024',
+ metricId: 'undefined_metric_of_ages_1024',
+ result: [
+ {
+ metric: {},
+ values: [
+ ['2020-01-30 12:00:00', '5'],
+ ['2020-01-30 12:01:00', '10'],
+ ['2020-01-30 12:02:00', '15'],
+ ],
+ },
+ ],
+ },
+ {
+ label: 'memory_1000',
+ unit: 'count',
+ series_name: 'group 2',
+ prometheus_endpoint_path:
+ '/root/autodevops-deploy-6/-/environments/24/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
+ metric_id: 'undefined_metric_of_ages_1000',
+ metricId: 'undefined_metric_of_ages_1000',
+ result: [
+ {
+ metric: {},
+ values: [
+ ['2020-01-30 12:00:00', '20'],
+ ['2020-01-30 12:01:00', '25'],
+ ['2020-01-30 12:02:00', '30'],
+ ],
+ },
+ ],
+ },
+ ],
+};
diff --git a/spec/frontend/monitoring/panel_type_spec.js b/spec/frontend/monitoring/panel_type_spec.js
deleted file mode 100644
index e51b69ef14d..00000000000
--- a/spec/frontend/monitoring/panel_type_spec.js
+++ /dev/null
@@ -1,184 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import AxiosMockAdapter from 'axios-mock-adapter';
-import { setTestTimeout } from 'helpers/timeout';
-import axios from '~/lib/utils/axios_utils';
-import PanelType from '~/monitoring/components/panel_type.vue';
-import EmptyChart from '~/monitoring/components/charts/empty_chart.vue';
-import TimeSeriesChart from '~/monitoring/components/charts/time_series.vue';
-import AnomalyChart from '~/monitoring/components/charts/anomaly.vue';
-import { graphDataPrometheusQueryRange } from '../../javascripts/monitoring/mock_data';
-import { anomalyMockGraphData } from '../../frontend/monitoring/mock_data';
-import { createStore } from '~/monitoring/stores';
-
-global.IS_EE = true;
-global.URL.createObjectURL = jest.fn();
-
-describe('Panel Type component', () => {
- let axiosMock;
- let store;
- let panelType;
- const dashboardWidth = 100;
- const exampleText = 'example_text';
-
- const createWrapper = props =>
- shallowMount(PanelType, {
- propsData: {
- ...props,
- },
- store,
- });
-
- beforeEach(() => {
- setTestTimeout(1000);
- axiosMock = new AxiosMockAdapter(axios);
- });
-
- afterEach(() => {
- axiosMock.reset();
- });
-
- describe('When no graphData is available', () => {
- let glEmptyChart;
- // Deep clone object before modifying
- const graphDataNoResult = JSON.parse(JSON.stringify(graphDataPrometheusQueryRange));
- graphDataNoResult.metrics[0].result = [];
-
- beforeEach(() => {
- panelType = createWrapper({
- dashboardWidth,
- graphData: graphDataNoResult,
- });
- });
-
- afterEach(() => {
- panelType.destroy();
- });
-
- describe('Empty Chart component', () => {
- beforeEach(() => {
- glEmptyChart = panelType.find(EmptyChart);
- });
-
- it('is a Vue instance', () => {
- expect(glEmptyChart.isVueInstance()).toBe(true);
- });
-
- it('it receives a graph title', () => {
- const props = glEmptyChart.props();
-
- expect(props.graphTitle).toBe(panelType.vm.graphData.title);
- });
- });
- });
-
- describe('when graph data is available', () => {
- beforeEach(() => {
- store = createStore();
- panelType = createWrapper({
- dashboardWidth,
- graphData: graphDataPrometheusQueryRange,
- });
- });
-
- afterEach(() => {
- panelType.destroy();
- });
-
- it('sets no clipboard copy link on dropdown by default', () => {
- const link = () => panelType.find('.js-chart-link');
- expect(link().exists()).toBe(false);
- });
-
- describe('Time Series Chart panel type', () => {
- it('is rendered', () => {
- expect(panelType.find(TimeSeriesChart).isVueInstance()).toBe(true);
- expect(panelType.find(TimeSeriesChart).exists()).toBe(true);
- });
-
- it('includes a default group id', () => {
- expect(panelType.vm.groupId).toBe('panel-type-chart');
- });
- });
-
- describe('Anomaly Chart panel type', () => {
- beforeEach(done => {
- panelType.setProps({
- graphData: anomalyMockGraphData,
- });
- panelType.vm.$nextTick(done);
- });
-
- it('is rendered with an anomaly chart', () => {
- expect(panelType.find(AnomalyChart).isVueInstance()).toBe(true);
- expect(panelType.find(AnomalyChart).exists()).toBe(true);
- });
- });
- });
-
- describe('when cliboard data is available', () => {
- const clipboardText = 'A value to copy.';
-
- beforeEach(() => {
- store = createStore();
- panelType = createWrapper({
- clipboardText,
- dashboardWidth,
- graphData: graphDataPrometheusQueryRange,
- });
- });
-
- afterEach(() => {
- panelType.destroy();
- });
-
- it('sets clipboard text on the dropdown', () => {
- const link = () => panelType.find('.js-chart-link');
-
- expect(link().exists()).toBe(true);
- expect(link().element.dataset.clipboardText).toBe(clipboardText);
- });
- });
-
- describe('when downloading metrics data as CSV', () => {
- beforeEach(done => {
- graphDataPrometheusQueryRange.y_label = 'metric';
- store = createStore();
- panelType = shallowMount(PanelType, {
- propsData: {
- clipboardText: exampleText,
- dashboardWidth,
- graphData: graphDataPrometheusQueryRange,
- },
- store,
- });
- panelType.vm.$nextTick(done);
- });
-
- afterEach(() => {
- panelType.destroy();
- });
-
- describe('csvText', () => {
- it('converts metrics data from json to csv', () => {
- const header = `timestamp,${graphDataPrometheusQueryRange.y_label}`;
- const data = graphDataPrometheusQueryRange.metrics[0].result[0].values;
- const firstRow = `${data[0][0]},${data[0][1]}`;
- const secondRow = `${data[1][0]},${data[1][1]}`;
-
- expect(panelType.vm.csvText).toBe(`${header}\r\n${firstRow}\r\n${secondRow}\r\n`);
- });
- });
-
- describe('downloadCsv', () => {
- it('produces a link with a Blob', () => {
- expect(global.URL.createObjectURL).toHaveBeenLastCalledWith(expect.any(Blob));
- expect(global.URL.createObjectURL).toHaveBeenLastCalledWith(
- expect.objectContaining({
- size: panelType.vm.csvText.length,
- type: 'text/plain',
- }),
- );
- });
- });
- });
-});
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index 975bdd3a27a..11d3109fcd1 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -17,15 +17,18 @@ import {
fetchPrometheusMetrics,
fetchPrometheusMetric,
setEndpoints,
+ filterEnvironments,
setGettingStartedEmptyState,
duplicateSystemDashboard,
} from '~/monitoring/stores/actions';
+import { gqClient, parseEnvironmentsResponse } from '~/monitoring/stores/utils';
+import getEnvironments from '~/monitoring/queries/getEnvironments.query.graphql';
import storeState from '~/monitoring/stores/state';
import {
deploymentData,
environmentData,
metricsDashboardResponse,
- metricsGroupsAPIResponse,
+ metricsDashboardPayload,
dashboardGitResponse,
} from '../mock_data';
@@ -104,40 +107,105 @@ describe('Monitoring store actions', () => {
.catch(done.fail);
});
});
+
describe('fetchEnvironmentsData', () => {
- it('commits RECEIVE_ENVIRONMENTS_DATA_SUCCESS on error', done => {
- const dispatch = jest.fn();
- const { state } = store;
- state.environmentsEndpoint = '/success';
- mock.onGet(state.environmentsEndpoint).reply(200, {
- environments: environmentData,
+ const dispatch = jest.fn();
+ const { state } = store;
+ state.projectPath = 'gitlab-org/gitlab-test';
+
+ afterEach(() => {
+ resetStore(store);
+ jest.restoreAllMocks();
+ });
+
+ it('filterEnvironments commits SET_ENVIRONMENTS_FILTER and dispatches fetchEnvironmentsData', () => {
+ jest.spyOn(gqClient, 'mutate').mockReturnValue(
+ Promise.resolve({
+ data: {
+ project: {
+ data: {
+ environments: [],
+ },
+ },
+ },
+ }),
+ );
+
+ return testAction(
+ filterEnvironments,
+ {},
+ state,
+ [
+ {
+ type: 'SET_ENVIRONMENTS_FILTER',
+ payload: {},
+ },
+ ],
+ [
+ {
+ type: 'fetchEnvironmentsData',
+ },
+ ],
+ );
+ });
+
+ it('fetch environments data call includes the search param', () => {
+ const mockMutate = jest.spyOn(gqClient, 'mutate');
+ const searchTerm = 'Something';
+ const mutationVariables = {
+ mutation: getEnvironments,
+ variables: {
+ projectPath: state.projectPath,
+ search: searchTerm,
+ },
+ };
+ state.environmentsSearchTerm = searchTerm;
+ mockMutate.mockReturnValue(Promise.resolve());
+
+ return fetchEnvironmentsData({
+ state,
+ dispatch,
+ }).then(() => {
+ expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
});
- fetchEnvironmentsData({
+ });
+
+ it('commits RECEIVE_ENVIRONMENTS_DATA_SUCCESS on success', () => {
+ jest.spyOn(gqClient, 'mutate').mockReturnValue(
+ Promise.resolve({
+ data: {
+ project: {
+ data: {
+ environments: environmentData,
+ },
+ },
+ },
+ }),
+ );
+
+ return fetchEnvironmentsData({
state,
dispatch,
- })
- .then(() => {
- expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataSuccess', environmentData);
- done();
- })
- .catch(done.fail);
+ }).then(() => {
+ expect(dispatch).toHaveBeenCalledWith(
+ 'receiveEnvironmentsDataSuccess',
+ parseEnvironmentsResponse(environmentData, state.projectPath),
+ );
+ });
});
- it('commits RECEIVE_ENVIRONMENTS_DATA_FAILURE on error', done => {
- const dispatch = jest.fn();
- const { state } = store;
- state.environmentsEndpoint = '/error';
- mock.onGet(state.environmentsEndpoint).reply(500);
- fetchEnvironmentsData({
+
+ it('commits RECEIVE_ENVIRONMENTS_DATA_FAILURE on error', () => {
+ jest.spyOn(gqClient, 'mutate').mockReturnValue(Promise.reject());
+
+ return fetchEnvironmentsData({
state,
dispatch,
- })
- .then(() => {
- expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataFailure');
- done();
- })
- .catch(done.fail);
+ }).then(() => {
+ expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataFailure');
+ });
});
});
+
describe('Set endpoints', () => {
let mockedState;
beforeEach(() => {
@@ -149,7 +217,6 @@ describe('Monitoring store actions', () => {
{
metricsEndpoint: 'additional_metrics.json',
deploymentsEndpoint: 'deployments.json',
- environmentsEndpoint: 'deployments.json',
},
mockedState,
[
@@ -158,7 +225,6 @@ describe('Monitoring store actions', () => {
payload: {
metricsEndpoint: 'additional_metrics.json',
deploymentsEndpoint: 'deployments.json',
- environmentsEndpoint: 'deployments.json',
},
},
],
@@ -442,7 +508,7 @@ describe('Monitoring store actions', () => {
beforeEach(() => {
state = storeState();
[metric] = metricsDashboardResponse.dashboard.panel_groups[0].panels[0].metrics;
- [data] = metricsGroupsAPIResponse.panel_groups[0].panels[0].metrics;
+ [data] = metricsDashboardPayload.panel_groups[0].panels[0].metrics;
});
it('commits result', done => {
diff --git a/spec/frontend/monitoring/store/getters_spec.js b/spec/frontend/monitoring/store/getters_spec.js
index 9e325fe3cf9..263050b462f 100644
--- a/spec/frontend/monitoring/store/getters_spec.js
+++ b/spec/frontend/monitoring/store/getters_spec.js
@@ -3,7 +3,8 @@ import mutations from '~/monitoring/stores/mutations';
import * as types from '~/monitoring/stores/mutation_types';
import { metricStates } from '~/monitoring/constants';
import {
- metricsGroupsAPIResponse,
+ environmentData,
+ metricsDashboardPayload,
mockedEmptyResult,
mockedQueryResultPayload,
mockedQueryResultPayloadCoresTotal,
@@ -44,7 +45,7 @@ describe('Monitoring store Getters', () => {
setupState({
dashboard: { panel_groups: [] },
});
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
groups = state.dashboard.panel_groups;
});
@@ -53,21 +54,21 @@ describe('Monitoring store Getters', () => {
});
it('on an empty metric with no result, returns NO_DATA', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyResult);
expect(getMetricStates()).toEqual([metricStates.NO_DATA]);
});
it('on a metric with a result, returns OK', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
expect(getMetricStates()).toEqual([metricStates.OK]);
});
it('on a metric with an error, returns an error', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
metricId: groups[0].panels[0].metrics[0].metricId,
});
@@ -76,7 +77,7 @@ describe('Monitoring store Getters', () => {
});
it('on multiple metrics with results, returns OK', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayloadCoresTotal);
@@ -87,7 +88,7 @@ describe('Monitoring store Getters', () => {
expect(getMetricStates(state.dashboard.panel_groups[1].key)).toEqual([metricStates.OK]);
});
it('on multiple metrics errors', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
metricId: groups[0].panels[0].metrics[0].metricId,
@@ -106,7 +107,7 @@ describe('Monitoring store Getters', () => {
});
it('on multiple metrics with errors', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
  // A success in 1 group
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
@@ -168,27 +169,27 @@ describe('Monitoring store Getters', () => {
});
it('no loaded metric returns empty', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
expect(metricsWithData()).toEqual([]);
});
it('an empty metric, returns empty', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyResult);
expect(metricsWithData()).toEqual([]);
});
it('a metric with results, it returns a metric', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
expect(metricsWithData()).toEqual([mockedQueryResultPayload.metricId]);
});
it('multiple metrics with results, it return multiple metrics', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayloadCoresTotal);
@@ -199,7 +200,7 @@ describe('Monitoring store Getters', () => {
});
it('multiple metrics with results, it returns metrics filtered by group', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayloadCoresTotal);
@@ -214,4 +215,58 @@ describe('Monitoring store Getters', () => {
});
});
});
+
+ describe('filteredEnvironments', () => {
+ let state;
+ const setupState = (initState = {}) => {
+ state = {
+ ...state,
+ ...initState,
+ };
+ };
+
+ beforeAll(() => {
+ setupState({
+ environments: environmentData,
+ });
+ });
+
+ afterAll(() => {
+ state = null;
+ });
+
+ [
+ {
+ input: '',
+ output: 17,
+ },
+ {
+ input: ' ',
+ output: 17,
+ },
+ {
+ input: null,
+ output: 17,
+ },
+ {
+ input: 'does-not-exist',
+ output: 0,
+ },
+ {
+ input: 'noop-branch-',
+ output: 15,
+ },
+ {
+ input: 'noop-branch-9',
+ output: 1,
+ },
+ ].forEach(({ input, output }) => {
+ it(`filteredEnvironments returns ${output} items for ${input}`, () => {
+ setupState({
+ environmentsSearchTerm: input,
+ });
+ expect(getters.filteredEnvironments(state).length).toBe(output);
+ });
+ });
+ });
});
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index cb53ab60bdb..d9aebafb9ec 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -5,7 +5,7 @@ import * as types from '~/monitoring/stores/mutation_types';
import state from '~/monitoring/stores/state';
import { metricStates } from '~/monitoring/constants';
import {
- metricsGroupsAPIResponse,
+ metricsDashboardPayload,
deploymentData,
metricsDashboardResponse,
dashboardGitResponse,
@@ -23,7 +23,7 @@ describe('Monitoring mutations', () => {
beforeEach(() => {
stateCopy.dashboard.panel_groups = [];
- payload = metricsGroupsAPIResponse;
+ payload = metricsDashboardPayload;
});
it('adds a key to the group', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
@@ -81,18 +81,47 @@ describe('Monitoring mutations', () => {
it('should set all the endpoints', () => {
mutations[types.SET_ENDPOINTS](stateCopy, {
metricsEndpoint: 'additional_metrics.json',
- environmentsEndpoint: 'environments.json',
deploymentsEndpoint: 'deployments.json',
dashboardEndpoint: 'dashboard.json',
projectPath: '/gitlab-org/gitlab-foss',
});
expect(stateCopy.metricsEndpoint).toEqual('additional_metrics.json');
- expect(stateCopy.environmentsEndpoint).toEqual('environments.json');
expect(stateCopy.deploymentsEndpoint).toEqual('deployments.json');
expect(stateCopy.dashboardEndpoint).toEqual('dashboard.json');
expect(stateCopy.projectPath).toEqual('/gitlab-org/gitlab-foss');
});
+
+ it('should not remove previously set properties', () => {
+ const defaultLogsPath = stateCopy.logsPath;
+
+ mutations[types.SET_ENDPOINTS](stateCopy, {
+ logsPath: defaultLogsPath,
+ });
+ mutations[types.SET_ENDPOINTS](stateCopy, {
+ dashboardEndpoint: 'dashboard.json',
+ });
+ mutations[types.SET_ENDPOINTS](stateCopy, {
+ projectPath: '/gitlab-org/gitlab-foss',
+ });
+
+ expect(stateCopy).toMatchObject({
+ logsPath: defaultLogsPath,
+ dashboardEndpoint: 'dashboard.json',
+ projectPath: '/gitlab-org/gitlab-foss',
+ });
+ });
+
+ it('should not update unknown properties', () => {
+ mutations[types.SET_ENDPOINTS](stateCopy, {
+ dashboardEndpoint: 'dashboard.json',
+ someOtherProperty: 'some invalid value', // someOtherProperty is not allowed
+ });
+
+ expect(stateCopy.dashboardEndpoint).toBe('dashboard.json');
+ expect(stateCopy.someOtherProperty).toBeUndefined();
+ });
});
+
describe('Individual panel/metric results', () => {
const metricId = '12_system_metrics_kubernetes_container_memory_total';
const result = [
diff --git a/spec/frontend/monitoring/store/utils_spec.js b/spec/frontend/monitoring/store/utils_spec.js
index d562aaaefe9..d322d45457e 100644
--- a/spec/frontend/monitoring/store/utils_spec.js
+++ b/spec/frontend/monitoring/store/utils_spec.js
@@ -1,4 +1,11 @@
-import { normalizeMetric, uniqMetricsId } from '~/monitoring/stores/utils';
+import {
+ normalizeMetric,
+ uniqMetricsId,
+ parseEnvironmentsResponse,
+ removeLeadingSlash,
+} from '~/monitoring/stores/utils';
+
+const projectPath = 'gitlab-org/gitlab-test';
describe('normalizeMetric', () => {
[
@@ -32,3 +39,71 @@ describe('uniqMetricsId', () => {
});
});
});
+
+describe('parseEnvironmentsResponse', () => {
+ [
+ {
+ input: null,
+ output: [],
+ },
+ {
+ input: undefined,
+ output: [],
+ },
+ {
+ input: [],
+ output: [],
+ },
+ {
+ input: [
+ {
+ id: '1',
+ name: 'env-1',
+ },
+ ],
+ output: [
+ {
+ id: 1,
+ name: 'env-1',
+ metrics_path: `${projectPath}/environments/1/metrics`,
+ },
+ ],
+ },
+ {
+ input: [
+ {
+ id: 'gid://gitlab/Environment/12',
+ name: 'env-12',
+ },
+ ],
+ output: [
+ {
+ id: 12,
+ name: 'env-12',
+ metrics_path: `${projectPath}/environments/12/metrics`,
+ },
+ ],
+ },
+ ].forEach(({ input, output }) => {
+ it(`parseEnvironmentsResponse returns ${JSON.stringify(output)} with input ${JSON.stringify(
+ input,
+ )}`, () => {
+ expect(parseEnvironmentsResponse(input, projectPath)).toEqual(output);
+ });
+ });
+});
+
+describe('removeLeadingSlash', () => {
+ [
+ { input: null, output: '' },
+ { input: '', output: '' },
+ { input: 'gitlab-org', output: 'gitlab-org' },
+ { input: 'gitlab-org/gitlab', output: 'gitlab-org/gitlab' },
+ { input: '/gitlab-org/gitlab', output: 'gitlab-org/gitlab' },
+ { input: '////gitlab-org/gitlab', output: 'gitlab-org/gitlab' },
+ ].forEach(({ input, output }) => {
+ it(`removeLeadingSlash returns ${output} with input ${input}`, () => {
+ expect(removeLeadingSlash(input)).toEqual(output);
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/utils_spec.js b/spec/frontend/monitoring/utils_spec.js
index 9b1a331e3b5..2d9417bf971 100644
--- a/spec/frontend/monitoring/utils_spec.js
+++ b/spec/frontend/monitoring/utils_spec.js
@@ -1,14 +1,35 @@
import * as monitoringUtils from '~/monitoring/utils';
-import { timeWindows, timeWindowsKeyNames } from '~/monitoring/constants';
+import { queryToObject, mergeUrlParams, removeParams } from '~/lib/utils/url_utility';
import {
+ mockHost,
+ mockProjectDir,
graphDataPrometheusQuery,
graphDataPrometheusQueryRange,
anomalyMockGraphData,
} from './mock_data';
+jest.mock('~/lib/utils/url_utility');
+
+const mockPath = `${mockHost}${mockProjectDir}/-/environments/29/metrics`;
+
+const generatedLink = 'http://chart.link.com';
+
+const chartTitle = 'Some metric chart';
+
+const range = {
+ start: '2019-01-01T00:00:00.000Z',
+ end: '2019-01-10T00:00:00.000Z',
+};
+
+const rollingRange = {
+ duration: { seconds: 120 },
+};
+
describe('monitoring/utils', () => {
- const generatedLink = 'http://chart.link.com';
- const chartTitle = 'Some metric chart';
+ afterEach(() => {
+ mergeUrlParams.mockReset();
+ queryToObject.mockReset();
+ });
describe('trackGenerateLinkToChartEventOptions', () => {
it('should return Cluster Monitoring options if located on Cluster Health Dashboard', () => {
@@ -58,92 +79,6 @@ describe('monitoring/utils', () => {
});
});
- describe('getTimeDiff', () => {
- function secondsBetween({ start, end }) {
- return (new Date(end) - new Date(start)) / 1000;
- }
-
- function minutesBetween(timeRange) {
- return secondsBetween(timeRange) / 60;
- }
-
- function hoursBetween(timeRange) {
- return minutesBetween(timeRange) / 60;
- }
-
- it('defaults to an 8 hour (28800s) difference', () => {
- const params = monitoringUtils.getTimeDiff();
-
- expect(hoursBetween(params)).toEqual(8);
- });
-
- it('accepts time window as an argument', () => {
- const params = monitoringUtils.getTimeDiff('thirtyMinutes');
-
- expect(minutesBetween(params)).toEqual(30);
- });
-
- it('returns a value for every defined time window', () => {
- const nonDefaultWindows = Object.keys(timeWindows).filter(window => window !== 'eightHours');
-
- nonDefaultWindows.forEach(timeWindow => {
- const params = monitoringUtils.getTimeDiff(timeWindow);
-
- // Ensure we're not returning the default
- expect(hoursBetween(params)).not.toEqual(8);
- });
- });
- });
-
- describe('getTimeWindow', () => {
- [
- {
- args: [
- {
- start: '2019-10-01T18:27:47.000Z',
- end: '2019-10-01T21:27:47.000Z',
- },
- ],
- expected: timeWindowsKeyNames.threeHours,
- },
- {
- args: [
- {
- start: '2019-10-01T28:27:47.000Z',
- end: '2019-10-01T21:27:47.000Z',
- },
- ],
- expected: null,
- },
- {
- args: [
- {
- start: '',
- end: '',
- },
- ],
- expected: null,
- },
- {
- args: [
- {
- start: null,
- end: null,
- },
- ],
- expected: null,
- },
- {
- args: [{}],
- expected: null,
- },
- ].forEach(({ args, expected }) => {
- it(`returns "${expected}" with args=${JSON.stringify(args)}`, () => {
- expect(monitoringUtils.getTimeWindow(...args)).toEqual(expected);
- });
- });
- });
-
describe('graphDataValidatorForValues', () => {
/*
* When dealing with a metric using the query format, e.g.
@@ -174,193 +109,6 @@ describe('monitoring/utils', () => {
});
});
- describe('stringToISODate', () => {
- ['', 'null', undefined, 'abc'].forEach(input => {
- it(`throws error for invalid input like ${input}`, done => {
- try {
- monitoringUtils.stringToISODate(input);
- } catch (e) {
- expect(e).toBeDefined();
- done();
- }
- });
- });
- [
- {
- input: '2019-09-09 01:01:01',
- output: '2019-09-09T01:01:01Z',
- },
- {
- input: '2019-09-09 00:00:00',
- output: '2019-09-09T00:00:00Z',
- },
- {
- input: '2019-09-09 23:59:59',
- output: '2019-09-09T23:59:59Z',
- },
- {
- input: '2019-09-09',
- output: '2019-09-09T00:00:00Z',
- },
- ].forEach(({ input, output }) => {
- it(`returns ${output} from ${input}`, () => {
- expect(monitoringUtils.stringToISODate(input)).toBe(output);
- });
- });
- });
-
- describe('ISODateToString', () => {
- [
- {
- input: new Date('2019-09-09T00:00:00.000Z'),
- output: '2019-09-09 00:00:00',
- },
- {
- input: new Date('2019-09-09T07:00:00.000Z'),
- output: '2019-09-09 07:00:00',
- },
- ].forEach(({ input, output }) => {
- it(`ISODateToString return ${output} for ${input}`, () => {
- expect(monitoringUtils.ISODateToString(input)).toBe(output);
- });
- });
- });
-
- describe('truncateZerosInDateTime', () => {
- [
- {
- input: '',
- output: '',
- },
- {
- input: '2019-10-10',
- output: '2019-10-10',
- },
- {
- input: '2019-10-10 00:00:01',
- output: '2019-10-10 00:00:01',
- },
- {
- input: '2019-10-10 00:00:00',
- output: '2019-10-10',
- },
- ].forEach(({ input, output }) => {
- it(`truncateZerosInDateTime return ${output} for ${input}`, () => {
- expect(monitoringUtils.truncateZerosInDateTime(input)).toBe(output);
- });
- });
- });
-
- describe('isValidDate', () => {
- [
- {
- input: '2019-09-09T00:00:00.000Z',
- output: true,
- },
- {
- input: '2019-09-09T000:00.000Z',
- output: false,
- },
- {
- input: 'a2019-09-09T000:00.000Z',
- output: false,
- },
- {
- input: '2019-09-09T',
- output: false,
- },
- {
- input: '2019-09-09',
- output: true,
- },
- {
- input: '2019-9-9',
- output: true,
- },
- {
- input: '2019-9-',
- output: true,
- },
- {
- input: '2019--',
- output: false,
- },
- {
- input: '2019',
- output: true,
- },
- {
- input: '',
- output: false,
- },
- {
- input: null,
- output: false,
- },
- ].forEach(({ input, output }) => {
- it(`isValidDate return ${output} for ${input}`, () => {
- expect(monitoringUtils.isValidDate(input)).toBe(output);
- });
- });
- });
-
- describe('isDateTimePickerInputValid', () => {
- [
- {
- input: null,
- output: false,
- },
- {
- input: '',
- output: false,
- },
- {
- input: 'xxxx-xx-xx',
- output: false,
- },
- {
- input: '9999-99-19',
- output: false,
- },
- {
- input: '2019-19-23',
- output: false,
- },
- {
- input: '2019-09-23',
- output: true,
- },
- {
- input: '2019-09-23 x',
- output: false,
- },
- {
- input: '2019-09-29 0:0:0',
- output: false,
- },
- {
- input: '2019-09-29 00:00:00',
- output: true,
- },
- {
- input: '2019-09-29 24:24:24',
- output: false,
- },
- {
- input: '2019-09-29 23:24:24',
- output: true,
- },
- {
- input: '2019-09-29 23:24:24 ',
- output: false,
- },
- ].forEach(({ input, output }) => {
- it(`returns ${output} for ${input}`, () => {
- expect(monitoringUtils.isDateTimePickerInputValid(input)).toBe(output);
- });
- });
- });
-
describe('graphDataValidatorForAnomalyValues', () => {
let oneMetric;
let threeMetrics;
@@ -391,4 +139,75 @@ describe('monitoring/utils', () => {
expect(monitoringUtils.graphDataValidatorForAnomalyValues(fourMetrics)).toBe(false);
});
});
+
+ describe('timeRangeFromUrl', () => {
+ const { timeRangeFromUrl } = monitoringUtils;
+
+ it('returns a fixed range when the query contains `start` and `end` parameters', () => {
+ queryToObject.mockReturnValueOnce(range);
+
+ expect(timeRangeFromUrl()).toEqual(range);
+ });
+
+ it('returns a rolling range when the query contains a `duration_seconds` parameter', () => {
+ const { seconds } = rollingRange.duration;
+
+ queryToObject.mockReturnValueOnce({
+ dashboard: '.gitlab/dashboard/my_dashboard.yml',
+ duration_seconds: `${seconds}`,
+ });
+
+ expect(timeRangeFromUrl()).toEqual(rollingRange);
+ });
+
+ it('returns null when no time range parameters are given', () => {
+ const params = {
+ dashboard: '.gitlab/dashboards/custom_dashboard.yml',
+ param1: 'value1',
+ param2: 'value2',
+ };
+
+ expect(timeRangeFromUrl(params, mockPath)).toBe(null);
+ });
+ });
+
+ describe('removeTimeRangeParams', () => {
+ const { removeTimeRangeParams } = monitoringUtils;
+
+ it('returns the path without `start` and `end` parameters', () => {
+ removeParams.mockReturnValueOnce(mockPath);
+
+ expect(removeTimeRangeParams(`${mockPath}?start=${range.start}&end=${range.end}`)).toEqual(
+ mockPath,
+ );
+ });
+ });
+
+ describe('timeRangeToUrl', () => {
+ const { timeRangeToUrl } = monitoringUtils;
+
+ it('returns a URL with `start` and `end` parameters for a fixed range', () => {
+ const toUrl = `${mockPath}?start=${range.start}&end=${range.end}`;
+ const fromUrl = mockPath;
+
+ removeParams.mockReturnValueOnce(fromUrl);
+ mergeUrlParams.mockReturnValueOnce(toUrl);
+
+ expect(timeRangeToUrl(range)).toEqual(toUrl);
+ expect(mergeUrlParams).toHaveBeenCalledWith(range, fromUrl);
+ });
+
+ it('returns a URL with a `duration_seconds` parameter for a rolling range', () => {
+ const { seconds } = rollingRange.duration;
+
+ const toUrl = `${mockPath}?duration_seconds=${seconds}`;
+ const fromUrl = mockPath;
+
+ removeParams.mockReturnValueOnce(fromUrl);
+ mergeUrlParams.mockReturnValueOnce(toUrl);
+
+ expect(timeRangeToUrl(rollingRange)).toEqual(toUrl);
+ expect(mergeUrlParams).toHaveBeenCalledWith({ duration_seconds: `${seconds}` }, fromUrl);
+ });
+ });
});
diff --git a/spec/frontend/mr_popover/mr_popover_spec.js b/spec/frontend/mr_popover/mr_popover_spec.js
index 0c0d4c73d91..3f62dca4a57 100644
--- a/spec/frontend/mr_popover/mr_popover_spec.js
+++ b/spec/frontend/mr_popover/mr_popover_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import MRPopover from '~/mr_popover/components/mr_popover';
+import MRPopover from '~/mr_popover/components/mr_popover.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
describe('MR Popover', () => {
diff --git a/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap b/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap
index 1e466f266ed..2f4c114dd3d 100644
--- a/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap
+++ b/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap
@@ -7,7 +7,7 @@ exports[`JumpToNextDiscussionButton matches the snapshot 1`] = `
>
<button
class="btn btn-default discussion-next-btn"
- title="Jump to next unresolved discussion"
+ title="Jump to next unresolved thread"
>
<icon-stub
name="comment-next"
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index ceba31b1a70..a2c7f0b3767 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -87,7 +87,7 @@ describe('issue_comment_form component', () => {
expect(wrapper.vm.toggleIssueState).toHaveBeenCalled();
});
- it('should disable action button whilst submitting', done => {
+ it('should disable action button while submitting', done => {
const saveNotePromise = Promise.resolve();
wrapper.vm.note = 'hello world';
jest.spyOn(wrapper.vm, 'saveNote').mockReturnValue(saveNotePromise);
diff --git a/spec/frontend/notes/components/discussion_actions_spec.js b/spec/frontend/notes/components/discussion_actions_spec.js
index 2d95a86d8a6..5101b81e3ee 100644
--- a/spec/frontend/notes/components/discussion_actions_spec.js
+++ b/spec/frontend/notes/components/discussion_actions_spec.js
@@ -120,14 +120,5 @@ describe('DiscussionActions', () => {
.trigger('click');
expect(wrapper.vm.$emit).toHaveBeenCalledWith('resolve');
});
-
- it('emits jumpToNextDiscussion event when clicking on jump to next discussion button', () => {
- jest.spyOn(wrapper.vm, '$emit');
- wrapper
- .find(JumpToNextDiscussionButton)
- .find('button')
- .trigger('click');
- expect(wrapper.vm.$emit).toHaveBeenCalledWith('jumpToNextDiscussion');
- });
});
});
diff --git a/spec/frontend/notes/components/discussion_filter_note_spec.js b/spec/frontend/notes/components/discussion_filter_note_spec.js
index 6b5f42a84e8..4701108d315 100644
--- a/spec/frontend/notes/components/discussion_filter_note_spec.js
+++ b/spec/frontend/notes/components/discussion_filter_note_spec.js
@@ -1,93 +1,40 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
import DiscussionFilterNote from '~/notes/components/discussion_filter_note.vue';
import eventHub from '~/notes/event_hub';
-import mountComponent from '../../helpers/vue_mount_component_helper';
-
describe('DiscussionFilterNote component', () => {
- let vm;
+ let wrapper;
const createComponent = () => {
- const Component = Vue.extend(DiscussionFilterNote);
-
- return mountComponent(Component);
+ wrapper = shallowMount(DiscussionFilterNote);
};
beforeEach(() => {
- vm = createComponent();
+ createComponent();
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ wrapper = null;
});
- describe('computed', () => {
- describe('timelineContent', () => {
- it('returns string containing instruction for switching feed type', () => {
- expect(vm.timelineContent).toBe(
- "You're only seeing <b>other activity</b> in the feed. To add a comment, switch to one of the following options.",
- );
- });
- });
+ it('timelineContent renders a string containing instruction for switching feed type', () => {
+ expect(wrapper.find({ ref: 'timelineContent' }).html()).toBe(
+ "<div>You're only seeing <b>other activity</b> in the feed. To add a comment, switch to one of the following options.</div>",
+ );
});
- describe('methods', () => {
- describe('selectFilter', () => {
- it('emits `dropdownSelect` event on `eventHub` with provided param', () => {
- jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
+ it('emits `dropdownSelect` event with parameter 0 when clicking the Show all activity button', () => {
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
+ wrapper.find({ ref: 'showAllActivity' }).vm.$emit('click');
- vm.selectFilter(1);
-
- expect(eventHub.$emit).toHaveBeenCalledWith('dropdownSelect', 1);
- });
- });
+ expect(eventHub.$emit).toHaveBeenCalledWith('dropdownSelect', 0);
});
- describe('template', () => {
- it('renders component container element', () => {
- expect(vm.$el.classList.contains('discussion-filter-note')).toBe(true);
- });
-
- it('renders comment icon element', () => {
- expect(vm.$el.querySelector('.timeline-icon svg use').getAttribute('xlink:href')).toContain(
- 'comment',
- );
- });
-
- it('renders filter information note', () => {
- expect(vm.$el.querySelector('.timeline-content').innerText.trim()).toContain(
- "You're only seeing other activity in the feed. To add a comment, switch to one of the following options.",
- );
- });
-
- it('renders filter buttons', () => {
- const buttonsContainerEl = vm.$el.querySelector('.discussion-filter-actions');
-
- expect(buttonsContainerEl.querySelector('button:first-child').innerText.trim()).toContain(
- 'Show all activity',
- );
-
- expect(buttonsContainerEl.querySelector('button:last-child').innerText.trim()).toContain(
- 'Show comments only',
- );
- });
-
- it('clicking `Show all activity` button calls `selectFilter("all")` method', () => {
- const showAllBtn = vm.$el.querySelector('.discussion-filter-actions button:first-child');
- jest.spyOn(vm, 'selectFilter').mockImplementation(() => {});
-
- showAllBtn.dispatchEvent(new Event('click'));
-
- expect(vm.selectFilter).toHaveBeenCalledWith(0);
- });
-
- it('clicking `Show comments only` button calls `selectFilter("comments")` method', () => {
- const showAllBtn = vm.$el.querySelector('.discussion-filter-actions button:last-child');
- jest.spyOn(vm, 'selectFilter').mockImplementation(() => {});
-
- showAllBtn.dispatchEvent(new Event('click'));
+ it('emits `dropdownSelect` event with parameter 1 when clicking the Show comments only button', () => {
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
+ wrapper.find({ ref: 'showComments' }).vm.$emit('click');
- expect(vm.selectFilter).toHaveBeenCalledWith(1);
- });
+ expect(eventHub.$emit).toHaveBeenCalledWith('dropdownSelect', 1);
});
});
diff --git a/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js b/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js
index 58cdf3cb57e..a00dd445c4f 100644
--- a/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js
+++ b/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js
@@ -15,15 +15,4 @@ describe('JumpToNextDiscussionButton', () => {
it('matches the snapshot', () => {
expect(wrapper.vm.$el).toMatchSnapshot();
});
-
- it('emits onClick event on button click', () => {
- const button = wrapper.find({ ref: 'button' });
-
- button.trigger('click');
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.emitted().onClick).toBeTruthy();
- expect(wrapper.emitted().onClick.length).toBe(1);
- });
- });
});
diff --git a/spec/frontend/notes/components/discussion_keyboard_navigator_spec.js b/spec/frontend/notes/components/discussion_keyboard_navigator_spec.js
index b38cfa8fb4a..74e827784ec 100644
--- a/spec/frontend/notes/components/discussion_keyboard_navigator_spec.js
+++ b/spec/frontend/notes/components/discussion_keyboard_navigator_spec.js
@@ -53,13 +53,15 @@ describe('notes/components/discussion_keyboard_navigator', () => {
});
describe.each`
- isDiffView | expectedNextId | expectedPrevId
- ${true} | ${NEXT_DIFF_ID} | ${PREV_DIFF_ID}
- ${false} | ${NEXT_ID} | ${PREV_ID}
- `('when isDiffView is $isDiffView', ({ isDiffView, expectedNextId, expectedPrevId }) => {
+ currentAction | expectedNextId | expectedPrevId
+ ${'diffs'} | ${NEXT_DIFF_ID} | ${PREV_DIFF_ID}
+ ${'show'} | ${NEXT_ID} | ${PREV_ID}
+ `('when currentAction is $currentAction', ({ currentAction, expectedNextId, expectedPrevId }) => {
beforeEach(() => {
- createComponent({ propsData: { isDiffView } });
+ window.mrTabs = { currentAction };
+ createComponent();
});
+ afterEach(() => delete window.mrTabs);
it('calls jumpToNextDiscussion when pressing `n`', () => {
Mousetrap.trigger('n');
diff --git a/spec/frontend/notes/components/note_app_spec.js b/spec/frontend/notes/components/note_app_spec.js
index f9b69e72619..a51c7c57f6c 100644
--- a/spec/frontend/notes/components/note_app_spec.js
+++ b/spec/frontend/notes/components/note_app_spec.js
@@ -12,6 +12,8 @@ import '~/behaviors/markdown/render_gfm';
import * as mockData from '../../notes/mock_data';
import * as urlUtility from '~/lib/utils/url_utility';
+jest.mock('~/user_popovers', () => jest.fn());
+
setTestTimeout(1000);
describe('note_app', () => {
diff --git a/spec/frontend/notes/components/note_attachment_spec.js b/spec/frontend/notes/components/note_attachment_spec.js
index b14a518b622..9d1051676e1 100644
--- a/spec/frontend/notes/components/note_attachment_spec.js
+++ b/spec/frontend/notes/components/note_attachment_spec.js
@@ -1,23 +1,45 @@
-import Vue from 'vue';
-import noteAttachment from '~/notes/components/note_attachment.vue';
-
-describe('issue note attachment', () => {
- it('should render properly', () => {
- const props = {
- attachment: {
- filename: 'dk.png',
- image: true,
- url: '/dk.png',
+import { shallowMount } from '@vue/test-utils';
+import NoteAttachment from '~/notes/components/note_attachment.vue';
+
+describe('Issue note attachment', () => {
+ let wrapper;
+
+ const findImage = () => wrapper.find({ ref: 'attachmentImage' });
+ const findUrl = () => wrapper.find({ ref: 'attachmentUrl' });
+
+ const createComponent = attachment => {
+ wrapper = shallowMount(NoteAttachment, {
+ propsData: {
+ attachment,
},
- };
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('renders an attachment image when an image is passed in the attachment prop', () => {
+ createComponent({
+ image: 'test-image',
+ });
+
+ expect(findImage().exists()).toBe(true);
+ });
+
+ it('renders an attachment url when a url is passed in the attachment prop', () => {
+ createComponent({
+ url: 'test-url',
+ });
+
+ expect(findUrl().exists()).toBe(true);
+ });
- const Component = Vue.extend(noteAttachment);
- const vm = new Component({
- propsData: props,
- }).$mount();
+ it('does not render image and url if attachment object is empty', () => {
+ createComponent({});
- expect(vm.$el.classList.contains('note-attachment')).toBeTruthy();
- expect(vm.$el.querySelector('img').src).toContain(props.attachment.url);
- expect(vm.$el.querySelector('a').href).toContain(props.attachment.url);
+ expect(findImage().exists()).toBe(false);
+ expect(findUrl().exists()).toBe(false);
});
});
diff --git a/spec/frontend/notes/components/note_header_spec.js b/spec/frontend/notes/components/note_header_spec.js
index 9b432387654..6544ad3e1fe 100644
--- a/spec/frontend/notes/components/note_header_spec.js
+++ b/spec/frontend/notes/components/note_header_spec.js
@@ -1,125 +1,141 @@
-import Vue from 'vue';
-import noteHeader from '~/notes/components/note_header.vue';
-import createStore from '~/notes/stores';
-
-describe('note_header component', () => {
- let store;
- let vm;
- let Component;
-
- beforeEach(() => {
- Component = Vue.extend(noteHeader);
- store = createStore();
- });
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import NoteHeader from '~/notes/components/note_header.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const actions = {
+ setTargetNoteHash: jest.fn(),
+};
+
+describe('NoteHeader component', () => {
+ let wrapper;
+
+ const findActionsWrapper = () => wrapper.find({ ref: 'discussionActions' });
+ const findChevronIcon = () => wrapper.find({ ref: 'chevronIcon' });
+ const findActionText = () => wrapper.find({ ref: 'actionText' });
+ const findTimestamp = () => wrapper.find({ ref: 'noteTimestamp' });
+
+ const createComponent = props => {
+ wrapper = shallowMount(NoteHeader, {
+ localVue,
+ store: new Vuex.Store({
+ actions,
+ }),
+ propsData: {
+ ...props,
+ actionTextHtml: '',
+ noteId: '1394',
+ },
+ });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ wrapper = null;
});
- describe('individual note', () => {
- beforeEach(() => {
- vm = new Component({
- store,
- propsData: {
- actionText: 'commented',
- actionTextHtml: '',
- author: {
- avatar_url: null,
- id: 1,
- name: 'Root',
- path: '/root',
- state: 'active',
- username: 'root',
- },
- createdAt: '2017-08-02T10:51:58.559Z',
- includeToggle: false,
- noteId: '1394',
- expanded: true,
- },
- }).$mount();
+ it('does not render discussion actions when includeToggle is false', () => {
+ createComponent({
+ includeToggle: false,
});
- it('should render user information', () => {
- expect(vm.$el.querySelector('.note-header-author-name').textContent.trim()).toEqual('Root');
- expect(vm.$el.querySelector('.note-header-info a').getAttribute('href')).toEqual('/root');
- expect(vm.$el.querySelector('.note-header-info a').dataset.userId).toEqual('1');
- expect(vm.$el.querySelector('.note-header-info a').dataset.username).toEqual('root');
- expect(vm.$el.querySelector('.note-header-info a').classList).toContain('js-user-link');
+ expect(findActionsWrapper().exists()).toBe(false);
+ });
+
+ describe('when includes a toggle', () => {
+ it('renders discussion actions', () => {
+ createComponent({
+ includeToggle: true,
+ });
+
+ expect(findActionsWrapper().exists()).toBe(true);
});
- it('should render timestamp link', () => {
- expect(vm.$el.querySelector('a[href="#note_1394"]')).toBeDefined();
+ it('emits toggleHandler event on button click', () => {
+ createComponent({
+ includeToggle: true,
+ });
+
+ wrapper.find('.note-action-button').trigger('click');
+ expect(wrapper.emitted('toggleHandler')).toBeDefined();
+ expect(wrapper.emitted('toggleHandler')).toHaveLength(1);
});
- it('should not render user information when prop `author` is empty object', done => {
- vm.author = {};
- Vue.nextTick()
- .then(() => {
- expect(vm.$el.querySelector('.note-header-author-name')).toBeNull();
- })
- .then(done)
- .catch(done.fail);
+ it('has chevron-up icon if expanded prop is true', () => {
+ createComponent({
+ includeToggle: true,
+ expanded: true,
+ });
+
+ expect(findChevronIcon().classes()).toContain('fa-chevron-up');
});
- });
- describe('discussion', () => {
- beforeEach(() => {
- vm = new Component({
- store,
- propsData: {
- actionText: 'started a discussion',
- actionTextHtml: '',
- author: {
- avatar_url: null,
- id: 1,
- name: 'Root',
- path: '/root',
- state: 'active',
- username: 'root',
- },
- createdAt: '2017-08-02T10:51:58.559Z',
- includeToggle: true,
- noteId: '1395',
- expanded: true,
- },
- }).$mount();
+ it('has chevron-down icon if expanded prop is false', () => {
+ createComponent({
+ includeToggle: true,
+ expanded: false,
+ });
+
+ expect(findChevronIcon().classes()).toContain('fa-chevron-down');
});
+ });
- it('should render toggle button', () => {
- expect(vm.$el.querySelector('.js-vue-toggle-button')).toBeDefined();
+ it('renders an author link if author is passed to props', () => {
+ createComponent({
+ author: {
+ avatar_url: null,
+ id: 1,
+ name: 'Root',
+ path: '/root',
+ state: 'active',
+ username: 'root',
+ },
});
- it('emits toggle event on click', done => {
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
+ expect(wrapper.find('.js-user-link').exists()).toBe(true);
+ });
- vm.$el.querySelector('.js-vue-toggle-button').click();
+ it('renders deleted user text if author is not passed as a prop', () => {
+ createComponent();
- Vue.nextTick(() => {
- expect(vm.$emit).toHaveBeenCalledWith('toggleHandler');
- done();
- });
- });
+ expect(wrapper.text()).toContain('A deleted user');
+ });
+
+ it('does not render created at information if createdAt is not passed as a prop', () => {
+ createComponent();
- it('renders up arrow when open', done => {
- vm.expanded = true;
+ expect(findActionText().exists()).toBe(false);
+ expect(findTimestamp().exists()).toBe(false);
+ });
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.js-vue-toggle-button i').classList).toContain(
- 'fa-chevron-up',
- );
- done();
+ describe('when createdAt is passed as a prop', () => {
+ it('renders action text and a timestamp', () => {
+ createComponent({
+ createdAt: '2017-08-02T10:51:58.559Z',
});
+
+ expect(findActionText().exists()).toBe(true);
+ expect(findTimestamp().exists()).toBe(true);
});
- it('renders down arrow when closed', done => {
- vm.expanded = false;
+ it('renders correct actionText if passed', () => {
+ createComponent({
+ createdAt: '2017-08-02T10:51:58.559Z',
+ actionText: 'Test action text',
+ });
+
+ expect(findActionText().text()).toBe('Test action text');
+ });
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.js-vue-toggle-button i').classList).toContain(
- 'fa-chevron-down',
- );
- done();
+ it('calls an action when timestamp is clicked', () => {
+ createComponent({
+ createdAt: '2017-08-02T10:51:58.559Z',
});
+ findTimestamp().trigger('click');
+
+ expect(actions.setTargetNoteHash).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/notes/mock_data.js b/spec/frontend/notes/mock_data.js
index 01cb70d395c..9ed79c61c22 100644
--- a/spec/frontend/notes/mock_data.js
+++ b/spec/frontend/notes/mock_data.js
@@ -52,7 +52,7 @@ export const noteableDataMock = {
time_estimate: 0,
title: '14',
total_time_spent: 0,
- noteable_note_url: '/group/project/merge_requests/1#note_1',
+ noteable_note_url: '/group/project/-/merge_requests/1#note_1',
updated_at: '2017-08-04T09:53:01.226Z',
updated_by_id: 1,
web_url: '/gitlab-org/gitlab-foss/issues/26',
@@ -101,8 +101,8 @@ export const individualNote = {
{ name: 'art', user: { id: 1, name: 'Root', username: 'root' } },
],
toggle_award_path: '/gitlab-org/gitlab-foss/notes/1390/toggle_award_emoji',
- noteable_note_url: '/group/project/merge_requests/1#note_1',
- note_url: '/group/project/merge_requests/1#note_1',
+ noteable_note_url: '/group/project/-/merge_requests/1#note_1',
+ note_url: '/group/project/-/merge_requests/1#note_1',
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1390&user_id=1',
path: '/gitlab-org/gitlab-foss/notes/1390',
@@ -161,8 +161,8 @@ export const note = {
},
],
toggle_award_path: '/gitlab-org/gitlab-foss/notes/546/toggle_award_emoji',
- note_url: '/group/project/merge_requests/1#note_1',
- noteable_note_url: '/group/project/merge_requests/1#note_1',
+ note_url: '/group/project/-/merge_requests/1#note_1',
+ noteable_note_url: '/group/project/-/merge_requests/1#note_1',
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F7%23note_546&user_id=1',
path: '/gitlab-org/gitlab-foss/notes/546',
@@ -205,7 +205,7 @@ export const discussionMock = {
discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1',
emoji_awardable: true,
award_emoji: [],
- noteable_note_url: '/group/project/merge_requests/1#note_1',
+ noteable_note_url: '/group/project/-/merge_requests/1#note_1',
toggle_award_path: '/gitlab-org/gitlab-foss/notes/1395/toggle_award_emoji',
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1395&user_id=1',
@@ -253,7 +253,7 @@ export const discussionMock = {
emoji_awardable: true,
award_emoji: [],
toggle_award_path: '/gitlab-org/gitlab-foss/notes/1396/toggle_award_emoji',
- noteable_note_url: '/group/project/merge_requests/1#note_1',
+ noteable_note_url: '/group/project/-/merge_requests/1#note_1',
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1396&user_id=1',
path: '/gitlab-org/gitlab-foss/notes/1396',
@@ -299,7 +299,7 @@ export const discussionMock = {
discussion_id: '9e3bd2f71a01de45fd166e6719eb380ad9f270b1',
emoji_awardable: true,
award_emoji: [],
- noteable_note_url: '/group/project/merge_requests/1#note_1',
+ noteable_note_url: '/group/project/-/merge_requests/1#note_1',
toggle_award_path: '/gitlab-org/gitlab-foss/notes/1437/toggle_award_emoji',
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1437&user_id=1',
@@ -349,7 +349,7 @@ export const loggedOutnoteableData = {
can_create_note: false,
can_update: false,
},
- noteable_note_url: '/group/project/merge_requests/1#note_1',
+ noteable_note_url: '/group/project/-/merge_requests/1#note_1',
create_note_path: '/gitlab-org/gitlab-foss/notes?target_id=98&target_type=issue',
preview_note_path: '/gitlab-org/gitlab-foss/preview_markdown?target_id=98&target_type=Issue',
};
@@ -483,7 +483,7 @@ export const INDIVIDUAL_NOTE_RESPONSE_MAP = {
},
},
],
- noteable_note_url: '/group/project/merge_requests/1#note_1',
+ noteable_note_url: '/group/project/-/merge_requests/1#note_1',
toggle_award_path: '/gitlab-org/gitlab-foss/notes/1390/toggle_award_emoji',
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1390\u0026user_id=1',
@@ -528,7 +528,7 @@ export const INDIVIDUAL_NOTE_RESPONSE_MAP = {
discussion_id: '70d5c92a4039a36c70100c6691c18c27e4b0a790',
emoji_awardable: true,
award_emoji: [],
- noteable_note_url: '/group/project/merge_requests/1#note_1',
+ noteable_note_url: '/group/project/-/merge_requests/1#note_1',
toggle_award_path: '/gitlab-org/gitlab-foss/notes/1391/toggle_award_emoji',
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F26%23note_1391\u0026user_id=1',
@@ -583,7 +583,7 @@ export const INDIVIDUAL_NOTE_RESPONSE_MAP = {
discussion_id: 'a3ed36e29b1957efb3b68c53e2d7a2b24b1df052',
emoji_awardable: true,
award_emoji: [],
- noteable_note_url: '/group/project/merge_requests/1#note_1',
+ noteable_note_url: '/group/project/-/merge_requests/1#note_1',
toggle_award_path: '/gitlab-org/gitlab-foss/notes/1471/toggle_award_emoji',
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F29%23note_1471\u0026user_id=1',
@@ -635,7 +635,7 @@ export const DISCUSSION_NOTE_RESPONSE_MAP = {
emoji_awardable: true,
award_emoji: [],
toggle_award_path: '/gitlab-org/gitlab-foss/notes/1471/toggle_award_emoji',
- noteable_note_url: '/group/project/merge_requests/1#note_1',
+ noteable_note_url: '/group/project/-/merge_requests/1#note_1',
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F29%23note_1471\u0026user_id=1',
path: '/gitlab-org/gitlab-foss/notes/1471',
diff --git a/spec/frontend/notes/old_notes_spec.js b/spec/frontend/notes/old_notes_spec.js
index c1c9d5cef4a..5f7a5d57cd8 100644
--- a/spec/frontend/notes/old_notes_spec.js
+++ b/spec/frontend/notes/old_notes_spec.js
@@ -1,7 +1,6 @@
/* eslint-disable import/no-commonjs, no-new */
import $ from 'jquery';
-import _ from 'underscore';
import MockAdapter from 'axios-mock-adapter';
import '~/behaviors/markdown/render_gfm';
import { createSpyObj } from 'helpers/jest_helpers';
@@ -792,14 +791,11 @@ describe('Old Notes (~/notes.js)', () => {
});
it('should return form metadata with sanitized formContent from form reference', () => {
- jest.spyOn(_, 'escape');
-
sampleComment = '<script>alert("Boom!");</script>';
$form.find('textarea.js-note-text').val(sampleComment);
const { formContent } = notes.getFormData($form);
- expect(_.escape).toHaveBeenCalledWith(sampleComment);
expect(formContent).toEqual('&lt;script&gt;alert(&quot;Boom!&quot;);&lt;/script&gt;');
});
});
@@ -990,7 +986,6 @@ describe('Old Notes (~/notes.js)', () => {
beforeEach(() => {
notes = new Notes('', []);
- jest.spyOn(_, 'escape');
});
it('should return constructed placeholder element for system note based on form contents', () => {
diff --git a/spec/frontend/notes/stores/mutation_spec.js b/spec/frontend/notes/stores/mutation_spec.js
index 49debe348e2..46b4081f6f6 100644
--- a/spec/frontend/notes/stores/mutation_spec.js
+++ b/spec/frontend/notes/stores/mutation_spec.js
@@ -501,7 +501,6 @@ describe('Notes Store mutations', () => {
expect.objectContaining({
resolvableDiscussionsCount: 1,
unresolvedDiscussionsCount: 1,
- hasUnresolvedDiscussions: false,
}),
);
});
@@ -538,7 +537,6 @@ describe('Notes Store mutations', () => {
expect.objectContaining({
resolvableDiscussionsCount: 4,
unresolvedDiscussionsCount: 2,
- hasUnresolvedDiscussions: true,
}),
);
});
diff --git a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
index d5ce2c1ee24..9b723ccc3dc 100644
--- a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
+++ b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
@@ -39,6 +39,7 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
</form>
<gl-button-stub
+ size="md"
variant="secondary"
>
Cancel
@@ -46,6 +47,7 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
<gl-button-stub
disabled="true"
+ size="md"
variant="warning"
>
@@ -55,6 +57,7 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
<gl-button-stub
disabled="true"
+ size="md"
variant="danger"
>
action
diff --git a/spec/frontend/pages/admin/users/components/__snapshots__/user_operation_confirmation_modal_spec.js.snap b/spec/frontend/pages/admin/users/components/__snapshots__/user_operation_confirmation_modal_spec.js.snap
index 4b4e9997953..dbf8caae357 100644
--- a/spec/frontend/pages/admin/users/components/__snapshots__/user_operation_confirmation_modal_spec.js.snap
+++ b/spec/frontend/pages/admin/users/components/__snapshots__/user_operation_confirmation_modal_spec.js.snap
@@ -6,6 +6,7 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
modalid="user-operation-modal"
ok-title="action"
ok-variant="warning"
+ size="md"
title="title"
titletag="h4"
>
diff --git a/spec/frontend/performance_bar/components/request_selector_spec.js b/spec/frontend/performance_bar/components/request_selector_spec.js
index 871f0485a13..9cc8c5e73f4 100644
--- a/spec/frontend/performance_bar/components/request_selector_spec.js
+++ b/spec/frontend/performance_bar/components/request_selector_spec.js
@@ -5,7 +5,7 @@ describe('request selector', () => {
const requests = [
{
id: 'warningReq',
- url: 'https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/1/discussions.json',
+ url: 'https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/1/discussions.json',
truncatedUrl: 'discussions.json',
hasWarnings: true,
},
diff --git a/spec/frontend/performance_bar/stores/performance_bar_store_spec.js b/spec/frontend/performance_bar/stores/performance_bar_store_spec.js
index 6b7893cb523..56bab71540c 100644
--- a/spec/frontend/performance_bar/stores/performance_bar_store_spec.js
+++ b/spec/frontend/performance_bar/stores/performance_bar_store_spec.js
@@ -15,14 +15,14 @@ describe('PerformanceBarStore', () => {
});
it('keeps the last two components of the path when the last component is numeric', () => {
- store.addRequest('id', 'https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/1');
+ store.addRequest('id', 'https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/1');
expect(findUrl('id')).toEqual('merge_requests/1');
});
it('uses the last component of the path', () => {
store.addRequest(
'id',
- 'https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/1.json?serializer=widget',
+ 'https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/1.json?serializer=widget',
);
expect(findUrl('id')).toEqual('1.json?serializer=widget');
});
diff --git a/spec/frontend/projects/pipelines/charts/components/__snapshots__/pipelines_area_chart_spec.js.snap b/spec/frontend/projects/pipelines/charts/components/__snapshots__/pipelines_area_chart_spec.js.snap
new file mode 100644
index 00000000000..c15971912dd
--- /dev/null
+++ b/spec/frontend/projects/pipelines/charts/components/__snapshots__/pipelines_area_chart_spec.js.snap
@@ -0,0 +1,23 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`PipelinesAreaChart matches the snapshot 1`] = `
+<div
+ class="prepend-top-default"
+>
+ <p>
+ Some title
+ </p>
+
+ <div>
+ <glareachart-stub
+ data="[object Object],[object Object]"
+ height="300"
+ legendaveragetext="Avg"
+ legendmaxtext="Max"
+ option="[object Object]"
+ thresholds=""
+ width="0"
+ />
+ </div>
+</div>
+`;
diff --git a/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap b/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap
new file mode 100644
index 00000000000..ff0351bd099
--- /dev/null
+++ b/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap
@@ -0,0 +1,45 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`StatisticsList matches the snapshot 1`] = `
+<ul>
+ <li>
+ <span>
+ Total:
+ </span>
+
+ <strong>
+ 4 pipelines
+ </strong>
+ </li>
+
+ <li>
+ <span>
+ Successful:
+ </span>
+
+ <strong>
+ 2 pipelines
+ </strong>
+ </li>
+
+ <li>
+ <span>
+ Failed:
+ </span>
+
+ <strong>
+ 2 pipelines
+ </strong>
+ </li>
+
+ <li>
+ <span>
+ Success ratio:
+ </span>
+
+ <strong>
+ 50%
+ </strong>
+ </li>
+</ul>
+`;
diff --git a/spec/frontend/projects/pipelines/charts/components/app_spec.js b/spec/frontend/projects/pipelines/charts/components/app_spec.js
new file mode 100644
index 00000000000..883f2bec5f7
--- /dev/null
+++ b/spec/frontend/projects/pipelines/charts/components/app_spec.js
@@ -0,0 +1,72 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlColumnChart } from '@gitlab/ui/dist/charts';
+import Component from '~/projects/pipelines/charts/components/app.vue';
+import StatisticsList from '~/projects/pipelines/charts/components/statistics_list.vue';
+import PipelinesAreaChart from '~/projects/pipelines/charts/components/pipelines_area_chart.vue';
+import {
+ counts,
+ timesChartData,
+ areaChartData as lastWeekChartData,
+ areaChartData as lastMonthChartData,
+ lastYearChartData,
+} from '../mock_data';
+
+describe('ProjectsPipelinesChartsApp', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = shallowMount(Component, {
+ propsData: {
+ counts,
+ timesChartData,
+ lastWeekChartData,
+ lastMonthChartData,
+ lastYearChartData,
+ },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('overall statistics', () => {
+ it('displays the statistics list', () => {
+ const list = wrapper.find(StatisticsList);
+
+ expect(list.exists()).toBeTruthy();
+ expect(list.props('counts')).toBe(counts);
+ });
+
+ it('displays the commit duration chart', () => {
+ const chart = wrapper.find(GlColumnChart);
+
+ expect(chart.exists()).toBeTruthy();
+ expect(chart.props('yAxisTitle')).toBe('Minutes');
+ expect(chart.props('xAxisTitle')).toBe('Commit');
+ expect(chart.props('data')).toBe(wrapper.vm.timesChartTransformedData);
+ expect(chart.props('option')).toBe(wrapper.vm.$options.timesChartOptions);
+ });
+ });
+
+ describe('pipelines charts', () => {
+ it('displays 3 area charts', () => {
+ expect(wrapper.findAll(PipelinesAreaChart).length).toBe(3);
+ });
+
+ describe('displays individual charts correctly', () => {
+ it('renders with the correct data', () => {
+ const charts = wrapper.findAll(PipelinesAreaChart);
+
+ for (let i = 0; i < charts.length; i += 1) {
+ const chart = charts.at(i);
+
+ expect(chart.exists()).toBeTruthy();
+ expect(chart.props('chartData')).toBe(wrapper.vm.areaCharts[i].data);
+ expect(chart.text()).toBe(wrapper.vm.areaCharts[i].title);
+ }
+ });
+ });
+ });
+});
diff --git a/spec/frontend/projects/pipelines/charts/components/pipelines_area_chart_spec.js b/spec/frontend/projects/pipelines/charts/components/pipelines_area_chart_spec.js
new file mode 100644
index 00000000000..aea25903023
--- /dev/null
+++ b/spec/frontend/projects/pipelines/charts/components/pipelines_area_chart_spec.js
@@ -0,0 +1,30 @@
+import { mount } from '@vue/test-utils';
+import Component from '~/projects/pipelines/charts/components/pipelines_area_chart.vue';
+import { transformedAreaChartData } from '../mock_data';
+
+describe('PipelinesAreaChart', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = mount(Component, {
+ propsData: {
+ chartData: transformedAreaChartData,
+ },
+ slots: {
+ default: 'Some title',
+ },
+ stubs: {
+ GlAreaChart: true,
+ },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('matches the snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/projects/pipelines/charts/components/statistics_list_spec.js b/spec/frontend/projects/pipelines/charts/components/statistics_list_spec.js
new file mode 100644
index 00000000000..f78608e9cb2
--- /dev/null
+++ b/spec/frontend/projects/pipelines/charts/components/statistics_list_spec.js
@@ -0,0 +1,24 @@
+import { shallowMount } from '@vue/test-utils';
+import Component from '~/projects/pipelines/charts/components/statistics_list.vue';
+import { counts } from '../mock_data';
+
+describe('StatisticsList', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = shallowMount(Component, {
+ propsData: {
+ counts,
+ },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('matches the snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/projects/pipelines/charts/mock_data.js b/spec/frontend/projects/pipelines/charts/mock_data.js
new file mode 100644
index 00000000000..db5164c8f99
--- /dev/null
+++ b/spec/frontend/projects/pipelines/charts/mock_data.js
@@ -0,0 +1,33 @@
+export const counts = {
+ failed: 2,
+ success: 2,
+ total: 4,
+ successRatio: 50,
+};
+
+export const timesChartData = {
+ labels: ['as1234', 'kh423hy', 'ji56bvg', 'th23po'],
+ values: [5, 3, 7, 4],
+};
+
+export const areaChartData = {
+ labels: ['01 Jan', '02 Jan', '03 Jan', '04 Jan', '05 Jan'],
+ totals: [4, 6, 3, 6, 7],
+ success: [3, 5, 3, 3, 5],
+};
+
+export const lastYearChartData = {
+ ...areaChartData,
+ labels: ['Jan', 'Feb', 'Mar', 'Apr', 'May'],
+};
+
+export const transformedAreaChartData = [
+ {
+ name: 'all',
+ data: [['01 Jan', 4], ['02 Jan', 6], ['03 Jan', 3], ['04 Jan', 6], ['05 Jan', 7]],
+ },
+ {
+ name: 'success',
+ data: [['01 Jan', 3], ['02 Jan', 3], ['03 Jan', 3], ['04 Jan', 3], ['05 Jan', 5]],
+ },
+];
diff --git a/spec/frontend/registry/explorer/components/__snapshots__/group_empty_state_spec.js.snap b/spec/frontend/registry/explorer/components/__snapshots__/group_empty_state_spec.js.snap
new file mode 100644
index 00000000000..3761369c944
--- /dev/null
+++ b/spec/frontend/registry/explorer/components/__snapshots__/group_empty_state_spec.js.snap
@@ -0,0 +1,21 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Registry Group Empty state to match the default snapshot 1`] = `
+<div
+ class="container-message"
+ svg-path="foo"
+ title="There are no container images available in this group"
+>
+ <p
+ class="js-no-container-images-text"
+ >
+ With the Container Registry, every project can have its own space to store its Docker images. Push at least one Docker image in one of this group's projects in order to show up here.
+ <gl-link-stub
+ href="baz"
+ target="_blank"
+ >
+ More Information
+ </gl-link-stub>
+ </p>
+</div>
+`;
diff --git a/spec/frontend/registry/explorer/components/__snapshots__/project_empty_state_spec.js.snap b/spec/frontend/registry/explorer/components/__snapshots__/project_empty_state_spec.js.snap
new file mode 100644
index 00000000000..19767aefd1a
--- /dev/null
+++ b/spec/frontend/registry/explorer/components/__snapshots__/project_empty_state_spec.js.snap
@@ -0,0 +1,119 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Registry Project Empty state to match the default snapshot 1`] = `
+<div
+ class="container-message"
+ svg-path="bazFoo"
+ title="There are no container images stored for this project"
+>
+ <p
+ class="js-no-container-images-text"
+ >
+ With the Container Registry, every project can have its own space to store its Docker images.
+ <gl-link-stub
+ href="baz"
+ target="_blank"
+ >
+ More Information
+ </gl-link-stub>
+ </p>
+
+ <h5>
+ Quick Start
+ </h5>
+
+ <p
+ class="js-not-logged-in-to-registry-text"
+ >
+ If you are not already logged in, you need to authenticate to the Container Registry by using your GitLab username and password. If you have
+ <gl-link-stub
+ href="barBaz"
+ target="_blank"
+ >
+ Two-Factor Authentication
+ </gl-link-stub>
+ enabled, use a
+ <gl-link-stub
+ href="fooBaz"
+ target="_blank"
+ >
+ Personal Access Token
+ </gl-link-stub>
+ instead of a password.
+ </p>
+
+ <div
+ class="input-group append-bottom-10"
+ >
+ <input
+ class="form-control monospace"
+ readonly="readonly"
+ type="text"
+ />
+
+ <span
+ class="input-group-append"
+ >
+ <clipboard-button-stub
+ class="input-group-text"
+ cssclass="btn-default"
+ text="docker login bar"
+ title="Copy login command"
+ tooltipplacement="top"
+ />
+ </span>
+ </div>
+
+ <p />
+
+ <p>
+
+ You can add an image to this registry with the following commands:
+
+ </p>
+
+ <div
+ class="input-group append-bottom-10"
+ >
+ <input
+ class="form-control monospace"
+ readonly="readonly"
+ type="text"
+ />
+
+ <span
+ class="input-group-append"
+ >
+ <clipboard-button-stub
+ class="input-group-text"
+ cssclass="btn-default"
+ text="docker build -t foo ."
+ title="Copy build command"
+ tooltipplacement="top"
+ />
+ </span>
+ </div>
+
+ <div
+ class="input-group"
+ >
+ <input
+ class="form-control monospace"
+ readonly="readonly"
+ type="text"
+ />
+
+ <span
+ class="input-group-append"
+ >
+ <clipboard-button-stub
+ class="input-group-text"
+ cssclass="btn-default"
+ text="docker push foo"
+ title="Copy push command"
+ tooltipplacement="top"
+ />
+ </span>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/registry/explorer/components/__snapshots__/registry_breadcrumb_spec.js.snap b/spec/frontend/registry/explorer/components/__snapshots__/registry_breadcrumb_spec.js.snap
new file mode 100644
index 00000000000..feae2f629b7
--- /dev/null
+++ b/spec/frontend/registry/explorer/components/__snapshots__/registry_breadcrumb_spec.js.snap
@@ -0,0 +1,28 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Registry Breadcrumb when is rootRoute renders 1`] = `
+<ul>
+ <li
+ class="foo bar"
+ >
+ baz
+ </li>
+ <li
+ class="foo bar"
+ >
+ foo
+ </li>
+
+ <!---->
+
+ <li>
+ <a
+ class="foo"
+ >
+ <a>
+
+ </a>
+ </a>
+ </li>
+</ul>
+`;
diff --git a/spec/frontend/registry/explorer/components/group_empty_state_spec.js b/spec/frontend/registry/explorer/components/group_empty_state_spec.js
new file mode 100644
index 00000000000..1b4de534317
--- /dev/null
+++ b/spec/frontend/registry/explorer/components/group_empty_state_spec.js
@@ -0,0 +1,40 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlSprintf } from '@gitlab/ui';
+import { GlEmptyState } from '../stubs';
+import groupEmptyState from '~/registry/explorer/components/group_empty_state.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Registry Group Empty state', () => {
+ let wrapper;
+ let store;
+
+ beforeEach(() => {
+ store = new Vuex.Store({
+ state: {
+ config: {
+ noContainersImage: 'foo',
+ helpPagePath: 'baz',
+ },
+ },
+ });
+ wrapper = shallowMount(groupEmptyState, {
+ localVue,
+ store,
+ stubs: {
+ GlEmptyState,
+ GlSprintf,
+ },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('to match the default snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/registry/explorer/components/project_empty_state_spec.js b/spec/frontend/registry/explorer/components/project_empty_state_spec.js
new file mode 100644
index 00000000000..8d4b6ca60a2
--- /dev/null
+++ b/spec/frontend/registry/explorer/components/project_empty_state_spec.js
@@ -0,0 +1,44 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlSprintf } from '@gitlab/ui';
+import { GlEmptyState } from '../stubs';
+import projectEmptyState from '~/registry/explorer/components/project_empty_state.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Registry Project Empty state', () => {
+ let wrapper;
+ let store;
+
+ beforeEach(() => {
+ store = new Vuex.Store({
+ state: {
+ config: {
+ repositoryUrl: 'foo',
+ registryHostUrlWithPort: 'bar',
+ helpPagePath: 'baz',
+ twoFactorAuthHelpLink: 'barBaz',
+ personalAccessTokensHelpLink: 'fooBaz',
+ noContainersImage: 'bazFoo',
+ },
+ },
+ });
+ wrapper = shallowMount(projectEmptyState, {
+ localVue,
+ store,
+ stubs: {
+ GlEmptyState,
+ GlSprintf,
+ },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('to match the default snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/registry/explorer/components/registry_breadcrumb_spec.js b/spec/frontend/registry/explorer/components/registry_breadcrumb_spec.js
new file mode 100644
index 00000000000..7ce5ece21ee
--- /dev/null
+++ b/spec/frontend/registry/explorer/components/registry_breadcrumb_spec.js
@@ -0,0 +1,135 @@
+import { shallowMount } from '@vue/test-utils';
+
+import component from '~/registry/explorer/components/registry_breadcrumb.vue';
+
+describe('Registry Breadcrumb', () => {
+ let wrapper;
+ const nameGenerator = jest.fn();
+
+ const crumb = {
+ classList: ['foo', 'bar'],
+ tagName: 'div',
+ innerHTML: 'baz',
+ querySelector: jest.fn(),
+ children: [
+ {
+ tagName: 'a',
+ classList: ['foo'],
+ },
+ ],
+ };
+
+ const querySelectorReturnValue = {
+ classList: ['js-divider'],
+ tagName: 'svg',
+ innerHTML: 'foo',
+ };
+
+ const crumbs = [crumb, { ...crumb, innerHTML: 'foo' }, { ...crumb, classList: ['baz'] }];
+
+ const routes = [
+ { name: 'foo', meta: { nameGenerator, root: true } },
+ { name: 'baz', meta: { nameGenerator } },
+ ];
+
+ const findDivider = () => wrapper.find('.js-divider');
+ const findRootRoute = () => wrapper.find({ ref: 'rootRouteLink' });
+ const findChildRoute = () => wrapper.find({ ref: 'childRouteLink' });
+ const findLastCrumb = () => wrapper.find({ ref: 'lastCrumb' });
+
+ const mountComponent = $route => {
+ wrapper = shallowMount(component, {
+ propsData: {
+ crumbs,
+ },
+ stubs: {
+ 'router-link': { name: 'router-link', template: '<a><slot></slot></a>', props: ['to'] },
+ },
+ mocks: {
+ $route,
+ $router: {
+ options: {
+ routes,
+ },
+ },
+ },
+ });
+ };
+
+ beforeEach(() => {
+ nameGenerator.mockClear();
+ crumb.querySelector = jest.fn();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('when is rootRoute', () => {
+ beforeEach(() => {
+ mountComponent(routes[0]);
+ });
+
+ it('renders', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('contains a router-link for the child route', () => {
+ expect(findChildRoute().exists()).toBe(true);
+ });
+
+ it('the link text is calculated by nameGenerator', () => {
+ expect(nameGenerator).toHaveBeenCalledWith(routes[0]);
+ expect(nameGenerator).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('when is not rootRoute', () => {
+ beforeEach(() => {
+ crumb.querySelector.mockReturnValue(querySelectorReturnValue);
+ mountComponent(routes[1]);
+ });
+
+ it('renders a divider', () => {
+ expect(findDivider().exists()).toBe(true);
+ });
+
+ it('contains a router-link for the root route', () => {
+ expect(findRootRoute().exists()).toBe(true);
+ });
+
+ it('contains a router-link for the child route', () => {
+ expect(findChildRoute().exists()).toBe(true);
+ });
+
+ it('the link text is calculated by nameGenerator', () => {
+ expect(nameGenerator).toHaveBeenCalledWith(routes[1]);
+ expect(nameGenerator).toHaveBeenCalledTimes(2);
+ });
+ });
+
+ describe('last crumb', () => {
+ const lastChildren = crumb.children[0];
+ beforeEach(() => {
+ nameGenerator.mockReturnValue('foo');
+ mountComponent(routes[0]);
+ });
+
+ it('has the same tag as the last child of the crumbs', () => {
+ expect(findLastCrumb().is(lastChildren.tagName)).toBe(true);
+ });
+
+ it('has the same classes as the last child of the crumbs', () => {
+ expect(findLastCrumb().classes()).toEqual(lastChildren.classList);
+ });
+
+ it('has a link to the current route', () => {
+ expect(findChildRoute().props('to')).toEqual({ to: routes[0].name });
+ });
+
+ it('the link has the correct text', () => {
+ expect(findChildRoute().text()).toEqual('foo');
+ });
+ });
+});
diff --git a/spec/frontend/registry/explorer/mock_data.js b/spec/frontend/registry/explorer/mock_data.js
new file mode 100644
index 00000000000..2d8cd4e42bc
--- /dev/null
+++ b/spec/frontend/registry/explorer/mock_data.js
@@ -0,0 +1,89 @@
+export const headers = {
+ 'X-PER-PAGE': 5,
+ 'X-PAGE': 1,
+ 'X-TOTAL': 13,
+ 'X-TOTAL-PAGES': 1,
+ 'X-NEXT-PAGE': null,
+ 'X-PREVIOUS-PAGE': null,
+};
+export const reposServerResponse = [
+ {
+ destroy_path: 'path',
+ id: '123',
+ location: 'location',
+ path: 'foo',
+ tags_path: 'tags_path',
+ },
+ {
+ destroy_path: 'path_',
+ id: '456',
+ location: 'location_',
+ path: 'bar',
+ tags_path: 'tags_path_',
+ },
+];
+
+export const registryServerResponse = [
+ {
+ name: 'centos7',
+ short_revision: 'b118ab5b0',
+ revision: 'b118ab5b0e90b7cb5127db31d5321ac14961d097516a8e0e72084b6cdc783b43',
+ total_size: 679,
+ layers: 19,
+ location: 'location',
+ created_at: 1505828744434,
+ destroy_path: 'path_',
+ },
+ {
+ name: 'centos6',
+ short_revision: 'b118ab5b0',
+ revision: 'b118ab5b0e90b7cb5127db31d5321ac14961d097516a8e0e72084b6cdc783b43',
+ total_size: 679,
+ layers: 19,
+ location: 'location',
+ created_at: 1505828744434,
+ },
+];
+
+export const imagesListResponse = {
+ data: [
+ {
+ path: 'foo',
+ location: 'location',
+ destroy_path: 'path',
+ },
+ {
+ path: 'bar',
+ location: 'location-2',
+ destroy_path: 'path-2',
+ },
+ ],
+ headers,
+};
+
+export const tagsListResponse = {
+ data: [
+ {
+ tag: 'centos6',
+ revision: 'b118ab5b0e90b7cb5127db31d5321ac14961d097516a8e0e72084b6cdc783b43',
+ short_revision: 'b118ab5b0',
+ size: 19,
+ layers: 10,
+ location: 'location',
+ path: 'bar',
+ created_at: 1505828744434,
+ destroy_path: 'path',
+ },
+ {
+ tag: 'test-image',
+ revision: 'b969de599faea2b3d9b6605a8b0897261c571acaa36db1bdc7349b5775b4e0b4',
+ short_revision: 'b969de599',
+ size: 19,
+ layers: 10,
+ path: 'foo',
+ location: 'location-2',
+ created_at: 1505828744434,
+ },
+ ],
+ headers,
+};
diff --git a/spec/frontend/registry/explorer/pages/details_spec.js b/spec/frontend/registry/explorer/pages/details_spec.js
new file mode 100644
index 00000000000..48f3b0f9b65
--- /dev/null
+++ b/spec/frontend/registry/explorer/pages/details_spec.js
@@ -0,0 +1,293 @@
+import { mount } from '@vue/test-utils';
+import { GlTable, GlPagination, GlLoadingIcon } from '@gitlab/ui';
+import Tracking from '~/tracking';
+import stubChildren from 'helpers/stub_children';
+import component from '~/registry/explorer/pages/details.vue';
+import store from '~/registry/explorer/stores/';
+import { SET_MAIN_LOADING } from '~/registry/explorer/stores/mutation_types/';
+import { tagsListResponse } from '../mock_data';
+import { GlModal } from '../stubs';
+
+describe('Details Page', () => {
+ let wrapper;
+ let dispatchSpy;
+
+ const findDeleteModal = () => wrapper.find(GlModal);
+ const findPagination = () => wrapper.find(GlPagination);
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findTagsTable = () => wrapper.find(GlTable);
+ const findMainCheckbox = () => wrapper.find({ ref: 'mainCheckbox' });
+ const findFirstRowItem = ref => wrapper.find({ ref });
+ const findBulkDeleteButton = () => wrapper.find({ ref: 'bulkDeleteButton' });
+ // findAll and refs seem to not work here, so fall back to class selectors
+ const findAllDeleteButtons = () => wrapper.findAll('.js-delete-registry');
+ const findAllCheckboxes = () => wrapper.findAll('.js-row-checkbox');
+ const findCheckedCheckboxes = () => findAllCheckboxes().filter(c => c.attributes('checked'));
+
+ const routeId = window.btoa(JSON.stringify({ name: 'foo', tags_path: 'bar' }));
+
+ beforeEach(() => {
+ wrapper = mount(component, {
+ store,
+ stubs: {
+ ...stubChildren(component),
+ GlModal,
+ GlSprintf: false,
+ GlTable: false,
+ },
+ mocks: {
+ $route: {
+ params: {
+ id: routeId,
+ },
+ },
+ },
+ });
+ dispatchSpy = jest.spyOn(store, 'dispatch');
+ store.dispatch('receiveTagsListSuccess', tagsListResponse);
+ jest.spyOn(Tracking, 'event');
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when isLoading is true', () => {
+ beforeAll(() => store.commit(SET_MAIN_LOADING, true));
+
+ afterAll(() => store.commit(SET_MAIN_LOADING, false));
+
+ it('has a loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('does not have a main content', () => {
+ expect(findTagsTable().exists()).toBe(false);
+ expect(findPagination().exists()).toBe(false);
+ expect(findDeleteModal().exists()).toBe(false);
+ });
+ });
+
+ describe('table', () => {
+ it.each([
+ 'rowCheckbox',
+ 'rowName',
+ 'rowShortRevision',
+ 'rowSize',
+ 'rowTime',
+ 'singleDeleteButton',
+ ])('%s exist in the table', element => {
+ expect(findFirstRowItem(element).exists()).toBe(true);
+ });
+
+ describe('header checkbox', () => {
+ it('exists', () => {
+ expect(findMainCheckbox().exists()).toBe(true);
+ });
+
+ it('sets selectedItems and allSelected when selected', () => {
+ findMainCheckbox().vm.$emit('change');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findMainCheckbox().attributes('checked')).toBeTruthy();
+ expect(findCheckedCheckboxes()).toHaveLength(store.state.tags.length);
+ });
+ });
+
+ it('unsets selectedItems and allSelected when deselected', () => {
+ wrapper.setData({ selectedItems: [1, 2], selectAllChecked: true });
+ findMainCheckbox().vm.$emit('change');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findMainCheckbox().attributes('checked')).toBe(undefined);
+ expect(findCheckedCheckboxes()).toHaveLength(0);
+ });
+ });
+ });
+
+ describe('row checkbox', () => {
+ it('adds the item to selectedItems when selected', () => {
+ findFirstRowItem('rowCheckbox').vm.$emit('change');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.selectedItems).toEqual([1]);
+ expect(findFirstRowItem('rowCheckbox').attributes('checked')).toBeTruthy();
+ });
+ });
+
+ it('removes the index from selectedItems when deselected', () => {
+ wrapper.setData({ selectedItems: [1] });
+ findFirstRowItem('rowCheckbox').vm.$emit('change');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.selectedItems.length).toBe(0);
+ expect(findFirstRowItem('rowCheckbox').attributes('checked')).toBe(undefined);
+ });
+ });
+ });
+
+ describe('header delete button', () => {
+ it('exists', () => {
+ expect(findBulkDeleteButton().exists()).toBe(true);
+ });
+
+ it('is disabled if no item is selected', () => {
+ expect(findBulkDeleteButton().attributes('disabled')).toBe('true');
+ });
+
+ it('is enabled if at least one item is selected', () => {
+ wrapper.setData({ selectedItems: [1] });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findBulkDeleteButton().attributes('disabled')).toBeFalsy();
+ });
+ });
+
+ describe('on click', () => {
+ it('when one item is selected', () => {
+ wrapper.setData({ selectedItems: [1] });
+ findBulkDeleteButton().vm.$emit('click');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDeleteModal().html()).toContain(
+ 'You are about to remove <b>foo</b>. Are you sure?',
+ );
+ expect(GlModal.methods.show).toHaveBeenCalled();
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, 'click_button', {
+ label: 'registry_tag_delete',
+ });
+ });
+ });
+
+ it('when multiple items are selected', () => {
+ wrapper.setData({ selectedItems: [0, 1] });
+ findBulkDeleteButton().vm.$emit('click');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDeleteModal().html()).toContain(
+ 'You are about to remove <b>2</b> tags. Are you sure?',
+ );
+ expect(GlModal.methods.show).toHaveBeenCalled();
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, 'click_button', {
+ label: 'bulk_registry_tag_delete',
+ });
+ });
+ });
+ });
+ });
+
+ describe('row delete button', () => {
+ it('exists', () => {
+ expect(
+ findAllDeleteButtons()
+ .at(0)
+ .exists(),
+ ).toBe(true);
+ });
+
+ it('is disabled if the item has no destroy_path', () => {
+ expect(
+ findAllDeleteButtons()
+ .at(1)
+ .attributes('disabled'),
+ ).toBe('true');
+ });
+
+ it('on click', () => {
+ findAllDeleteButtons()
+ .at(0)
+ .vm.$emit('click');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDeleteModal().html()).toContain(
+ 'You are about to remove <b>bar</b>. Are you sure?',
+ );
+ expect(GlModal.methods.show).toHaveBeenCalled();
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, 'click_button', {
+ label: 'registry_tag_delete',
+ });
+ });
+ });
+ });
+ });
+
+ describe('pagination', () => {
+ it('exists', () => {
+ expect(findPagination().exists()).toBe(true);
+ });
+
+ it('is wired to the correct pagination props', () => {
+ const pagination = findPagination();
+ expect(pagination.props('perPage')).toBe(store.state.tagsPagination.perPage);
+ expect(pagination.props('totalItems')).toBe(store.state.tagsPagination.total);
+ expect(pagination.props('value')).toBe(store.state.tagsPagination.page);
+ });
+
+ it('fetches the data from the API when the v-model changes', () => {
+ dispatchSpy.mockResolvedValue();
+ wrapper.setData({ currentPage: 2 });
+ expect(store.dispatch).toHaveBeenCalledWith('requestTagsList', {
+ id: wrapper.vm.$route.params.id,
+ pagination: { page: 2 },
+ });
+ });
+ });
+
+ describe('modal', () => {
+ it('exists', () => {
+ expect(findDeleteModal().exists()).toBe(true);
+ });
+
+ describe('when ok event is emitted', () => {
+ beforeEach(() => {
+ dispatchSpy.mockResolvedValue();
+ });
+
+ it('tracks confirm_delete', () => {
+ const deleteModal = findDeleteModal();
+ deleteModal.vm.$emit('ok');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, 'confirm_delete', {
+ label: 'registry_tag_delete',
+ });
+ });
+ });
+
+ it('when only one element is selected', () => {
+ const deleteModal = findDeleteModal();
+
+ wrapper.setData({ itemsToBeDeleted: [0] });
+ deleteModal.vm.$emit('ok');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.dispatch).toHaveBeenCalledWith('requestDeleteTag', {
+ tag: store.state.tags[0],
+ params: wrapper.vm.$route.params.id,
+ });
+ // itemsToBeDeleted is not rendered in the DOM; it is a staging variable between selected and deleted items
+ expect(wrapper.vm.itemsToBeDeleted).toEqual([]);
+ expect(findCheckedCheckboxes()).toHaveLength(0);
+ });
+ });
+
+ it('when multiple elements are selected', () => {
+ const deleteModal = findDeleteModal();
+
+ wrapper.setData({ itemsToBeDeleted: [0, 1] });
+ deleteModal.vm.$emit('ok');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.dispatch).toHaveBeenCalledWith('requestDeleteTags', {
+ ids: store.state.tags.map(t => t.name),
+ params: wrapper.vm.$route.params.id,
+ });
+ // itemsToBeDeleted is not rendered in the DOM; it is a staging variable between selected and deleted items
+ expect(wrapper.vm.itemsToBeDeleted).toEqual([]);
+ expect(findCheckedCheckboxes()).toHaveLength(0);
+ });
+ });
+ });
+
+ it('tracks cancel_delete when cancel event is emitted', () => {
+ const deleteModal = findDeleteModal();
+ deleteModal.vm.$emit('cancel');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, 'cancel_delete', {
+ label: 'registry_tag_delete',
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/registry/explorer/pages/list_spec.js b/spec/frontend/registry/explorer/pages/list_spec.js
new file mode 100644
index 00000000000..f463dc49035
--- /dev/null
+++ b/spec/frontend/registry/explorer/pages/list_spec.js
@@ -0,0 +1,205 @@
+import VueRouter from 'vue-router';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlPagination, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
+import Tracking from '~/tracking';
+import component from '~/registry/explorer/pages/list.vue';
+import store from '~/registry/explorer/stores/';
+import { SET_MAIN_LOADING } from '~/registry/explorer/stores/mutation_types/';
+import { imagesListResponse } from '../mock_data';
+import { GlModal, GlEmptyState } from '../stubs';
+
+const localVue = createLocalVue();
+localVue.use(VueRouter);
+
+describe('List Page', () => {
+ let wrapper;
+ let dispatchSpy;
+
+ const findDeleteBtn = () => wrapper.find({ ref: 'deleteImageButton' });
+ const findDeleteModal = () => wrapper.find(GlModal);
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findImagesList = () => wrapper.find({ ref: 'imagesList' });
+ const findRowItems = () => wrapper.findAll({ ref: 'rowItem' });
+ const findEmptyState = () => wrapper.find(GlEmptyState);
+ const findDetailsLink = () => wrapper.find({ ref: 'detailsLink' });
+ const findClipboardButton = () => wrapper.find({ ref: 'clipboardButton' });
+ const findPagination = () => wrapper.find(GlPagination);
+
+ beforeEach(() => {
+ wrapper = shallowMount(component, {
+ localVue,
+ store,
+ stubs: {
+ GlModal,
+ GlEmptyState,
+ GlSprintf,
+ },
+ });
+ dispatchSpy = jest.spyOn(store, 'dispatch');
+ store.dispatch('receiveImagesListSuccess', imagesListResponse);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('connection error', () => {
+ const config = {
+ characterError: true,
+ containersErrorImage: 'foo',
+ helpPagePath: 'bar',
+ };
+
+ beforeAll(() => {
+ store.dispatch('setInitialState', config);
+ });
+
+ afterAll(() => {
+ store.dispatch('setInitialState', {});
+ });
+
+ it('should show an empty state', () => {
+ expect(findEmptyState().exists()).toBe(true);
+ });
+
+ it('empty state should have an svg-path', () => {
+ expect(findEmptyState().attributes('svg-path')).toBe(config.containersErrorImage);
+ });
+
+ it('empty state should have a description', () => {
+ expect(findEmptyState().html()).toContain('connection error');
+ });
+
+ it('should not show the loading or default state', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ expect(findImagesList().exists()).toBe(false);
+ });
+ });
+
+ describe('when isLoading is true', () => {
+ beforeAll(() => store.commit(SET_MAIN_LOADING, true));
+
+ afterAll(() => store.commit(SET_MAIN_LOADING, false));
+
+ it('shows the loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('imagesList is not visible', () => {
+ expect(findImagesList().exists()).toBe(false);
+ });
+ });
+
+ describe('list', () => {
+ describe('listElement', () => {
+ let listElements;
+ let firstElement;
+
+ beforeEach(() => {
+ listElements = findRowItems();
+ [firstElement] = store.state.images;
+ });
+
+ it('contains one list element for each image', () => {
+ expect(listElements.length).toBe(store.state.images.length);
+ });
+
+ it('contains a link to the details page', () => {
+ const link = findDetailsLink();
+ expect(link.html()).toContain(firstElement.path);
+ expect(link.props('to').name).toBe('details');
+ });
+
+ it('contains a clipboard button', () => {
+ const button = findClipboardButton();
+ expect(button.exists()).toBe(true);
+ expect(button.props('text')).toBe(firstElement.location);
+ expect(button.props('title')).toBe(firstElement.location);
+ });
+
+ describe('delete image', () => {
+ it('should be possible to delete a repo', () => {
+ const deleteBtn = findDeleteBtn();
+ expect(deleteBtn.exists()).toBe(true);
+ });
+
+ it('should call deleteItem when confirming deletion', () => {
+ dispatchSpy.mockResolvedValue();
+ const itemToDelete = wrapper.vm.images[0];
+ wrapper.setData({ itemToDelete });
+ findDeleteModal().vm.$emit('ok');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'requestDeleteImage',
+ itemToDelete.destroy_path,
+ );
+ });
+ });
+ });
+
+ describe('pagination', () => {
+ it('exists', () => {
+ expect(findPagination().exists()).toBe(true);
+ });
+
+ it('is wired to the correct pagination props', () => {
+ const pagination = findPagination();
+ expect(pagination.props('perPage')).toBe(store.state.pagination.perPage);
+ expect(pagination.props('totalItems')).toBe(store.state.pagination.total);
+ expect(pagination.props('value')).toBe(store.state.pagination.page);
+ });
+
+ it('fetches the data from the API when the v-model changes', () => {
+ dispatchSpy.mockReturnValue();
+ wrapper.setData({ currentPage: 2 });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.dispatch).toHaveBeenCalledWith('requestImagesList', { page: 2 });
+ });
+ });
+ });
+ });
+
+ describe('modal', () => {
+ it('exists', () => {
+ expect(findDeleteModal().exists()).toBe(true);
+ });
+
+ it('contains a description with the path of the item to delete', () => {
+ wrapper.setData({ itemToDelete: { path: 'foo' } });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDeleteModal().html()).toContain('foo');
+ });
+ });
+ });
+
+ describe('tracking', () => {
+ const testTrackingCall = action => {
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, action, {
+ label: 'registry_repository_delete',
+ });
+ };
+
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event');
+ dispatchSpy.mockReturnValue();
+ });
+
+ it('sends an event when the delete button is clicked', () => {
+ const deleteBtn = findDeleteBtn();
+ deleteBtn.vm.$emit('click');
+ testTrackingCall('click_button');
+ });
+ it('sends an event when cancel is pressed on the modal', () => {
+ const deleteModal = findDeleteModal();
+ deleteModal.vm.$emit('cancel');
+ testTrackingCall('cancel_delete');
+ });
+ it('sends an event when confirm is clicked on the modal', () => {
+ dispatchSpy.mockReturnValue();
+ const deleteModal = findDeleteModal();
+ deleteModal.vm.$emit('ok');
+ testTrackingCall('confirm_delete');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/registry/explorer/stores/actions_spec.js b/spec/frontend/registry/explorer/stores/actions_spec.js
new file mode 100644
index 00000000000..a3fb29c0eb9
--- /dev/null
+++ b/spec/frontend/registry/explorer/stores/actions_spec.js
@@ -0,0 +1,333 @@
+import axios from '~/lib/utils/axios_utils';
+import MockAdapter from 'axios-mock-adapter';
+import * as actions from '~/registry/explorer/stores/actions';
+import * as types from '~/registry/explorer/stores/mutation_types';
+import testAction from 'helpers/vuex_action_helper';
+import createFlash from '~/flash';
+import { TEST_HOST } from 'helpers/test_constants';
+import { reposServerResponse, registryServerResponse } from '../mock_data';
+
+jest.mock('~/flash.js');
+
+describe('Actions RegistryExplorer Store', () => {
+ let mock;
+ const endpoint = `${TEST_HOST}/endpoint.json`;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('sets initial state', done => {
+ const initialState = {
+ config: {
+ endpoint,
+ },
+ };
+
+ testAction(
+ actions.setInitialState,
+ initialState,
+ null,
+ [{ type: types.SET_INITIAL_STATE, payload: initialState }],
+ [],
+ done,
+ );
+ });
+
+ describe('receives api responses', () => {
+ const response = {
+ data: [1, 2, 3],
+ headers: {
+ page: 1,
+ perPage: 10,
+ },
+ };
+
+ it('images list response', done => {
+ testAction(
+ actions.receiveImagesListSuccess,
+ response,
+ null,
+ [
+ { type: types.SET_IMAGES_LIST_SUCCESS, payload: response.data },
+ { type: types.SET_PAGINATION, payload: response.headers },
+ ],
+ [],
+ done,
+ );
+ });
+
+ it('tags list response', done => {
+ testAction(
+ actions.receiveTagsListSuccess,
+ response,
+ null,
+ [
+ { type: types.SET_TAGS_LIST_SUCCESS, payload: response.data },
+ { type: types.SET_TAGS_PAGINATION, payload: response.headers },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('fetch images list', () => {
+ it('sets the imagesList and pagination', done => {
+ mock.onGet(endpoint).replyOnce(200, reposServerResponse, {});
+
+ testAction(
+ actions.requestImagesList,
+ {},
+ {
+ config: {
+ endpoint,
+ },
+ },
+ [
+ { type: types.SET_MAIN_LOADING, payload: true },
+ { type: types.SET_MAIN_LOADING, payload: false },
+ ],
+ [{ type: 'receiveImagesListSuccess', payload: { data: reposServerResponse, headers: {} } }],
+ done,
+ );
+ });
+
+ it('should create flash on error', done => {
+ testAction(
+ actions.requestImagesList,
+ {},
+ {
+ config: {
+ endpoint: null,
+ },
+ },
+ [
+ { type: types.SET_MAIN_LOADING, payload: true },
+ { type: types.SET_MAIN_LOADING, payload: false },
+ ],
+ [],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+ });
+
+ describe('fetch tags list', () => {
+ const url = `${endpoint}/1`;
+ const params = window.btoa(JSON.stringify({ tags_path: `${endpoint}/1` }));
+
+ it('sets the tagsList', done => {
+ mock.onGet(url).replyOnce(200, registryServerResponse, {});
+
+ testAction(
+ actions.requestTagsList,
+ { params },
+ {},
+ [
+ { type: types.SET_MAIN_LOADING, payload: true },
+ { type: types.SET_MAIN_LOADING, payload: false },
+ ],
+ [
+ {
+ type: 'receiveTagsListSuccess',
+ payload: { data: registryServerResponse, headers: {} },
+ },
+ ],
+ done,
+ );
+ });
+
+ it('should create flash on error', done => {
+ testAction(
+ actions.requestTagsList,
+ { params },
+ {},
+ [
+ { type: types.SET_MAIN_LOADING, payload: true },
+ { type: types.SET_MAIN_LOADING, payload: false },
+ ],
+ [],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+ });
+
+ describe('request delete single tag', () => {
+ it('successfully performs the delete request', done => {
+ const deletePath = 'delete/path';
+ const params = window.btoa(JSON.stringify({ tags_path: `${endpoint}/1`, id: 1 }));
+
+ mock.onDelete(deletePath).replyOnce(200);
+
+ testAction(
+ actions.requestDeleteTag,
+ {
+ tag: {
+ destroy_path: deletePath,
+ },
+ params,
+ },
+ {
+ tagsPagination: {},
+ },
+ [
+ { type: types.SET_MAIN_LOADING, payload: true },
+ { type: types.SET_MAIN_LOADING, payload: false },
+ ],
+ [
+ {
+ type: 'requestTagsList',
+ payload: { pagination: {}, params },
+ },
+ ],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+
+ it('should show flash message on error', done => {
+ testAction(
+ actions.requestDeleteTag,
+ {
+ tag: {
+ destroy_path: null,
+ },
+ },
+ {},
+ [
+ { type: types.SET_MAIN_LOADING, payload: true },
+ { type: types.SET_MAIN_LOADING, payload: false },
+ ],
+ [],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+ });
+
+ describe('request delete multiple tags', () => {
+ const id = 1;
+ const params = window.btoa(JSON.stringify({ id }));
+ const projectPath = 'project-path';
+ const url = `${projectPath}/registry/repository/${id}/tags/bulk_destroy`;
+
+ it('successfully performs the delete request', done => {
+ mock.onDelete(url).replyOnce(200);
+
+ testAction(
+ actions.requestDeleteTags,
+ {
+ ids: [1, 2],
+ params,
+ },
+ {
+ config: {
+ projectPath,
+ },
+ tagsPagination: {},
+ },
+ [
+ { type: types.SET_MAIN_LOADING, payload: true },
+ { type: types.SET_MAIN_LOADING, payload: false },
+ ],
+ [
+ {
+ type: 'requestTagsList',
+ payload: { pagination: {}, params },
+ },
+ ],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+
+ it('should show flash message on error', done => {
+ mock.onDelete(url).replyOnce(500);
+
+ testAction(
+ actions.requestDeleteTags,
+ {
+ ids: [1, 2],
+ params,
+ },
+ {
+ config: {
+ projectPath,
+ },
+ tagsPagination: {},
+ },
+ [
+ { type: types.SET_MAIN_LOADING, payload: true },
+ { type: types.SET_MAIN_LOADING, payload: false },
+ ],
+ [],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+ });
+
+ describe('request delete single image', () => {
+ it('successfully performs the delete request', done => {
+ const deletePath = 'delete/path';
+ mock.onDelete(deletePath).replyOnce(200);
+
+ testAction(
+ actions.requestDeleteImage,
+ deletePath,
+ {
+ pagination: {},
+ },
+ [
+ { type: types.SET_MAIN_LOADING, payload: true },
+ { type: types.SET_MAIN_LOADING, payload: false },
+ ],
+ [
+ {
+ type: 'requestImagesList',
+ payload: { pagination: {} },
+ },
+ ],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+
+ it('should show flash message on error', done => {
+ testAction(
+ actions.requestDeleteImage,
+ null,
+ {},
+ [
+ { type: types.SET_MAIN_LOADING, payload: true },
+ { type: types.SET_MAIN_LOADING, payload: false },
+ ],
+ [],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+ });
+});
diff --git a/spec/frontend/registry/explorer/stores/mutations_spec.js b/spec/frontend/registry/explorer/stores/mutations_spec.js
new file mode 100644
index 00000000000..43f6a95db10
--- /dev/null
+++ b/spec/frontend/registry/explorer/stores/mutations_spec.js
@@ -0,0 +1,86 @@
+import mutations from '~/registry/explorer/stores/mutations';
+import * as types from '~/registry/explorer/stores/mutation_types';
+
+describe('Mutations Registry Explorer Store', () => {
+ let mockState;
+
+ beforeEach(() => {
+ mockState = {};
+ });
+
+ describe('SET_INITIAL_STATE', () => {
+ it('should set the initial state', () => {
+ const payload = { endpoint: 'foo', isGroupPage: true };
+ const expectedState = { ...mockState, config: payload };
+ mutations[types.SET_INITIAL_STATE](mockState, payload);
+
+ expect(mockState).toEqual(expectedState);
+ });
+ });
+
+ describe('SET_IMAGES_LIST_SUCCESS', () => {
+ it('should set the images list', () => {
+ const images = [1, 2, 3];
+ const expectedState = { ...mockState, images };
+ mutations[types.SET_IMAGES_LIST_SUCCESS](mockState, images);
+
+ expect(mockState).toEqual(expectedState);
+ });
+ });
+
+ describe('SET_TAGS_LIST_SUCCESS', () => {
+ it('should set the tags list', () => {
+ const tags = [1, 2, 3];
+ const expectedState = { ...mockState, tags };
+ mutations[types.SET_TAGS_LIST_SUCCESS](mockState, tags);
+
+ expect(mockState).toEqual(expectedState);
+ });
+ });
+
+ describe('SET_MAIN_LOADING', () => {
+ it('should set the isLoading', () => {
+ const expectedState = { ...mockState, isLoading: true };
+ mutations[types.SET_MAIN_LOADING](mockState, true);
+
+ expect(mockState).toEqual(expectedState);
+ });
+ });
+
+ describe('SET_PAGINATION', () => {
+ const generatePagination = () => [
+ {
+ 'X-PAGE': '1',
+ 'X-PER-PAGE': '20',
+ 'X-TOTAL': '100',
+ 'X-TOTAL-PAGES': '5',
+ 'X-NEXT-PAGE': '2',
+ 'X-PREV-PAGE': '0',
+ },
+ {
+ page: 1,
+ perPage: 20,
+ total: 100,
+ totalPages: 5,
+ nextPage: 2,
+ previousPage: 0,
+ },
+ ];
+
+ it('should set the images pagination', () => {
+ const [headers, expectedResult] = generatePagination();
+ const expectedState = { ...mockState, pagination: expectedResult };
+ mutations[types.SET_PAGINATION](mockState, headers);
+
+ expect(mockState).toEqual(expectedState);
+ });
+
+ it('should set the tags pagination', () => {
+ const [headers, expectedResult] = generatePagination();
+ const expectedState = { ...mockState, tagsPagination: expectedResult };
+ mutations[types.SET_TAGS_PAGINATION](mockState, headers);
+
+ expect(mockState).toEqual(expectedState);
+ });
+ });
+});
diff --git a/spec/frontend/registry/explorer/stubs.js b/spec/frontend/registry/explorer/stubs.js
new file mode 100644
index 00000000000..2c2c7587af9
--- /dev/null
+++ b/spec/frontend/registry/explorer/stubs.js
@@ -0,0 +1,11 @@
+export const GlModal = {
+ template: '<div><slot name="modal-title"></slot><slot></slot><slot name="modal-ok"></slot></div>',
+ methods: {
+ show: jest.fn(),
+ },
+};
+
+export const GlEmptyState = {
+ template: '<div><slot name="description"></slot></div>',
+ name: 'GlEmptyStateStub',
+};
diff --git a/spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap b/spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap
index d11a9bdeb51..426bc5c0e6c 100644
--- a/spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap
+++ b/spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap
@@ -84,17 +84,16 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = `
class="input-group-append"
>
<button
- class="btn input-group-text btn-secondary btn-default"
+ class="btn input-group-text btn-secondary btn-md btn-default"
data-clipboard-text="docker login host"
title="Copy login command"
type="button"
>
<svg
- aria-hidden="true"
- class="s16 ic-duplicate"
+ class="gl-icon s16"
>
<use
- xlink:href="#duplicate"
+ href="#copy-to-clipboard"
/>
</svg>
</button>
@@ -122,17 +121,16 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = `
class="input-group-append"
>
<button
- class="btn input-group-text btn-secondary btn-default"
+ class="btn input-group-text btn-secondary btn-md btn-default"
data-clipboard-text="docker build -t url ."
title="Copy build command"
type="button"
>
<svg
- aria-hidden="true"
- class="s16 ic-duplicate"
+ class="gl-icon s16"
>
<use
- xlink:href="#duplicate"
+ href="#copy-to-clipboard"
/>
</svg>
</button>
@@ -152,17 +150,16 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = `
class="input-group-append"
>
<button
- class="btn input-group-text btn-secondary btn-default"
+ class="btn input-group-text btn-secondary btn-md btn-default"
data-clipboard-text="docker push url"
title="Copy push command"
type="button"
>
<svg
- aria-hidden="true"
- class="s16 ic-duplicate"
+ class="gl-icon s16"
>
<use
- xlink:href="#duplicate"
+ href="#copy-to-clipboard"
/>
</svg>
</button>
diff --git a/spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap b/spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap
deleted file mode 100644
index d26df308b97..00000000000
--- a/spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap
+++ /dev/null
@@ -1,181 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Settings Form renders 1`] = `
-<form>
- <div
- class="card"
- >
- <!---->
- <div
- class="card-header"
- >
-
- Tag expiration policy
-
- </div>
- <div
- class="card-body"
- >
- <!---->
- <!---->
-
- <glformgroup-stub
- id="expiration-policy-toggle-group"
- label="Expiration policy:"
- label-align="right"
- label-cols="3"
- label-for="expiration-policy-toggle"
- >
- <div
- class="d-flex align-items-start"
- >
- <gltoggle-stub
- id="expiration-policy-toggle"
- labeloff="Toggle Status: OFF"
- labelon="Toggle Status: ON"
- />
-
- <span
- class="mb-2 ml-1 lh-2"
- >
- Docker tag expiration policy is
- <strong>
- disabled
- </strong>
- </span>
- </div>
- </glformgroup-stub>
-
- <glformgroup-stub
- id="expiration-policy-interval-group"
- label="Expiration interval:"
- label-align="right"
- label-cols="3"
- label-for="expiration-policy-interval"
- >
- <glformselect-stub
- disabled="true"
- id="expiration-policy-interval"
- value="bar"
- >
- <option
- value="foo"
- >
-
- Foo
-
- </option>
- <option
- value="bar"
- >
-
- Bar
-
- </option>
- </glformselect-stub>
- </glformgroup-stub>
-
- <glformgroup-stub
- id="expiration-policy-schedule-group"
- label="Expiration schedule:"
- label-align="right"
- label-cols="3"
- label-for="expiration-policy-schedule"
- >
- <glformselect-stub
- disabled="true"
- id="expiration-policy-schedule"
- value="bar"
- >
- <option
- value="foo"
- >
-
- Foo
-
- </option>
- <option
- value="bar"
- >
-
- Bar
-
- </option>
- </glformselect-stub>
- </glformgroup-stub>
-
- <glformgroup-stub
- id="expiration-policy-latest-group"
- label="Number of tags to retain:"
- label-align="right"
- label-cols="3"
- label-for="expiration-policy-latest"
- >
- <glformselect-stub
- disabled="true"
- id="expiration-policy-latest"
- value="bar"
- >
- <option
- value="foo"
- >
-
- Foo
-
- </option>
- <option
- value="bar"
- >
-
- Bar
-
- </option>
- </glformselect-stub>
- </glformgroup-stub>
-
- <glformgroup-stub
- id="expiration-policy-name-matching-group"
- invalid-feedback="The value of this input should be less than 255 characters"
- label="Expire Docker tags that match this regex:"
- label-align="right"
- label-cols="3"
- label-for="expiration-policy-name-matching"
- >
- <glformtextarea-stub
- disabled="true"
- id="expiration-policy-name-matching"
- placeholder=".*"
- trim=""
- value=""
- />
- </glformgroup-stub>
-
- </div>
- <div
- class="card-footer"
- >
- <div
- class="d-flex justify-content-end"
- >
- <glbutton-stub
- class="mr-2 d-block"
- type="reset"
- >
- Cancel
- </glbutton-stub>
-
- <glbutton-stub
- class="d-block"
- type="submit"
- variant="success"
- >
-
- Save expiration policy
-
- </glbutton-stub>
- </div>
- </div>
- <!---->
- </div>
-</form>
-`;
diff --git a/spec/frontend/registry/settings/components/registry_settings_app_spec.js b/spec/frontend/registry/settings/components/registry_settings_app_spec.js
index 448ff2b3be9..e9ba65e4387 100644
--- a/spec/frontend/registry/settings/components/registry_settings_app_spec.js
+++ b/spec/frontend/registry/settings/components/registry_settings_app_spec.js
@@ -1,55 +1,74 @@
-import Vuex from 'vuex';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
+import { GlAlert } from '@gitlab/ui';
import component from '~/registry/settings/components/registry_settings_app.vue';
+import SettingsForm from '~/registry/settings/components/settings_form.vue';
import { createStore } from '~/registry/settings/store/';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
+import { SET_IS_DISABLED } from '~/registry/settings/store/mutation_types';
+import { FETCH_SETTINGS_ERROR_MESSAGE } from '~/registry/shared/constants';
describe('Registry Settings App', () => {
let wrapper;
let store;
- let fetchSpy;
- const findSettingsComponent = () => wrapper.find({ ref: 'settings-form' });
- const findLoadingComponent = () => wrapper.find({ ref: 'loading-icon' });
+ const findSettingsComponent = () => wrapper.find(SettingsForm);
+ const findAlert = () => wrapper.find(GlAlert);
- const mountComponent = (options = {}) => {
- fetchSpy = jest.fn();
+ const mountComponent = ({ dispatchMock = 'mockResolvedValue', isDisabled = false } = {}) => {
+ store = createStore();
+ store.commit(SET_IS_DISABLED, isDisabled);
+ const dispatchSpy = jest.spyOn(store, 'dispatch');
+ if (dispatchMock) {
+ dispatchSpy[dispatchMock]();
+ }
wrapper = shallowMount(component, {
- store,
- methods: {
- fetchSettings: fetchSpy,
+ mocks: {
+ $toast: {
+ show: jest.fn(),
+ },
},
- ...options,
+ store,
});
};
- beforeEach(() => {
- store = createStore();
- mountComponent();
- });
-
afterEach(() => {
wrapper.destroy();
});
it('renders', () => {
+ mountComponent();
expect(wrapper.element).toMatchSnapshot();
});
it('call the store function to load the data on mount', () => {
- expect(fetchSpy).toHaveBeenCalled();
+ mountComponent();
+ expect(store.dispatch).toHaveBeenCalledWith('fetchSettings');
});
- it('renders a loader if isLoading is true', () => {
- store.dispatch('toggleLoading');
- return wrapper.vm.$nextTick().then(() => {
- expect(findLoadingComponent().exists()).toBe(true);
- expect(findSettingsComponent().exists()).toBe(false);
- });
+ it('shows a toast if fetchSettings fails', () => {
+ mountComponent({ dispatchMock: 'mockRejectedValue' });
+ return wrapper.vm.$nextTick().then(() =>
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(FETCH_SETTINGS_ERROR_MESSAGE, {
+ type: 'error',
+ }),
+ );
});
+
it('renders the setting form', () => {
+ mountComponent();
expect(findSettingsComponent().exists()).toBe(true);
});
+
+ describe('isDisabled', () => {
+ beforeEach(() => {
+ mountComponent({ isDisabled: true });
+ });
+
+ it('the form is hidden', () => {
+ expect(findSettingsComponent().exists()).toBe(false);
+ });
+
+ it('shows an alert', () => {
+ expect(findAlert().exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/registry/settings/components/settings_form_spec.js b/spec/frontend/registry/settings/components/settings_form_spec.js
index bd733e965a4..2b3e529b283 100644
--- a/spec/frontend/registry/settings/components/settings_form_spec.js
+++ b/spec/frontend/registry/settings/components/settings_form_spec.js
@@ -1,168 +1,179 @@
-import Vuex from 'vuex';
-import { mount, createLocalVue } from '@vue/test-utils';
-import stubChildren from 'helpers/stub_children';
+import { shallowMount } from '@vue/test-utils';
+import Tracking from '~/tracking';
import component from '~/registry/settings/components/settings_form.vue';
+import expirationPolicyFields from '~/registry/shared/components/expiration_policy_fields.vue';
import { createStore } from '~/registry/settings/store/';
-import { NAME_REGEX_LENGTH } from '~/registry/settings/constants';
-import { stringifiedFormOptions } from '../mock_data';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
+import {
+ UPDATE_SETTINGS_ERROR_MESSAGE,
+ UPDATE_SETTINGS_SUCCESS_MESSAGE,
+} from '~/registry/shared/constants';
+import { stringifiedFormOptions } from '../../shared/mock_data';
describe('Settings Form', () => {
let wrapper;
let store;
- let saveSpy;
- let resetSpy;
+ let dispatchSpy;
+
+ const GlLoadingIcon = { name: 'gl-loading-icon-stub', template: '<svg></svg>' };
+ const GlCard = {
+ name: 'gl-card-stub',
+ template: `
+ <div>
+ <slot name="header"></slot>
+ <slot></slot>
+ <slot name="footer"></slot>
+ </div>
+ `,
+ };
+
+ const trackingPayload = {
+ label: 'docker_container_retention_and_expiration_policies',
+ };
- const findFormGroup = name => wrapper.find(`#expiration-policy-${name}-group`);
- const findFormElements = (name, father = wrapper) => father.find(`#expiration-policy-${name}`);
+ const findForm = () => wrapper.find({ ref: 'form-element' });
+ const findFields = () => wrapper.find(expirationPolicyFields);
const findCancelButton = () => wrapper.find({ ref: 'cancel-button' });
const findSaveButton = () => wrapper.find({ ref: 'save-button' });
- const findForm = () => wrapper.find({ ref: 'form-element' });
+ const findLoadingIcon = (parent = wrapper) => parent.find(GlLoadingIcon);
- const mountComponent = (options = {}) => {
- saveSpy = jest.fn();
- resetSpy = jest.fn();
- wrapper = mount(component, {
+ const mountComponent = () => {
+ wrapper = shallowMount(component, {
stubs: {
- ...stubChildren(component),
- GlCard: false,
+ GlCard,
+ GlLoadingIcon,
},
- store,
- methods: {
- saveSettings: saveSpy,
- resetSettings: resetSpy,
+ mocks: {
+ $toast: {
+ show: jest.fn(),
+ },
},
- ...options,
+ store,
});
};
beforeEach(() => {
store = createStore();
store.dispatch('setInitialState', stringifiedFormOptions);
+ dispatchSpy = jest.spyOn(store, 'dispatch');
mountComponent();
+ jest.spyOn(Tracking, 'event');
});
afterEach(() => {
wrapper.destroy();
});
- it('renders', () => {
- expect(wrapper.element).toMatchSnapshot();
- });
-
- describe.each`
- elementName | modelName | value | disabledByToggle
- ${'toggle'} | ${'enabled'} | ${true} | ${'not disabled'}
- ${'interval'} | ${'older_than'} | ${'foo'} | ${'disabled'}
- ${'schedule'} | ${'cadence'} | ${'foo'} | ${'disabled'}
- ${'latest'} | ${'keep_n'} | ${'foo'} | ${'disabled'}
- ${'name-matching'} | ${'name_regex'} | ${'foo'} | ${'disabled'}
- `('$elementName form element', ({ elementName, modelName, value, disabledByToggle }) => {
- let formGroup;
+ describe('form', () => {
+ let form;
beforeEach(() => {
- formGroup = findFormGroup(elementName);
- });
- it(`${elementName} form group exist in the dom`, () => {
- expect(formGroup.exists()).toBe(true);
- });
-
- it(`${elementName} form group has a label-for property`, () => {
- expect(formGroup.attributes('label-for')).toBe(`expiration-policy-${elementName}`);
- });
-
- it(`${elementName} form group has a label-cols property`, () => {
- expect(formGroup.attributes('label-cols')).toBe(`${wrapper.vm.$options.labelsConfig.cols}`);
+ form = findForm();
+ dispatchSpy.mockReturnValue();
});
- it(`${elementName} form group has a label-align property`, () => {
- expect(formGroup.attributes('label-align')).toBe(`${wrapper.vm.$options.labelsConfig.align}`);
+ describe('data binding', () => {
+ it('v-model changes update the settings property', () => {
+ findFields().vm.$emit('input', 'foo');
+ expect(dispatchSpy).toHaveBeenCalledWith('updateSettings', { settings: 'foo' });
+ });
});
- it(`${elementName} form group contains an input element`, () => {
- expect(findFormElements(elementName, formGroup).exists()).toBe(true);
- });
+ describe('form reset event', () => {
+ beforeEach(() => {
+ form.trigger('reset');
+ });
+ it('calls the appropriate function', () => {
+ expect(dispatchSpy).toHaveBeenCalledWith('resetSettings');
+ });
- it(`${elementName} form element change updated ${modelName} with ${value}`, () => {
- const element = findFormElements(elementName, formGroup);
- const modelUpdateEvent = element.vm.$options.model
- ? element.vm.$options.model.event
- : 'input';
- element.vm.$emit(modelUpdateEvent, value);
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm[modelName]).toBe(value);
+ it('tracks the reset event', () => {
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, 'reset_form', trackingPayload);
});
});
- it(`${elementName} is ${disabledByToggle} by enabled set to false`, () => {
- store.dispatch('updateSettings', { enabled: false });
- const expectation = disabledByToggle === 'disabled' ? 'true' : undefined;
- expect(findFormElements(elementName, formGroup).attributes('disabled')).toBe(expectation);
- });
- });
+ describe('form submit event', () => {
+ it('save has type submit', () => {
+ mountComponent();
+ expect(findSaveButton().attributes('type')).toBe('submit');
+ });
- describe('form actions', () => {
- let form;
- beforeEach(() => {
- form = findForm();
- });
- it('cancel has type reset', () => {
- expect(findCancelButton().attributes('type')).toBe('reset');
- });
+ it('dispatches the saveSettings action', () => {
+ dispatchSpy.mockResolvedValue();
+ form.trigger('submit');
+ expect(dispatchSpy).toHaveBeenCalledWith('saveSettings');
+ });
- it('form reset event call the appropriate function', () => {
- form.trigger('reset');
- expect(resetSpy).toHaveBeenCalled();
- });
+ it('tracks the submit event', () => {
+ dispatchSpy.mockResolvedValue();
+ form.trigger('submit');
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, 'submit_form', trackingPayload);
+ });
- it('save has type submit', () => {
- expect(findSaveButton().attributes('type')).toBe('submit');
- });
+ it('shows a success toast when submit succeeds', () => {
+ dispatchSpy.mockResolvedValue();
+ form.trigger('submit');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_SUCCESS_MESSAGE, {
+ type: 'success',
+ });
+ });
+ });
- it('form submit event call the appropriate function', () => {
- form.trigger('submit');
- expect(saveSpy).toHaveBeenCalled();
+ it('shows an error toast when submit fails', () => {
+ dispatchSpy.mockRejectedValue();
+ form.trigger('submit');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_ERROR_MESSAGE, {
+ type: 'error',
+ });
+ });
+ });
});
});
- describe('form validation', () => {
- describe(`when name regex is longer than ${NAME_REGEX_LENGTH}`, () => {
- const invalidString = new Array(NAME_REGEX_LENGTH + 2).join(',');
+ describe('form actions', () => {
+ describe('cancel button', () => {
beforeEach(() => {
- store.dispatch('updateSettings', { name_regex: invalidString });
+ store.commit('SET_SETTINGS', { foo: 'bar' });
});
- it('save btn is disabled', () => {
- expect(findSaveButton().attributes('disabled')).toBeTruthy();
+ it('has type reset', () => {
+ expect(findCancelButton().attributes('type')).toBe('reset');
});
- it('nameRegexState is false', () => {
- expect(wrapper.vm.nameRegexState).toBe(false);
+ it('is disabled when isEdited is false', () =>
+ wrapper.vm.$nextTick().then(() => {
+ expect(findCancelButton().attributes('disabled')).toBe('true');
+ }));
+
+ it('is disabled when isLoading is true', () => {
+ store.commit('TOGGLE_LOADING');
+ store.commit('UPDATE_SETTINGS', { settings: { foo: 'baz' } });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findCancelButton().attributes('disabled')).toBe('true');
+ store.commit('TOGGLE_LOADING');
+ });
});
- });
- it('if the user did not type validation is null', () => {
- store.dispatch('updateSettings', { name_regex: null });
- expect(wrapper.vm.nameRegexState).toBe(null);
- return wrapper.vm.$nextTick().then(() => {
- expect(findSaveButton().attributes('disabled')).toBeFalsy();
+ it('is enabled when isLoading is false and isEdited is true', () => {
+ store.commit('UPDATE_SETTINGS', { settings: { foo: 'baz' } });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findCancelButton().attributes('disabled')).toBe(undefined);
+ });
});
});
- it(`if the user typed and is less than ${NAME_REGEX_LENGTH} state is true`, () => {
- store.dispatch('updateSettings', { name_regex: 'abc' });
- expect(wrapper.vm.nameRegexState).toBe(true);
- });
- });
+ describe('when isLoading is true', () => {
+ beforeEach(() => {
+ store.commit('TOGGLE_LOADING');
+ });
+ afterEach(() => {
+ store.commit('TOGGLE_LOADING');
+ });
- describe('help text', () => {
- it('toggleDescriptionText text reflects enabled property', () => {
- const toggleHelpText = findFormGroup('toggle').find('span');
- expect(toggleHelpText.html()).toContain('disabled');
- wrapper.vm.enabled = true;
- return wrapper.vm.$nextTick().then(() => {
- expect(toggleHelpText.html()).toContain('enabled');
+ it('submit button is disabled and shows a spinner', () => {
+ const button = findSaveButton();
+ expect(button.attributes('disabled')).toBeTruthy();
+ expect(findLoadingIcon(button).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/registry/settings/store/actions_spec.js b/spec/frontend/registry/settings/store/actions_spec.js
index 80fb800ac3a..5038dc82416 100644
--- a/spec/frontend/registry/settings/store/actions_spec.js
+++ b/spec/frontend/registry/settings/store/actions_spec.js
@@ -1,40 +1,42 @@
import Api from '~/api';
-import createFlash from '~/flash';
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/registry/settings/store/actions';
import * as types from '~/registry/settings/store/mutation_types';
-import {
- UPDATE_SETTINGS_ERROR_MESSAGE,
- FETCH_SETTINGS_ERROR_MESSAGE,
- UPDATE_SETTINGS_SUCCESS_MESSAGE,
-} from '~/registry/settings/constants';
-
-jest.mock('~/flash');
describe('Actions Registry Store', () => {
describe.each`
- actionName | mutationName | payload
- ${'setInitialState'} | ${types.SET_INITIAL_STATE} | ${'foo'}
- ${'updateSettings'} | ${types.UPDATE_SETTINGS} | ${'foo'}
- ${'receiveSettingsSuccess'} | ${types.SET_SETTINGS} | ${'foo'}
- ${'toggleLoading'} | ${types.TOGGLE_LOADING} | ${undefined}
- ${'resetSettings'} | ${types.RESET_SETTINGS} | ${undefined}
- `('%s action invokes %s mutation with payload %s', ({ actionName, mutationName, payload }) => {
- it('should set the initial state', done => {
- testAction(actions[actionName], payload, {}, [{ type: mutationName, payload }], [], done);
- });
- });
-
- describe.each`
- actionName | message
- ${'receiveSettingsError'} | ${FETCH_SETTINGS_ERROR_MESSAGE}
- ${'updateSettingsError'} | ${UPDATE_SETTINGS_ERROR_MESSAGE}
- `('%s action', ({ actionName, message }) => {
- it(`should call createFlash with ${message}`, done => {
- testAction(actions[actionName], null, null, [], [], () => {
- expect(createFlash).toHaveBeenCalledWith(message);
- done();
+ actionName | mutationName | payload
+ ${'setInitialState'} | ${types.SET_INITIAL_STATE} | ${'foo'}
+ ${'updateSettings'} | ${types.UPDATE_SETTINGS} | ${'foo'}
+ ${'toggleLoading'} | ${types.TOGGLE_LOADING} | ${undefined}
+ ${'resetSettings'} | ${types.RESET_SETTINGS} | ${undefined}
+ `(
+ '$actionName invokes $mutationName with payload $payload',
+ ({ actionName, mutationName, payload }) => {
+ it('should set state', done => {
+ testAction(actions[actionName], payload, {}, [{ type: mutationName, payload }], [], done);
});
+ },
+ );
+
+ describe('receiveSettingsSuccess', () => {
+ it('calls SET_SETTINGS when data is present', () => {
+ testAction(
+ actions.receiveSettingsSuccess,
+ 'foo',
+ {},
+ [{ type: types.SET_SETTINGS, payload: 'foo' }],
+ [],
+ );
+ });
+ it('calls SET_IS_DISABLED when data is not present', () => {
+ testAction(
+ actions.receiveSettingsSuccess,
+ null,
+ {},
+ [{ type: types.SET_IS_DISABLED, payload: true }],
+ [],
+ );
});
});
@@ -64,18 +66,6 @@ describe('Actions Registry Store', () => {
done,
);
});
-
- it('should call receiveSettingsError on error', done => {
- Api.project = jest.fn().mockRejectedValue();
- testAction(
- actions.fetchSettings,
- null,
- state,
- [],
- [{ type: 'toggleLoading' }, { type: 'receiveSettingsError' }, { type: 'toggleLoading' }],
- done,
- );
- });
});
describe('saveSettings', () => {
@@ -102,21 +92,6 @@ describe('Actions Registry Store', () => {
{ type: 'receiveSettingsSuccess', payload: payload.data.container_expiration_policy },
{ type: 'toggleLoading' },
],
- () => {
- expect(createFlash).toHaveBeenCalledWith(UPDATE_SETTINGS_SUCCESS_MESSAGE, 'success');
- done();
- },
- );
- });
-
- it('should call receiveSettingsError on error', done => {
- Api.updateProject = jest.fn().mockRejectedValue();
- testAction(
- actions.saveSettings,
- null,
- state,
- [],
- [{ type: 'toggleLoading' }, { type: 'updateSettingsError' }, { type: 'toggleLoading' }],
done,
);
});
diff --git a/spec/frontend/registry/settings/store/getters_spec.js b/spec/frontend/registry/settings/store/getters_spec.js
new file mode 100644
index 00000000000..44631b97a39
--- /dev/null
+++ b/spec/frontend/registry/settings/store/getters_spec.js
@@ -0,0 +1,44 @@
+import * as getters from '~/registry/settings/store/getters';
+import * as utils from '~/registry/shared/utils';
+import { formOptions } from '../../shared/mock_data';
+
+describe('Getters registry settings store', () => {
+ const settings = {
+ cadence: 'foo',
+ keep_n: 'bar',
+ older_than: 'baz',
+ };
+
+ describe.each`
+ getter | variable | formOption
+ ${'getCadence'} | ${'cadence'} | ${'cadence'}
+ ${'getKeepN'} | ${'keep_n'} | ${'keepN'}
+ ${'getOlderThan'} | ${'older_than'} | ${'olderThan'}
+ `('Options getter', ({ getter, variable, formOption }) => {
+ beforeEach(() => {
+ utils.findDefaultOption = jest.fn();
+ });
+
+ it(`${getter} returns ${variable} when ${variable} exists in settings`, () => {
+ expect(getters[getter]({ settings })).toBe(settings[variable]);
+ });
+
+ it(`${getter} calls findDefaultOption when ${variable} does not exist in settings`, () => {
+ getters[getter]({ settings: {}, formOptions });
+ expect(utils.findDefaultOption).toHaveBeenCalledWith(formOptions[formOption]);
+ });
+ });
+
+ describe('getIsEdited', () => {
+ it('returns false when original is equal to settings', () => {
+ const same = { foo: 'bar' };
+ expect(getters.getIsEdited({ original: same, settings: same })).toBe(false);
+ });
+
+ it('returns true when original is different from settings', () => {
+ expect(getters.getIsEdited({ original: { foo: 'bar' }, settings: { foo: 'baz' } })).toBe(
+ true,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/registry/settings/store/mutations_spec.js b/spec/frontend/registry/settings/store/mutations_spec.js
index 1a0effbe125..8ab0196fd4d 100644
--- a/spec/frontend/registry/settings/store/mutations_spec.js
+++ b/spec/frontend/registry/settings/store/mutations_spec.js
@@ -1,7 +1,7 @@
import mutations from '~/registry/settings/store/mutations';
import * as types from '~/registry/settings/store/mutation_types';
import createState from '~/registry/settings/store/state';
-import { formOptions, stringifiedFormOptions } from '../mock_data';
+import { formOptions, stringifiedFormOptions } from '../../shared/mock_data';
describe('Mutations Registry Store', () => {
let mockState;
@@ -28,10 +28,11 @@ describe('Mutations Registry Store', () => {
mockState.settings = { foo: 'bar' };
const payload = { foo: 'baz' };
const expectedState = { ...mockState, settings: payload };
- mutations[types.UPDATE_SETTINGS](mockState, payload);
+ mutations[types.UPDATE_SETTINGS](mockState, { settings: payload });
expect(mockState.settings).toEqual(expectedState.settings);
});
});
+
describe('SET_SETTINGS', () => {
it('should set the settings and original', () => {
const payload = { foo: 'baz' };
@@ -41,6 +42,7 @@ describe('Mutations Registry Store', () => {
expect(mockState.original).toEqual(expectedState.settings);
});
});
+
describe('RESET_SETTINGS', () => {
it('should copy original over settings', () => {
mockState.settings = { foo: 'bar' };
@@ -49,10 +51,18 @@ describe('Mutations Registry Store', () => {
expect(mockState.settings).toEqual(mockState.original);
});
});
+
describe('TOGGLE_LOADING', () => {
it('should toggle the loading', () => {
mutations[types.TOGGLE_LOADING](mockState);
expect(mockState.isLoading).toEqual(true);
});
});
+
+ describe('SET_IS_DISABLED', () => {
+ it('should set isDisabled', () => {
+ mutations[types.SET_IS_DISABLED](mockState, true);
+ expect(mockState.isDisabled).toEqual(true);
+ });
+ });
});
diff --git a/spec/frontend/registry/shared/components/__snapshots__/expiration_policy_fields_spec.js.snap b/spec/frontend/registry/shared/components/__snapshots__/expiration_policy_fields_spec.js.snap
new file mode 100644
index 00000000000..6e7bc0491ce
--- /dev/null
+++ b/spec/frontend/registry/shared/components/__snapshots__/expiration_policy_fields_spec.js.snap
@@ -0,0 +1,134 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Expiration Policy Form renders 1`] = `
+<div
+ class="lh-2"
+>
+ <gl-form-group-stub
+ id="expiration-policy-toggle-group"
+ label="Expiration policy:"
+ label-align="right"
+ label-cols="3"
+ label-for="expiration-policy-toggle"
+ >
+ <div
+ class="d-flex align-items-start"
+ >
+ <gl-toggle-stub
+ id="expiration-policy-toggle"
+ labeloff="Toggle Status: OFF"
+ labelon="Toggle Status: ON"
+ labelposition="hidden"
+ />
+
+ <span
+ class="mb-2 ml-1 lh-2"
+ >
+ Docker tag expiration policy is
+ <strong>
+ disabled
+ </strong>
+ </span>
+ </div>
+ </gl-form-group-stub>
+
+ <gl-form-group-stub
+ id="expiration-policy-interval-group"
+ label="Expiration interval:"
+ label-align="right"
+ label-cols="3"
+ label-for="expiration-policy-interval"
+ >
+ <gl-form-select-stub
+ disabled="true"
+ id="expiration-policy-interval"
+ >
+ <option
+ value="foo"
+ >
+
+ Foo
+
+ </option>
+ <option
+ value="bar"
+ >
+
+ Bar
+
+ </option>
+ </gl-form-select-stub>
+ </gl-form-group-stub>
+ <gl-form-group-stub
+ id="expiration-policy-schedule-group"
+ label="Expiration schedule:"
+ label-align="right"
+ label-cols="3"
+ label-for="expiration-policy-schedule"
+ >
+ <gl-form-select-stub
+ disabled="true"
+ id="expiration-policy-schedule"
+ >
+ <option
+ value="foo"
+ >
+
+ Foo
+
+ </option>
+ <option
+ value="bar"
+ >
+
+ Bar
+
+ </option>
+ </gl-form-select-stub>
+ </gl-form-group-stub>
+ <gl-form-group-stub
+ id="expiration-policy-latest-group"
+ label="Number of tags to retain:"
+ label-align="right"
+ label-cols="3"
+ label-for="expiration-policy-latest"
+ >
+ <gl-form-select-stub
+ disabled="true"
+ id="expiration-policy-latest"
+ >
+ <option
+ value="foo"
+ >
+
+ Foo
+
+ </option>
+ <option
+ value="bar"
+ >
+
+ Bar
+
+ </option>
+ </gl-form-select-stub>
+ </gl-form-group-stub>
+
+ <gl-form-group-stub
+ id="expiration-policy-name-matching-group"
+ invalid-feedback="The value of this input should be less than 255 characters"
+ label="Docker tags with names matching this regex pattern will expire:"
+ label-align="right"
+ label-cols="3"
+ label-for="expiration-policy-name-matching"
+ >
+ <gl-form-textarea-stub
+ disabled="true"
+ id="expiration-policy-name-matching"
+ placeholder=".*"
+ trim=""
+ value=""
+ />
+ </gl-form-group-stub>
+</div>
+`;
diff --git a/spec/frontend/registry/shared/components/expiration_policy_fields_spec.js b/spec/frontend/registry/shared/components/expiration_policy_fields_spec.js
new file mode 100644
index 00000000000..3782bfeaac4
--- /dev/null
+++ b/spec/frontend/registry/shared/components/expiration_policy_fields_spec.js
@@ -0,0 +1,172 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlSprintf } from '@gitlab/ui';
+import component from '~/registry/shared/components/expiration_policy_fields.vue';
+
+import { NAME_REGEX_LENGTH } from '~/registry/shared/constants';
+import { formOptions } from '../mock_data';
+
+describe('Expiration Policy Form', () => {
+ let wrapper;
+
+ const FORM_ELEMENTS_ID_PREFIX = '#expiration-policy';
+
+ const findFormGroup = name => wrapper.find(`${FORM_ELEMENTS_ID_PREFIX}-${name}-group`);
+ const findFormElements = (name, parent = wrapper) =>
+ parent.find(`${FORM_ELEMENTS_ID_PREFIX}-${name}`);
+
+ const mountComponent = props => {
+ wrapper = shallowMount(component, {
+ stubs: {
+ GlSprintf,
+ },
+ propsData: {
+ formOptions,
+ ...props,
+ },
+ methods: {
+ // override idGenerator to avoid having to test with dynamic uid
+ idGenerator: value => value,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders', () => {
+ mountComponent();
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe.each`
+ elementName | modelName | value | disabledByToggle
+ ${'toggle'} | ${'enabled'} | ${true} | ${'not disabled'}
+ ${'interval'} | ${'older_than'} | ${'foo'} | ${'disabled'}
+ ${'schedule'} | ${'cadence'} | ${'foo'} | ${'disabled'}
+ ${'latest'} | ${'keep_n'} | ${'foo'} | ${'disabled'}
+ ${'name-matching'} | ${'name_regex'} | ${'foo'} | ${'disabled'}
+ `(
+ `${FORM_ELEMENTS_ID_PREFIX}-$elementName form element`,
+ ({ elementName, modelName, value, disabledByToggle }) => {
+ it(`${elementName} form group exists in the DOM`, () => {
+ mountComponent();
+ const formGroup = findFormGroup(elementName);
+ expect(formGroup.exists()).toBe(true);
+ });
+
+ it(`${elementName} form group has a label-for property`, () => {
+ mountComponent();
+ const formGroup = findFormGroup(elementName);
+ expect(formGroup.attributes('label-for')).toBe(`expiration-policy-${elementName}`);
+ });
+
+ it(`${elementName} form group has a label-cols property`, () => {
+ mountComponent({ labelCols: '1' });
+ const formGroup = findFormGroup(elementName);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(formGroup.attributes('label-cols')).toBe('1');
+ });
+ });
+
+ it(`${elementName} form group has a label-align property`, () => {
+ mountComponent({ labelAlign: 'foo' });
+ const formGroup = findFormGroup(elementName);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(formGroup.attributes('label-align')).toBe('foo');
+ });
+ });
+
+ it(`${elementName} form group contains an input element`, () => {
+ mountComponent();
+ const formGroup = findFormGroup(elementName);
+ expect(findFormElements(elementName, formGroup).exists()).toBe(true);
+ });
+
+ it(`${elementName} form element change updates ${modelName} with ${value}`, () => {
+ mountComponent();
+ const formGroup = findFormGroup(elementName);
+ const element = findFormElements(elementName, formGroup);
+
+ const modelUpdateEvent = element.vm.$options.model
+ ? element.vm.$options.model.event
+ : 'input';
+ element.vm.$emit(modelUpdateEvent, value);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('input')).toEqual([[{ [modelName]: value }]]);
+ });
+ });
+
+ it(`${elementName} is ${disabledByToggle} when enabled is set to false`, () => {
+ mountComponent({ settings: { enabled: false } });
+ const formGroup = findFormGroup(elementName);
+ const expectation = disabledByToggle === 'disabled' ? 'true' : undefined;
+ expect(findFormElements(elementName, formGroup).attributes('disabled')).toBe(expectation);
+ });
+ },
+ );
+
+ describe('when isLoading is true', () => {
+ beforeEach(() => {
+ mountComponent({ isLoading: true });
+ });
+
+ it.each`
+ elementName
+ ${'toggle'}
+ ${'interval'}
+ ${'schedule'}
+ ${'latest'}
+ ${'name-matching'}
+ `(`${FORM_ELEMENTS_ID_PREFIX}-$elementName is disabled`, ({ elementName }) => {
+ expect(findFormElements(elementName).attributes('disabled')).toBe('true');
+ });
+ });
+
+ describe('form validation', () => {
+ describe(`when name regex is longer than ${NAME_REGEX_LENGTH}`, () => {
+ const invalidString = new Array(NAME_REGEX_LENGTH + 2).join(',');
+
+ beforeEach(() => {
+ mountComponent({ value: { name_regex: invalidString } });
+ });
+
+ it('nameRegexState is false', () => {
+ expect(wrapper.vm.nameRegexState).toBe(false);
+ });
+
+ it('emits the @invalidated event', () => {
+ expect(wrapper.emitted('invalidated')).toBeTruthy();
+ });
+ });
+
+ it('if the user did not type, validation state is null', () => {
+ mountComponent({ value: { name_regex: '' } });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.nameRegexState).toBe(null);
+ expect(wrapper.emitted('validated')).toBeTruthy();
+ });
+ });
+
+ it(`if the user typed a value shorter than ${NAME_REGEX_LENGTH}, state is true`, () => {
+ mountComponent({ value: { name_regex: 'foo' } });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.nameRegexState).toBe(true);
+ });
+ });
+ });
+
+ describe('help text', () => {
+ it('toggleDescriptionText shows disabled when settings.enabled is false', () => {
+ mountComponent();
+ const toggleHelpText = findFormGroup('toggle').find('span');
+ expect(toggleHelpText.html()).toContain('disabled');
+ });
+
+ it('toggleDescriptionText shows enabled when settings.enabled is true', () => {
+ mountComponent({ value: { enabled: true } });
+ const toggleHelpText = findFormGroup('toggle').find('span');
+ expect(toggleHelpText.html()).toContain('enabled');
+ });
+ });
+});
diff --git a/spec/frontend/registry/settings/mock_data.js b/spec/frontend/registry/shared/mock_data.js
index 411363c2c95..411363c2c95 100644
--- a/spec/frontend/registry/settings/mock_data.js
+++ b/spec/frontend/registry/shared/mock_data.js
diff --git a/spec/frontend/releases/detail/components/app_spec.js b/spec/frontend/releases/components/app_edit_spec.js
index fd5239ad44e..cb940facbd6 100644
--- a/spec/frontend/releases/detail/components/app_spec.js
+++ b/spec/frontend/releases/components/app_edit_spec.js
@@ -1,10 +1,10 @@
import Vuex from 'vuex';
import { mount } from '@vue/test-utils';
-import ReleaseDetailApp from '~/releases/detail/components/app';
-import { release } from '../../mock_data';
+import ReleaseEditApp from '~/releases/components/app_edit.vue';
+import { release } from '../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-describe('Release detail component', () => {
+describe('Release edit component', () => {
let wrapper;
let releaseClone;
let actions;
@@ -27,9 +27,17 @@ describe('Release detail component', () => {
navigateToReleasesPage: jest.fn(),
};
- const store = new Vuex.Store({ actions, state });
+ const store = new Vuex.Store({
+ modules: {
+ detail: {
+ namespaced: true,
+ actions,
+ state,
+ },
+ },
+ });
- wrapper = mount(ReleaseDetailApp, {
+ wrapper = mount(ReleaseEditApp, {
store,
});
diff --git a/spec/frontend/releases/list/components/evidence_block_spec.js b/spec/frontend/releases/components/evidence_block_spec.js
index 39f3975f665..7b896575965 100644
--- a/spec/frontend/releases/list/components/evidence_block_spec.js
+++ b/spec/frontend/releases/components/evidence_block_spec.js
@@ -2,8 +2,8 @@ import { mount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
import { truncateSha } from '~/lib/utils/text_utility';
import Icon from '~/vue_shared/components/icon.vue';
-import { release } from '../../mock_data';
-import EvidenceBlock from '~/releases/list/components/evidence_block.vue';
+import { release } from '../mock_data';
+import EvidenceBlock from '~/releases/components/evidence_block.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
describe('Evidence Block', () => {
diff --git a/spec/frontend/releases/list/components/release_block_footer_spec.js b/spec/frontend/releases/components/release_block_footer_spec.js
index 07f61303e33..4125d5c7e74 100644
--- a/spec/frontend/releases/list/components/release_block_footer_spec.js
+++ b/spec/frontend/releases/components/release_block_footer_spec.js
@@ -1,9 +1,9 @@
import { mount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
import { trimText } from 'helpers/text_helper';
-import ReleaseBlockFooter from '~/releases/list/components/release_block_footer.vue';
+import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue';
import Icon from '~/vue_shared/components/icon.vue';
-import { release } from '../../mock_data';
+import { release } from '../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
jest.mock('~/vue_shared/mixins/timeago', () => ({
diff --git a/spec/frontend/releases/components/release_block_header_spec.js b/spec/frontend/releases/components/release_block_header_spec.js
new file mode 100644
index 00000000000..157df15ff3c
--- /dev/null
+++ b/spec/frontend/releases/components/release_block_header_spec.js
@@ -0,0 +1,56 @@
+import { shallowMount } from '@vue/test-utils';
+import { cloneDeep, merge } from 'lodash';
+import { GlLink } from '@gitlab/ui';
+import ReleaseBlockHeader from '~/releases/components/release_block_header.vue';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import { release as originalRelease } from '../mock_data';
+
+describe('Release block header', () => {
+ let wrapper;
+ let release;
+
+ const factory = (releaseUpdates = {}) => {
+ wrapper = shallowMount(ReleaseBlockHeader, {
+ propsData: {
+ release: merge({}, release, releaseUpdates),
+ },
+ });
+ };
+
+ beforeEach(() => {
+ release = convertObjectPropsToCamelCase(cloneDeep(originalRelease), {
+ ignoreKeyNames: ['_links'],
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findHeader = () => wrapper.find('h2');
+ const findHeaderLink = () => findHeader().find(GlLink);
+
+ describe('when _links.self is provided', () => {
+ beforeEach(() => {
+ factory();
+ });
+
+ it('renders the title as a link', () => {
+ const link = findHeaderLink();
+
+ expect(link.text()).toBe(release.name);
+ expect(link.attributes('href')).toBe(release._links.self);
+ });
+ });
+
+ describe('when _links.self is missing', () => {
+ beforeEach(() => {
+ factory({ _links: { self: null } });
+ });
+
+ it('renders the title as text', () => {
+ expect(findHeader().text()).toBe(release.name);
+ expect(findHeaderLink().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/releases/list/components/release_block_milestone_info_spec.js b/spec/frontend/releases/components/release_block_milestone_info_spec.js
index 8a63dbbdca7..5a3204a4ce2 100644
--- a/spec/frontend/releases/list/components/release_block_milestone_info_spec.js
+++ b/spec/frontend/releases/components/release_block_milestone_info_spec.js
@@ -1,9 +1,9 @@
import { mount } from '@vue/test-utils';
import { GlProgressBar, GlLink, GlBadge, GlButton } from '@gitlab/ui';
import { trimText } from 'helpers/text_helper';
-import ReleaseBlockMilestoneInfo from '~/releases/list/components/release_block_milestone_info.vue';
-import { milestones } from '../../mock_data';
-import { MAX_MILESTONES_TO_DISPLAY } from '~/releases/list/constants';
+import ReleaseBlockMilestoneInfo from '~/releases/components/release_block_milestone_info.vue';
+import { milestones } from '../mock_data';
+import { MAX_MILESTONES_TO_DISPLAY } from '~/releases/constants';
describe('Release block milestone info', () => {
let wrapper;
diff --git a/spec/frontend/releases/list/components/release_block_spec.js b/spec/frontend/releases/components/release_block_spec.js
index 20c25a4aac2..aba1b8aff41 100644
--- a/spec/frontend/releases/list/components/release_block_spec.js
+++ b/spec/frontend/releases/components/release_block_spec.js
@@ -1,10 +1,11 @@
+import $ from 'jquery';
import { mount } from '@vue/test-utils';
import { first } from 'underscore';
-import EvidenceBlock from '~/releases/list/components/evidence_block.vue';
-import ReleaseBlock from '~/releases/list/components/release_block.vue';
-import ReleaseBlockFooter from '~/releases/list/components/release_block_footer.vue';
+import EvidenceBlock from '~/releases/components/evidence_block.vue';
+import ReleaseBlock from '~/releases/components/release_block.vue';
+import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue';
import timeagoMixin from '~/vue_shared/mixins/timeago';
-import { release } from '../../mock_data';
+import { release } from '../mock_data';
import Icon from '~/vue_shared/components/icon.vue';
import { scrollToElement } from '~/lib/utils/common_utils';
@@ -43,6 +44,7 @@ describe('Release block', () => {
const editButton = () => wrapper.find('.js-edit-button');
beforeEach(() => {
+ jest.spyOn($.fn, 'renderGFM');
releaseClone = JSON.parse(JSON.stringify(release));
});
@@ -66,6 +68,11 @@ describe('Release block', () => {
expect(wrapper.text()).toContain(release.name);
});
+ it('renders release description', () => {
+ expect(wrapper.vm.$refs['gfm-content']).toBeDefined();
+ expect($.fn.renderGFM).toHaveBeenCalledTimes(1);
+ });
+
it('renders release date', () => {
expect(wrapper.text()).toContain(timeagoMixin.methods.timeFormatted(release.released_at));
});
diff --git a/spec/frontend/releases/mock_data.js b/spec/frontend/releases/mock_data.js
index 630f14d3a73..c2d3f00550a 100644
--- a/spec/frontend/releases/mock_data.js
+++ b/spec/frontend/releases/mock_data.js
@@ -108,6 +108,7 @@ export const release = {
],
},
_links: {
+ self: 'http://0.0.0.0:3001/root/release-test/-/releases/v0.3',
edit_url: 'http://0.0.0.0:3001/root/release-test/-/releases/v0.3/edit',
},
};
diff --git a/spec/frontend/releases/detail/store/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index 5a1447aa4fc..0c2763822c9 100644
--- a/spec/frontend/releases/detail/store/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -1,10 +1,10 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import * as actions from '~/releases/detail/store/actions';
-import * as types from '~/releases/detail/store/mutation_types';
-import { release } from '../../mock_data';
-import state from '~/releases/detail/store/state';
+import * as actions from '~/releases/stores/modules/detail/actions';
+import * as types from '~/releases/stores/modules/detail/mutation_types';
+import { release } from '../../../mock_data';
+import state from '~/releases/stores/modules/detail/state';
import createFlash from '~/flash';
import { redirectTo } from '~/lib/utils/url_utility';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
diff --git a/spec/frontend/releases/detail/store/mutations_spec.js b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
index 106a40c812e..81b2dde75ab 100644
--- a/spec/frontend/releases/detail/store/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
@@ -5,10 +5,10 @@
* is resolved
*/
-import state from '~/releases/detail/store/state';
-import mutations from '~/releases/detail/store/mutations';
-import * as types from '~/releases/detail/store/mutation_types';
-import { release } from '../../mock_data';
+import state from '~/releases/stores/modules/detail/state';
+import mutations from '~/releases/stores/modules/detail/mutations';
+import * as types from '~/releases/stores/modules/detail/mutation_types';
+import { release } from '../../../mock_data';
describe('Release detail mutations', () => {
let stateClone;
diff --git a/spec/frontend/reports/store/utils_spec.js b/spec/frontend/reports/store/utils_spec.js
index f0141b9e162..0d9a8dd4585 100644
--- a/spec/frontend/reports/store/utils_spec.js
+++ b/spec/frontend/reports/store/utils_spec.js
@@ -35,6 +35,16 @@ describe('Reports store utils', () => {
);
});
+ it('should render text for multiple errored results', () => {
+ const name = 'Test summary';
+ const data = { errored: 7, total: 10 };
+ const result = utils.summaryTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Test summary contained 7 failed/error test results out of 10 total tests',
+ );
+ });
+
it('should render text for multiple fixed results', () => {
const name = 'Test summary';
const data = { resolved: 4, total: 10 };
@@ -62,6 +72,27 @@ describe('Reports store utils', () => {
'Test summary contained 1 failed/error test result and 1 fixed test result out of 10 total tests',
);
});
+
+ it('should render text for singular failed, errored, and fixed results', () => {
+ // these will be singular when the copy is updated
+ const name = 'Test summary';
+ const data = { failed: 1, errored: 1, resolved: 1, total: 10 };
+ const result = utils.summaryTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Test summary contained 2 failed/error test results and 1 fixed test result out of 10 total tests',
+ );
+ });
+
+ it('should render text for multiple failed, errored, and fixed results', () => {
+ const name = 'Test summary';
+ const data = { failed: 2, errored: 3, resolved: 4, total: 10 };
+ const result = utils.summaryTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Test summary contained 5 failed/error test results and 4 fixed test results out of 10 total tests',
+ );
+ });
});
describe('reportTextBuilder', () => {
@@ -89,6 +120,14 @@ describe('Reports store utils', () => {
expect(result).toBe('Rspec found 3 failed/error test results out of 10 total tests');
});
+ it('should render text for multiple errored results', () => {
+ const name = 'Rspec';
+ const data = { errored: 7, total: 10 };
+ const result = utils.reportTextBuilder(name, data);
+
+ expect(result).toBe('Rspec found 7 failed/error test results out of 10 total tests');
+ });
+
it('should render text for multiple fixed results', () => {
const name = 'Rspec';
const data = { resolved: 4, total: 10 };
@@ -116,6 +155,27 @@ describe('Reports store utils', () => {
'Rspec found 1 failed/error test result and 1 fixed test result out of 10 total tests',
);
});
+
+ it('should render text for singular failed, errored, and fixed results', () => {
+ // these will be singular when the copy is updated
+ const name = 'Rspec';
+ const data = { failed: 1, errored: 1, resolved: 1, total: 10 };
+ const result = utils.reportTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Rspec found 2 failed/error test results and 1 fixed test result out of 10 total tests',
+ );
+ });
+
+ it('should render text for multiple failed, errored, and fixed results', () => {
+ const name = 'Rspec';
+ const data = { failed: 2, errored: 3, resolved: 4, total: 10 };
+ const result = utils.reportTextBuilder(name, data);
+
+ expect(result).toBe(
+ 'Rspec found 5 failed/error test results and 4 fixed test results out of 10 total tests',
+ );
+ });
});
describe('statusIcon', () => {
diff --git a/spec/frontend/repository/components/breadcrumbs_spec.js b/spec/frontend/repository/components/breadcrumbs_spec.js
index bc2abb3db1a..0271db25468 100644
--- a/spec/frontend/repository/components/breadcrumbs_spec.js
+++ b/spec/frontend/repository/components/breadcrumbs_spec.js
@@ -33,6 +33,17 @@ describe('Repository breadcrumbs component', () => {
expect(vm.findAll(RouterLinkStub).length).toEqual(linkCount);
});
+ it('escapes hash in directory path', () => {
+ factory('app/assets/javascripts#');
+
+ expect(
+ vm
+ .findAll(RouterLinkStub)
+ .at(3)
+ .props('to'),
+ ).toEqual('/-/tree//app/assets/javascripts%23');
+ });
+
it('renders last link as active', () => {
factory('app/assets');
diff --git a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
index 22e353dddc5..2d411fcce79 100644
--- a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
+++ b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
@@ -15,6 +15,57 @@ exports[`Repository table row component renders table row 1`] = `
<a
class="str-truncated"
+ data-qa-selector="file_name_link"
+ href="https://test.com"
+ >
+
+ test
+
+ </a>
+
+ <!---->
+
+ <!---->
+
+ <!---->
+ </td>
+
+ <td
+ class="d-none d-sm-table-cell tree-commit"
+ >
+ <gl-skeleton-loading-stub
+ class="h-auto"
+ lines="1"
+ />
+ </td>
+
+ <td
+ class="tree-time-ago text-right"
+ >
+ <gl-skeleton-loading-stub
+ class="ml-auto h-auto w-50"
+ lines="1"
+ />
+ </td>
+</tr>
+`;
+
+exports[`Repository table row component renders table row for path with special character 1`] = `
+<tr
+ class="tree-item file_1"
+>
+ <td
+ class="tree-item-file-name"
+ >
+ <i
+ aria-label="file"
+ class="fa fa-fw fa-file-text-o"
+ role="img"
+ />
+
+ <a
+ class="str-truncated"
+ data-qa-selector="file_name_link"
href="https://test.com"
>
diff --git a/spec/frontend/repository/components/table/parent_row_spec.js b/spec/frontend/repository/components/table/parent_row_spec.js
index 439c7ff080c..904798e0b83 100644
--- a/spec/frontend/repository/components/table/parent_row_spec.js
+++ b/spec/frontend/repository/components/table/parent_row_spec.js
@@ -31,9 +31,10 @@ describe('Repository parent row component', () => {
});
it.each`
- path | to
- ${'app'} | ${'/tree/master/'}
- ${'app/assets'} | ${'/tree/master/app'}
+ path | to
+ ${'app'} | ${'/-/tree/master/'}
+ ${'app/assets'} | ${'/-/tree/master/app'}
+ ${'app/assets#/test'} | ${'/-/tree/master/app/assets%23'}
`('renders link in $path to $to', ({ path, to }) => {
factory(path);
@@ -48,7 +49,7 @@ describe('Repository parent row component', () => {
vm.find('td').trigger('click');
expect($router.push).toHaveBeenCalledWith({
- path: '/tree/master/app',
+ path: '/-/tree/master/app',
});
});
@@ -60,7 +61,7 @@ describe('Repository parent row component', () => {
vm.find('a').trigger('click');
expect($router.push).not.toHaveBeenCalledWith({
- path: '/tree/master/app',
+ path: '/-/tree/master/app',
});
});
diff --git a/spec/frontend/repository/components/table/row_spec.js b/spec/frontend/repository/components/table/row_spec.js
index b60560366a6..fec9ba3aa2e 100644
--- a/spec/frontend/repository/components/table/row_spec.js
+++ b/spec/frontend/repository/components/table/row_spec.js
@@ -51,6 +51,20 @@ describe('Repository table row component', () => {
});
});
+ it('renders table row for path with special character', () => {
+ factory({
+ id: '1',
+ sha: '123',
+ path: 'test$/test',
+ type: 'file',
+ currentPath: 'test$',
+ });
+
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.element).toMatchSnapshot();
+ });
+ });
+
it.each`
type | component | componentName
${'tree'} | ${RouterLinkStub} | ${'RouterLink'}
@@ -88,13 +102,29 @@ describe('Repository table row component', () => {
vm.trigger('click');
if (pushes) {
- expect($router.push).toHaveBeenCalledWith({ path: '/tree/master/test' });
+ expect($router.push).toHaveBeenCalledWith({ path: '/-/tree/master/test' });
} else {
expect($router.push).not.toHaveBeenCalled();
}
});
});
+ it('pushes new route for directory with hash', () => {
+ factory({
+ id: '1',
+ sha: '123',
+ path: 'test#',
+ type: 'tree',
+ currentPath: '/',
+ });
+
+ return vm.vm.$nextTick().then(() => {
+ vm.trigger('click');
+
+ expect($router.push).toHaveBeenCalledWith({ path: '/-/tree/master/test%23' });
+ });
+ });
+
it.each`
type | pushes
${'tree'} | ${true}
diff --git a/spec/frontend/repository/log_tree_spec.js b/spec/frontend/repository/log_tree_spec.js
index 4271a038680..8da2f39f71f 100644
--- a/spec/frontend/repository/log_tree_spec.js
+++ b/spec/frontend/repository/log_tree_spec.js
@@ -71,7 +71,7 @@ describe('fetchLogsTree', () => {
it('calls axios get', () =>
fetchLogsTree(client, '', '0', resolver).then(() => {
- expect(axios.get).toHaveBeenCalledWith('/gitlab-org/gitlab-foss/refs/master/logs_tree/', {
+ expect(axios.get).toHaveBeenCalledWith('/gitlab-org/gitlab-foss/-/refs/master/logs_tree/', {
params: { format: 'json', offset: '0' },
});
}));
diff --git a/spec/frontend/repository/router_spec.js b/spec/frontend/repository/router_spec.js
index f61a0ccd1e6..8f3ac53c37a 100644
--- a/spec/frontend/repository/router_spec.js
+++ b/spec/frontend/repository/router_spec.js
@@ -4,11 +4,12 @@ import createRouter from '~/repository/router';
describe('Repository router spec', () => {
it.each`
- path | component | componentName
- ${'/'} | ${IndexPage} | ${'IndexPage'}
- ${'/tree/master'} | ${TreePage} | ${'TreePage'}
- ${'/tree/master/app/assets'} | ${TreePage} | ${'TreePage'}
- ${'/tree/123/app/assets'} | ${null} | ${'null'}
+ path | component | componentName
+ ${'/'} | ${IndexPage} | ${'IndexPage'}
+ ${'/tree/master'} | ${TreePage} | ${'TreePage'}
+ ${'/-/tree/master'} | ${TreePage} | ${'TreePage'}
+ ${'/-/tree/master/app/assets'} | ${TreePage} | ${'TreePage'}
+ ${'/-/tree/123/app/assets'} | ${null} | ${'null'}
`('sets component as $componentName for path "$path"', ({ path, component }) => {
const router = createRouter('', 'master');
diff --git a/spec/frontend/repository/utils/dom_spec.js b/spec/frontend/repository/utils/dom_spec.js
index bf98a9e1a4d..0b61161c9d0 100644
--- a/spec/frontend/repository/utils/dom_spec.js
+++ b/spec/frontend/repository/utils/dom_spec.js
@@ -20,11 +20,18 @@ describe('updateElementsVisibility', () => {
});
describe('updateFormAction', () => {
- it('updates form action', () => {
+ it.each`
+ path
+ ${'/test'}
+ ${'test'}
+ ${'/'}
+ `('updates form action for $path', ({ path }) => {
setHTMLFixture('<form class="js-test" action="/"></form>');
- updateFormAction('.js-test', '/gitlab/create', '/test');
+ updateFormAction('.js-test', '/gitlab/create', path);
- expect(document.querySelector('.js-test').action).toBe('http://localhost/gitlab/create/test');
+ expect(document.querySelector('.js-test').action).toBe(
+ `http://localhost/gitlab/create/${path.replace(/^\//, '')}`,
+ );
});
});
diff --git a/spec/frontend/repository/utils/title_spec.js b/spec/frontend/repository/utils/title_spec.js
index 63035933424..a1213c13be8 100644
--- a/spec/frontend/repository/utils/title_spec.js
+++ b/spec/frontend/repository/utils/title_spec.js
@@ -1,4 +1,4 @@
-import { setTitle } from '~/repository/utils/title';
+import { setTitle, updateRefPortionOfTitle } from '~/repository/utils/title';
describe('setTitle', () => {
it.each`
@@ -13,3 +13,26 @@ describe('setTitle', () => {
expect(document.title).toEqual(`${title} · master · GitLab Org / GitLab · GitLab`);
});
});
+
+describe('updateRefPortionOfTitle', () => {
+ const sha = 'abc';
+ const testCases = [
+ [
+ 'updates the title with the SHA',
+ { title: 'part 1 · part 2 · part 3' },
+ 'part 1 · abc · part 3',
+ ],
+ ["makes no change if there's no title", { foo: null }, undefined],
+ [
+ "makes no change if the title doesn't split predictably",
+ { title: 'part 1 - part 2 - part 3' },
+ 'part 1 - part 2 - part 3',
+ ],
+ ];
+
+ it.each(testCases)('%s', (desc, doc, title) => {
+ updateRefPortionOfTitle(sha, doc);
+
+ expect(doc.title).toEqual(title);
+ });
+});
diff --git a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_spec.js.snap b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_spec.js.snap
index 1d0f0c024d6..955716ccbca 100644
--- a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_spec.js.snap
+++ b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_spec.js.snap
@@ -17,6 +17,8 @@ exports[`self monitor component When the self monitor project has not been creat
<gl-button-stub
class="js-settings-toggle"
+ size="md"
+ variant="secondary"
>
Expand
</gl-button-stub>
@@ -47,6 +49,7 @@ exports[`self monitor component When the self monitor project has not been creat
<gl-toggle-stub
labeloff="Toggle Status: OFF"
labelon="Toggle Status: ON"
+ labelposition="hidden"
name="self-monitor-toggle"
/>
</gl-form-group-stub>
@@ -59,6 +62,7 @@ exports[`self monitor component When the self monitor project has not been creat
modalid="delete-self-monitor-modal"
ok-title="Delete project"
ok-variant="danger"
+ size="md"
title="Disable self monitoring?"
titletag="h4"
>
diff --git a/spec/frontend/self_monitor/components/self_monitor_spec.js b/spec/frontend/self_monitor/components/self_monitor_spec.js
index b95c7514047..50b97ae914d 100644
--- a/spec/frontend/self_monitor/components/self_monitor_spec.js
+++ b/spec/frontend/self_monitor/components/self_monitor_spec.js
@@ -11,7 +11,7 @@ describe('self monitor component', () => {
beforeEach(() => {
store = createStore({
projectEnabled: false,
- selfMonitorProjectCreated: false,
+ selfMonitoringProjectExists: false,
createSelfMonitoringProjectPath: '/create',
deleteSelfMonitoringProjectPath: '/delete',
});
@@ -69,7 +69,7 @@ describe('self monitor component', () => {
it('renders the form description with a link', () => {
store = createStore({
projectEnabled: true,
- selfMonitorProjectCreated: true,
+ selfMonitoringProjectExists: true,
createSelfMonitoringProjectPath: '/create',
deleteSelfMonitoringProjectPath: '/delete',
});
diff --git a/spec/frontend/self_monitor/store/actions_spec.js b/spec/frontend/self_monitor/store/actions_spec.js
index 344dbf11954..0326ca6f415 100644
--- a/spec/frontend/self_monitor/store/actions_spec.js
+++ b/spec/frontend/self_monitor/store/actions_spec.js
@@ -140,7 +140,12 @@ describe('self monitor actions', () => {
{ type: types.SET_SHOW_ALERT, payload: true },
{ type: types.SET_PROJECT_CREATED, payload: true },
],
- [],
+ [
+ {
+ payload: true,
+ type: 'setSelfMonitor',
+ },
+ ],
done,
);
});
diff --git a/spec/frontend/sidebar/confidential_issue_sidebar_spec.js b/spec/frontend/sidebar/confidential_issue_sidebar_spec.js
index 13b7c426366..4853d9795b1 100644
--- a/spec/frontend/sidebar/confidential_issue_sidebar_spec.js
+++ b/spec/frontend/sidebar/confidential_issue_sidebar_spec.js
@@ -4,7 +4,7 @@ import ConfidentialIssueSidebar from '~/sidebar/components/confidential/confiden
import EditForm from '~/sidebar/components/confidential/edit_form.vue';
import SidebarService from '~/sidebar/services/sidebar_service';
import createFlash from '~/flash';
-import RecaptchaModal from '~/vue_shared/components/recaptcha_modal';
+import RecaptchaModal from '~/vue_shared/components/recaptcha_modal.vue';
jest.mock('~/flash');
jest.mock('~/sidebar/services/sidebar_service');
diff --git a/spec/frontend/snippet/collapsible_input_spec.js b/spec/frontend/snippet/collapsible_input_spec.js
new file mode 100644
index 00000000000..acd15164c95
--- /dev/null
+++ b/spec/frontend/snippet/collapsible_input_spec.js
@@ -0,0 +1,104 @@
+import setupCollapsibleInputs from '~/snippet/collapsible_input';
+import { setHTMLFixture } from 'helpers/fixtures';
+
+describe('~/snippet/collapsible_input', () => {
+ let formEl;
+ let descriptionEl;
+ let titleEl;
+ let fooEl;
+
+ beforeEach(() => {
+ setHTMLFixture(`
+ <form>
+ <div class="js-collapsible-input js-title">
+ <div class="js-collapsed d-none">
+ <input type="text" />
+ </div>
+ <div class="js-expanded">
+ <textarea>Hello World!</textarea>
+ </div>
+ </div>
+ <div class="js-collapsible-input js-description">
+ <div class="js-collapsed">
+ <input type="text" />
+ </div>
+ <div class="js-expanded d-none">
+ <textarea></textarea>
+ </div>
+ </div>
+ <input type="text" class="js-foo" />
+ </form>
+ `);
+
+ formEl = document.querySelector('form');
+ titleEl = formEl.querySelector('.js-title');
+ descriptionEl = formEl.querySelector('.js-description');
+ fooEl = formEl.querySelector('.js-foo');
+
+ setupCollapsibleInputs();
+ });
+
+ const findInput = el => el.querySelector('textarea,input');
+ const findCollapsed = el => el.querySelector('.js-collapsed');
+ const findExpanded = el => el.querySelector('.js-expanded');
+ const findCollapsedInput = el => findInput(findCollapsed(el));
+ const findExpandedInput = el => findInput(findExpanded(el));
+ const focusIn = target => target.dispatchEvent(new Event('focusin', { bubbles: true }));
+ const expectIsCollapsed = (el, isCollapsed) => {
+ expect(findCollapsed(el).classList.contains('d-none')).toEqual(!isCollapsed);
+ expect(findExpanded(el).classList.contains('d-none')).toEqual(isCollapsed);
+ };
+
+ describe('when collapsed', () => {
+ it('is collapsed', () => {
+ expectIsCollapsed(descriptionEl, true);
+ });
+
+ describe('when focused', () => {
+ beforeEach(() => {
+ focusIn(findCollapsedInput(descriptionEl));
+ });
+
+ it('is expanded', () => {
+ expectIsCollapsed(descriptionEl, false);
+ });
+
+ describe.each`
+ desc | value | isCollapsed
+ ${'is collapsed'} | ${''} | ${true}
+ ${'stays open if given value'} | ${'Hello world!'} | ${false}
+ `('when loses focus', ({ desc, value, isCollapsed }) => {
+ it(desc, () => {
+ findExpandedInput(descriptionEl).value = value;
+ focusIn(fooEl);
+
+ expectIsCollapsed(descriptionEl, isCollapsed);
+ });
+ });
+ });
+ });
+
+ describe('when expanded and has value', () => {
+ it('does not collapse when focusing out', () => {
+ expectIsCollapsed(titleEl, false);
+
+ focusIn(fooEl);
+
+ expectIsCollapsed(titleEl, false);
+ });
+
+ describe('and loses value', () => {
+ beforeEach(() => {
+ findExpandedInput(titleEl).value = '';
+ });
+
+ it('collapses when focusing out', () => {
+ expectIsCollapsed(titleEl, false);
+
+ focusIn(fooEl);
+
+ expectIsCollapsed(titleEl, true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/snippets/components/app_spec.js b/spec/frontend/snippets/components/app_spec.js
index 6576e5b075f..a683ed9aaba 100644
--- a/spec/frontend/snippets/components/app_spec.js
+++ b/spec/frontend/snippets/components/app_spec.js
@@ -1,5 +1,7 @@
import SnippetApp from '~/snippets/components/app.vue';
import SnippetHeader from '~/snippets/components/snippet_header.vue';
+import SnippetTitle from '~/snippets/components/snippet_title.vue';
+import SnippetBlob from '~/snippets/components/snippet_blob_view.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
@@ -35,8 +37,10 @@ describe('Snippet view app', () => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
});
- it('renders SnippetHeader component after the query is finished', () => {
+ it('renders all components after the query is finished', () => {
createComponent();
expect(wrapper.find(SnippetHeader).exists()).toBe(true);
+ expect(wrapper.find(SnippetTitle).exists()).toBe(true);
+ expect(wrapper.find(SnippetBlob).exists()).toBe(true);
});
});
diff --git a/spec/frontend/snippets/components/snippet_blob_view_spec.js b/spec/frontend/snippets/components/snippet_blob_view_spec.js
new file mode 100644
index 00000000000..c4f1dd0ca35
--- /dev/null
+++ b/spec/frontend/snippets/components/snippet_blob_view_spec.js
@@ -0,0 +1,179 @@
+import { mount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
+import SnippetBlobView from '~/snippets/components/snippet_blob_view.vue';
+import BlobHeader from '~/blob/components/blob_header.vue';
+import BlobEmbeddable from '~/blob/components/blob_embeddable.vue';
+import BlobContent from '~/blob/components/blob_content.vue';
+import { RichViewer, SimpleViewer } from '~/vue_shared/components/blob_viewers';
+import {
+ SNIPPET_VISIBILITY_PRIVATE,
+ SNIPPET_VISIBILITY_INTERNAL,
+ SNIPPET_VISIBILITY_PUBLIC,
+} from '~/snippets/constants';
+
+import { Blob as BlobMock, SimpleViewerMock, RichViewerMock } from 'jest/blob/components/mock_data';
+
+describe('Blob Embeddable', () => {
+ let wrapper;
+ const snippet = {
+ id: 'gid://foo.bar/snippet',
+ webUrl: 'https://foo.bar',
+ visibilityLevel: SNIPPET_VISIBILITY_PUBLIC,
+ };
+ const dataMock = {
+ blob: BlobMock,
+ activeViewerType: SimpleViewerMock.type,
+ };
+
+ function createComponent(
+ props = {},
+ data = dataMock,
+ blobLoading = false,
+ contentLoading = false,
+ ) {
+ const $apollo = {
+ queries: {
+ blob: {
+ loading: blobLoading,
+ },
+ blobContent: {
+ loading: contentLoading,
+ },
+ },
+ };
+
+ wrapper = mount(SnippetBlobView, {
+ propsData: {
+ snippet: {
+ ...snippet,
+ ...props,
+ },
+ },
+ data() {
+ return {
+ ...data,
+ };
+ },
+ mocks: { $apollo },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('rendering', () => {
+ it('renders correct components', () => {
+ createComponent();
+ expect(wrapper.find(BlobEmbeddable).exists()).toBe(true);
+ expect(wrapper.find(BlobHeader).exists()).toBe(true);
+ expect(wrapper.find(BlobContent).exists()).toBe(true);
+ });
+
+ it.each([SNIPPET_VISIBILITY_INTERNAL, SNIPPET_VISIBILITY_PRIVATE, 'foo'])(
+ 'does not render blob-embeddable by default',
+ visibilityLevel => {
+ createComponent({
+ visibilityLevel,
+ });
+ expect(wrapper.find(BlobEmbeddable).exists()).toBe(false);
+ },
+ );
+
+ it('does render blob-embeddable for public snippet', () => {
+ createComponent({
+ visibilityLevel: SNIPPET_VISIBILITY_PUBLIC,
+ });
+ expect(wrapper.find(BlobEmbeddable).exists()).toBe(true);
+ });
+
+ it('shows loading icon while blob data is in flight', () => {
+ createComponent({}, dataMock, true);
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.find('.snippet-file-content').exists()).toBe(false);
+ });
+
+ it('sets simple viewer correctly', () => {
+ createComponent();
+ expect(wrapper.find(SimpleViewer).exists()).toBe(true);
+ });
+
+ it('sets rich viewer correctly', () => {
+ const data = Object.assign({}, dataMock, {
+ activeViewerType: RichViewerMock.type,
+ });
+ createComponent({}, data);
+ expect(wrapper.find(RichViewer).exists()).toBe(true);
+ });
+
+ it('correctly switches viewer type', () => {
+ createComponent();
+ expect(wrapper.find(SimpleViewer).exists()).toBe(true);
+
+ wrapper.vm.switchViewer(RichViewerMock.type);
+
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(wrapper.find(RichViewer).exists()).toBe(true);
+ wrapper.vm.switchViewer(SimpleViewerMock.type);
+ })
+ .then(() => {
+ expect(wrapper.find(SimpleViewer).exists()).toBe(true);
+ });
+ });
+
+ describe('URLS with hash', () => {
+ beforeEach(() => {
+ window.location.hash = '#LC2';
+ });
+
+ afterEach(() => {
+ window.location.hash = '';
+ });
+
+ it('renders simple viewer by default if URL contains hash', () => {
+ createComponent();
+
+ expect(wrapper.vm.activeViewerType).toBe(SimpleViewerMock.type);
+ expect(wrapper.find(SimpleViewer).exists()).toBe(true);
+ });
+
+ describe('switchViewer()', () => {
+ it('by default switches to the passed viewer', () => {
+ createComponent();
+
+ wrapper.vm.switchViewer(RichViewerMock.type);
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(wrapper.vm.activeViewerType).toBe(RichViewerMock.type);
+ expect(wrapper.find(RichViewer).exists()).toBe(true);
+
+ wrapper.vm.switchViewer(SimpleViewerMock.type);
+ })
+ .then(() => {
+ expect(wrapper.vm.activeViewerType).toBe(SimpleViewerMock.type);
+ expect(wrapper.find(SimpleViewer).exists()).toBe(true);
+ });
+ });
+
+ it('respects hash over richViewer in the blob when corresponding parameter is passed', () => {
+ createComponent(
+ {},
+ {
+ blob: BlobMock,
+ },
+ );
+ expect(wrapper.vm.blob.richViewer).toEqual(expect.any(Object));
+
+ wrapper.vm.switchViewer(RichViewerMock.type, true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.activeViewerType).toBe(SimpleViewerMock.type);
+ expect(wrapper.find(SimpleViewer).exists()).toBe(true);
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_alerts_spec.js b/spec/frontend/vue_alerts_spec.js
new file mode 100644
index 00000000000..b2ee6f895a8
--- /dev/null
+++ b/spec/frontend/vue_alerts_spec.js
@@ -0,0 +1,87 @@
+import Vue from 'vue';
+import initVueAlerts from '~/vue_alerts';
+import { setHTMLFixture } from 'helpers/fixtures';
+import { TEST_HOST } from 'helpers/test_constants';
+
+describe('VueAlerts', () => {
+ const alerts = [
+ {
+ title: 'Lorem',
+ html: 'Lorem <strong>Ipsum</strong>',
+ dismissible: true,
+ primaryButtonText: 'Okay!',
+ primaryButtonLink: `${TEST_HOST}/okay`,
+ variant: 'tip',
+ },
+ {
+ title: 'Hello',
+ html: 'Hello <strong>World</strong>',
+ dismissible: false,
+ primaryButtonText: 'No!',
+ primaryButtonLink: `${TEST_HOST}/no`,
+ variant: 'info',
+ },
+ ];
+
+ beforeEach(() => {
+ setHTMLFixture(
+ alerts
+ .map(
+ x => `
+ <div class="js-vue-alert"
+ data-dismissible="${x.dismissible}"
+ data-title="${x.title}"
+ data-primary-button-text="${x.primaryButtonText}"
+ data-primary-button-link="${x.primaryButtonLink}"
+ data-variant="${x.variant}">${x.html}</div>
+ `,
+ )
+ .join('\n'),
+ );
+ });
+
+ const findJsHooks = () => document.querySelectorAll('.js-vue-alert');
+ const findAlerts = () => document.querySelectorAll('.gl-alert');
+ const findAlertDismiss = alert => alert.querySelector('.gl-alert-dismiss');
+
+ const serializeAlert = alert => ({
+ title: alert.querySelector('.gl-alert-title').textContent.trim(),
+ html: alert.querySelector('.gl-alert-body div').innerHTML,
+ dismissible: Boolean(alert.querySelector('.gl-alert-dismiss')),
+ primaryButtonText: alert.querySelector('.gl-alert-action').textContent.trim(),
+ primaryButtonLink: alert.querySelector('.gl-alert-action').href,
+ variant: [...alert.classList].find(x => x.match('gl-alert-')).replace('gl-alert-', ''),
+ });
+
+ it('starts with only JsHooks', () => {
+ expect(findJsHooks().length).toEqual(alerts.length);
+ expect(findAlerts().length).toEqual(0);
+ });
+
+ describe('when mounted', () => {
+ beforeEach(() => {
+ initVueAlerts();
+ });
+
+ it('replaces JsHook with GlAlert', () => {
+ expect(findJsHooks().length).toEqual(0);
+ expect(findAlerts().length).toEqual(alerts.length);
+ });
+
+ it('passes along props to gl-alert', () => {
+ expect([...findAlerts()].map(serializeAlert)).toEqual(alerts);
+ });
+
+ describe('when dismissed', () => {
+ beforeEach(() => {
+ findAlertDismiss(findAlerts()[0]).click();
+
+ return Vue.nextTick();
+ });
+
+ it('hides the alert', () => {
+ expect(findAlerts().length).toEqual(alerts.length - 1);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
new file mode 100644
index 00000000000..77293a5b187
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
@@ -0,0 +1,52 @@
+import { mount } from '@vue/test-utils';
+import { GlLink } from '@gitlab/ui';
+import suggestPipelineComponent from '~/vue_merge_request_widget/components/mr_widget_suggest_pipeline.vue';
+import MrWidgetIcon from '~/vue_merge_request_widget/components/mr_widget_icon.vue';
+
+describe('MRWidgetHeader', () => {
+ let wrapper;
+ const pipelinePath = '/foo/bar/add/pipeline/path';
+ const iconName = 'status_notfound';
+
+ beforeEach(() => {
+ wrapper = mount(suggestPipelineComponent, {
+ propsData: { pipelinePath },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ it('renders add pipeline file link', () => {
+ const link = wrapper.find(GlLink);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(link.exists()).toBe(true);
+ expect(link.attributes().href).toBe(pipelinePath);
+ });
+ });
+
+ it('renders the expected text', () => {
+ const messageText = /\s*No pipeline\s*Add the .gitlab-ci.yml file\s*to create one./;
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.text()).toMatch(messageText);
+ });
+ });
+
+ it('renders widget icon', () => {
+ const icon = wrapper.find(MrWidgetIcon);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(icon.exists()).toBe(true);
+ expect(icon.props()).toEqual(
+ expect.objectContaining({
+ name: iconName,
+ }),
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
index 2902c8280dd..ec7be6b64fc 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
@@ -56,27 +56,27 @@ describe('Deployment component', () => {
const deployGroup = [DeploymentViewButton, DeploymentStopButton];
describe.each`
- status | previous | deploymentDetails | text | actionButtons
- ${CREATED} | ${true} | ${deployDetail} | ${'Can deploy manually to'} | ${deployGroup}
- ${CREATED} | ${true} | ${noDetails} | ${'Will deploy to'} | ${deployGroup}
- ${CREATED} | ${false} | ${deployDetail} | ${'Can deploy manually to'} | ${noActions}
- ${CREATED} | ${false} | ${noDetails} | ${'Will deploy to'} | ${noActions}
- ${RUNNING} | ${true} | ${deployDetail} | ${'Deploying to'} | ${deployGroup}
- ${RUNNING} | ${true} | ${noDetails} | ${'Deploying to'} | ${deployGroup}
- ${RUNNING} | ${false} | ${deployDetail} | ${'Deploying to'} | ${noActions}
- ${RUNNING} | ${false} | ${noDetails} | ${'Deploying to'} | ${noActions}
- ${SUCCESS} | ${true} | ${deployDetail} | ${'Deployed to'} | ${deployGroup}
- ${SUCCESS} | ${true} | ${noDetails} | ${'Deployed to'} | ${deployGroup}
- ${SUCCESS} | ${false} | ${deployDetail} | ${'Deployed to'} | ${deployGroup}
- ${SUCCESS} | ${false} | ${noDetails} | ${'Deployed to'} | ${deployGroup}
- ${FAILED} | ${true} | ${deployDetail} | ${'Failed to deploy to'} | ${deployGroup}
- ${FAILED} | ${true} | ${noDetails} | ${'Failed to deploy to'} | ${deployGroup}
- ${FAILED} | ${false} | ${deployDetail} | ${'Failed to deploy to'} | ${noActions}
- ${FAILED} | ${false} | ${noDetails} | ${'Failed to deploy to'} | ${noActions}
- ${CANCELED} | ${true} | ${deployDetail} | ${'Canceled deploy to'} | ${deployGroup}
- ${CANCELED} | ${true} | ${noDetails} | ${'Canceled deploy to'} | ${deployGroup}
- ${CANCELED} | ${false} | ${deployDetail} | ${'Canceled deploy to'} | ${noActions}
- ${CANCELED} | ${false} | ${noDetails} | ${'Canceled deploy to'} | ${noActions}
+ status | previous | deploymentDetails | text | actionButtons
+ ${CREATED} | ${true} | ${deployDetail} | ${'Can be manually deployed to'} | ${deployGroup}
+ ${CREATED} | ${true} | ${noDetails} | ${'Will deploy to'} | ${deployGroup}
+ ${CREATED} | ${false} | ${deployDetail} | ${'Can be manually deployed to'} | ${noActions}
+ ${CREATED} | ${false} | ${noDetails} | ${'Will deploy to'} | ${noActions}
+ ${RUNNING} | ${true} | ${deployDetail} | ${'Deploying to'} | ${deployGroup}
+ ${RUNNING} | ${true} | ${noDetails} | ${'Deploying to'} | ${deployGroup}
+ ${RUNNING} | ${false} | ${deployDetail} | ${'Deploying to'} | ${noActions}
+ ${RUNNING} | ${false} | ${noDetails} | ${'Deploying to'} | ${noActions}
+ ${SUCCESS} | ${true} | ${deployDetail} | ${'Deployed to'} | ${deployGroup}
+ ${SUCCESS} | ${true} | ${noDetails} | ${'Deployed to'} | ${deployGroup}
+ ${SUCCESS} | ${false} | ${deployDetail} | ${'Deployed to'} | ${deployGroup}
+ ${SUCCESS} | ${false} | ${noDetails} | ${'Deployed to'} | ${deployGroup}
+ ${FAILED} | ${true} | ${deployDetail} | ${'Failed to deploy to'} | ${deployGroup}
+ ${FAILED} | ${true} | ${noDetails} | ${'Failed to deploy to'} | ${deployGroup}
+ ${FAILED} | ${false} | ${deployDetail} | ${'Failed to deploy to'} | ${noActions}
+ ${FAILED} | ${false} | ${noDetails} | ${'Failed to deploy to'} | ${noActions}
+ ${CANCELED} | ${true} | ${deployDetail} | ${'Canceled deployment to'} | ${deployGroup}
+ ${CANCELED} | ${true} | ${noDetails} | ${'Canceled deployment to'} | ${deployGroup}
+ ${CANCELED} | ${false} | ${deployDetail} | ${'Canceled deployment to'} | ${noActions}
+ ${CANCELED} | ${false} | ${noDetails} | ${'Canceled deployment to'} | ${noActions}
`(
'$status + previous: $previous + manual: $deploymentDetails.isManual',
({ status, previous, deploymentDetails, text, actionButtons }) => {
diff --git a/spec/frontend/vue_shared/components/__snapshots__/code_block_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/code_block_spec.js.snap
new file mode 100644
index 00000000000..5347d1efc48
--- /dev/null
+++ b/spec/frontend/vue_shared/components/__snapshots__/code_block_spec.js.snap
@@ -0,0 +1,16 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Code Block matches snapshot 1`] = `
+<pre
+ class="code-block rounded"
+>
+
+ <code
+ class="d-block"
+ >
+ test-code
+ </code>
+
+
+</pre>
+`;
diff --git a/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
index 3a518029702..2abcc53bf14 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
@@ -1,14 +1,88 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Expand button on click when short text is provided renders button after text 1`] = `
-"<span><button aria-label=\\"Click to expand text\\" type=\\"button\\" class=\\"btn js-text-expander-prepend text-expander btn-blank btn-secondary\\" style=\\"display: none;\\"><svg aria-hidden=\\"true\\" class=\\"s12 ic-ellipsis_h\\"><use xlink:href=\\"#ellipsis_h\\"></use></svg></button> <!----> <span><p>Expanded!</p></span> <button aria-label=\\"Click to expand text\\" type=\\"button\\" class=\\"btn js-text-expander-append text-expander btn-blank btn-secondary\\" style=\\"\\"><svg aria-hidden=\\"true\\" class=\\"s12 ic-ellipsis_h\\">
- <use xlink:href=\\"#ellipsis_h\\"></use>
- </svg></button></span>"
+<span>
+ <button
+ aria-label="Click to expand text"
+ class="btn js-text-expander-prepend text-expander btn-blank btn-secondary btn-md"
+ style="display: none;"
+ type="button"
+ >
+ <svg
+ aria-hidden="true"
+ class="s12 ic-ellipsis_h"
+ >
+ <use
+ xlink:href="#ellipsis_h"
+ />
+ </svg>
+ </button>
+
+ <!---->
+
+ <span>
+ <p>
+ Expanded!
+ </p>
+ </span>
+
+ <button
+ aria-label="Click to expand text"
+ class="btn js-text-expander-append text-expander btn-blank btn-secondary btn-md"
+ style=""
+ type="button"
+ >
+ <svg
+ aria-hidden="true"
+ class="s12 ic-ellipsis_h"
+ >
+ <use
+ xlink:href="#ellipsis_h"
+ />
+ </svg>
+ </button>
+</span>
`;
exports[`Expand button when short text is provided renders button before text 1`] = `
-"<span><button aria-label=\\"Click to expand text\\" type=\\"button\\" class=\\"btn js-text-expander-prepend text-expander btn-blank btn-secondary\\"><svg aria-hidden=\\"true\\" class=\\"s12 ic-ellipsis_h\\"><use xlink:href=\\"#ellipsis_h\\"></use></svg></button> <span><p>Short</p></span>
-<!----> <button aria-label=\\"Click to expand text\\" type=\\"button\\" class=\\"btn js-text-expander-append text-expander btn-blank btn-secondary\\" style=\\"display: none;\\"><svg aria-hidden=\\"true\\" class=\\"s12 ic-ellipsis_h\\">
- <use xlink:href=\\"#ellipsis_h\\"></use>
- </svg></button></span>"
+<span>
+ <button
+ aria-label="Click to expand text"
+ class="btn js-text-expander-prepend text-expander btn-blank btn-secondary btn-md"
+ type="button"
+ >
+ <svg
+ aria-hidden="true"
+ class="s12 ic-ellipsis_h"
+ >
+ <use
+ xlink:href="#ellipsis_h"
+ />
+ </svg>
+ </button>
+
+ <span>
+ <p>
+ Short
+ </p>
+ </span>
+
+ <!---->
+
+ <button
+ aria-label="Click to expand text"
+ class="btn js-text-expander-append text-expander btn-blank btn-secondary btn-md"
+ style="display: none;"
+ type="button"
+ >
+ <svg
+ aria-hidden="true"
+ class="s12 ic-ellipsis_h"
+ >
+ <use
+ xlink:href="#ellipsis_h"
+ />
+ </svg>
+ </button>
+</span>
`;
diff --git a/spec/frontend/vue_shared/components/__snapshots__/identicon_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/identicon_spec.js.snap
new file mode 100644
index 00000000000..72370cb5b52
--- /dev/null
+++ b/spec/frontend/vue_shared/components/__snapshots__/identicon_spec.js.snap
@@ -0,0 +1,11 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Identicon matches snapshot 1`] = `
+<div
+ class="avatar identicon s40 bg2"
+>
+
+ E
+
+</div>
+`;
diff --git a/spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap b/spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap
new file mode 100644
index 00000000000..87f2a8f9eff
--- /dev/null
+++ b/spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap
@@ -0,0 +1,86 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Blob Simple Viewer component rendering matches the snapshot 1`] = `
+<div
+ class="file-content code js-syntax-highlight qa-file-content"
+>
+ <div
+ class="line-numbers"
+ >
+ <a
+ class="diff-line-num js-line-number"
+ data-line-number="1"
+ href="#LC1"
+ id="L1"
+ >
+ <gl-icon-stub
+ name="link"
+ size="12"
+ />
+
+ 1
+
+ </a>
+ <a
+ class="diff-line-num js-line-number"
+ data-line-number="2"
+ href="#LC2"
+ id="L2"
+ >
+ <gl-icon-stub
+ name="link"
+ size="12"
+ />
+
+ 2
+
+ </a>
+ <a
+ class="diff-line-num js-line-number"
+ data-line-number="3"
+ href="#LC3"
+ id="L3"
+ >
+ <gl-icon-stub
+ name="link"
+ size="12"
+ />
+
+ 3
+
+ </a>
+ </div>
+
+ <div
+ class="blob-content"
+ >
+ <pre
+ class="code highlight"
+ >
+ <code
+ id="blob-code-content"
+ >
+ <span
+ id="LC1"
+ >
+ First
+ </span>
+
+
+ <span
+ id="LC2"
+ >
+ Second
+ </span>
+
+
+ <span
+ id="LC3"
+ >
+ Third
+ </span>
+ </code>
+ </pre>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
new file mode 100644
index 00000000000..17ea78b5826
--- /dev/null
+++ b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
@@ -0,0 +1,27 @@
+import { shallowMount } from '@vue/test-utils';
+import RichViewer from '~/vue_shared/components/blob_viewers/rich_viewer.vue';
+
+describe('Blob Rich Viewer component', () => {
+ let wrapper;
+ const content = '<h1 id="markdown">Foo Bar</h1>';
+
+ function createComponent() {
+ wrapper = shallowMount(RichViewer, {
+ propsData: {
+ content,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the passed content without transformations', () => {
+ expect(wrapper.html()).toContain(content);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js b/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
new file mode 100644
index 00000000000..d12bfc5c686
--- /dev/null
+++ b/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
@@ -0,0 +1,81 @@
+import { shallowMount } from '@vue/test-utils';
+import SimpleViewer from '~/vue_shared/components/blob_viewers/simple_viewer.vue';
+import { HIGHLIGHT_CLASS_NAME } from '~/vue_shared/components/blob_viewers/constants';
+
+describe('Blob Simple Viewer component', () => {
+ let wrapper;
+ const contentMock = `<span id="LC1">First</span>\n<span id="LC2">Second</span>\n<span id="LC3">Third</span>`;
+
+ function createComponent(content = contentMock) {
+ wrapper = shallowMount(SimpleViewer, {
+ propsData: {
+ content,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('does not fail if content is empty', () => {
+ const spy = jest.spyOn(window.console, 'error');
+ createComponent('');
+ expect(spy).not.toHaveBeenCalled();
+ });
+
+ describe('rendering', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('matches the snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('renders exactly three lines', () => {
+ expect(wrapper.findAll('.js-line-number')).toHaveLength(3);
+ });
+
+ it('renders the content without transformations', () => {
+ expect(wrapper.html()).toContain(contentMock);
+ });
+ });
+
+ describe('functionality', () => {
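+ // jsdom does not implement scrollIntoView, so it is mocked here to assert scrolling behaviour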
+ const scrollIntoViewMock = jest.fn();
+ HTMLElement.prototype.scrollIntoView = scrollIntoViewMock;
+
+ beforeEach(() => {
+ window.location.hash = '#LC2';
+ createComponent();
+ });
+
+ afterEach(() => {
+ window.location.hash = '';
+ });
+
+ it('scrolls to requested line when rendered', () => {
+ const linetoBeHighlighted = wrapper.find('#LC2');
+ expect(scrollIntoViewMock).toHaveBeenCalled();
+ expect(wrapper.vm.highlightedLine).toBe(linetoBeHighlighted.element);
+ expect(linetoBeHighlighted.classes()).toContain(HIGHLIGHT_CLASS_NAME);
+ });
+
+ it('switches highlighting when another line is selected', () => {
+ const currentlyHighlighted = wrapper.find('#LC2');
+ const hash = '#LC3';
+ const linetoBeHighlighted = wrapper.find(hash);
+
+ expect(wrapper.vm.highlightedLine).toBe(currentlyHighlighted.element);
+
+ wrapper.vm.scrollToLine(hash);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.vm.highlightedLine).toBe(linetoBeHighlighted.element);
+ expect(currentlyHighlighted.classes()).not.toContain(HIGHLIGHT_CLASS_NAME);
+ expect(linetoBeHighlighted.classes()).toContain(HIGHLIGHT_CLASS_NAME);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/changed_file_icon_spec.js b/spec/frontend/vue_shared/components/changed_file_icon_spec.js
index 5d2aec6734f..8258eb8204c 100644
--- a/spec/frontend/vue_shared/components/changed_file_icon_spec.js
+++ b/spec/frontend/vue_shared/components/changed_file_icon_spec.js
@@ -3,8 +3,7 @@ import ChangedFileIcon from '~/vue_shared/components/changed_file_icon.vue';
import Icon from '~/vue_shared/components/icon.vue';
const changedFile = () => ({ changed: true });
-const stagedFile = () => ({ changed: false, staged: true });
-const changedAndStagedFile = () => ({ changed: true, staged: true });
+const stagedFile = () => ({ changed: true, staged: true });
const newFile = () => ({ changed: true, tempFile: true });
const unchangedFile = () => ({ changed: false, tempFile: false, staged: false, deleted: false });
@@ -55,11 +54,10 @@ describe('Changed file icon', () => {
});
describe.each`
- file | iconName | tooltipText | desc
- ${changedFile()} | ${'file-modified'} | ${'Unstaged modification'} | ${'with file changed'}
- ${stagedFile()} | ${'file-modified-solid'} | ${'Staged modification'} | ${'with file staged'}
- ${changedAndStagedFile()} | ${'file-modified'} | ${'Unstaged and staged modification'} | ${'with file changed and staged'}
- ${newFile()} | ${'file-addition'} | ${'Unstaged addition'} | ${'with file new'}
+ file | iconName | tooltipText | desc
+ ${changedFile()} | ${'file-modified'} | ${'Unstaged modification'} | ${'with file changed'}
+ ${stagedFile()} | ${'file-modified-solid'} | ${'Staged modification'} | ${'with file staged'}
+ ${newFile()} | ${'file-addition'} | ${'Unstaged addition'} | ${'with file new'}
`('$desc', ({ file, iconName, tooltipText }) => {
beforeEach(() => {
factory({ file });
diff --git a/spec/frontend/vue_shared/components/clipboard_button_spec.js b/spec/frontend/vue_shared/components/clipboard_button_spec.js
index 37f71867ab9..07ff86828e7 100644
--- a/spec/frontend/vue_shared/components/clipboard_button_spec.js
+++ b/spec/frontend/vue_shared/components/clipboard_button_spec.js
@@ -1,7 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import { GlButton } from '@gitlab/ui';
+import { GlButton, GlIcon } from '@gitlab/ui';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
-import Icon from '~/vue_shared/components/icon.vue';
describe('clipboard button', () => {
let wrapper;
@@ -29,7 +28,7 @@ describe('clipboard button', () => {
it('renders a button for clipboard', () => {
expect(wrapper.find(GlButton).exists()).toBe(true);
expect(wrapper.attributes('data-clipboard-text')).toBe('copy me');
- expect(wrapper.find(Icon).props('name')).toBe('duplicate');
+ expect(wrapper.find(GlIcon).props('name')).toBe('copy-to-clipboard');
});
it('should have a tooltip with default values', () => {
diff --git a/spec/frontend/vue_shared/components/code_block_spec.js b/spec/frontend/vue_shared/components/code_block_spec.js
index 6b91a20ff76..0d21dd94f7c 100644
--- a/spec/frontend/vue_shared/components/code_block_spec.js
+++ b/spec/frontend/vue_shared/components/code_block_spec.js
@@ -1,33 +1,25 @@
-import Vue from 'vue';
-import component from '~/vue_shared/components/code_block.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
+import { shallowMount } from '@vue/test-utils';
+import CodeBlock from '~/vue_shared/components/code_block.vue';
describe('Code Block', () => {
- const Component = Vue.extend(component);
- let vm;
+ let wrapper;
- afterEach(() => {
- vm.$destroy();
- });
-
- it('renders a code block with the provided code', () => {
- const code =
- "Failure/Error: is_expected.to eq(3)\n\n expected: 3\n got: -1\n\n (compared using ==)\n./spec/test_spec.rb:12:in `block (4 levels) in \u003ctop (required)\u003e'";
-
- vm = mountComponent(Component, {
- code,
+ const createComponent = () => {
+ wrapper = shallowMount(CodeBlock, {
+ propsData: {
+ code: 'test-code',
+ },
});
+ };
- expect(vm.$el.querySelector('code').textContent).toEqual(code);
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
});
- it('escapes XSS injections', () => {
- const code = 'CCC&lt;img src=x onerror=alert(document.domain)&gt;';
-
- vm = mountComponent(Component, {
- code,
- });
+ it('matches snapshot', () => {
+ createComponent();
- expect(vm.$el.querySelector('code').textContent).toEqual(code);
+ expect(wrapper.element).toMatchSnapshot();
});
});
diff --git a/spec/frontend/monitoring/components/date_time_picker/date_time_picker_input_spec.js b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_input_spec.js
index 9cac63ad725..2c5bb86d8a5 100644
--- a/spec/frontend/monitoring/components/date_time_picker/date_time_picker_input_spec.js
+++ b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_input_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import DateTimePickerInput from '~/monitoring/components/date_time_picker/date_time_picker_input.vue';
+import DateTimePickerInput from '~/vue_shared/components/date_time_picker/date_time_picker_input.vue';
const inputLabel = 'This is a label';
const inputValue = 'something';
diff --git a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js
new file mode 100644
index 00000000000..3a75ab2d127
--- /dev/null
+++ b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js
@@ -0,0 +1,173 @@
+import * as dateTimePickerLib from '~/vue_shared/components/date_time_picker/date_time_picker_lib';
+
+describe('date time picker lib', () => {
+ describe('isValidDate', () => {
+ [
+ {
+ input: '2019-09-09T00:00:00.000Z',
+ output: true,
+ },
+ {
+ input: '2019-09-09T000:00.000Z',
+ output: false,
+ },
+ {
+ input: 'a2019-09-09T000:00.000Z',
+ output: false,
+ },
+ {
+ input: '2019-09-09T',
+ output: false,
+ },
+ {
+ input: '2019-09-09',
+ output: true,
+ },
+ {
+ input: '2019-9-9',
+ output: true,
+ },
+ {
+ input: '2019-9-',
+ output: true,
+ },
+ {
+ input: '2019--',
+ output: false,
+ },
+ {
+ input: '2019',
+ output: true,
+ },
+ {
+ input: '',
+ output: false,
+ },
+ {
+ input: null,
+ output: false,
+ },
+ ].forEach(({ input, output }) => {
+ it(`isValidDate returns ${output} for ${input}`, () => {
+ expect(dateTimePickerLib.isValidDate(input)).toBe(output);
+ });
+ });
+ });
+
+ describe('stringToISODate', () => {
+ ['', 'null', undefined, 'abc'].forEach(input => {
+ it(`throws error for invalid input like ${input}`, done => {
+ try {
+ dateTimePickerLib.stringToISODate(input);
+ } catch (e) {
+ expect(e).toBeDefined();
+ done();
+ }
+ });
+ });
+ [
+ {
+ input: '2019-09-09 01:01:01',
+ output: '2019-09-09T01:01:01Z',
+ },
+ {
+ input: '2019-09-09 00:00:00',
+ output: '2019-09-09T00:00:00Z',
+ },
+ {
+ input: '2019-09-09 23:59:59',
+ output: '2019-09-09T23:59:59Z',
+ },
+ {
+ input: '2019-09-09',
+ output: '2019-09-09T00:00:00Z',
+ },
+ ].forEach(({ input, output }) => {
+ it(`returns ${output} from ${input}`, () => {
+ expect(dateTimePickerLib.stringToISODate(input)).toBe(output);
+ });
+ });
+ });
+
+ describe('truncateZerosInDateTime', () => {
+ [
+ {
+ input: '',
+ output: '',
+ },
+ {
+ input: '2019-10-10',
+ output: '2019-10-10',
+ },
+ {
+ input: '2019-10-10 00:00:01',
+ output: '2019-10-10 00:00:01',
+ },
+ {
+ input: '2019-10-10 00:00:00',
+ output: '2019-10-10',
+ },
+ ].forEach(({ input, output }) => {
+ it(`truncateZerosInDateTime returns ${output} for ${input}`, () => {
+ expect(dateTimePickerLib.truncateZerosInDateTime(input)).toBe(output);
+ });
+ });
+ });
+
+ describe('isDateTimePickerInputValid', () => {
+ [
+ {
+ input: null,
+ output: false,
+ },
+ {
+ input: '',
+ output: false,
+ },
+ {
+ input: 'xxxx-xx-xx',
+ output: false,
+ },
+ {
+ input: '9999-99-19',
+ output: false,
+ },
+ {
+ input: '2019-19-23',
+ output: false,
+ },
+ {
+ input: '2019-09-23',
+ output: true,
+ },
+ {
+ input: '2019-09-23 x',
+ output: false,
+ },
+ {
+ input: '2019-09-29 0:0:0',
+ output: false,
+ },
+ {
+ input: '2019-09-29 00:00:00',
+ output: true,
+ },
+ {
+ input: '2019-09-29 24:24:24',
+ output: false,
+ },
+ {
+ input: '2019-09-29 23:24:24',
+ output: true,
+ },
+ {
+ input: '2019-09-29 23:24:24 ',
+ output: false,
+ },
+ ].forEach(({ input, output }) => {
+ it(`returns ${output} for ${input}`, () => {
+ expect(dateTimePickerLib.isDateTimePickerInputValid(input)).toBe(output);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js
new file mode 100644
index 00000000000..90130917d8f
--- /dev/null
+++ b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js
@@ -0,0 +1,267 @@
+import { mount } from '@vue/test-utils';
+import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
+import {
+ defaultTimeRanges,
+ defaultTimeRange,
+} from '~/vue_shared/components/date_time_picker/date_time_picker_lib';
+
+const optionsCount = defaultTimeRanges.length;
+
+describe('DateTimePicker', () => {
+ let dateTimePicker;
+
+ const dropdownToggle = () => dateTimePicker.find('.dropdown-toggle');
+ const dropdownMenu = () => dateTimePicker.find('.dropdown-menu');
+ const applyButtonElement = () => dateTimePicker.find('button.btn-success').element;
+ const findQuickRangeItems = () => dateTimePicker.findAll('.dropdown-item');
+ const cancelButtonElement = () => dateTimePicker.find('button.btn-secondary').element;
+
+ const createComponent = props => {
+ dateTimePicker = mount(DateTimePicker, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ dateTimePicker.destroy();
+ });
+
+ it('renders dropdown toggle button with selected text', done => {
+ createComponent();
+ dateTimePicker.vm.$nextTick(() => {
+ expect(dropdownToggle().text()).toBe(defaultTimeRange.label);
+ done();
+ });
+ });
+
+ it('renders dropdown with 2 custom time range inputs', () => {
+ createComponent();
+ return dateTimePicker.vm.$nextTick().then(() => {
+ expect(dateTimePicker.findAll('input').length).toBe(2);
+ });
+ });
+
+ it("renders inputs with h/m/s truncated if it's all 0s", done => {
+ createComponent({
+ value: {
+ start: '2019-10-10T00:00:00.000Z',
+ end: '2019-10-14T00:10:00.000Z',
+ },
+ });
+ dateTimePicker.vm.$nextTick(() => {
+ expect(dateTimePicker.find('#custom-time-from').element.value).toBe('2019-10-10');
+ expect(dateTimePicker.find('#custom-time-to').element.value).toBe('2019-10-14 00:10:00');
+ done();
+ });
+ });
+
+ it(`renders dropdown with ${optionsCount} (default) items in quick range`, done => {
+ createComponent();
+ dropdownToggle().trigger('click');
+ dateTimePicker.vm.$nextTick(() => {
+ expect(findQuickRangeItems().length).toBe(optionsCount);
+ done();
+ });
+ });
+
+ it('renders dropdown with a default quick range item selected', done => {
+ createComponent();
+ dropdownToggle().trigger('click');
+ dateTimePicker.vm.$nextTick(() => {
+ expect(dateTimePicker.find('.dropdown-item.active').exists()).toBe(true);
+ expect(dateTimePicker.find('.dropdown-item.active').text()).toBe(defaultTimeRange.label);
+ done();
+ });
+ });
+
+ it('renders a disabled apply button on wrong input', () => {
+ createComponent({
+ start: 'invalid-input-date',
+ });
+
+ expect(applyButtonElement().getAttribute('disabled')).toBe('disabled');
+ });
+
+ describe('user input', () => {
+ const fillInputAndBlur = (input, val) => {
+ dateTimePicker.find(input).setValue(val);
+ return dateTimePicker.vm.$nextTick().then(() => {
+ dateTimePicker.find(input).trigger('blur');
+ return dateTimePicker.vm.$nextTick();
+ });
+ };
+
+ beforeEach(done => {
+ createComponent();
+ dateTimePicker.vm.$nextTick(done);
+ });
+
+ it('displays inline error message if custom time range inputs are invalid', done => {
+ fillInputAndBlur('#custom-time-from', '2019-10-01abc')
+ .then(() => fillInputAndBlur('#custom-time-to', '2019-10-10abc'))
+ .then(() => {
+ expect(dateTimePicker.findAll('.invalid-feedback').length).toBe(2);
+ done();
+ })
+ .catch(done);
+ });
+
+ it('keeps apply button disabled with invalid custom time range inputs', done => {
+ fillInputAndBlur('#custom-time-from', '2019-10-01abc')
+ .then(() => fillInputAndBlur('#custom-time-to', '2019-09-19'))
+ .then(() => {
+ expect(applyButtonElement().getAttribute('disabled')).toBe('disabled');
+ done();
+ })
+ .catch(done);
+ });
+
+ it('enables apply button with valid custom time range inputs', done => {
+ fillInputAndBlur('#custom-time-from', '2019-10-01')
+ .then(() => fillInputAndBlur('#custom-time-to', '2019-10-19'))
+ .then(() => {
+ expect(applyButtonElement().getAttribute('disabled')).toBeNull();
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('emits dates in an object when apply is clicked', done => {
+ fillInputAndBlur('#custom-time-from', '2019-10-01')
+ .then(() => fillInputAndBlur('#custom-time-to', '2019-10-19'))
+ .then(() => {
+ applyButtonElement().click();
+
+ expect(dateTimePicker.emitted().input).toHaveLength(1);
+ expect(dateTimePicker.emitted().input[0]).toEqual([
+ {
+ end: '2019-10-19T00:00:00Z',
+ start: '2019-10-01T00:00:00Z',
+ },
+ ]);
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('unchecks quick range when text is input', done => {
+ const findActiveItems = () => findQuickRangeItems().filter(w => w.is('.active'));
+
+ expect(findActiveItems().length).toBe(1);
+
+ fillInputAndBlur('#custom-time-from', '2019-10-01')
+ .then(() => {
+ expect(findActiveItems().length).toBe(0);
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('emits dates in an object when a quick range item is clicked', () => {
+ findQuickRangeItems()
+ .at(3) // any item
+ .trigger('click');
+
+ expect(dateTimePicker.emitted().input).toHaveLength(1);
+ expect(dateTimePicker.emitted().input[0][0]).toMatchObject({
+ duration: {
+ seconds: expect.any(Number),
+ },
+ });
+ });
+
+ it('hides the popover with cancel button', done => {
+ dropdownToggle().trigger('click');
+
+ dateTimePicker.vm.$nextTick(() => {
+ cancelButtonElement().click();
+
+ dateTimePicker.vm.$nextTick(() => {
+ expect(dropdownMenu().classes('show')).toBe(false);
+ done();
+ });
+ });
+ });
+ });
+
+ describe('when using non-default time windows', () => {
+ const MOCK_NOW = Date.UTC(2020, 0, 23, 20);
+
+ const otherTimeRanges = [
+ {
+ label: '1 minute',
+ duration: { seconds: 60 },
+ },
+ {
+ label: '2 minutes',
+ duration: { seconds: 60 * 2 },
+ default: true,
+ },
+ {
+ label: '5 minutes',
+ duration: { seconds: 60 * 5 },
+ },
+ ];
+
+ beforeEach(() => {
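+ // freeze Date.now so the computed time ranges in the assertions below are deterministic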
+ jest.spyOn(Date, 'now').mockImplementation(() => MOCK_NOW);
+ });
+
+ it('renders dropdown with a label in the quick range', done => {
+ createComponent({
+ value: {
+ duration: { seconds: 60 * 5 },
+ },
+ options: otherTimeRanges,
+ });
+ dropdownToggle().trigger('click');
+ dateTimePicker.vm.$nextTick(() => {
+ expect(dropdownToggle().text()).toBe('5 minutes');
+
+ done();
+ });
+ });
+
+ it('renders dropdown with quick range items', done => {
+ createComponent({
+ value: {
+ duration: { seconds: 60 * 2 },
+ },
+ options: otherTimeRanges,
+ });
+ dropdownToggle().trigger('click');
+ dateTimePicker.vm.$nextTick(() => {
+ const items = findQuickRangeItems();
+
+ expect(items.length).toBe(Object.keys(otherTimeRanges).length);
+ expect(items.at(0).text()).toBe('1 minute');
+ expect(items.at(0).is('.active')).toBe(false);
+
+ expect(items.at(1).text()).toBe('2 minutes');
+ expect(items.at(1).is('.active')).toBe(true);
+
+ expect(items.at(2).text()).toBe('5 minutes');
+ expect(items.at(2).is('.active')).toBe(false);
+
+ done();
+ });
+ });
+
+ it('renders dropdown with a label not in the quick range', done => {
+ createComponent({
+ value: {
+ duration: { seconds: 60 * 4 },
+ },
+ });
+ dropdownToggle().trigger('click');
+ dateTimePicker.vm.$nextTick(() => {
+ expect(dropdownToggle().text()).toBe('2020-01-23 19:56:00 to 2020-01-23 20:00:00');
+
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/dismissible_alert_spec.js b/spec/frontend/vue_shared/components/dismissible_alert_spec.js
new file mode 100644
index 00000000000..17905254292
--- /dev/null
+++ b/spec/frontend/vue_shared/components/dismissible_alert_spec.js
@@ -0,0 +1,57 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlAlert } from '@gitlab/ui';
+import DismissibleAlert from '~/vue_shared/components/dismissible_alert.vue';
+
+const TEST_HTML = 'Hello World! <strong>Foo</strong>';
+
+describe('vue_shared/components/dismissible_alert', () => {
+ const testAlertProps = {
+ primaryButtonText: 'Lorem ipsum',
+ primaryButtonLink: '/lorem/ipsum',
+ };
+
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(DismissibleAlert, {
+ propsData: {
+ html: TEST_HTML,
+ ...testAlertProps,
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findAlert = () => wrapper.find(GlAlert);
+
+ describe('with default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('shows alert', () => {
+ const alert = findAlert();
+
+ expect(alert.exists()).toBe(true);
+ expect(alert.props()).toEqual(expect.objectContaining(testAlertProps));
+ });
+
+ it('shows given HTML', () => {
+ expect(findAlert().html()).toContain(TEST_HTML);
+ });
+
+ describe('when dismissed', () => {
+ beforeEach(() => {
+ findAlert().vm.$emit('dismiss');
+ });
+
+ it('hides the alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/expand_button_spec.js b/spec/frontend/vue_shared/components/expand_button_spec.js
index 3b1c8f6219c..aea90e5b31f 100644
--- a/spec/frontend/vue_shared/components/expand_button_spec.js
+++ b/spec/frontend/vue_shared/components/expand_button_spec.js
@@ -71,7 +71,7 @@ describe('Expand button', () => {
it('renders button before text', () => {
expect(expanderPrependEl().isVisible()).toBe(true);
expect(expanderAppendEl().isVisible()).toBe(false);
- expect(wrapper.find(ExpandButton).html()).toMatchSnapshot();
+ expect(wrapper.find(ExpandButton).element).toMatchSnapshot();
});
});
@@ -119,7 +119,7 @@ describe('Expand button', () => {
it('renders button after text', () => {
expect(expanderPrependEl().isVisible()).toBe(false);
expect(expanderAppendEl().isVisible()).toBe(true);
- expect(wrapper.find(ExpandButton).html()).toMatchSnapshot();
+ expect(wrapper.find(ExpandButton).element).toMatchSnapshot();
});
});
});
diff --git a/spec/frontend/vue_shared/components/file_tree_spec.js b/spec/frontend/vue_shared/components/file_tree_spec.js
new file mode 100644
index 00000000000..38979d9d844
--- /dev/null
+++ b/spec/frontend/vue_shared/components/file_tree_spec.js
@@ -0,0 +1,88 @@
+import { pick } from 'lodash';
+import { shallowMount } from '@vue/test-utils';
+import FileTree from '~/vue_shared/components/file_tree.vue';
+
+const MockFileRow = {
+ name: 'MockFileRow',
+ render() {
+ return this.$slots.default;
+ },
+};
+
+const TEST_LEVEL = 4;
+const TEST_EXTA_ARGS = {
+ foo: 'lorem-ipsum',
+ bar: 'zoo',
+};
+
+describe('File Tree component', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(FileTree, {
+ propsData: { level: TEST_LEVEL, fileRowComponent: MockFileRow, ...props },
+ attrs: { ...TEST_EXTA_ARGS },
+ });
+ };
+
+ const findFileRow = () => wrapper.find(MockFileRow);
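+ // findAll also matches the root FileTree wrapper itself, so slice(1) keeps only the child trees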
+ const findChildrenTrees = () => wrapper.findAll(FileTree).wrappers.slice(1);
+ const findChildrenTreeProps = () =>
+ findChildrenTrees().map(x => ({
+ ...x.props(),
+ ...pick(x.attributes(), Object.keys(TEST_EXTA_ARGS)),
+ }));
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('file row component', () => {
+ beforeEach(() => {
+ createComponent({ file: {} });
+ });
+
+ it('renders file row component', () => {
+ expect(findFileRow().exists()).toEqual(true);
+ });
+
+ it('contains the required attribute keys', () => {
+ const fileRow = findFileRow();
+
+ // Checking strings because values in attributes are always strings
+ expect(fileRow.attributes()).toEqual({
+ file: {}.toString(),
+ level: TEST_LEVEL.toString(),
+ ...TEST_EXTA_ARGS,
+ });
+ });
+ });
+
+ describe('file tree', () => {
+ const createChildren = () => [{ id: 1 }, { id: 2 }];
+ const createChildrenExpectation = (props = {}) =>
+ createChildren().map(file => ({
+ fileRowComponent: MockFileRow,
+ file,
+ ...TEST_EXTA_ARGS,
+ ...props,
+ }));
+
+ it.each`
+ key | value | desc | expectedChildren
+ ${'isHeader'} | ${true} | ${'is shown if file is a header'} | ${createChildrenExpectation({ level: 0 })}
+ ${'opened'} | ${true} | ${'is shown if file is open'} | ${createChildrenExpectation({ level: TEST_LEVEL + 1 })}
+ ${'isHeader'} | ${false} | ${'is hidden if file is not a header'} | ${[]}
+ ${'opened'} | ${false} | ${'is hidden if file is not open'} | ${[]}
+ `('$desc', ({ key, value, expectedChildren }) => {
+ createComponent({
+ file: {
+ [key]: value,
+ tree: createChildren(),
+ },
+ });
+
+ expect(findChildrenTreeProps()).toEqual(expectedChildren);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/identicon_spec.js b/spec/frontend/vue_shared/components/identicon_spec.js
index 0b3dbb61c96..5e8b013d480 100644
--- a/spec/frontend/vue_shared/components/identicon_spec.js
+++ b/spec/frontend/vue_shared/components/identicon_spec.js
@@ -1,65 +1,33 @@
-import Vue from 'vue';
-import identiconComponent from '~/vue_shared/components/identicon.vue';
-
-const createComponent = sizeClass => {
- const Component = Vue.extend(identiconComponent);
-
- return new Component({
- propsData: {
- entityId: 1,
- entityName: 'entity-name',
- sizeClass,
- },
- }).$mount();
-};
-
-describe('IdenticonComponent', () => {
- describe('computed', () => {
- let vm;
-
- beforeEach(() => {
- vm = createComponent();
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('identiconBackgroundClass', () => {
- it('should return bg class based on entityId', () => {
- vm.entityId = 4;
-
- expect(vm.identiconBackgroundClass).toBeDefined();
- expect(vm.identiconBackgroundClass).toBe('bg5');
- });
+import { shallowMount } from '@vue/test-utils';
+import IdenticonComponent from '~/vue_shared/components/identicon.vue';
+
+describe('Identicon', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(IdenticonComponent, {
+ propsData: {
+ entityId: 1,
+ entityName: 'entity-name',
+ sizeClass: 's40',
+ },
});
+ };
- describe('identiconTitle', () => {
- it('should return first letter of entity title in uppercase', () => {
- vm.entityName = 'dummy-group';
-
- expect(vm.identiconTitle).toBeDefined();
- expect(vm.identiconTitle).toBe('D');
- });
- });
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
});
- describe('template', () => {
- it('should render identicon', () => {
- const vm = createComponent();
+ it('matches snapshot', () => {
+ createComponent();
- expect(vm.$el.nodeName).toBe('DIV');
- expect(vm.$el.classList.contains('identicon')).toBeTruthy();
- expect(vm.$el.classList.contains('s40')).toBeTruthy();
- expect(vm.$el.classList.contains('bg2')).toBeTruthy();
- vm.$destroy();
- });
+ expect(wrapper.element).toMatchSnapshot();
+ });
- it('should render identicon with provided sizing class', () => {
- const vm = createComponent('s32');
+ it('adds the correct background class to the identicon', () => {
+ createComponent();
- expect(vm.$el.classList.contains('s32')).toBeTruthy();
- vm.$destroy();
- });
+ expect(wrapper.find({ ref: 'identicon' }).classes()).toContain('bg2');
});
});
diff --git a/spec/frontend/vue_shared/components/issue/__snapshots__/issue_warning_spec.js.snap b/spec/frontend/vue_shared/components/issue/__snapshots__/issue_warning_spec.js.snap
new file mode 100644
index 00000000000..49b18d3e106
--- /dev/null
+++ b/spec/frontend/vue_shared/components/issue/__snapshots__/issue_warning_spec.js.snap
@@ -0,0 +1,62 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Issue Warning Component when issue is confidential but not locked renders information about confidential issue 1`] = `
+<span>
+
+ This is a confidential issue.
+ People without permission will never get a notification.
+
+ <gl-link-stub
+ href="confidential-path"
+ target="_blank"
+ >
+
+ Learn more
+
+ </gl-link-stub>
+</span>
+`;
+
+exports[`Issue Warning Component when issue is locked and confidential renders information about locked and confidential issue 1`] = `
+<span>
+ <span>
+ This issue is
+ <a
+ href=""
+ rel="noopener noreferrer"
+ target="_blank"
+ >
+ confidential
+ </a>
+ and
+ <a
+ href=""
+ rel="noopener noreferrer"
+ target="_blank"
+ >
+ locked
+ </a>
+ .
+ </span>
+
+ People without permission will never get a notification and won't be able to comment.
+
+</span>
+`;
+
+exports[`Issue Warning Component when issue is locked but not confidential renders information about locked issue 1`] = `
+<span>
+
+ This issue is locked.
+ Only project members can comment.
+
+ <gl-link-stub
+ href="locked-path"
+ target="_blank"
+ >
+
+ Learn more
+
+ </gl-link-stub>
+</span>
+`;
diff --git a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js b/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
index b00261ae067..ca75c55df26 100644
--- a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
+++ b/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
@@ -18,7 +18,7 @@ describe('IssueAssigneesComponent', () => {
...props,
},
});
- vm = wrapper.vm; // eslint-disable-line
+ vm = wrapper.vm;
};
const findTooltipText = () => wrapper.find('.js-assignee-tooltip').text();
diff --git a/spec/frontend/vue_shared/components/issue/issue_warning_spec.js b/spec/frontend/vue_shared/components/issue/issue_warning_spec.js
index 7bb054b4e6c..891c70bcb5c 100644
--- a/spec/frontend/vue_shared/components/issue/issue_warning_spec.js
+++ b/spec/frontend/vue_shared/components/issue/issue_warning_spec.js
@@ -1,65 +1,105 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import issueWarning from '~/vue_shared/components/issue/issue_warning.vue';
+import { shallowMount } from '@vue/test-utils';
+import IssueWarning from '~/vue_shared/components/issue/issue_warning.vue';
+import Icon from '~/vue_shared/components/icon.vue';
-const IssueWarning = Vue.extend(issueWarning);
+describe('Issue Warning Component', () => {
+ let wrapper;
-function formatWarning(string) {
- // Replace newlines with a space then replace multiple spaces with one space
- return string
- .trim()
- .replace(/\n/g, ' ')
- .replace(/\s\s+/g, ' ');
-}
+ const findIcon = () => wrapper.find(Icon);
+ const findLockedBlock = () => wrapper.find({ ref: 'locked' });
+ const findConfidentialBlock = () => wrapper.find({ ref: 'confidential' });
+ const findLockedAndConfidentialBlock = () => wrapper.find({ ref: 'lockedAndConfidential' });
-describe('Issue Warning Component', () => {
- describe('isLocked', () => {
- it('should render locked issue warning information', () => {
- const props = {
+ const createComponent = props => {
+ wrapper = shallowMount(IssueWarning, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('when issue is locked but not confidential', () => {
+ beforeEach(() => {
+ createComponent({
isLocked: true,
- lockedIssueDocsPath: 'docs/issues/locked',
- };
- const vm = mountComponent(IssueWarning, props);
-
- expect(
- vm.$el.querySelector('.icon use').getAttributeNS('http://www.w3.org/1999/xlink', 'href'),
- ).toMatch(/lock$/);
- expect(formatWarning(vm.$el.querySelector('span').textContent)).toEqual(
- 'This issue is locked. Only project members can comment. Learn more',
- );
- expect(vm.$el.querySelector('a').href).toContain(props.lockedIssueDocsPath);
+ lockedIssueDocsPath: 'locked-path',
+ isConfidential: false,
+ });
+ });
+
+ it('renders information about locked issue', () => {
+ expect(findLockedBlock().exists()).toBe(true);
+ expect(findLockedBlock().element).toMatchSnapshot();
+ });
+
+ it('renders warning icon', () => {
+ expect(findIcon().exists()).toBe(true);
+ });
+
+ it('does not render information about locked and confidential issue', () => {
+ expect(findLockedAndConfidentialBlock().exists()).toBe(false);
+ });
+
+ it('does not render information about confidential issue', () => {
+ expect(findConfidentialBlock().exists()).toBe(false);
});
});
- describe('isConfidential', () => {
- it('should render confidential issue warning information', () => {
- const props = {
+ describe('when issue is confidential but not locked', () => {
+ beforeEach(() => {
+ createComponent({
+ isLocked: false,
isConfidential: true,
- confidentialIssueDocsPath: '/docs/issues/confidential',
- };
- const vm = mountComponent(IssueWarning, props);
-
- expect(
- vm.$el.querySelector('.icon use').getAttributeNS('http://www.w3.org/1999/xlink', 'href'),
- ).toMatch(/eye-slash$/);
- expect(formatWarning(vm.$el.querySelector('span').textContent)).toEqual(
- 'This is a confidential issue. People without permission will never get a notification. Learn more',
- );
- expect(vm.$el.querySelector('a').href).toContain(props.confidentialIssueDocsPath);
+ confidentialIssueDocsPath: 'confidential-path',
+ });
+ });
+
+ it('renders information about confidential issue', () => {
+ expect(findConfidentialBlock().exists()).toBe(true);
+ expect(findConfidentialBlock().element).toMatchSnapshot();
+ });
+
+ it('renders warning icon', () => {
+ expect(wrapper.find(Icon).exists()).toBe(true);
+ });
+
+ it('does not render information about locked issue', () => {
+ expect(findLockedBlock().exists()).toBe(false);
+ });
+
+ it('does not render information about locked and confidential issue', () => {
+ expect(findLockedAndConfidentialBlock().exists()).toBe(false);
});
});
- describe('isLocked and isConfidential', () => {
- it('should render locked and confidential issue warning information', () => {
- const vm = mountComponent(IssueWarning, {
+ describe('when issue is locked and confidential', () => {
+ beforeEach(() => {
+ createComponent({
isLocked: true,
isConfidential: true,
});
+ });
+
+ it('renders information about locked and confidential issue', () => {
+ expect(findLockedAndConfidentialBlock().exists()).toBe(true);
+ expect(findLockedAndConfidentialBlock().element).toMatchSnapshot();
+ });
+
+ it('does not render warning icon', () => {
+ expect(wrapper.find(Icon).exists()).toBe(false);
+ });
+
+ it('does not render information about locked issue', () => {
+ expect(findLockedBlock().exists()).toBe(false);
+ });
- expect(vm.$el.querySelector('.icon')).toBeFalsy();
- expect(formatWarning(vm.$el.querySelector('span').textContent)).toEqual(
- "This issue is confidential and locked. People without permission will never get a notification and won't be able to comment.",
- );
+ it('does not render information about confidential issue', () => {
+ expect(findConfidentialBlock().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap b/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
new file mode 100644
index 00000000000..29ac754de49
--- /dev/null
+++ b/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
@@ -0,0 +1,28 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Suggestion Diff component matches snapshot 1`] = `
+<div
+ class="md-suggestion"
+>
+ <suggestion-diff-header-stub
+ class="qa-suggestion-diff-header js-suggestion-diff-header"
+ helppagepath="path_to_docs"
+ />
+
+ <table
+ class="mb-3 md-suggestion-diff js-syntax-highlight code"
+ >
+ <tbody>
+ <suggestion-diff-row-stub
+ line="[object Object]"
+ />
+ <suggestion-diff-row-stub
+ line="[object Object]"
+ />
+ <suggestion-diff-row-stub
+ line="[object Object]"
+ />
+ </tbody>
+ </table>
+</div>
+`;
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js
index 97fcdc67791..6ae405017c9 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js
@@ -63,21 +63,59 @@ describe('SuggestionDiffRow', () => {
it('renders the plain text when it is available but rich text is not', () => {
factory({
propsData: {
- line: Object.assign({}, newLine, { rich_text: undefined }),
+ line: {
+ ...newLine,
+ rich_text: undefined,
+ },
},
});
expect(wrapper.find('td.line_content').text()).toEqual('newplaintext');
});
- it('renders a zero-width space when it has no plain or rich texts', () => {
+ it('switches to table-cell display when it has no plain or rich text', () => {
factory({
propsData: {
- line: Object.assign({}, newLine, { rich_text: undefined, text: undefined }),
+ line: {
+ ...newLine,
+ text: undefined,
+ rich_text: undefined,
+ },
},
});
- expect(wrapper.find('td.line_content').text()).toEqual('\u200B');
+ const lineContent = wrapper.find('td.line_content');
+
+ expect(lineContent.classes()).toContain('d-table-cell');
+ expect(lineContent.text()).toEqual('');
+ });
+
+ it('does not switch to table-cell display if it has either plain or rich text', () => {
+ let lineContent;
+
+ factory({
+ propsData: {
+ line: {
+ ...newLine,
+ text: undefined,
+ },
+ },
+ });
+
+ lineContent = wrapper.find('td.line_content');
+ expect(lineContent.classes()).not.toContain('d-table-cell');
+
+ factory({
+ propsData: {
+ line: {
+ ...newLine,
+ rich_text: undefined,
+ },
+ },
+ });
+
+ lineContent = wrapper.find('td.line_content');
+ expect(lineContent.classes()).not.toContain('d-table-cell');
});
});
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
index 3c5e7500ba7..162ac495385 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
@@ -1,9 +1,9 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
import SuggestionDiffComponent from '~/vue_shared/components/markdown/suggestion_diff.vue';
-import { selectDiffLines } from '~/vue_shared/components/lib/utils/diff_utils';
+import SuggestionDiffHeader from '~/vue_shared/components/markdown/suggestion_diff_header.vue';
+import SuggestionDiffRow from '~/vue_shared/components/markdown/suggestion_diff_row.vue';
const MOCK_DATA = {
- canApply: true,
suggestion: {
id: 1,
diff_lines: [
@@ -42,60 +42,45 @@ const MOCK_DATA = {
helpPagePath: 'path_to_docs',
};
-const lines = selectDiffLines(MOCK_DATA.suggestion.diff_lines);
-const newLines = lines.filter(line => line.type === 'new');
-
describe('Suggestion Diff component', () => {
- let vm;
-
- beforeEach(done => {
- const Component = Vue.extend(SuggestionDiffComponent);
-
- vm = new Component({
- propsData: MOCK_DATA,
- }).$mount();
-
- Vue.nextTick(done);
- });
-
- describe('init', () => {
- it('renders a suggestion header', () => {
- expect(vm.$el.querySelector('.js-suggestion-diff-header')).not.toBeNull();
- });
-
- it('renders a diff table with syntax highlighting', () => {
- expect(vm.$el.querySelector('.md-suggestion-diff.js-syntax-highlight.code')).not.toBeNull();
- });
+ let wrapper;
- it('renders the oldLineNumber', () => {
- const fromLine = vm.$el.querySelector('.old_line').innerHTML;
-
- expect(parseInt(fromLine, 10)).toBe(lines[0].old_line);
+ const createComponent = () => {
+ wrapper = shallowMount(SuggestionDiffComponent, {
+ propsData: {
+ ...MOCK_DATA,
+ },
});
+ };
- it('renders the oldLineContent', () => {
- const fromContent = vm.$el.querySelector('.line_content.old').innerHTML;
-
- expect(fromContent.includes(lines[0].text)).toBe(true);
- });
+ beforeEach(() => {
+ createComponent();
+ });
- it('renders new lines', () => {
- const newLinesElements = vm.$el.querySelectorAll('.line_holder.new');
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
- newLinesElements.forEach((line, i) => {
- expect(newLinesElements[i].innerHTML.includes(newLines[i].new_line)).toBe(true);
- expect(newLinesElements[i].innerHTML.includes(newLines[i].text)).toBe(true);
- });
- });
+ it('matches snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
});
- describe('applySuggestion', () => {
- it('emits apply event when applySuggestion is called', () => {
- const callback = () => {};
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
- vm.applySuggestion(callback);
+ it('renders the correct number of suggestion diff rows', () => {
+ expect(wrapper.findAll(SuggestionDiffRow)).toHaveLength(3);
+ });
- expect(vm.$emit).toHaveBeenCalledWith('apply', { suggestionId: vm.suggestion.id, callback });
- });
+ it('emits apply event on suggestion diff header apply', () => {
+ wrapper.find(SuggestionDiffHeader).vm.$emit('apply', 'test-event');
+
+ expect(wrapper.emitted('apply')).toBeDefined();
+ expect(wrapper.emitted('apply')).toEqual([
+ [
+ {
+ callback: 'test-event',
+ suggestionId: 1,
+ },
+ ],
+ ]);
});
});
diff --git a/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
new file mode 100644
index 00000000000..f3ce03796f9
--- /dev/null
+++ b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
@@ -0,0 +1,62 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Issue placeholder note component matches snapshot 1`] = `
+<timeline-entry-item-stub
+ class="note note-wrapper being-posted fade-in-half"
+>
+ <div
+ class="timeline-icon"
+ >
+ <user-avatar-link-stub
+ imgalt=""
+ imgcssclasses=""
+ imgsize="40"
+ imgsrc="mock_path"
+ linkhref="/root"
+ tooltipplacement="top"
+ tooltiptext=""
+ username=""
+ />
+ </div>
+
+ <div
+ class="timeline-content discussion"
+ >
+ <div
+ class="note-header"
+ >
+ <div
+ class="note-header-info"
+ >
+ <a
+ href="/root"
+ >
+ <span
+ class="d-none d-sm-inline-block bold"
+ >
+ Root
+ </span>
+
+ <span
+ class="note-headline-light"
+ >
+ @root
+ </span>
+ </a>
+ </div>
+ </div>
+
+ <div
+ class="note-body"
+ >
+ <div
+ class="note-text md"
+ >
+ <p>
+ Foo
+ </p>
+ </div>
+ </div>
+ </div>
+</timeline-entry-item-stub>
+`;
diff --git a/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_system_note_spec.js.snap b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_system_note_spec.js.snap
new file mode 100644
index 00000000000..10c33269107
--- /dev/null
+++ b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_system_note_spec.js.snap
@@ -0,0 +1,15 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Placeholder system note component matches snapshot 1`] = `
+<timeline-entry-item-stub
+ class="note system-note being-posted fade-in-half"
+>
+ <div
+ class="timeline-content"
+ >
+ <em>
+ This is a placeholder
+ </em>
+ </div>
+</timeline-entry-item-stub>
+`;
diff --git a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
index 45f131194ca..0f30b50da0b 100644
--- a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
+++ b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
@@ -1,51 +1,55 @@
-import Vue from 'vue';
-import issuePlaceholderNote from '~/vue_shared/components/notes/placeholder_note.vue';
-import createStore from '~/notes/stores';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import IssuePlaceholderNote from '~/vue_shared/components/notes/placeholder_note.vue';
import { userDataMock } from '../../../notes/mock_data';
-describe('issue placeholder system note component', () => {
- let store;
- let vm;
-
- beforeEach(() => {
- const Component = Vue.extend(issuePlaceholderNote);
- store = createStore();
- store.dispatch('setUserData', userDataMock);
- vm = new Component({
- store,
- propsData: { note: { body: 'Foo' } },
- }).$mount();
- });
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const getters = {
+ getUserData: () => userDataMock,
+};
+
+describe('Issue placeholder note component', () => {
+ let wrapper;
+
+ const findNote = () => wrapper.find({ ref: 'note' });
+
+ const createComponent = (isIndividual = false) => {
+ wrapper = shallowMount(IssuePlaceholderNote, {
+ localVue,
+ store: new Vuex.Store({
+ getters,
+ }),
+ propsData: {
+ note: {
+ body: 'Foo',
+ individual_note: isIndividual,
+ },
+ },
+ });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ wrapper = null;
});
- describe('user information', () => {
- it('should render user avatar with link', () => {
- expect(vm.$el.querySelector('.user-avatar-link').getAttribute('href')).toEqual(
- userDataMock.path,
- );
+ it('matches snapshot', () => {
+ createComponent();
- expect(vm.$el.querySelector('.user-avatar-link img').getAttribute('src')).toEqual(
- `${userDataMock.avatar_url}?width=40`,
- );
- });
+ expect(wrapper.element).toMatchSnapshot();
});
- describe('note content', () => {
- it('should render note header information', () => {
- expect(vm.$el.querySelector('.note-header-info a').getAttribute('href')).toEqual(
- userDataMock.path,
- );
+ it('does not add "discussion" class to individual notes', () => {
+ createComponent(true);
- expect(
- vm.$el.querySelector('.note-header-info .note-headline-light').textContent.trim(),
- ).toEqual(`@${userDataMock.username}`);
- });
+ expect(findNote().classes()).not.toContain('discussion');
+ });
- it('should render note body', () => {
- expect(vm.$el.querySelector('.note-text p').textContent.trim()).toEqual('Foo');
- });
+ it('adds "discussion" class to non-individual notes', () => {
+ createComponent();
+
+ expect(findNote().classes()).toContain('discussion');
});
});
diff --git a/spec/frontend/vue_shared/components/notes/placeholder_system_note_spec.js b/spec/frontend/vue_shared/components/notes/placeholder_system_note_spec.js
index 81c5cd6a057..de6ab43bc41 100644
--- a/spec/frontend/vue_shared/components/notes/placeholder_system_note_spec.js
+++ b/spec/frontend/vue_shared/components/notes/placeholder_system_note_spec.js
@@ -1,27 +1,25 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import placeholderSystemNote from '~/vue_shared/components/notes/placeholder_system_note.vue';
+import { shallowMount } from '@vue/test-utils';
+import PlaceholderSystemNote from '~/vue_shared/components/notes/placeholder_system_note.vue';
-describe('placeholder system note component', () => {
- let PlaceholderSystemNote;
- let vm;
+describe('Placeholder system note component', () => {
+ let wrapper;
- beforeEach(() => {
- PlaceholderSystemNote = Vue.extend(placeholderSystemNote);
- });
+ const createComponent = () => {
+ wrapper = shallowMount(PlaceholderSystemNote, {
+ propsData: {
+ note: { body: 'This is a placeholder' },
+ },
+ });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ wrapper = null;
});
- it('should render system note placeholder with plain text', () => {
- vm = mountComponent(PlaceholderSystemNote, {
- note: { body: 'This is a placeholder' },
- });
+ it('matches snapshot', () => {
+ createComponent();
- expect(vm.$el.tagName).toEqual('LI');
- expect(vm.$el.querySelector('.timeline-content em').textContent.trim()).toEqual(
- 'This is a placeholder',
- );
+ expect(wrapper.element).toMatchSnapshot();
});
});
diff --git a/spec/frontend/vue_shared/components/recaptcha_modal_spec.js b/spec/frontend/vue_shared/components/recaptcha_modal_spec.js
index 223e7187d99..8ab65efd388 100644
--- a/spec/frontend/vue_shared/components/recaptcha_modal_spec.js
+++ b/spec/frontend/vue_shared/components/recaptcha_modal_spec.js
@@ -2,7 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import { eventHub } from '~/vue_shared/components/recaptcha_eventhub';
-import RecaptchaModal from '~/vue_shared/components/recaptcha_modal';
+import RecaptchaModal from '~/vue_shared/components/recaptcha_modal.vue';
describe('RecaptchaModal', () => {
const recaptchaFormId = 'recaptcha-form';
diff --git a/spec/frontend/vue_shared/components/slot_switch_spec.js b/spec/frontend/vue_shared/components/slot_switch_spec.js
index 71e6087c272..73307b5573f 100644
--- a/spec/frontend/vue_shared/components/slot_switch_spec.js
+++ b/spec/frontend/vue_shared/components/slot_switch_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import SlotSwitch from '~/vue_shared/components/slot_switch';
+import SlotSwitch from '~/vue_shared/components/slot_switch.vue';
describe('SlotSwitch', () => {
const slots = {
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
index 2f68e15b0d7..902e83da7be 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { each } from 'lodash';
import { trimText } from 'helpers/text_helper';
import { shallowMount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
@@ -58,7 +58,7 @@ describe('User Avatar Link Component', () => {
});
it('should return necessary props as defined', () => {
- _.each(defaultProps, (val, key) => {
+ each(defaultProps, (val, key) => {
expect(wrapper.vm[key]).toBeDefined();
});
});
diff --git a/spec/graphql/features/authorization_spec.rb b/spec/graphql/features/authorization_spec.rb
index 7ad6a622b4b..5ef1bced179 100644
--- a/spec/graphql/features/authorization_spec.rb
+++ b/spec/graphql/features/authorization_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe 'Gitlab::Graphql::Authorization' do
+ include GraphqlHelpers
+
set(:user) { create(:user) }
let(:permission_single) { :foo }
@@ -300,37 +302,4 @@ describe 'Gitlab::Graphql::Authorization' do
allow(Ability).to receive(:allowed?).with(user, permission, test_object).and_return(true)
end
end
-
- def type_factory
- Class.new(Types::BaseObject) do
- graphql_name 'TestType'
-
- field :name, GraphQL::STRING_TYPE, null: true
-
- yield(self) if block_given?
- end
- end
-
- def query_factory
- Class.new(Types::BaseObject) do
- graphql_name 'TestQuery'
-
- yield(self) if block_given?
- end
- end
-
- def execute_query(query_type)
- schema = Class.new(GraphQL::Schema) do
- use Gitlab::Graphql::Authorize
- use Gitlab::Graphql::Connections
-
- query(query_type)
- end
-
- schema.execute(
- query_string,
- context: { current_user: user },
- variables: {}
- )
- end
end
diff --git a/spec/graphql/features/feature_flag_spec.rb b/spec/graphql/features/feature_flag_spec.rb
new file mode 100644
index 00000000000..13b1e472fab
--- /dev/null
+++ b/spec/graphql/features/feature_flag_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Graphql Field feature flags' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+
+ let(:feature_flag) { 'test_feature' }
+ let(:test_object) { double(name: 'My name') }
+ let(:query_string) { '{ item() { name } }' }
+ let(:result) { execute_query(query_type)['data'] }
+
+ subject { result }
+
+ describe 'Feature flagged field' do
+ let(:type) { type_factory }
+
+ let(:query_type) do
+ query_factory do |query|
+ query.field :item, type, null: true, feature_flag: feature_flag, resolve: ->(obj, args, ctx) { test_object }
+ end
+ end
+
+ it 'returns the value when feature is enabled' do
+ expect(subject['item']).to eq('name' => test_object.name)
+ end
+
+ it 'returns nil when the feature is disabled' do
+ stub_feature_flags(feature_flag => false)
+
+ expect(subject).to be_nil
+ end
+ end
+end
diff --git a/spec/graphql/mutations/todos/restore_many_spec.rb b/spec/graphql/mutations/todos/restore_many_spec.rb
new file mode 100644
index 00000000000..7821ce35a08
--- /dev/null
+++ b/spec/graphql/mutations/todos/restore_many_spec.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Mutations::Todos::RestoreMany do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:author) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
+
+ let_it_be(:todo1) { create(:todo, user: current_user, author: author, state: :done) }
+ let_it_be(:todo2) { create(:todo, user: current_user, author: author, state: :pending) }
+
+ let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :done) }
+
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }) }
+
+ describe '#resolve' do
+ it 'restores a single todo' do
+ result = restore_mutation([todo1])
+
+ expect(todo1.reload.state).to eq('pending')
+ expect(todo2.reload.state).to eq('pending')
+ expect(other_user_todo.reload.state).to eq('done')
+
+ todo_ids = result[:updated_ids]
+ expect(todo_ids.size).to eq(1)
+ expect(todo_ids.first).to eq(todo1.to_global_id.to_s)
+ end
+
+ it 'handles a todo which is already pending as expected' do
+ result = restore_mutation([todo2])
+
+ expect_states_were_not_changed
+
+ expect(result[:updated_ids]).to eq([])
+ end
+
+ it 'ignores requests for todos which do not belong to the current user' do
+ restore_mutation([other_user_todo])
+
+ expect_states_were_not_changed
+ end
+
+ it 'ignores invalid GIDs' do
+ expect { mutation.resolve(ids: ['invalid_gid']) }.to raise_error(URI::BadURIError)
+
+ expect_states_were_not_changed
+ end
+
+ it 'restores multiple todos' do
+ todo4 = create(:todo, user: current_user, author: author, state: :done)
+
+ result = restore_mutation([todo1, todo4, todo2])
+
+ expect(result[:updated_ids].size).to eq(2)
+
+ returned_todo_ids = result[:updated_ids]
+ expect(returned_todo_ids).to contain_exactly(todo1.to_global_id.to_s, todo4.to_global_id.to_s)
+
+ expect(todo1.reload.state).to eq('pending')
+ expect(todo2.reload.state).to eq('pending')
+ expect(todo4.reload.state).to eq('pending')
+ expect(other_user_todo.reload.state).to eq('done')
+ end
+
+ it 'fails if one todo does not belong to the current user' do
+ restore_mutation([todo1, todo2, other_user_todo])
+
+ expect(todo1.reload.state).to eq('pending')
+ expect(todo2.reload.state).to eq('pending')
+ expect(other_user_todo.reload.state).to eq('done')
+ end
+
+ it 'fails if too many todos are requested for update' do
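+ # 51 todos exceeds the maximum allowed per request, so an ArgumentError is expected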
+ expect { restore_mutation([todo1] * 51) }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+
+ it 'does not update todos from another app' do
+ todo4 = create(:todo)
+ todo4_gid = ::URI::GID.parse("gid://otherapp/Todo/#{todo4.id}")
+
+ result = mutation.resolve(ids: [todo4_gid.to_s])
+
+ expect(result[:updated_ids]).to be_empty
+
+ expect_states_were_not_changed
+ end
+
+ it 'does not update todos from another model' do
+ todo4 = create(:todo)
+ todo4_gid = ::URI::GID.parse("gid://#{GlobalID.app}/Project/#{todo4.id}")
+
+ result = mutation.resolve(ids: [todo4_gid.to_s])
+
+ expect(result[:updated_ids]).to be_empty
+
+ expect_states_were_not_changed
+ end
+ end
+
+ def restore_mutation(todos)
+ mutation.resolve(ids: todos.map { |todo| global_id_of(todo) } )
+ end
+
+ def global_id_of(todo)
+ todo.to_global_id.to_s
+ end
+
+ def expect_states_were_not_changed
+ expect(todo1.reload.state).to eq('done')
+ expect(todo2.reload.state).to eq('pending')
+ expect(other_user_todo.reload.state).to eq('done')
+ end
+end
diff --git a/spec/graphql/resolvers/boards_resolver_spec.rb b/spec/graphql/resolvers/boards_resolver_spec.rb
new file mode 100644
index 00000000000..ab77dfa8fc3
--- /dev/null
+++ b/spec/graphql/resolvers/boards_resolver_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Resolvers::BoardsResolver do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+
+ shared_examples_for 'group and project boards resolver' do
+ it 'does not create a default board' do
+ expect(resolve_boards).to eq []
+ end
+
+ it 'calls Boards::ListService' do
+ expect_next_instance_of(Boards::ListService) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ resolve_boards
+ end
+
+ it 'avoids N+1 queries' do
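+ # capture a baseline query count, then assert that adding records does not add queries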
+ control = ActiveRecord::QueryRecorder.new { resolve_boards(args: {}) }
+
+ create(:milestone, "#{board_parent.class.name.underscore}": board_parent)
+ create(:board, resource_parent: board_parent)
+
+ expect { resolve_boards(args: {}) }.not_to exceed_query_limit(control)
+ end
+
+ describe 'multiple_issue_boards_available?' do
+ let!(:board2) { create(:board, name: 'Two', resource_parent: board_parent) }
+ let!(:board1) { create(:board, name: 'One', resource_parent: board_parent) }
+
+ it 'returns multiple boards' do
+ allow(board_parent).to receive(:multiple_issue_boards_available?).and_return(true)
+
+ expect(resolve_boards).to eq [board1, board2]
+ end
+
+ it 'returns only the first board' do
+ allow(board_parent).to receive(:multiple_issue_boards_available?).and_return(false)
+
+ expect(resolve_boards).to eq [board1]
+ end
+ end
+ end
+
+ describe '#resolve' do
+ context 'when there is no parent' do
+ let(:board_parent) { nil }
+
+ it 'returns none if parent is nil' do
+ expect(resolve_boards).to eq(Board.none)
+ end
+ end
+
+ context 'when project boards' do
+ let(:board_parent) { create(:project, :public, creator_id: user.id, namespace: user.namespace ) }
+
+ it_behaves_like 'group and project boards resolver'
+ end
+
+ context 'when group boards' do
+ let(:board_parent) { create(:group) }
+
+ it_behaves_like 'group and project boards resolver'
+ end
+ end
+
+ def resolve_boards(args: {})
+ resolve(described_class, obj: board_parent, args: args, ctx: { current_user: user })
+ end
+end
diff --git a/spec/graphql/resolvers/error_tracking/sentry_error_collection_resolver_spec.rb b/spec/graphql/resolvers/error_tracking/sentry_error_collection_resolver_spec.rb
new file mode 100644
index 00000000000..3bb8a5c389d
--- /dev/null
+++ b/spec/graphql/resolvers/error_tracking/sentry_error_collection_resolver_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Resolvers::ErrorTracking::SentryErrorCollectionResolver do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
+
+ let(:list_issues_service) { spy('ErrorTracking::ListIssuesService') }
+
+ before do
+ project.add_developer(current_user)
+
+ allow(ErrorTracking::ListIssuesService)
+ .to receive(:new)
+ .and_return list_issues_service
+ end
+
+ describe '#resolve' do
+ it 'returns an error collection object' do
+ expect(resolve_error_collection).to be_a Gitlab::ErrorTracking::ErrorCollection
+ end
+
+ it 'provides the service url' do
+ fake_url = 'http://test.com'
+
+ expect(list_issues_service)
+ .to receive(:external_url)
+ .and_return(fake_url)
+
+ result = resolve_error_collection
+ expect(result.external_url).to eq fake_url
+ end
+
+ it 'provides the project' do
+ expect(resolve_error_collection.project).to eq project
+ end
+ end
+
+ private
+
+ def resolve_error_collection(context = { current_user: current_user })
+ resolve(described_class, obj: project, args: {}, ctx: context)
+ end
+end
diff --git a/spec/graphql/resolvers/error_tracking/sentry_errors_resolver_spec.rb b/spec/graphql/resolvers/error_tracking/sentry_errors_resolver_spec.rb
new file mode 100644
index 00000000000..93f89d077d7
--- /dev/null
+++ b/spec/graphql/resolvers/error_tracking/sentry_errors_resolver_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Resolvers::ErrorTracking::SentryErrorsResolver do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:error_collection) { Gitlab::ErrorTracking::ErrorCollection.new(project: project) }
+
+ let(:list_issues_service) { spy('ErrorTracking::ListIssuesService') }
+
+ let(:issues) { nil }
+ let(:pagination) { nil }
+
+ describe '#resolve' do
+ context 'insufficient user permission' do
+ let(:user) { create(:user) }
+
+ it 'returns nil' do
+ context = { current_user: user }
+
+ expect(resolve_errors({}, context)).to eq nil
+ end
+ end
+
+ context 'user with permission' do
+ before do
+ project.add_developer(current_user)
+
+ allow(ErrorTracking::ListIssuesService)
+ .to receive(:new)
+ .and_return list_issues_service
+ end
+
+ context 'when after arg given' do
+ let(:after) { "1576029072000:0:0" }
+
+ it 'gives the cursor arg' do
+ expect(ErrorTracking::ListIssuesService)
+ .to receive(:new)
+ .with(project, current_user, { cursor: after })
+ .and_return list_issues_service
+
+ resolve_errors({ after: after })
+ end
+ end
+
+ context 'when no issues fetched' do
+ before do
+ allow(list_issues_service)
+ .to receive(:execute)
+ .and_return(
+ issues: nil
+ )
+ end
+ it 'returns nil' do
+ expect(resolve_errors).to eq nil
+ end
+ end
+
+ context 'when issues returned' do
+ let(:issues) { [:issue_1, :issue_2] }
+ let(:pagination) do
+ {
+ 'next' => { 'cursor' => 'next' },
+ 'previous' => { 'cursor' => 'prev' }
+ }
+ end
+
+ before do
+ allow(list_issues_service)
+ .to receive(:execute)
+ .and_return(
+ issues: issues,
+ pagination: pagination
+ )
+ end
+
+ it 'sets the issues' do
+ expect(resolve_errors).to contain_exactly(*issues)
+ end
+
+ it 'sets the pagination variables' do
+ result = resolve_errors
+ expect(result.next_cursor).to eq 'next'
+ expect(result.previous_cursor).to eq 'prev'
+ end
+
+ it 'returns an externally paginated array' do
+ expect(resolve_errors).to be_a Gitlab::Graphql::ExternallyPaginatedArray
+ end
+ end
+ end
+ end
+
+ private
+
+ def resolve_errors(args = {}, context = { current_user: current_user })
+ resolve(described_class, obj: error_collection, args: args, ctx: context)
+ end
+end
diff --git a/spec/graphql/resolvers/milestone_resolver_spec.rb b/spec/graphql/resolvers/milestone_resolver_spec.rb
new file mode 100644
index 00000000000..297130c2027
--- /dev/null
+++ b/spec/graphql/resolvers/milestone_resolver_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Resolvers::MilestoneResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let_it_be(:current_user) { create(:user) }
+
+ context 'for group milestones' do
+ let_it_be(:now) { Time.now }
+ let_it_be(:group) { create(:group, :private) }
+
+ def resolve_group_milestones(args = {}, context = { current_user: current_user })
+ resolve(described_class, obj: group, args: args, ctx: context)
+ end
+
+ before do
+ group.add_developer(current_user)
+ end
+
+ it 'calls MilestonesFinder#execute' do
+ expect_next_instance_of(MilestonesFinder) do |finder|
+ expect(finder).to receive(:execute)
+ end
+
+ resolve_group_milestones
+ end
+
+ context 'without parameters' do
+ it 'calls MilestonesFinder to retrieve all milestones' do
+ expect(MilestonesFinder).to receive(:new)
+ .with(group_ids: group.id, state: 'all', start_date: nil, end_date: nil)
+ .and_call_original
+
+ resolve_group_milestones
+ end
+ end
+
+ context 'with parameters' do
+ it 'calls MilestonesFinder with correct parameters' do
+ start_date = now
+ end_date = start_date + 1.hour
+
+ expect(MilestonesFinder).to receive(:new)
+ .with(group_ids: group.id, state: 'closed', start_date: start_date, end_date: end_date)
+ .and_call_original
+
+ resolve_group_milestones(start_date: start_date, end_date: end_date, state: 'closed')
+ end
+ end
+
+ context 'by timeframe' do
+ context 'when start_date and end_date are present' do
+ context 'when start date is after end_date' do
+ it 'raises error' do
+ expect do
+ resolve_group_milestones(start_date: now, end_date: now - 2.days)
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, "startDate is after endDate")
+ end
+ end
+ end
+
+ context 'when only start_date is present' do
+ it 'raises error' do
+ expect do
+ resolve_group_milestones(start_date: now)
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
+ end
+ end
+
+ context 'when only end_date is present' do
+ it 'raises error' do
+ expect do
+ resolve_group_milestones(end_date: now)
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
+ end
+ end
+ end
+
+ context 'when user cannot read milestones' do
+ it 'raises error' do
+ unauthorized_user = create(:user)
+
+ expect do
+ resolve_group_milestones({}, { current_user: unauthorized_user })
+ end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/types/base_field_spec.rb b/spec/graphql/types/base_field_spec.rb
index 77ef8933717..1f82f316aa7 100644
--- a/spec/graphql/types/base_field_spec.rb
+++ b/spec/graphql/types/base_field_spec.rb
@@ -111,5 +111,70 @@ describe Types::BaseField do
end
end
end
+
+ describe '#visible?' do
+ context 'and has a feature_flag' do
+ let(:flag) { :test_feature }
+ let(:field) { described_class.new(name: 'test', type: GraphQL::STRING_TYPE, feature_flag: flag, null: false) }
+ let(:context) { {} }
+
+ it 'returns false if the feature is not enabled' do
+ stub_feature_flags(flag => false)
+
+ expect(field.visible?(context)).to eq(false)
+ end
+
+ it 'returns true if the feature is enabled' do
+ expect(field.visible?(context)).to eq(true)
+ end
+
+ context 'falsey feature_flag values' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:flag, :feature_value, :visible) do
+ '' | false | true
+ '' | true | true
+ nil | false | true
+ nil | true | true
+ end
+
+ with_them do
+ it 'returns the correct value' do
+ stub_feature_flags(flag => feature_value)
+
+ expect(field.visible?(context)).to eq(visible)
+ end
+ end
+ end
+ end
+ end
+
+ describe '#description' do
+ context 'feature flag given' do
+ let(:field) { described_class.new(name: 'test', type: GraphQL::STRING_TYPE, feature_flag: flag, null: false, description: 'Test description') }
+ let(:flag) { :test_flag }
+
+ it 'prepends the description' do
+ expect(field.description).to eq 'Test description. Available only when feature flag test_flag is enabled.'
+ end
+
+ context 'falsey feature_flag values' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:flag, :feature_value) do
+ '' | false
+ '' | true
+ nil | false
+ nil | true
+ end
+
+ with_them do
+ it 'returns the correct description' do
+ expect(field.description).to eq('Test description')
+ end
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/graphql/types/blob_viewers/type_enum_spec.rb b/spec/graphql/types/blob_viewers/type_enum_spec.rb
new file mode 100644
index 00000000000..7bd4352f388
--- /dev/null
+++ b/spec/graphql/types/blob_viewers/type_enum_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Types::BlobViewers::TypeEnum do
+ it { expect(described_class.graphql_name).to eq('BlobViewersType') }
+
+ it 'exposes all blob viewer types' do
+ expect(described_class.values.keys).to include(*%w[rich simple auxiliary])
+ end
+end
diff --git a/spec/graphql/types/commit_type_spec.rb b/spec/graphql/types/commit_type_spec.rb
index 1c3b46ecfde..f5f99229f3a 100644
--- a/spec/graphql/types/commit_type_spec.rb
+++ b/spec/graphql/types/commit_type_spec.rb
@@ -10,7 +10,8 @@ describe GitlabSchema.types['Commit'] do
it 'contains attributes related to commit' do
expect(described_class).to have_graphql_fields(
:id, :sha, :title, :description, :message, :authored_date,
- :author_name, :author, :web_url, :latest_pipeline, :pipelines, :signature_html
+ :author_name, :author_gravatar, :author, :web_url, :latest_pipeline,
+ :pipelines, :signature_html
)
end
end
diff --git a/spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb b/spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb
index 30cede6f4cf..3a512fee3b3 100644
--- a/spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb
+++ b/spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb
@@ -20,6 +20,7 @@ describe GitlabSchema.types['SentryDetailedError'] do
message
culprit
externalUrl
+ externalBaseUrl
sentryProjectId
sentryProjectName
sentryProjectSlug
@@ -30,8 +31,10 @@ describe GitlabSchema.types['SentryDetailedError'] do
lastReleaseLastCommit
firstReleaseShortVersion
lastReleaseShortVersion
+ gitlabIssuePath
gitlabCommit
gitlabCommitPath
+ tags
]
is_expected.to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/error_tracking/sentry_error_collection_type_spec.rb b/spec/graphql/types/error_tracking/sentry_error_collection_type_spec.rb
new file mode 100644
index 00000000000..3de0a359c15
--- /dev/null
+++ b/spec/graphql/types/error_tracking/sentry_error_collection_type_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['SentryErrorCollection'] do
+ it { expect(described_class.graphql_name).to eq('SentryErrorCollection') }
+
+ it { expect(described_class).to require_graphql_authorizations(:read_sentry_issue) }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ errors
+ detailed_error
+ external_url
+ error_stack_trace
+ ]
+
+ is_expected.to have_graphql_fields(*expected_fields)
+ end
+
+ describe 'errors field' do
+ subject { described_class.fields['errors'] }
+
+ it 'returns errors' do
+ aggregate_failures 'testing the correct types are returned' do
+ is_expected.to have_graphql_type(Types::ErrorTracking::SentryErrorType.connection_type)
+ is_expected.to have_graphql_extension(Gitlab::Graphql::Extensions::ExternallyPaginatedArrayExtension)
+ is_expected.to have_graphql_resolver(Resolvers::ErrorTracking::SentryErrorsResolver)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/types/error_tracking/sentry_error_stack_trace_entry_type_spec.rb b/spec/graphql/types/error_tracking/sentry_error_stack_trace_entry_type_spec.rb
new file mode 100644
index 00000000000..ce5fade6fcc
--- /dev/null
+++ b/spec/graphql/types/error_tracking/sentry_error_stack_trace_entry_type_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['SentryErrorStackTraceEntry'] do
+ it { expect(described_class.graphql_name).to eq('SentryErrorStackTraceEntry') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ function
+ col
+ line
+ file_name
+ trace_context
+ ]
+
+ is_expected.to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/error_tracking/sentry_error_stack_trace_type_spec.rb b/spec/graphql/types/error_tracking/sentry_error_stack_trace_type_spec.rb
new file mode 100644
index 00000000000..ac41e6903e5
--- /dev/null
+++ b/spec/graphql/types/error_tracking/sentry_error_stack_trace_type_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['SentryErrorStackTrace'] do
+ it { expect(described_class.graphql_name).to eq('SentryErrorStackTrace') }
+
+ it { expect(described_class).to require_graphql_authorizations(:read_sentry_issue) }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ issue_id
+ date_received
+ stack_trace_entries
+ ]
+
+ is_expected.to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/error_tracking/sentry_error_type_spec.rb b/spec/graphql/types/error_tracking/sentry_error_type_spec.rb
new file mode 100644
index 00000000000..51acd035024
--- /dev/null
+++ b/spec/graphql/types/error_tracking/sentry_error_type_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['SentryError'] do
+ it { expect(described_class.graphql_name).to eq('SentryError') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ id
+ sentryId
+ title
+ type
+ userCount
+ count
+ firstSeen
+ lastSeen
+ message
+ culprit
+ externalUrl
+ sentryProjectId
+ sentryProjectName
+ sentryProjectSlug
+ shortId
+ status
+ frequency
+ ]
+
+ is_expected.to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/group_type_spec.rb b/spec/graphql/types/group_type_spec.rb
index de11bad0723..6a0028f6529 100644
--- a/spec/graphql/types/group_type_spec.rb
+++ b/spec/graphql/types/group_type_spec.rb
@@ -10,7 +10,14 @@ describe GitlabSchema.types['Group'] do
it { expect(described_class).to require_graphql_authorizations(:read_group) }
it 'has the expected fields' do
- expected_fields = %w[web_url avatar_url mentions_disabled parent]
+ expected_fields = %w[
+ id name path full_name full_path description description_html visibility
+ lfs_enabled request_access_enabled projects root_storage_statistics
+ web_url avatar_url share_with_group_lock project_creation_level
+ subgroup_creation_level require_two_factor_authentication
+ two_factor_grace_period auto_devops_enabled emails_disabled
+ mentions_disabled parent
+ ]
is_expected.to include_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/permission_types/project_spec.rb b/spec/graphql/types/permission_types/project_spec.rb
index a3a9872ee1a..56c4c2de4df 100644
--- a/spec/graphql/types/permission_types/project_spec.rb
+++ b/spec/graphql/types/permission_types/project_spec.rb
@@ -12,7 +12,8 @@ describe Types::PermissionTypes::Project do
:read_commit_status, :request_access, :create_pipeline, :create_pipeline_schedule,
:create_merge_request_from, :create_wiki, :push_code, :create_deployment, :push_to_delete_protected_branch,
:admin_wiki, :admin_project, :update_pages, :admin_remote_mirror, :create_label,
- :update_wiki, :destroy_wiki, :create_pages, :destroy_pages, :read_pages_content
+ :update_wiki, :destroy_wiki, :create_pages, :destroy_pages, :read_pages_content,
+ :read_merge_request
]
expected_permissions.each do |permission|
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index 39a363cb913..ab210f2e918 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -7,15 +7,10 @@ describe GitlabSchema.types['Query'] do
expect(described_class.graphql_name).to eq('Query')
end
- it do
- is_expected.to have_graphql_fields(:project,
- :namespace,
- :group,
- :echo,
- :metadata,
- :current_user,
- :snippets
- ).at_least
+ it 'has the expected fields' do
+ expected_fields = %i[project namespace group echo metadata current_user snippets]
+
+ expect(described_class).to have_graphql_fields(*expected_fields).at_least
end
describe 'namespace field' do
diff --git a/spec/graphql/types/snippet_type_spec.rb b/spec/graphql/types/snippet_type_spec.rb
index 5524e7a415d..a06d372f668 100644
--- a/spec/graphql/types/snippet_type_spec.rb
+++ b/spec/graphql/types/snippet_type_spec.rb
@@ -5,10 +5,10 @@ require 'spec_helper'
describe GitlabSchema.types['Snippet'] do
it 'has the correct fields' do
expected_fields = [:id, :title, :project, :author,
- :file_name, :content, :description,
+ :file_name, :description,
:visibility_level, :created_at, :updated_at,
:web_url, :raw_url, :notes, :discussions,
- :user_permissions, :description_html]
+ :user_permissions, :description_html, :blob]
is_expected.to have_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/snippets/blob_type_spec.rb b/spec/graphql/types/snippets/blob_type_spec.rb
new file mode 100644
index 00000000000..b6253e96d60
--- /dev/null
+++ b/spec/graphql/types/snippets/blob_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['SnippetBlob'] do
+ it 'has the correct fields' do
+ expected_fields = [:rich_data, :plain_data,
+ :raw_path, :size, :binary, :name, :path,
+ :simple_viewer, :rich_viewer, :mode]
+
+ is_expected.to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/snippets/blob_viewer_type_spec.rb b/spec/graphql/types/snippets/blob_viewer_type_spec.rb
new file mode 100644
index 00000000000..f1f7608cb69
--- /dev/null
+++ b/spec/graphql/types/snippets/blob_viewer_type_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['SnippetBlobViewer'] do
+ it 'has the correct fields' do
+ expected_fields = [:type, :load_async, :too_large, :collapsed,
+ :render_error, :file_type, :loading_partial_name]
+
+ is_expected.to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/helpers/application_settings_helper_spec.rb b/spec/helpers/application_settings_helper_spec.rb
index 41008ff8eaf..c2f3e26f97b 100644
--- a/spec/helpers/application_settings_helper_spec.rb
+++ b/spec/helpers/application_settings_helper_spec.rb
@@ -107,7 +107,7 @@ describe ApplicationSettingsHelper do
let(:project) { build(:project) }
before do
- stub_application_setting(instance_administration_project: project)
+ stub_application_setting(self_monitoring_project: project)
end
it 'returns self_monitoring_project_exists true' do
diff --git a/spec/helpers/auth_helper_spec.rb b/spec/helpers/auth_helper_spec.rb
index cb7c670198d..9179019cd6a 100644
--- a/spec/helpers/auth_helper_spec.rb
+++ b/spec/helpers/auth_helper_spec.rb
@@ -73,12 +73,17 @@ describe AuthHelper do
describe 'enabled_button_based_providers' do
before do
- allow(helper).to receive(:auth_providers) { [:twitter, :github] }
+ allow(helper).to receive(:auth_providers) { [:twitter, :github, :google_oauth2] }
end
context 'all providers are enabled to sign in' do
it 'returns all the enabled providers from settings' do
- expect(helper.enabled_button_based_providers).to include('twitter', 'github')
+ expect(helper.enabled_button_based_providers).to include('twitter', 'github', 'google_oauth2')
+ end
+
+ it 'puts google and github at the beginning' do
+ expect(helper.enabled_button_based_providers.first).to eq('google_oauth2')
+ expect(helper.enabled_button_based_providers.second).to eq('github')
end
end
diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb
index 8b6817efcc4..2a030742cb9 100644
--- a/spec/helpers/avatars_helper_spec.rb
+++ b/spec/helpers/avatars_helper_spec.rb
@@ -22,15 +22,41 @@ describe AvatarsHelper do
end
end
- context 'when providing a project' do
- it_behaves_like 'resource with a default avatar', 'project' do
- let(:resource) { create(:project, name: 'foo') }
- let(:helper_args) { [resource] }
+ shared_examples 'Gitaly exception handling' do
+ before do
+ allow(resource).to receive(:avatar_url).and_raise(error_class)
end
+ it 'handles Gitaly exception gracefully' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ an_instance_of(error_class), source_type: 'Project', source_id: resource.id
+ )
+ expect { project_icon(resource) }.not_to raise_error
+ end
+
+ it_behaves_like 'resource with a default avatar', 'project'
+ end
+
+ context 'when providing a project' do
+ let(:helper_args) { [resource] }
+ let(:resource) { create(:project, name: 'foo') }
+
+ it_behaves_like 'resource with a default avatar', 'project'
+
it_behaves_like 'resource with a custom avatar', 'project' do
let(:resource) { create(:project, :public, avatar: File.open(uploaded_image_temp_path)) }
- let(:helper_args) { [resource] }
+ end
+
+ context 'when Gitaly is unavailable' do
+ let(:error_class) { GRPC::Unavailable }
+
+ include_examples 'Gitaly exception handling'
+ end
+
+ context 'when Gitaly request is taking too long' do
+ let(:error_class) { GRPC::DeadlineExceeded }
+
+ include_examples 'Gitaly exception handling'
end
end
diff --git a/spec/helpers/award_emoji_helper_spec.rb b/spec/helpers/award_emoji_helper_spec.rb
index 2ad6b68a34c..975f32edd42 100644
--- a/spec/helpers/award_emoji_helper_spec.rb
+++ b/spec/helpers/award_emoji_helper_spec.rb
@@ -51,7 +51,7 @@ describe AwardEmojiHelper do
it 'returns correct url' do
@project = merge_request.project
- expected_url = "/#{@project.namespace.path}/#{@project.path}/merge_requests/#{merge_request.iid}/toggle_award_emoji"
+ expected_url = "/#{@project.namespace.path}/#{@project.path}/-/merge_requests/#{merge_request.iid}/toggle_award_emoji"
expect(subject).to eq(expected_url)
end
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index 4996e27c2e6..a9f4b03eba5 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -56,7 +56,7 @@ describe BlobHelper do
stub_feature_flags(web_ide_default: false)
link = helper.edit_blob_button(project, 'master', 'README.md')
- expect(Capybara.string(link).find_link('Edit')[:href]).to eq("/#{project.full_path}/edit/master/README.md")
+ expect(Capybara.string(link).find_link('Edit')[:href]).to eq("/#{project.full_path}/-/edit/master/README.md")
end
it 'returns a link with a Web IDE route' do
@@ -69,7 +69,7 @@ describe BlobHelper do
stub_feature_flags(web_ide_default: false)
link = helper.edit_blob_button(project, 'master', 'README.md', link_opts: { mr_id: 10 })
- expect(Capybara.string(link).find_link('Edit')[:href]).to eq("/#{project.full_path}/edit/master/README.md?mr_id=10")
+ expect(Capybara.string(link).find_link('Edit')[:href]).to eq("/#{project.full_path}/-/edit/master/README.md?mr_id=10")
end
end
@@ -244,8 +244,8 @@ describe BlobHelper do
it 'escapes special characters' do
Rails.application.routes.default_url_options[:script_name] = nil
- expect(helper.ide_edit_path(project, "testing/#hashes", "readme.md#test")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/testing/#hashes/-/readme.md%23test")
- expect(helper.ide_edit_path(project, "testing/#hashes", "src#/readme.md#test")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/testing/#hashes/-/src%23/readme.md%23test")
+ expect(helper.ide_edit_path(project, "testing/#hashes", "readme.md#test")).to eq("/-/ide/project/#{project.full_path}/edit/testing/%23hashes/-/readme.md%23test")
+ expect(helper.ide_edit_path(project, "testing/#hashes", "src#/readme.md#test")).to eq("/-/ide/project/#{project.full_path}/edit/testing/%23hashes/-/src%23/readme.md%23test")
end
it 'does not escape "/" character' do
diff --git a/spec/helpers/broadcast_messages_helper_spec.rb b/spec/helpers/broadcast_messages_helper_spec.rb
index a0682c0e278..7e181e429d7 100644
--- a/spec/helpers/broadcast_messages_helper_spec.rb
+++ b/spec/helpers/broadcast_messages_helper_spec.rb
@@ -3,6 +3,29 @@
require 'spec_helper'
describe BroadcastMessagesHelper do
+ describe 'current_broadcast_notification_message' do
+ subject { helper.current_broadcast_notification_message }
+
+ context 'with available broadcast notification messages' do
+ let!(:broadcast_message_1) { create(:broadcast_message, broadcast_type: 'notification', starts_at: Time.now - 1.day) }
+ let!(:broadcast_message_2) { create(:broadcast_message, broadcast_type: 'notification', starts_at: Time.now) }
+
+ it { is_expected.to eq broadcast_message_2 }
+
+ context 'when last broadcast message is hidden' do
+ before do
+ helper.request.cookies["hide_broadcast_notification_message_#{broadcast_message_2.id}"] = 'true'
+ end
+
+ it { is_expected.to eq broadcast_message_1 }
+ end
+ end
+
+ context 'without broadcast notification messages' do
+ it { is_expected.to be_nil }
+ end
+ end
+
describe 'broadcast_message' do
let(:current_broadcast_message) { BroadcastMessage.new(message: 'Current Message') }
diff --git a/spec/helpers/button_helper_spec.rb b/spec/helpers/button_helper_spec.rb
index e918c34ffef..cf8887f9731 100644
--- a/spec/helpers/button_helper_spec.rb
+++ b/spec/helpers/button_helper_spec.rb
@@ -173,7 +173,7 @@ describe ButtonHelper do
expect(element.attr('data-clipboard-text')).to eq(nil)
expect(element.inner_text).to eq("")
- expect(element.to_html).to include sprite_icon('duplicate')
+ expect(element.to_html).to include sprite_icon('copy-to-clipboard')
end
end
diff --git a/spec/helpers/clusters_helper_spec.rb b/spec/helpers/clusters_helper_spec.rb
index ff8394b9475..5651b899ed0 100644
--- a/spec/helpers/clusters_helper_spec.rb
+++ b/spec/helpers/clusters_helper_spec.rb
@@ -58,32 +58,4 @@ describe ClustersHelper do
it { is_expected.to eq('Create new cluster') }
end
end
-
- describe '#render_new_provider_form' do
- subject { helper.new_cluster_partial(provider: provider) }
-
- context 'GCP provider' do
- let(:provider) { 'gcp' }
-
- it { is_expected.to eq('clusters/clusters/gcp/new') }
- end
-
- context 'AWS provider' do
- let(:provider) { 'aws' }
-
- it { is_expected.to eq('clusters/clusters/aws/new') }
- end
-
- context 'other provider' do
- let(:provider) { 'other' }
-
- it { is_expected.to eq('clusters/clusters/cloud_providers/cloud_provider_selector') }
- end
-
- context 'no provider' do
- let(:provider) { nil }
-
- it { is_expected.to eq('clusters/clusters/cloud_providers/cloud_provider_selector') }
- end
- end
end
diff --git a/spec/helpers/commits_helper_spec.rb b/spec/helpers/commits_helper_spec.rb
index cbc5566979b..dd268c2411f 100644
--- a/spec/helpers/commits_helper_spec.rb
+++ b/spec/helpers/commits_helper_spec.rb
@@ -69,4 +69,17 @@ describe CommitsHelper do
expect(node[:href]).to eq('http://example.com/file.html')
end
end
+
+ describe '#commit_to_html' do
+ let(:project) { create(:project, :repository) }
+ let(:ref) { 'master' }
+ let(:commit) { project.commit(ref) }
+
+ it 'renders HTML representation of a commit' do
+ assign(:project, project)
+ allow(helper).to receive(:current_user).and_return(project.owner)
+
+ expect(helper.commit_to_html(commit, ref, project)).to include('<div class="commit-content')
+ end
+ end
end
diff --git a/spec/helpers/diff_helper_spec.rb b/spec/helpers/diff_helper_spec.rb
index 7f988c60817..63aa41bbad5 100644
--- a/spec/helpers/diff_helper_spec.rb
+++ b/spec/helpers/diff_helper_spec.rb
@@ -258,7 +258,7 @@ describe DiffHelper do
end
end
- context '#render_overflow_warning?' do
+ describe '#render_overflow_warning?' do
let(:diffs_collection) { instance_double(Gitlab::Diff::FileCollection::MergeRequestDiff, raw_diff_files: diff_files) }
let(:diff_files) { Gitlab::Git::DiffCollection.new(files) }
let(:safe_file) { { too_large: false, diff: '' } }
@@ -303,7 +303,7 @@ describe DiffHelper do
end
end
- context '#diff_file_path_text' do
+ describe '#diff_file_path_text' do
it 'returns full path by default' do
expect(diff_file_path_text(diff_file)).to eq(diff_file.new_path)
end
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index b7a6cd4db74..b72fbc9fd3c 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -20,7 +20,7 @@ describe EnvironmentsHelper do
expect(metrics_data).to include(
'settings-path' => edit_project_service_path(project, 'prometheus'),
'clusters-path' => project_clusters_path(project),
- 'current-environment-name': environment.name,
+ 'current-environment-name' => environment.name,
'documentation-path' => help_page_path('administration/monitoring/prometheus/index.md'),
'empty-getting-started-svg-path' => match_asset_path('/assets/illustrations/monitoring/getting_started.svg'),
'empty-loading-svg-path' => match_asset_path('/assets/illustrations/monitoring/loading.svg'),
@@ -29,7 +29,6 @@ describe EnvironmentsHelper do
'metrics-endpoint' => additional_metrics_project_environment_path(project, environment, format: :json),
'deployments-endpoint' => project_environment_deployments_path(project, environment, format: :json),
'default-branch' => 'master',
- 'environments-endpoint': project_environments_path(project, format: :json),
'project-path' => project_path(project),
'tags-path' => project_tags_path(project),
'has-metrics' => "#{environment.has_metrics?}",
diff --git a/spec/helpers/events_helper_spec.rb b/spec/helpers/events_helper_spec.rb
index 9b28bc19185..062fa8f106e 100644
--- a/spec/helpers/events_helper_spec.rb
+++ b/spec/helpers/events_helper_spec.rb
@@ -98,7 +98,7 @@ describe EventsHelper do
it 'returns a commit note url' do
event.target = create(:note_on_commit, note: '+1 from me')
- expect(subject).to eq("#{project_base_url}/commit/#{event.target.commit_id}#note_#{event.target.id}")
+ expect(subject).to eq("#{project_base_url}/-/commit/#{event.target.commit_id}#note_#{event.target.id}")
end
it 'returns a project snippet note url' do
@@ -116,7 +116,7 @@ describe EventsHelper do
it 'returns a merge request url' do
event.target = create(:note_on_merge_request, note: 'LGTM!')
- expect(subject).to eq("#{project_base_url}/merge_requests/#{event.note_target.iid}#note_#{event.target.id}")
+ expect(subject).to eq("#{project_base_url}/-/merge_requests/#{event.note_target.iid}#note_#{event.target.id}")
end
end
end
diff --git a/spec/helpers/labels_helper_spec.rb b/spec/helpers/labels_helper_spec.rb
index 3e92436241b..7ad554fd618 100644
--- a/spec/helpers/labels_helper_spec.rb
+++ b/spec/helpers/labels_helper_spec.rb
@@ -78,13 +78,21 @@ describe LabelsHelper do
end
context 'with a type argument' do
- ['issue', :issue, 'merge_request', :merge_request].each do |type|
+ ['issue', :issue].each do |type|
context "set to #{type}" do
it 'links to correct page' do
expect(link_to_label(label_presenter, type: type)).to match %r{<a href="/#{label.project.full_path}/#{type.to_s.pluralize}\?label_name%5B%5D=#{label.name}">.*</a>}
end
end
end
+
+ ['merge_request', :merge_request].each do |type|
+ context "set to #{type}" do
+ it 'links to correct page' do
+ expect(link_to_label(label_presenter, type: type)).to match %r{<a href="/#{label.project.full_path}/-/#{type.to_s.pluralize}\?label_name%5B%5D=#{label.name}">.*</a>}
+ end
+ end
+ end
end
context 'with a tooltip argument' do
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index a775c69335e..d7cc8afe9c5 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -103,7 +103,7 @@ describe MarkupHelper do
let(:requested_path) { 'files/images/README.md' }
it 'returns the correct HTML for the image' do
- expanded_path = "/#{project.full_path}/raw/master/files/images/#{image_file}"
+ expanded_path = "/#{project.full_path}/-/raw/master/files/images/#{image_file}"
expect(subject.css('a')[0].attr('href')).to eq(expanded_path)
expect(subject.css('img')[0].attr('data-src')).to eq(expanded_path)
@@ -138,10 +138,20 @@ describe MarkupHelper do
describe 'without redacted attribute' do
it 'renders the markdown value' do
expect(Banzai).to receive(:render_field).with(commit, attribute, {}).and_call_original
+ expect(Banzai).to receive(:post_process)
helper.markdown_field(commit, attribute)
end
end
+
+ context 'when post_process is false' do
+ it 'does not run Markdown post processing' do
+ expect(Banzai).to receive(:render_field).with(commit, attribute, {}).and_call_original
+ expect(Banzai).not_to receive(:post_process)
+
+ helper.markdown_field(commit, attribute, post_process: false)
+ end
+ end
end
describe '#link_to_markdown_field' do
@@ -393,7 +403,7 @@ describe MarkupHelper do
it 'logs the error' do
expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
instance_of(StandardError),
- project_id: project.id, file_name: 'foo.md', context: context
+ project_id: project.id, file_name: 'foo.md'
)
subject
@@ -565,6 +575,14 @@ describe MarkupHelper do
expect(doc.content).to eq "foo 😉\nbar 😀"
end
+
+ it 'does not post-process truncated text', :request_store do
+ object = create_object("hello \n\n [Test](README.md)")
+
+ expect do
+ first_line_in_markdown(object, attribute, nil, project: project)
+ end.not_to change { Gitlab::GitalyClient.get_request_count }
+ end
end
context 'when the asked attribute can be redacted' do
diff --git a/spec/helpers/nav_helper_spec.rb b/spec/helpers/nav_helper_spec.rb
index 8d7572c5b5f..f92dca11136 100644
--- a/spec/helpers/nav_helper_spec.rb
+++ b/spec/helpers/nav_helper_spec.rb
@@ -83,7 +83,7 @@ describe NavHelper, :do_not_mock_admin_mode do
expect(helper.header_links).not_to include(:issues, :merge_requests, :todos, :search)
end
- it 'shows the search box when the user cannot read cross project and he is visiting a project' do
+ it 'shows the search box when the user cannot read cross project and they are visiting a project' do
helper.instance_variable_set(:@project, create(:project))
expect(helper.header_links).to include(:search)
@@ -106,13 +106,13 @@ describe NavHelper, :do_not_mock_admin_mode do
end
end
- context '.admin_monitoring_nav_links' do
+ describe '.admin_monitoring_nav_links' do
subject { helper.admin_monitoring_nav_links }
it { is_expected.to all(be_a(String)) }
end
- context '.group_issues_sub_menu_items' do
+ describe '.group_issues_sub_menu_items' do
subject { helper.group_issues_sub_menu_items }
it { is_expected.to all(be_a(String)) }
diff --git a/spec/helpers/notes_helper_spec.rb b/spec/helpers/notes_helper_spec.rb
index 2da7717ebfc..1dc7f4e98ab 100644
--- a/spec/helpers/notes_helper_spec.rb
+++ b/spec/helpers/notes_helper_spec.rb
@@ -62,10 +62,8 @@ describe NotesHelper do
context 'when the discussion is on an older merge request version' do
let(:position) do
- Gitlab::Diff::Position.new(
- old_path: ".gitmodules",
- new_path: ".gitmodules",
- old_line: nil,
+ build(:text_diff_position, :added,
+ file: ".gitmodules",
new_line: 4,
diff_refs: merge_request_diff1.diff_refs
)
@@ -86,9 +84,8 @@ describe NotesHelper do
context 'when the discussion is on a comparison between merge request versions' do
let(:position) do
- Gitlab::Diff::Position.new(
- old_path: ".gitmodules",
- new_path: ".gitmodules",
+ build(:text_diff_position,
+ file: ".gitmodules",
old_line: 4,
new_line: 4,
diff_refs: merge_request_diff3.compare_with(merge_request_diff1.head_commit_sha).diff_refs
diff --git a/spec/helpers/projects/error_tracking_helper_spec.rb b/spec/helpers/projects/error_tracking_helper_spec.rb
index 325ff32dd89..38a6ef6826b 100644
--- a/spec/helpers/projects/error_tracking_helper_spec.rb
+++ b/spec/helpers/projects/error_tracking_helper_spec.rb
@@ -83,18 +83,12 @@ describe Projects::ErrorTrackingHelper do
describe '#error_details_data' do
let(:issue_id) { 1234 }
let(:route_params) { [project.owner, project, issue_id, { format: :json }] }
- let(:list_path) { project_error_tracking_index_path(project) }
- let(:details_path) { details_namespace_project_error_tracking_index_path(*route_params) }
let(:project_path) { project.full_path }
let(:stack_trace_path) { stack_trace_namespace_project_error_tracking_index_path(*route_params) }
let(:issues_path) { project_issues_path(project) }
let(:result) { helper.error_details_data(project, issue_id) }
- it 'returns the correct list path' do
- expect(result['list-path']).to eq list_path
- end
-
it 'returns the correct issue id' do
expect(result['issue-id']).to eq issue_id
end
@@ -103,10 +97,6 @@ describe Projects::ErrorTrackingHelper do
expect(result['project-path']).to eq project_path
end
- it 'returns the correct details path' do
- expect(result['issue-details-path']).to eq details_path
- end
-
it 'returns the correct stack trace path' do
expect(result['issue-stack-trace-path']).to eq stack_trace_path
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 7fc568bb960..37bc2b382cb 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -5,6 +5,37 @@ require 'spec_helper'
describe ProjectsHelper do
include ProjectForksHelper
+ describe '#project_incident_management_setting' do
+ let(:project) { create(:project) }
+
+ before do
+ helper.instance_variable_set(:@project, project)
+ end
+
+ context 'when incident_management_setting exists' do
+ let(:project_incident_management_setting) do
+ create(:project_incident_management_setting, project: project)
+ end
+
+ it 'returns project_incident_management_setting' do
+ expect(helper.project_incident_management_setting).to(
+ eq(project_incident_management_setting)
+ )
+ end
+ end
+
+ context 'when incident_management_setting does not exist' do
+ it 'builds incident_management_setting' do
+ setting = helper.project_incident_management_setting
+
+ expect(setting).not_to be_persisted
+ expect(setting.send_email).to be_falsey
+ expect(setting.create_issue).to be_truthy
+ expect(setting.issue_template_key).to be_nil
+ end
+ end
+ end
+
describe '#error_tracking_setting_project_json' do
let(:project) { create(:project) }
@@ -194,7 +225,7 @@ describe ProjectsHelper do
expect(helper.project_list_cache_key(project).last).to start_with('v')
end
- it 'includes wether or not the user can read cross project' do
+ it 'includes whether or not the user can read cross project' do
expect(helper.project_list_cache_key(project)).to include('cross-project:true')
end
diff --git a/spec/helpers/sourcegraph_helper_spec.rb b/spec/helpers/sourcegraph_helper_spec.rb
index 830bbb3129f..3e8486a5632 100644
--- a/spec/helpers/sourcegraph_helper_spec.rb
+++ b/spec/helpers/sourcegraph_helper_spec.rb
@@ -34,7 +34,7 @@ describe SourcegraphHelper do
end
end
- context '#sourcegraph_experimental_message' do
+ describe '#sourcegraph_experimental_message' do
let(:feature_conditional) { false }
let(:public_only) { false }
diff --git a/spec/helpers/submodule_helper_spec.rb b/spec/helpers/submodule_helper_spec.rb
index fcfce0eaf31..d229753a0f0 100644
--- a/spec/helpers/submodule_helper_spec.rb
+++ b/spec/helpers/submodule_helper_spec.rb
@@ -8,6 +8,11 @@ describe SubmoduleHelper do
let(:submodule_item) { double(id: 'hash', path: 'rack') }
let(:config) { Gitlab.config.gitlab }
let(:repo) { double }
+ let(:submodules) { Gitlab::SubmoduleLinks.new(repo) }
+
+ before do
+ allow(repo).to receive(:submodule_links).and_return(submodules)
+ end
shared_examples 'submodule_links' do
context 'submodule on self' do
@@ -95,34 +100,34 @@ describe SubmoduleHelper do
allow(repo).to receive(:project).and_return(project)
stub_url('./')
- expect(subject).to eq(["/master-project/#{project.path}", "/master-project/#{project.path}/tree/hash"])
+ expect(subject).to eq(["/master-project/#{project.path}", "/master-project/#{project.path}/-/tree/hash"])
end
end
context 'submodule on gitlab.com' do
it 'detects ssh' do
stub_url('git@gitlab.com:gitlab-org/gitlab-foss.git')
- expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/tree/hash'])
+ expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/-/tree/hash'])
end
it 'detects http' do
stub_url('http://gitlab.com/gitlab-org/gitlab-foss.git')
- expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/tree/hash'])
+ expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/-/tree/hash'])
end
it 'detects https' do
stub_url('https://gitlab.com/gitlab-org/gitlab-foss.git')
- expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/tree/hash'])
+ expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/-/tree/hash'])
end
it 'handles urls with no .git on the end' do
stub_url('http://gitlab.com/gitlab-org/gitlab-foss')
- expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/tree/hash'])
+ expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/-/tree/hash'])
end
it 'handles urls with trailing whitespace' do
stub_url('http://gitlab.com/gitlab-org/gitlab-foss.git ')
- expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/tree/hash'])
+ expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/-/tree/hash'])
end
it 'returns original with non-standard url' do
@@ -163,10 +168,10 @@ describe SubmoduleHelper do
let(:repo) { double(:repo, project: project) }
def expect_relative_link_to_resolve_to(relative_path, expected_path)
- allow(repo).to receive(:submodule_url_for).and_return(relative_path)
+ stub_url(relative_path)
result = subject
- expect(result).to eq([expected_path, "#{expected_path}/tree/#{submodule_item.id}"])
+ expect(result).to eq([expected_path, "#{expected_path}/-/tree/#{submodule_item.id}"])
end
it 'handles project under same group' do
@@ -183,7 +188,7 @@ describe SubmoduleHelper do
context 'repo path resolves to be located at root (namespace absent)' do
it 'returns nil' do
- allow(repo).to receive(:submodule_url_for).and_return('../../test.git')
+ stub_url('../../test.git')
result = subject
@@ -193,7 +198,7 @@ describe SubmoduleHelper do
context 'repo path resolves to be located underneath current project path' do
it 'returns nil because it is not possible to have repo nested under another repo' do
- allow(repo).to receive(:submodule_url_for).and_return('./test.git')
+ stub_url('./test.git')
result = subject
@@ -263,6 +268,7 @@ describe SubmoduleHelper do
end
def stub_url(url)
+ allow(submodules).to receive(:submodule_url_for).and_return(url)
allow(repo).to receive(:submodule_url_for).and_return(url)
end
end
diff --git a/spec/initializers/action_mailer_hooks_spec.rb b/spec/initializers/action_mailer_hooks_spec.rb
index ce6e1ed0fa2..20f96f7e16c 100644
--- a/spec/initializers/action_mailer_hooks_spec.rb
+++ b/spec/initializers/action_mailer_hooks_spec.rb
@@ -35,8 +35,11 @@ describe 'ActionMailer hooks' do
load Rails.root.join('config/initializers/action_mailer_hooks.rb')
if smime_interceptor_enabled
+ # Premailer must be registered before S/MIME or signatures will be mangled
expect(ActionMailer::Base).to(
- have_received(:register_interceptor).with(Gitlab::Email::Hook::SmimeSignatureInterceptor))
+ have_received(:register_interceptor).with(::Premailer::Rails::Hook).ordered)
+ expect(ActionMailer::Base).to(
+ have_received(:register_interceptor).with(Gitlab::Email::Hook::SmimeSignatureInterceptor).ordered)
else
expect(ActionMailer::Base).not_to(
have_received(:register_interceptor).with(Gitlab::Email::Hook::SmimeSignatureInterceptor))
diff --git a/spec/initializers/lograge_spec.rb b/spec/initializers/lograge_spec.rb
index 65652468d93..15165c6db98 100644
--- a/spec/initializers/lograge_spec.rb
+++ b/spec/initializers/lograge_spec.rb
@@ -5,16 +5,37 @@ require 'spec_helper'
describe 'lograge', type: :request do
let(:headers) { { 'X-Request-ID' => 'new-correlation-id' } }
- context 'for API requests' do
- subject { get("/api/v4/endpoint", params: {}, headers: headers) }
+ let(:large_params) do
+ half_limit = Gitlab::Utils::LogLimitedArray::MAXIMUM_ARRAY_LENGTH / 2
+
+ {
+ a: 'a',
+ b: 'b' * half_limit,
+ c: 'c' * half_limit,
+ d: 'd'
+ }
+ end
+
+ let(:limited_params) do
+ large_params.slice(:a, :b).map { |k, v| { key: k.to_s, value: v } } + ['...']
+ end
+ context 'for API requests' do
it 'logs to api_json log' do
# we assert receiving parameters by grape logger
expect_any_instance_of(Gitlab::GrapeLogging::Formatters::LogrageWithTimestamp).to receive(:call)
.with(anything, anything, anything, a_hash_including("correlation_id" => "new-correlation-id"))
.and_call_original
- subject
+ get("/api/v4/endpoint", params: {}, headers: headers)
+ end
+
+ it 'limits param size' do
+ expect(Lograge.formatter).to receive(:call)
+ .with(a_hash_including(params: limited_params))
+ .and_call_original
+
+ get("/api/v4/endpoint", params: large_params, headers: headers)
end
end
@@ -67,6 +88,14 @@ describe 'lograge', type: :request do
subject
end
+
+ it 'limits param size' do
+ expect(Lograge.formatter).to receive(:call)
+ .with(a_hash_including(params: limited_params))
+ .and_call_original
+
+ get("/", params: large_params, headers: headers)
+ end
end
context 'with a log subscriber' do
@@ -94,6 +123,11 @@ describe 'lograge', type: :request do
let(:logger) do
Logger.new(log_output).tap { |logger| logger.formatter = ->(_, _, _, msg) { msg } }
end
+ let(:log_data) { JSON.parse(log_output.string) }
+
+ before do
+ Lograge.logger = logger
+ end
describe 'with an exception' do
let(:exception) { RuntimeError.new('bad request') }
@@ -102,18 +136,29 @@ describe 'lograge', type: :request do
before do
allow(exception).to receive(:backtrace).and_return(backtrace)
event.payload[:exception_object] = exception
- Lograge.logger = logger
end
it 'adds exception data to log' do
subscriber.process_action(event)
- log_data = JSON.parse(log_output.string)
-
expect(log_data['exception.class']).to eq('RuntimeError')
expect(log_data['exception.message']).to eq('bad request')
expect(log_data['exception.backtrace']).to eq(Gitlab::BacktraceCleaner.clean_backtrace(backtrace))
end
end
+
+ describe 'with etag_route' do
+ let(:etag_route) { 'etag route' }
+
+ before do
+ event.payload[:etag_route] = etag_route
+ end
+
+ it 'adds etag_route to log' do
+ subscriber.process_action(event)
+
+ expect(log_data['etag_route']).to eq(etag_route)
+ end
+ end
end
end
diff --git a/spec/initializers/mail_encoding_patch_spec.rb b/spec/initializers/mail_encoding_patch_spec.rb
new file mode 100644
index 00000000000..41074af3503
--- /dev/null
+++ b/spec/initializers/mail_encoding_patch_spec.rb
@@ -0,0 +1,207 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require 'mail'
+require_relative '../../config/initializers/mail_encoding_patch.rb'
+
+describe 'Mail quoted-printable transfer encoding patch and Unicode characters' do
+ shared_examples 'email encoding' do |email|
+ it 'enclosing in a new object does not change the encoded original' do
+ new_email = Mail.new(email)
+
+ expect(new_email.subject).to eq(email.subject)
+ expect(new_email.from).to eq(email.from)
+ expect(new_email.to).to eq(email.to)
+ expect(new_email.content_type).to eq(email.content_type)
+ expect(new_email.content_transfer_encoding).to eq(email.content_transfer_encoding)
+
+ expect(new_email.encoded).to eq(email.encoded)
+ end
+ end
+
+ context 'with a text email' do
+ context 'with a body that encodes to exactly 74 characters (final newline)' do
+ email = Mail.new do
+ to 'jane.doe@example.com'
+ from 'John Dóe <john.doe@example.com>'
+ subject 'Encoding tést'
+ content_type 'text/plain; charset=UTF-8'
+ content_transfer_encoding 'quoted-printable'
+ body "-123456789-123456789-123456789-123456789-123456789-123456789-123456789-1\n"
+ end
+
+ it_behaves_like 'email encoding', email
+ end
+
+ context 'with a body that encodes to exactly 74 characters (no final newline)' do
+ email = Mail.new do
+ to 'jane.doe@example.com'
+ from 'John Dóe <john.doe@example.com>'
+ subject 'Encoding tést'
+ content_type 'text/plain; charset=UTF-8'
+ content_transfer_encoding 'quoted-printable'
+ body "-123456789-123456789-123456789-123456789-123456789-123456789-123456789-12"
+ end
+
+ it_behaves_like 'email encoding', email
+ end
+
+ context 'with a body that encodes to exactly 75 characters' do
+ email = Mail.new do
+ to 'jane.doe@example.com'
+ from 'John Dóe <john.doe@example.com>'
+ subject 'Encoding tést'
+ content_type 'text/plain; charset=UTF-8'
+ content_transfer_encoding 'quoted-printable'
+ body "-123456789-123456789-123456789-123456789-123456789-123456789-123456789-12\n"
+ end
+
+ it_behaves_like 'email encoding', email
+ end
+ end
+
+ context 'with an html email' do
+ context 'with a body that encodes to exactly 74 characters (final newline)' do
+ email = Mail.new do
+ to 'jane.doe@example.com'
+ from 'John Dóe <john.doe@example.com>'
+ subject 'Encoding tést'
+ content_type 'text/html; charset=UTF-8'
+ content_transfer_encoding 'quoted-printable'
+ body "<p>-123456789-123456789-123456789-123456789-123456789-123456789-1234</p>\n"
+ end
+
+ it_behaves_like 'email encoding', email
+ end
+
+ context 'with a body that encodes to exactly 74 characters (no final newline)' do
+ email = Mail.new do
+ to 'jane.doe@example.com'
+ from 'John Dóe <john.doe@example.com>'
+ subject 'Encoding tést'
+ content_type 'text/html; charset=UTF-8'
+ content_transfer_encoding 'quoted-printable'
+ body "<p>-123456789-123456789-123456789-123456789-123456789-123456789-12345</p>"
+ end
+
+ it_behaves_like 'email encoding', email
+ end
+
+ context 'with a body that encodes to exactly 75 characters' do
+ email = Mail.new do
+ to 'jane.doe@example.com'
+ from 'John Dóe <john.doe@example.com>'
+ subject 'Encoding tést'
+ content_type 'text/html; charset=UTF-8'
+ content_transfer_encoding 'quoted-printable'
+ body "<p>-123456789-123456789-123456789-123456789-123456789-123456789-12345</p>\n"
+ end
+
+ it_behaves_like 'email encoding', email
+ end
+ end
+
+ context 'a multipart email' do
+ email = Mail.new do
+ to 'jane.doe@example.com'
+ from 'John Dóe <john.doe@example.com>'
+ subject 'Encoding tést'
+ end
+
+ text_part = Mail::Part.new do
+ content_type 'text/plain; charset=UTF-8'
+ content_transfer_encoding 'quoted-printable'
+ body "\r\n\r\n@john.doe, now known as John Dóe has accepted your invitation to join the Administrator / htmltest project.\r\n\r\nhttp://169.254.169.254:3000/root/htmltest\r\n\r\n-- \r\nYou're receiving this email because of your account on 169.254.169.254.\r\n\r\n\r\n\r\n"
+ end
+
+ html_part = Mail::Part.new do
+ content_type 'text/html; charset=UTF-8'
+ content_transfer_encoding 'quoted-printable'
+ body "\r\n\r\n@john.doe, now known as John Dóe has accepted your invitation to join the Administrator / htmltest project.\r\n\r\nhttp://169.254.169.254:3000/root/htmltest\r\n\r\n-- \r\nYou're receiving this email because of your account on 169.254.169.254.\r\n\r\n\r\n\r\n"
+ end
+
+ email.text_part = text_part
+ email.html_part = html_part
+
+ it_behaves_like 'email encoding', email
+ end
+
+ context 'with non UTF-8 charset' do
+ email = Mail.new do
+ to 'jane.doe@example.com'
+ from 'John Dóe <john.doe@example.com>'
+ subject 'Encoding tést'
+ content_type 'text/plain; charset=windows-1251'
+ content_transfer_encoding 'quoted-printable'
+ body "This line is very long and will be put in multiple quoted-printable lines. Some Russian character: Д\n\n\n".encode('windows-1251')
+ end
+
+ it_behaves_like 'email encoding', email
+
+ it 'can be decoded back' do
+ expect(Mail.new(email).body.decoded.dup.force_encoding('windows-1251').encode('utf-8')).to include('Some Russian character: Д')
+ end
+ end
+
+ context 'with binary content' do
+ context 'can be encoded with \'base64\' content-transfer-encoding' do
+ image = File.binread('spec/fixtures/rails_sample.jpg')
+
+ email = Mail.new do
+ to 'jane.doe@example.com'
+ from 'John Dóe <john.doe@example.com>'
+ subject 'Encoding tést'
+ end
+
+ part = Mail::Part.new
+ part.body = [image].pack('m')
+ part.content_type = 'image/jpg'
+ part.content_transfer_encoding = 'base64'
+
+ email.parts << part
+
+ it_behaves_like 'email encoding', email
+
+ it 'binary contents are not modified' do
+ expect(email.parts.first.decoded).to eq(image)
+
+ # Enclosing in a new Mail object does not corrupt encoded data
+ expect(Mail.new(email).parts.first.decoded).to eq(image)
+ end
+ end
+
+ context 'encoding fails with \'quoted-printable\' content-transfer-encoding' do
+ image = File.binread('spec/fixtures/rails_sample.jpg')
+
+ email = Mail.new do
+ to 'jane.doe@example.com'
+ from 'John Dóe <john.doe@example.com>'
+ subject 'Encoding tést'
+ end
+
+ part = Mail::Part.new
+ part.body = [image].pack('M*')
+ part.content_type = 'image/jpg'
+ part.content_transfer_encoding = 'quoted-printable'
+
+ email.parts << part
+
+ # The Mail patch in `config/initializers/mail_encoding_patch.rb` fixes
+ # encoding of non-binary content. The failure below is expected since we
+ # reverted some upstream changes in order to properly support SMIME signatures
+ # See https://gitlab.com/gitlab-org/gitlab/issues/197386
+ it 'content cannot be decoded back' do
+ # Headers are ok
+ expect(email.subject).to eq(email.subject)
+ expect(email.from).to eq(email.from)
+ expect(email.to).to eq(email.to)
+ expect(email.content_type).to eq(email.content_type)
+ expect(email.content_transfer_encoding).to eq(email.content_transfer_encoding)
+
+ # Content cannot be recovered
+ expect(email.parts.first.decoded).not_to eq(image)
+ end
+ end
+ end
+end
diff --git a/spec/javascripts/badges/dummy_badge.js b/spec/javascripts/badges/dummy_badge.js
index e8a460cdc76..a0dee89736e 100644
--- a/spec/javascripts/badges/dummy_badge.js
+++ b/spec/javascripts/badges/dummy_badge.js
@@ -1,9 +1,9 @@
-import _ from 'underscore';
+import { uniqueId } from 'lodash';
import { DUMMY_IMAGE_URL, TEST_HOST } from 'spec/test_constants';
import { PROJECT_BADGE } from '~/badges/constants';
export const createDummyBadge = () => {
- const id = _.uniqueId();
+ const id = uniqueId();
return {
id,
name: 'TestBadge',
diff --git a/spec/javascripts/behaviors/shortcuts/shortcuts_issuable_spec.js b/spec/javascripts/behaviors/shortcuts/shortcuts_issuable_spec.js
index 5e457a4e823..f6232026915 100644
--- a/spec/javascripts/behaviors/shortcuts/shortcuts_issuable_spec.js
+++ b/spec/javascripts/behaviors/shortcuts/shortcuts_issuable_spec.js
@@ -52,6 +52,7 @@ describe('ShortcutsIssuable', function() {
return documentFragment;
});
};
+
describe('with empty selection', () => {
it('does not return an error', () => {
ShortcutsIssuable.replyWithSelectedText(true);
@@ -297,5 +298,18 @@ describe('ShortcutsIssuable', function() {
});
});
});
+
+ describe('with a valid selection with no text content', () => {
+ it('returns the proper markdown', done => {
+ stubSelection('<img src="foo" alt="image" />');
+ ShortcutsIssuable.replyWithSelectedText(true);
+
+ setTimeout(() => {
+ expect($(FORM_SELECTOR).val()).toBe('> ![image](http://localhost:9876/foo)\n\n');
+
+ done();
+ });
+ });
+ });
});
});
diff --git a/spec/javascripts/blob/notebook/index_spec.js b/spec/javascripts/blob/notebook/index_spec.js
index 6bb5bac007f..db6ca5bd22d 100644
--- a/spec/javascripts/blob/notebook/index_spec.js
+++ b/spec/javascripts/blob/notebook/index_spec.js
@@ -94,7 +94,7 @@ describe('iPython notebook renderer', () => {
it('shows error message', () => {
expect(document.querySelector('.md').textContent.trim()).toBe(
- 'An error occurred whilst parsing the file.',
+ 'An error occurred while parsing the file.',
);
});
});
@@ -123,7 +123,7 @@ describe('iPython notebook renderer', () => {
it('shows error message', () => {
expect(document.querySelector('.md').textContent.trim()).toBe(
- 'An error occurred whilst loading the file. Please try again later.',
+ 'An error occurred while loading the file. Please try again later.',
);
});
});
diff --git a/spec/javascripts/blob/pdf/index_spec.js b/spec/javascripts/blob/pdf/index_spec.js
index 6ea097da742..66769a8aa47 100644
--- a/spec/javascripts/blob/pdf/index_spec.js
+++ b/spec/javascripts/blob/pdf/index_spec.js
@@ -65,7 +65,7 @@ describe('PDF renderer', () => {
it('shows error message', () => {
expect(document.querySelector('.md').textContent.trim()).toBe(
- 'An error occurred whilst loading the file. Please try again later.',
+ 'An error occurred while loading the file. Please try again later.',
);
});
});
diff --git a/spec/javascripts/create_cluster/gke_cluster/stores/getters_spec.js b/spec/javascripts/create_cluster/gke_cluster/stores/getters_spec.js
index ac92716b0ab..39106c3f6ca 100644
--- a/spec/javascripts/create_cluster/gke_cluster/stores/getters_spec.js
+++ b/spec/javascripts/create_cluster/gke_cluster/stores/getters_spec.js
@@ -1,4 +1,9 @@
-import * as getters from '~/create_cluster/gke_cluster/store/getters';
+import {
+ hasProject,
+ hasZone,
+ hasMachineType,
+ hasValidData,
+} from '~/create_cluster/gke_cluster/store/getters';
import { selectedProjectMock, selectedZoneMock, selectedMachineTypeMock } from '../mock_data';
describe('GCP Cluster Dropdown Store Getters', () => {
@@ -7,6 +12,7 @@ describe('GCP Cluster Dropdown Store Getters', () => {
describe('valid states', () => {
beforeEach(() => {
state = {
+ projectHasBillingEnabled: true,
selectedProject: selectedProjectMock,
selectedZone: selectedZoneMock,
selectedMachineType: selectedMachineTypeMock,
@@ -15,19 +21,25 @@ describe('GCP Cluster Dropdown Store Getters', () => {
describe('hasProject', () => {
it('should return true when project is selected', () => {
- expect(getters.hasProject(state)).toEqual(true);
+ expect(hasProject(state)).toEqual(true);
});
});
describe('hasZone', () => {
it('should return true when zone is selected', () => {
- expect(getters.hasZone(state)).toEqual(true);
+ expect(hasZone(state)).toEqual(true);
});
});
describe('hasMachineType', () => {
it('should return true when machine type is selected', () => {
- expect(getters.hasMachineType(state)).toEqual(true);
+ expect(hasMachineType(state)).toEqual(true);
+ });
+ });
+
+ describe('hasValidData', () => {
+ it('should return true when a project, zone and machine type are selected', () => {
+ expect(hasValidData(state, { hasZone: true, hasMachineType: true })).toEqual(true);
});
});
});
@@ -46,19 +58,45 @@ describe('GCP Cluster Dropdown Store Getters', () => {
describe('hasProject', () => {
it('should return false when project is not selected', () => {
- expect(getters.hasProject(state)).toEqual(false);
+ expect(hasProject(state)).toEqual(false);
});
});
describe('hasZone', () => {
it('should return false when zone is not selected', () => {
- expect(getters.hasZone(state)).toEqual(false);
+ expect(hasZone(state)).toEqual(false);
});
});
describe('hasMachineType', () => {
it('should return false when machine type is not selected', () => {
- expect(getters.hasMachineType(state)).toEqual(false);
+ expect(hasMachineType(state)).toEqual(false);
+ });
+ });
+
+ describe('hasValidData', () => {
+ let getters;
+
+ beforeEach(() => {
+ getters = { hasZone: true, hasMachineType: true };
+ });
+
+ it('should return false when project is not billable', () => {
+ state.projectHasBillingEnabled = false;
+
+ expect(hasValidData(state, getters)).toEqual(false);
+ });
+
+ it('should return false when zone is not selected', () => {
+ getters.hasZone = false;
+
+ expect(hasValidData(state, getters)).toEqual(false);
+ });
+
+ it('should return false when machine type is not selected', () => {
+ getters.hasMachineType = false;
+
+ expect(hasValidData(state, getters)).toEqual(false);
});
});
});
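The new hasValidData cases pass a stubbed getters object as the second argument, the way Vuex supplies composed getters to one another. A getter of roughly this shape would satisfy these specs (a sketch of an assumed implementation, not the actual store code):

// Hypothetical sketch only; the real getter lives in ~/create_cluster/gke_cluster/store/getters.
export const hasValidData = (state, getters) =>
  Boolean(state.projectHasBillingEnabled) && getters.hasZone && getters.hasMachineType;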
diff --git a/spec/javascripts/cycle_analytics/banner_spec.js b/spec/javascripts/cycle_analytics/banner_spec.js
index 86408c18dda..06fbaa68ffc 100644
--- a/spec/javascripts/cycle_analytics/banner_spec.js
+++ b/spec/javascripts/cycle_analytics/banner_spec.js
@@ -16,8 +16,10 @@ describe('Cycle analytics banner', () => {
vm.$destroy();
});
- it('should render cycle analytics information', () => {
- expect(vm.$el.querySelector('h4').textContent.trim()).toEqual('Introducing Cycle Analytics');
+ it('should render value stream analytics information', () => {
+ expect(vm.$el.querySelector('h4').textContent.trim()).toEqual(
+ 'Introducing Value Stream Analytics',
+ );
expect(
vm.$el
@@ -25,7 +27,7 @@ describe('Cycle analytics banner', () => {
.textContent.trim()
.replace(/[\r\n]+/g, ' '),
).toContain(
- 'Cycle Analytics gives an overview of how much time it takes to go from idea to production in your project.',
+ 'Value Stream Analytics gives an overview of how much time it takes to go from idea to production in your project.',
);
expect(vm.$el.querySelector('a').textContent.trim()).toEqual('Read more');
diff --git a/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js b/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js
index df160d7a363..e0686901483 100644
--- a/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js
+++ b/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js
@@ -9,7 +9,7 @@ const startVersion = { version_index: 4 };
const mergeRequestVersion = {
version_path: '123',
};
-const baseVersionPath = '/gnuwget/wget2/merge_requests/6/diffs?diff_id=37';
+const baseVersionPath = '/gnuwget/wget2/-/merge_requests/6/diffs?diff_id=37';
describe('CompareVersionsDropdown', () => {
let wrapper;
diff --git a/spec/javascripts/diffs/components/diff_file_spec.js b/spec/javascripts/diffs/components/diff_file_spec.js
index b4425b8e8a2..eab4f4fb17f 100644
--- a/spec/javascripts/diffs/components/diff_file_spec.js
+++ b/spec/javascripts/diffs/components/diff_file_spec.js
@@ -1,6 +1,7 @@
import Vue from 'vue';
import { createStore } from 'ee_else_ce/mr_notes/stores';
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
+import { mockTracking, triggerEvent } from 'spec/helpers/tracking_helper';
import DiffFileComponent from '~/diffs/components/diff_file.vue';
import { diffViewerModes, diffViewerErrors } from '~/ide/constants';
import diffFileMockDataReadable from '../mock_data/diff_file';
@@ -8,12 +9,14 @@ import diffFileMockDataUnreadable from '../mock_data/diff_file_unreadable';
describe('DiffFile', () => {
let vm;
+ let trackingSpy;
beforeEach(() => {
vm = createComponentWithStore(Vue.extend(DiffFileComponent), createStore(), {
file: JSON.parse(JSON.stringify(diffFileMockDataReadable)),
canCurrentUserFork: false,
}).$mount();
+ trackingSpy = mockTracking('_category_', vm.$el, spyOn);
});
afterEach(() => {
@@ -30,6 +33,7 @@ describe('DiffFile', () => {
expect(el.querySelectorAll('.diff-content.hidden').length).toEqual(0);
expect(el.querySelector('.js-file-title')).toBeDefined();
+ expect(el.querySelector('.btn-clipboard')).toBeDefined();
expect(el.querySelector('.file-title-name').innerText.indexOf(file_path)).toBeGreaterThan(-1);
expect(el.querySelector('.js-syntax-highlight')).toBeDefined();
@@ -39,6 +43,25 @@ describe('DiffFile', () => {
.then(() => {
expect(el.querySelectorAll('.line_content').length).toBe(5);
expect(el.querySelectorAll('.js-line-expansion-content').length).toBe(1);
+ triggerEvent('.btn-clipboard');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('should track a click event on copy to clipboard button', done => {
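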
+ const el = vm.$el;
+
+ expect(el.querySelector('.btn-clipboard')).toBeDefined();
+ vm.file.renderIt = true;
+ vm.$nextTick()
+ .then(() => {
+ triggerEvent('.btn-clipboard');
+
+ expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_copy_file_button', {
+ label: 'diff_copy_file_path_button',
+ property: 'diff_copy_file',
+ });
})
.then(done)
.catch(done.fail);
diff --git a/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js b/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js
deleted file mode 100644
index 8d20be9971d..00000000000
--- a/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js
+++ /dev/null
@@ -1,105 +0,0 @@
-import Vue from 'vue';
-import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import DiffLineGutterContent from '~/diffs/components/diff_line_gutter_content.vue';
-import { createStore } from '~/mr_notes/stores';
-import discussionsMockData from '../mock_data/diff_discussions';
-import diffFileMockData from '../mock_data/diff_file';
-
-describe('DiffLineGutterContent', () => {
- const getDiffFileMock = () => Object.assign({}, diffFileMockData);
- const createComponent = (options = {}) => {
- const cmp = Vue.extend(DiffLineGutterContent);
- const props = Object.assign({}, options);
- props.line = {
- line_code: 'LC_42',
- type: 'new',
- old_line: null,
- new_line: 1,
- discussions: [{ ...discussionsMockData }],
- text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- rich_text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- meta_data: null,
- };
- props.fileHash = getDiffFileMock().file_hash;
- props.contextLinesPath = '/context/lines/path';
-
- return createComponentWithStore(cmp, createStore(), props).$mount();
- };
-
- describe('computed', () => {
- describe('lineHref', () => {
- it('should prepend # to lineCode', () => {
- const lineCode = 'LC_42';
- const component = createComponent();
-
- expect(component.lineHref).toEqual(`#${lineCode}`);
- });
-
- it('should return # if there is no lineCode', () => {
- const component = createComponent();
- component.line.line_code = '';
-
- expect(component.lineHref).toEqual('#');
- });
- });
-
- describe('discussions, hasDiscussions, shouldShowAvatarsOnGutter', () => {
- it('should return empty array when there is no discussion', () => {
- const component = createComponent();
- component.line.discussions = [];
-
- expect(component.hasDiscussions).toEqual(false);
- expect(component.shouldShowAvatarsOnGutter).toEqual(false);
- });
-
- it('should return discussions for the given lineCode', () => {
- const cmp = Vue.extend(DiffLineGutterContent);
- const props = {
- line: getDiffFileMock().highlighted_diff_lines[1],
- fileHash: getDiffFileMock().file_hash,
- showCommentButton: true,
- contextLinesPath: '/context/lines/path',
- };
- props.line.discussions = [Object.assign({}, discussionsMockData)];
- const component = createComponentWithStore(cmp, createStore(), props).$mount();
-
- expect(component.hasDiscussions).toEqual(true);
- expect(component.shouldShowAvatarsOnGutter).toEqual(true);
- });
- });
- });
-
- describe('template', () => {
- it('should render comment button', () => {
- const component = createComponent({
- showCommentButton: true,
- });
- Object.defineProperty(component, 'isLoggedIn', {
- get() {
- return true;
- },
- });
-
- expect(component.$el.querySelector('.js-add-diff-note-button')).toBeDefined();
- });
-
- it('should render line link', () => {
- const lineNumber = 42;
- const lineCode = `LC_${lineNumber}`;
- const component = createComponent({ lineNumber, lineCode });
- const link = component.$el.querySelector('a');
-
- expect(link.href.indexOf(`#${lineCode}`)).toBeGreaterThan(-1);
- expect(link.dataset.linenumber).toEqual(lineNumber.toString());
- });
-
- it('should render user avatars', () => {
- const component = createComponent({
- showCommentButton: true,
- lineCode: getDiffFileMock().highlighted_diff_lines[1].line_code,
- });
-
- expect(component.$el.querySelector('.diff-comment-avatar-holders')).not.toBe(null);
- });
- });
-});
diff --git a/spec/javascripts/diffs/components/diff_table_cell_spec.js b/spec/javascripts/diffs/components/diff_table_cell_spec.js
deleted file mode 100644
index f91e3b56805..00000000000
--- a/spec/javascripts/diffs/components/diff_table_cell_spec.js
+++ /dev/null
@@ -1,37 +0,0 @@
-import Vue from 'vue';
-import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import { createStore } from '~/mr_notes/stores';
-import DiffTableCell from '~/diffs/components/diff_table_cell.vue';
-import diffFileMockData from '../mock_data/diff_file';
-
-describe('DiffTableCell', () => {
- const createComponent = options =>
- createComponentWithStore(Vue.extend(DiffTableCell), createStore(), {
- line: diffFileMockData.highlighted_diff_lines[0],
- fileHash: diffFileMockData.file_hash,
- contextLinesPath: 'contextLinesPath',
- ...options,
- }).$mount();
-
- it('does not highlight row when isHighlighted prop is false', done => {
- const vm = createComponent({ isHighlighted: false });
-
- vm.$nextTick()
- .then(() => {
- expect(vm.$el.classList).not.toContain('hll');
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('highlights row when isHighlighted prop is true', done => {
- const vm = createComponent({ isHighlighted: true });
-
- vm.$nextTick()
- .then(() => {
- expect(vm.$el.classList).toContain('hll');
- })
- .then(done)
- .catch(done.fail);
- });
-});
diff --git a/spec/javascripts/diffs/mock_data/diff_discussions.js b/spec/javascripts/diffs/mock_data/diff_discussions.js
index 711ab543411..a9b00634104 100644
--- a/spec/javascripts/diffs/mock_data/diff_discussions.js
+++ b/spec/javascripts/diffs/mock_data/diff_discussions.js
@@ -59,9 +59,10 @@ export default {
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-test%2Fmerge_requests%2F20%23note_1749&user_id=1',
path: '/gitlab-org/gitlab-test/notes/1749',
- noteable_note_url: 'http://localhost:3000/gitlab-org/gitlab-test/merge_requests/20#note_1749',
+ noteable_note_url:
+ 'http://localhost:3000/gitlab-org/gitlab-test/-/merge_requests/20#note_1749',
resolve_path:
- '/gitlab-org/gitlab-test/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
+ '/gitlab-org/gitlab-test/-/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
resolve_with_issue_path:
'/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
},
@@ -111,9 +112,10 @@ export default {
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-test%2Fmerge_requests%2F20%23note_1753&user_id=1',
path: '/gitlab-org/gitlab-test/notes/1753',
- noteable_note_url: 'http://localhost:3000/gitlab-org/gitlab-test/merge_requests/20#note_1753',
+ noteable_note_url:
+ 'http://localhost:3000/gitlab-org/gitlab-test/-/merge_requests/20#note_1753',
resolve_path:
- '/gitlab-org/gitlab-test/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
+ '/gitlab-org/gitlab-test/-/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
resolve_with_issue_path:
'/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
},
@@ -153,9 +155,10 @@ export default {
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-test%2Fmerge_requests%2F20%23note_1754&user_id=1',
path: '/gitlab-org/gitlab-test/notes/1754',
- noteable_note_url: 'http://localhost:3000/gitlab-org/gitlab-test/merge_requests/20#note_1754',
+ noteable_note_url:
+ 'http://localhost:3000/gitlab-org/gitlab-test/-/merge_requests/20#note_1754',
resolve_path:
- '/gitlab-org/gitlab-test/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
+ '/gitlab-org/gitlab-test/-/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
resolve_with_issue_path:
'/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
},
@@ -195,9 +198,10 @@ export default {
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-test%2Fmerge_requests%2F20%23note_1755&user_id=1',
path: '/gitlab-org/gitlab-test/notes/1755',
- noteable_note_url: 'http://localhost:3000/gitlab-org/gitlab-test/merge_requests/20#note_1755',
+ noteable_note_url:
+ 'http://localhost:3000/gitlab-org/gitlab-test/-/merge_requests/20#note_1755',
resolve_path:
- '/gitlab-org/gitlab-test/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
+ '/gitlab-org/gitlab-test/-/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
resolve_with_issue_path:
'/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
},
@@ -237,9 +241,10 @@ export default {
report_abuse_path:
'/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-test%2Fmerge_requests%2F20%23note_1756&user_id=1',
path: '/gitlab-org/gitlab-test/notes/1756',
- noteable_note_url: 'http://localhost:3000/gitlab-org/gitlab-test/merge_requests/20#note_1756',
+ noteable_note_url:
+ 'http://localhost:3000/gitlab-org/gitlab-test/-/merge_requests/20#note_1756',
resolve_path:
- '/gitlab-org/gitlab-test/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
+ '/gitlab-org/gitlab-test/-/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
resolve_with_issue_path:
'/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
},
@@ -248,7 +253,7 @@ export default {
resolvable: true,
resolved: false,
resolve_path:
- '/gitlab-org/gitlab-test/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
+ '/gitlab-org/gitlab-test/-/merge_requests/20/discussions/6b232e05bea388c6b043ccc243ba505faac04ea8/resolve',
resolve_with_issue_path:
'/gitlab-org/gitlab-test/issues/new?discussion_to_resolve=6b232e05bea388c6b043ccc243ba505faac04ea8&merge_request_to_resolve_discussions_of=20',
diff_file: {
diff --git a/spec/javascripts/diffs/mock_data/diff_file.js b/spec/javascripts/diffs/mock_data/diff_file.js
index 531686efff1..9dc365b7403 100644
--- a/spec/javascripts/diffs/mock_data/diff_file.js
+++ b/spec/javascripts/diffs/mock_data/diff_file.js
@@ -1,246 +1,5 @@
-// Copied to ee/spec/frontend/diffs/mock_data/diff_file.js
+// No new code should be added to this file. Instead, modify the
+// file this one re-exports from. For more detail about why, see:
+// https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/31349
-export default {
- submodule: false,
- submodule_link: null,
- blob: {
- id: '9e10516ca50788acf18c518a231914a21e5f16f7',
- path: 'CHANGELOG',
- name: 'CHANGELOG',
- mode: '100644',
- readable_text: true,
- icon: 'file-text-o',
- },
- blob_path: 'CHANGELOG',
- blob_name: 'CHANGELOG',
- blob_icon: '<i aria-hidden="true" data-hidden="true" class="fa fa-file-text-o fa-fw"></i>',
- file_hash: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a',
- file_path: 'CHANGELOG',
- new_file: false,
- deleted_file: false,
- renamed_file: false,
- old_path: 'CHANGELOG',
- new_path: 'CHANGELOG',
- mode_changed: false,
- a_mode: '100644',
- b_mode: '100644',
- text: true,
- viewer: {
- name: 'text',
- error: null,
- collapsed: false,
- },
- added_lines: 2,
- removed_lines: 0,
- diff_refs: {
- base_sha: 'e63f41fe459e62e1228fcef60d7189127aeba95a',
- start_sha: 'd9eaefe5a676b820c57ff18cf5b68316025f7962',
- head_sha: 'c48ee0d1bf3b30453f5b32250ce03134beaa6d13',
- },
- content_sha: 'c48ee0d1bf3b30453f5b32250ce03134beaa6d13',
- stored_externally: null,
- external_storage: null,
- old_path_html: 'CHANGELOG',
- new_path_html: 'CHANGELOG',
- edit_path: '/gitlab-org/gitlab-test/edit/spooky-stuff/CHANGELOG',
- view_path: '/gitlab-org/gitlab-test/blob/spooky-stuff/CHANGELOG',
- replaced_view_path: null,
- collapsed: false,
- renderIt: false,
- too_large: false,
- context_lines_path:
- '/gitlab-org/gitlab-test/blob/c48ee0d1bf3b30453f5b32250ce03134beaa6d13/CHANGELOG/diff',
- highlighted_diff_lines: [
- {
- line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_1',
- type: 'new',
- old_line: null,
- new_line: 1,
- discussions: [],
- text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- rich_text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- meta_data: null,
- },
- {
- line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2',
- type: 'new',
- old_line: null,
- new_line: 2,
- discussions: [],
- text: '+<span id="LC2" class="line" lang="plaintext"></span>\n',
- rich_text: '+<span id="LC2" class="line" lang="plaintext"></span>\n',
- meta_data: null,
- },
- {
- line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3',
- type: null,
- old_line: 1,
- new_line: 3,
- discussions: [],
- text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
- rich_text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
- meta_data: null,
- },
- {
- line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4',
- type: null,
- old_line: 2,
- new_line: 4,
- discussions: [],
- text: ' <span id="LC4" class="line" lang="plaintext"></span>\n',
- rich_text: ' <span id="LC4" class="line" lang="plaintext"></span>\n',
- meta_data: null,
- },
- {
- line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5',
- type: null,
- old_line: 3,
- new_line: 5,
- discussions: [],
- text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
- rich_text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
- meta_data: null,
- },
- {
- line_code: null,
- type: 'match',
- old_line: null,
- new_line: null,
- discussions: [],
- text: '',
- rich_text: '',
- meta_data: {
- old_pos: 3,
- new_pos: 5,
- },
- },
- ],
- parallel_diff_lines: [
- {
- left: {
- type: 'empty-cell',
- },
- right: {
- line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_1',
- type: 'new',
- old_line: null,
- new_line: 1,
- discussions: [],
- text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- rich_text: '<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- meta_data: null,
- },
- },
- {
- left: {
- type: 'empty-cell',
- },
- right: {
- line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2',
- type: 'new',
- old_line: null,
- new_line: 2,
- discussions: [],
- text: '+<span id="LC2" class="line" lang="plaintext"></span>\n',
- rich_text: '<span id="LC2" class="line" lang="plaintext"></span>\n',
- meta_data: null,
- },
- },
- {
- left: {
- line_Code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3',
- type: null,
- old_line: 1,
- new_line: 3,
- discussions: [],
- text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
- rich_text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
- meta_data: null,
- },
- right: {
- line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3',
- type: null,
- old_line: 1,
- new_line: 3,
- discussions: [],
- text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
- rich_text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n',
- meta_data: null,
- },
- },
- {
- left: {
- line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4',
- type: null,
- old_line: 2,
- new_line: 4,
- discussions: [],
- text: ' <span id="LC4" class="line" lang="plaintext"></span>\n',
- rich_text: '<span id="LC4" class="line" lang="plaintext"></span>\n',
- meta_data: null,
- },
- right: {
- line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4',
- type: null,
- old_line: 2,
- new_line: 4,
- discussions: [],
- text: ' <span id="LC4" class="line" lang="plaintext"></span>\n',
- rich_text: '<span id="LC4" class="line" lang="plaintext"></span>\n',
- meta_data: null,
- },
- },
- {
- left: {
- line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5',
- type: null,
- old_line: 3,
- new_line: 5,
- discussions: [],
- text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
- rich_text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
- meta_data: null,
- },
- right: {
- line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5',
- type: null,
- old_line: 3,
- new_line: 5,
- discussions: [],
- text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
- rich_text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n',
- meta_data: null,
- },
- },
- {
- left: {
- line_code: null,
- type: 'match',
- old_line: null,
- new_line: null,
- discussions: [],
- text: '',
- rich_text: '',
- meta_data: {
- old_pos: 3,
- new_pos: 5,
- },
- },
- right: {
- line_code: null,
- type: 'match',
- old_line: null,
- new_line: null,
- discussions: [],
- text: '',
- rich_text: '',
- meta_data: {
- old_pos: 3,
- new_pos: 5,
- },
- },
- },
- ],
- discussions: [],
- renderingLines: false,
-};
+export { default } from '../../../frontend/diffs/mock_data/diff_file';
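With the Karma mock reduced to a re-export, specs that import the old relative path keep receiving the same data as the Jest suite; an existing import such as the one below continues to resolve unchanged (spec-side usage, shown for illustration):

// Unchanged spec-side import; it now resolves to the Jest mock via the re-export above.
import diffFileMockData from '../mock_data/diff_file';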
diff --git a/spec/javascripts/diffs/store/actions_spec.js b/spec/javascripts/diffs/store/actions_spec.js
index af2dd7b4f93..ff17d8ec158 100644
--- a/spec/javascripts/diffs/store/actions_spec.js
+++ b/spec/javascripts/diffs/store/actions_spec.js
@@ -158,16 +158,19 @@ describe('DiffsStoreActions', () => {
const res1 = { diff_files: [], pagination: { next_page: 2 } };
const res2 = { diff_files: [], pagination: {} };
mock
- .onGet(endpointBatch, { params: { page: undefined, per_page: DIFFS_PER_PAGE, w: '1' } })
- .reply(200, res1);
- mock
- .onGet(endpointBatch, { params: { page: 2, per_page: DIFFS_PER_PAGE, w: '1' } })
+ .onGet(endpointBatch, {
+ params: { page: 1, per_page: DIFFS_PER_PAGE, w: '1', view: 'inline' },
+ })
+ .reply(200, res1)
+ .onGet(endpointBatch, {
+ params: { page: 2, per_page: DIFFS_PER_PAGE, w: '1', view: 'inline' },
+ })
.reply(200, res2);
testAction(
fetchDiffFilesBatch,
{},
- { endpointBatch },
+ { endpointBatch, useSingleDiffStyle: true, diffViewType: 'inline' },
[
{ type: types.SET_BATCH_LOADING, payload: true },
{ type: types.SET_RETRIEVING_BATCHES, payload: true },
@@ -188,7 +191,7 @@ describe('DiffsStoreActions', () => {
describe('fetchDiffFilesMeta', () => {
it('should fetch diff meta information', done => {
- const endpointMetadata = '/fetch/diffs_meta';
+ const endpointMetadata = '/fetch/diffs_meta?view=inline';
const mock = new MockAdapter(axios);
const data = { diff_files: [] };
const res = { data };
@@ -213,6 +216,108 @@ describe('DiffsStoreActions', () => {
});
});
+ describe('when the single diff view feature flag is off', () => {
+ describe('fetchDiffFiles', () => {
+ it('should fetch diff files', done => {
+ const endpoint = '/fetch/diff/files?w=1';
+ const mock = new MockAdapter(axios);
+ const res = { diff_files: 1, merge_request_diffs: [] };
+ mock.onGet(endpoint).reply(200, res);
+
+ testAction(
+ fetchDiffFiles,
+ {},
+ {
+ endpoint,
+ diffFiles: [],
+ showWhitespace: false,
+ diffViewType: 'inline',
+ useSingleDiffStyle: false,
+ },
+ [
+ { type: types.SET_LOADING, payload: true },
+ { type: types.SET_LOADING, payload: false },
+ { type: types.SET_MERGE_REQUEST_DIFFS, payload: res.merge_request_diffs },
+ { type: types.SET_DIFF_DATA, payload: res },
+ ],
+ [],
+ () => {
+ mock.restore();
+ done();
+ },
+ );
+
+ fetchDiffFiles({ state: { endpoint }, commit: () => null })
+ .then(data => {
+ expect(data).toEqual(res);
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('fetchDiffFilesBatch', () => {
+ it('should fetch batch diff files', done => {
+ const endpointBatch = '/fetch/diffs_batch';
+ const mock = new MockAdapter(axios);
+ const res1 = { diff_files: [], pagination: { next_page: 2 } };
+ const res2 = { diff_files: [], pagination: {} };
+ mock
+ .onGet(endpointBatch, { params: { page: 1, per_page: DIFFS_PER_PAGE, w: '1' } })
+ .reply(200, res1)
+ .onGet(endpointBatch, { params: { page: 2, per_page: DIFFS_PER_PAGE, w: '1' } })
+ .reply(200, res2);
+
+ testAction(
+ fetchDiffFilesBatch,
+ {},
+ { endpointBatch, useSingleDiffStyle: false },
+ [
+ { type: types.SET_BATCH_LOADING, payload: true },
+ { type: types.SET_RETRIEVING_BATCHES, payload: true },
+ { type: types.SET_DIFF_DATA_BATCH, payload: { diff_files: res1.diff_files } },
+ { type: types.SET_BATCH_LOADING, payload: false },
+ { type: types.SET_DIFF_DATA_BATCH, payload: { diff_files: [] } },
+ { type: types.SET_BATCH_LOADING, payload: false },
+ { type: types.SET_RETRIEVING_BATCHES, payload: false },
+ ],
+ [],
+ () => {
+ mock.restore();
+ done();
+ },
+ );
+ });
+ });
+
+ describe('fetchDiffFilesMeta', () => {
+ it('should fetch diff meta information', done => {
+ const endpointMetadata = '/fetch/diffs_meta?';
+ const mock = new MockAdapter(axios);
+ const data = { diff_files: [] };
+ const res = { data };
+ mock.onGet(endpointMetadata).reply(200, res);
+
+ testAction(
+ fetchDiffFilesMeta,
+ {},
+ { endpointMetadata, useSingleDiffStyle: false },
+ [
+ { type: types.SET_LOADING, payload: true },
+ { type: types.SET_LOADING, payload: false },
+ { type: types.SET_MERGE_REQUEST_DIFFS, payload: [] },
+ { type: types.SET_DIFF_DATA, payload: { data } },
+ ],
+ [],
+ () => {
+ mock.restore();
+ done();
+ },
+ );
+ });
+ });
+ });
+
describe('setHighlightedRow', () => {
it('should mark currently selected diff and set lineHash and fileHash of highlightedRow', () => {
testAction(setHighlightedRow, 'ABC_123', {}, [
diff --git a/spec/javascripts/diffs/store/mutations_spec.js b/spec/javascripts/diffs/store/mutations_spec.js
index 24405dcc796..cb89a89e216 100644
--- a/spec/javascripts/diffs/store/mutations_spec.js
+++ b/spec/javascripts/diffs/store/mutations_spec.js
@@ -55,8 +55,8 @@ describe('DiffsStoreMutations', () => {
const state = {
diffFiles: [
{
- content_sha: diffFileMockData.content_sha,
- file_hash: diffFileMockData.file_hash,
+ ...diffFileMockData,
+ parallel_diff_lines: [],
},
],
};
diff --git a/spec/javascripts/diffs/store/utils_spec.js b/spec/javascripts/diffs/store/utils_spec.js
index 638b4510221..051820cedfa 100644
--- a/spec/javascripts/diffs/store/utils_spec.js
+++ b/spec/javascripts/diffs/store/utils_spec.js
@@ -333,10 +333,10 @@ describe('DiffsStoreUtils', () => {
diff_files: [Object.assign({}, mock, { highlighted_diff_lines: undefined })],
};
- utils.prepareDiffData(preparedDiff);
- utils.prepareDiffData(splitInlineDiff);
- utils.prepareDiffData(splitParallelDiff);
- utils.prepareDiffData(completedDiff, [mock]);
+ preparedDiff.diff_files = utils.prepareDiffData(preparedDiff);
+ splitInlineDiff.diff_files = utils.prepareDiffData(splitInlineDiff);
+ splitParallelDiff.diff_files = utils.prepareDiffData(splitParallelDiff);
+ completedDiff.diff_files = utils.prepareDiffData(completedDiff, [mock]);
});
it('sets the renderIt and collapsed attribute on files', () => {
@@ -390,6 +390,37 @@ describe('DiffsStoreUtils', () => {
expect(completedDiff.diff_files[0].parallel_diff_lines.length).toBeGreaterThan(0);
expect(completedDiff.diff_files[0].highlighted_diff_lines.length).toBeGreaterThan(0);
});
+
+ it('leaves files in the existing state', () => {
+ const priorFiles = [mock];
+ const fakeNewFile = {
+ ...mock,
+ content_sha: 'ABC',
+ file_hash: 'DEF',
+ };
+ const updatedFilesList = utils.prepareDiffData({ diff_files: [fakeNewFile] }, priorFiles);
+
+ expect(updatedFilesList).toEqual([mock, fakeNewFile]);
+ });
+
+ it('completes an existing split diff without overwriting existing diffs', () => {
+ // The current state has a file that has only loaded inline lines
+ const priorFiles = [{ ...mock, parallel_diff_lines: [] }];
+ // The next (batch) load loads two files: the other half of that file, and a new file
+ const fakeBatch = [
+ { ...mock, highlighted_diff_lines: undefined },
+ { ...mock, highlighted_diff_lines: undefined, content_sha: 'ABC', file_hash: 'DEF' },
+ ];
+ const updatedFilesList = utils.prepareDiffData({ diff_files: fakeBatch }, priorFiles);
+
+ expect(updatedFilesList).toEqual([
+ mock,
+ jasmine.objectContaining({
+ content_sha: 'ABC',
+ file_hash: 'DEF',
+ }),
+ ]);
+ });
});
describe('isDiscussionApplicableToLine', () => {
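The reworked prepareDiffData assertions in this file follow the utility no longer mutating its argument: it now returns the prepared file list, keeping previously loaded files and appending new ones, so call sites reassign roughly like this (an assumed call-site sketch, not the store code itself):

// Assumed call-site shape implied by the updated specs.
const updatedFiles = utils.prepareDiffData(response, state.diffFiles);
state.diffFiles = updatedFiles; // prior files are preserved, new files appended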
diff --git a/spec/javascripts/editor/editor_lite_spec.js b/spec/javascripts/editor/editor_lite_spec.js
new file mode 100644
index 00000000000..154daccf82d
--- /dev/null
+++ b/spec/javascripts/editor/editor_lite_spec.js
@@ -0,0 +1,111 @@
+import { editor as monacoEditor, Uri } from 'monaco-editor';
+import Editor from '~/editor/editor_lite';
+
+describe('Base editor', () => {
+ let editorEl;
+ let editor;
+ const blobContent = 'Foo Bar';
+ const blobPath = 'test.md';
+ const uri = new Uri('gitlab', false, blobPath);
+ const fakeModel = { foo: 'bar' };
+
+ beforeEach(() => {
+ setFixtures('<div id="editor" data-editor-loading></div>');
+ editorEl = document.getElementById('editor');
+ editor = new Editor();
+ });
+
+ afterEach(() => {
+ editor.dispose();
+ editorEl.remove();
+ });
+
+ it('initializes Editor with basic properties', () => {
+ expect(editor).toBeDefined();
+ expect(editor.editorEl).toBe(null);
+ expect(editor.blobContent).toEqual('');
+ expect(editor.blobPath).toEqual('');
+ });
+
+ it('removes `editor-loading` data attribute from the target DOM element', () => {
+ editor.createInstance({ el: editorEl });
+
+ expect(editorEl.dataset.editorLoading).toBeUndefined();
+ });
+
+ describe('instance of the Editor', () => {
+ let modelSpy;
+ let instanceSpy;
+ let setModel;
+ let dispose;
+
+ beforeEach(() => {
+ setModel = jasmine.createSpy();
+ dispose = jasmine.createSpy();
+ modelSpy = spyOn(monacoEditor, 'createModel').and.returnValue(fakeModel);
+ instanceSpy = spyOn(monacoEditor, 'create').and.returnValue({
+ setModel,
+ dispose,
+ });
+ });
+
+ it('does nothing if no dom element is supplied', () => {
+ editor.createInstance();
+
+ expect(editor.editorEl).toBe(null);
+ expect(editor.blobContent).toEqual('');
+ expect(editor.blobPath).toEqual('');
+
+ expect(modelSpy).not.toHaveBeenCalled();
+ expect(instanceSpy).not.toHaveBeenCalled();
+ expect(setModel).not.toHaveBeenCalled();
+ });
+
+ it('creates model to be supplied to Monaco editor', () => {
+ editor.createInstance({ el: editorEl, blobPath, blobContent });
+
+ expect(modelSpy).toHaveBeenCalledWith(blobContent, undefined, uri);
+ expect(setModel).toHaveBeenCalledWith(fakeModel);
+ });
+
+ it('initializes the instance on a supplied DOM node', () => {
+ editor.createInstance({ el: editorEl });
+
+ expect(editor.editorEl).not.toBe(null);
+ expect(instanceSpy).toHaveBeenCalledWith(editorEl, jasmine.anything());
+ });
+ });
+
+ describe('implementation', () => {
+ beforeEach(() => {
+ editor.createInstance({ el: editorEl, blobPath, blobContent });
+ });
+
+ afterEach(() => {
+ editor.model.dispose();
+ });
+
+ it('correctly proxies value from the model', () => {
+ expect(editor.getValue()).toEqual(blobContent);
+ });
+
+ it('is capable of changing the language of the model', () => {
+ const blobRenamedPath = 'test.js';
+
+ expect(editor.model.getLanguageIdentifier().language).toEqual('markdown');
+ editor.updateModelLanguage(blobRenamedPath);
+
+ expect(editor.model.getLanguageIdentifier().language).toEqual('javascript');
+ });
+
+ it('falls back to plaintext if there is no language associated with an extension', () => {
+ const blobRenamedPath = 'test.myext';
+ const spy = spyOn(console, 'error');
+
+ editor.updateModelLanguage(blobRenamedPath);
+
+ expect(spy).not.toHaveBeenCalled();
+ expect(editor.model.getLanguageIdentifier().language).toEqual('plaintext');
+ });
+ });
+});
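Taken together, the new editor_lite spec pins down a small wrapper API around Monaco. A minimal usage sketch consistent with these expectations (the element id and file names here are assumptions):

// Minimal usage sketch; identifiers other than the Editor API are illustrative.
import Editor from '~/editor/editor_lite';

const editor = new Editor();
editor.createInstance({
  el: document.getElementById('editor'),
  blobPath: 'README.md',
  blobContent: '# Hello',
});
editor.getValue(); // => '# Hello'
editor.updateModelLanguage('script.js'); // model language becomes 'javascript'
editor.dispose();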
diff --git a/spec/javascripts/environments/emtpy_state_spec.js b/spec/javascripts/environments/emtpy_state_spec.js
deleted file mode 100644
index eec06a43a1e..00000000000
--- a/spec/javascripts/environments/emtpy_state_spec.js
+++ /dev/null
@@ -1,54 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import emptyState from '~/environments/components/empty_state.vue';
-
-describe('environments empty state', () => {
- let vm;
- let Component;
-
- beforeEach(() => {
- Component = Vue.extend(emptyState);
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('With permissions', () => {
- beforeEach(() => {
- vm = mountComponent(Component, {
- newPath: 'foo',
- canCreateEnvironment: true,
- helpPath: 'bar',
- });
- });
-
- it('renders empty state and new environment button', () => {
- expect(vm.$el.querySelector('.js-blank-state-title').textContent.trim()).toEqual(
- "You don't have any environments right now",
- );
-
- expect(vm.$el.querySelector('.js-new-environment-button').getAttribute('href')).toEqual(
- 'foo',
- );
- });
- });
-
- describe('Without permission', () => {
- beforeEach(() => {
- vm = mountComponent(Component, {
- newPath: 'foo',
- canCreateEnvironment: false,
- helpPath: 'bar',
- });
- });
-
- it('renders empty state without new button', () => {
- expect(vm.$el.querySelector('.js-blank-state-title').textContent.trim()).toEqual(
- "You don't have any environments right now",
- );
-
- expect(vm.$el.querySelector('.js-new-environment-button')).toBeNull();
- });
- });
-});
diff --git a/spec/javascripts/environments/environment_actions_spec.js b/spec/javascripts/environments/environment_actions_spec.js
deleted file mode 100644
index a844660f7bf..00000000000
--- a/spec/javascripts/environments/environment_actions_spec.js
+++ /dev/null
@@ -1,117 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import { TEST_HOST } from 'spec/test_constants';
-import eventHub from '~/environments/event_hub';
-import EnvironmentActions from '~/environments/components/environment_actions.vue';
-
-describe('EnvironmentActions Component', () => {
- const Component = Vue.extend(EnvironmentActions);
- let vm;
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('manual actions', () => {
- const actions = [
- {
- name: 'bar',
- play_path: 'https://gitlab.com/play',
- },
- {
- name: 'foo',
- play_path: '#',
- },
- {
- name: 'foo bar',
- play_path: 'url',
- playable: false,
- },
- ];
-
- beforeEach(() => {
- vm = mountComponent(Component, { actions });
- });
-
- it('should render a dropdown button with icon and title attribute', () => {
- expect(vm.$el.querySelector('.fa-caret-down')).toBeDefined();
- expect(vm.$el.querySelector('.dropdown-new').getAttribute('data-original-title')).toEqual(
- 'Deploy to...',
- );
-
- expect(vm.$el.querySelector('.dropdown-new').getAttribute('aria-label')).toEqual(
- 'Deploy to...',
- );
- });
-
- it('should render a dropdown with the provided list of actions', () => {
- expect(vm.$el.querySelectorAll('.dropdown-menu li').length).toEqual(actions.length);
- });
-
- it("should render a disabled action when it's not playable", () => {
- expect(
- vm.$el.querySelector('.dropdown-menu li:last-child button').getAttribute('disabled'),
- ).toEqual('disabled');
-
- expect(
- vm.$el.querySelector('.dropdown-menu li:last-child button').classList.contains('disabled'),
- ).toEqual(true);
- });
- });
-
- describe('scheduled jobs', () => {
- const scheduledJobAction = {
- name: 'scheduled action',
- playPath: `${TEST_HOST}/scheduled/job/action`,
- playable: true,
- scheduledAt: '2063-04-05T00:42:00Z',
- };
- const expiredJobAction = {
- name: 'expired action',
- playPath: `${TEST_HOST}/expired/job/action`,
- playable: true,
- scheduledAt: '2018-10-05T08:23:00Z',
- };
- const findDropdownItem = action => {
- const buttons = vm.$el.querySelectorAll('.dropdown-menu li button');
- return Array.prototype.find.call(buttons, element =>
- element.innerText.trim().startsWith(action.name),
- );
- };
-
- beforeEach(() => {
- spyOn(Date, 'now').and.callFake(() => new Date('2063-04-04T00:42:00Z').getTime());
- vm = mountComponent(Component, { actions: [scheduledJobAction, expiredJobAction] });
- });
-
- it('emits postAction event after confirming', () => {
- const emitSpy = jasmine.createSpy('emit');
- eventHub.$on('postAction', emitSpy);
- spyOn(window, 'confirm').and.callFake(() => true);
-
- findDropdownItem(scheduledJobAction).click();
-
- expect(window.confirm).toHaveBeenCalled();
- expect(emitSpy).toHaveBeenCalledWith({ endpoint: scheduledJobAction.playPath });
- });
-
- it('does not emit postAction event if confirmation is cancelled', () => {
- const emitSpy = jasmine.createSpy('emit');
- eventHub.$on('postAction', emitSpy);
- spyOn(window, 'confirm').and.callFake(() => false);
-
- findDropdownItem(scheduledJobAction).click();
-
- expect(window.confirm).toHaveBeenCalled();
- expect(emitSpy).not.toHaveBeenCalled();
- });
-
- it('displays the remaining time in the dropdown', () => {
- expect(findDropdownItem(scheduledJobAction)).toContainText('24:00:00');
- });
-
- it('displays 00:00:00 for expired jobs in the dropdown', () => {
- expect(findDropdownItem(expiredJobAction)).toContainText('00:00:00');
- });
- });
-});
diff --git a/spec/javascripts/environments/environment_external_url_spec.js b/spec/javascripts/environments/environment_external_url_spec.js
deleted file mode 100644
index 056d68a26e9..00000000000
--- a/spec/javascripts/environments/environment_external_url_spec.js
+++ /dev/null
@@ -1,22 +0,0 @@
-import Vue from 'vue';
-import externalUrlComp from '~/environments/components/environment_external_url.vue';
-
-describe('External URL Component', () => {
- let ExternalUrlComponent;
-
- beforeEach(() => {
- ExternalUrlComponent = Vue.extend(externalUrlComp);
- });
-
- it('should link to the provided externalUrl prop', () => {
- const externalURL = 'https://gitlab.com';
- const component = new ExternalUrlComponent({
- propsData: {
- externalUrl: externalURL,
- },
- }).$mount();
-
- expect(component.$el.getAttribute('href')).toEqual(externalURL);
- expect(component.$el.querySelector('fa-external-link')).toBeDefined();
- });
-});
diff --git a/spec/javascripts/environments/environments_app_spec.js b/spec/javascripts/environments/environments_app_spec.js
index 75526c2ba74..6c05b609923 100644
--- a/spec/javascripts/environments/environments_app_spec.js
+++ b/spec/javascripts/environments/environments_app_spec.js
@@ -55,6 +55,26 @@ describe('Environment', () => {
"You don't have any environments right now",
);
});
+
+ describe('when it is possible to enable a review app', () => {
+ beforeEach(done => {
+ mock
+ .onGet(mockData.endpoint)
+ .reply(200, { environments: [], review_app: { can_setup_review_app: true } });
+
+ component = mountComponent(EnvironmentsComponent, mockData);
+
+ setTimeout(() => {
+ done();
+ }, 0);
+ });
+
+ it('should render the enable review app button', () => {
+ expect(component.$el.querySelector('.js-enable-review-app-button').textContent).toContain(
+ 'Enable review app',
+ );
+ });
+ });
});
describe('with paginated environments', () => {
diff --git a/spec/javascripts/environments/folder/environments_folder_view_spec.js b/spec/javascripts/environments/folder/environments_folder_view_spec.js
deleted file mode 100644
index 6530201240f..00000000000
--- a/spec/javascripts/environments/folder/environments_folder_view_spec.js
+++ /dev/null
@@ -1,229 +0,0 @@
-import Vue from 'vue';
-import MockAdapter from 'axios-mock-adapter';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import { removeBreakLine, removeWhitespace } from 'spec/helpers/text_helper';
-import axios from '~/lib/utils/axios_utils';
-import environmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue';
-import { environmentsList } from '../mock_data';
-
-describe('Environments Folder View', () => {
- let Component;
- let component;
- let mock;
-
- const mockData = {
- endpoint: 'environments.json',
- folderName: 'review',
- canReadEnvironment: true,
- cssContainerClass: 'container',
- canaryDeploymentFeatureId: 'canary_deployment',
- showCanaryDeploymentCallout: true,
- userCalloutsPath: '/callouts',
- lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
- helpCanaryDeploymentsPath: 'help/canary-deployments',
- };
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
-
- Component = Vue.extend(environmentsFolderViewComponent);
- });
-
- afterEach(() => {
- mock.restore();
-
- component.$destroy();
- });
-
- describe('successful request', () => {
- beforeEach(() => {
- mock.onGet(mockData.endpoint).reply(
- 200,
- {
- environments: environmentsList,
- stopped_count: 1,
- available_count: 0,
- },
- {
- 'X-nExt-pAge': '2',
- 'x-page': '1',
- 'X-Per-Page': '2',
- 'X-Prev-Page': '',
- 'X-TOTAL': '20',
- 'X-Total-Pages': '10',
- },
- );
-
- component = mountComponent(Component, mockData);
- });
-
- it('should render a table with environments', done => {
- setTimeout(() => {
- expect(component.$el.querySelectorAll('table')).not.toBeNull();
- expect(component.$el.querySelector('.environment-name').textContent.trim()).toEqual(
- environmentsList[0].name,
- );
- done();
- }, 0);
- });
-
- it('should render available tab with count', done => {
- setTimeout(() => {
- expect(component.$el.querySelector('.js-environments-tab-available').textContent).toContain(
- 'Available',
- );
-
- expect(
- component.$el.querySelector('.js-environments-tab-available .badge').textContent,
- ).toContain('0');
- done();
- }, 0);
- });
-
- it('should render stopped tab with count', done => {
- setTimeout(() => {
- expect(component.$el.querySelector('.js-environments-tab-stopped').textContent).toContain(
- 'Stopped',
- );
-
- expect(
- component.$el.querySelector('.js-environments-tab-stopped .badge').textContent,
- ).toContain('1');
- done();
- }, 0);
- });
-
- it('should render parent folder name', done => {
- setTimeout(() => {
- expect(
- removeBreakLine(
- removeWhitespace(component.$el.querySelector('.js-folder-name').textContent.trim()),
- ),
- ).toContain('Environments / review');
- done();
- }, 0);
- });
-
- describe('pagination', () => {
- it('should render pagination', done => {
- setTimeout(() => {
- expect(component.$el.querySelectorAll('.gl-pagination')).not.toBeNull();
- done();
- }, 0);
- });
-
- it('should make an API request when changing page', done => {
- spyOn(component, 'updateContent');
- setTimeout(() => {
- component.$el
- .querySelector('.gl-pagination .page-item:nth-last-of-type(2) .page-link')
- .click();
-
- expect(component.updateContent).toHaveBeenCalledWith({
- scope: component.scope,
- page: '10',
- });
- done();
- }, 0);
- });
-
- it('should make an API request when using tabs', done => {
- setTimeout(() => {
- spyOn(component, 'updateContent');
- component.$el.querySelector('.js-environments-tab-stopped').click();
-
- expect(component.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
- done();
- });
- });
- });
- });
-
- describe('unsuccessfull request', () => {
- beforeEach(() => {
- mock.onGet(mockData.endpoint).reply(500, {
- environments: [],
- });
-
- component = mountComponent(Component, mockData);
- });
-
- it('should not render a table', done => {
- setTimeout(() => {
- expect(component.$el.querySelector('table')).toBe(null);
- done();
- }, 0);
- });
-
- it('should render available tab with count 0', done => {
- setTimeout(() => {
- expect(component.$el.querySelector('.js-environments-tab-available').textContent).toContain(
- 'Available',
- );
-
- expect(
- component.$el.querySelector('.js-environments-tab-available .badge').textContent,
- ).toContain('0');
- done();
- }, 0);
- });
-
- it('should render stopped tab with count 0', done => {
- setTimeout(() => {
- expect(component.$el.querySelector('.js-environments-tab-stopped').textContent).toContain(
- 'Stopped',
- );
-
- expect(
- component.$el.querySelector('.js-environments-tab-stopped .badge').textContent,
- ).toContain('0');
- done();
- }, 0);
- });
- });
-
- describe('methods', () => {
- beforeEach(() => {
- mock.onGet(mockData.endpoint).reply(200, {
- environments: [],
- });
-
- component = mountComponent(Component, mockData);
- spyOn(window.history, 'pushState').and.stub();
- });
-
- describe('updateContent', () => {
- it('should set given parameters', done => {
- component
- .updateContent({ scope: 'stopped', page: '4' })
- .then(() => {
- expect(component.page).toEqual('4');
- expect(component.scope).toEqual('stopped');
- expect(component.requestData.scope).toEqual('stopped');
- expect(component.requestData.page).toEqual('4');
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('onChangeTab', () => {
- it('should set page to 1', () => {
- spyOn(component, 'updateContent');
- component.onChangeTab('stopped');
-
- expect(component.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
- });
- });
-
- describe('onChangePage', () => {
- it('should update page and keep scope', () => {
- spyOn(component, 'updateContent');
-
- component.onChangePage(4);
-
- expect(component.updateContent).toHaveBeenCalledWith({ scope: component.scope, page: '4' });
- });
- });
- });
-});
diff --git a/spec/javascripts/environments/mock_data.js b/spec/javascripts/environments/mock_data.js
index 7bb57f938b8..a8be3706b79 100644
--- a/spec/javascripts/environments/mock_data.js
+++ b/spec/javascripts/environments/mock_data.js
@@ -30,43 +30,6 @@ export const environmentsList = [
},
];
-export const serverData = [
- {
- name: 'DEV',
- size: 1,
- latest: {
- id: 7,
- name: 'DEV',
- state: 'available',
- external_url: null,
- environment_type: null,
- last_deployment: null,
- has_stop_action: false,
- environment_path: '/root/review-app/environments/7',
- stop_path: '/root/review-app/environments/7/stop',
- created_at: '2017-01-31T10:53:46.894Z',
- updated_at: '2017-01-31T10:53:46.894Z',
- },
- },
- {
- name: 'build',
- size: 5,
- latest: {
- id: 12,
- name: 'build/update-README',
- state: 'available',
- external_url: null,
- environment_type: 'build',
- last_deployment: null,
- has_stop_action: false,
- environment_path: '/root/review-app/environments/12',
- stop_path: '/root/review-app/environments/12/stop',
- created_at: '2017-02-01T19:42:18.400Z',
- updated_at: '2017-02-01T19:42:18.400Z',
- },
- },
-];
-
export const environment = {
name: 'DEV',
size: 1,
diff --git a/spec/javascripts/flash_spec.js b/spec/javascripts/flash_spec.js
index 28fa87ac097..39ca4eedb69 100644
--- a/spec/javascripts/flash_spec.js
+++ b/spec/javascripts/flash_spec.js
@@ -40,7 +40,7 @@ describe('Flash', () => {
expect(el.style['transition-property']).toBe('opacity');
- expect(el.style['transition-duration']).toBe('0.3s');
+ expect(el.style['transition-duration']).toBe('0.15s');
});
it('sets opacity style', () => {
diff --git a/spec/javascripts/groups/components/group_item_spec.js b/spec/javascripts/groups/components/group_item_spec.js
index 39575ee9f97..2889d7ae4ff 100644
--- a/spec/javascripts/groups/components/group_item_spec.js
+++ b/spec/javascripts/groups/components/group_item_spec.js
@@ -155,6 +155,35 @@ describe('GroupItemComponent', () => {
});
describe('template', () => {
+ let group = null;
+
+ describe('for a group pending deletion', () => {
+ beforeEach(() => {
+ group = { ...mockParentGroupItem, pendingRemoval: true };
+ vm = createComponent(group);
+ });
+
+ it('renders the group pending removal badge', () => {
+ const badgeEl = vm.$el.querySelector('.badge-warning');
+
+ expect(badgeEl).toBeDefined();
+ expect(badgeEl).toContainText('pending removal');
+ });
+ });
+
+ describe('for a group not scheduled for deletion', () => {
+ beforeEach(() => {
+ group = { ...mockParentGroupItem, pendingRemoval: false };
+ vm = createComponent(group);
+ });
+
+ it('does not render the group pending removal badge', () => {
+ const groupTextContainer = vm.$el.querySelector('.group-text-container');
+
+ expect(groupTextContainer).not.toContainText('pending removal');
+ });
+ });
+
it('should render component template correctly', () => {
const visibilityIconEl = vm.$el.querySelector('.item-visibility');
diff --git a/spec/javascripts/ide/components/activity_bar_spec.js b/spec/javascripts/ide/components/activity_bar_spec.js
index 4d878e633fe..823ca29dab9 100644
--- a/spec/javascripts/ide/components/activity_bar_spec.js
+++ b/spec/javascripts/ide/components/activity_bar_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import store from '~/ide/stores';
-import { activityBarViews } from '~/ide/constants';
+import { leftSidebarViews } from '~/ide/constants';
import ActivityBar from '~/ide/components/activity_bar.vue';
import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
import { resetStore } from '../helpers';
@@ -34,19 +34,19 @@ describe('IDE activity bar', () => {
it('calls updateActivityBarView with edit value on click', () => {
vm.$el.querySelector('.js-ide-edit-mode').click();
- expect(vm.updateActivityBarView).toHaveBeenCalledWith(activityBarViews.edit);
+ expect(vm.updateActivityBarView).toHaveBeenCalledWith(leftSidebarViews.edit.name);
});
it('calls updateActivityBarView with commit value on click', () => {
vm.$el.querySelector('.js-ide-commit-mode').click();
- expect(vm.updateActivityBarView).toHaveBeenCalledWith(activityBarViews.commit);
+ expect(vm.updateActivityBarView).toHaveBeenCalledWith(leftSidebarViews.commit.name);
});
it('calls updateActivityBarView with review value on click', () => {
vm.$el.querySelector('.js-ide-review-mode').click();
- expect(vm.updateActivityBarView).toHaveBeenCalledWith(activityBarViews.review);
+ expect(vm.updateActivityBarView).toHaveBeenCalledWith(leftSidebarViews.review.name);
});
});
@@ -60,7 +60,7 @@ describe('IDE activity bar', () => {
});
it('sets commit item active', done => {
- vm.$store.state.currentActivityView = activityBarViews.commit;
+ vm.$store.state.currentActivityView = leftSidebarViews.commit.name;
vm.$nextTick(() => {
expect(vm.$el.querySelector('.js-ide-commit-mode').classList).toContain('active');
diff --git a/spec/javascripts/ide/components/commit_sidebar/form_spec.js b/spec/javascripts/ide/components/commit_sidebar/form_spec.js
index e984389bd46..5cb804938ed 100644
--- a/spec/javascripts/ide/components/commit_sidebar/form_spec.js
+++ b/spec/javascripts/ide/components/commit_sidebar/form_spec.js
@@ -4,7 +4,7 @@ import getSetTimeoutPromise from 'spec/helpers/set_timeout_promise_helper';
import { projectData } from 'spec/ide/mock_data';
import store from '~/ide/stores';
import CommitForm from '~/ide/components/commit_sidebar/form.vue';
-import { activityBarViews } from '~/ide/constants';
+import { leftSidebarViews } from '~/ide/constants';
import { resetStore } from '../../helpers';
describe('IDE commit form', () => {
@@ -52,7 +52,7 @@ describe('IDE commit form', () => {
vm.$store.state.stagedFiles.push('test');
vm.$nextTick(() => {
- expect(vm.$el.querySelector('p').textContent).toContain('1 unstaged and 1 staged changes');
+ expect(vm.$el.querySelector('p').textContent).toContain('1 staged and 1 unstaged changes');
done();
});
});
@@ -71,7 +71,7 @@ describe('IDE commit form', () => {
vm.$el.querySelector('.btn-primary').click();
vm.$nextTick(() => {
- expect(store.state.currentActivityView).toBe(activityBarViews.commit);
+ expect(store.state.currentActivityView).toBe(leftSidebarViews.commit.name);
done();
});
@@ -79,7 +79,7 @@ describe('IDE commit form', () => {
it('collapses if lastCommitMsg is set to empty and current view is not commit view', done => {
store.state.lastCommitMsg = 'abc';
- store.state.currentActivityView = activityBarViews.edit;
+ store.state.currentActivityView = leftSidebarViews.edit.name;
vm.$nextTick(() => {
// if commit message is set, form is uncollapsed
@@ -133,7 +133,7 @@ describe('IDE commit form', () => {
vm.$el.querySelector('.btn-primary').click();
vm.$nextTick(() => {
- expect(store.state.currentActivityView).toBe(activityBarViews.commit);
+ expect(store.state.currentActivityView).toBe(leftSidebarViews.commit.name);
expect(vm.isCompact).toBe(false);
done();
diff --git a/spec/javascripts/ide/components/commit_sidebar/new_merge_request_option_spec.js b/spec/javascripts/ide/components/commit_sidebar/new_merge_request_option_spec.js
index 02caf689c50..7c0b4000229 100644
--- a/spec/javascripts/ide/components/commit_sidebar/new_merge_request_option_spec.js
+++ b/spec/javascripts/ide/components/commit_sidebar/new_merge_request_option_spec.js
@@ -1,13 +1,15 @@
import Vue from 'vue';
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import { projectData, branches } from 'spec/ide/mock_data';
-import { resetStore } from 'spec/ide/helpers';
import NewMergeRequestOption from '~/ide/components/commit_sidebar/new_merge_request_option.vue';
-import store from '~/ide/stores';
-import consts from '../../../../../app/assets/javascripts/ide/stores/modules/commit/constants';
+import { createStore } from '~/ide/stores';
+import { PERMISSION_CREATE_MR } from '~/ide/constants';
+import consts from '~/ide/stores/modules/commit/constants';
describe('create new MR checkbox', () => {
+ let store;
let vm;
+
const setMR = () => {
vm.$store.state.currentMergeRequestId = '1';
vm.$store.state.projects[store.state.currentProjectId].mergeRequests[
@@ -15,6 +17,10 @@ describe('create new MR checkbox', () => {
] = { foo: 'bar' };
};
+ const setPermissions = permissions => {
+ store.state.projects[store.state.currentProjectId].userPermissions = permissions;
+ };
+
const createComponent = ({ currentBranchId = 'master', createNewBranch = false } = {}) => {
const Component = Vue.extend(NewMergeRequestOption);
@@ -25,20 +31,29 @@ describe('create new MR checkbox', () => {
: consts.COMMIT_TO_CURRENT_BRANCH;
vm.$store.state.currentBranchId = currentBranchId;
- vm.$store.state.currentProjectId = 'abcproject';
- const proj = JSON.parse(JSON.stringify(projectData));
- proj.branches[currentBranchId] = branches.find(branch => branch.name === currentBranchId);
-
- Vue.set(vm.$store.state.projects, 'abcproject', proj);
+ store.state.projects.abcproject.branches[currentBranchId] = branches.find(
+ branch => branch.name === currentBranchId,
+ );
return vm.$mount();
};
+ const findInput = () => vm.$el.querySelector('input[type="checkbox"]');
+ const findLabel = () => vm.$el.querySelector('.js-ide-commit-new-mr');
+
+ beforeEach(() => {
+ store = createStore();
+
+ store.state.currentProjectId = 'abcproject';
+
+ const proj = JSON.parse(JSON.stringify(projectData));
+ proj.userPermissions[PERMISSION_CREATE_MR] = true;
+ Vue.set(store.state.projects, 'abcproject', proj);
+ });
+
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
describe('for default branch', () => {
@@ -160,6 +175,24 @@ describe('create new MR checkbox', () => {
.then(done)
.catch(done.fail);
});
+
+ it('shows enabled checkbox', () => {
+ expect(findLabel().classList.contains('is-disabled')).toBe(false);
+ expect(findInput().disabled).toBe(false);
+ });
+ });
+
+ describe('when user cannot create MR', () => {
+ beforeEach(() => {
+ setPermissions({ [PERMISSION_CREATE_MR]: false });
+
+ createComponent({ currentBranchId: 'regular' });
+ });
+
+ it('shows disabled checkbox', () => {
+ expect(findLabel().classList.contains('is-disabled')).toBe(true);
+ expect(findInput().disabled).toBe(true);
+ });
});
it('dispatches toggleShouldCreateMR when clicking checkbox', () => {
diff --git a/spec/javascripts/ide/components/file_row_extra_spec.js b/spec/javascripts/ide/components/file_row_extra_spec.js
index 4c2f29f55dd..f498d8251c8 100644
--- a/spec/javascripts/ide/components/file_row_extra_spec.js
+++ b/spec/javascripts/ide/components/file_row_extra_spec.js
@@ -63,7 +63,7 @@ describe('IDE extra file row component', () => {
stagedFilesCount = 1;
unstagedFilesCount = 1;
- expect(vm.folderChangesTooltip).toBe('1 unstaged and 1 staged changes');
+ expect(vm.folderChangesTooltip).toBe('1 staged and 1 unstaged changes');
});
});
diff --git a/spec/javascripts/ide/components/ide_side_bar_spec.js b/spec/javascripts/ide/components/ide_side_bar_spec.js
index a2d15462ac5..28f127a61c0 100644
--- a/spec/javascripts/ide/components/ide_side_bar_spec.js
+++ b/spec/javascripts/ide/components/ide_side_bar_spec.js
@@ -2,7 +2,7 @@ import Vue from 'vue';
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import store from '~/ide/stores';
import ideSidebar from '~/ide/components/ide_side_bar.vue';
-import { activityBarViews } from '~/ide/constants';
+import { leftSidebarViews } from '~/ide/constants';
import { resetStore } from '../helpers';
import { projectData } from '../mock_data';
@@ -45,7 +45,7 @@ describe('IdeSidebar', () => {
});
it('renders commit component', done => {
- vm.$store.state.currentActivityView = activityBarViews.commit;
+ vm.$store.state.currentActivityView = leftSidebarViews.commit.name;
vm.$nextTick(() => {
expect(vm.$el.querySelector('.multi-file-commit-panel-section')).not.toBeNull();
diff --git a/spec/javascripts/ide/components/ide_spec.js b/spec/javascripts/ide/components/ide_spec.js
index 048db4a7533..4241b994cba 100644
--- a/spec/javascripts/ide/components/ide_spec.js
+++ b/spec/javascripts/ide/components/ide_spec.js
@@ -61,14 +61,14 @@ describe('ide component, non-empty repo', () => {
});
it('shows error message when set', done => {
- expect(vm.$el.querySelector('.flash-container')).toBe(null);
+ expect(vm.$el.querySelector('.gl-alert')).toBe(null);
vm.$store.state.errorMessage = {
text: 'error',
};
vm.$nextTick(() => {
- expect(vm.$el.querySelector('.flash-container')).not.toBe(null);
+ expect(vm.$el.querySelector('.gl-alert')).not.toBe(null);
done();
});
diff --git a/spec/javascripts/ide/components/ide_status_bar_spec.js b/spec/javascripts/ide/components/ide_status_bar_spec.js
index 69f163574fb..3facf1c266a 100644
--- a/spec/javascripts/ide/components/ide_status_bar_spec.js
+++ b/spec/javascripts/ide/components/ide_status_bar_spec.js
@@ -1,94 +1,129 @@
import Vue from 'vue';
+import _ from 'lodash';
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import store from '~/ide/stores';
-import ideStatusBar from '~/ide/components/ide_status_bar.vue';
+import { TEST_HOST } from 'spec/test_constants';
+import { createStore } from '~/ide/stores';
+import IdeStatusBar from '~/ide/components/ide_status_bar.vue';
import { rightSidebarViews } from '~/ide/constants';
-import { resetStore } from '../helpers';
import { projectData } from '../mock_data';
+const TEST_PROJECT_ID = 'abcproject';
+const TEST_MERGE_REQUEST_ID = '9001';
+const TEST_MERGE_REQUEST_URL = `${TEST_HOST}merge-requests/${TEST_MERGE_REQUEST_ID}`;
+
describe('ideStatusBar', () => {
+ let store;
let vm;
- beforeEach(() => {
- const Component = Vue.extend(ideStatusBar);
+ const createComponent = () => {
+ vm = createComponentWithStore(Vue.extend(IdeStatusBar), store).$mount();
+ };
+ const findMRStatus = () => vm.$el.querySelector('.js-ide-status-mr');
- store.state.currentProjectId = 'abcproject';
- store.state.projects.abcproject = projectData;
+ beforeEach(() => {
+ store = createStore();
+ store.state.currentProjectId = TEST_PROJECT_ID;
+ store.state.projects[TEST_PROJECT_ID] = _.clone(projectData);
store.state.currentBranchId = 'master';
-
- vm = createComponentWithStore(Component, store).$mount();
});
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
- it('renders the statusbar', () => {
- expect(vm.$el.className).toBe('ide-status-bar');
- });
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
- describe('mounted', () => {
it('triggers a setInterval', () => {
expect(vm.intervalId).not.toBe(null);
});
- });
- describe('commitAgeUpdate', () => {
- beforeEach(function() {
- jasmine.clock().install();
- spyOn(vm, 'commitAgeUpdate').and.callFake(() => {});
- vm.startTimer();
+ it('renders the statusbar', () => {
+ expect(vm.$el.className).toBe('ide-status-bar');
});
- afterEach(function() {
- jasmine.clock().uninstall();
- });
+ describe('commitAgeUpdate', () => {
+ beforeEach(function() {
+ jasmine.clock().install();
+ spyOn(vm, 'commitAgeUpdate').and.callFake(() => {});
+ vm.startTimer();
+ });
- it('gets called every second', () => {
- expect(vm.commitAgeUpdate).not.toHaveBeenCalled();
+ afterEach(function() {
+ jasmine.clock().uninstall();
+ });
- jasmine.clock().tick(1100);
+ it('gets called every second', () => {
+ expect(vm.commitAgeUpdate).not.toHaveBeenCalled();
- expect(vm.commitAgeUpdate.calls.count()).toEqual(1);
+ jasmine.clock().tick(1100);
- jasmine.clock().tick(1000);
+ expect(vm.commitAgeUpdate.calls.count()).toEqual(1);
- expect(vm.commitAgeUpdate.calls.count()).toEqual(2);
+ jasmine.clock().tick(1000);
+
+ expect(vm.commitAgeUpdate.calls.count()).toEqual(2);
+ });
});
- });
- describe('getCommitPath', () => {
- it('returns the path to the commit details', () => {
- expect(vm.getCommitPath('abc123de')).toBe('/commit/abc123de');
+ describe('getCommitPath', () => {
+ it('returns the path to the commit details', () => {
+ expect(vm.getCommitPath('abc123de')).toBe('/commit/abc123de');
+ });
+ });
+
+ describe('pipeline status', () => {
+ it('opens right sidebar on clicking icon', done => {
+ spyOn(vm, 'openRightPane');
+ Vue.set(vm.$store.state.pipelines, 'latestPipeline', {
+ details: {
+ status: {
+ text: 'success',
+ details_path: 'test',
+ icon: 'status_success',
+ },
+ },
+ commit: {
+ author_gravatar_url: 'www',
+ },
+ });
+
+ vm.$nextTick()
+ .then(() => {
+ vm.$el.querySelector('.ide-status-pipeline button').click();
+
+ expect(vm.openRightPane).toHaveBeenCalledWith(rightSidebarViews.pipelines);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ it('does not show merge request status', () => {
+ expect(findMRStatus()).toBe(null);
});
});
- describe('pipeline status', () => {
- it('opens right sidebar on clicking icon', done => {
- spyOn(vm, 'openRightPane');
- Vue.set(vm.$store.state.pipelines, 'latestPipeline', {
- details: {
- status: {
- text: 'success',
- details_path: 'test',
- icon: 'status_success',
+ describe('with merge request in store', () => {
+ beforeEach(() => {
+ store.state.projects[TEST_PROJECT_ID].mergeRequests = {
+ [TEST_MERGE_REQUEST_ID]: {
+ web_url: TEST_MERGE_REQUEST_URL,
+ references: {
+ short: `!${TEST_MERGE_REQUEST_ID}`,
},
},
- commit: {
- author_gravatar_url: 'www',
- },
- });
+ };
+ store.state.currentMergeRequestId = TEST_MERGE_REQUEST_ID;
- vm.$nextTick()
- .then(() => {
- vm.$el.querySelector('.ide-status-pipeline button').click();
+ createComponent();
+ });
- expect(vm.openRightPane).toHaveBeenCalledWith(rightSidebarViews.pipelines);
- })
- .then(done)
- .catch(done.fail);
+ it('shows merge request status', () => {
+ expect(findMRStatus().textContent.trim()).toEqual(`Merge request !${TEST_MERGE_REQUEST_ID}`);
+ expect(findMRStatus().querySelector('a').href).toEqual(TEST_MERGE_REQUEST_URL);
});
});
});
diff --git a/spec/javascripts/ide/components/merge_requests/info_spec.js b/spec/javascripts/ide/components/merge_requests/info_spec.js
deleted file mode 100644
index 98a29e5128b..00000000000
--- a/spec/javascripts/ide/components/merge_requests/info_spec.js
+++ /dev/null
@@ -1,51 +0,0 @@
-import Vue from 'vue';
-import '~/behaviors/markdown/render_gfm';
-import { createStore } from '~/ide/stores';
-import Info from '~/ide/components/merge_requests/info.vue';
-import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
-
-describe('IDE merge request details', () => {
- let Component;
- let vm;
-
- beforeAll(() => {
- Component = Vue.extend(Info);
- });
-
- beforeEach(() => {
- const store = createStore();
- store.state.currentProjectId = 'gitlab-ce';
- store.state.currentMergeRequestId = 1;
- store.state.projects['gitlab-ce'] = {
- mergeRequests: {
- 1: {
- iid: 1,
- title: 'Testing',
- title_html: '<span class="title-html">Testing</span>',
- description: 'Description',
- description_html: '<p class="description-html">Description HTML</p>',
- },
- },
- };
-
- vm = createComponentWithStore(Component, store).$mount();
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- it('renders merge request IID', () => {
- expect(vm.$el.querySelector('.detail-page-header').textContent).toContain('!1');
- });
-
- it('renders title as HTML', () => {
- expect(vm.$el.querySelector('.title-html')).not.toBe(null);
- expect(vm.$el.querySelector('.title').textContent).toContain('Testing');
- });
-
- it('renders description as HTML', () => {
- expect(vm.$el.querySelector('.description-html')).not.toBe(null);
- expect(vm.$el.querySelector('.description').textContent).toContain('Description HTML');
- });
-});
diff --git a/spec/javascripts/ide/components/nav_dropdown_button_spec.js b/spec/javascripts/ide/components/nav_dropdown_button_spec.js
index 0d63869fba2..bbaf97164ea 100644
--- a/spec/javascripts/ide/components/nav_dropdown_button_spec.js
+++ b/spec/javascripts/ide/components/nav_dropdown_button_spec.js
@@ -2,62 +2,92 @@ import Vue from 'vue';
import { trimText } from 'spec/helpers/text_helper';
import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import NavDropdownButton from '~/ide/components/nav_dropdown_button.vue';
-import store from '~/ide/stores';
-import { resetStore } from '../helpers';
+import { createStore } from '~/ide/stores';
describe('NavDropdown', () => {
const TEST_BRANCH_ID = 'lorem-ipsum-dolar';
const TEST_MR_ID = '12345';
- const Component = Vue.extend(NavDropdownButton);
+ let store;
let vm;
beforeEach(() => {
- vm = mountComponentWithStore(Component, { store });
-
- vm.$mount();
+ store = createStore();
});
afterEach(() => {
vm.$destroy();
-
- resetStore(store);
});
- it('renders empty placeholders, if state is falsey', () => {
- expect(trimText(vm.$el.textContent)).toEqual('- -');
- });
+ const createComponent = (props = {}) => {
+ vm = mountComponentWithStore(Vue.extend(NavDropdownButton), { props, store });
+ vm.$mount();
+ };
- it('renders branch name, if state has currentBranchId', done => {
- vm.$store.state.currentBranchId = TEST_BRANCH_ID;
+ const findIcon = name => vm.$el.querySelector(`.ic-${name}`);
+ const findMRIcon = () => findIcon('merge-request');
+ const findBranchIcon = () => findIcon('branch');
- vm.$nextTick()
- .then(() => {
- expect(trimText(vm.$el.textContent)).toEqual(`${TEST_BRANCH_ID} -`);
- })
- .then(done)
- .catch(done.fail);
- });
+ describe('normal', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders empty placeholders, if state is falsey', () => {
+ expect(trimText(vm.$el.textContent)).toEqual('- -');
+ });
- it('renders mr id, if state has currentMergeRequestId', done => {
- vm.$store.state.currentMergeRequestId = TEST_MR_ID;
+ it('renders branch name, if state has currentBranchId', done => {
+ vm.$store.state.currentBranchId = TEST_BRANCH_ID;
- vm.$nextTick()
- .then(() => {
- expect(trimText(vm.$el.textContent)).toEqual(`- !${TEST_MR_ID}`);
- })
- .then(done)
- .catch(done.fail);
+ vm.$nextTick()
+ .then(() => {
+ expect(trimText(vm.$el.textContent)).toEqual(`${TEST_BRANCH_ID} -`);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('renders mr id, if state has currentMergeRequestId', done => {
+ vm.$store.state.currentMergeRequestId = TEST_MR_ID;
+
+ vm.$nextTick()
+ .then(() => {
+ expect(trimText(vm.$el.textContent)).toEqual(`- !${TEST_MR_ID}`);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('renders branch and mr, if state has both', done => {
+ vm.$store.state.currentBranchId = TEST_BRANCH_ID;
+ vm.$store.state.currentMergeRequestId = TEST_MR_ID;
+
+ vm.$nextTick()
+ .then(() => {
+ expect(trimText(vm.$el.textContent)).toEqual(`${TEST_BRANCH_ID} !${TEST_MR_ID}`);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('shows icons', () => {
+ expect(findBranchIcon()).toBeTruthy();
+ expect(findMRIcon()).toBeTruthy();
+ });
});
- it('renders branch and mr, if state has both', done => {
- vm.$store.state.currentBranchId = TEST_BRANCH_ID;
- vm.$store.state.currentMergeRequestId = TEST_MR_ID;
+ describe('with showMergeRequests false', () => {
+ beforeEach(() => {
+ createComponent({ showMergeRequests: false });
+ });
+
+ it('shows single empty placeholder, if state is falsey', () => {
+ expect(trimText(vm.$el.textContent)).toEqual('-');
+ });
- vm.$nextTick()
- .then(() => {
- expect(trimText(vm.$el.textContent)).toEqual(`${TEST_BRANCH_ID} !${TEST_MR_ID}`);
- })
- .then(done)
- .catch(done.fail);
+ it('shows only branch icon', () => {
+ expect(findBranchIcon()).toBeTruthy();
+ expect(findMRIcon()).toBe(null);
+ });
});
});
diff --git a/spec/javascripts/ide/components/nav_dropdown_spec.js b/spec/javascripts/ide/components/nav_dropdown_spec.js
index fe1d0ca371d..dfb4d03540f 100644
--- a/spec/javascripts/ide/components/nav_dropdown_spec.js
+++ b/spec/javascripts/ide/components/nav_dropdown_spec.js
@@ -3,6 +3,9 @@ import Vue from 'vue';
import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import store from '~/ide/stores';
import NavDropdown from '~/ide/components/nav_dropdown.vue';
+import { PERMISSION_READ_MR } from '~/ide/constants';
+
+const TEST_PROJECT_ID = 'lorem-ipsum';
describe('IDE NavDropdown', () => {
const Component = Vue.extend(NavDropdown);
@@ -10,6 +13,12 @@ describe('IDE NavDropdown', () => {
let $dropdown;
beforeEach(() => {
+ store.state.currentProjectId = TEST_PROJECT_ID;
+ Vue.set(store.state.projects, TEST_PROJECT_ID, {
+ userPermissions: {
+ [PERMISSION_READ_MR]: true,
+ },
+ });
vm = mountComponentWithStore(Component, { store });
$dropdown = $(vm.$el);
@@ -21,6 +30,9 @@ describe('IDE NavDropdown', () => {
vm.$destroy();
});
+ const findIcon = name => vm.$el.querySelector(`.ic-${name}`);
+ const findMRIcon = () => findIcon('merge-request');
+
it('renders nothing initially', () => {
expect(vm.$el).not.toContainElement('.ide-nav-form');
});
@@ -47,4 +59,22 @@ describe('IDE NavDropdown', () => {
.then(done)
.catch(done.fail);
});
+
+ it('renders merge request icon', () => {
+ expect(findMRIcon()).not.toBeNull();
+ });
+
+ describe('when user cannot read merge requests', () => {
+ beforeEach(done => {
+ store.state.projects[TEST_PROJECT_ID].userPermissions = {};
+
+ vm.$nextTick()
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('does not render merge requests', () => {
+ expect(findMRIcon()).toBeNull();
+ });
+ });
});
diff --git a/spec/javascripts/ide/components/new_dropdown/upload_spec.js b/spec/javascripts/ide/components/new_dropdown/upload_spec.js
index 4ebd0977832..66ddf6c0ee6 100644
--- a/spec/javascripts/ide/components/new_dropdown/upload_spec.js
+++ b/spec/javascripts/ide/components/new_dropdown/upload_spec.js
@@ -14,7 +14,7 @@ describe('new dropdown upload', () => {
vm.entryName = 'testing';
- spyOn(vm, '$emit');
+ spyOn(vm, '$emit').and.callThrough();
});
afterEach(() => {
@@ -61,31 +61,44 @@ describe('new dropdown upload', () => {
const binaryTarget = {
result: 'base64,w4I=',
};
- const textFile = {
- name: 'textFile',
- type: 'text/plain',
- };
+ const textFile = new File(['plain text'], 'textFile');
+
const binaryFile = {
name: 'binaryFile',
type: 'image/png',
};
- it('creates file in plain text (without encoding) if the file content is plain text', () => {
+ beforeEach(() => {
+ spyOn(FileReader.prototype, 'readAsText').and.callThrough();
+ });
+
+ it('calls readAsText and creates file in plain text (without encoding) if the file content is plain text', done => {
+ const waitForCreate = new Promise(resolve => vm.$on('create', resolve));
+
vm.createFile(textTarget, textFile);
- expect(vm.$emit).toHaveBeenCalledWith('create', {
- name: textFile.name,
- type: 'blob',
- content: 'plain text',
- base64: false,
- binary: false,
- rawPath: '',
- });
+ expect(FileReader.prototype.readAsText).toHaveBeenCalledWith(textFile);
+
+ waitForCreate
+ .then(() => {
+ expect(vm.$emit).toHaveBeenCalledWith('create', {
+ name: textFile.name,
+ type: 'blob',
+ content: 'plain text',
+ base64: false,
+ binary: false,
+ rawPath: '',
+ });
+ })
+ .then(done)
+ .catch(done.fail);
});
it('splits content on base64 if binary', () => {
vm.createFile(binaryTarget, binaryFile);
+ expect(FileReader.prototype.readAsText).not.toHaveBeenCalledWith(textFile);
+
expect(vm.$emit).toHaveBeenCalledWith('create', {
name: binaryFile.name,
type: 'blob',
diff --git a/spec/javascripts/ide/components/repo_editor_spec.js b/spec/javascripts/ide/components/repo_editor_spec.js
index 8935d8f56fc..ef0299f0d56 100644
--- a/spec/javascripts/ide/components/repo_editor_spec.js
+++ b/spec/javascripts/ide/components/repo_editor_spec.js
@@ -5,7 +5,7 @@ import axios from '~/lib/utils/axios_utils';
import store from '~/ide/stores';
import repoEditor from '~/ide/components/repo_editor.vue';
import Editor from '~/ide/lib/editor';
-import { activityBarViews, FILE_VIEW_MODE_EDITOR, FILE_VIEW_MODE_PREVIEW } from '~/ide/constants';
+import { leftSidebarViews, FILE_VIEW_MODE_EDITOR, FILE_VIEW_MODE_PREVIEW } from '~/ide/constants';
import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
import setTimeoutPromise from '../../helpers/set_timeout_promise_helper';
import { file, resetStore } from '../helpers';
@@ -359,7 +359,7 @@ describe('RepoEditor', () => {
});
it('hides tabs in review mode', done => {
- vm.$store.state.currentActivityView = activityBarViews.review;
+ vm.$store.state.currentActivityView = leftSidebarViews.review.name;
vm.$nextTick(() => {
expect(vm.$el.querySelector('.nav-links')).toBe(null);
@@ -369,7 +369,7 @@ describe('RepoEditor', () => {
});
it('hides tabs in commit mode', done => {
- vm.$store.state.currentActivityView = activityBarViews.commit;
+ vm.$store.state.currentActivityView = leftSidebarViews.commit.name;
vm.$nextTick(() => {
expect(vm.$el.querySelector('.nav-links')).toBe(null);
diff --git a/spec/javascripts/ide/lib/editor_spec.js b/spec/javascripts/ide/lib/editor_spec.js
index f1973f7798f..556bd45d3a5 100644
--- a/spec/javascripts/ide/lib/editor_spec.js
+++ b/spec/javascripts/ide/lib/editor_spec.js
@@ -74,6 +74,7 @@ describe('Multi-file editor library', () => {
renderSideBySide: true,
renderLineHighlight: 'all',
hideCursorInOverviewRuler: false,
+ theme: 'vs white',
});
});
});
diff --git a/spec/javascripts/ide/stores/actions/merge_request_spec.js b/spec/javascripts/ide/stores/actions/merge_request_spec.js
index ca8f33407fd..ce09cf51ac5 100644
--- a/spec/javascripts/ide/stores/actions/merge_request_spec.js
+++ b/spec/javascripts/ide/stores/actions/merge_request_spec.js
@@ -8,7 +8,7 @@ import actions, {
openMergeRequest,
} from '~/ide/stores/actions/merge_request';
import service from '~/ide/services';
-import { activityBarViews } from '~/ide/constants';
+import { leftSidebarViews, PERMISSION_READ_MR } from '~/ide/constants';
import { resetStore } from '../../helpers';
const TEST_PROJECT = 'abcproject';
@@ -23,6 +23,9 @@ describe('IDE store merge request actions', () => {
store.state.projects[TEST_PROJECT] = {
id: TEST_PROJECT_ID,
mergeRequests: {},
+ userPermissions: {
+ [PERMISSION_READ_MR]: true,
+ },
};
});
@@ -79,6 +82,19 @@ describe('IDE store merge request actions', () => {
})
.catch(done.fail);
});
+
+ it('does nothing if user cannot read MRs', done => {
+ store.state.projects[TEST_PROJECT].userPermissions[PERMISSION_READ_MR] = false;
+
+ store
+ .dispatch('getMergeRequestsForBranch', { projectId: TEST_PROJECT, branchId: 'bar' })
+ .then(() => {
+ expect(service.getProjectMergeRequests).not.toHaveBeenCalled();
+ expect(store.state.currentMergeRequestId).toBe('');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
});
describe('no merge requests for branch available case', () => {
@@ -137,9 +153,7 @@ describe('IDE store merge request actions', () => {
store
.dispatch('getMergeRequestData', { projectId: TEST_PROJECT, mergeRequestId: 1 })
.then(() => {
- expect(service.getProjectMergeRequestData).toHaveBeenCalledWith(TEST_PROJECT, 1, {
- render_html: true,
- });
+ expect(service.getProjectMergeRequestData).toHaveBeenCalledWith(TEST_PROJECT, 1);
done();
})
@@ -180,7 +194,7 @@ describe('IDE store merge request actions', () => {
.then(done.fail)
.catch(() => {
expect(dispatch).toHaveBeenCalledWith('setErrorMessage', {
- text: 'An error occurred whilst loading the merge request.',
+ text: 'An error occurred while loading the merge request.',
action: jasmine.any(Function),
actionText: 'Please try again',
actionPayload: {
@@ -253,7 +267,7 @@ describe('IDE store merge request actions', () => {
.then(done.fail)
.catch(() => {
expect(dispatch).toHaveBeenCalledWith('setErrorMessage', {
- text: 'An error occurred whilst loading the merge request changes.',
+ text: 'An error occurred while loading the merge request changes.',
action: jasmine.any(Function),
actionText: 'Please try again',
actionPayload: {
@@ -323,7 +337,7 @@ describe('IDE store merge request actions', () => {
.then(done.fail)
.catch(() => {
expect(dispatch).toHaveBeenCalledWith('setErrorMessage', {
- text: 'An error occurred whilst loading the merge request version data.',
+ text: 'An error occurred while loading the merge request version data.',
action: jasmine.any(Function),
actionText: 'Please try again',
actionPayload: {
@@ -456,7 +470,7 @@ describe('IDE store merge request actions', () => {
.then(() => {
expect(store.dispatch).toHaveBeenCalledWith(
'updateActivityBarView',
- activityBarViews.review,
+ leftSidebarViews.review.name,
);
testMergeRequestChanges.changes.forEach((change, i) => {
diff --git a/spec/javascripts/ide/stores/actions/tree_spec.js b/spec/javascripts/ide/stores/actions/tree_spec.js
index be350b6f6cc..fabe44ce333 100644
--- a/spec/javascripts/ide/stores/actions/tree_spec.js
+++ b/spec/javascripts/ide/stores/actions/tree_spec.js
@@ -125,7 +125,7 @@ describe('Multi-file store tree actions', () => {
.then(done.fail)
.catch(() => {
expect(dispatch).toHaveBeenCalledWith('setErrorMessage', {
- text: 'An error occurred whilst loading all the files.',
+ text: 'An error occurred while loading all the files.',
action: jasmine.any(Function),
actionText: 'Please try again',
actionPayload: { projectId: 'abc/def', branchId: 'master-testing' },
diff --git a/spec/javascripts/ide/stores/actions_spec.js b/spec/javascripts/ide/stores/actions_spec.js
index d582462d542..364c8421b6b 100644
--- a/spec/javascripts/ide/stores/actions_spec.js
+++ b/spec/javascripts/ide/stores/actions_spec.js
@@ -225,35 +225,6 @@ describe('Multi-file store actions', () => {
.catch(done.fail);
});
- describe('when `gon.feature.stageAllByDefault` is true', () => {
- const originalGonFeatures = Object.assign({}, gon.features);
-
- beforeAll(() => {
- gon.features = { stageAllByDefault: true };
- });
-
- afterAll(() => {
- gon.features = originalGonFeatures;
- });
-
- it('adds tmp file to staged files', done => {
- const name = 'test';
-
- store
- .dispatch('createTempEntry', {
- name,
- branchId: 'mybranch',
- type: 'blob',
- })
- .then(() => {
- expect(store.state.stagedFiles).toEqual([jasmine.objectContaining({ name })]);
-
- done();
- })
- .catch(done.fail);
- });
- });
-
it('adds tmp file to open files', done => {
const name = 'test';
@@ -274,7 +245,7 @@ describe('Multi-file store actions', () => {
.catch(done.fail);
});
- it('adds tmp file to changed files', done => {
+ it('adds tmp file to staged files', done => {
const name = 'test';
store
@@ -284,9 +255,7 @@ describe('Multi-file store actions', () => {
type: 'blob',
})
.then(() => {
- expect(store.state.changedFiles).toEqual([
- jasmine.objectContaining({ name, tempFile: true }),
- ]);
+ expect(store.state.stagedFiles).toEqual([jasmine.objectContaining({ name })]);
done();
})
@@ -294,15 +263,9 @@ describe('Multi-file store actions', () => {
});
it('sets tmp file as active', () => {
- const dispatch = jasmine.createSpy();
- const commit = jasmine.createSpy();
+ createTempEntry(store, { name: 'test', branchId: 'mybranch', type: 'blob' });
- createTempEntry(
- { state: store.state, getters: store.getters, dispatch, commit },
- { name: 'test', branchId: 'mybranch', type: 'blob' },
- );
-
- expect(dispatch).toHaveBeenCalledWith('setFileActive', 'test');
+ expect(store.dispatch).toHaveBeenCalledWith('setFileActive', 'test');
});
it('creates flash message if file already exists', done => {
@@ -591,11 +554,7 @@ describe('Multi-file store actions', () => {
'path',
store.state,
[{ type: types.DELETE_ENTRY, payload: 'path' }],
- [
- { type: 'burstUnusedSeal' },
- { type: 'stageChange', payload: 'path' },
- { type: 'triggerFilesChange' },
- ],
+ [{ type: 'stageChange', payload: 'path' }, { type: 'triggerFilesChange' }],
done,
);
});
@@ -623,7 +582,6 @@ describe('Multi-file store actions', () => {
store.state,
[{ type: types.DELETE_ENTRY, payload: 'testFolder/entry-to-delete' }],
[
- { type: 'burstUnusedSeal' },
{ type: 'stageChange', payload: 'testFolder/entry-to-delete' },
{ type: 'triggerFilesChange' },
],
@@ -688,11 +646,7 @@ describe('Multi-file store actions', () => {
testEntry.path,
store.state,
[{ type: types.DELETE_ENTRY, payload: testEntry.path }],
- [
- { type: 'burstUnusedSeal' },
- { type: 'stageChange', payload: testEntry.path },
- { type: 'triggerFilesChange' },
- ],
+ [{ type: 'stageChange', payload: testEntry.path }, { type: 'triggerFilesChange' }],
done,
);
});
@@ -813,55 +767,19 @@ describe('Multi-file store actions', () => {
});
});
- describe('when `gon.feature.stageAllByDefault` is true', () => {
- const originalGonFeatures = Object.assign({}, gon.features);
-
- beforeAll(() => {
- gon.features = { stageAllByDefault: true };
- });
-
- afterAll(() => {
- gon.features = originalGonFeatures;
- });
-
- it('by default renames an entry and stages it', () => {
- const dispatch = jasmine.createSpy();
- const commit = jasmine.createSpy();
-
- renameEntry(
- { dispatch, commit, state: store.state, getters: store.getters },
- { path: 'orig', name: 'renamed' },
- );
-
- expect(commit.calls.allArgs()).toEqual([
- [types.RENAME_ENTRY, { path: 'orig', name: 'renamed', parentPath: undefined }],
- [types.STAGE_CHANGE, jasmine.objectContaining({ path: 'renamed' })],
- ]);
- });
- });
+ it('by default renames an entry and stages it', () => {
+ const dispatch = jasmine.createSpy();
+ const commit = jasmine.createSpy();
- it('by default renames an entry and adds to changed', done => {
- testAction(
- renameEntry,
+ renameEntry(
+ { dispatch, commit, state: store.state, getters: store.getters },
{ path: 'orig', name: 'renamed' },
- store.state,
- [
- {
- type: types.RENAME_ENTRY,
- payload: {
- path: 'orig',
- name: 'renamed',
- parentPath: undefined,
- },
- },
- {
- type: types.ADD_FILE_TO_CHANGED,
- payload: 'renamed',
- },
- ],
- jasmine.any(Object),
- done,
);
+
+ expect(commit.calls.allArgs()).toEqual([
+ [types.RENAME_ENTRY, { path: 'orig', name: 'renamed', parentPath: undefined }],
+ [types.STAGE_CHANGE, jasmine.objectContaining({ path: 'renamed' })],
+ ]);
});
it('if not changed, completely unstages and discards entry if renamed to original', done => {
diff --git a/spec/javascripts/ide/stores/modules/commit/actions_spec.js b/spec/javascripts/ide/stores/modules/commit/actions_spec.js
index cbc2401262f..fb8cb300209 100644
--- a/spec/javascripts/ide/stores/modules/commit/actions_spec.js
+++ b/spec/javascripts/ide/stores/modules/commit/actions_spec.js
@@ -7,7 +7,7 @@ import eventHub from '~/ide/eventhub';
import consts from '~/ide/stores/modules/commit/constants';
import * as mutationTypes from '~/ide/stores/modules/commit/mutation_types';
import * as actions from '~/ide/stores/modules/commit/actions';
-import { commitActionTypes } from '~/ide/constants';
+import { commitActionTypes, PERMISSION_CREATE_MR } from '~/ide/constants';
import testAction from '../../../../helpers/vuex_action_helper';
const TEST_COMMIT_SHA = '123456789';
@@ -131,7 +131,7 @@ describe('IDE commit module actions', () => {
.dispatch('commit/setLastCommitMessage', { short_id: '123' })
.then(() => {
expect(store.state.lastCommitMsg).toContain(
- 'Your changes have been committed. Commit <a href="http://testing/commit/123" class="commit-sha">123</a>',
+ 'Your changes have been committed. Commit <a href="http://testing/-/commit/123" class="commit-sha">123</a>',
);
})
.then(done)
@@ -149,7 +149,7 @@ describe('IDE commit module actions', () => {
})
.then(() => {
expect(store.state.lastCommitMsg).toBe(
- 'Your changes have been committed. Commit <a href="http://testing/commit/123" class="commit-sha">123</a> with 1 additions, 2 deletions.',
+ 'Your changes have been committed. Commit <a href="http://testing/-/commit/123" class="commit-sha">123</a> with 1 additions, 2 deletions.',
);
})
.then(done)
@@ -313,6 +313,9 @@ describe('IDE commit module actions', () => {
},
},
},
+ userPermissions: {
+ [PERMISSION_CREATE_MR]: true,
+ },
},
},
});
@@ -407,7 +410,7 @@ describe('IDE commit module actions', () => {
.dispatch('commit/commitChanges')
.then(() => {
expect(store.state.lastCommitMsg).toBe(
- 'Your changes have been committed. Commit <a href="webUrl/commit/123" class="commit-sha">123</a> with 1 additions, 2 deletions.',
+ 'Your changes have been committed. Commit <a href="webUrl/-/commit/123" class="commit-sha">123</a> with 1 additions, 2 deletions.',
);
done();
@@ -461,7 +464,7 @@ describe('IDE commit module actions', () => {
.dispatch('commit/commitChanges')
.then(() => {
expect(visitUrl).toHaveBeenCalledWith(
- `webUrl/merge_requests/new?merge_request[source_branch]=${
+ `webUrl/-/merge_requests/new?merge_request[source_branch]=${
store.getters['commit/placeholderBranchName']
}&merge_request[target_branch]=master&nav_source=webide`,
);
diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js
index 4c405fbc4d1..fdd6f4e6470 100644
--- a/spec/javascripts/issue_show/components/app_spec.js
+++ b/spec/javascripts/issue_show/components/app_spec.js
@@ -7,13 +7,13 @@ import axios from '~/lib/utils/axios_utils';
import '~/behaviors/markdown/render_gfm';
import issuableApp from '~/issue_show/components/app.vue';
import eventHub from '~/issue_show/event_hub';
-import issueShowData from '../mock_data';
+import { initialRequest, secondRequest } from '../mock_data';
function formatText(text) {
return text.trim().replace(/\s\s+/g, ' ');
}
-const REALTIME_REQUEST_STACK = [issueShowData.initialRequest, issueShowData.secondRequest];
+const REALTIME_REQUEST_STACK = [initialRequest, secondRequest];
describe('Issuable output', () => {
let mock;
diff --git a/spec/javascripts/issue_show/mock_data.js b/spec/javascripts/issue_show/mock_data.js
index f4475aadb8b..1b391bd1588 100644
--- a/spec/javascripts/issue_show/mock_data.js
+++ b/spec/javascripts/issue_show/mock_data.js
@@ -1,24 +1 @@
-export default {
- initialRequest: {
- title: '<p>this is a title</p>',
- title_text: 'this is a title',
- description: '<p>this is a description!</p>',
- description_text: 'this is a description',
- task_status: '2 of 4 completed',
- updated_at: '2015-05-15T12:31:04.428Z',
- updated_by_name: 'Some User',
- updated_by_path: '/some_user',
- lock_version: 1,
- },
- secondRequest: {
- title: '<p>2</p>',
- title_text: '2',
- description: '<p>42</p>',
- description_text: '42',
- task_status: '0 of 0 completed',
- updated_at: '2016-05-15T12:31:04.428Z',
- updated_by_name: 'Other User',
- updated_by_path: '/other_user',
- lock_version: 2,
- },
-};
+export * from '../../frontend/issue_show/mock_data';
diff --git a/spec/javascripts/jobs/components/environments_block_spec.js b/spec/javascripts/jobs/components/environments_block_spec.js
index 64a59d659a7..4f2359e83b6 100644
--- a/spec/javascripts/jobs/components/environments_block_spec.js
+++ b/spec/javascripts/jobs/components/environments_block_spec.js
@@ -4,6 +4,7 @@ import mountComponent from '../../helpers/vue_mount_component_helper';
const TEST_CLUSTER_NAME = 'test_cluster';
const TEST_CLUSTER_PATH = 'path/to/test_cluster';
+const TEST_KUBERNETES_NAMESPACE = 'this-is-a-kubernetes-namespace';
describe('Environments block', () => {
const Component = Vue.extend(component);
@@ -28,17 +29,18 @@ describe('Environments block', () => {
last_deployment: { ...lastDeployment },
});
- const createEnvironmentWithCluster = () => ({
- ...environment,
- last_deployment: {
- ...lastDeployment,
- cluster: { name: TEST_CLUSTER_NAME, path: TEST_CLUSTER_PATH },
- },
+ const createDeploymentWithCluster = () => ({ name: TEST_CLUSTER_NAME, path: TEST_CLUSTER_PATH });
+
+ const createDeploymentWithClusterAndKubernetesNamespace = () => ({
+ name: TEST_CLUSTER_NAME,
+ path: TEST_CLUSTER_PATH,
+ kubernetes_namespace: TEST_KUBERNETES_NAMESPACE,
});
- const createComponent = (deploymentStatus = {}) => {
+ const createComponent = (deploymentStatus = {}, deploymentCluster = {}) => {
vm = mountComponent(Component, {
deploymentStatus,
+ deploymentCluster,
iconStatus: status,
});
};
@@ -62,15 +64,36 @@ describe('Environments block', () => {
expect(findText()).toEqual('This job is deployed to environment.');
});
- it('renders info with cluster', () => {
- createComponent({
- status: 'last',
- environment: createEnvironmentWithCluster(),
+ describe('when there is a cluster', () => {
+ it('renders info with cluster', () => {
+ createComponent(
+ {
+ status: 'last',
+ environment: createEnvironmentWithLastDeployment(),
+ },
+ createDeploymentWithCluster(),
+ );
+
+ expect(findText()).toEqual(
+ `This job is deployed to environment using cluster ${TEST_CLUSTER_NAME}.`,
+ );
});
- expect(findText()).toEqual(
- `This job is deployed to environment using cluster ${TEST_CLUSTER_NAME}.`,
- );
+ describe('when there is a kubernetes namespace', () => {
+ it('renders info with cluster', () => {
+ createComponent(
+ {
+ status: 'last',
+ environment: createEnvironmentWithLastDeployment(),
+ },
+ createDeploymentWithClusterAndKubernetesNamespace(),
+ );
+
+ expect(findText()).toEqual(
+ `This job is deployed to environment using cluster ${TEST_CLUSTER_NAME} and namespace ${TEST_KUBERNETES_NAMESPACE}.`,
+ );
+ });
+ });
});
});
@@ -89,15 +112,36 @@ describe('Environments block', () => {
expect(findJobDeploymentLink().getAttribute('href')).toEqual('bar');
});
- it('renders info with cluster', () => {
- createComponent({
- status: 'out_of_date',
- environment: createEnvironmentWithCluster(),
+ describe('when there is a cluster', () => {
+ it('renders info with cluster', () => {
+ createComponent(
+ {
+ status: 'out_of_date',
+ environment: createEnvironmentWithLastDeployment(),
+ },
+ createDeploymentWithCluster(),
+ );
+
+ expect(findText()).toEqual(
+ `This job is an out-of-date deployment to environment using cluster ${TEST_CLUSTER_NAME}. View the most recent deployment.`,
+ );
});
- expect(findText()).toEqual(
- `This job is an out-of-date deployment to environment using cluster ${TEST_CLUSTER_NAME}. View the most recent deployment.`,
- );
+ describe('when there is a kubernetes namespace', () => {
+ it('renders info with cluster', () => {
+ createComponent(
+ {
+ status: 'out_of_date',
+ environment: createEnvironmentWithLastDeployment(),
+ },
+ createDeploymentWithClusterAndKubernetesNamespace(),
+ );
+
+ expect(findText()).toEqual(
+ `This job is an out-of-date deployment to environment using cluster ${TEST_CLUSTER_NAME} and namespace ${TEST_KUBERNETES_NAMESPACE}. View the most recent deployment.`,
+ );
+ });
+ });
});
});
@@ -143,7 +187,7 @@ describe('Environments block', () => {
});
describe('without last deployment', () => {
- it('renders info about failed deployment', () => {
+ it('renders info about deployment being created', () => {
createComponent({
status: 'creating',
environment,
@@ -151,6 +195,22 @@ describe('Environments block', () => {
expect(findText()).toEqual('This job is creating a deployment to environment.');
});
+
+ describe('when there is a cluster', () => {
+ it('includes information about the cluster', () => {
+ createComponent(
+ {
+ status: 'creating',
+ environment,
+ },
+ createDeploymentWithCluster(),
+ );
+
+ expect(findText()).toEqual(
+ `This job is creating a deployment to environment using cluster ${TEST_CLUSTER_NAME}.`,
+ );
+ });
+ });
});
describe('without environment', () => {
@@ -167,10 +227,13 @@ describe('Environments block', () => {
describe('with a cluster', () => {
it('renders the cluster link', () => {
- createComponent({
- status: 'last',
- environment: createEnvironmentWithCluster(),
- });
+ createComponent(
+ {
+ status: 'last',
+ environment: createEnvironmentWithLastDeployment(),
+ },
+ createDeploymentWithCluster(),
+ );
expect(findText()).toEqual(
`This job is deployed to environment using cluster ${TEST_CLUSTER_NAME}.`,
@@ -181,18 +244,13 @@ describe('Environments block', () => {
describe('when the cluster is missing the path', () => {
it('renders the name without a link', () => {
- const cluster = {
- name: 'the-cluster',
- };
- createComponent({
- status: 'last',
- environment: Object.assign({}, environment, {
- last_deployment: {
- ...lastDeployment,
- cluster,
- },
- }),
- });
+ createComponent(
+ {
+ status: 'last',
+ environment: createEnvironmentWithLastDeployment(),
+ },
+ { name: 'the-cluster' },
+ );
expect(findText()).toContain('using cluster the-cluster.');
diff --git a/spec/javascripts/jobs/components/stages_dropdown_spec.js b/spec/javascripts/jobs/components/stages_dropdown_spec.js
index e091aece564..f1a01530104 100644
--- a/spec/javascripts/jobs/components/stages_dropdown_spec.js
+++ b/spec/javascripts/jobs/components/stages_dropdown_spec.js
@@ -27,7 +27,7 @@ describe('Stages Dropdown', () => {
},
merge_request: {
iid: 1234,
- path: '/root/detached-merge-request-pipelines/merge_requests/1',
+ path: '/root/detached-merge-request-pipelines/-/merge_requests/1',
title: 'Update README.md',
source_branch: 'feature-1234',
source_branch_path: '/root/detached-merge-request-pipelines/branches/feature-1234',
diff --git a/spec/javascripts/jobs/mock_data.js b/spec/javascripts/jobs/mock_data.js
index 3d40e94d219..f0ba46c058a 100644
--- a/spec/javascripts/jobs/mock_data.js
+++ b/spec/javascripts/jobs/mock_data.js
@@ -1,1191 +1,2 @@
-import { TEST_HOST } from 'spec/test_constants';
-
-const threeWeeksAgo = new Date();
-threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
-
-export const stages = [
- {
- name: 'build',
- title: 'build: running',
- groups: [
- {
- name: 'build:linux',
- size: 1,
- status: {
- icon: 'status_pending',
- text: 'pending',
- label: 'pending',
- group: 'pending',
- tooltip: 'pending',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/1180',
- illustration: {
- image: 'illustrations/pending_job_empty.svg',
- size: 'svg-430',
- title: 'This job has not started yet',
- content: 'This job is in pending state and is waiting to be picked by a runner',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
- action: {
- icon: 'cancel',
- title: 'Cancel',
- path: '/gitlab-org/gitlab-shell/-/jobs/1180/cancel',
- method: 'post',
- },
- },
- jobs: [
- {
- id: 1180,
- name: 'build:linux',
- started: false,
- build_path: '/gitlab-org/gitlab-shell/-/jobs/1180',
- cancel_path: '/gitlab-org/gitlab-shell/-/jobs/1180/cancel',
- playable: false,
- created_at: '2018-09-28T11:09:57.229Z',
- updated_at: '2018-09-28T11:09:57.503Z',
- status: {
- icon: 'status_pending',
- text: 'pending',
- label: 'pending',
- group: 'pending',
- tooltip: 'pending',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/1180',
- illustration: {
- image: 'illustrations/pending_job_empty.svg',
- size: 'svg-430',
- title: 'This job has not started yet',
- content: 'This job is in pending state and is waiting to be picked by a runner',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
- action: {
- icon: 'cancel',
- title: 'Cancel',
- path: '/gitlab-org/gitlab-shell/-/jobs/1180/cancel',
- method: 'post',
- },
- },
- },
- ],
- },
- {
- name: 'build:osx',
- size: 1,
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/444',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/444/retry',
- method: 'post',
- },
- },
- jobs: [
- {
- id: 444,
- name: 'build:osx',
- started: '2018-05-18T05:32:20.655Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/444',
- retry_path: '/gitlab-org/gitlab-shell/-/jobs/444/retry',
- playable: false,
- created_at: '2018-05-18T15:32:54.364Z',
- updated_at: '2018-05-18T15:32:54.364Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/444',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/444/retry',
- method: 'post',
- },
- },
- },
- ],
- },
- ],
- status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
- tooltip: 'running',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/pipelines/27#build',
- illustration: null,
- favicon:
- '/assets/ci_favicons/favicon_status_running-9c635b2419a8e1ec991c993061b89cc5aefc0743bb238ecd0c381e7741a70e8c.png',
- },
- path: '/gitlab-org/gitlab-shell/pipelines/27#build',
- dropdown_path: '/gitlab-org/gitlab-shell/pipelines/27/stage.json?stage=build',
- },
- {
- name: 'test',
- title: 'test: passed with warnings',
- groups: [
- {
- name: 'jenkins',
- size: 1,
- status: {
- icon: 'status_success',
- text: 'passed',
- label: null,
- group: 'success',
- tooltip: null,
- has_details: false,
- details_path: null,
- illustration: null,
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- },
- jobs: [
- {
- id: 459,
- name: 'jenkins',
- started: '2018-05-18T09:32:20.658Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/459',
- playable: false,
- created_at: '2018-05-18T15:32:55.330Z',
- updated_at: '2018-05-18T15:32:55.330Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: null,
- group: 'success',
- tooltip: null,
- has_details: false,
- details_path: null,
- illustration: null,
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- },
- },
- ],
- },
- {
- name: 'rspec:linux',
- size: 3,
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: false,
- details_path: null,
- illustration: null,
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- },
- jobs: [
- {
- id: 445,
- name: 'rspec:linux 0 3',
- started: '2018-05-18T07:32:20.655Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/445',
- retry_path: '/gitlab-org/gitlab-shell/-/jobs/445/retry',
- playable: false,
- created_at: '2018-05-18T15:32:54.425Z',
- updated_at: '2018-05-18T15:32:54.425Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/445',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/445/retry',
- method: 'post',
- },
- },
- },
- {
- id: 446,
- name: 'rspec:linux 1 3',
- started: '2018-05-18T07:32:20.655Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/446',
- retry_path: '/gitlab-org/gitlab-shell/-/jobs/446/retry',
- playable: false,
- created_at: '2018-05-18T15:32:54.506Z',
- updated_at: '2018-05-18T15:32:54.506Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/446',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/446/retry',
- method: 'post',
- },
- },
- },
- {
- id: 447,
- name: 'rspec:linux 2 3',
- started: '2018-05-18T07:32:20.656Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/447',
- retry_path: '/gitlab-org/gitlab-shell/-/jobs/447/retry',
- playable: false,
- created_at: '2018-05-18T15:32:54.572Z',
- updated_at: '2018-05-18T15:32:54.572Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/447',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/447/retry',
- method: 'post',
- },
- },
- },
- ],
- },
- {
- name: 'rspec:osx',
- size: 1,
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/452',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/452/retry',
- method: 'post',
- },
- },
- jobs: [
- {
- id: 452,
- name: 'rspec:osx',
- started: '2018-05-18T07:32:20.657Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/452',
- retry_path: '/gitlab-org/gitlab-shell/-/jobs/452/retry',
- playable: false,
- created_at: '2018-05-18T15:32:54.920Z',
- updated_at: '2018-05-18T15:32:54.920Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/452',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/452/retry',
- method: 'post',
- },
- },
- },
- ],
- },
- {
- name: 'rspec:windows',
- size: 3,
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: false,
- details_path: null,
- illustration: null,
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- },
- jobs: [
- {
- id: 448,
- name: 'rspec:windows 0 3',
- started: '2018-05-18T07:32:20.656Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/448',
- retry_path: '/gitlab-org/gitlab-shell/-/jobs/448/retry',
- playable: false,
- created_at: '2018-05-18T15:32:54.639Z',
- updated_at: '2018-05-18T15:32:54.639Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/448',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/448/retry',
- method: 'post',
- },
- },
- },
- {
- id: 449,
- name: 'rspec:windows 1 3',
- started: '2018-05-18T07:32:20.656Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/449',
- retry_path: '/gitlab-org/gitlab-shell/-/jobs/449/retry',
- playable: false,
- created_at: '2018-05-18T15:32:54.703Z',
- updated_at: '2018-05-18T15:32:54.703Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/449',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/449/retry',
- method: 'post',
- },
- },
- },
- {
- id: 451,
- name: 'rspec:windows 2 3',
- started: '2018-05-18T07:32:20.657Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/451',
- retry_path: '/gitlab-org/gitlab-shell/-/jobs/451/retry',
- playable: false,
- created_at: '2018-05-18T15:32:54.853Z',
- updated_at: '2018-05-18T15:32:54.853Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/451',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/451/retry',
- method: 'post',
- },
- },
- },
- ],
- },
- {
- name: 'spinach:linux',
- size: 1,
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/453',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/453/retry',
- method: 'post',
- },
- },
- jobs: [
- {
- id: 453,
- name: 'spinach:linux',
- started: '2018-05-18T07:32:20.657Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/453',
- retry_path: '/gitlab-org/gitlab-shell/-/jobs/453/retry',
- playable: false,
- created_at: '2018-05-18T15:32:54.993Z',
- updated_at: '2018-05-18T15:32:54.993Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/453',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/453/retry',
- method: 'post',
- },
- },
- },
- ],
- },
- {
- name: 'spinach:osx',
- size: 1,
- status: {
- icon: 'status_warning',
- text: 'failed',
- label: 'failed (allowed to fail)',
- group: 'failed-with-warnings',
- tooltip: 'failed - (unknown failure) (allowed to fail)',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/454',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/454/retry',
- method: 'post',
- },
- },
- jobs: [
- {
- id: 454,
- name: 'spinach:osx',
- started: '2018-05-18T07:32:20.657Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/454',
- retry_path: '/gitlab-org/gitlab-shell/-/jobs/454/retry',
- playable: false,
- created_at: '2018-05-18T15:32:55.053Z',
- updated_at: '2018-05-18T15:32:55.053Z',
- status: {
- icon: 'status_warning',
- text: 'failed',
- label: 'failed (allowed to fail)',
- group: 'failed-with-warnings',
- tooltip: 'failed - (unknown failure) (allowed to fail)',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/454',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/454/retry',
- method: 'post',
- },
- },
- callout_message: 'There is an unknown failure, please try again',
- recoverable: true,
- },
- ],
- },
- ],
- status: {
- icon: 'status_warning',
- text: 'passed',
- label: 'passed with warnings',
- group: 'success-with-warnings',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/pipelines/27#test',
- illustration: null,
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- },
- path: '/gitlab-org/gitlab-shell/pipelines/27#test',
- dropdown_path: '/gitlab-org/gitlab-shell/pipelines/27/stage.json?stage=test',
- },
- {
- name: 'deploy',
- title: 'deploy: running',
- groups: [
- {
- name: 'production',
- size: 1,
- status: {
- icon: 'status_created',
- text: 'created',
- label: 'created',
- group: 'created',
- tooltip: 'created',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/457',
- illustration: {
- image: 'illustrations/job_not_triggered.svg',
- size: 'svg-306',
- title: 'This job has not been triggered yet',
- content:
- 'This job depends on upstream jobs that need to succeed in order for this job to be triggered',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_created-4b975aa976d24e5a3ea7cd9a5713e6ce2cd9afd08b910415e96675de35f64955.png',
- action: {
- icon: 'cancel',
- title: 'Cancel',
- path: '/gitlab-org/gitlab-shell/-/jobs/457/cancel',
- method: 'post',
- },
- },
- jobs: [
- {
- id: 457,
- name: 'production',
- started: false,
- build_path: '/gitlab-org/gitlab-shell/-/jobs/457',
- cancel_path: '/gitlab-org/gitlab-shell/-/jobs/457/cancel',
- playable: false,
- created_at: '2018-05-18T15:32:55.259Z',
- updated_at: '2018-09-28T11:09:57.454Z',
- status: {
- icon: 'status_created',
- text: 'created',
- label: 'created',
- group: 'created',
- tooltip: 'created',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/457',
- illustration: {
- image: 'illustrations/job_not_triggered.svg',
- size: 'svg-306',
- title: 'This job has not been triggered yet',
- content:
- 'This job depends on upstream jobs that need to succeed in order for this job to be triggered',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_created-4b975aa976d24e5a3ea7cd9a5713e6ce2cd9afd08b910415e96675de35f64955.png',
- action: {
- icon: 'cancel',
- title: 'Cancel',
- path: '/gitlab-org/gitlab-shell/-/jobs/457/cancel',
- method: 'post',
- },
- },
- },
- ],
- },
- {
- name: 'staging',
- size: 1,
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/455',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/455/retry',
- method: 'post',
- },
- },
- jobs: [
- {
- id: 455,
- name: 'staging',
- started: '2018-05-18T09:32:20.658Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/455',
- retry_path: '/gitlab-org/gitlab-shell/-/jobs/455/retry',
- playable: false,
- created_at: '2018-05-18T15:32:55.119Z',
- updated_at: '2018-05-18T15:32:55.119Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/455',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/455/retry',
- method: 'post',
- },
- },
- },
- ],
- },
- {
- name: 'stop staging',
- size: 1,
- status: {
- icon: 'status_created',
- text: 'created',
- label: 'created',
- group: 'created',
- tooltip: 'created',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/456',
- illustration: {
- image: 'illustrations/job_not_triggered.svg',
- size: 'svg-306',
- title: 'This job has not been triggered yet',
- content:
- 'This job depends on upstream jobs that need to succeed in order for this job to be triggered',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_created-4b975aa976d24e5a3ea7cd9a5713e6ce2cd9afd08b910415e96675de35f64955.png',
- action: {
- icon: 'cancel',
- title: 'Cancel',
- path: '/gitlab-org/gitlab-shell/-/jobs/456/cancel',
- method: 'post',
- },
- },
- jobs: [
- {
- id: 456,
- name: 'stop staging',
- started: false,
- build_path: '/gitlab-org/gitlab-shell/-/jobs/456',
- cancel_path: '/gitlab-org/gitlab-shell/-/jobs/456/cancel',
- playable: false,
- created_at: '2018-05-18T15:32:55.205Z',
- updated_at: '2018-09-28T11:09:57.396Z',
- status: {
- icon: 'status_created',
- text: 'created',
- label: 'created',
- group: 'created',
- tooltip: 'created',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/456',
- illustration: {
- image: 'illustrations/job_not_triggered.svg',
- size: 'svg-306',
- title: 'This job has not been triggered yet',
- content:
- 'This job depends on upstream jobs that need to succeed in order for this job to be triggered',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_created-4b975aa976d24e5a3ea7cd9a5713e6ce2cd9afd08b910415e96675de35f64955.png',
- action: {
- icon: 'cancel',
- title: 'Cancel',
- path: '/gitlab-org/gitlab-shell/-/jobs/456/cancel',
- method: 'post',
- },
- },
- },
- ],
- },
- ],
- status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
- tooltip: 'running',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/pipelines/27#deploy',
- illustration: null,
- favicon:
- '/assets/ci_favicons/favicon_status_running-9c635b2419a8e1ec991c993061b89cc5aefc0743bb238ecd0c381e7741a70e8c.png',
- },
- path: '/gitlab-org/gitlab-shell/pipelines/27#deploy',
- dropdown_path: '/gitlab-org/gitlab-shell/pipelines/27/stage.json?stage=deploy',
- },
- {
- name: 'notify',
- title: 'notify: manual action',
- groups: [
- {
- name: 'slack',
- size: 1,
- status: {
- icon: 'status_manual',
- text: 'manual',
- label: 'manual play action',
- group: 'manual',
- tooltip: 'manual action',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/458',
- illustration: {
- image: 'illustrations/manual_action.svg',
- size: 'svg-394',
- title: 'This job requires a manual action',
- content:
- 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
- action: {
- icon: 'play',
- title: 'Play',
- path: '/gitlab-org/gitlab-shell/-/jobs/458/play',
- method: 'post',
- },
- },
- jobs: [
- {
- id: 458,
- name: 'slack',
- started: null,
- build_path: '/gitlab-org/gitlab-shell/-/jobs/458',
- play_path: '/gitlab-org/gitlab-shell/-/jobs/458/play',
- playable: true,
- created_at: '2018-05-18T15:32:55.303Z',
- updated_at: '2018-05-18T15:34:08.535Z',
- status: {
- icon: 'status_manual',
- text: 'manual',
- label: 'manual play action',
- group: 'manual',
- tooltip: 'manual action',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/458',
- illustration: {
- image: 'illustrations/manual_action.svg',
- size: 'svg-394',
- title: 'This job requires a manual action',
- content:
- 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
- action: {
- icon: 'play',
- title: 'Play',
- path: '/gitlab-org/gitlab-shell/-/jobs/458/play',
- method: 'post',
- },
- },
- },
- ],
- },
- ],
- status: {
- icon: 'status_manual',
- text: 'manual',
- label: 'manual action',
- group: 'manual',
- tooltip: 'manual action',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/pipelines/27#notify',
- illustration: null,
- favicon:
- '/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
- },
- path: '/gitlab-org/gitlab-shell/pipelines/27#notify',
- dropdown_path: '/gitlab-org/gitlab-shell/pipelines/27/stage.json?stage=notify',
- },
-];
-
-export default {
- id: 4757,
- name: 'test',
- build_path: '/root/ci-mock/-/jobs/4757',
- retry_path: '/root/ci-mock/-/jobs/4757/retry',
- cancel_path: '/root/ci-mock/-/jobs/4757/cancel',
- new_issue_path: '/root/ci-mock/issues/new',
- playable: false,
- created_at: threeWeeksAgo.toISOString(),
- updated_at: threeWeeksAgo.toISOString(),
- finished_at: threeWeeksAgo.toISOString(),
- queued: 9.54,
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- has_details: true,
- details_path: `${TEST_HOST}/root/ci-mock/-/jobs/4757`,
- favicon:
- '/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/root/ci-mock/-/jobs/4757/retry',
- method: 'post',
- },
- },
- coverage: 20,
- erased_at: threeWeeksAgo.toISOString(),
- erased: false,
- duration: 6.785563,
- tags: ['tag'],
- user: {
- name: 'Root',
- username: 'root',
- id: 1,
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
- web_url: 'http://localhost:3000/root',
- },
- erase_path: '/root/ci-mock/-/jobs/4757/erase',
- artifacts: [null],
- runner: {
- id: 1,
- description: 'local ci runner',
- edit_path: '/root/ci-mock/runners/1/edit',
- },
- pipeline: {
- id: 140,
- user: {
- name: 'Root',
- username: 'root',
- id: 1,
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
- web_url: 'http://localhost:3000/root',
- },
- active: false,
- coverage: null,
- source: 'unknown',
- created_at: '2017-05-24T09:59:58.634Z',
- updated_at: '2017-06-01T17:32:00.062Z',
- path: '/root/ci-mock/pipelines/140',
- flags: {
- latest: true,
- stuck: false,
- yaml_errors: false,
- retryable: false,
- cancelable: false,
- },
- details: {
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- has_details: true,
- details_path: '/root/ci-mock/pipelines/140',
- favicon:
- '/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
- },
- duration: 6,
- finished_at: '2017-06-01T17:32:00.042Z',
- stages: [
- {
- dropdown_path: '/jashkenas/underscore/pipelines/16/stage.json?stage=build',
- name: 'build',
- path: '/jashkenas/underscore/pipelines/16#build',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- },
- title: 'build: passed',
- },
- {
- dropdown_path: '/jashkenas/underscore/pipelines/16/stage.json?stage=test',
- name: 'test',
- path: '/jashkenas/underscore/pipelines/16#test',
- status: {
- icon: 'status_warning',
- text: 'passed',
- label: 'passed with warnings',
- group: 'success-with-warnings',
- },
- title: 'test: passed with warnings',
- },
- ],
- },
- ref: {
- name: 'abc',
- path: '/root/ci-mock/commits/abc',
- tag: false,
- branch: true,
- },
- commit: {
- id: 'c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
- short_id: 'c5864777',
- title: 'Add new file',
- created_at: '2017-05-24T10:59:52.000+01:00',
- parent_ids: ['798e5f902592192afaba73f4668ae30e56eae492'],
- message: 'Add new file',
- author_name: 'Root',
- author_email: 'admin@example.com',
- authored_date: '2017-05-24T10:59:52.000+01:00',
- committer_name: 'Root',
- committer_email: 'admin@example.com',
- committed_date: '2017-05-24T10:59:52.000+01:00',
- author: {
- name: 'Root',
- username: 'root',
- id: 1,
- state: 'active',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
- web_url: 'http://localhost:3000/root',
- },
- author_gravatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
- commit_url:
- 'http://localhost:3000/root/ci-mock/commit/c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
- commit_path: '/root/ci-mock/commit/c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
- },
- },
- metadata: {
- timeout_human_readable: '1m 40s',
- timeout_source: 'runner',
- },
- merge_request: {
- iid: 2,
- path: '/root/ci-mock/merge_requests/2',
- },
- raw_path: '/root/ci-mock/builds/4757/raw',
- has_trace: true,
-};
-
-export const jobsInStage = {
- name: 'build',
- title: 'build: running',
- latest_statuses: [
- {
- id: 1180,
- name: 'build:linux',
- started: false,
- build_path: '/gitlab-org/gitlab-shell/-/jobs/1180',
- cancel_path: '/gitlab-org/gitlab-shell/-/jobs/1180/cancel',
- playable: false,
- created_at: '2018-09-28T11:09:57.229Z',
- updated_at: '2018-09-28T11:09:57.503Z',
- status: {
- icon: 'status_pending',
- text: 'pending',
- label: 'pending',
- group: 'pending',
- tooltip: 'pending',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/1180',
- illustration: {
- image: 'illustrations/pending_job_empty.svg',
- size: 'svg-430',
- title: 'This job has not started yet',
- content: 'This job is in pending state and is waiting to be picked by a runner',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
- action: {
- icon: 'cancel',
- title: 'Cancel',
- path: '/gitlab-org/gitlab-shell/-/jobs/1180/cancel',
- method: 'post',
- },
- },
- },
- {
- id: 444,
- name: 'build:osx',
- started: '2018-05-18T05:32:20.655Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/444',
- retry_path: '/gitlab-org/gitlab-shell/-/jobs/444/retry',
- playable: false,
- created_at: '2018-05-18T15:32:54.364Z',
- updated_at: '2018-05-18T15:32:54.364Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/444',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/444/retry',
- method: 'post',
- },
- },
- },
- ],
- retried: [
- {
- id: 443,
- name: 'build:linux',
- started: '2018-05-18T06:32:20.655Z',
- build_path: '/gitlab-org/gitlab-shell/-/jobs/443',
- retry_path: '/gitlab-org/gitlab-shell/-/jobs/443/retry',
- playable: false,
- created_at: '2018-05-18T15:32:54.296Z',
- updated_at: '2018-05-18T15:32:54.296Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed (retried)',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/-/jobs/443',
- illustration: {
- image: 'illustrations/skipped-job_empty.svg',
- size: 'svg-430',
- title: 'This job does not have a trace.',
- },
- favicon:
- '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/gitlab-org/gitlab-shell/-/jobs/443/retry',
- method: 'post',
- },
- },
- },
- ],
- status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
- tooltip: 'running',
- has_details: true,
- details_path: '/gitlab-org/gitlab-shell/pipelines/27#build',
- illustration: null,
- favicon:
- '/assets/ci_favicons/favicon_status_running-9c635b2419a8e1ec991c993061b89cc5aefc0743bb238ecd0c381e7741a70e8c.png',
- },
- path: '/gitlab-org/gitlab-shell/pipelines/27#build',
- dropdown_path: '/gitlab-org/gitlab-shell/pipelines/27/stage.json?stage=build',
-};
+export { default } from '../../frontend/jobs/mock_data';
+export * from '../../frontend/jobs/mock_data';
diff --git a/spec/javascripts/jobs/store/actions_spec.js b/spec/javascripts/jobs/store/actions_spec.js
index c0e8dbf9b22..47257688bd5 100644
--- a/spec/javascripts/jobs/store/actions_spec.js
+++ b/spec/javascripts/jobs/store/actions_spec.js
@@ -15,6 +15,7 @@ import {
scrollBottom,
requestTrace,
fetchTrace,
+ startPollingTrace,
stopPollingTrace,
receiveTraceSuccess,
receiveTraceError,
@@ -241,6 +242,50 @@ describe('Job State actions', () => {
done,
);
});
+
+ describe('when job is incomplete', () => {
+ let tracePayload;
+
+ beforeEach(() => {
+ tracePayload = {
+ html: 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- :',
+ complete: false,
+ };
+
+ mock.onGet(`${TEST_HOST}/endpoint/trace.json`).replyOnce(200, tracePayload);
+ });
+
+ it('dispatches startPollingTrace', done => {
+ testAction(
+ fetchTrace,
+ null,
+ mockedState,
+ [],
+ [
+ { type: 'toggleScrollisInBottom', payload: true },
+ { type: 'receiveTraceSuccess', payload: tracePayload },
+ { type: 'startPollingTrace' },
+ ],
+ done,
+ );
+ });
+
+ it('does not dispatch startPollingTrace when timeout is non-empty', done => {
+ mockedState.traceTimeout = 1;
+
+ testAction(
+ fetchTrace,
+ null,
+ mockedState,
+ [],
+ [
+ { type: 'toggleScrollisInBottom', payload: true },
+ { type: 'receiveTraceSuccess', payload: tracePayload },
+ ],
+ done,
+ );
+ });
+ });
});
describe('error', () => {
@@ -265,16 +310,69 @@ describe('Job State actions', () => {
});
});
+ describe('startPollingTrace', () => {
+ let dispatch;
+ let commit;
+
+ beforeEach(() => {
+ jasmine.clock().install();
+
+ dispatch = jasmine.createSpy();
+ commit = jasmine.createSpy();
+
+ startPollingTrace({ dispatch, commit });
+ });
+
+ afterEach(() => {
+ jasmine.clock().uninstall();
+ });
+
+ it('should save the timeout id but not call fetchTrace', () => {
+ expect(commit).toHaveBeenCalledWith(types.SET_TRACE_TIMEOUT, 1);
+ expect(dispatch).not.toHaveBeenCalledWith('fetchTrace');
+ });
+
+ describe('after timeout has passed', () => {
+ beforeEach(() => {
+ jasmine.clock().tick(4000);
+ });
+
+ it('should clear the timeout id and fetchTrace', () => {
+ expect(commit).toHaveBeenCalledWith(types.SET_TRACE_TIMEOUT, 0);
+ expect(dispatch).toHaveBeenCalledWith('fetchTrace');
+ });
+ });
+ });
+
describe('stopPollingTrace', () => {
+ let origTimeout;
+
+ beforeEach(() => {
+ // Can't use spyOn(window, 'clearTimeout') because this caused unrelated specs to timeout
+ // https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23838#note_280277727
+ origTimeout = window.clearTimeout;
+ window.clearTimeout = jasmine.createSpy();
+ });
+
+ afterEach(() => {
+ window.clearTimeout = origTimeout;
+ });
+
it('should commit STOP_POLLING_TRACE mutation ', done => {
+ const traceTimeout = 7;
+
testAction(
stopPollingTrace,
null,
- mockedState,
- [{ type: types.STOP_POLLING_TRACE }],
+ { ...mockedState, traceTimeout },
+ [{ type: types.SET_TRACE_TIMEOUT, payload: 0 }, { type: types.STOP_POLLING_TRACE }],
[],
- done,
- );
+ )
+ .then(() => {
+ expect(window.clearTimeout).toHaveBeenCalledWith(traceTimeout);
+ })
+ .then(done)
+ .catch(done.fail);
});
});
@@ -292,15 +390,8 @@ describe('Job State actions', () => {
});
describe('receiveTraceError', () => {
- it('should commit RECEIVE_TRACE_ERROR mutation ', done => {
- testAction(
- receiveTraceError,
- null,
- mockedState,
- [{ type: types.RECEIVE_TRACE_ERROR }],
- [],
- done,
- );
+ it('should commit stop polling trace', done => {
+ testAction(receiveTraceError, null, mockedState, [], [{ type: 'stopPollingTrace' }], done);
});
});
diff --git a/spec/javascripts/merge_request_spec.js b/spec/javascripts/merge_request_spec.js
index dc61482fdf3..b6173b9b171 100644
--- a/spec/javascripts/merge_request_spec.js
+++ b/spec/javascripts/merge_request_spec.js
@@ -17,7 +17,7 @@ describe('MergeRequest', function() {
mock = new MockAdapter(axios);
mock
- .onPatch(`${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/merge_requests/1.json`)
+ .onPatch(`${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/-/merge_requests/1.json`)
.reply(200, {});
this.merge = new MergeRequest();
@@ -75,7 +75,7 @@ describe('MergeRequest', function() {
setTimeout(() => {
expect(axios.patch).toHaveBeenCalledWith(
- `${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/merge_requests/1.json`,
+ `${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/-/merge_requests/1.json`,
{
merge_request: {
description: '- [ ] Task List Item\n- [ ] \n- [ ] Task List Item 2\n',
@@ -93,7 +93,9 @@ describe('MergeRequest', function() {
// eslint-disable-next-line jasmine/no-disabled-tests
xit('shows an error notification when tasklist update failed', done => {
mock
- .onPatch(`${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/merge_requests/1.json`)
+ .onPatch(
+ `${gl.TEST_HOST}/frontend-fixtures/merge-requests-project/-/merge_requests/1.json`,
+ )
.reply(409, {});
$('.js-task-list-field').trigger({
diff --git a/spec/javascripts/merge_request_tabs_spec.js b/spec/javascripts/merge_request_tabs_spec.js
index 019aa191dc0..cbb61333d77 100644
--- a/spec/javascripts/merge_request_tabs_spec.js
+++ b/spec/javascripts/merge_request_tabs_spec.js
@@ -146,53 +146,53 @@ describe('MergeRequestTabs', function() {
it('changes from commits', function() {
setLocation({
- pathname: '/foo/bar/merge_requests/1/commits',
+ pathname: '/foo/bar/-/merge_requests/1/commits',
});
- expect(this.subject('show')).toBe('/foo/bar/merge_requests/1');
- expect(this.subject('diffs')).toBe('/foo/bar/merge_requests/1/diffs');
+ expect(this.subject('show')).toBe('/foo/bar/-/merge_requests/1');
+ expect(this.subject('diffs')).toBe('/foo/bar/-/merge_requests/1/diffs');
});
it('changes from diffs', function() {
setLocation({
- pathname: '/foo/bar/merge_requests/1/diffs',
+ pathname: '/foo/bar/-/merge_requests/1/diffs',
});
- expect(this.subject('show')).toBe('/foo/bar/merge_requests/1');
- expect(this.subject('commits')).toBe('/foo/bar/merge_requests/1/commits');
+ expect(this.subject('show')).toBe('/foo/bar/-/merge_requests/1');
+ expect(this.subject('commits')).toBe('/foo/bar/-/merge_requests/1/commits');
});
it('changes from diffs.html', function() {
setLocation({
- pathname: '/foo/bar/merge_requests/1/diffs.html',
+ pathname: '/foo/bar/-/merge_requests/1/diffs.html',
});
- expect(this.subject('show')).toBe('/foo/bar/merge_requests/1');
- expect(this.subject('commits')).toBe('/foo/bar/merge_requests/1/commits');
+ expect(this.subject('show')).toBe('/foo/bar/-/merge_requests/1');
+ expect(this.subject('commits')).toBe('/foo/bar/-/merge_requests/1/commits');
});
it('changes from notes', function() {
setLocation({
- pathname: '/foo/bar/merge_requests/1',
+ pathname: '/foo/bar/-/merge_requests/1',
});
- expect(this.subject('diffs')).toBe('/foo/bar/merge_requests/1/diffs');
- expect(this.subject('commits')).toBe('/foo/bar/merge_requests/1/commits');
+ expect(this.subject('diffs')).toBe('/foo/bar/-/merge_requests/1/diffs');
+ expect(this.subject('commits')).toBe('/foo/bar/-/merge_requests/1/commits');
});
it('includes search parameters and hash string', function() {
setLocation({
- pathname: '/foo/bar/merge_requests/1/diffs',
+ pathname: '/foo/bar/-/merge_requests/1/diffs',
search: '?view=parallel',
hash: '#L15-35',
});
- expect(this.subject('show')).toBe('/foo/bar/merge_requests/1?view=parallel#L15-35');
+ expect(this.subject('show')).toBe('/foo/bar/-/merge_requests/1?view=parallel#L15-35');
});
it('replaces the current history state', function() {
setLocation({
- pathname: '/foo/bar/merge_requests/1',
+ pathname: '/foo/bar/-/merge_requests/1',
});
const newState = this.subject('commits');
@@ -207,10 +207,10 @@ describe('MergeRequestTabs', function() {
it('treats "show" like "notes"', function() {
setLocation({
- pathname: '/foo/bar/merge_requests/1/commits',
+ pathname: '/foo/bar/-/merge_requests/1/commits',
});
- expect(this.subject('show')).toBe('/foo/bar/merge_requests/1');
+ expect(this.subject('show')).toBe('/foo/bar/-/merge_requests/1');
});
});
diff --git a/spec/javascripts/monitoring/components/dashboard_resize_spec.js b/spec/javascripts/monitoring/components/dashboard_resize_spec.js
index 46a6679da18..2422934f4b3 100644
--- a/spec/javascripts/monitoring/components/dashboard_resize_spec.js
+++ b/spec/javascripts/monitoring/components/dashboard_resize_spec.js
@@ -6,7 +6,7 @@ import * as types from '~/monitoring/stores/mutation_types';
import { createStore } from '~/monitoring/stores';
import axios from '~/lib/utils/axios_utils';
import {
- metricsGroupsAPIResponse,
+ metricsDashboardPayload,
mockedEmptyResult,
mockedQueryResultPayload,
mockedQueryResultPayloadCoresTotal,
@@ -30,7 +30,6 @@ const propsData = {
emptyNoDataSvgPath: '/path/to/no-data.svg',
emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
- environmentsEndpoint: '/root/hello-prometheus/environments/35',
currentEnvironmentName: 'production',
customMetricsAvailable: false,
customMetricsPath: '',
@@ -41,7 +40,7 @@ function setupComponentStore(component) {
// Load 2 panel groups
component.$store.commit(
`monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
- metricsGroupsAPIResponse,
+ metricsDashboardPayload,
);
// Load 3 panels to the dashboard, one with an empty result
@@ -98,7 +97,7 @@ describe('Dashboard', () => {
let panelToggle;
let chart;
beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
+ mock.onGet(mockApiEndpoint).reply(200, metricsDashboardPayload);
component = new DashboardComponent({
el: document.querySelector('.prometheus-graphs'),
diff --git a/spec/javascripts/notes/components/discussion_counter_spec.js b/spec/javascripts/notes/components/discussion_counter_spec.js
index de1fd87517a..9c7aed43a3b 100644
--- a/spec/javascripts/notes/components/discussion_counter_spec.js
+++ b/spec/javascripts/notes/components/discussion_counter_spec.js
@@ -7,6 +7,7 @@ import { noteableDataMock, discussionMock, notesDataMock } from '../mock_data';
describe('DiscussionCounter component', () => {
let store;
let vm;
+ const notes = { currentDiscussionId: null };
beforeEach(() => {
window.mrTabs = {};
@@ -25,7 +26,7 @@ describe('DiscussionCounter component', () => {
});
describe('methods', () => {
- describe('jumpToFirstUnresolvedDiscussion', () => {
+ describe('jumpToNextDiscussion', () => {
it('expands unresolved discussion', () => {
window.mrTabs.currentAction = 'show';
@@ -48,13 +49,14 @@ describe('DiscussionCounter component', () => {
store.replaceState({
...store.state,
discussions,
+ notes,
});
- vm.jumpToFirstUnresolvedDiscussion();
+ vm.jumpToNextDiscussion();
expect(vm.expandDiscussion).toHaveBeenCalledWith({ discussionId: firstDiscussionId });
});
- it('jumps to first unresolved discussion from diff tab if all diff discussions are resolved', () => {
+ it('jumps to next unresolved discussion from diff tab if all diff discussions are resolved', () => {
window.mrTabs.currentAction = 'diff';
spyOn(vm, 'switchToDiscussionsTabAndJumpTo').and.stub();
@@ -77,8 +79,9 @@ describe('DiscussionCounter component', () => {
store.replaceState({
...store.state,
discussions,
+ notes,
});
- vm.jumpToFirstUnresolvedDiscussion();
+ vm.jumpToNextDiscussion();
expect(vm.switchToDiscussionsTabAndJumpTo).toHaveBeenCalledWith(unresolvedId);
});
diff --git a/spec/javascripts/notes/components/note_actions_spec.js b/spec/javascripts/notes/components/note_actions_spec.js
index 259122597fb..5d13f587ca7 100644
--- a/spec/javascripts/notes/components/note_actions_spec.js
+++ b/spec/javascripts/notes/components/note_actions_spec.js
@@ -29,7 +29,7 @@ describe('noteActions', () => {
canAwardEmoji: true,
canReportAsAbuse: true,
noteId: '539',
- noteUrl: `${TEST_HOST}/group/project/merge_requests/1#note_1`,
+ noteUrl: `${TEST_HOST}/group/project/-/merge_requests/1#note_1`,
reportAbusePath: `${TEST_HOST}/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%3A3000%2Fgitlab-org%2Fgitlab-ce%2Fissues%2F7%23note_539&user_id=26`,
showReply: false,
};
diff --git a/spec/javascripts/notes/components/noteable_discussion_spec.js b/spec/javascripts/notes/components/noteable_discussion_spec.js
index 6efc6485b9c..ee84fd2b091 100644
--- a/spec/javascripts/notes/components/noteable_discussion_spec.js
+++ b/spec/javascripts/notes/components/noteable_discussion_spec.js
@@ -101,37 +101,6 @@ describe('noteable_discussion component', () => {
});
});
- describe('methods', () => {
- describe('jumpToNextDiscussion', () => {
- it('expands next unresolved thread', done => {
- const discussion2 = getJSONFixture(discussionWithTwoUnresolvedNotes)[0];
- discussion2.resolved = false;
- discussion2.active = true;
- discussion2.id = 'next'; // prepare this for being identified as next one (to be jumped to)
- store.dispatch('setInitialNotes', [discussionMock, discussion2]);
- window.mrTabs.currentAction = 'show';
-
- wrapper.vm
- .$nextTick()
- .then(() => {
- spyOn(wrapper.vm, 'expandDiscussion').and.stub();
-
- const nextDiscussionId = discussion2.id;
-
- setFixtures(`<div class="discussion" data-discussion-id="${nextDiscussionId}"></div>`);
-
- wrapper.vm.jumpToNextDiscussion();
-
- expect(wrapper.vm.expandDiscussion).toHaveBeenCalledWith({
- discussionId: nextDiscussionId,
- });
- })
- .then(done)
- .catch(done.fail);
- });
- });
- });
-
describe('for resolved thread', () => {
beforeEach(() => {
const discussion = getJSONFixture(discussionWithTwoUnresolvedNotes)[0];
diff --git a/spec/javascripts/pipelines/header_component_spec.js b/spec/javascripts/pipelines/header_component_spec.js
index 8c033447ce4..9043f30397d 100644
--- a/spec/javascripts/pipelines/header_component_spec.js
+++ b/spec/javascripts/pipelines/header_component_spec.js
@@ -8,6 +8,7 @@ describe('Pipeline details header', () => {
let props;
beforeEach(() => {
+ spyOn(eventHub, '$emit');
HeaderComponent = Vue.extend(headerComponent);
const threeWeeksAgo = new Date();
@@ -33,8 +34,9 @@ describe('Pipeline details header', () => {
email: 'foo@bar.com',
avatar_url: 'link',
},
- retry_path: 'path',
- delete_path: 'path',
+ retry_path: 'retry',
+ cancel_path: 'cancel',
+ delete_path: 'delete',
},
isLoading: false,
};
@@ -43,9 +45,14 @@ describe('Pipeline details header', () => {
});
afterEach(() => {
+ eventHub.$off();
vm.$destroy();
});
+ const findDeleteModal = () => document.getElementById(headerComponent.DELETE_MODAL_ID);
+ const findDeleteModalSubmit = () =>
+ [...findDeleteModal().querySelectorAll('.btn')].find(x => x.textContent === 'Delete pipeline');
+
it('should render provided pipeline info', () => {
expect(
vm.$el
@@ -56,22 +63,46 @@ describe('Pipeline details header', () => {
});
describe('action buttons', () => {
- it('should call postAction when retry button action is clicked', done => {
- eventHub.$on('headerPostAction', action => {
- expect(action.path).toEqual('path');
- done();
- });
+ it('should not trigger eventHub when nothing happens', () => {
+ expect(eventHub.$emit).not.toHaveBeenCalled();
+ });
+ it('should call postAction when retry button action is clicked', () => {
vm.$el.querySelector('.js-retry-button').click();
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'retry');
+ });
+
+ it('should call postAction when cancel button action is clicked', () => {
+ vm.$el.querySelector('.js-btn-cancel-pipeline').click();
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'cancel');
});
- it('should fire modal event when delete button action is clicked', done => {
- vm.$root.$on('bv::modal::show', action => {
- expect(action.componentId).toEqual('pipeline-delete-modal');
- done();
+ it('does not show delete modal', () => {
+ expect(findDeleteModal()).not.toBeVisible();
+ });
+
+ describe('when delete button action is clicked', () => {
+ beforeEach(done => {
+ vm.$el.querySelector('.js-btn-delete-pipeline').click();
+
+ // Modal needs two ticks to show
+ vm.$nextTick()
+ .then(() => vm.$nextTick())
+ .then(done)
+ .catch(done.fail);
});
- vm.$el.querySelector('.js-btn-delete-pipeline').click();
+ it('should show delete modal', () => {
+ expect(findDeleteModal()).toBeVisible();
+ });
+
+ it('should call delete when modal is submitted', () => {
+ findDeleteModalSubmit().click();
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('headerDeleteAction', 'delete');
+ });
});
});
});
diff --git a/spec/javascripts/releases/list/components/app_spec.js b/spec/javascripts/releases/components/app_index_spec.js
index de6208ab1fd..bcf062f357a 100644
--- a/spec/javascripts/releases/list/components/app_spec.js
+++ b/spec/javascripts/releases/components/app_index_spec.js
@@ -1,16 +1,17 @@
import _ from 'underscore';
import Vue from 'vue';
import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import app from '~/releases/list/components/app.vue';
-import createStore from '~/releases/list/store';
+import app from '~/releases/components/app_index.vue';
+import createStore from '~/releases/stores';
+import listModule from '~/releases/stores/modules/list';
import api from '~/api';
-import { resetStore } from '../store/helpers';
+import { resetStore } from '../stores/modules/list/helpers';
import {
pageInfoHeadersWithoutPagination,
pageInfoHeadersWithPagination,
release,
releases,
-} from '../../mock_data';
+} from '../mock_data';
describe('Releases App ', () => {
const Component = Vue.extend(app);
@@ -25,7 +26,7 @@ describe('Releases App ', () => {
};
beforeEach(() => {
- store = createStore();
+ store = createStore({ list: listModule });
releasesPagination = _.range(21).map(index => ({ ...release, tag_name: `${index}.00` }));
});
diff --git a/spec/javascripts/releases/list/store/actions_spec.js b/spec/javascripts/releases/stores/modules/list/actions_spec.js
index f03e019b95c..037c9d8d54a 100644
--- a/spec/javascripts/releases/list/store/actions_spec.js
+++ b/spec/javascripts/releases/stores/modules/list/actions_spec.js
@@ -4,12 +4,12 @@ import {
fetchReleases,
receiveReleasesSuccess,
receiveReleasesError,
-} from '~/releases/list/store/actions';
-import state from '~/releases/list/store/state';
-import * as types from '~/releases/list/store/mutation_types';
+} from '~/releases/stores/modules/list/actions';
+import state from '~/releases/stores/modules/list/state';
+import * as types from '~/releases/stores/modules/list/mutation_types';
import api from '~/api';
import { parseIntPagination } from '~/lib/utils/common_utils';
-import { pageInfoHeadersWithoutPagination, releases } from '../../mock_data';
+import { pageInfoHeadersWithoutPagination, releases } from '../../../mock_data';
describe('Releases State actions', () => {
let mockedState;
diff --git a/spec/javascripts/releases/list/store/helpers.js b/spec/javascripts/releases/stores/modules/list/helpers.js
index fbc89ec2148..435ca36047e 100644
--- a/spec/javascripts/releases/list/store/helpers.js
+++ b/spec/javascripts/releases/stores/modules/list/helpers.js
@@ -1,4 +1,4 @@
-import state from '~/releases/list/store/state';
+import state from '~/releases/stores/modules/list/state';
// eslint-disable-next-line import/prefer-default-export
export const resetStore = store => {
diff --git a/spec/javascripts/releases/list/store/mutations_spec.js b/spec/javascripts/releases/stores/modules/list/mutations_spec.js
index d756c69d53b..3035b916ff6 100644
--- a/spec/javascripts/releases/list/store/mutations_spec.js
+++ b/spec/javascripts/releases/stores/modules/list/mutations_spec.js
@@ -1,8 +1,8 @@
-import state from '~/releases/list/store/state';
-import mutations from '~/releases/list/store/mutations';
-import * as types from '~/releases/list/store/mutation_types';
+import state from '~/releases/stores/modules/list/state';
+import mutations from '~/releases/stores/modules/list/mutations';
+import * as types from '~/releases/stores/modules/list/mutation_types';
import { parseIntPagination } from '~/lib/utils/common_utils';
-import { pageInfoHeadersWithoutPagination, releases } from '../../mock_data';
+import { pageInfoHeadersWithoutPagination, releases } from '../../../mock_data';
describe('Releases Store Mutations', () => {
let stateCopy;
diff --git a/spec/javascripts/reports/components/grouped_test_reports_app_spec.js b/spec/javascripts/reports/components/grouped_test_reports_app_spec.js
index 1b006cdbd4e..154aa881d2d 100644
--- a/spec/javascripts/reports/components/grouped_test_reports_app_spec.js
+++ b/spec/javascripts/reports/components/grouped_test_reports_app_spec.js
@@ -5,6 +5,7 @@ import state from '~/reports/store/state';
import component from '~/reports/components/grouped_test_reports_app.vue';
import mountComponent from '../../helpers/vue_mount_component_helper';
import newFailedTestReports from '../mock_data/new_failures_report.json';
+import newErrorsTestReports from '../mock_data/new_errors_report.json';
import successTestReports from '../mock_data/no_failures_report.json';
import mixedResultsTestReports from '../mock_data/new_and_fixed_failures_report.json';
import resolvedFailures from '../mock_data/resolved_failures.json';
@@ -99,6 +100,34 @@ describe('Grouped Test Reports App', () => {
});
});
+ describe('with new error result', () => {
+ beforeEach(() => {
+ mock.onGet('test_results.json').reply(200, newErrorsTestReports, {});
+ vm = mountComponent(Component, {
+ endpoint: 'test_results.json',
+ });
+ });
+
+ it('renders error summary text + new badge', done => {
+ setTimeout(() => {
+ expect(vm.$el.querySelector('.gl-spinner')).toBeNull();
+ expect(vm.$el.querySelector('.js-code-text').textContent.trim()).toEqual(
+ 'Test summary contained 2 failed/error test results out of 11 total tests',
+ );
+
+ expect(vm.$el.textContent).toContain(
+ 'karma found 2 failed/error test results out of 3 total tests',
+ );
+
+ expect(vm.$el.textContent).toContain('New');
+ expect(vm.$el.textContent).toContain(
+ 'rspec:pg found no changed test results out of 8 total tests',
+ );
+ done();
+ }, 0);
+ });
+ });
+
describe('with mixed results', () => {
beforeEach(() => {
mock.onGet('test_results.json').reply(200, mixedResultsTestReports, {});
@@ -127,7 +156,7 @@ describe('Grouped Test Reports App', () => {
});
});
- describe('with resolved failures', () => {
+ describe('with resolved failures and resolved errors', () => {
beforeEach(() => {
mock.onGet('test_results.json').reply(200, resolvedFailures, {});
vm = mountComponent(Component, {
@@ -139,11 +168,11 @@ describe('Grouped Test Reports App', () => {
setTimeout(() => {
expect(vm.$el.querySelector('.gl-spinner')).toBeNull();
expect(vm.$el.querySelector('.js-code-text').textContent.trim()).toEqual(
- 'Test summary contained 2 fixed test results out of 11 total tests',
+ 'Test summary contained 4 fixed test results out of 11 total tests',
);
expect(vm.$el.textContent).toContain(
- 'rspec:pg found 2 fixed test results out of 8 total tests',
+ 'rspec:pg found 4 fixed test results out of 8 total tests',
);
done();
}, 0);
@@ -161,6 +190,19 @@ describe('Grouped Test Reports App', () => {
done();
}, 0);
});
+
+ it('renders resolved errors', done => {
+ setTimeout(() => {
+ expect(vm.$el.querySelector('.report-block-container').textContent).toContain(
+ resolvedFailures.suites[0].resolved_errors[0].name,
+ );
+
+ expect(vm.$el.querySelector('.report-block-container').textContent).toContain(
+ resolvedFailures.suites[0].resolved_errors[1].name,
+ );
+ done();
+ }, 0);
+ });
});
describe('with error', () => {
diff --git a/spec/javascripts/reports/components/modal_spec.js b/spec/javascripts/reports/components/modal_spec.js
index d42c509e5b5..ff046e64b6e 100644
--- a/spec/javascripts/reports/components/modal_spec.js
+++ b/spec/javascripts/reports/components/modal_spec.js
@@ -42,8 +42,8 @@ describe('Grouped Test Reports Modal', () => {
);
});
- it('renders miliseconds', () => {
- expect(vm.$el.textContent).toContain(`${modalDataStructure.execution_time.value} ms`);
+ it('renders seconds', () => {
+ expect(vm.$el.textContent).toContain(`${modalDataStructure.execution_time.value} s`);
});
it('render title', () => {
diff --git a/spec/javascripts/reports/mock_data/new_and_fixed_failures_report.json b/spec/javascripts/reports/mock_data/new_and_fixed_failures_report.json
index ceaf894375a..6141e5433a6 100644
--- a/spec/javascripts/reports/mock_data/new_and_fixed_failures_report.json
+++ b/spec/javascripts/reports/mock_data/new_and_fixed_failures_report.json
@@ -1 +1,55 @@
-{"status":"failed","summary":{"total":11,"resolved":2,"failed":2},"suites":[{"name":"rspec:pg","status":"failed","summary":{"total":8,"resolved":2,"failed":1},"new_failures":[{"status":"failed","name":"Test#subtract when a is 2 and b is 1 returns correct result","execution_time":0.00908,"system_output":"Failure/Error: is_expected.to eq(1)\n\n expected: 1\n got: 3\n\n (compared using ==)\n./spec/test_spec.rb:43:in `block (4 levels) in <top (required)>'"}],"resolved_failures":[{"status":"success","name":"Test#sum when a is 1 and b is 2 returns summary","execution_time":0.000318,"system_output":null},{"status":"success","name":"Test#sum when a is 100 and b is 200 returns summary","execution_time":0.000074,"system_output":null}],"existing_failures":[]},{"name":"java ant","status":"failed","summary":{"total":3,"resolved":0,"failed":1},"new_failures":[],"resolved_failures":[],"existing_failures":[{"status":"failed","name":"sumTest","execution_time":0.004,"system_output":"junit.framework.AssertionFailedError: expected:<3> but was:<-1>\n\tat CalculatorTest.sumTest(Unknown Source)\n\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n"}]}]} \ No newline at end of file
+{
+ "status": "failed",
+ "summary": { "total": 11, "resolved": 2, "errored": 0, "failed": 2 },
+ "suites": [
+ {
+ "name": "rspec:pg",
+ "status": "failed",
+ "summary": { "total": 8, "resolved": 2, "errored": 0, "failed": 1 },
+ "new_failures": [
+ {
+ "status": "failed",
+ "name": "Test#subtract when a is 2 and b is 1 returns correct result",
+ "execution_time": 0.00908,
+ "system_output": "Failure/Error: is_expected.to eq(1)\n\n expected: 1\n got: 3\n\n (compared using ==)\n./spec/test_spec.rb:43:in `block (4 levels) in <top (required)>'"
+ }
+ ],
+ "resolved_failures": [
+ {
+ "status": "success",
+ "name": "Test#sum when a is 1 and b is 2 returns summary",
+ "execution_time": 0.000318,
+ "system_output": null
+ },
+ {
+ "status": "success",
+ "name": "Test#sum when a is 100 and b is 200 returns summary",
+ "execution_time": 0.000074,
+ "system_output": null
+ }
+ ],
+ "existing_failures": [],
+ "new_errors": [],
+ "resolved_errors": [],
+ "existing_errors": []
+ },
+ {
+ "name": "java ant",
+ "status": "failed",
+ "summary": { "total": 3, "resolved": 0, "errored": 0, "failed": 1 },
+ "new_failures": [],
+ "resolved_failures": [],
+ "existing_failures": [
+ {
+ "status": "failed",
+ "name": "sumTest",
+ "execution_time": 0.004,
+ "system_output": "junit.framework.AssertionFailedError: expected:<3> but was:<-1>\n\tat CalculatorTest.sumTest(Unknown Source)\n\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n\tat java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n"
+ }
+ ],
+ "new_errors": [],
+ "resolved_errors": [],
+ "existing_errors": []
+ }
+ ]
+}
diff --git a/spec/javascripts/reports/mock_data/new_errors_report.json b/spec/javascripts/reports/mock_data/new_errors_report.json
new file mode 100644
index 00000000000..cebf98fdb63
--- /dev/null
+++ b/spec/javascripts/reports/mock_data/new_errors_report.json
@@ -0,0 +1,38 @@
+{
+ "summary": { "total": 11, "resolved": 0, "errored": 2, "failed": 0 },
+ "suites": [
+ {
+ "name": "rspec:pg",
+ "summary": { "total": 8, "resolved": 0, "errored": 0, "failed": 0 },
+ "new_failures": [],
+ "resolved_failures": [],
+ "existing_failures": [],
+ "new_errors": [],
+ "resolved_errors": [],
+ "existing_errors": []
+ },
+ {
+ "name": "karma",
+ "summary": { "total": 3, "resolved": 0, "errored": 2, "failed": 0 },
+ "new_failures": [],
+ "resolved_failures": [],
+ "existing_failures": [],
+ "new_errors": [
+ {
+ "result": "error",
+ "name": "Test#sum when a is 1 and b is 2 returns summary",
+ "execution_time": 0.009411,
+ "system_output": "Failed: Error in render: 'TypeError: Cannot read property 'status' of undefined'"
+ },
+ {
+ "result": "error",
+ "name": "Test#sum when a is 100 and b is 200 returns summary",
+ "execution_time": 0.000162,
+ "system_output": "Failed: Error in render: 'TypeError: Cannot read property 'length' of undefined'"
+ }
+ ],
+ "resolved_errors": [],
+ "existing_errors": []
+ }
+ ]
+}
diff --git a/spec/javascripts/reports/mock_data/new_failures_report.json b/spec/javascripts/reports/mock_data/new_failures_report.json
index 930efe16f65..8b9c12c6271 100644
--- a/spec/javascripts/reports/mock_data/new_failures_report.json
+++ b/spec/javascripts/reports/mock_data/new_failures_report.json
@@ -1 +1,38 @@
-{"summary":{"total":11,"resolved":0,"failed":2},"suites":[{"name":"rspec:pg","summary":{"total":8,"resolved":0,"failed":2},"new_failures":[{"result":"failure","name":"Test#sum when a is 1 and b is 2 returns summary","execution_time":0.009411,"system_output":"Failure/Error: is_expected.to eq(3)\n\n expected: 3\n got: -1\n\n (compared using ==)\n./spec/test_spec.rb:12:in `block (4 levels) in <top (required)>'"},{"result":"failure","name":"Test#sum when a is 100 and b is 200 returns summary","execution_time":0.000162,"system_output":"Failure/Error: is_expected.to eq(300)\n\n expected: 300\n got: -100\n\n (compared using ==)\n./spec/test_spec.rb:21:in `block (4 levels) in <top (required)>'"}],"resolved_failures":[],"existing_failures":[]},{"name":"java ant","summary":{"total":3,"resolved":0,"failed":0},"new_failures":[],"resolved_failures":[],"existing_failures":[]}]} \ No newline at end of file
+{
+ "summary": { "total": 11, "resolved": 0, "errored": 0, "failed": 2 },
+ "suites": [
+ {
+ "name": "rspec:pg",
+ "summary": { "total": 8, "resolved": 0, "errored": 0, "failed": 2 },
+ "new_failures": [
+ {
+ "result": "failure",
+ "name": "Test#sum when a is 1 and b is 2 returns summary",
+ "execution_time": 0.009411,
+ "system_output": "Failure/Error: is_expected.to eq(3)\n\n expected: 3\n got: -1\n\n (compared using ==)\n./spec/test_spec.rb:12:in `block (4 levels) in <top (required)>'"
+ },
+ {
+ "result": "failure",
+ "name": "Test#sum when a is 100 and b is 200 returns summary",
+ "execution_time": 0.000162,
+ "system_output": "Failure/Error: is_expected.to eq(300)\n\n expected: 300\n got: -100\n\n (compared using ==)\n./spec/test_spec.rb:21:in `block (4 levels) in <top (required)>'"
+ }
+ ],
+ "resolved_failures": [],
+ "existing_failures": [],
+ "new_errors": [],
+ "resolved_errors": [],
+ "existing_errors": []
+ },
+ {
+ "name": "java ant",
+ "summary": { "total": 3, "resolved": 0, "errored": 0, "failed": 0 },
+ "new_failures": [],
+ "resolved_failures": [],
+ "existing_failures": [],
+ "new_errors": [],
+ "resolved_errors": [],
+ "existing_errors": []
+ }
+ ]
+}
diff --git a/spec/javascripts/reports/mock_data/no_failures_report.json b/spec/javascripts/reports/mock_data/no_failures_report.json
index 6c0675ff7dc..7da9e0c6211 100644
--- a/spec/javascripts/reports/mock_data/no_failures_report.json
+++ b/spec/javascripts/reports/mock_data/no_failures_report.json
@@ -1 +1,28 @@
-{"status":"success","summary":{"total":11,"resolved":0,"failed":0},"suites":[{"name":"rspec:pg","status":"success","summary":{"total":8,"resolved":0,"failed":0},"new_failures":[],"resolved_failures":[],"existing_failures":[]},{"name":"java ant","status":"success","summary":{"total":3,"resolved":0,"failed":0},"new_failures":[],"resolved_failures":[],"existing_failures":[]}]} \ No newline at end of file
+{
+ "status": "success",
+ "summary": { "total": 11, "resolved": 0, "errored": 0, "failed": 0 },
+ "suites": [
+ {
+ "name": "rspec:pg",
+ "status": "success",
+ "summary": { "total": 8, "resolved": 0, "errored": 0, "failed": 0 },
+ "new_failures": [],
+ "resolved_failures": [],
+ "existing_failures": [],
+ "new_errors": [],
+ "resolved_errors": [],
+ "existing_errors": []
+ },
+ {
+ "name": "java ant",
+ "status": "success",
+ "summary": { "total": 3, "resolved": 0, "errored": 0, "failed": 0 },
+ "new_failures": [],
+ "resolved_failures": [],
+ "existing_failures": [],
+ "new_errors": [],
+ "resolved_errors": [],
+ "existing_errors": []
+ }
+ ]
+}
diff --git a/spec/javascripts/reports/mock_data/resolved_failures.json b/spec/javascripts/reports/mock_data/resolved_failures.json
index d1f347ce5e6..49de6aa840b 100644
--- a/spec/javascripts/reports/mock_data/resolved_failures.json
+++ b/spec/javascripts/reports/mock_data/resolved_failures.json
@@ -1,11 +1,11 @@
{
"status": "success",
- "summary": { "total": 11, "resolved": 2, "failed": 0 },
+ "summary": { "total": 11, "resolved": 4, "errored": 0, "failed": 0 },
"suites": [
{
"name": "rspec:pg",
"status": "success",
- "summary": { "total": 8, "resolved": 2, "failed": 0 },
+ "summary": { "total": 8, "resolved": 4, "errored": 0, "failed": 0 },
"new_failures": [],
"resolved_failures": [
{
@@ -23,15 +23,36 @@
"stack_trace": null
}
],
- "existing_failures": []
+ "existing_failures": [],
+ "new_errors": [],
+ "resolved_errors": [
+ {
+ "status": "success",
+ "name": "Test#sum when a is 4 and b is 4 returns summary",
+ "execution_time": 0.00342,
+ "system_output": null,
+ "stack_trace": null
+ },
+ {
+ "status": "success",
+ "name": "Test#sum when a is 40 and b is 400 returns summary",
+ "execution_time": 0.0000231,
+ "system_output": null,
+ "stack_trace": null
+ }
+ ],
+ "existing_errors": []
},
{
"name": "java ant",
"status": "success",
- "summary": { "total": 3, "resolved": 0, "failed": 0 },
+ "summary": { "total": 3, "resolved": 0, "errored": 0, "failed": 0 },
"new_failures": [],
"resolved_failures": [],
- "existing_failures": []
+ "existing_failures": [],
+ "new_errors": [],
+ "resolved_errors": [],
+ "existing_errors": []
}
]
}
diff --git a/spec/javascripts/search_autocomplete_spec.js b/spec/javascripts/search_autocomplete_spec.js
index 1798f9962e2..e9bc1fc51e8 100644
--- a/spec/javascripts/search_autocomplete_spec.js
+++ b/spec/javascripts/search_autocomplete_spec.js
@@ -18,11 +18,11 @@ describe('Search autocomplete dropdown', () => {
const projectIssuesPath = '/gitlab-org/gitlab-foss/issues';
- const projectMRsPath = '/gitlab-org/gitlab-foss/merge_requests';
+ const projectMRsPath = '/gitlab-org/gitlab-foss/-/merge_requests';
- const groupIssuesPath = '/groups/gitlab-org/issues';
+ const groupIssuesPath = '/groups/gitlab-org/-/issues';
- const groupMRsPath = '/groups/gitlab-org/merge_requests';
+ const groupMRsPath = '/groups/gitlab-org/-/merge_requests';
const projectName = 'GitLab Community Edition';
diff --git a/spec/javascripts/user_popovers_spec.js b/spec/javascripts/user_popovers_spec.js
index e2fc359644d..6ac22fca2d3 100644
--- a/spec/javascripts/user_popovers_spec.js
+++ b/spec/javascripts/user_popovers_spec.js
@@ -2,17 +2,22 @@ import initUserPopovers from '~/user_popovers';
import UsersCache from '~/lib/utils/users_cache';
describe('User Popovers', () => {
- const fixtureTemplate = 'merge_requests/diff_comment.html';
+ const fixtureTemplate = 'merge_requests/merge_request_with_mentions.html';
preloadFixtures(fixtureTemplate);
- const selector = '.js-user-link';
+ const selector = '.js-user-link, .gfm-project_member';
const dummyUser = { name: 'root' };
const dummyUserStatus = { message: 'active' };
+ let popovers;
+
const triggerEvent = (eventName, el) => {
- const event = document.createEvent('MouseEvents');
- event.initMouseEvent(eventName, true, true, window);
+ const event = new MouseEvent(eventName, {
+ bubbles: true,
+ cancelable: true,
+ view: window,
+ });
el.dispatchEvent(event);
};
@@ -26,46 +31,65 @@ describe('User Popovers', () => {
const userStatusCacheSpy = () => Promise.resolve(dummyUserStatus);
spyOn(UsersCache, 'retrieveStatusById').and.callFake(userId => userStatusCacheSpy(userId));
- initUserPopovers(document.querySelectorAll('.js-user-link'));
+ popovers = initUserPopovers(document.querySelectorAll(selector));
});
- it('Should Show+Hide Popover on mouseenter and mouseleave', done => {
- const targetLink = document.querySelector(selector);
- const { userId } = targetLink.dataset;
- triggerEvent('mouseenter', targetLink);
+ it('initializes a popover for each user link with a user id', () => {
+ const linksWithUsers = Array.from(document.querySelectorAll(selector)).filter(
+ ({ dataset }) => dataset.user || dataset.userId,
+ );
- setTimeout(() => {
- const shownPopover = document.querySelector('.popover');
+ expect(linksWithUsers.length).toBe(popovers.length);
+ });
- expect(shownPopover).not.toBeNull();
- expect(targetLink.getAttribute('aria-describedby')).not.toBeNull();
+ it('does not initialize the user popovers twice for the same element', () => {
+ const newPopovers = initUserPopovers(document.querySelectorAll(selector));
+ const samePopovers = popovers.every((popover, index) => newPopovers[index] === popover);
- expect(shownPopover.innerHTML).toContain(dummyUser.name);
- expect(UsersCache.retrieveById).toHaveBeenCalledWith(userId.toString());
+ expect(samePopovers).toBe(true);
+ });
- triggerEvent('mouseleave', targetLink);
+ describe('when user link emits mouseenter event', () => {
+ let userLink;
- setTimeout(() => {
- // After Mouse leave it should be hidden now
- expect(document.querySelector('.popover')).toBeNull();
- expect(targetLink.getAttribute('aria-describedby')).toBeNull();
- done();
- });
- }, 210); // We need to wait until the 200ms mouseover delay is over, only then the popover will be visible
- });
+ beforeEach(() => {
+ userLink = document.querySelector(selector);
- it('Should Not show a popover on short mouse over', done => {
- const targetLink = document.querySelector(selector);
- const { userId } = targetLink.dataset;
- triggerEvent('mouseenter', targetLink);
+ triggerEvent('mouseenter', userLink);
+ });
- setTimeout(() => {
- expect(document.querySelector('.popover')).toBeNull();
- expect(UsersCache.retrieveById).not.toHaveBeenCalledWith(userId.toString());
+ it('removes title attribute from user links', () => {
+ expect(userLink.getAttribute('title')).toBeFalsy();
+ expect(userLink.dataset.originalTitle).toBeFalsy();
+ });
- triggerEvent('mouseleave', targetLink);
+ it('populates popovers with preloaded user data', () => {
+ const { name, userId, username } = userLink.dataset;
+ const [firstPopover] = popovers;
+
+ expect(firstPopover.$props.user).toEqual(
+ jasmine.objectContaining({
+ name,
+ userId,
+ username,
+ }),
+ );
+ });
+
+ it('fetches user info and status from the user cache', () => {
+ const { userId } = userLink.dataset;
- done();
+ expect(UsersCache.retrieveById).toHaveBeenCalledWith(userId);
+ expect(UsersCache.retrieveStatusById).toHaveBeenCalledWith(userId);
});
});
+
+ it('removes aria-describedby attribute from the user link on mouseleave', () => {
+ const userLink = document.querySelector(selector);
+
+ userLink.setAttribute('aria-describedby', 'popover');
+ triggerEvent('mouseleave', userLink);
+
+ expect(userLink.getAttribute('aria-describedby')).toBe(null);
+ });
});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js
index 4f6451473e8..efccd507fe2 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js
@@ -25,7 +25,7 @@ describe('MRWidgetChecking', () => {
it('renders information about merging', () => {
expect(vm.$el.querySelector('.media-body').textContent.trim()).toEqual(
- 'Checking ability to merge automatically',
+ 'Checking ability to merge automatically…',
);
});
});
diff --git a/spec/javascripts/vue_mr_widget/mock_data.js b/spec/javascripts/vue_mr_widget/mock_data.js
index c7ca93c58cf..2eaba46cdce 100644
--- a/spec/javascripts/vue_mr_widget/mock_data.js
+++ b/spec/javascripts/vue_mr_widget/mock_data.js
@@ -16,6 +16,7 @@ export default {
updated_at: '2017-04-07T15:39:25.852Z',
time_estimate: 0,
total_time_spent: 0,
+ human_access: 'Maintainer',
human_time_estimate: null,
human_total_time_spent: null,
in_progress_merge_commit_sha: null,
@@ -34,6 +35,7 @@ export default {
target_branch: 'master',
target_project_id: 19,
target_project_full_path: '/group2/project2',
+ merge_request_add_ci_config_path: '/group2/project2/new/pipeline',
metrics: {
merged_by: {
name: 'Administrator',
@@ -146,7 +148,7 @@ export default {
},
merge_request: {
iid: 1,
- path: '/root/detached-merge-request-pipelines/merge_requests/1',
+ path: '/root/detached-merge-request-pipelines/-/merge_requests/1',
title: 'Update README.md',
source_branch: 'feature-1',
source_branch_path: '/root/detached-merge-request-pipelines/branches/feature-1',
@@ -196,7 +198,7 @@ export default {
can_be_merged: true,
has_ci: true,
ci_status: 'success',
- pipeline_status_path: '/root/acets-app/merge_requests/22/pipeline_status',
+ pipeline_status_path: '/root/acets-app/-/merge_requests/22/pipeline_status',
issues_links: {
closing: '',
mentioned_but_not_closing: '',
@@ -209,30 +211,30 @@ export default {
},
target_branch_path: '/root/acets-app/branches/master',
source_branch_path: '/root/acets-app/branches/daaaa',
- conflict_resolution_ui_path: '/root/acets-app/merge_requests/22/conflicts',
- remove_wip_path: '/root/acets-app/merge_requests/22/remove_wip',
- cancel_auto_merge_path: '/root/acets-app/merge_requests/22/cancel_auto_merge',
+ conflict_resolution_ui_path: '/root/acets-app/-/merge_requests/22/conflicts',
+ remove_wip_path: '/root/acets-app/-/merge_requests/22/remove_wip',
+ cancel_auto_merge_path: '/root/acets-app/-/merge_requests/22/cancel_auto_merge',
create_issue_to_resolve_discussions_path:
'/root/acets-app/issues/new?merge_request_to_resolve_discussions_of=22',
- merge_path: '/root/acets-app/merge_requests/22/merge',
+ merge_path: '/root/acets-app/-/merge_requests/22/merge',
cherry_pick_in_fork_path:
'/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+revert+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1',
revert_in_fork_path:
'/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+cherry-pick+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1',
- email_patches_path: '/root/acets-app/merge_requests/22.patch',
- plain_diff_path: '/root/acets-app/merge_requests/22.diff',
- merge_request_basic_path: '/root/acets-app/merge_requests/22.json?serializer=basic',
- merge_request_widget_path: '/root/acets-app/merge_requests/22/widget.json',
+ email_patches_path: '/root/acets-app/-/merge_requests/22.patch',
+ plain_diff_path: '/root/acets-app/-/merge_requests/22.diff',
+ merge_request_basic_path: '/root/acets-app/-/merge_requests/22.json?serializer=basic',
+ merge_request_widget_path: '/root/acets-app/-/merge_requests/22/widget.json',
merge_request_cached_widget_path: '/cached.json',
- merge_check_path: '/root/acets-app/merge_requests/22/merge_check',
- ci_environments_status_url: '/root/acets-app/merge_requests/22/ci_environments_status',
+ merge_check_path: '/root/acets-app/-/merge_requests/22/merge_check',
+ ci_environments_status_url: '/root/acets-app/-/merge_requests/22/ci_environments_status',
project_archived: false,
default_merge_commit_message_with_description:
"Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22",
default_squash_commit_message: 'Test squash commit message',
diverged_commits_count: 0,
only_allow_merge_if_pipeline_succeeds: false,
- commit_change_content_path: '/root/acets-app/merge_requests/22/commit_change_content',
+ commit_change_content_path: '/root/acets-app/-/merge_requests/22/commit_change_content',
merge_commit_path:
'http://localhost:3000/root/acets-app/commit/53027d060246c8f47e4a9310fb332aa52f221775',
troubleshooting_docs_path: 'help',
diff --git a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
index 296be43f793..35c1495be58 100644
--- a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
@@ -94,6 +94,61 @@ describe('mrWidgetOptions', () => {
});
});
+ describe('shouldSuggestPipelines', () => {
+ describe('given suggestPipeline feature flag is enabled', () => {
+ beforeEach(() => {
+ gon.features = { suggestPipeline: true };
+ vm = mountComponent(MrWidgetOptions, {
+ mrData: { ...mockData },
+ });
+ });
+
+ afterEach(() => {
+ gon.features = {};
+ });
+
+ it('should suggest pipelines when none exist', () => {
+ vm.mr.mergeRequestAddCiConfigPath = 'some/path';
+ vm.mr.hasCI = false;
+
+ expect(vm.shouldSuggestPipelines).toBeTruthy();
+ });
+
+ it('should not suggest pipelines when they exist', () => {
+ vm.mr.mergeRequestAddCiConfigPath = null;
+ vm.mr.hasCI = false;
+
+ expect(vm.shouldSuggestPipelines).toBeFalsy();
+ });
+
+ it('should not suggest pipelines when hasCI is true', () => {
+ vm.mr.mergeRequestAddCiConfigPath = 'some/path';
+ vm.mr.hasCI = true;
+
+ expect(vm.shouldSuggestPipelines).toBeFalsy();
+ });
+ });
+
+ describe('given suggestPipeline feature flag is not enabled', () => {
+ beforeEach(() => {
+ gon.features = { suggestPipeline: false };
+ vm = mountComponent(MrWidgetOptions, {
+ mrData: { ...mockData },
+ });
+ });
+
+ afterEach(() => {
+ gon.features = {};
+ });
+
+ it('should not suggest pipelines', () => {
+ vm.mr.mergeRequestAddCiConfigPath = null;
+
+ expect(vm.shouldSuggestPipelines).toBeFalsy();
+ });
+ });
+ });
+
describe('shouldRenderRelatedLinks', () => {
it('should return false for the initial data', () => {
expect(vm.shouldRenderRelatedLinks).toBeFalsy();
diff --git a/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js b/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js
index e2cd0f084fd..272f6cad5fc 100644
--- a/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js
+++ b/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js
@@ -83,4 +83,18 @@ describe('MergeRequestStore', () => {
});
});
});
+
+ describe('setPaths', () => {
+ it('should set the add ci config path', () => {
+ store.setData({ ...mockData });
+
+ expect(store.mergeRequestAddCiConfigPath).toEqual('/group2/project2/new/pipeline');
+ });
+
+ it('should set humanAccess=Maintainer when user has that role', () => {
+ store.setData({ ...mockData });
+
+ expect(store.humanAccess).toEqual('Maintainer');
+ });
+ });
});
diff --git a/spec/javascripts/vue_shared/components/bar_chart_spec.js b/spec/javascripts/vue_shared/components/bar_chart_spec.js
deleted file mode 100644
index 8f673c146ec..00000000000
--- a/spec/javascripts/vue_shared/components/bar_chart_spec.js
+++ /dev/null
@@ -1,79 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import BarChart from '~/vue_shared/components/bar_chart.vue';
-
-function getRandomArbitrary(min, max) {
- return Math.random() * (max - min) + min;
-}
-
-function generateRandomData(dataNumber) {
- const randomGraphData = [];
-
- for (let i = 1; i <= dataNumber; i += 1) {
- randomGraphData.push({
- name: `random ${i}`,
- value: parseInt(getRandomArbitrary(1, 8), 10),
- });
- }
-
- return randomGraphData;
-}
-
-describe('Bar chart component', () => {
- let barChart;
- const graphData = generateRandomData(10);
-
- beforeEach(() => {
- const BarChartComponent = Vue.extend(BarChart);
-
- barChart = mountComponent(BarChartComponent, {
- graphData,
- yAxisLabel: 'data',
- });
- });
-
- afterEach(() => {
- barChart.$destroy();
- });
-
- it('calculates the padding for even distribution across bars', () => {
- barChart.vbWidth = 1000;
- const result = barChart.calculatePadding(30);
-
- // since padding can't be higher than 1 and lower than 0
- // for more info: https://github.com/d3/d3-scale#band-scales
- expect(result).not.toBeLessThan(0);
- expect(result).not.toBeGreaterThan(1);
- });
-
- it('formats the tooltip title', () => {
- const tooltipTitle = barChart.setTooltipTitle(barChart.graphData[0]);
-
- expect(tooltipTitle).toContain('random 1:');
- });
-
- it('has a translates the bar graphs on across the X axis', () => {
- barChart.panX = 100;
-
- expect(barChart.barTranslationTransform).toEqual('translate(100, 0)');
- });
-
- it('translates the scroll indicator to the far right side', () => {
- barChart.vbWidth = 500;
-
- expect(barChart.scrollIndicatorTransform).toEqual('translate(420, 0)');
- });
-
- it('translates the x-axis to the bottom of the viewbox and pan coordinates', () => {
- barChart.panX = 100;
- barChart.vbHeight = 250;
-
- expect(barChart.xAxisLocation).toEqual('translate(100, 250)');
- });
-
- it('rotates the x axis labels a total of 90 degress (CCW)', () => {
- const xAxisLabel = barChart.$el.querySelector('.x-axis').querySelectorAll('text')[0];
-
- expect(xAxisLabel.getAttribute('transform')).toEqual('rotate(-90)');
- });
-});
diff --git a/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js b/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js
index e3f6609f128..e2a1ed931f1 100644
--- a/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js
+++ b/spec/javascripts/vue_shared/components/content_viewer/content_viewer_spec.js
@@ -58,14 +58,34 @@ describe('ContentViewer', () => {
it('renders fallback download control', done => {
createComponent({
- path: 'test.abc',
+ path: 'somepath/test.abc',
fileSize: 1024,
});
setTimeout(() => {
- expect(vm.$el.querySelector('.file-info').textContent.trim()).toContain('test.abc');
- expect(vm.$el.querySelector('.file-info').textContent.trim()).toContain('(1.00 KiB)');
- expect(vm.$el.querySelector('.btn.btn-default').textContent.trim()).toContain('Download');
+ expect(
+ vm.$el
+ .querySelector('.file-info')
+ .textContent.trim()
+ .replace(/\s+/, ' '),
+ ).toEqual('test.abc (1.00 KiB)');
+
+ expect(vm.$el.querySelector('.btn.btn-default').textContent.trim()).toEqual('Download');
+
+ done();
+ });
+ });
+
+ it('renders fallback download control for a file with a data URL path', done => {
+ createComponent({
+ path: 'data:application/octet-stream;base64,U0VMRUNUICfEhHNnc2cnIGZyb20gVGFibGVuYW1lOwoK',
+ filePath: 'somepath/test.abc',
+ });
+
+ setTimeout(() => {
+ expect(vm.$el.querySelector('.file-info').textContent.trim()).toEqual('test.abc');
+ expect(vm.$el.querySelector('.btn.btn-default')).toHaveAttr('download', 'test.abc');
+ expect(vm.$el.querySelector('.btn.btn-default').textContent.trim()).toEqual('Download');
done();
});
diff --git a/spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js b/spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js
index c743f1f6ad7..6a83790093a 100644
--- a/spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js
+++ b/spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js
@@ -38,11 +38,11 @@ describe('DiffViewer', () => {
setTimeout(() => {
expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(
- `//raw/DEF/${RED_BOX_IMAGE_URL}`,
+ `//-/raw/DEF/${RED_BOX_IMAGE_URL}`,
);
expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(
- `//raw/ABC/${GREEN_BOX_IMAGE_URL}`,
+ `//-/raw/ABC/${GREEN_BOX_IMAGE_URL}`,
);
done();
diff --git a/spec/javascripts/vue_shared/components/file_row_spec.js b/spec/javascripts/vue_shared/components/file_row_spec.js
index 2d80099fafe..11fcb9b89c1 100644
--- a/spec/javascripts/vue_shared/components/file_row_spec.js
+++ b/spec/javascripts/vue_shared/components/file_row_spec.js
@@ -1,7 +1,6 @@
import Vue from 'vue';
import { file } from 'spec/ide/helpers';
import FileRow from '~/vue_shared/components/file_row.vue';
-import FileRowExtra from '~/ide/components/file_row_extra.vue';
import mountComponent from '../../helpers/vue_mount_component_helper';
describe('File row component', () => {
@@ -17,10 +16,6 @@ describe('File row component', () => {
vm.$destroy();
});
- const findNewDropdown = () => vm.$el.querySelector('.ide-new-btn .dropdown');
- const findNewDropdownButton = () => vm.$el.querySelector('.ide-new-btn .dropdown button');
- const findFileRow = () => vm.$el.querySelector('.file-row');
-
it('renders name', () => {
createComponent({
file: file('t4'),
@@ -42,7 +37,7 @@ describe('File row component', () => {
});
spyOn(vm, '$emit').and.stub();
- vm.$el.querySelector('.file-row').click();
+ vm.$el.click();
expect(vm.$emit).toHaveBeenCalledWith('toggleTreeOpen', vm.file.path);
});
@@ -87,61 +82,6 @@ describe('File row component', () => {
level: 0,
});
- expect(vm.$el.querySelector('.js-file-row-header')).not.toBe(null);
- });
-
- describe('new dropdown', () => {
- beforeEach(() => {
- createComponent({
- file: file('t5'),
- level: 1,
- extraComponent: FileRowExtra,
- });
- });
-
- it('renders in extra component', () => {
- expect(findNewDropdown()).not.toBe(null);
- });
-
- it('is hidden at start', () => {
- expect(findNewDropdown()).not.toHaveClass('show');
- });
-
- it('is opened when button is clicked', done => {
- expect(vm.dropdownOpen).toBe(false);
- findNewDropdownButton().dispatchEvent(new Event('click'));
-
- vm.$nextTick()
- .then(() => {
- expect(vm.dropdownOpen).toBe(true);
- expect(findNewDropdown()).toHaveClass('show');
- })
- .then(done)
- .catch(done.fail);
- });
-
- describe('when opened', () => {
- beforeEach(() => {
- vm.dropdownOpen = true;
- });
-
- it('stays open when button triggers mouseout', () => {
- findNewDropdownButton().dispatchEvent(new Event('mouseout'));
-
- expect(vm.dropdownOpen).toBe(true);
- });
-
- it('stays open when button triggers mouseleave', () => {
- findNewDropdownButton().dispatchEvent(new Event('mouseleave'));
-
- expect(vm.dropdownOpen).toBe(true);
- });
-
- it('closes when row triggers mouseleave', () => {
- findFileRow().dispatchEvent(new Event('mouseleave'));
-
- expect(vm.dropdownOpen).toBe(false);
- });
- });
+ expect(vm.$el.classList).toContain('js-file-row-header');
});
});
diff --git a/spec/javascripts/vue_shared/components/header_ci_component_spec.js b/spec/javascripts/vue_shared/components/header_ci_component_spec.js
index ea2eed2886a..b1abc972e1d 100644
--- a/spec/javascripts/vue_shared/components/header_ci_component_spec.js
+++ b/spec/javascripts/vue_shared/components/header_ci_component_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import mountComponent, { mountComponentWithSlots } from 'spec/helpers/vue_mount_component_helper';
import headerCi from '~/vue_shared/components/header_ci_component.vue';
describe('Header CI Component', () => {
@@ -27,14 +27,6 @@ describe('Header CI Component', () => {
email: 'foo@bar.com',
avatar_url: 'link',
},
- actions: [
- {
- label: 'Retry',
- path: 'path',
- cssClass: 'btn',
- isLoading: false,
- },
- ],
hasSidebarButton: true,
};
});
@@ -43,6 +35,8 @@ describe('Header CI Component', () => {
vm.$destroy();
});
+ const findActionButtons = () => vm.$el.querySelector('.header-action-buttons');
+
describe('render', () => {
beforeEach(() => {
vm = mountComponent(HeaderCi, props);
@@ -68,24 +62,23 @@ describe('Header CI Component', () => {
expect(vm.$el.querySelector('.js-user-link').innerText.trim()).toContain(props.user.name);
});
- it('should render provided actions', () => {
- const btn = vm.$el.querySelector('.btn');
+ it('should render sidebar toggle button', () => {
+ expect(vm.$el.querySelector('.js-sidebar-build-toggle')).not.toBeNull();
+ });
- expect(btn.tagName).toEqual('BUTTON');
- expect(btn.textContent.trim()).toEqual(props.actions[0].label);
+ it('should not render header action buttons when empty', () => {
+ expect(findActionButtons()).toBeNull();
});
+ });
- it('should show loading icon', done => {
- vm.actions[0].isLoading = true;
+ describe('slot', () => {
+ it('should render header action buttons', () => {
+ vm = mountComponentWithSlots(HeaderCi, { props, slots: { default: 'Test Actions' } });
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.btn .gl-spinner').getAttribute('style')).toBeFalsy();
- done();
- });
- });
+ const buttons = findActionButtons();
- it('should render sidebar toggle button', () => {
- expect(vm.$el.querySelector('.js-sidebar-build-toggle')).not.toBeNull();
+ expect(buttons).not.toBeNull();
+ expect(buttons.textContent).toEqual('Test Actions');
});
});
diff --git a/spec/javascripts/vue_shared/components/pagination/graphql_pagination_spec.js b/spec/javascripts/vue_shared/components/pagination/graphql_pagination_spec.js
deleted file mode 100644
index 9e72a0e2480..00000000000
--- a/spec/javascripts/vue_shared/components/pagination/graphql_pagination_spec.js
+++ /dev/null
@@ -1,73 +0,0 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import GraphqlPagination from '~/vue_shared/components/pagination/graphql_pagination.vue';
-
-const localVue = createLocalVue();
-
-describe('Graphql Pagination component', () => {
- let wrapper;
- function factory({ hasNextPage = true, hasPreviousPage = true }) {
- wrapper = shallowMount(localVue.extend(GraphqlPagination), {
- propsData: {
- hasNextPage,
- hasPreviousPage,
- },
- localVue,
- });
- }
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('without previous page', () => {
- beforeEach(() => {
- factory({ hasPreviousPage: false });
- });
-
- it('renders disabled previous button', () => {
- expect(wrapper.find('.js-prev-btn').attributes().disabled).toEqual('true');
- });
- });
-
- describe('with previous page', () => {
- beforeEach(() => {
- factory({ hasPreviousPage: true });
- });
-
- it('renders enabled previous button', () => {
- expect(wrapper.find('.js-prev-btn').attributes().disabled).toEqual(undefined);
- });
-
- it('emits previousClicked on click', () => {
- wrapper.find('.js-prev-btn').vm.$emit('click');
-
- expect(wrapper.emitted().previousClicked.length).toBe(1);
- });
- });
-
- describe('without next page', () => {
- beforeEach(() => {
- factory({ hasNextPage: false });
- });
-
- it('renders disabled next button', () => {
- expect(wrapper.find('.js-next-btn').attributes().disabled).toEqual('true');
- });
- });
-
- describe('with next page', () => {
- beforeEach(() => {
- factory({ hasNextPage: true });
- });
-
- it('renders enabled next button', () => {
- expect(wrapper.find('.js-next-btn').attributes().disabled).toEqual(undefined);
- });
-
- it('emits nextClicked on click', () => {
- wrapper.find('.js-next-btn').vm.$emit('click');
-
- expect(wrapper.emitted().nextClicked.length).toBe(1);
- });
- });
-});
diff --git a/spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js b/spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js
index 2b60ea0fd74..178df54b465 100644
--- a/spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js
+++ b/spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import _ from 'underscore';
+import { head } from 'lodash';
import { GlSearchBoxByType, GlInfiniteScroll } from '@gitlab/ui';
import { mount, createLocalVue } from '@vue/test-utils';
@@ -99,9 +99,9 @@ describe('ProjectSelector component', () => {
it(`triggers a "projectClicked" event when a project is clicked`, () => {
spyOn(vm, '$emit');
- wrapper.find(ProjectListItem).vm.$emit('click', _.first(searchResults));
+ wrapper.find(ProjectListItem).vm.$emit('click', head(searchResults));
- expect(vm.$emit).toHaveBeenCalledWith('projectClicked', _.first(searchResults));
+ expect(vm.$emit).toHaveBeenCalledWith('projectClicked', head(searchResults));
});
it(`shows a "no results" message if showNoResultsMessage === true`, () => {
diff --git a/spec/javascripts/vue_shared/components/tooltip_on_truncate_spec.js b/spec/javascripts/vue_shared/components/tooltip_on_truncate_spec.js
index a8d39b7b5fe..5f432f2a1b5 100644
--- a/spec/javascripts/vue_shared/components/tooltip_on_truncate_spec.js
+++ b/spec/javascripts/vue_shared/components/tooltip_on_truncate_spec.js
@@ -1,149 +1,160 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { mount, shallowMount } from '@vue/test-utils';
import TooltipOnTruncate from '~/vue_shared/components/tooltip_on_truncate.vue';
-const TEST_TITLE = 'lorem-ipsum-dolar-sit-amit-consectur-adipiscing-elit-sed-do';
-const STYLE_TRUNCATED = 'display: inline-block; max-width: 20px;';
-const STYLE_NORMAL = 'display: inline-block; max-width: 1000px;';
+const TEXT_SHORT = 'lorem';
+const TEXT_LONG = 'lorem-ipsum-dolar-sit-amit-consectur-adipiscing-elit-sed-do';
-const localVue = createLocalVue();
+const TEXT_TRUNCATE = 'white-space: nowrap; overflow:hidden;';
+const STYLE_NORMAL = `${TEXT_TRUNCATE} display: inline-block; max-width: 1000px;`; // does not overflow
+const STYLE_OVERFLOWED = `${TEXT_TRUNCATE} display: inline-block; max-width: 50px;`; // overflows when text is long
const createElementWithStyle = (style, content) => `<a href="#" style="${style}">${content}</a>`;
describe('TooltipOnTruncate component', () => {
let wrapper;
+ let parent;
const createComponent = ({ propsData, ...options } = {}) => {
- wrapper = shallowMount(localVue.extend(TooltipOnTruncate), {
- localVue,
+ wrapper = shallowMount(TooltipOnTruncate, {
attachToDocument: true,
propsData: {
- title: TEST_TITLE,
...propsData,
},
+ attrs: {
+ style: STYLE_OVERFLOWED,
+ },
...options,
});
};
+ const createWrappedComponent = ({ propsData, ...options }) => {
+ // set a parent around the tested component
+ parent = mount(
+ {
+ props: {
+ title: { default: '' },
+ },
+ template: `
+ <TooltipOnTruncate :title="title" truncate-target="child" style="${STYLE_OVERFLOWED}">
+ <div>{{title}}</div>
+ </TooltipOnTruncate>
+ `,
+ components: {
+ TooltipOnTruncate,
+ },
+ },
+ {
+ propsData: { ...propsData },
+ attachToDocument: true,
+ ...options,
+ },
+ );
+
+ wrapper = parent.find(TooltipOnTruncate);
+ };
+
+ const hasTooltip = () => wrapper.classes('js-show-tooltip');
+
afterEach(() => {
wrapper.destroy();
});
- const hasTooltip = () => wrapper.classes('js-show-tooltip');
-
describe('with default target', () => {
- it('renders tooltip if truncated', done => {
+ it('renders tooltip if truncated', () => {
createComponent({
- attrs: {
- style: STYLE_TRUNCATED,
+ propsData: {
+ title: TEXT_LONG,
},
slots: {
- default: [TEST_TITLE],
+ default: [TEXT_LONG],
},
});
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(hasTooltip()).toBe(true);
- expect(wrapper.attributes('data-original-title')).toEqual(TEST_TITLE);
- expect(wrapper.attributes('data-placement')).toEqual('top');
- })
- .then(done)
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(hasTooltip()).toBe(true);
+ expect(wrapper.attributes('data-original-title')).toEqual(TEXT_LONG);
+ expect(wrapper.attributes('data-placement')).toEqual('top');
+ });
});
- it('does not render tooltip if normal', done => {
+ it('does not render tooltip if normal', () => {
createComponent({
- attrs: {
- style: STYLE_NORMAL,
+ propsData: {
+ title: TEXT_SHORT,
},
slots: {
- default: [TEST_TITLE],
+ default: [TEXT_SHORT],
},
});
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(hasTooltip()).toBe(false);
- })
- .then(done)
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(hasTooltip()).toBe(false);
+ });
});
});
describe('with child target', () => {
- it('renders tooltip if truncated', done => {
+ it('renders tooltip if truncated', () => {
createComponent({
attrs: {
style: STYLE_NORMAL,
},
propsData: {
+ title: TEXT_LONG,
truncateTarget: 'child',
},
slots: {
- default: createElementWithStyle(STYLE_TRUNCATED, TEST_TITLE),
+ default: createElementWithStyle(STYLE_OVERFLOWED, TEXT_LONG),
},
});
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(hasTooltip()).toBe(true);
- })
- .then(done)
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(hasTooltip()).toBe(true);
+ });
});
- it('does not render tooltip if normal', done => {
+ it('does not render tooltip if normal', () => {
createComponent({
propsData: {
truncateTarget: 'child',
},
slots: {
- default: createElementWithStyle(STYLE_NORMAL, TEST_TITLE),
+ default: createElementWithStyle(STYLE_NORMAL, TEXT_LONG),
},
});
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(hasTooltip()).toBe(false);
- })
- .then(done)
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(hasTooltip()).toBe(false);
+ });
});
});
describe('with fn target', () => {
- it('renders tooltip if truncated', done => {
+ it('renders tooltip if truncated', () => {
createComponent({
attrs: {
style: STYLE_NORMAL,
},
propsData: {
+ title: TEXT_LONG,
truncateTarget: el => el.childNodes[1],
},
slots: {
default: [
- createElementWithStyle('', TEST_TITLE),
- createElementWithStyle(STYLE_TRUNCATED, TEST_TITLE),
+ createElementWithStyle('', TEXT_LONG),
+ createElementWithStyle(STYLE_OVERFLOWED, TEXT_LONG),
],
},
});
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(hasTooltip()).toBe(true);
- })
- .then(done)
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(hasTooltip()).toBe(true);
+ });
});
});
describe('placement', () => {
- it('sets data-placement when tooltip is rendered', done => {
+ it('sets data-placement when tooltip is rendered', () => {
const placement = 'bottom';
createComponent({
@@ -151,21 +162,75 @@ describe('TooltipOnTruncate component', () => {
placement,
},
attrs: {
- style: STYLE_TRUNCATED,
+ style: STYLE_OVERFLOWED,
},
slots: {
- default: TEST_TITLE,
+ default: TEXT_LONG,
},
});
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(hasTooltip()).toBe(true);
- expect(wrapper.attributes('data-placement')).toEqual(placement);
- })
- .then(done)
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(hasTooltip()).toBe(true);
+ expect(wrapper.attributes('data-placement')).toEqual(placement);
+ });
+ });
+ });
+
+ describe('updates when title and slot content changes', () => {
+ describe('is initialized with a long text', () => {
+ beforeEach(() => {
+ createWrappedComponent({
+ propsData: { title: TEXT_LONG },
+ });
+ return parent.vm.$nextTick();
+ });
+
+ it('renders tooltip', () => {
+ expect(hasTooltip()).toBe(true);
+ expect(wrapper.attributes('data-original-title')).toEqual(TEXT_LONG);
+ expect(wrapper.attributes('data-placement')).toEqual('top');
+ });
+
+ it('does not render tooltip after updated to a short text', () => {
+ parent.setProps({
+ title: TEXT_SHORT,
+ });
+
+ return wrapper.vm
+ .$nextTick()
+ .then(() => wrapper.vm.$nextTick()) // wait 2 times to get an updated slot
+ .then(() => {
+ expect(hasTooltip()).toBe(false);
+ });
+ });
+ });
+
+ describe('is initialized with a short text', () => {
+ beforeEach(() => {
+ createWrappedComponent({
+ propsData: { title: TEXT_SHORT },
+ });
+ return wrapper.vm.$nextTick();
+ });
+
+ it('does not render tooltip', () => {
+ expect(hasTooltip()).toBe(false);
+ });
+
+ it('renders tooltip after updated to a long text', () => {
+ parent.setProps({
+ title: TEXT_LONG,
+ });
+
+ return wrapper.vm
+ .$nextTick()
+ .then(() => wrapper.vm.$nextTick()) // wait 2 times to get an updated slot
+ .then(() => {
+ expect(hasTooltip()).toBe(true);
+ expect(wrapper.attributes('data-original-title')).toEqual(TEXT_LONG);
+ expect(wrapper.attributes('data-placement')).toEqual('top');
+ });
+ });
});
});
});
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 81c4563feb6..9980f4d8e23 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -188,4 +188,46 @@ describe API::Helpers do
subject.track_event('my_event', category: nil)
end
end
+
+ describe '#order_options_with_tie_breaker' do
+ subject { Class.new.include(described_class).new.order_options_with_tie_breaker }
+
+ before do
+ allow_any_instance_of(described_class).to receive(:params).and_return(params)
+ end
+
+ context 'with non-id order given' do
+ context 'with ascending order' do
+ let(:params) { { order_by: 'name', sort: 'asc' } }
+
+ it 'adds id based ordering with same direction as primary order' do
+ is_expected.to eq({ 'name' => 'asc', 'id' => 'asc' })
+ end
+ end
+
+ context 'with descending order' do
+ let(:params) { { order_by: 'name', sort: 'desc' } }
+
+ it 'adds id based ordering with same direction as primary order' do
+ is_expected.to eq({ 'name' => 'desc', 'id' => 'desc' })
+ end
+ end
+ end
+
+ context 'with non-id order but no direction given' do
+ let(:params) { { order_by: 'name' } }
+
+ it 'adds ID ASC order' do
+ is_expected.to eq({ 'name' => nil, 'id' => 'asc' })
+ end
+ end
+
+ context 'with id order given' do
+ let(:params) { { order_by: 'id', sort: 'asc' } }
+
+ it 'does not add an additional order' do
+ is_expected.to eq({ 'id' => 'asc' })
+ end
+ end
+ end
end
diff --git a/spec/lib/backup/repository_spec.rb b/spec/lib/backup/repository_spec.rb
index 32e718d4b3b..2ac1b0d2583 100644
--- a/spec/lib/backup/repository_spec.rb
+++ b/spec/lib/backup/repository_spec.rb
@@ -86,6 +86,22 @@ describe Backup::Repository do
expect(pool_repository.object_pool.exists?).to be(true)
end
end
+
+ it 'cleans existing repositories' do
+ wiki_repository_spy = spy(:wiki)
+
+ allow_next_instance_of(ProjectWiki) do |project_wiki|
+ allow(project_wiki).to receive(:repository).and_return(wiki_repository_spy)
+ end
+
+ expect_next_instance_of(Repository) do |repo|
+ expect(repo).to receive(:remove)
+ end
+
+ subject.restore
+
+ expect(wiki_repository_spy).to have_received(:remove)
+ end
end
describe '#empty_repo?' do
diff --git a/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb b/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb
index a82b890be42..5cfb0e6e6f7 100644
--- a/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb
@@ -229,10 +229,10 @@ describe Banzai::Filter::CommitRangeReferenceFilter do
end
it 'ignores invalid commit IDs on the referenced project' do
- exp = act = "Fixed #{project2.to_reference}@#{commit1.id.reverse}...#{commit2.id}"
+ exp = act = "Fixed #{project2.to_reference_base}@#{commit1.id.reverse}...#{commit2.id}"
expect(reference_filter(act).to_html).to eq exp
- exp = act = "Fixed #{project2.to_reference}@#{commit1.id}...#{commit2.id.reverse}"
+ exp = act = "Fixed #{project2.to_reference_base}@#{commit1.id}...#{commit2.id.reverse}"
expect(reference_filter(act).to_html).to eq exp
end
end
diff --git a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
index 9d179ef2a49..1580177eaad 100644
--- a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
+++ b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
@@ -100,19 +100,4 @@ describe Banzai::Filter::GollumTagsFilter do
expect(doc.at_css('code').text).to eq '[[link-in-backticks]]'
end
end
-
- context 'table of contents' do
- it 'replaces [[<em>TOC</em>]] with ToC result' do
- doc = described_class.call("<p>[[<em>TOC</em>]]</p>", { project_wiki: project_wiki }, { toc: "FOO" })
-
- expect(doc.to_html).to eq("FOO")
- end
-
- it 'handles an empty ToC result' do
- input = "<p>[[<em>TOC</em>]]</p>"
- doc = described_class.call(input, project_wiki: project_wiki)
-
- expect(doc.to_html).to eq ''
- end
- end
end
diff --git a/spec/lib/banzai/filter/label_reference_filter_spec.rb b/spec/lib/banzai/filter/label_reference_filter_spec.rb
index 66af26bc51c..82df5064896 100644
--- a/spec/lib/banzai/filter/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/label_reference_filter_spec.rb
@@ -369,7 +369,7 @@ describe Banzai::Filter::LabelReferenceFilter do
end
context 'with project reference' do
- let(:reference) { "#{project.to_reference}#{group_label.to_reference(format: :name)}" }
+ let(:reference) { "#{project.to_reference_base}#{group_label.to_reference(format: :name)}" }
it 'links to a valid reference' do
doc = reference_filter("See #{reference}", project: project)
@@ -385,7 +385,7 @@ describe Banzai::Filter::LabelReferenceFilter do
end
it 'ignores invalid label names' do
- exp = act = %(Label #{project.to_reference}#{Label.reference_prefix}"#{group_label.name.reverse}")
+ exp = act = %(Label #{project.to_reference_base}#{Label.reference_prefix}"#{group_label.name.reverse}")
expect(reference_filter(act).to_html).to eq exp
end
diff --git a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
index 2fe8c9074df..0c8413adcba 100644
--- a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
@@ -367,15 +367,17 @@ describe Banzai::Filter::MilestoneReferenceFilter do
expect(doc.css('a').first.text).to eq(urls.milestone_url(milestone))
end
- it 'does not support cross-project references' do
+ it 'does not support cross-project references', :aggregate_failures do
another_group = create(:group)
another_project = create(:project, :public, group: group)
- project_reference = another_project.to_reference(project)
+ project_reference = another_project.to_reference_base(project)
+ input_text = "See #{project_reference}#{reference}"
milestone.update!(group: another_group)
- doc = reference_filter("See #{project_reference}#{reference}")
+ doc = reference_filter(input_text)
+ expect(input_text).to match(Milestone.reference_pattern)
expect(doc.css('a')).to be_empty
end
diff --git a/spec/lib/banzai/filter/project_reference_filter_spec.rb b/spec/lib/banzai/filter/project_reference_filter_spec.rb
index d0b4542d503..a054b79ec03 100644
--- a/spec/lib/banzai/filter/project_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/project_reference_filter_spec.rb
@@ -10,7 +10,7 @@ describe Banzai::Filter::ProjectReferenceFilter do
end
def get_reference(project)
- project.to_reference_with_postfix
+ project.to_reference
end
let(:project) { create(:project, :public) }
diff --git a/spec/lib/banzai/filter/repository_link_filter_spec.rb b/spec/lib/banzai/filter/repository_link_filter_spec.rb
index c87f452a3df..cf73c77ecb8 100644
--- a/spec/lib/banzai/filter/repository_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/repository_link_filter_spec.rb
@@ -152,11 +152,11 @@ describe Banzai::Filter::RepositoryLinkFilter do
it 'rebuilds absolute URL for a file in the repo' do
doc = filter(link('/doc/api/README.md'))
expect(doc.at_css('a')['href'])
- .to eq "/#{project_path}/blob/#{ref}/doc/api/README.md"
+ .to eq "/#{project_path}/-/blob/#{ref}/doc/api/README.md"
end
it 'does not modify relative URLs in system notes' do
- path = "#{project_path}/merge_requests/1/diffs"
+ path = "#{project_path}/-/merge_requests/1/diffs"
doc = filter(link(path), system_note: true)
expect(doc.at_css('a')['href']).to eq path
@@ -170,13 +170,13 @@ describe Banzai::Filter::RepositoryLinkFilter do
it 'rebuilds relative URL for a file in the repo' do
doc = filter(link('doc/api/README.md'))
expect(doc.at_css('a')['href'])
- .to eq "/#{project_path}/blob/#{ref}/doc/api/README.md"
+ .to eq "/#{project_path}/-/blob/#{ref}/doc/api/README.md"
end
it 'rebuilds relative URL for a file in the repo with leading ./' do
doc = filter(link('./doc/api/README.md'))
expect(doc.at_css('a')['href'])
- .to eq "/#{project_path}/blob/#{ref}/doc/api/README.md"
+ .to eq "/#{project_path}/-/blob/#{ref}/doc/api/README.md"
end
it 'rebuilds relative URL for a file in the repo up one directory' do
@@ -184,7 +184,7 @@ describe Banzai::Filter::RepositoryLinkFilter do
doc = filter(relative_link, requested_path: 'doc/update/7.14-to-8.0.md')
expect(doc.at_css('a')['href'])
- .to eq "/#{project_path}/blob/#{ref}/doc/api/README.md"
+ .to eq "/#{project_path}/-/blob/#{ref}/doc/api/README.md"
end
it 'rebuilds relative URL for a file in the repo up multiple directories' do
@@ -192,7 +192,7 @@ describe Banzai::Filter::RepositoryLinkFilter do
doc = filter(relative_link, requested_path: 'doc/foo/bar/baz/README.md')
expect(doc.at_css('a')['href'])
- .to eq "/#{project_path}/blob/#{ref}/doc/api/README.md"
+ .to eq "/#{project_path}/-/blob/#{ref}/doc/api/README.md"
end
it 'rebuilds relative URL for a file in the repository root' do
@@ -200,47 +200,47 @@ describe Banzai::Filter::RepositoryLinkFilter do
doc = filter(relative_link, requested_path: 'doc/some-file.md')
expect(doc.at_css('a')['href'])
- .to eq "/#{project_path}/blob/#{ref}/README.md"
+ .to eq "/#{project_path}/-/blob/#{ref}/README.md"
end
it 'rebuilds relative URL for a file in the repo with an anchor' do
doc = filter(link('README.md#section'))
expect(doc.at_css('a')['href'])
- .to eq "/#{project_path}/blob/#{ref}/README.md#section"
+ .to eq "/#{project_path}/-/blob/#{ref}/README.md#section"
end
it 'rebuilds relative URL for a directory in the repo' do
doc = filter(link('doc/api/'))
expect(doc.at_css('a')['href'])
- .to eq "/#{project_path}/tree/#{ref}/doc/api"
+ .to eq "/#{project_path}/-/tree/#{ref}/doc/api"
end
it 'rebuilds relative URL for an image in the repo' do
doc = filter(image('files/images/logo-black.png'))
expect(doc.at_css('img')['src'])
- .to eq "/#{project_path}/raw/#{ref}/files/images/logo-black.png"
+ .to eq "/#{project_path}/-/raw/#{ref}/files/images/logo-black.png"
end
it 'rebuilds relative URL for link to an image in the repo' do
doc = filter(link('files/images/logo-black.png'))
expect(doc.at_css('a')['href'])
- .to eq "/#{project_path}/raw/#{ref}/files/images/logo-black.png"
+ .to eq "/#{project_path}/-/raw/#{ref}/files/images/logo-black.png"
end
it 'rebuilds relative URL for a video in the repo' do
doc = filter(video('files/videos/intro.mp4'), commit: project.commit('video'), ref: 'video')
expect(doc.at_css('video')['src'])
- .to eq "/#{project_path}/raw/video/files/videos/intro.mp4"
+ .to eq "/#{project_path}/-/raw/video/files/videos/intro.mp4"
end
it 'rebuilds relative URL for audio in the repo' do
doc = filter(audio('files/audio/sample.wav'), commit: project.commit('audio'), ref: 'audio')
expect(doc.at_css('audio')['src'])
- .to eq "/#{project_path}/raw/audio/files/audio/sample.wav"
+ .to eq "/#{project_path}/-/raw/audio/files/audio/sample.wav"
end
it 'does not modify relative URL with an anchor only' do
@@ -267,7 +267,7 @@ describe Banzai::Filter::RepositoryLinkFilter do
allow_any_instance_of(described_class).to receive(:uri_type).and_return(:raw)
doc = filter(image(escaped))
- expect(doc.at_css('img')['src']).to eq "/#{project_path}/raw/#{Addressable::URI.escape(ref)}/#{escaped}"
+ expect(doc.at_css('img')['src']).to eq "/#{project_path}/-/raw/#{Addressable::URI.escape(ref)}/#{escaped}"
end
context 'when requested path is a file in the repo' do
@@ -275,7 +275,7 @@ describe Banzai::Filter::RepositoryLinkFilter do
it 'rebuilds URL relative to the containing directory' do
doc = filter(link('users.md'))
- expect(doc.at_css('a')['href']).to eq "/#{project_path}/blob/#{Addressable::URI.escape(ref)}/doc/api/users.md"
+ expect(doc.at_css('a')['href']).to eq "/#{project_path}/-/blob/#{Addressable::URI.escape(ref)}/doc/api/users.md"
end
end
@@ -284,7 +284,7 @@ describe Banzai::Filter::RepositoryLinkFilter do
it 'rebuilds URL relative to the directory' do
doc = filter(link('users.md'))
- expect(doc.at_css('a')['href']).to eq "/#{project_path}/blob/#{Addressable::URI.escape(ref)}/doc/api/users.md"
+ expect(doc.at_css('a')['href']).to eq "/#{project_path}/-/blob/#{Addressable::URI.escape(ref)}/doc/api/users.md"
end
end
@@ -295,7 +295,7 @@ describe Banzai::Filter::RepositoryLinkFilter do
it 'correctly escapes the ref' do
doc = filter(link('.gitkeep'))
- expect(doc.at_css('a')['href']).to eq "/#{project_path}/blob/#{Addressable::URI.escape(ref)}/foo/bar/.gitkeep"
+ expect(doc.at_css('a')['href']).to eq "/#{project_path}/-/blob/#{Addressable::URI.escape(ref)}/foo/bar/.gitkeep"
end
end
@@ -314,7 +314,7 @@ describe Banzai::Filter::RepositoryLinkFilter do
doc = filter(link(path))
expect(doc.at_css('a')['href'])
- .to eq "/#{project_path}/raw/#{ref_escaped}/files/images/logo-black.png"
+ .to eq "/#{project_path}/-/raw/#{ref_escaped}/files/images/logo-black.png"
end
end
@@ -325,7 +325,7 @@ describe Banzai::Filter::RepositoryLinkFilter do
it 'does not escape the space twice' do
doc = filter(link('README.md'))
- expect(doc.at_css('a')['href']).to eq "/#{project_path}/blob/#{Addressable::URI.escape(ref)}/with%20space/README.md"
+ expect(doc.at_css('a')['href']).to eq "/#{project_path}/-/blob/#{Addressable::URI.escape(ref)}/with%20space/README.md"
end
end
end
diff --git a/spec/lib/banzai/filter/table_of_contents_tag_filter_spec.rb b/spec/lib/banzai/filter/table_of_contents_tag_filter_spec.rb
new file mode 100644
index 00000000000..20f32d7347d
--- /dev/null
+++ b/spec/lib/banzai/filter/table_of_contents_tag_filter_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Banzai::Filter::TableOfContentsTagFilter do
+ include FilterSpecHelper
+
+ context 'table of contents' do
+ let(:html) { '<p>[[<em>TOC</em>]]</p>' }
+
+ it 'replaces [[<em>TOC</em>]] with ToC result' do
+ doc = filter(html, {}, { toc: "FOO" })
+
+ expect(doc.to_html).to eq("FOO")
+ end
+
+ it 'handles an empty ToC result' do
+ doc = filter(html)
+
+ expect(doc.to_html).to eq ''
+ end
+ end
+end
diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
index f63b86d1451..4fa39da3eb4 100644
--- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
@@ -99,4 +99,35 @@ describe Banzai::Pipeline::FullPipeline do
end
end
end
+
+ describe 'table of contents' do
+ let(:project) { create(:project, :public) }
+ let(:markdown) do
+ <<-MARKDOWN.strip_heredoc
+ [[_TOC_]]
+
+ # Header
+ MARKDOWN
+ end
+ let(:invalid_markdown) do
+ <<-MARKDOWN.strip_heredoc
+ test [[_TOC_]]
+
+ # Header
+ MARKDOWN
+ end
+
+ it 'inserts a table of contents' do
+ output = described_class.to_html(markdown, project: project)
+
+ expect(output).to include("<ul class=\"section-nav\">")
+ expect(output).to include("<li><a href=\"#header\">Header</a></li>")
+ end
+
+ it 'does not insert a table of contents' do
+ output = described_class.to_html(invalid_markdown, project: project)
+
+ expect(output).to include("test [[<em>TOC</em>]]")
+ end
+ end
end
diff --git a/spec/lib/container_registry/client_spec.rb b/spec/lib/container_registry/client_spec.rb
index a493b96b1e4..5d2334a6d8f 100644
--- a/spec/lib/container_registry/client_spec.rb
+++ b/spec/lib/container_registry/client_spec.rb
@@ -146,4 +146,57 @@ describe ContainerRegistry::Client do
expect(subject).to eq 'sha256:123'
end
end
+
+ describe '#delete_repository_tag_by_name' do
+ subject { client.delete_repository_tag_by_name('group/test', 'a') }
+
+ context 'when the tag exists' do
+ before do
+ stub_request(:delete, "http://container-registry/v2/group/test/tags/reference/a")
+ .to_return(status: 200, body: "")
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when the tag does not exist' do
+ before do
+ stub_request(:delete, "http://container-registry/v2/group/test/tags/reference/a")
+ .to_return(status: 404, body: "")
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when an error occurs' do
+ before do
+ stub_request(:delete, "http://container-registry/v2/group/test/tags/reference/a")
+ .to_return(status: 500, body: "")
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#supports_tag_delete?' do
+ subject { client.supports_tag_delete? }
+
+ context 'when the server supports tag deletion' do
+ before do
+ stub_request(:options, "http://container-registry/v2/name/tags/reference/tag")
+ .to_return(status: 200, body: "", headers: { 'Allow' => 'DELETE' })
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when the server does not support tag deletion' do
+ before do
+ stub_request(:options, "http://container-registry/v2/name/tags/reference/tag")
+ .to_return(status: 404, body: "")
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
diff --git a/spec/lib/container_registry/registry_spec.rb b/spec/lib/container_registry/registry_spec.rb
index 7cf70a1f562..e509566fae8 100644
--- a/spec/lib/container_registry/registry_spec.rb
+++ b/spec/lib/container_registry/registry_spec.rb
@@ -14,7 +14,7 @@ describe ContainerRegistry::Registry do
it { expect(subject).not_to be_nil }
- context '#path' do
+ describe '#path' do
subject { registry.path }
context 'path from URL' do
diff --git a/spec/lib/container_registry/tag_spec.rb b/spec/lib/container_registry/tag_spec.rb
index 9447112e4a8..085c73caa97 100644
--- a/spec/lib/container_registry/tag_spec.rb
+++ b/spec/lib/container_registry/tag_spec.rb
@@ -70,26 +70,26 @@ describe ContainerRegistry::Tag do
headers: { 'Content-Type' => 'application/vnd.docker.distribution.manifest.v1+prettyjws' })
end
- context '#layers' do
+ describe '#layers' do
subject { tag.layers }
it { expect(subject.length).to eq(1) }
end
- context '#total_size' do
+ describe '#total_size' do
subject { tag.total_size }
it { is_expected.to be_nil }
end
context 'config processing' do
- context '#config' do
+ describe '#config' do
subject { tag.config }
it { is_expected.to be_nil }
end
- context '#created_at' do
+ describe '#created_at' do
subject { tag.created_at }
it { is_expected.to be_nil }
@@ -113,7 +113,7 @@ describe ContainerRegistry::Tag do
body: File.read(Rails.root + 'spec/fixtures/container_registry/config_blob_helm.json'))
end
- context '#created_at' do
+ describe '#created_at' do
subject { tag.created_at }
it { is_expected.to be_nil }
@@ -130,13 +130,13 @@ describe ContainerRegistry::Tag do
headers: { 'Content-Type' => 'application/vnd.docker.distribution.manifest.v2+json' })
end
- context '#layers' do
+ describe '#layers' do
subject { tag.layers }
it { expect(subject.length).to eq(1) }
end
- context '#total_size' do
+ describe '#total_size' do
subject { tag.total_size }
it { is_expected.to eq(2319870) }
@@ -144,13 +144,13 @@ describe ContainerRegistry::Tag do
context 'config processing' do
shared_examples 'a processable' do
- context '#config' do
+ describe '#config' do
subject { tag.config }
it { is_expected.not_to be_nil }
end
- context '#created_at' do
+ describe '#created_at' do
subject { tag.created_at }
it { is_expected.not_to be_nil }
diff --git a/spec/lib/extracts_path_spec.rb b/spec/lib/extracts_path_spec.rb
index 861ef79b2f8..ef9929a9dce 100644
--- a/spec/lib/extracts_path_spec.rb
+++ b/spec/lib/extracts_path_spec.rb
@@ -31,7 +31,7 @@ describe ExtractsPath do
it "log tree path has no escape sequences" do
assign_ref_vars
- expect(@logs_path).to eq("/#{@project.full_path}/refs/#{ref}/logs_tree/files/ruby/popen.rb")
+ expect(@logs_path).to eq("/#{@project.full_path}/-/refs/#{ref}/logs_tree/files/ruby/popen.rb")
end
context 'ref contains %20' do
diff --git a/spec/lib/feature/gitaly_spec.rb b/spec/lib/feature/gitaly_spec.rb
index 4e07acf9c1a..afb522d05e1 100644
--- a/spec/lib/feature/gitaly_spec.rb
+++ b/spec/lib/feature/gitaly_spec.rb
@@ -5,10 +5,6 @@ require 'spec_helper'
describe Feature::Gitaly do
let(:feature_flag) { "mep_mep" }
- before do
- stub_const("#{described_class}::SERVER_FEATURE_FLAGS", [feature_flag])
- end
-
describe ".enabled?" do
context 'when the gate is closed' do
before do
@@ -28,15 +24,13 @@ describe Feature::Gitaly do
end
describe ".server_feature_flags" do
- context 'when one flag is disabled' do
- before do
- stub_feature_flags(gitaly_mep_mep: false)
- end
+ before do
+ allow(Feature).to receive(:persisted_names).and_return(%w[gitaly_mep_mep foo])
+ end
- subject { described_class.server_feature_flags }
+ subject { described_class.server_feature_flags }
- it { is_expected.to be_a(Hash) }
- it { is_expected.to eq("gitaly-feature-mep-mep" => "false") }
- end
+ it { is_expected.to be_a(Hash) }
+ it { is_expected.to eq("gitaly-feature-mep-mep" => "true") }
end
end
diff --git a/spec/lib/gitaly/server_spec.rb b/spec/lib/gitaly/server_spec.rb
index 184d049d1fb..5142f705251 100644
--- a/spec/lib/gitaly/server_spec.rb
+++ b/spec/lib/gitaly/server_spec.rb
@@ -66,6 +66,53 @@ describe Gitaly::Server do
end
end
+ context "when examining disk statistics for a given server" do
+ let(:disk_available) { 42 }
+ let(:disk_used) { 42 }
+ let(:storage_status) { double('storage_status') }
+
+ before do
+ allow(storage_status).to receive(:storage_name).and_return('default')
+ allow(storage_status).to receive(:available).and_return(disk_available)
+ allow(storage_status).to receive(:used).and_return(disk_used)
+ response = double("response")
+ allow(response).to receive(:storage_statuses).and_return([storage_status])
+ allow_next_instance_of(Gitlab::GitalyClient::ServerService) do |instance|
+ allow(instance).to receive(:disk_statistics).and_return(response)
+ end
+ end
+
+ describe '#disk_available' do
+ subject { server.disk_available }
+
+ it { is_expected.to be_present }
+
+ it "returns disk available for the storage of the instantiated server" do
+ is_expected.to eq(disk_available)
+ end
+ end
+
+ describe '#disk_used' do
+ subject { server.disk_used }
+
+ it { is_expected.to be_present }
+
+ it "returns disk used for the storage of the instantiated server" do
+ is_expected.to eq(disk_used)
+ end
+ end
+
+ describe '#disk_stats' do
+ subject { server.disk_stats }
+
+ it { is_expected.to be_present }
+
+ it "returns the storage of the instantiated server" do
+ is_expected.to eq(storage_status)
+ end
+ end
+ end
+
describe '#expected_version?' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/lib/gitlab/alerting/alert_spec.rb b/spec/lib/gitlab/alerting/alert_spec.rb
new file mode 100644
index 00000000000..90e93d189e2
--- /dev/null
+++ b/spec/lib/gitlab/alerting/alert_spec.rb
@@ -0,0 +1,226 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Alerting::Alert do
+ let_it_be(:project) { create(:project) }
+
+ let(:alert) { build(:alerting_alert, project: project, payload: payload) }
+ let(:payload) { {} }
+
+ shared_context 'gitlab alert' do
+ let(:gitlab_alert_id) { gitlab_alert.prometheus_metric_id.to_s }
+ let!(:gitlab_alert) { create(:prometheus_alert, project: project) }
+
+ before do
+ payload['labels'] = { 'gitlab_alert_id' => gitlab_alert_id }
+ end
+ end
+
+ shared_examples 'invalid alert' do
+ it 'is invalid' do
+ expect(alert).not_to be_valid
+ end
+ end
+
+ shared_examples 'parse payload' do |*pairs|
+ context 'without payload' do
+ it { is_expected.to be_nil }
+ end
+
+ pairs.each do |pair|
+ context "with #{pair}" do
+ let(:value) { 'some value' }
+
+ before do
+ section, name = pair.split('/')
+ payload[section] = { name => value }
+ end
+
+ it { is_expected.to eq(value) }
+ end
+ end
+ end
+
+ describe '#gitlab_alert' do
+ subject { alert.gitlab_alert }
+
+ context 'without payload' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with gitlab alert' do
+ include_context 'gitlab alert'
+
+ it { is_expected.to eq(gitlab_alert) }
+ end
+
+ context 'with unknown gitlab alert' do
+ include_context 'gitlab alert' do
+ let(:gitlab_alert_id) { 'unknown' }
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#title' do
+ subject { alert.title }
+
+ it_behaves_like 'parse payload',
+ 'annotations/title',
+ 'annotations/summary',
+ 'labels/alertname'
+
+ context 'with gitlab alert' do
+ include_context 'gitlab alert'
+
+ context 'with annotations/title' do
+ let(:value) { 'annotation title' }
+
+ before do
+ payload['annotations'] = { 'title' => value }
+ end
+
+ it { is_expected.to eq(gitlab_alert.title) }
+ end
+ end
+ end
+
+ describe '#description' do
+ subject { alert.description }
+
+ it_behaves_like 'parse payload', 'annotations/description'
+ end
+
+ describe '#annotations' do
+ subject { alert.annotations }
+
+ context 'without payload' do
+ it { is_expected.to eq([]) }
+ end
+
+ context 'with payload' do
+ before do
+ payload['annotations'] = { 'foo' => 'value1', 'bar' => 'value2' }
+ end
+
+ it 'parses annotations' do
+ expect(subject.size).to eq(2)
+ expect(subject.map(&:label)).to eq(%w[foo bar])
+ expect(subject.map(&:value)).to eq(%w[value1 value2])
+ end
+ end
+ end
+
+ describe '#environment' do
+ subject { alert.environment }
+
+ context 'without gitlab_alert' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with gitlab alert' do
+ include_context 'gitlab alert'
+
+ it { is_expected.to eq(gitlab_alert.environment) }
+ end
+ end
+
+ describe '#starts_at' do
+ subject { alert.starts_at }
+
+ context 'with empty startsAt' do
+ before do
+ payload['startsAt'] = nil
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with invalid startsAt' do
+ before do
+ payload['startsAt'] = 'invalid'
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with payload' do
+ let(:time) { Time.now.change(usec: 0) }
+
+ before do
+ payload['startsAt'] = time.rfc3339
+ end
+
+ it { is_expected.to eq(time) }
+ end
+ end
+
+ describe '#full_query' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { alert.full_query }
+
+ where(:generator_url, :expected_query) do
+ nil | nil
+ 'http://localhost' | nil
+ 'invalid url' | nil
+ 'http://localhost:9090/graph?g1.expr=vector%281%29' | nil
+ 'http://localhost:9090/graph?g0.expr=vector%281%29' | 'vector(1)'
+ end
+
+ with_them do
+ before do
+ payload['generatorURL'] = generator_url
+ end
+
+ it { is_expected.to eq(expected_query) }
+ end
+
+ context 'with gitlab alert' do
+ include_context 'gitlab alert'
+
+ before do
+ payload['generatorURL'] = 'http://localhost:9090/graph?g0.expr=vector%281%29'
+ end
+
+ it { is_expected.to eq(gitlab_alert.full_query) }
+ end
+ end
+
+ describe '#alert_markdown' do
+ subject { alert.alert_markdown }
+
+ it_behaves_like 'parse payload', 'annotations/gitlab_incident_markdown'
+ end
+
+ describe '#valid?' do
+ before do
+ payload.update(
+ 'annotations' => { 'title' => 'some title' },
+ 'startsAt' => Time.now.rfc3339
+ )
+ end
+
+ subject { alert }
+
+ it { is_expected.to be_valid }
+
+ context 'without project' do
+ # Redefine to prevent:
+ # project is a NilClass - rspec-set works with ActiveRecord models only
+ let(:alert) { build(:alerting_alert, project: nil, payload: payload) }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'without starts_at' do
+ before do
+ payload['startsAt'] = nil
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb b/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb
new file mode 100644
index 00000000000..a38aea7b972
--- /dev/null
+++ b/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+describe Gitlab::Alerting::NotificationPayloadParser do
+ describe '.call' do
+ let(:starts_at) { Time.current.change(usec: 0) }
+ let(:payload) do
+ {
+ 'title' => 'alert title',
+ 'start_time' => starts_at.rfc3339,
+ 'description' => 'Description',
+ 'monitoring_tool' => 'Monitoring tool name',
+ 'service' => 'Service',
+ 'hosts' => ['gitlab.com']
+ }
+ end
+
+ subject { described_class.call(payload) }
+
+ it 'returns Prometheus-like payload' do
+ is_expected.to eq(
+ {
+ 'annotations' => {
+ 'title' => 'alert title',
+ 'description' => 'Description',
+ 'monitoring_tool' => 'Monitoring tool name',
+ 'service' => 'Service',
+ 'hosts' => ['gitlab.com']
+ },
+ 'startsAt' => starts_at.rfc3339
+ }
+ )
+ end
+
+ context 'when title is blank' do
+ before do
+ payload[:title] = ''
+ end
+
+ it 'sets a predefined title' do
+ expect(subject.dig('annotations', 'title')).to eq('New: Incident')
+ end
+ end
+
+ context 'when hosts attribute is a string' do
+ before do
+ payload[:hosts] = 'gitlab.com'
+ end
+
+ it 'returns hosts as an array of one element' do
+ expect(subject.dig('annotations', 'hosts')).to eq(['gitlab.com'])
+ end
+ end
+
+ context 'when the time is in unsupported format' do
+ before do
+ payload[:start_time] = 'invalid/date/format'
+ end
+
+ it 'sets startsAt to a current time in RFC3339 format' do
+ expect(subject['startsAt']).to eq(starts_at.rfc3339)
+ end
+ end
+
+ context 'when payload is blank' do
+ let(:payload) { {} }
+
+ it 'returns default parameters' do
+ is_expected.to eq(
+ 'annotations' => { 'title' => 'New: Incident' },
+ 'startsAt' => starts_at.rfc3339
+ )
+ end
+ end
+
+ context 'when payload attributes have blank lines' do
+ let(:payload) do
+ {
+ 'title' => '',
+ 'start_time' => '',
+ 'description' => '',
+ 'monitoring_tool' => '',
+ 'service' => '',
+ 'hosts' => ['']
+ }
+ end
+
+ it 'returns default parameters' do
+ is_expected.to eq(
+ 'annotations' => { 'title' => 'New: Incident' },
+ 'startsAt' => starts_at.rfc3339
+ )
+ end
+ end
+
+ context 'when payload has secondary params' do
+ let(:payload) do
+ {
+ 'description' => 'Description',
+ 'additional' => {
+ 'params' => {
+ '1' => 'Some value 1',
+ '2' => 'Some value 2',
+ 'blank' => ''
+ }
+ }
+ }
+ end
+
+ it 'adds secondary params to annotations' do
+ is_expected.to eq(
+ 'annotations' => {
+ 'title' => 'New: Incident',
+ 'description' => 'Description',
+ 'additional.params.1' => 'Some value 1',
+ 'additional.params.2' => 'Some value 2'
+ },
+ 'startsAt' => starts_at.rfc3339
+ )
+ end
+ end
+
+ context 'when secondary params hash is too big' do
+ before do
+ allow(Gitlab::Utils::SafeInlineHash).to receive(:merge_keys!).and_raise(ArgumentError)
+ end
+
+ it 'catches and re-raises an error' do
+ expect { subject }.to raise_error Gitlab::Alerting::NotificationPayloadParser::BadPayloadError, 'The payload is too big'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb
index 482bf0dc192..c6bc3c945a8 100644
--- a/spec/lib/gitlab/application_context_spec.rb
+++ b/spec/lib/gitlab/application_context_spec.rb
@@ -43,11 +43,11 @@ describe Gitlab::ApplicationContext do
describe '#to_lazy_hash' do
let(:user) { build(:user) }
let(:project) { build(:project) }
- let(:namespace) { build(:group) }
- let(:subgroup) { build(:group, parent: namespace) }
+ let(:namespace) { create(:group) }
+ let(:subgroup) { create(:group, parent: namespace) }
def result(context)
- context.to_lazy_hash.transform_values { |v| v.call }
+ context.to_lazy_hash.transform_values { |v| v.respond_to?(:call) ? v.call : v }
end
it 'does not call the attributes until needed' do
@@ -78,27 +78,19 @@ describe Gitlab::ApplicationContext do
expect(result(context))
.to include(project: project.full_path, root_namespace: project.full_path_components.first)
end
+ end
+
+ describe '#use' do
+ let(:context) { described_class.new(user: build(:user)) }
+
+ it 'yields control' do
+ expect { |b| context.use(&b) }.to yield_control
+ end
+
+ it 'passes the expected context on to labkit' do
+ expect(Labkit::Context).to receive(:with_context).with(a_hash_including(user: duck_type(:call)))
- context 'only include values for which an option was specified' do
- using RSpec::Parameterized::TableSyntax
-
- where(:provided_options, :expected_context_keys) do
- [:user, :namespace, :project] | [:user, :project, :root_namespace]
- [:user, :project] | [:user, :project, :root_namespace]
- [:user, :namespace] | [:user, :root_namespace]
- [:user] | [:user]
- [] | []
- end
-
- with_them do
- it do
- # Build a hash that has all `provided_options` as keys, and `nil` as value
- provided_values = provided_options.map { |key| [key, nil] }.to_h
- context = described_class.new(provided_values)
-
- expect(context.to_lazy_hash.keys).to contain_exactly(*expected_context_keys)
- end
- end
+ context.use {}
end
end
end
diff --git a/spec/lib/gitlab/asciidoc/include_processor_spec.rb b/spec/lib/gitlab/asciidoc/include_processor_spec.rb
index 5fec4d9e208..72fa05939ae 100644
--- a/spec/lib/gitlab/asciidoc/include_processor_spec.rb
+++ b/spec/lib/gitlab/asciidoc/include_processor_spec.rb
@@ -35,6 +35,12 @@ describe Gitlab::Asciidoc::IncludeProcessor do
expect(processor.send(:include_allowed?, 'foo.adoc', reader)).to be_truthy
end
+ it 'allows the Nth include' do
+ (max_includes - 1).times { processor.send(:read_blob, ref, 'a.adoc') }
+
+ expect(processor.send(:include_allowed?, 'foo.adoc', reader)).to be_truthy
+ end
+
it 'disallows the Nth + 1 include' do
max_includes.times { processor.send(:read_blob, ref, 'a.adoc') }
diff --git a/spec/lib/gitlab/auth/current_user_mode_spec.rb b/spec/lib/gitlab/auth/current_user_mode_spec.rb
index 3b3db0f7315..7c2fdac6c25 100644
--- a/spec/lib/gitlab/auth/current_user_mode_spec.rb
+++ b/spec/lib/gitlab/auth/current_user_mode_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
-describe Gitlab::Auth::CurrentUserMode, :do_not_mock_admin_mode do
+describe Gitlab::Auth::CurrentUserMode, :do_not_mock_admin_mode, :request_store do
include_context 'custom session'
- let(:user) { build(:user) }
+ let(:user) { build_stubbed(:user) }
subject { described_class.new(user) }
@@ -13,54 +13,66 @@ describe Gitlab::Auth::CurrentUserMode, :do_not_mock_admin_mode do
allow(ActiveSession).to receive(:list_sessions).with(user).and_return([session])
end
- describe '#admin_mode?', :request_store do
- context 'when the user is a regular user' do
- it 'is false by default' do
- expect(subject.admin_mode?).to be(false)
- end
+ shared_examples 'admin mode cannot be enabled' do
+ it 'is false by default' do
+ expect(subject.admin_mode?).to be(false)
+ end
- it 'cannot be enabled with a valid password' do
- subject.enable_admin_mode!(password: user.password)
+ it 'cannot be enabled with a valid password' do
+ subject.enable_admin_mode!(password: user.password)
- expect(subject.admin_mode?).to be(false)
- end
+ expect(subject.admin_mode?).to be(false)
+ end
- it 'cannot be enabled with an invalid password' do
- subject.enable_admin_mode!(password: nil)
+ it 'cannot be enabled with an invalid password' do
+ subject.enable_admin_mode!(password: nil)
- expect(subject.admin_mode?).to be(false)
- end
+ expect(subject.admin_mode?).to be(false)
+ end
- it 'cannot be enabled with empty params' do
- subject.enable_admin_mode!
+ it 'cannot be enabled with empty params' do
+ subject.enable_admin_mode!
- expect(subject.admin_mode?).to be(false)
- end
+ expect(subject.admin_mode?).to be(false)
+ end
- it 'disable has no effect' do
- subject.enable_admin_mode!
- subject.disable_admin_mode!
+ it 'disable has no effect' do
+ subject.enable_admin_mode!
+ subject.disable_admin_mode!
+
+ expect(subject.admin_mode?).to be(false)
+ end
+
+ context 'skipping password validation' do
+ it 'cannot be enabled with a valid password' do
+ subject.enable_admin_mode!(password: user.password, skip_password_validation: true)
expect(subject.admin_mode?).to be(false)
end
- context 'skipping password validation' do
- it 'cannot be enabled with a valid password' do
- subject.enable_admin_mode!(password: user.password, skip_password_validation: true)
+ it 'cannot be enabled with an invalid password' do
+ subject.enable_admin_mode!(skip_password_validation: true)
- expect(subject.admin_mode?).to be(false)
- end
+ expect(subject.admin_mode?).to be(false)
+ end
+ end
+ end
- it 'cannot be enabled with an invalid password' do
- subject.enable_admin_mode!(skip_password_validation: true)
+ describe '#admin_mode?' do
+ context 'when the user is a regular user' do
+ it_behaves_like 'admin mode cannot be enabled'
- expect(subject.admin_mode?).to be(false)
+ context 'bypassing session' do
+ it_behaves_like 'admin mode cannot be enabled' do
+ around do |example|
+ described_class.bypass_session!(user.id) { example.run }
+ end
end
end
end
context 'when the user is an admin' do
- let(:user) { build(:user, :admin) }
+ let(:user) { build_stubbed(:user, :admin) }
context 'when admin mode not requested' do
it 'is false by default' do
@@ -148,11 +160,36 @@ describe Gitlab::Auth::CurrentUserMode, :do_not_mock_admin_mode do
end
end
end
+
+ context 'bypassing session' do
+ it 'is active by default' do
+ described_class.bypass_session!(user.id) do
+ expect(subject.admin_mode?).to be(true)
+ end
+ end
+
+ it 'enable has no effect' do
+ described_class.bypass_session!(user.id) do
+ subject.request_admin_mode!
+ subject.enable_admin_mode!(password: user.password)
+
+ expect(subject.admin_mode?).to be(true)
+ end
+ end
+
+ it 'disable has no effect' do
+ described_class.bypass_session!(user.id) do
+ subject.disable_admin_mode!
+
+ expect(subject.admin_mode?).to be(true)
+ end
+ end
+ end
end
end
describe '#enable_admin_mode!' do
- let(:user) { build(:user, :admin) }
+ let(:user) { build_stubbed(:user, :admin) }
it 'creates a timestamp in the session' do
subject.request_admin_mode!
@@ -163,7 +200,7 @@ describe Gitlab::Auth::CurrentUserMode, :do_not_mock_admin_mode do
end
describe '#enable_sessionless_admin_mode!' do
- let(:user) { build(:user, :admin) }
+ let(:user) { build_stubbed(:user, :admin) }
it 'enabled admin mode without password' do
subject.enable_sessionless_admin_mode!
@@ -173,7 +210,7 @@ describe Gitlab::Auth::CurrentUserMode, :do_not_mock_admin_mode do
end
describe '#disable_admin_mode!' do
- let(:user) { build(:user, :admin) }
+ let(:user) { build_stubbed(:user, :admin) }
it 'sets the session timestamp to nil' do
subject.request_admin_mode!
@@ -183,6 +220,73 @@ describe Gitlab::Auth::CurrentUserMode, :do_not_mock_admin_mode do
end
end
+ describe '.bypass_session!' do
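+ # The bypass should expose the given admin id only while the block runs and clear it afterwards.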
+ context 'with a regular user' do
+ it 'admin mode is false' do
+ described_class.bypass_session!(user.id) do
+ expect(subject.admin_mode?).to be(false)
+ expect(described_class.bypass_session_admin_id).to be(user.id)
+ end
+
+ expect(described_class.bypass_session_admin_id).to be_nil
+ end
+ end
+
+ context 'with an admin user' do
+ let(:user) { build_stubbed(:user, :admin) }
+
+ it 'admin mode is true' do
+ described_class.bypass_session!(user.id) do
+ expect(subject.admin_mode?).to be(true)
+ expect(described_class.bypass_session_admin_id).to be(user.id)
+ end
+
+ expect(described_class.bypass_session_admin_id).to be_nil
+ end
+ end
+ end
+
+ describe '.with_current_admin' do
+ context 'with a regular user' do
+ it 'user is not available inside or outside the yielded block' do
+ described_class.with_current_admin(user) do
+ expect(described_class.current_admin).to be_nil
+ end
+
+ expect(described_class.bypass_session_admin_id).to be_nil
+ end
+ end
+
+ context 'with an admin user' do
+ let(:user) { build_stubbed(:user, :admin) }
+
+ context 'admin mode is disabled' do
+ it 'user is not available inside or outside the yielded block' do
+ described_class.with_current_admin(user) do
+ expect(described_class.current_admin).to be_nil
+ end
+
+ expect(described_class.bypass_session_admin_id).to be_nil
+ end
+ end
+
+ context 'admin mode is enabled' do
+ before do
+ subject.request_admin_mode!
+ subject.enable_admin_mode!(password: user.password)
+ end
+
+ it 'user is available only inside the yielded block' do
+ described_class.with_current_admin(user) do
+ expect(described_class.current_admin).to be(user)
+ end
+
+ expect(described_class.current_admin).to be_nil
+ end
+ end
+ end
+ end
+
def expected_session_entry(value_matcher)
{
Gitlab::Auth::CurrentUserMode::SESSION_STORE_KEY => a_hash_including(
diff --git a/spec/lib/gitlab/auth/saml/auth_hash_spec.rb b/spec/lib/gitlab/auth/saml/auth_hash_spec.rb
index 13636a495d1..8b88c16f317 100644
--- a/spec/lib/gitlab/auth/saml/auth_hash_spec.rb
+++ b/spec/lib/gitlab/auth/saml/auth_hash_spec.rb
@@ -95,6 +95,17 @@ describe Gitlab::Auth::Saml::AuthHash do
end
end
+ context 'with ADFS SAML response_object' do
+ before do
+ auth_hash_data[:extra][:response_object] = { document:
+ saml_xml(File.read('spec/fixtures/authentication/adfs_saml_response.xml')) }
+ end
+
+ it 'can extract authn_context' do
+ expect(saml_auth_hash.authn_context).to eq 'urn:federation:authentication:windows'
+ end
+ end
+
context 'without response_object' do
it 'returns an empty string' do
expect(saml_auth_hash.authn_context).to be_nil
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 1f943bebbec..ed763f63756 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -460,6 +460,20 @@ describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
end
+ context 'when the deploy token is of group type' do
+ let(:project_with_group) { create(:project, group: create(:group)) }
+ let(:deploy_token) { create(:deploy_token, :group, read_repository: true, groups: [project_with_group.group]) }
+ let(:login) { deploy_token.username }
+
+ subject { gl_auth.find_for_git_client(login, deploy_token.token, project: project_with_group, ip: 'ip') }
+
+ it 'succeeds when login and a group deploy token are valid' do
+ auth_success = Gitlab::Auth::Result.new(deploy_token, project_with_group, :deploy_token, [:download_code, :read_container_image])
+
+ expect(subject).to eq(auth_success)
+ end
+ end
+
context 'when the deploy token has read_registry as a scope' do
let(:deploy_token) { create(:deploy_token, read_repository: false, projects: [project]) }
let(:login) { deploy_token.username }
@@ -469,10 +483,10 @@ describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
stub_container_registry_config(enabled: true)
end
- it 'succeeds when login and token are valid' do
+ it 'succeeds when login and a project token are valid' do
auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:read_container_image])
- expect(gl_auth.find_for_git_client(login, deploy_token.token, project: nil, ip: 'ip'))
+ expect(gl_auth.find_for_git_client(login, deploy_token.token, project: project, ip: 'ip'))
.to eq(auth_success)
end
diff --git a/spec/lib/gitlab/background_migration/activate_prometheus_services_for_shared_cluster_applications_spec.rb b/spec/lib/gitlab/background_migration/activate_prometheus_services_for_shared_cluster_applications_spec.rb
deleted file mode 100644
index 0edf87e1354..00000000000
--- a/spec/lib/gitlab/background_migration/activate_prometheus_services_for_shared_cluster_applications_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::ActivatePrometheusServicesForSharedClusterApplications, :migration, schema: 2020_01_14_113341 do
- include MigrationHelpers::PrometheusServiceHelpers
-
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:services) { table(:services) }
- let(:namespace) { namespaces.create(name: 'user', path: 'user') }
- let(:project) { projects.create(namespace_id: namespace.id) }
-
- let(:columns) do
- %w(project_id active properties type template push_events
- issues_events merge_requests_events tag_push_events
- note_events category default wiki_page_events pipeline_events
- confidential_issues_events commit_events job_events
- confidential_note_events deployment_events)
- end
-
- describe '#perform' do
- it 'is idempotent' do
- expect { subject.perform(project.id) }.to change { services.order(:id).map { |row| row.attributes } }
-
- expect { subject.perform(project.id) }.not_to change { services.order(:id).map { |row| row.attributes } }
- end
-
- context 'non prometheus services' do
- it 'does not change them' do
- other_type = 'SomeOtherService'
- services.create(service_params_for(project.id, active: true, type: other_type))
-
- expect { subject.perform(project.id) }.not_to change { services.where(type: other_type).order(:id).map { |row| row.attributes } }
- end
- end
-
- context 'prometheus services are configured manually ' do
- it 'does not change them' do
- properties = '{"api_url":"http://test.dev","manual_configuration":"1"}'
- services.create(service_params_for(project.id, properties: properties, active: false))
-
- expect { subject.perform(project.id) }.not_to change { services.order(:id).map { |row| row.attributes } }
- end
- end
-
- context 'prometheus integration services do not exist' do
- it 'creates missing services entries' do
- subject.perform(project.id)
-
- rows = services.order(:id).map { |row| row.attributes.slice(*columns).symbolize_keys }
-
- expect([service_params_for(project.id, active: true)]).to eq rows
- end
- end
-
- context 'prometheus integration services exist' do
- context 'in active state' do
- it 'does not change them' do
- services.create(service_params_for(project.id, active: true))
-
- expect { subject.perform(project.id) }.not_to change { services.order(:id).map { |row| row.attributes } }
- end
- end
-
- context 'not in active state' do
- it 'sets active attribute to true' do
- service = services.create(service_params_for(project.id))
-
- expect { subject.perform(project.id) }.to change { service.reload.active? }.from(false).to(true)
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb
index 5cad479ff05..4714712f733 100644
--- a/spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb
@@ -8,7 +8,7 @@ describe Gitlab::BackgroundMigration::BackfillProjectFullpathInRepoConfig, :migr
let(:group) { namespaces.create!(name: 'foo', path: 'foo') }
let(:subgroup) { namespaces.create!(name: 'bar', path: 'bar', parent_id: group.id) }
- describe described_class::Storage::HashedProject do
+ describe described_class::Storage::Hashed do
let(:project) { double(id: 555) }
subject(:project_storage) { described_class.new(project) }
diff --git a/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb
new file mode 100644
index 00000000000..718109bb720
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::BackfillProjectSettings, :migration, schema: 20200114113341 do
+ let(:projects) { table(:projects) }
+ let(:project_settings) { table(:project_settings) }
+ let(:namespace) { table(:namespaces).create(name: 'user', path: 'user') }
+ let(:project) { projects.create(namespace_id: namespace.id) }
+
+ subject { described_class.new }
+
+ describe '#perform' do
+ it 'creates settings for all projects in range' do
+ projects.create(id: 5, namespace_id: namespace.id)
+ projects.create(id: 7, namespace_id: namespace.id)
+ projects.create(id: 8, namespace_id: namespace.id)
+
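+ # perform(5, 7) covers the inclusive id range, so project 8 should be left without settings.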
+ subject.perform(5, 7)
+
+ expect(project_settings.all.pluck(:project_id)).to contain_exactly(5, 7)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb b/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb
new file mode 100644
index 00000000000..0dca542cb9f
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::FixProjectsWithoutProjectFeature, :migration, schema: 2020_01_27_111840 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:project_features) { table(:project_features) }
+
+ let(:namespace) { namespaces.create(name: 'foo', path: 'foo') }
+
+ let!(:project) { projects.create!(namespace_id: namespace.id) }
+ let(:private_project_without_feature) { projects.create!(namespace_id: namespace.id, visibility_level: 0) }
+ let(:public_project_without_feature) { projects.create!(namespace_id: namespace.id, visibility_level: 20) }
+ let!(:projects_without_feature) { [private_project_without_feature, public_project_without_feature] }
+
+ before do
+ project_features.create({ project_id: project.id, pages_access_level: 20 })
+ end
+
+ subject { described_class.new.perform(Project.minimum(:id), Project.maximum(:id)) }
+
+ def project_feature_records
+ project_features.order(:project_id).pluck(:project_id)
+ end
+
+ def features(project)
+ project_features.find_by(project_id: project.id)&.attributes
+ end
+
+ it 'creates a ProjectFeature for projects without it' do
+ expect { subject }.to change { project_feature_records }.from([project.id]).to([project.id, *projects_without_feature.map(&:id)])
+ end
+
+ it 'creates ProjectFeature records with default values for a public project' do
+ subject
+
+ expect(features(public_project_without_feature)).to include(
+ {
+ "merge_requests_access_level" => 20,
+ "issues_access_level" => 20,
+ "wiki_access_level" => 20,
+ "snippets_access_level" => 20,
+ "builds_access_level" => 20,
+ "repository_access_level" => 20,
+ "pages_access_level" => 20,
+ "forking_access_level" => 20
+ }
+ )
+ end
+
+ it 'creates ProjectFeature records with default values for a private project' do
+ subject
+
+ expect(features(private_project_without_feature)).to include("pages_access_level" => 10)
+ end
+
+ context 'when access control to pages is forced' do
+ before do
+ allow(::Gitlab::Pages).to receive(:access_control_is_forced?).and_return(true)
+ end
+
+ it 'creates ProjectFeature records with default values for a public project' do
+ subject
+
+ expect(features(public_project_without_feature)).to include("pages_access_level" => 10)
+ end
+ end
+
+ it 'sets created_at/updated_at timestamps' do
+ subject
+
+ expect(project_features.where('created_at IS NULL OR updated_at IS NULL')).to be_empty
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
index 664e3810fc9..4c70877befe 100644
--- a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, :migration, schema: 20190924152703 do
+describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, :migration, schema: 20200130145430 do
let(:services) { table(:services) }
# we need to define the classes due to encryption
diff --git a/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
index 44f537ca8dd..f25804c3ca2 100644
--- a/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
# Rollback DB to 10.5 (later than this was originally written for) because it still needs to work.
-describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq, :migration, schema: 20180208183958 do
+describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :migration, schema: 20180208183958 do
include MigrationsHelpers::TrackUntrackedUploadsHelpers
subject { described_class.new }
diff --git a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
index 591368ee98e..9072431e8f6 100644
--- a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
+++ b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
# Rollback DB to 10.5 (later than this was originally written for) because it still needs to work.
-describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq, :migration, schema: 20180208183958 do
+describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :migration, schema: 20180208183958 do
include MigrationsHelpers::TrackUntrackedUploadsHelpers
let!(:untracked_files_for_uploads) { table(:untracked_files_for_uploads) }
diff --git a/spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb b/spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb
index 399db4ac259..cee9a3e8822 100644
--- a/spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb
+++ b/spec/lib/gitlab/background_migration/schedule_calculate_wiki_sizes_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190527194900_schedule_calculate_wiki_sizes.rb')
-describe ScheduleCalculateWikiSizes, :migration, :sidekiq do
+describe ScheduleCalculateWikiSizes, :migration do
let(:migration_class) { Gitlab::BackgroundMigration::CalculateWikiSizes }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb b/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb
new file mode 100644
index 00000000000..37280110b91
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::UpdateExistingSubgroupToMatchVisibilityLevelOfParent, :migration, schema: 2020_01_10_121314 do
+ include MigrationHelpers::NamespacesHelpers
+
+ context 'private visibility level' do
+ it 'updates the subgroup visibility' do
+ parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE)
+ child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent.id)
+
+ expect { subject.perform([parent.id], Gitlab::VisibilityLevel::PRIVATE) }.to change { child.reload.visibility_level }.to(Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ it 'updates sub-sub groups' do
+ parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE)
+ middle_group = create_namespace('middle', Gitlab::VisibilityLevel::PRIVATE, parent_id: parent.id)
+ child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: middle_group.id)
+
+ subject.perform([parent.id, middle_group.id], Gitlab::VisibilityLevel::PRIVATE)
+
+ expect(child.reload.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ it 'updates all sub groups' do
+ parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE)
+ middle_group = create_namespace('middle', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent.id)
+ child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: middle_group.id)
+
+ subject.perform([parent.id], Gitlab::VisibilityLevel::PRIVATE)
+
+ expect(child.reload.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ expect(middle_group.reload.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
+
+ context 'internal visibility level' do
+ it 'updates the subgroup visibility' do
+ parent = create_namespace('parent', Gitlab::VisibilityLevel::INTERNAL)
+ child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent.id)
+
+ expect { subject.perform([parent.id], Gitlab::VisibilityLevel::INTERNAL) }.to change { child.reload.visibility_level }.to(Gitlab::VisibilityLevel::INTERNAL)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration_spec.rb b/spec/lib/gitlab/background_migration_spec.rb
index 66a0b11606f..71959f54b38 100644
--- a/spec/lib/gitlab/background_migration_spec.rb
+++ b/spec/lib/gitlab/background_migration_spec.rb
@@ -76,7 +76,7 @@ describe Gitlab::BackgroundMigration do
end
end
- context 'when there are scheduled jobs present', :sidekiq, :redis do
+ context 'when there are scheduled jobs present', :redis do
it 'steals all jobs from the scheduled sets' do
Sidekiq::Testing.disable! do
BackgroundMigrationWorker.perform_in(10.minutes, 'Object')
@@ -91,7 +91,7 @@ describe Gitlab::BackgroundMigration do
end
end
- context 'when there are enqueued and scheduled jobs present', :sidekiq, :redis do
+ context 'when there are enqueued and scheduled jobs present', :redis do
it 'steals from the scheduled sets queue first' do
Sidekiq::Testing.disable! do
expect(described_class).to receive(:perform)
@@ -107,7 +107,7 @@ describe Gitlab::BackgroundMigration do
end
end
- context 'when retry_dead_jobs is true', :sidekiq, :redis do
+ context 'when retry_dead_jobs is true', :redis do
let(:retry_queue) do
[double(args: ['Object', [3]], queue: described_class.queue, delete: true)]
end
@@ -165,6 +165,32 @@ describe Gitlab::BackgroundMigration do
end
end
+ describe '.remaining', :redis do
+ context 'when there are jobs remaining' do
+ let(:queue) { Array.new(12) }
+
+ before do
+ allow(Sidekiq::Queue).to receive(:new)
+ .with(described_class.queue)
+ .and_return(Array.new(12))
+
+ Sidekiq::Testing.disable! do
+ BackgroundMigrationWorker.perform_in(10.minutes, 'Foo')
+ end
+ end
+
+ it 'returns the enqueued jobs plus the scheduled jobs' do
+ expect(described_class.remaining).to eq(13)
+ end
+ end
+
+ context 'when there are no jobs remaining' do
+ it 'returns zero' do
+ expect(described_class.remaining).to be_zero
+ end
+ end
+ end
+
describe '.exists?' do
context 'when there are enqueued jobs present' do
let(:queue) do
@@ -186,7 +212,7 @@ describe Gitlab::BackgroundMigration do
end
end
- context 'when there are scheduled jobs present', :sidekiq, :redis do
+ context 'when there are scheduled jobs present', :redis do
before do
Sidekiq::Testing.disable! do
BackgroundMigrationWorker.perform_in(10.minutes, 'Foo')
diff --git a/spec/lib/gitlab/batch_worker_context_spec.rb b/spec/lib/gitlab/batch_worker_context_spec.rb
new file mode 100644
index 00000000000..0ba30287ae5
--- /dev/null
+++ b/spec/lib/gitlab/batch_worker_context_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BatchWorkerContext do
+ subject(:batch_context) do
+ described_class.new(
+ %w(hello world),
+ arguments_proc: -> (word) { word },
+ context_proc: -> (word) { { user: build_stubbed(:user, username: word) } }
+ )
+ end
+
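+ # arguments_proc maps each element to its job arguments; context_proc builds the application context recorded for that element.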
+ describe "#arguments" do
+ it "returns all the expected arguments in arrays" do
+ expect(batch_context.arguments).to eq([%w(hello), %w(world)])
+ end
+ end
+
+ describe "#context_for" do
+ it "returns the correct application context for the arguments" do
+ context = batch_context.context_for(%w(world))
+
+ expect(context).to be_a(Gitlab::ApplicationContext)
+ expect(context.to_lazy_hash[:user].call).to eq("world")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
index c2816f35cec..fc9266f75fb 100644
--- a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
+++ b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
@@ -114,6 +114,24 @@ describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache do
pipeline_status.load_status
pipeline_status.load_status
end
+
+ it 'handles Gitaly unavailable exceptions gracefully' do
+ allow(pipeline_status).to receive(:commit).and_raise(GRPC::Unavailable)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ an_instance_of(GRPC::Unavailable), project_id: project.id
+ )
+ expect { pipeline_status.load_status }.not_to raise_error
+ end
+
+ it 'handles Gitaly timeout exceptions gracefully' do
+ allow(pipeline_status).to receive(:commit).and_raise(GRPC::DeadlineExceeded)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ an_instance_of(GRPC::DeadlineExceeded), project_id: project.id
+ )
+ expect { pipeline_status.load_status }.not_to raise_error
+ end
end
describe "#load_from_project", :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb b/spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb
index 3605bac7dfc..10843a1435a 100644
--- a/spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb
@@ -18,7 +18,7 @@ describe Gitlab::Ci::Build::Rules::Rule::Clause::Exists do
before do
stub_const('Gitlab::Ci::Build::Rules::Rule::Clause::Exists::MAX_PATTERN_COMPARISONS', 2)
- expect(File).to receive(:fnmatch?).exactly(2).times.and_call_original
+ expect(File).to receive(:fnmatch?).twice.and_call_original
end
it { is_expected.to be_truthy }
diff --git a/spec/lib/gitlab/ci/build/rules_spec.rb b/spec/lib/gitlab/ci/build/rules_spec.rb
index 1ebcc4f9414..31a9fa055e1 100644
--- a/spec/lib/gitlab/ci/build/rules_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules_spec.rb
@@ -102,9 +102,9 @@ describe Gitlab::Ci::Build::Rules do
end
context 'with one rule without any clauses' do
- let(:rule_list) { [{ when: 'manual' }] }
+ let(:rule_list) { [{ when: 'manual', allow_failure: true }] }
- it { is_expected.to eq(described_class::Result.new('manual')) }
+ it { is_expected.to eq(described_class::Result.new('manual', nil, true)) }
end
context 'with one matching rule' do
@@ -166,5 +166,51 @@ describe Gitlab::Ci::Build::Rules do
end
end
end
+
+ context 'with only allow_failure' do
+ context 'with matching rule' do
+ let(:rule_list) { [{ if: '$VAR == null', allow_failure: true }] }
+
+ it { is_expected.to eq(described_class::Result.new('on_success', nil, true)) }
+ end
+
+ context 'with non-matching rule' do
+ let(:rule_list) { [{ if: '$VAR != null', allow_failure: true }] }
+
+ it { is_expected.to eq(described_class::Result.new('never')) }
+ end
+ end
+ end
+
+ describe 'Gitlab::Ci::Build::Rules::Result' do
+ let(:when_value) { 'on_success' }
+ let(:start_in) { nil }
+ let(:allow_failure) { nil }
+
+ subject { Gitlab::Ci::Build::Rules::Result.new(when_value, start_in, allow_failure) }
+
+ describe '#build_attributes' do
+ it 'compacts nil values' do
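+ # start_in and allow_failure are nil here, so only the 'when' value and an empty options hash remain.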
+ expect(subject.build_attributes).to eq(options: {}, when: 'on_success')
+ end
+ end
+
+ describe '#pass?' do
+ context "'when' is 'never'" do
+ let!(:when_value) { 'never' }
+
+ it 'returns false' do
+ expect(subject.pass?).to eq(false)
+ end
+ end
+
+ context "'when' is 'on_success'" do
+ let!(:when_value) { 'on_success' }
+
+ it 'returns true' do
+ expect(subject.pass?).to eq(true)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
new file mode 100644
index 00000000000..ad388886681
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -0,0 +1,229 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::Entry::Bridge do
+ subject { described_class.new(config, name: :my_bridge) }
+
+ it_behaves_like 'with inheritable CI config' do
+ let(:inheritable_key) { 'default' }
+ let(:inheritable_class) { Gitlab::Ci::Config::Entry::Default }
+
+ # These entries are defined in Default, but we do not want to
+ # inherit them because they do not make sense in the context of Bridge
+ let(:ignored_inheritable_columns) do
+ %i[before_script after_script image services cache interruptible timeout
+ retry tags artifacts]
+ end
+ end
+
+ describe '.matching?' do
+ subject { described_class.matching?(name, config) }
+
+ context 'when config is not a hash' do
+ let(:name) { :my_trigger }
+ let(:config) { 'string' }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when config is a regular job' do
+ let(:name) { :my_trigger }
+ let(:config) do
+ { script: 'ls -al' }
+ end
+
+ it { is_expected.to be_falsey }
+
+ context 'with rules' do
+ let(:config) do
+ {
+ script: 'ls -al',
+ rules: [{ if: '$VAR == "value"', when: 'always' }]
+ }
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when config is a bridge job' do
+ let(:name) { :my_trigger }
+ let(:config) do
+ { trigger: 'other-project' }
+ end
+
+ it { is_expected.to be_truthy }
+
+ context 'with rules' do
+ let(:config) do
+ {
+ trigger: 'other-project',
+ rules: [{ if: '$VAR == "value"', when: 'always' }]
+ }
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ context 'when config is a hidden job' do
+ let(:name) { '.my_trigger' }
+ let(:config) do
+ { trigger: 'other-project' }
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '.new' do
+ before do
+ subject.compose!
+ end
+
+ let(:base_config) do
+ {
+ trigger: { project: 'some/project', branch: 'feature' },
+ extends: '.some-key',
+ stage: 'deploy',
+ variables: { VARIABLE: '123' }
+ }
+ end
+
+ context 'when trigger config is a non-empty string' do
+ let(:config) { { trigger: 'some/project' } }
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ it 'returns a bridge job configuration' do
+ expect(subject.value).to eq(name: :my_bridge,
+ trigger: { project: 'some/project' },
+ ignore: false,
+ stage: 'test',
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage)
+ end
+ end
+ end
+
+ context 'when bridge trigger is a hash' do
+ let(:config) do
+ { trigger: { project: 'some/project', branch: 'feature' } }
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ it 'returns a bridge job configuration hash' do
+ expect(subject.value).to eq(name: :my_bridge,
+ trigger: { project: 'some/project',
+ branch: 'feature' },
+ ignore: false,
+ stage: 'test',
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage)
+ end
+ end
+ end
+
+ context 'when bridge configuration contains trigger, when, extends, stage, only, except, and variables' do
+ let(:config) do
+ base_config.merge({
+ when: 'always',
+ only: { variables: %w[$SOMEVARIABLE] },
+ except: { refs: %w[feature] }
+ })
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when bridge configuration uses rules' do
+ let(:config) { base_config.merge({ rules: [{ if: '$VAR == null', when: 'never' }] }) }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when bridge configuration uses rules with job:when' do
+ let(:config) do
+ base_config.merge({
+ when: 'always',
+ rules: [{ if: '$VAR == null', when: 'never' }]
+ })
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when bridge configuration uses rules with only' do
+ let(:config) do
+ base_config.merge({
+ only: { variables: %w[$SOMEVARIABLE] },
+ rules: [{ if: '$VAR == null', when: 'never' }]
+ })
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when bridge configuration uses rules with except' do
+ let(:config) do
+ base_config.merge({
+ except: { refs: %w[feature] },
+ rules: [{ if: '$VAR == null', when: 'never' }]
+ })
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when bridge has only job needs' do
+ let(:config) do
+ {
+ needs: ['some_job']
+ }
+ end
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+ end
+
+ context 'when bridge config contains unknown keys' do
+ let(:config) { { unknown: 123 } }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns an error about unknown config key' do
+ expect(subject.errors.first)
+ .to match /config contains unknown keys: unknown/
+ end
+ end
+ end
+
+ context 'when bridge config contains build-specific attributes' do
+ let(:config) { { script: 'something' } }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns an error message' do
+ expect(subject.errors.first)
+ .to match /contains unknown keys: script/
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 649689f7d3b..313b504ab59 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -110,6 +110,10 @@ describe Gitlab::Ci::Config::Entry::Job do
it { expect(entry).to be_valid }
+ it "returns scheduling_type as :dag" do
+ expect(entry.value[:scheduling_type]).to eq(:dag)
+ end
+
context 'when has dependencies' do
let(:config) do
{
@@ -598,7 +602,8 @@ describe Gitlab::Ci::Config::Entry::Job do
ignore: false,
after_script: %w[cleanup],
only: { refs: %w[branches tags] },
- variables: {})
+ variables: {},
+ scheduling_type: :stage)
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/jobs_spec.rb b/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
index 61c8956d41f..c8c188d71bf 100644
--- a/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
@@ -5,27 +5,31 @@ require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Jobs do
let(:entry) { described_class.new(config) }
+ let(:config) do
+ {
+ '.hidden_job'.to_sym => { script: 'something' },
+ '.hidden_bridge'.to_sym => { trigger: 'my/project' },
+ regular_job: { script: 'something' },
+ my_trigger: { trigger: 'my/project' }
+ }
+ end
+
describe '.all_types' do
subject { described_class.all_types }
it { is_expected.to include(::Gitlab::Ci::Config::Entry::Hidden) }
it { is_expected.to include(::Gitlab::Ci::Config::Entry::Job) }
+ it { is_expected.to include(::Gitlab::Ci::Config::Entry::Bridge) }
end
describe '.find_type' do
using RSpec::Parameterized::TableSyntax
- let(:config) do
- {
- '.hidden_job'.to_sym => { script: 'something' },
- regular_job: { script: 'something' },
- invalid_job: 'text'
- }
- end
-
where(:name, :type) do
:'.hidden_job' | ::Gitlab::Ci::Config::Entry::Hidden
+ :'.hidden_bridge' | ::Gitlab::Ci::Config::Entry::Hidden
:regular_job | ::Gitlab::Ci::Config::Entry::Job
+ :my_trigger | ::Gitlab::Ci::Config::Entry::Bridge
:invalid_job | nil
end
@@ -42,8 +46,6 @@ describe Gitlab::Ci::Config::Entry::Jobs do
end
context 'when entry config value is correct' do
- let(:config) { { rspec: { script: 'rspec' } } }
-
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
@@ -88,43 +90,43 @@ describe Gitlab::Ci::Config::Entry::Jobs do
entry.compose!
end
- let(:config) do
- { rspec: { script: 'rspec' },
- spinach: { script: 'spinach' },
- '.hidden'.to_sym => {} }
- end
-
describe '#value' do
it 'returns key value' do
expect(entry.value).to eq(
- rspec: { name: :rspec,
- script: %w[rspec],
- ignore: false,
- stage: 'test',
- only: { refs: %w[branches tags] },
- variables: {} },
- spinach: { name: :spinach,
- script: %w[spinach],
- ignore: false,
- stage: 'test',
- only: { refs: %w[branches tags] },
- variables: {} })
+ my_trigger: {
+ ignore: false,
+ name: :my_trigger,
+ only: { refs: %w[branches tags] },
+ stage: 'test',
+ trigger: { project: 'my/project' },
+ scheduling_type: :stage
+ },
+ regular_job: {
+ ignore: false,
+ name: :regular_job,
+ only: { refs: %w[branches tags] },
+ script: ['something'],
+ stage: 'test',
+ variables: {},
+ scheduling_type: :stage
+ })
end
end
describe '#descendants' do
it 'creates valid descendant nodes' do
- expect(entry.descendants.count).to eq 3
- expect(entry.descendants.first(2))
- .to all(be_an_instance_of(Gitlab::Ci::Config::Entry::Job))
- expect(entry.descendants.last)
- .to be_an_instance_of(Gitlab::Ci::Config::Entry::Hidden)
+ expect(entry.descendants.map(&:class)).to eq [
+ Gitlab::Ci::Config::Entry::Hidden,
+ Gitlab::Ci::Config::Entry::Hidden,
+ Gitlab::Ci::Config::Entry::Job,
+ Gitlab::Ci::Config::Entry::Bridge
+ ]
end
end
describe '#value' do
it 'returns value of visible jobs only' do
- expect(entry.value.keys).to eq [:rspec, :spinach]
+ expect(entry.value.keys).to eq [:regular_job, :my_trigger]
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index 8562885c90c..c64bb0a4cc3 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -43,6 +43,7 @@ describe Gitlab::Ci::Config::Entry::Reports do
:license_management | 'gl-license-management-report.json'
:license_scanning | 'gl-license-scanning-report.json'
:performance | 'performance.json'
+ :lsif | 'lsif.json'
end
with_them do
diff --git a/spec/lib/gitlab/ci/config/entry/retry_spec.rb b/spec/lib/gitlab/ci/config/entry/retry_spec.rb
index f9efd2e014d..bb3c0b0004d 100644
--- a/spec/lib/gitlab/ci/config/entry/retry_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/retry_spec.rb
@@ -95,14 +95,20 @@ describe Gitlab::Ci::Config::Entry::Retry do
# values are valid. If they are not it means the documentation and this
# array must be updated.
RETRY_WHEN_IN_DOCUMENTATION = %w[
- always
- unknown_failure
- script_failure
- api_failure
- stuck_or_timeout_failure
- runner_system_failure
- missing_dependency_failure
- runner_unsupported
+ always
+ unknown_failure
+ script_failure
+ api_failure
+ stuck_or_timeout_failure
+ runner_system_failure
+ missing_dependency_failure
+ runner_unsupported
+ stale_schedule
+ job_execution_timeout
+ archived_failure
+ unmet_prerequisites
+ scheduler_failure
+ data_integrity_failure
].freeze
RETRY_WHEN_IN_DOCUMENTATION.each do |reason|
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index 95a5b8e88fb..cf0a3cfa963 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -130,7 +130,8 @@ describe Gitlab::Ci::Config::Entry::Root do
variables: {},
ignore: false,
after_script: ['make clean'],
- only: { refs: %w[branches tags] } }
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage }
)
expect(root.jobs_value[:spinach]).to eq(
{ name: :spinach,
@@ -143,7 +144,8 @@ describe Gitlab::Ci::Config::Entry::Root do
variables: {},
ignore: false,
after_script: ['make clean'],
- only: { refs: %w[branches tags] } }
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage }
)
expect(root.jobs_value[:release]).to eq(
{ name: :release,
@@ -157,7 +159,8 @@ describe Gitlab::Ci::Config::Entry::Root do
only: { refs: %w(branches tags) },
variables: {},
after_script: [],
- ignore: false }
+ ignore: false,
+ scheduling_type: :stage }
)
end
end
@@ -203,7 +206,8 @@ describe Gitlab::Ci::Config::Entry::Root do
variables: {},
ignore: false,
after_script: ['make clean'],
- only: { refs: %w[branches tags] } },
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage },
spinach: { name: :spinach,
before_script: [],
script: %w[spinach],
@@ -214,7 +218,8 @@ describe Gitlab::Ci::Config::Entry::Root do
variables: { 'VAR' => 'AA' },
ignore: false,
after_script: ['make clean'],
- only: { refs: %w[branches tags] } }
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage }
)
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
index 216f5d0c77d..20db5f02fc7 100644
--- a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
@@ -27,8 +27,14 @@ describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.to be_valid }
end
+ context 'with an allow_failure: value but no clauses' do
+ let(:config) { { allow_failure: true } }
+
+ it { is_expected.to be_valid }
+ end
+
context 'when specifying an if: clause' do
- let(:config) { { if: '$THIS || $THAT', when: 'manual' } }
+ let(:config) { { if: '$THIS || $THAT', when: 'manual', allow_failure: true } }
it { is_expected.to be_valid }
@@ -37,6 +43,12 @@ describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.to eq('manual') }
end
+
+ describe '#allow_failure' do
+ subject { entry.allow_failure }
+
+ it { is_expected.to eq(true) }
+ end
end
context 'using a list of multiple expressions' do
@@ -328,16 +340,43 @@ describe Gitlab::Ci::Config::Entry::Rules::Rule do
end
end
end
+
+ context 'allow_failure: validation' do
+ context 'with an invalid string allow_failure:' do
+ let(:config) do
+ { if: '$THIS == "that"', allow_failure: 'always' }
+ end
+
+ it { is_expected.to be_a(described_class) }
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error about invalid allow_failure:' do
+ expect(subject.errors).to include(/rule allow failure should be a boolean value/)
+ end
+
+ context 'when composed' do
+ before do
+ subject.compose!
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error about invalid allow_failure:' do
+ expect(subject.errors).to include(/rule allow failure should be a boolean value/)
+ end
+ end
+ end
+ end
end
describe '#value' do
subject { entry.value }
context 'when specifying an if: clause' do
- let(:config) { { if: '$THIS || $THAT', when: 'manual' } }
+ let(:config) { { if: '$THIS || $THAT', when: 'manual', allow_failure: true } }
it 'stores the expression as "if"' do
- expect(subject).to eq(if: '$THIS || $THAT', when: 'manual')
+ expect(subject).to eq(if: '$THIS || $THAT', when: 'manual', allow_failure: true)
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/trigger_spec.rb b/spec/lib/gitlab/ci/config/entry/trigger_spec.rb
new file mode 100644
index 00000000000..752c3f59a95
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/trigger_spec.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::Entry::Trigger do
+ subject { described_class.new(config) }
+
+ context 'when trigger config is a non-empty string' do
+ let(:config) { 'some/project' }
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ it 'returns a trigger configuration hash' do
+ expect(subject.value).to eq(project: 'some/project')
+ end
+ end
+ end
+
+ context 'when trigger config is an empty string' do
+ let(:config) { '' }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns an error about an empty config' do
+ expect(subject.errors.first)
+ .to match /config can't be blank/
+ end
+ end
+ end
+
+ context 'when trigger is a hash' do
+ context 'when branch is provided' do
+ let(:config) { { project: 'some/project', branch: 'feature' } }
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ it 'returns a trigger configuration hash' do
+ expect(subject.value)
+ .to eq(project: 'some/project', branch: 'feature')
+ end
+ end
+ end
+
+ context 'when strategy is provided' do
+ context 'when strategy is depend' do
+ let(:config) { { project: 'some/project', strategy: 'depend' } }
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ it 'returns a trigger configuration hash' do
+ expect(subject.value)
+ .to eq(project: 'some/project', strategy: 'depend')
+ end
+ end
+ end
+
+ context 'when strategy is invalid' do
+ let(:config) { { project: 'some/project', strategy: 'notdepend' } }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns an error about unknown config key' do
+ expect(subject.errors.first)
+ .to match /trigger strategy should be depend/
+ end
+ end
+ end
+ end
+
+ describe '#include' do
+ context 'with simple include' do
+ let(:config) { { include: 'path/to/config.yml' } }
+
+ it { is_expected.to be_valid }
+
+ it 'returns a trigger configuration hash' do
+ expect(subject.value).to eq(include: 'path/to/config.yml')
+ end
+ end
+
+ context 'with project' do
+ let(:config) { { project: 'some/project', include: 'path/to/config.yml' } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error' do
+ expect(subject.errors.first)
+ .to match /config contains unknown keys: project/
+ end
+ end
+
+ context 'with branch' do
+ let(:config) { { branch: 'feature', include: 'path/to/config.yml' } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error' do
+ expect(subject.errors.first)
+ .to match /config contains unknown keys: branch/
+ end
+ end
+
+ context 'when feature flag is off' do
+ before do
+ stub_feature_flags(ci_parent_child_pipeline: false)
+ end
+
+ let(:config) { { include: 'path/to/config.yml' } }
+
+ it 'returns an error if include is used' do
+ expect(subject.errors.first)
+ .to match /config must specify project/
+ end
+ end
+ end
+
+ context 'when config contains unknown keys' do
+ let(:config) { { project: 'some/project', unknown: 123 } }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns an error about unknown config key' do
+ expect(subject.errors.first)
+ .to match /config contains unknown keys: unknown/
+ end
+ end
+ end
+ end
+
+ context 'when trigger configuration is not valid' do
+ context 'when branch is not provided' do
+ let(:config) { 123 }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns an error message' do
+ expect(subject.errors.first)
+ .to match /has to be either a string or a hash/
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
index 6a7fe7a5927..b91cf1dd3ed 100644
--- a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
@@ -99,7 +99,7 @@ describe Gitlab::Ci::Parsers::Test::Junit do
let(:testcase_content) { '<error>Some error</error>' }
it_behaves_like '<testcase> XML parser',
- ::Gitlab::Ci::Reports::TestCase::STATUS_FAILED,
+ ::Gitlab::Ci::Reports::TestCase::STATUS_ERROR,
'Some error'
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 5526ec9e16f..1f5fc000832 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -6,7 +6,7 @@ describe Gitlab::Ci::Pipeline::Seed::Build do
let(:project) { create(:project, :repository) }
let(:head_sha) { project.repository.head_commit.id }
let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: head_sha) }
- let(:attributes) { { name: 'rspec', ref: 'master' } }
+ let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage } }
let(:previous_stages) { [] }
let(:seed_build) { described_class.new(pipeline, attributes, previous_stages) }
@@ -244,7 +244,9 @@ describe Gitlab::Ci::Pipeline::Seed::Build do
context 'when job is a bridge' do
let(:attributes) do
- { name: 'rspec', ref: 'master', options: { trigger: 'my/project' } }
+ {
+ name: 'rspec', ref: 'master', options: { trigger: 'my/project' }, scheduling_type: :stage
+ }
end
it { is_expected.to be_a(::Ci::Bridge) }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
index 90f4b06cea0..c5c91135f60 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
@@ -33,13 +33,18 @@ describe Gitlab::Ci::Pipeline::Seed::Deployment do
expect(subject.iid).to be_present
expect(subject.environment.name).to eq('production')
expect(subject.cluster).to be_nil
+ expect(subject.deployment_cluster).to be_nil
end
context 'when environment has deployment platform' do
let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
- it 'returns a deployment with cluster id' do
- expect(subject.cluster).to eq(cluster)
+ it 'sets the cluster and deployment_cluster' do
+ expect(subject.cluster).to eq(cluster) # until we stop double writing in 12.9: https://gitlab.com/gitlab-org/gitlab/issues/202628
+ expect(subject.deployment_cluster).to have_attributes(
+ cluster_id: cluster.id,
+ kubernetes_namespace: subject.environment.deployment_namespace
+ )
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb
index a978084876f..875fd457bd0 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb
@@ -10,9 +10,9 @@ describe Gitlab::Ci::Pipeline::Seed::Stage do
let(:attributes) do
{ name: 'test',
index: 0,
- builds: [{ name: 'rspec' },
- { name: 'spinach' },
- { name: 'deploy', only: { refs: ['feature'] } }] }
+ builds: [{ name: 'rspec', scheduling_type: :stage },
+ { name: 'spinach', scheduling_type: :stage },
+ { name: 'deploy', only: { refs: ['feature'] } }], scheduling_type: :stage }
end
subject do
diff --git a/spec/lib/gitlab/ci/reports/test_reports_comparer_spec.rb b/spec/lib/gitlab/ci/reports/test_reports_comparer_spec.rb
index 48eef0643b2..d731afe1fff 100644
--- a/spec/lib/gitlab/ci/reports/test_reports_comparer_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_reports_comparer_spec.rb
@@ -57,6 +57,17 @@ describe Gitlab::Ci::Reports::TestReportsComparer do
is_expected.to eq(Gitlab::Ci::Reports::TestCase::STATUS_FAILED)
end
end
+
+ context 'when there is an error test case in head suites' do
+ before do
+ head_reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
+ head_reports.get_suite('junit').add_test_case(create_test_case_java_error)
+ end
+
+ it 'returns the total status in head suite' do
+ is_expected.to eq(Gitlab::Ci::Reports::TestCase::STATUS_FAILED)
+ end
+ end
end
describe '#total_count' do
@@ -75,7 +86,7 @@ describe Gitlab::Ci::Reports::TestReportsComparer do
describe '#resolved_count' do
subject { comparer.resolved_count }
- context 'when there is a resolved test case in head suites' do
+ context 'when there is a resolved failure test case in head suites' do
before do
base_reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
base_reports.get_suite('junit').add_test_case(create_test_case_java_failed)
@@ -88,6 +99,19 @@ describe Gitlab::Ci::Reports::TestReportsComparer do
end
end
+ context 'when there is a resolved error test case in head suites' do
+ before do
+ base_reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
+ base_reports.get_suite('junit').add_test_case(create_test_case_java_error)
+ head_reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
+ head_reports.get_suite('junit').add_test_case(create_test_case_java_success)
+ end
+
+ it 'returns the correct count' do
+ is_expected.to eq(1)
+ end
+ end
+
context 'when there are no resolved test cases in head suites' do
before do
base_reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
@@ -127,4 +151,30 @@ describe Gitlab::Ci::Reports::TestReportsComparer do
end
end
end
+
+ describe '#error_count' do
+ subject { comparer.error_count }
+
+ context 'when there is an error test case in head suites' do
+ before do
+ head_reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
+ head_reports.get_suite('junit').add_test_case(create_test_case_java_error)
+ end
+
+ it 'returns the correct count' do
+ is_expected.to eq(1)
+ end
+ end
+
+ context 'when there are no error test cases in head suites' do
+ before do
+ head_reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
+ head_reports.get_suite('junit').add_test_case(create_test_case_rspec_success)
+ end
+
+ it 'returns the correct count' do
+ is_expected.to eq(0)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/reports/test_suite_comparer_spec.rb b/spec/lib/gitlab/ci/reports/test_suite_comparer_spec.rb
index cf4690bb334..2d2179a690b 100644
--- a/spec/lib/gitlab/ci/reports/test_suite_comparer_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_suite_comparer_spec.rb
@@ -9,8 +9,9 @@ describe Gitlab::Ci::Reports::TestSuiteComparer do
let(:name) { 'rpsec' }
let(:base_suite) { Gitlab::Ci::Reports::TestSuite.new(name) }
let(:head_suite) { Gitlab::Ci::Reports::TestSuite.new(name) }
- let(:test_case_success) { create_test_case_rspec_success }
- let(:test_case_failed) { create_test_case_rspec_failed }
+ let(:test_case_success) { create_test_case_java_success }
+ let(:test_case_failed) { create_test_case_java_failed }
+ let(:test_case_error) { create_test_case_java_error }
describe '#new_failures' do
subject { comparer.new_failures }
@@ -135,6 +136,129 @@ describe Gitlab::Ci::Reports::TestSuiteComparer do
end
end
+ describe '#new_errors' do
+ subject { comparer.new_errors }
+
+ context 'when head suite has a new error test case which does not exist in base' do
+ before do
+ base_suite.add_test_case(test_case_success)
+ head_suite.add_test_case(test_case_error)
+ end
+
+ it 'returns the error test case' do
+ is_expected.to eq([test_case_error])
+ end
+ end
+
+ context 'when head suite still has an error test case which errored in base' do
+ before do
+ base_suite.add_test_case(test_case_error)
+ head_suite.add_test_case(test_case_error)
+ end
+
+ it 'does not return the error test case' do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'when head suite has a success test case which errored in base' do
+ before do
+ base_suite.add_test_case(test_case_error)
+ head_suite.add_test_case(test_case_success)
+ end
+
+ it 'does not return the error test case' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ describe '#existing_errors' do
+ subject { comparer.existing_errors }
+
+ context 'when head suite has a new error test case which does not exist in base' do
+ before do
+ base_suite.add_test_case(test_case_success)
+ head_suite.add_test_case(test_case_error)
+ end
+
+ it 'does not return the error test case' do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'when head suite still has an error test case which errored in base' do
+ before do
+ base_suite.add_test_case(test_case_error)
+ head_suite.add_test_case(test_case_error)
+ end
+
+ it 'returns the error test case' do
+ is_expected.to eq([test_case_error])
+ end
+ end
+
+ context 'when head suite has a success test case which errored in base' do
+ before do
+ base_suite.add_test_case(test_case_error)
+ head_suite.add_test_case(test_case_success)
+ end
+
+ it 'does not return the error test case' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ describe '#resolved_errors' do
+ subject { comparer.resolved_errors }
+
+ context 'when head suite has a new error test case which does not exist in base' do
+ before do
+ base_suite.add_test_case(test_case_success)
+ head_suite.add_test_case(test_case_error)
+ end
+
+ it 'does not return the error test case' do
+ is_expected.to be_empty
+ end
+
+ it 'returns the correct resolved count' do
+ expect(comparer.resolved_count).to eq(0)
+ end
+ end
+
+ context 'when head suite still has an error test case which errored in base' do
+ before do
+ base_suite.add_test_case(test_case_error)
+ head_suite.add_test_case(test_case_error)
+ end
+
+ it 'does not return the error test case' do
+ is_expected.to be_empty
+ end
+
+ it 'returns the correct resolved count' do
+ expect(comparer.resolved_count).to eq(0)
+ end
+ end
+
+ context 'when head suite has a success test case which errored in base' do
+ before do
+ base_suite.add_test_case(test_case_error)
+ head_suite.add_test_case(test_case_success)
+ end
+
+ it 'returns the resolved test case' do
+ is_expected.to eq([test_case_success])
+ end
+
+ it 'returns the correct resolved count' do
+ expect(comparer.resolved_count).to eq(1)
+ end
+ end
+ end
+
describe '#total_count' do
subject { comparer.total_count }
@@ -208,7 +332,17 @@ describe Gitlab::Ci::Reports::TestSuiteComparer do
head_suite.add_test_case(test_case_failed)
end
- it 'returns the total status in head suite' do
+ it 'returns the total status in head suite as failed' do
+ is_expected.to eq(Gitlab::Ci::Reports::TestCase::STATUS_FAILED)
+ end
+ end
+
+ context 'when there is an error test case in head suite' do
+ before do
+ head_suite.add_test_case(test_case_error)
+ end
+
+ it 'returns the total status in head suite as failed' do
is_expected.to eq(Gitlab::Ci::Reports::TestCase::STATUS_FAILED)
end
end
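
The new suite-level describes pin down three buckets: a head error with no corresponding base error is a new error, an error present on both sides is an existing error, and a base error that now passes is a resolved error (and bumps resolved_count). A standalone illustration over plain status hashes, purely for orientation (the real comparer works on TestCase objects keyed the same way):

    # Purely illustrative classification of head cases against their base counterparts.
    base = { 'spec_a' => 'success', 'spec_b' => 'error' }
    head = { 'spec_a' => 'error',   'spec_b' => 'success' }

    new_errors      = head.select { |key, status| status == 'error'   && base[key] != 'error' }.keys  # => ["spec_a"]
    existing_errors = head.select { |key, status| status == 'error'   && base[key] == 'error' }.keys  # => []
    resolved_errors = head.select { |key, status| status == 'success' && base[key] == 'error' }.keys  # => ["spec_b"]
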
diff --git a/spec/lib/gitlab/ci/reports/test_suite_spec.rb b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
index 8646db43bc8..217713fd899 100644
--- a/spec/lib/gitlab/ci/reports/test_suite_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
@@ -74,6 +74,15 @@ describe Gitlab::Ci::Reports::TestSuite do
it { is_expected.to eq(Gitlab::Ci::Reports::TestCase::STATUS_FAILED) }
end
+
+ context 'when a test case errored' do
+ before do
+ test_suite.add_test_case(test_case_success)
+ test_suite.add_test_case(test_case_error)
+ end
+
+ it { is_expected.to eq(Gitlab::Ci::Reports::TestCase::STATUS_FAILED) }
+ end
end
Gitlab::Ci::Reports::TestCase::STATUS_TYPES.each do |status_type|
diff --git a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
index 795e8e51276..1b034656e7d 100644
--- a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
+++ b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
@@ -12,7 +12,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
stub_feature_flags(ci_enable_live_trace: true)
end
- context "#initialize" do
+ describe "#initialize" do
context 'when a chunk exists' do
before do
build.trace.set('ABC')
@@ -35,7 +35,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
end
end
- context "#seek" do
+ describe "#seek" do
subject { chunked_io.seek(pos, where) }
before do
@@ -66,7 +66,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
end
end
- context "#eof?" do
+ describe "#eof?" do
subject { chunked_io.eof? }
before do
@@ -90,7 +90,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
end
end
- context "#each_line" do
+ describe "#each_line" do
let(:string_io) { StringIO.new(sample_trace_raw) }
context 'when buffer size is smaller than file size' do
@@ -134,7 +134,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
end
end
- context "#read" do
+ describe "#read" do
subject { chunked_io.read(length) }
context 'when read the whole size' do
@@ -254,7 +254,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
end
end
- context "#readline" do
+ describe "#readline" do
subject { chunked_io.readline }
let(:string_io) { StringIO.new(sample_trace_raw) }
@@ -334,7 +334,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
end
end
- context "#write" do
+ describe "#write" do
subject { chunked_io.write(data) }
let(:data) { sample_trace_raw }
@@ -399,7 +399,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
end
end
- context "#truncate" do
+ describe "#truncate" do
let(:offset) { 10 }
context 'when data does not exist' do
@@ -432,7 +432,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
end
end
- context "#destroy!" do
+ describe "#destroy!" do
subject { chunked_io.destroy! }
before do
diff --git a/spec/lib/gitlab/ci/trace/section_parser_spec.rb b/spec/lib/gitlab/ci/trace/section_parser_spec.rb
index 6e8504a1584..24ce4d34411 100644
--- a/spec/lib/gitlab/ci/trace/section_parser_spec.rb
+++ b/spec/lib/gitlab/ci/trace/section_parser_spec.rb
@@ -74,7 +74,7 @@ describe Gitlab::Ci::Trace::SectionParser do
let(:lines) { build_lines(trace) }
it 'must handle byte positioning correctly' do
- expect(subject).to receive(:find_next_marker).exactly(2).times.and_call_original
+ expect(subject).to receive(:find_next_marker).twice.and_call_original
subject.parse!
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 11168a969fc..e5c5aaa2265 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -36,7 +36,8 @@ module Gitlab
interruptible: true,
allow_failure: false,
when: "on_success",
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :stage
})
end
end
@@ -66,7 +67,8 @@ module Gitlab
],
allow_failure: false,
when: 'on_success',
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :stage
})
end
end
@@ -126,7 +128,8 @@ module Gitlab
interruptible: true,
allow_failure: false,
when: "on_success",
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :stage
})
end
end
@@ -282,6 +285,7 @@ module Gitlab
allow_failure: false,
when: "on_success",
yaml_variables: [],
+ scheduling_type: :stage,
options: { script: ["rspec"] },
only: { refs: ["branches"] } }] },
{ name: "deploy",
@@ -293,6 +297,7 @@ module Gitlab
allow_failure: false,
when: "on_success",
yaml_variables: [],
+ scheduling_type: :stage,
options: { script: ["cap prod"] },
only: { refs: ["tags"] } }] },
{ name: ".post",
@@ -642,7 +647,8 @@ module Gitlab
},
allow_failure: false,
when: "on_success",
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :stage
})
end
@@ -674,7 +680,8 @@ module Gitlab
},
allow_failure: false,
when: "on_success",
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :stage
})
end
end
@@ -702,7 +709,8 @@ module Gitlab
},
allow_failure: false,
when: "on_success",
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :stage
})
end
@@ -728,7 +736,8 @@ module Gitlab
},
allow_failure: false,
when: "on_success",
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :stage
})
end
end
@@ -1250,7 +1259,8 @@ module Gitlab
},
when: "on_success",
allow_failure: false,
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :stage
})
end
@@ -1604,7 +1614,8 @@ module Gitlab
},
when: "on_success",
allow_failure: false,
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :stage
)
expect(subject.builds[4]).to eq(
stage: "test",
@@ -1618,7 +1629,8 @@ module Gitlab
],
when: "on_success",
allow_failure: false,
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :dag
)
end
end
@@ -1644,7 +1656,8 @@ module Gitlab
},
when: "on_success",
allow_failure: false,
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :stage
)
expect(subject.builds[4]).to eq(
stage: "test",
@@ -1660,7 +1673,8 @@ module Gitlab
],
when: "on_success",
allow_failure: false,
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :dag
)
end
end
@@ -1682,7 +1696,8 @@ module Gitlab
],
when: "on_success",
allow_failure: false,
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :dag
)
end
end
@@ -1712,7 +1727,8 @@ module Gitlab
],
when: "on_success",
allow_failure: false,
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :dag
)
end
end
@@ -1849,7 +1865,8 @@ module Gitlab
},
when: "on_success",
allow_failure: false,
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :stage
})
end
end
@@ -1895,7 +1912,8 @@ module Gitlab
},
when: "on_success",
allow_failure: false,
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :stage
})
expect(subject.second).to eq({
stage: "build",
@@ -1907,7 +1925,8 @@ module Gitlab
},
when: "on_success",
allow_failure: false,
- yaml_variables: []
+ yaml_variables: [],
+ scheduling_type: :stage
})
end
end
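
The YAML processor hunks add a scheduling_type attribute to every build hash: jobs that declare needs: come out as :dag, while plain stage-ordered jobs come out as :stage. A minimal sketch of that distinction over a simplified job hash (hypothetical helper, not the processor's real internals):

    # Hypothetical helper: derive the scheduling type from a simplified job definition.
    def scheduling_type_for(job)
      job.key?(:needs) ? :dag : :stage
    end

    scheduling_type_for({ script: ['rspec'] })                          # => :stage
    scheduling_type_for({ script: ['rspec'], needs: ['build:linux'] })  # => :dag
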
diff --git a/spec/lib/gitlab/cleanup/project_uploads_spec.rb b/spec/lib/gitlab/cleanup/project_uploads_spec.rb
index 5787cce7d20..d1e3a73686e 100644
--- a/spec/lib/gitlab/cleanup/project_uploads_spec.rb
+++ b/spec/lib/gitlab/cleanup/project_uploads_spec.rb
@@ -8,8 +8,8 @@ describe Gitlab::Cleanup::ProjectUploads do
let(:logger) { double(:logger) }
before do
- allow(logger).to receive(:info).at_least(1).times
- allow(logger).to receive(:debug).at_least(1).times
+ allow(logger).to receive(:info).at_least(:once)
+ allow(logger).to receive(:debug).at_least(:once)
end
describe '#run!' do
diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb
index adf5a232a75..966648bcc5a 100644
--- a/spec/lib/gitlab/conflict/file_spec.rb
+++ b/spec/lib/gitlab/conflict/file_spec.rb
@@ -153,79 +153,79 @@ describe Gitlab::Conflict::File do
context 'with an example file' do
let(:raw_conflict_content) do
- <<FILE
- # Ensure there is no match line header here
- def username_regexp
- default_regexp
- end
-
-<<<<<<< files/ruby/regex.rb
-def project_name_regexp
- /\A[a-zA-Z0-9][a-zA-Z0-9_\-\. ]*\z/
-end
-
-def name_regexp
- /\A[a-zA-Z0-9_\-\. ]*\z/
-=======
-def project_name_regex
- %r{\A[a-zA-Z0-9][a-zA-Z0-9_\-\. ]*\z}
-end
-
-def name_regex
- %r{\A[a-zA-Z0-9_\-\. ]*\z}
->>>>>>> files/ruby/regex.rb
-end
-
-# Some extra lines
-# To force a match line
-# To be created
-
-def path_regexp
- default_regexp
-end
-
-<<<<<<< files/ruby/regex.rb
-def archive_formats_regexp
- /(zip|tar|7z|tar\.gz|tgz|gz|tar\.bz2|tbz|tbz2|tb2|bz2)/
-=======
-def archive_formats_regex
- %r{(zip|tar|7z|tar\.gz|tgz|gz|tar\.bz2|tbz|tbz2|tb2|bz2)}
->>>>>>> files/ruby/regex.rb
-end
-
-def git_reference_regexp
- # Valid git ref regexp, see:
- # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html
- %r{
- (?!
- (?# doesn't begins with)
- \/| (?# rule #6)
- (?# doesn't contain)
- .*(?:
- [\/.]\.| (?# rule #1,3)
- \/\/| (?# rule #6)
- @\{| (?# rule #8)
- \\ (?# rule #9)
- )
- )
- [^\000-\040\177~^:?*\[]+ (?# rule #4-5)
- (?# doesn't end with)
- (?<!\.lock) (?# rule #1)
- (?<![\/.]) (?# rule #6-7)
- }x
-end
-
-protected
-
-<<<<<<< files/ruby/regex.rb
-def default_regexp
- /\A[.?]?[a-zA-Z0-9][a-zA-Z0-9_\-\.]*(?<!\.git)\z/
-=======
-def default_regex
- %r{\A[.?]?[a-zA-Z0-9][a-zA-Z0-9_\-\.]*(?<!\.git)\z}
->>>>>>> files/ruby/regex.rb
-end
-FILE
+ <<~FILE
+ # Ensure there is no match line header here
+ def username_regexp
+ default_regexp
+ end
+
+ <<<<<<< files/ruby/regex.rb
+ def project_name_regexp
+ /\A[a-zA-Z0-9][a-zA-Z0-9_\-\. ]*\z/
+ end
+
+ def name_regexp
+ /\A[a-zA-Z0-9_\-\. ]*\z/
+ =======
+ def project_name_regex
+ %r{\A[a-zA-Z0-9][a-zA-Z0-9_\-\. ]*\z}
+ end
+
+ def name_regex
+ %r{\A[a-zA-Z0-9_\-\. ]*\z}
+ >>>>>>> files/ruby/regex.rb
+ end
+
+ # Some extra lines
+ # To force a match line
+ # To be created
+
+ def path_regexp
+ default_regexp
+ end
+
+ <<<<<<< files/ruby/regex.rb
+ def archive_formats_regexp
+ /(zip|tar|7z|tar\.gz|tgz|gz|tar\.bz2|tbz|tbz2|tb2|bz2)/
+ =======
+ def archive_formats_regex
+ %r{(zip|tar|7z|tar\.gz|tgz|gz|tar\.bz2|tbz|tbz2|tb2|bz2)}
+ >>>>>>> files/ruby/regex.rb
+ end
+
+ def git_reference_regexp
+ # Valid git ref regexp, see:
+ # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html
+ %r{
+ (?!
+ (?# doesn't begins with)
+ \/| (?# rule #6)
+ (?# doesn't contain)
+ .*(?:
+ [\/.]\.| (?# rule #1,3)
+ \/\/| (?# rule #6)
+ @\{| (?# rule #8)
+ \\ (?# rule #9)
+ )
+ )
+ [^\000-\040\177~^:?*\[]+ (?# rule #4-5)
+ (?# doesn't end with)
+ (?<!\.lock) (?# rule #1)
+ (?<![\/.]) (?# rule #6-7)
+ }x
+ end
+
+ protected
+
+ <<<<<<< files/ruby/regex.rb
+ def default_regexp
+ /\A[.?]?[a-zA-Z0-9][a-zA-Z0-9_\-\.]*(?<!\.git)\z/
+ =======
+ def default_regex
+ %r{\A[.?]?[a-zA-Z0-9][a-zA-Z0-9_\-\.]*(?<!\.git)\z}
+ >>>>>>> files/ruby/regex.rb
+ end
+ FILE
end
let(:sections) { conflict_file.sections }
@@ -258,7 +258,7 @@ FILE
describe '#as_json' do
it 'includes the blob path for the file' do
expect(conflict_file.as_json[:blob_path])
- .to eq("/#{project.full_path}/blob/#{our_commit.oid}/files/ruby/regex.rb")
+ .to eq("/#{project.full_path}/-/blob/#{our_commit.oid}/files/ruby/regex.rb")
end
it 'includes the blob icon for the file' do
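
The conflict-file fixture switches from a plain <<FILE heredoc, whose terminator must sit at column 0 and whose content keeps every leading space, to the squiggly <<~FILE form, which strips the common indentation so the fixture can be indented alongside the surrounding Ruby. The difference is a plain Ruby language feature, shown here in isolation:

    # <<- allows an indented terminator but keeps leading whitespace;
    # <<~ additionally strips the smallest common indentation.
    kept = <<-TEXT
      def hello; end
    TEXT

    stripped = <<~TEXT
      def hello; end
    TEXT

    kept      # => "      def hello; end\n"
    stripped  # => "def hello; end\n"
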
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index 1d404915617..bbbbf91bd44 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -19,7 +19,7 @@ describe Gitlab::ContentSecurityPolicy::ConfigLoader do
}
end
- context '.default_settings_hash' do
+ describe '.default_settings_hash' do
it 'returns empty defaults' do
settings = described_class.default_settings_hash
@@ -33,7 +33,7 @@ describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
end
- context '#load' do
+ describe '#load' do
subject { described_class.new(csp_config[:directives]) }
def expected_config(directive)
diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb
index eced96a4c77..adbd7eabd18 100644
--- a/spec/lib/gitlab/current_settings_spec.rb
+++ b/spec/lib/gitlab/current_settings_spec.rb
@@ -120,17 +120,13 @@ describe Gitlab::CurrentSettings do
end
context 'with pending migrations' do
+ let(:current_settings) { described_class.current_application_settings }
+
before do
- expect_any_instance_of(ActiveRecord::MigrationContext).to receive(:needs_migration?).and_return(true)
+ allow(Gitlab::Runtime).to receive(:rake?).and_return(false)
end
shared_examples 'a non-persisted ApplicationSetting object' do
- let(:current_settings) { described_class.current_application_settings }
-
- it 'returns a FakeApplicationSettings object' do
- expect(current_settings).to be_a(Gitlab::FakeApplicationSettings)
- end
-
it 'uses the default value from ApplicationSetting.defaults' do
expect(current_settings.signup_enabled).to eq(ApplicationSetting.defaults[:signup_enabled])
end
@@ -144,18 +140,16 @@ describe Gitlab::CurrentSettings do
end
end
- context 'with no ApplicationSetting DB record' do
- it_behaves_like 'a non-persisted ApplicationSetting object'
- end
-
- context 'with an existing ApplicationSetting DB record' do
- let!(:db_settings) { ApplicationSetting.build_from_defaults(home_page_url: 'http://mydomain.com').save! && ApplicationSetting.last }
- let(:current_settings) { described_class.current_application_settings }
+ context 'in a Rake task' do
+ before do
+ allow(Gitlab::Runtime).to receive(:rake?).and_return(true)
+ expect_any_instance_of(ActiveRecord::MigrationContext).to receive(:needs_migration?).and_return(true)
+ end
it_behaves_like 'a non-persisted ApplicationSetting object'
- it 'uses the value from the DB attribute if present and not overridden by an accessor' do
- expect(current_settings.home_page_url).to eq(db_settings.home_page_url)
+ it 'returns a FakeApplicationSettings object' do
+ expect(current_settings).to be_a(Gitlab::FakeApplicationSettings)
end
context 'when a new column is used before being migrated' do
@@ -168,6 +162,20 @@ describe Gitlab::CurrentSettings do
end
end
end
+
+ context 'with no ApplicationSetting DB record' do
+ it_behaves_like 'a non-persisted ApplicationSetting object'
+ end
+
+ context 'with an existing ApplicationSetting DB record' do
+ let!(:db_settings) { ApplicationSetting.build_from_defaults(home_page_url: 'http://mydomain.com').save! && ApplicationSetting.last }
+
+ it_behaves_like 'a non-persisted ApplicationSetting object'
+
+ it 'uses the value from the DB attribute if present and not overridden by an accessor' do
+ expect(current_settings.home_page_url).to eq(db_settings.home_page_url)
+ end
+ end
end
context 'when ApplicationSettings.current is present' do
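
The CurrentSettings changes narrow the fallback: a FakeApplicationSettings object is only handed out when the process is a Rake task (Gitlab::Runtime.rake?) and migrations are pending; otherwise the persisted or default ApplicationSetting is used. A rough sketch of that guard, assuming the decision reduces to these two checks (the real code also copes with a missing database and connection errors):

    # Simplified guard, not the production method body.
    def fall_back_to_fake_settings?
      Gitlab::Runtime.rake? &&
        ActiveRecord::Base.connection.migration_context.needs_migration?
    end
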
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index ae0fcf443c5..2561e763429 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -335,14 +335,14 @@ describe Gitlab::Danger::Helper do
it 'returns false when on a normal merge request' do
expect(fake_gitlab).to receive(:mr_json)
- .and_return('web_url' => 'https://gitlab.com/gitlab-org/gitlab/merge_requests/1')
+ .and_return('web_url' => 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1')
expect(helper).not_to be_security_mr
end
it 'returns true when on a security merge request' do
expect(fake_gitlab).to receive(:mr_json)
- .and_return('web_url' => 'https://gitlab.com/gitlab-org/security/gitlab/merge_requests/1')
+ .and_return('web_url' => 'https://gitlab.com/gitlab-org/security/gitlab/-/merge_requests/1')
expect(helper).to be_security_mr
end
diff --git a/spec/lib/gitlab/danger/teammate_spec.rb b/spec/lib/gitlab/danger/teammate_spec.rb
index bf6152ff3c2..570f4bd27cc 100644
--- a/spec/lib/gitlab/danger/teammate_spec.rb
+++ b/spec/lib/gitlab/danger/teammate_spec.rb
@@ -176,7 +176,7 @@ describe Gitlab::Danger::Teammate do
it 'returns true if request fails' do
expect(Gitlab::Danger::RequestHelper).to receive(:http_get_json)
- .exactly(2).times
+ .twice
.and_raise(Gitlab::Danger::RequestHelper::HTTPError.new)
expect(subject.available?).to be true
diff --git a/spec/lib/gitlab/data_builder/build_spec.rb b/spec/lib/gitlab/data_builder/build_spec.rb
index fdb855de786..da27125c9a6 100644
--- a/spec/lib/gitlab/data_builder/build_spec.rb
+++ b/spec/lib/gitlab/data_builder/build_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
describe Gitlab::DataBuilder::Build do
let(:runner) { create(:ci_runner, :instance) }
- let(:build) { create(:ci_build, :running, runner: runner) }
+ let(:user) { create(:user) }
+ let(:build) { create(:ci_build, :running, runner: runner, user: user) }
describe '.build' do
let(:data) do
@@ -22,6 +23,15 @@ describe Gitlab::DataBuilder::Build do
it { expect(data[:project_id]).to eq(build.project.id) }
it { expect(data[:project_name]).to eq(build.project.full_name) }
it { expect(data[:pipeline_id]).to eq(build.pipeline.id) }
+ it {
+ expect(data[:user]).to eq(
+ {
+ name: user.name,
+ username: user.username,
+ avatar_url: user.avatar_url(only_path: false),
+ email: user.email
+ })
+ }
it { expect(data[:commit][:id]).to eq(build.pipeline.id) }
it { expect(data[:runner][:id]).to eq(build.runner.id) }
it { expect(data[:runner][:description]).to eq(build.runner.description) }
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index 635bf56b72e..da22da8de0f 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -11,7 +11,8 @@ describe Gitlab::DataBuilder::Pipeline do
project: project,
status: 'success',
sha: project.commit.sha,
- ref: project.default_branch)
+ ref: project.default_branch,
+ user: user)
end
let!(:build) { create(:ci_build, pipeline: pipeline) }
@@ -37,6 +38,12 @@ describe Gitlab::DataBuilder::Pipeline do
expect(build_data[:allow_failure]).to eq(build.allow_failure)
expect(project_data).to eq(project.hook_attrs(backward: false))
expect(data[:merge_request]).to be_nil
+ expect(data[:user]).to eq({
+ name: user.name,
+ username: user.username,
+ avatar_url: user.avatar_url(only_path: false),
+ email: user.email
+ })
end
context 'pipeline without variables' do
@@ -77,7 +84,7 @@ describe Gitlab::DataBuilder::Pipeline do
expect(merge_request_attrs[:target_project_id]).to eq(merge_request.target_project_id)
expect(merge_request_attrs[:state]).to eq(merge_request.state)
expect(merge_request_attrs[:merge_status]).to eq(merge_request.merge_status)
- expect(merge_request_attrs[:url]).to eq("http://localhost/#{merge_request.target_project.full_path}/merge_requests/#{merge_request.iid}")
+ expect(merge_request_attrs[:url]).to eq("http://localhost/#{merge_request.target_project.full_path}/-/merge_requests/#{merge_request.iid}")
end
end
end
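
Both the build and pipeline data-builder specs now assert that the hook payload includes the triggering user. The expected shape, copied from the assertions above with illustrative values:

    # Shape of the :user entry asserted above (values are illustrative).
    user_payload = {
      name: 'Example User',
      username: 'example',
      avatar_url: 'https://gitlab.example.com/uploads/avatar.png',
      email: 'user@example.com'
    }
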
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
new file mode 100644
index 00000000000..0731791d9b0
--- /dev/null
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Database::BatchCount do
+ let(:model) { Issue }
+ let(:column) { :author_id }
+
+ let(:in_transaction) { false }
+ let(:user) { create(:user) }
+ let(:another_user) { create(:user) }
+
+ before do
+ create_list(:issue, 3, author: user)
+ create_list(:issue, 2, author: another_user)
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(in_transaction)
+ end
+
+ describe '#batch_count' do
+ it 'counts table' do
+ expect(described_class.batch_count(model)).to eq(5)
+ end
+
+ it 'counts with :id field' do
+ expect(described_class.batch_count(model, :id)).to eq(5)
+ end
+
+ it 'counts with "id" field' do
+ expect(described_class.batch_count(model, 'id')).to eq(5)
+ end
+
+ it 'counts with table.id field' do
+ expect(described_class.batch_count(model, "#{model.table_name}.id")).to eq(5)
+ end
+
+ it 'counts table with batch_size 50K' do
+ expect(described_class.batch_count(model, batch_size: 50_000)).to eq(5)
+ end
+
+ it 'will not count table with batch_size 1K' do
+ fallback = ::Gitlab::Database::BatchCounter::FALLBACK
+ expect(described_class.batch_count(model, batch_size: fallback / 2)).to eq(fallback)
+ end
+
+ it 'counts correctly with batch sizes smaller than the result' do
+ stub_const('Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE', 0)
+
+ [1, 2, 4, 5, 6].each { |i| expect(described_class.batch_count(model, batch_size: i)).to eq(5) }
+ end
+
+ context 'in a transaction' do
+ let(:in_transaction) { true }
+
+ it 'cannot count' do
+ expect do
+ described_class.batch_count(model)
+ end.to raise_error 'BatchCount can not be run inside a transaction'
+ end
+ end
+ end
+
+ describe '#batch_distinct_count' do
+ it 'counts with :id field' do
+ expect(described_class.batch_distinct_count(model, :id)).to eq(5)
+ end
+
+ it 'counts with column field' do
+ expect(described_class.batch_distinct_count(model, column)).to eq(2)
+ end
+
+ it 'counts with "id" field' do
+ expect(described_class.batch_distinct_count(model, "#{column}")).to eq(2)
+ end
+
+ it 'counts with table.column field' do
+ expect(described_class.batch_distinct_count(model, "#{model.table_name}.#{column}")).to eq(2)
+ end
+
+ it 'counts with :column field with batch_size of 50K' do
+ expect(described_class.batch_distinct_count(model, column, batch_size: 50_000)).to eq(2)
+ end
+
+ it 'will not count table with batch_size 1K' do
+ fallback = ::Gitlab::Database::BatchCounter::FALLBACK
+ expect(described_class.batch_distinct_count(model, column, batch_size: fallback / 2)).to eq(fallback)
+ end
+
+ it 'counts correctly with batch sizes smaller than the result' do
+ stub_const('Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE', 0)
+
+ [1, 2, 4, 5, 6].each { |i| expect(described_class.batch_distinct_count(model, column, batch_size: i)).to eq(2) }
+ end
+ end
+end
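
The new BatchCount spec fixes the public surface: batch_count(model, column = :id, batch_size:), batch_distinct_count, a FALLBACK value returned when the batch size is below the minimum, and a guard that refuses to run inside an open transaction. For orientation, a standalone sketch of the underlying idea, counting a relation in primary-key ranges instead of one large COUNT(*) (not GitLab's implementation):

    # Illustrative only: count a relation in id ranges to avoid a single long-running COUNT(*).
    def batched_count(relation, batch_size: 10_000)
      min = relation.minimum(:id)
      max = relation.maximum(:id)
      return 0 unless min && max

      total = 0
      (min..max).step(batch_size) do |lower|
        # Exclusive upper bound so consecutive batches never overlap.
        total += relation.where(id: lower...(lower + batch_size)).count
      end
      total
    end
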
diff --git a/spec/lib/gitlab/database/count_spec.rb b/spec/lib/gitlab/database/count_spec.rb
index 71c25f23b6b..2469ce482e7 100644
--- a/spec/lib/gitlab/database/count_spec.rb
+++ b/spec/lib/gitlab/database/count_spec.rb
@@ -10,7 +10,7 @@ describe Gitlab::Database::Count do
let(:models) { [Project, Identity] }
- context '.approximate_counts' do
+ describe '.approximate_counts' do
context 'fallbacks' do
subject { described_class.approximate_counts(models, strategies: strategies) }
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index e0b4c8ae1f7..4cf2553b90d 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -1158,7 +1158,7 @@ describe Gitlab::Database::MigrationHelpers do
end
end
- describe 'sidekiq migration helpers', :sidekiq, :redis do
+ describe 'sidekiq migration helpers', :redis do
let(:worker) do
Class.new do
include Sidekiq::Worker
@@ -1221,7 +1221,7 @@ describe Gitlab::Database::MigrationHelpers do
end
end
- describe '#bulk_queue_background_migration_jobs_by_range', :sidekiq do
+ describe '#bulk_queue_background_migration_jobs_by_range' do
context 'when the model has an ID column' do
let!(:id1) { create(:user).id }
let!(:id2) { create(:user).id }
@@ -1293,7 +1293,7 @@ describe Gitlab::Database::MigrationHelpers do
end
end
- describe '#queue_background_migration_jobs_by_range_at_intervals', :sidekiq do
+ describe '#queue_background_migration_jobs_by_range_at_intervals' do
context 'when the model has an ID column' do
let!(:id1) { create(:user).id }
let!(:id2) { create(:user).id }
@@ -1518,4 +1518,379 @@ describe Gitlab::Database::MigrationHelpers do
model.create_or_update_plan_limit('project_hooks', 'free', 10)
end
end
+
+ describe '#with_lock_retries' do
+ let(:buffer) { StringIO.new }
+ let(:in_memory_logger) { Gitlab::JsonLogger.new(buffer) }
+ let(:env) { { 'DISABLE_LOCK_RETRIES' => 'true' } }
+
+ it 'sets the migration class name in the logs' do
+ model.with_lock_retries(env: env, logger: in_memory_logger) { }
+
+ buffer.rewind
+ expect(buffer.read).to include("\"class\":\"#{model.class}\"")
+ end
+ end
+
+ describe '#backfill_iids' do
+ include MigrationsHelpers
+
+ class self::Issue < ActiveRecord::Base
+ include AtomicInternalId
+
+ self.table_name = 'issues'
+ self.inheritance_column = :_type_disabled
+
+ belongs_to :project, class_name: "::Project"
+
+ has_internal_id :iid,
+ scope: :project,
+ init: ->(s) { s&.project&.issues&.maximum(:iid) },
+ backfill: true,
+ presence: false
+ end
+
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:issues) { table(:issues) }
+
+ def setup
+ namespace = namespaces.create!(name: 'foo', path: 'foo')
+ project = projects.create!(namespace_id: namespace.id)
+
+ project
+ end
+
+ it 'generates iids properly for models created after the migration' do
+ project = setup
+
+ model.backfill_iids('issues')
+
+ issue = self.class::Issue.create!(project_id: project.id)
+
+ expect(issue.iid).to eq(1)
+ end
+
+ it 'generates iids properly for models created after the migration when iids are backfilled' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id)
+
+ model.backfill_iids('issues')
+
+ issue_b = self.class::Issue.create!(project_id: project.id)
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.iid).to eq(2)
+ end
+
+ it 'generates iids properly for models created after the migration across multiple projects' do
+ project_a = setup
+ project_b = setup
+ issues.create!(project_id: project_a.id)
+ issues.create!(project_id: project_b.id)
+ issues.create!(project_id: project_b.id)
+
+ model.backfill_iids('issues')
+
+ issue_a = self.class::Issue.create!(project_id: project_a.id)
+ issue_b = self.class::Issue.create!(project_id: project_b.id)
+
+ expect(issue_a.iid).to eq(2)
+ expect(issue_b.iid).to eq(3)
+ end
+
+ context 'when the new code creates a row post deploy but before the migration runs' do
+ it 'does not change the row iid' do
+ project = setup
+ issue = self.class::Issue.create!(project_id: project.id)
+
+ model.backfill_iids('issues')
+
+ expect(issue.reload.iid).to eq(1)
+ end
+
+ it 'backfills iids for rows already in the database' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id)
+ issue_b = issues.create!(project_id: project.id)
+ issue_c = self.class::Issue.create!(project_id: project.id)
+
+ model.backfill_iids('issues')
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(2)
+ expect(issue_c.reload.iid).to eq(3)
+ end
+
+ it 'backfills iids across multiple projects' do
+ project_a = setup
+ project_b = setup
+ issue_a = issues.create!(project_id: project_a.id)
+ issue_b = issues.create!(project_id: project_b.id)
+ issue_c = self.class::Issue.create!(project_id: project_a.id)
+ issue_d = self.class::Issue.create!(project_id: project_b.id)
+
+ model.backfill_iids('issues')
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(1)
+ expect(issue_c.reload.iid).to eq(2)
+ expect(issue_d.reload.iid).to eq(2)
+ end
+
+ it 'generates iids properly for models created after the migration' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id)
+ issue_b = issues.create!(project_id: project.id)
+ issue_c = self.class::Issue.create!(project_id: project.id)
+
+ model.backfill_iids('issues')
+
+ issue_d = self.class::Issue.create!(project_id: project.id)
+ issue_e = self.class::Issue.create!(project_id: project.id)
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(2)
+ expect(issue_c.reload.iid).to eq(3)
+ expect(issue_d.iid).to eq(4)
+ expect(issue_e.iid).to eq(5)
+ end
+
+ it 'backfills iids and properly generates iids for new models across multiple projects' do
+ project_a = setup
+ project_b = setup
+ issue_a = issues.create!(project_id: project_a.id)
+ issue_b = issues.create!(project_id: project_b.id)
+ issue_c = self.class::Issue.create!(project_id: project_a.id)
+ issue_d = self.class::Issue.create!(project_id: project_b.id)
+
+ model.backfill_iids('issues')
+
+ issue_e = self.class::Issue.create!(project_id: project_a.id)
+ issue_f = self.class::Issue.create!(project_id: project_b.id)
+ issue_g = self.class::Issue.create!(project_id: project_a.id)
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(1)
+ expect(issue_c.reload.iid).to eq(2)
+ expect(issue_d.reload.iid).to eq(2)
+ expect(issue_e.iid).to eq(3)
+ expect(issue_f.iid).to eq(3)
+ expect(issue_g.iid).to eq(4)
+ end
+ end
+
+ context 'when the new code creates a model and then old code creates a model post deploy but before the migration runs' do
+ it 'backfills iids' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id)
+ issue_b = self.class::Issue.create!(project_id: project.id)
+ issue_c = issues.create!(project_id: project.id)
+
+ model.backfill_iids('issues')
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(2)
+ expect(issue_c.reload.iid).to eq(3)
+ end
+
+ it 'generates an iid for a new model after the migration' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id)
+ issue_b = issues.create!(project_id: project.id)
+ issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_d = issues.create!(project_id: project.id)
+
+ model.backfill_iids('issues')
+
+ issue_e = self.class::Issue.create!(project_id: project.id)
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(2)
+ expect(issue_c.reload.iid).to eq(3)
+ expect(issue_d.reload.iid).to eq(4)
+ expect(issue_e.iid).to eq(5)
+ end
+ end
+
+ context 'when the new code and old code alternate creating models post deploy but before the migration runs' do
+ it 'backfills iids' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id)
+ issue_b = self.class::Issue.create!(project_id: project.id)
+ issue_c = issues.create!(project_id: project.id)
+ issue_d = self.class::Issue.create!(project_id: project.id)
+
+ model.backfill_iids('issues')
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(2)
+ expect(issue_c.reload.iid).to eq(3)
+ expect(issue_d.reload.iid).to eq(4)
+ end
+
+ it 'generates an iid for a new model after the migration' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id)
+ issue_b = issues.create!(project_id: project.id)
+ issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_d = issues.create!(project_id: project.id)
+ issue_e = self.class::Issue.create!(project_id: project.id)
+
+ model.backfill_iids('issues')
+
+ issue_f = self.class::Issue.create!(project_id: project.id)
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(2)
+ expect(issue_c.reload.iid).to eq(3)
+ expect(issue_d.reload.iid).to eq(4)
+ expect(issue_e.reload.iid).to eq(5)
+ expect(issue_f.iid).to eq(6)
+ end
+ end
+
+ context 'when the new code creates and deletes a model post deploy but before the migration runs' do
+ it 'backfills iids for rows already in the database' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id)
+ issue_b = issues.create!(project_id: project.id)
+ issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c.delete
+
+ model.backfill_iids('issues')
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(2)
+ end
+
+ it 'successfully creates a new model after the migration' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id)
+ issue_b = issues.create!(project_id: project.id)
+ issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c.delete
+
+ model.backfill_iids('issues')
+
+ issue_d = self.class::Issue.create!(project_id: project.id)
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(2)
+ expect(issue_d.iid).to eq(3)
+ end
+ end
+
+ context 'when the new code creates and deletes a model and old code creates a model post deploy but before the migration runs' do
+ it 'backfills iids' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id)
+ issue_b = issues.create!(project_id: project.id)
+ issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c.delete
+ issue_d = issues.create!(project_id: project.id)
+
+ model.backfill_iids('issues')
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(2)
+ expect(issue_d.reload.iid).to eq(3)
+ end
+
+ it 'successfully creates a new model after the migration' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id)
+ issue_b = issues.create!(project_id: project.id)
+ issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c.delete
+ issue_d = issues.create!(project_id: project.id)
+
+ model.backfill_iids('issues')
+
+ issue_e = self.class::Issue.create!(project_id: project.id)
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(2)
+ expect(issue_d.reload.iid).to eq(3)
+ expect(issue_e.iid).to eq(4)
+ end
+ end
+
+ context 'when the new code creates and deletes a model and then creates another model post deploy but before the migration runs' do
+ it 'successfully generates an iid for a new model after the migration' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id)
+ issue_b = issues.create!(project_id: project.id)
+ issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c.delete
+ issue_d = self.class::Issue.create!(project_id: project.id)
+
+ model.backfill_iids('issues')
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(2)
+ expect(issue_d.reload.iid).to eq(3)
+ end
+
+ it 'successfully generates an iid for a new model after the migration' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id)
+ issue_b = issues.create!(project_id: project.id)
+ issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c.delete
+ issue_d = self.class::Issue.create!(project_id: project.id)
+
+ model.backfill_iids('issues')
+
+ issue_e = self.class::Issue.create!(project_id: project.id)
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(2)
+ expect(issue_d.reload.iid).to eq(3)
+ expect(issue_e.iid).to eq(4)
+ end
+ end
+
+ context 'when the first model is created for a project after the migration' do
+ it 'generates an iid' do
+ project_a = setup
+ project_b = setup
+ issue_a = issues.create!(project_id: project_a.id)
+
+ model.backfill_iids('issues')
+
+ issue_b = self.class::Issue.create!(project_id: project_b.id)
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(1)
+ end
+ end
+
+ context 'when a row already has an iid set in the database' do
+ it 'backfills iids' do
+ project = setup
+ issue_a = issues.create!(project_id: project.id, iid: 1)
+ issue_b = issues.create!(project_id: project.id, iid: 2)
+
+ model.backfill_iids('issues')
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(2)
+ end
+
+ it 'backfills for multiple projects' do
+ project_a = setup
+ project_b = setup
+ issue_a = issues.create!(project_id: project_a.id, iid: 1)
+ issue_b = issues.create!(project_id: project_b.id, iid: 1)
+ issue_c = issues.create!(project_id: project_a.id, iid: 2)
+
+ model.backfill_iids('issues')
+
+ expect(issue_a.reload.iid).to eq(1)
+ expect(issue_b.reload.iid).to eq(1)
+ expect(issue_c.reload.iid).to eq(2)
+ end
+ end
+ end
end
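
Every #backfill_iids example follows the same pattern: insert rows through the raw issues table (simulating the old code path), run the helper, then check that per-project iid sequences are contiguous both for pre-existing rows and for rows created afterwards through the AtomicInternalId model. In a real migration the helper would be called the same way; a hypothetical post-deploy migration, shown only to illustrate the call site:

    # Hypothetical migration sketch; the only call taken from the specs is backfill_iids.
    class BackfillIssueIids < ActiveRecord::Migration[5.2]
      include Gitlab::Database::MigrationHelpers

      disable_ddl_transaction!

      def up
        backfill_iids('issues')
      end

      def down
        # no-op: backfilled iids are left in place
      end
    end
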
diff --git a/spec/lib/gitlab/database/sha_attribute_spec.rb b/spec/lib/gitlab/database/sha_attribute_spec.rb
index c6fc55291f5..15695bc8069 100644
--- a/spec/lib/gitlab/database/sha_attribute_spec.rb
+++ b/spec/lib/gitlab/database/sha_attribute_spec.rb
@@ -25,7 +25,7 @@ describe Gitlab::Database::ShaAttribute do
describe '#serialize' do
it 'converts a SHA String to binary data' do
- expect(attribute.serialize(sha).to_s).to eq(binary_sha)
+ expect(described_class.serialize(sha).to_s).to eq(binary_sha)
end
end
end
diff --git a/spec/lib/gitlab/database/subquery_spec.rb b/spec/lib/gitlab/database/subquery_spec.rb
deleted file mode 100644
index 70380e02f16..00000000000
--- a/spec/lib/gitlab/database/subquery_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::Subquery do
- describe '.self_join' do
- set(:project) { create(:project) }
-
- it 'allows you to delete_all rows with WHERE and LIMIT' do
- events = create_list(:event, 8, project: project)
-
- expect do
- described_class.self_join(Event.where('id < ?', events[5]).recent.limit(2)).delete_all
- end.to change { Event.count }.by(-2)
- end
- end
-end
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
new file mode 100644
index 00000000000..b6321f2eab1
--- /dev/null
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -0,0 +1,150 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Database::WithLockRetries do
+ let(:env) { {} }
+ let(:logger) { Gitlab::Database::WithLockRetries::NULL_LOGGER }
+ let(:subject) { described_class.new(env: env, logger: logger, timing_configuration: timing_configuration) }
+
+ let(:timing_configuration) do
+ [
+ [1.second, 1.second],
+ [1.second, 1.second],
+ [1.second, 1.second],
+ [1.second, 1.second],
+ [1.second, 1.second]
+ ]
+ end
+
+ describe '#run' do
+ it 'requires block' do
+ expect { subject.run }.to raise_error(StandardError, 'no block given')
+ end
+
+ context 'when DISABLE_LOCK_RETRIES is set' do
+ let(:env) { { 'DISABLE_LOCK_RETRIES' => 'true' } }
+
+ it 'executes the passed block without retrying' do
+ object = double
+
+ expect(object).to receive(:method).once
+
+ subject.run { object.method }
+ end
+ end
+
+ context 'when lock retry is enabled' do
+ class ActiveRecordSecond < ActiveRecord::Base
+ end
+
+ let(:lock_fiber) do
+ Fiber.new do
+ # Initiating a second DB connection for the lock
+ conn = ActiveRecordSecond.establish_connection(Rails.configuration.database_configuration[Rails.env]).connection
+ conn.transaction do
+ conn.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+
+ Fiber.yield
+ end
+ ActiveRecordSecond.remove_connection # force disconnect
+ end
+ end
+
+ before do
+ lock_fiber.resume # start the transaction and lock the table
+ end
+
+ context 'lock_fiber' do
+ it 'acquires lock successfully' do
+ check_exclusive_lock_query = """
+ SELECT 1
+ FROM pg_locks l
+ JOIN pg_class t ON l.relation = t.oid
+ WHERE t.relkind = 'r' AND l.mode = 'ExclusiveLock' AND t.relname = '#{Project.table_name}'
+ """
+
+ expect(ActiveRecord::Base.connection.execute(check_exclusive_lock_query).to_a).to be_present
+ end
+ end
+
+ shared_examples 'retriable exclusive lock on `projects`' do
+ it 'succeeds executing the given block' do
+ lock_attempts = 0
+ lock_acquired = false
+
+ expect_any_instance_of(Gitlab::Database::WithLockRetries).to receive(:sleep).exactly(retry_count - 1).times # we don't sleep in the last iteration
+
+ allow_any_instance_of(Gitlab::Database::WithLockRetries).to receive(:run_block_with_transaction).and_wrap_original do |method|
+ lock_fiber.resume if lock_attempts == retry_count
+
+ method.call
+ end
+
+ subject.run do
+ lock_attempts += 1
+
+ if lock_attempts == retry_count # last retry iteration: resuming the fiber releases the lock, so the final try (no lock_timeout) succeeds
+ lock_fiber.resume
+ end
+
+ ActiveRecord::Base.transaction do
+ ActiveRecord::Base.connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+ lock_acquired = true
+ end
+ end
+
+ expect(lock_attempts).to eq(retry_count)
+ expect(lock_acquired).to eq(true)
+ end
+ end
+
+ context 'after 3 iterations' do
+ let(:retry_count) { 4 }
+
+ it_behaves_like 'retriable exclusive lock on `projects`'
+ end
+
+ context 'after the retries, without setting lock_timeout' do
+ let(:retry_count) { timing_configuration.size }
+
+ it_behaves_like 'retriable exclusive lock on `projects`'
+ end
+
+ context 'when statement timeout is reached' do
+ it 'raises QueryCanceled error' do
+ lock_acquired = false
+ ActiveRecord::Base.connection.execute("SET LOCAL statement_timeout='100ms'")
+
+ expect do
+ subject.run do
+ ActiveRecord::Base.connection.execute("SELECT 1 FROM pg_sleep(0.11)") # 110ms
+ lock_acquired = true
+ end
+ end.to raise_error(ActiveRecord::QueryCanceled)
+
+ expect(lock_acquired).to eq(false)
+ end
+ end
+ end
+ end
+
+ context 'casting durations correctly' do
+ let(:timing_configuration) { [[0.015.seconds, 0.025.seconds], [0.015.seconds, 0.025.seconds]] } # 15ms, 25ms
+
+ it 'executes `SET LOCAL lock_timeout` using the configured timeout value in milliseconds' do
+ expect(ActiveRecord::Base.connection).to receive(:execute).with("SAVEPOINT active_record_1").and_call_original
+ expect(ActiveRecord::Base.connection).to receive(:execute).with("SET LOCAL lock_timeout TO '15ms'").and_call_original
+ expect(ActiveRecord::Base.connection).to receive(:execute).with("RELEASE SAVEPOINT active_record_1").and_call_original
+
+ subject.run { }
+ end
+
+ it 'calls `sleep` after the first iteration fails, using the configured sleep time' do
+ expect(subject).to receive(:run_block_with_transaction).and_raise(ActiveRecord::LockWaitTimeout).twice
+ expect(subject).to receive(:sleep).with(0.025)
+
+ subject.run { }
+ end
+ end
+end
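
The new WithLockRetries spec documents the knobs: a timing_configuration of [lock_timeout, sleep_time] pairs, a DISABLE_LOCK_RETRIES environment escape hatch, and a mandatory block. A usage sketch built only from the calls exercised above:

    # Usage sketch mirroring the spec: each pair is [lock_timeout, sleep_between_retries].
    timing = [
      [0.1.seconds, 0.5.seconds],
      [0.5.seconds, 2.seconds]
    ]

    retrier = Gitlab::Database::WithLockRetries.new(
      env: {},
      logger: Gitlab::Database::WithLockRetries::NULL_LOGGER,
      timing_configuration: timing
    )

    retrier.run do
      ActiveRecord::Base.connection.execute("LOCK TABLE projects IN EXCLUSIVE MODE")
      # ... schema change that needs the exclusive lock ...
    end
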
diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
index 10efdd44f20..0b8f64e97a1 100644
--- a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
+++ b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
@@ -76,6 +76,14 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
it_behaves_like 'has prometheus service', 'http://localhost:9090'
+ it 'is idempotent' do
+ result1 = subject.execute
+ expect(result1[:status]).to eq(:success)
+
+ result2 = subject.execute
+ expect(result2[:status]).to eq(:success)
+ end
+
it "tracks successful install" do
expect(::Gitlab::Tracking).to receive(:event).twice
expect(::Gitlab::Tracking).to receive(:event).with('self_monitoring', 'project_created')
@@ -103,7 +111,7 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
end
it 'creates project with correct name and description' do
- path = 'administration/monitoring/gitlab_instance_administration_project/index'
+ path = 'administration/monitoring/gitlab_self_monitoring_project/index'
docs_path = Rails.application.routes.url_helpers.help_page_path(path)
expect(result[:status]).to eq(:success)
@@ -122,13 +130,37 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
it 'saves the project id' do
expect(result[:status]).to eq(:success)
- expect(application_setting.instance_administration_project_id).to eq(project.id)
+ expect(application_setting.self_monitoring_project_id).to eq(project.id)
+ end
+
+ it 'expires application_setting cache' do
+ expect(Gitlab::CurrentSettings).to receive(:expire_current_application_settings)
+ expect(result[:status]).to eq(:success)
+ end
+
+ it 'creates an environment for the project' do
+ expect(project.default_environment.name).to eq('production')
+ end
+
+ context 'when the environment creation fails' do
+ let(:environment) { build(:environment, name: 'production') }
+
+ it 'returns error' do
+ allow(Environment).to receive(:new).and_return(environment)
+ allow(environment).to receive(:save).and_return(false)
+
+ expect(result).to eq(
+ status: :error,
+ message: 'Could not create environment',
+ last_step: :create_environment
+ )
+ end
end
it 'returns error when saving project ID fails' do
allow(application_setting).to receive(:update).and_call_original
allow(application_setting).to receive(:update)
- .with(instance_administration_project_id: anything)
+ .with(self_monitoring_project_id: anything)
.and_return(false)
expect(result).to eq(
@@ -144,7 +176,7 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
before do
application_setting.instance_administrators_group_id = existing_group.id
- application_setting.instance_administration_project_id = existing_project.id
+ application_setting.self_monitoring_project_id = existing_project.id
end
it 'returns success' do
diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb
index 6446ab1beb4..89d816c0cf7 100644
--- a/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb
+++ b/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb
@@ -24,7 +24,7 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::DeleteService do
let(:application_setting) do
create(
:application_setting,
- instance_administration_project_id: project.id,
+ self_monitoring_project_id: project.id,
instance_administrators_group_id: group.id
)
end
@@ -38,7 +38,7 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::DeleteService do
it 'deletes project ID from application settings' do
subject.execute
- expect(application_setting.reload.instance_administration_project_id).to be_nil
+ expect(application_setting.reload.self_monitoring_project_id).to be_nil
end
it 'does not delete group' do
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 4a0eab3ea27..b99f311de29 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -394,6 +394,12 @@ describe Gitlab::Database do
expect(described_class.cached_table_exists?(:bogus_table_name)).to be_falsey
end
end
+
+ it 'returns false when database does not exist' do
+ expect(ActiveRecord::Base).to receive(:connection) { raise ActiveRecord::NoDatabaseError, 'broken' }
+
+ expect(described_class.cached_table_exists?(:projects)).to be(false)
+ end
end
describe '.exists?' do
diff --git a/spec/lib/gitlab/dependency_linker/godeps_json_linker_spec.rb b/spec/lib/gitlab/dependency_linker/godeps_json_linker_spec.rb
index 9f8542a76c9..7128689e362 100644
--- a/spec/lib/gitlab/dependency_linker/godeps_json_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/godeps_json_linker_spec.rb
@@ -75,8 +75,8 @@ describe Gitlab::DependencyLinker::GodepsJsonLinker do
end
it 'links GitLab projects' do
- expect(subject).to include(link('gitlab.com/group/project/path', 'https://gitlab.com/group/project/tree/master/path'))
- expect(subject).to include(link('gitlab.com/group/subgroup/project.git/path', 'https://gitlab.com/group/subgroup/project/tree/master/path'))
+ expect(subject).to include(link('gitlab.com/group/project/path', 'https://gitlab.com/group/project/-/tree/master/path'))
+ expect(subject).to include(link('gitlab.com/group/subgroup/project.git/path', 'https://gitlab.com/group/subgroup/project/-/tree/master/path'))
end
it 'links Golang packages' do
diff --git a/spec/lib/gitlab/diff/deprecated_highlight_cache_spec.rb b/spec/lib/gitlab/diff/deprecated_highlight_cache_spec.rb
deleted file mode 100644
index 7e46632ea77..00000000000
--- a/spec/lib/gitlab/diff/deprecated_highlight_cache_spec.rb
+++ /dev/null
@@ -1,70 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::Diff::DeprecatedHighlightCache do
- let(:merge_request) { create(:merge_request_with_diffs) }
-
- subject(:cache) { described_class.new(merge_request.diffs, backend: backend) }
-
- describe '#decorate' do
- let(:backend) { double('backend').as_null_object }
-
- # Manually creates a Diff::File object to avoid triggering the cache on
- # the FileCollection::MergeRequestDiff
- let(:diff_file) do
- diffs = merge_request.diffs
- raw_diff = diffs.diffable.raw_diffs(diffs.diff_options.merge(paths: ['CHANGELOG'])).first
- Gitlab::Diff::File.new(raw_diff,
- repository: diffs.project.repository,
- diff_refs: diffs.diff_refs,
- fallback_diff_refs: diffs.fallback_diff_refs)
- end
-
- it 'does not calculate highlighting when reading from cache' do
- cache.write_if_empty
- cache.decorate(diff_file)
-
- expect_any_instance_of(Gitlab::Diff::Highlight).not_to receive(:highlight)
-
- diff_file.highlighted_diff_lines
- end
-
- it 'assigns highlighted diff lines to the DiffFile' do
- cache.write_if_empty
- cache.decorate(diff_file)
-
- expect(diff_file.highlighted_diff_lines.size).to be > 5
- end
-
- it 'submits a single reading from the cache' do
- cache.decorate(diff_file)
- cache.decorate(diff_file)
-
- expect(backend).to have_received(:read).with(cache.key).once
- end
- end
-
- describe '#write_if_empty' do
- let(:backend) { double('backend', read: {}).as_null_object }
-
- it 'submits a single writing to the cache' do
- cache.write_if_empty
- cache.write_if_empty
-
- expect(backend).to have_received(:write).with(cache.key,
- hash_including('CHANGELOG-false-false-false'),
- expires_in: 1.week).once
- end
- end
-
- describe '#clear' do
- let(:backend) { double('backend').as_null_object }
-
- it 'clears cache' do
- cache.clear
-
- expect(backend).to have_received(:delete).with(cache.key)
- end
- end
-end
diff --git a/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb b/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb
index a4f74ddc8c2..c2b6ca4164c 100644
--- a/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb
@@ -38,21 +38,6 @@ describe Gitlab::Diff::FileCollection::MergeRequestDiff do
let(:diffable) { merge_request.merge_request_diff }
end
- context 'using Gitlab::Diff::DeprecatedHighlightCache' do
- before do
- stub_feature_flags(hset_redis_diff_caching: false)
- end
-
- it 'uses a different cache key if diff line keys change' do
- mr_diff = described_class.new(merge_request.merge_request_diff, diff_options: nil)
- key = mr_diff.cache_key
-
- stub_const('Gitlab::Diff::Line::SERIALIZE_KEYS', [:foo])
-
- expect(mr_diff.cache_key).not_to eq(key)
- end
- end
-
it_behaves_like 'diff statistics' do
let(:collection_default_args) do
{ diff_options: {} }
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index c468af4db68..61d7400b95e 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -169,18 +169,18 @@ describe Gitlab::Diff::File do
end
end
- describe '#old_blob' do
- it 'returns blob of commit of base commit' do
- old_data = diff_file.old_blob.data
+ describe '#old_blob and #new_blob' do
+ it 'returns blob of base commit and the new commit' do
+ items = [
+ [diff_file.new_content_sha, diff_file.new_path], [diff_file.old_content_sha, diff_file.old_path]
+ ]
- expect(old_data).to include('raise "System commands must be given as an array of strings"')
- end
- end
+ expect(project.repository).to receive(:blobs_at).with(items, blob_size_limit: 10.megabytes).and_call_original
- describe '#new_blob' do
- it 'returns blob of new commit' do
+ old_data = diff_file.old_blob.data
data = diff_file.new_blob.data
+ expect(old_data).to include('raise "System commands must be given as an array of strings"')
expect(data).to include('raise RuntimeError, "System commands must be given as an array of strings"')
end
end
diff --git a/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb b/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb
index 2e6eb71d37d..edf30ffc56f 100644
--- a/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb
+++ b/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb
@@ -3,20 +3,34 @@
require 'spec_helper'
describe Gitlab::Diff::Formatters::ImageFormatter do
- it_behaves_like "position formatter" do
- let(:base_attrs) do
- {
- base_sha: 123,
- start_sha: 456,
- head_sha: 789,
- old_path: 'old_image.png',
- new_path: 'new_image.png',
- position_type: 'image'
- }
- end
+ let(:base_attrs) do
+ {
+ base_sha: 123,
+ start_sha: 456,
+ head_sha: 789,
+ old_path: 'old_image.png',
+ new_path: 'new_image.png',
+ position_type: 'image'
+ }
+ end
+
+ let(:attrs) do
+ base_attrs.merge(width: 100, height: 100, x: 1, y: 2)
+ end
+
+ it_behaves_like 'position formatter'
+
+ describe '#==' do
+ subject { described_class.new(attrs) }
+
+ it { is_expected.to eq(subject) }
+
+ [:width, :height, :x, :y].each do |attr|
+ let(:other_formatter) do
+ described_class.new(attrs.merge(attr => 9))
+ end
- let(:attrs) do
- base_attrs.merge(width: 100, height: 100, x: 1, y: 2)
+ it { is_expected.not_to eq(other_formatter) }
end
end
end
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index c73ec84e332..218c393c409 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -135,16 +135,6 @@ describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
expect { cache.send(:write_to_redis_hash, diff_hash) }
.to change { Gitlab::Redis::Cache.with { |r| r.hgetall(cache_key) } }
end
-
- # Note that this spec and the code it confirms can be removed when
- # :hset_redis_diff_caching is fully launched.
- #
- it 'attempts to clear deprecated cache entries' do
- expect_any_instance_of(Gitlab::Diff::DeprecatedHighlightCache)
- .to receive(:clear).and_call_original
-
- cache.send(:write_to_redis_hash, diff_hash)
- end
end
describe '#clear' do
@@ -154,4 +144,10 @@ describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
cache.clear
end
end
+
+ describe 'metrics' do
+ it 'defines :gitlab_redis_diff_caching_memory_usage_bytes histogram' do
+ expect(described_class).to respond_to(:gitlab_redis_diff_caching_memory_usage_bytes)
+ end
+ end
end
diff --git a/spec/lib/gitlab/diff/lines_unfolder_spec.rb b/spec/lib/gitlab/diff/lines_unfolder_spec.rb
index 3134ff3d817..ebcbe1fb1a0 100644
--- a/spec/lib/gitlab/diff/lines_unfolder_spec.rb
+++ b/spec/lib/gitlab/diff/lines_unfolder_spec.rb
@@ -212,14 +212,7 @@ describe Gitlab::Diff::LinesUnfolder do
context 'position requires a middle expansion and new match lines' do
let(:position) do
- Gitlab::Diff::Position.new(base_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- start_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- head_sha: "1487062132228de836236c522fe52fed4980a46c",
- old_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- new_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- position_type: "text",
- old_line: 43,
- new_line: 40)
+ build(:text_diff_position, old_line: 43, new_line: 40)
end
context 'blob lines' do
@@ -321,14 +314,7 @@ describe Gitlab::Diff::LinesUnfolder do
context 'position requires a middle expansion and no top match line' do
let(:position) do
- Gitlab::Diff::Position.new(base_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- start_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- head_sha: "1487062132228de836236c522fe52fed4980a46c",
- old_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- new_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- position_type: "text",
- old_line: 16,
- new_line: 17)
+ build(:text_diff_position, old_line: 16, new_line: 17)
end
context 'blob lines' do
@@ -422,14 +408,7 @@ describe Gitlab::Diff::LinesUnfolder do
context 'position requires a middle expansion and no bottom match line' do
let(:position) do
- Gitlab::Diff::Position.new(base_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- start_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- head_sha: "1487062132228de836236c522fe52fed4980a46c",
- old_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- new_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- position_type: "text",
- old_line: 82,
- new_line: 79)
+ build(:text_diff_position, old_line: 82, new_line: 79)
end
context 'blob lines' do
@@ -523,14 +502,7 @@ describe Gitlab::Diff::LinesUnfolder do
context 'position requires a short top expansion' do
let(:position) do
- Gitlab::Diff::Position.new(base_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- start_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- head_sha: "1487062132228de836236c522fe52fed4980a46c",
- old_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- new_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- position_type: "text",
- old_line: 6,
- new_line: 6)
+ build(:text_diff_position, old_line: 6, new_line: 6)
end
context 'blob lines' do
@@ -621,14 +593,7 @@ describe Gitlab::Diff::LinesUnfolder do
context 'position sits between two match lines (no expansion needed)' do
let(:position) do
- Gitlab::Diff::Position.new(base_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- start_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- head_sha: "1487062132228de836236c522fe52fed4980a46c",
- old_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- new_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- position_type: "text",
- old_line: 64,
- new_line: 61)
+ build(:text_diff_position, old_line: 64, new_line: 61)
end
context 'diff lines' do
@@ -640,14 +605,7 @@ describe Gitlab::Diff::LinesUnfolder do
context 'position requires bottom expansion and new match lines' do
let(:position) do
- Gitlab::Diff::Position.new(base_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- start_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- head_sha: "1487062132228de836236c522fe52fed4980a46c",
- old_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- new_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- position_type: "text",
- old_line: 107,
- new_line: 99)
+ build(:text_diff_position, old_line: 107, new_line: 99)
end
context 'blob lines' do
@@ -744,14 +702,7 @@ describe Gitlab::Diff::LinesUnfolder do
context 'position requires bottom expansion and no new match line' do
let(:position) do
- Gitlab::Diff::Position.new(base_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- start_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19",
- head_sha: "1487062132228de836236c522fe52fed4980a46c",
- old_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- new_path: "build-aux/flatpak/org.gnome.Nautilus.json",
- position_type: "text",
- old_line: 95,
- new_line: 87)
+ build(:text_diff_position, old_line: 95, new_line: 87)
end
context 'blob lines' do
@@ -844,16 +795,7 @@ describe Gitlab::Diff::LinesUnfolder do
end
context 'positioned on an image' do
- let(:position) do
- Gitlab::Diff::Position.new(
- base_sha: '1c59dfa64afbea8c721bb09a06a9d326c952ea19',
- start_sha: '1c59dfa64afbea8c721bb09a06a9d326c952ea19',
- head_sha: '1487062132228de836236c522fe52fed4980a46c',
- old_path: 'image.jpg',
- new_path: 'image.jpg',
- position_type: 'image'
- )
- end
+ let(:position) { build(:image_diff_position) }
before do
allow(old_blob).to receive(:binary?).and_return(binary?)
diff --git a/spec/lib/gitlab/diff/position_collection_spec.rb b/spec/lib/gitlab/diff/position_collection_spec.rb
index f2a8312587c..dd8434ab10b 100644
--- a/spec/lib/gitlab/diff/position_collection_spec.rb
+++ b/spec/lib/gitlab/diff/position_collection_spec.rb
@@ -5,36 +5,17 @@ require 'spec_helper'
describe Gitlab::Diff::PositionCollection do
let(:merge_request) { build(:merge_request) }
- def build_text_position(attrs = {})
- attributes = {
- old_path: "files/ruby/popen.rb",
- new_path: "files/ruby/popen.rb",
- old_line: nil,
- new_line: 14,
- diff_refs: merge_request.diff_refs
- }.merge(attrs)
-
- Gitlab::Diff::Position.new(attributes)
+ let(:text_position) do
+ build(:text_diff_position, :added, diff_refs: diff_refs)
end
-
- def build_image_position(attrs = {})
- attributes = {
- old_path: "files/images/any_image.png",
- new_path: "files/images/any_image.png",
- width: 10,
- height: 10,
- x: 1,
- y: 1,
- diff_refs: merge_request.diff_refs,
- position_type: "image"
- }.merge(attrs)
-
- Gitlab::Diff::Position.new(attributes)
+ let(:folded_text_position) do
+ build(:text_diff_position, diff_refs: diff_refs, old_line: 1, new_line: 1)
+ end
+ let(:image_position) do
+ build(:image_diff_position, diff_refs: diff_refs)
end
- let(:text_position) { build_text_position }
- let(:folded_text_position) { build_text_position(old_line: 1, new_line: 1) }
- let(:image_position) { build_image_position }
+ let(:diff_refs) { merge_request.diff_refs }
let(:invalid_position) { 'a position' }
let(:head_sha) { merge_request.diff_head_sha }
@@ -71,7 +52,9 @@ describe Gitlab::Diff::PositionCollection do
end
describe '#concat' do
- let(:new_text_position) { build_text_position(old_line: 1, new_line: 1) }
+ let(:new_text_position) do
+ build(:text_diff_position, diff_refs: diff_refs, old_line: 1, new_line: 1)
+ end
it 'returns a Gitlab::Diff::Position' do
expect(collection.concat([new_text_position])).to be_a(described_class)
diff --git a/spec/lib/gitlab/diff/position_spec.rb b/spec/lib/gitlab/diff/position_spec.rb
index 839780b53fe..4b11ff16c38 100644
--- a/spec/lib/gitlab/diff/position_spec.rb
+++ b/spec/lib/gitlab/diff/position_spec.rb
@@ -35,6 +35,32 @@ describe Gitlab::Diff::Position do
}
end
+ describe 'factory' do
+ it 'produces a complete text position' do
+ position = build(:text_diff_position)
+
+ expect(position).to be_complete
+ expect(position).to have_attributes(position_type: 'text')
+ end
+
+ it 'produces a complete image position' do
+ position = build(:image_diff_position)
+
+ expect(position).to be_complete
+ expect(position).to have_attributes(position_type: 'image')
+ end
+
+ it 'allows the diff_refs to be passed as a single object' do
+ head_sha = Digest::SHA1.hexdigest(SecureRandom.hex)
+ base_sha = Digest::SHA1.hexdigest(SecureRandom.hex)
+ start_sha = Digest::SHA1.hexdigest(SecureRandom.hex)
+
+ refs = ::Gitlab::Diff::DiffRefs.new(base_sha: base_sha, start_sha: start_sha, head_sha: head_sha)
+
+ expect(build(:diff_position, diff_refs: refs).diff_refs).to eq(refs)
+ end
+ end
+
describe "position for an added text file" do
let(:commit) { project.commit("2ea1f3dec713d940208fb5ce4a38765ecb5d3f73") }
diff --git a/spec/lib/gitlab/diff/suggestion_diff_spec.rb b/spec/lib/gitlab/diff/suggestion_diff_spec.rb
index 5a32c2bea37..0d4fe33bc47 100644
--- a/spec/lib/gitlab/diff/suggestion_diff_spec.rb
+++ b/spec/lib/gitlab/diff/suggestion_diff_spec.rb
@@ -51,5 +51,20 @@ describe Gitlab::Diff::SuggestionDiff do
expect(diff_lines[index].to_hash).to include(expected_line)
end
end
+
+ describe 'when the suggestion is for the last line of a file' do
+ it 'returns a correct value if there is no newline at the end of the file' do
+ from_content = "One line test"
+ to_content = "Successful test!"
+ suggestion = instance_double(Suggestion, from_line: 1,
+ from_content: from_content,
+ to_content: to_content)
+
+ diff_lines = described_class.new(suggestion).diff_lines
+
+ expect(diff_lines.first.text).to eq("-One line test")
+ expect(diff_lines.last.text).to eq("+Successful test!")
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/email/attachment_uploader_spec.rb b/spec/lib/gitlab/email/attachment_uploader_spec.rb
index c69b2f1eabc..462be76a58d 100644
--- a/spec/lib/gitlab/email/attachment_uploader_spec.rb
+++ b/spec/lib/gitlab/email/attachment_uploader_spec.rb
@@ -16,5 +16,20 @@ describe Gitlab::Email::AttachmentUploader do
expect(link[:alt]).to eq("bricks")
expect(link[:url]).to include("bricks.png")
end
+
+ context 'with a signed message' do
+ let(:message_raw) { fixture_file("emails/valid_reply_signed_smime.eml") }
+
+ it 'uploads all attachments except the signature' do
+ links = described_class.new(message).execute(upload_parent: project, uploader_class: FileUploader)
+
+ expect(links).not_to include(a_hash_including(alt: 'smime.p7s'))
+
+ image_link = links.first
+ expect(image_link).not_to be_nil
+ expect(image_link[:alt]).to eq('gitlab_logo')
+ expect(image_link[:url]).to include('gitlab_logo.png')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/email/handler_spec.rb b/spec/lib/gitlab/email/handler_spec.rb
index 5229b778ccf..5014e4c22ce 100644
--- a/spec/lib/gitlab/email/handler_spec.rb
+++ b/spec/lib/gitlab/email/handler_spec.rb
@@ -3,17 +3,23 @@
require 'spec_helper'
describe Gitlab::Email::Handler do
+ let(:email) { Mail.new { body 'email' } }
+
describe '.for' do
it 'picks issue handler if there is no merge request prefix' do
- expect(described_class.for('email', 'project+key')).to be_an_instance_of(Gitlab::Email::Handler::CreateIssueHandler)
+ expect(described_class.for(email, 'project+key')).to be_an_instance_of(Gitlab::Email::Handler::CreateIssueHandler)
end
it 'picks merge request handler if there is merge request key' do
- expect(described_class.for('email', 'project+merge-request+key')).to be_an_instance_of(Gitlab::Email::Handler::CreateMergeRequestHandler)
+ expect(described_class.for(email, 'project+merge-request+key')).to be_an_instance_of(Gitlab::Email::Handler::CreateMergeRequestHandler)
end
it 'returns nil if no handler is found' do
- expect(described_class.for('email', '')).to be_nil
+ expect(described_class.for(email, '')).to be_nil
+ end
+
+ it 'returns nil if provided email is nil' do
+ expect(described_class.for(nil, '')).to be_nil
end
end
@@ -25,7 +31,7 @@ describe Gitlab::Email::Handler do
it 'picks each handler at least once' do
matched_handlers = addresses.map do |address|
- described_class.for('email', address).class
+ described_class.for(email, address).class
end
expect(matched_handlers.uniq).to match_array(ce_handlers)
@@ -34,7 +40,7 @@ describe Gitlab::Email::Handler do
it 'can pick exactly one handler for each address' do
addresses.each do |address|
matched_handlers = ce_handlers.select do |handler|
- handler.new('email', address).can_handle?
+ handler.new(email, address).can_handle?
end
expect(matched_handlers.count).to eq(1), "#{address} matches #{matched_handlers.count} handlers: #{matched_handlers}"
diff --git a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
index a65214fab61..36954252b6b 100644
--- a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
+++ b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
@@ -20,8 +20,14 @@ describe Gitlab::Email::Hook::SmimeSignatureInterceptor do
Gitlab::Email::Smime::Certificate.new(@cert[:key], @cert[:cert])
end
+ let(:mail_body) { "signed hello with Unicode €áø and\r\n newlines\r\n" }
+
let(:mail) do
- ActionMailer::Base.mail(to: 'test@example.com', from: 'info@example.com', body: 'signed hello')
+ ActionMailer::Base.mail(to: 'test@example.com',
+ from: 'info@example.com',
+ content_transfer_encoding: 'quoted-printable',
+ content_type: 'text/plain; charset=UTF-8',
+ body: mail_body)
end
before do
@@ -46,9 +52,16 @@ describe Gitlab::Email::Hook::SmimeSignatureInterceptor do
ca_cert: root_certificate.cert,
signed_data: mail.encoded)
+ # re-verify signature from a new Mail object content
+ # See https://gitlab.com/gitlab-org/gitlab/issues/197386
+ Gitlab::Email::Smime::Signer.verify_signature(
+ cert: certificate.cert,
+ ca_cert: root_certificate.cert,
+ signed_data: Mail.new(mail).encoded)
+
# envelope in a Mail object and obtain the body
decoded_mail = Mail.new(p7enc.data)
- expect(decoded_mail.body.encoded).to eq('signed hello')
+ expect(decoded_mail.body.decoded.dup.force_encoding(decoded_mail.charset)).to eq(mail_body)
end
end
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index 018219e5647..d860968ab98 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -5,6 +5,14 @@ require 'spec_helper'
describe Gitlab::Email::Receiver do
include_context :email_shared_context
+ shared_examples 'correctly finds the mail key' do
+ specify do
+ expect(Gitlab::Email::Handler).to receive(:for).with(an_instance_of(Mail::Message), 'gitlabhq/gitlabhq+auth_token').and_return(handler)
+
+ receiver.execute
+ end
+ end
+
context 'when the email contains a valid email address in a header' do
let(:handler) { double(:handler) }
@@ -27,6 +35,12 @@ describe Gitlab::Email::Receiver do
it_behaves_like 'correctly finds the mail key'
end
+
+ context 'when enclosed with angle brackets in an Envelope-To header' do
+ let(:email_raw) { fixture_file('emails/envelope_to_header_with_angle_brackets.eml') }
+
+ it_behaves_like 'correctly finds the mail key'
+ end
end
context "when we cannot find a capable handler" do
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index 08718bc92a1..6764d48d14b 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -145,6 +145,17 @@ describe Gitlab::ErrorTracking do
)
end
+ context 'with filterable parameters' do
+ let(:extra) { { test: 1, my_token: 'test' } }
+
+ it 'filters parameters' do
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(
+ hash_including({ 'extra.test' => 1, 'extra.my_token' => '[FILTERED]' }))
+
+ described_class.track_exception(exception, extra)
+ end
+ end
+
context 'the exception implements :sentry_extra_data' do
let(:extra_info) { { event: 'explosion', size: :massive } }
let(:exception) { double(message: 'bang!', sentry_extra_data: extra_info, backtrace: caller) }
diff --git a/spec/lib/gitlab/etag_caching/middleware_spec.rb b/spec/lib/gitlab/etag_caching/middleware_spec.rb
index 24df67b3058..5e9df555241 100644
--- a/spec/lib/gitlab/etag_caching/middleware_spec.rb
+++ b/spec/lib/gitlab/etag_caching/middleware_spec.rb
@@ -8,6 +8,7 @@ describe Gitlab::EtagCaching::Middleware do
let(:app_status_code) { 200 }
let(:if_none_match) { nil }
let(:enabled_path) { '/gitlab-org/gitlab-foss/noteable/issue/1/notes' }
+ let(:endpoint) { 'issue_notes' }
context 'when ETag caching is not enabled for current route' do
let(:path) { '/gitlab-org/gitlab-foss/tree/master/noteable/issue/1/notes' }
@@ -50,9 +51,9 @@ describe Gitlab::EtagCaching::Middleware do
it 'tracks "etag_caching_key_not_found" event' do
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_middleware_used, endpoint: 'issue_notes')
+ .with(:etag_caching_middleware_used, endpoint: endpoint)
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_key_not_found, endpoint: 'issue_notes')
+ .with(:etag_caching_key_not_found, endpoint: endpoint)
middleware.call(build_request(path, if_none_match))
end
@@ -74,6 +75,37 @@ describe Gitlab::EtagCaching::Middleware do
end
end
+ shared_examples 'sends a process_action.action_controller notification' do |status_code|
+ let(:expected_items) do
+ {
+ etag_route: endpoint,
+ params: {},
+ format: :html,
+ method: 'GET',
+ path: enabled_path,
+ status: status_code
+ }
+ end
+
+ it 'sends the expected payload' do
+ payload = payload_for('process_action.action_controller') do
+ middleware.call(build_request(path, if_none_match))
+ end
+
+ expect(payload).to include(expected_items)
+
+ expect(payload[:headers].env['HTTP_IF_NONE_MATCH']).to eq('W/"123"')
+ end
+
+ it 'log subscriber processes action' do
+ expect_any_instance_of(ActionController::LogSubscriber).to receive(:process_action)
+ .with(instance_of(ActiveSupport::Notifications::Event))
+ .and_call_original
+
+ middleware.call(build_request(path, if_none_match))
+ end
+ end
+
context 'when If-None-Match header matches ETag in store' do
let(:path) { enabled_path }
let(:if_none_match) { 'W/"123"' }
@@ -94,6 +126,8 @@ describe Gitlab::EtagCaching::Middleware do
expect(status).to eq 304
end
+ it_behaves_like 'sends a process_action.action_controller notification', 304
+
it 'returns empty body' do
_, _, body = middleware.call(build_request(path, if_none_match))
@@ -102,9 +136,9 @@ describe Gitlab::EtagCaching::Middleware do
it 'tracks "etag_caching_cache_hit" event' do
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_middleware_used, endpoint: 'issue_notes')
+ .with(:etag_caching_middleware_used, endpoint: endpoint)
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_cache_hit, endpoint: 'issue_notes')
+ .with(:etag_caching_cache_hit, endpoint: endpoint)
middleware.call(build_request(path, if_none_match))
end
@@ -120,6 +154,8 @@ describe Gitlab::EtagCaching::Middleware do
expect(status).to eq 429
end
+
+ it_behaves_like 'sends a process_action.action_controller notification', 429
end
end
@@ -141,9 +177,9 @@ describe Gitlab::EtagCaching::Middleware do
mock_app_response
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_middleware_used, endpoint: 'issue_notes')
+ .with(:etag_caching_middleware_used, endpoint: endpoint)
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_resource_changed, endpoint: 'issue_notes')
+ .with(:etag_caching_resource_changed, endpoint: endpoint)
middleware.call(build_request(path, if_none_match))
end
@@ -159,9 +195,9 @@ describe Gitlab::EtagCaching::Middleware do
it 'tracks "etag_caching_header_missing" event' do
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_middleware_used, endpoint: 'issue_notes')
+ .with(:etag_caching_middleware_used, endpoint: endpoint)
expect(Gitlab::Metrics).to receive(:add_event)
- .with(:etag_caching_header_missing, endpoint: 'issue_notes')
+ .with(:etag_caching_header_missing, endpoint: endpoint)
middleware.call(build_request(path, if_none_match))
end
@@ -197,6 +233,21 @@ describe Gitlab::EtagCaching::Middleware do
end
def build_request(path, if_none_match)
- { 'PATH_INFO' => path, 'HTTP_IF_NONE_MATCH' => if_none_match }
+ { 'PATH_INFO' => path,
+ 'HTTP_IF_NONE_MATCH' => if_none_match,
+ 'rack.input' => '',
+ 'REQUEST_METHOD' => 'GET' }
+ end
+
+ def payload_for(event)
+ payload = nil
+ subscription = ActiveSupport::Notifications.subscribe event do |_, _, _, _, extra_payload|
+ payload = extra_payload
+ end
+
+ yield
+
+ ActiveSupport::Notifications.unsubscribe(subscription)
+ payload
end
end
diff --git a/spec/lib/gitlab/etag_caching/router_spec.rb b/spec/lib/gitlab/etag_caching/router_spec.rb
index e25ce4df4aa..b69e4668d61 100644
--- a/spec/lib/gitlab/etag_caching/router_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router_spec.rb
@@ -59,7 +59,7 @@ describe Gitlab::EtagCaching::Router do
it 'matches commit pipelines endpoint' do
result = described_class.match(
- '/my-group/my-project/commit/aa8260d253a53f73f6c26c734c72fdd600f6e6d4/pipelines.json'
+ '/my-group/my-project/-/commit/aa8260d253a53f73f6c26c734c72fdd600f6e6d4/pipelines.json'
)
expect(result).to be_present
@@ -68,7 +68,7 @@ describe Gitlab::EtagCaching::Router do
it 'matches new merge request pipelines endpoint' do
result = described_class.match(
- '/my-group/my-project/merge_requests/new.json'
+ '/my-group/my-project/-/merge_requests/new.json'
)
expect(result).to be_present
@@ -77,7 +77,7 @@ describe Gitlab::EtagCaching::Router do
it 'matches merge request pipelines endpoint' do
result = described_class.match(
- '/my-group/my-project/merge_requests/234/pipelines.json'
+ '/my-group/my-project/-/merge_requests/234/pipelines.json'
)
expect(result).to be_present
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index e4624accd58..1506794cbb5 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -96,10 +96,10 @@ describe Gitlab::Experimentation do
expect(Gitlab::Tracking).to receive(:event).with(
'Team',
'start',
- label: nil,
- property: 'experimental_group'
+ property: 'experimental_group',
+ value: 'team_id'
)
- controller.track_experiment_event(:test_experiment, 'start')
+ controller.track_experiment_event(:test_experiment, 'start', 'team_id')
end
end
@@ -112,10 +112,10 @@ describe Gitlab::Experimentation do
expect(Gitlab::Tracking).to receive(:event).with(
'Team',
'start',
- label: nil,
- property: 'control_group'
+ property: 'control_group',
+ value: 'team_id'
)
- controller.track_experiment_event(:test_experiment, 'start')
+ controller.track_experiment_event(:test_experiment, 'start', 'team_id')
end
end
end
@@ -144,13 +144,13 @@ describe Gitlab::Experimentation do
end
it 'pushes the right parameters to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id')
expect(Gon.tracking_data).to eq(
{
category: 'Team',
action: 'start',
- label: nil,
- property: 'experimental_group'
+ property: 'experimental_group',
+ value: 'team_id'
}
)
end
@@ -164,12 +164,23 @@ describe Gitlab::Experimentation do
end
it 'pushes the right parameters to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id')
+ expect(Gon.tracking_data).to eq(
+ {
+ category: 'Team',
+ action: 'start',
+ property: 'control_group',
+ value: 'team_id'
+ }
+ )
+ end
+
+ it 'does not send nil value to gon' do
controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
expect(Gon.tracking_data).to eq(
{
category: 'Team',
action: 'start',
- label: nil,
property: 'control_group'
}
)
diff --git a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
index d16f34af325..a3904f4a97c 100644
--- a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
@@ -8,7 +8,7 @@ describe Gitlab::Gfm::ReferenceRewriter do
let(:new_project) { create(:project, name: 'new-project', group: group) }
let(:user) { create(:user) }
- let(:old_project_ref) { old_project.to_reference(new_project) }
+ let(:old_project_ref) { old_project.to_reference_base(new_project) }
let(:text) { 'some text' }
before do
@@ -35,7 +35,7 @@ describe Gitlab::Gfm::ReferenceRewriter do
context 'description with ignored elements' do
let(:text) do
- "Hi. This references #1, but not `#2`\n" +
+ "Hi. This references #1, but not `#2`\n" \
'<pre>and not !1</pre>'
end
diff --git a/spec/lib/gitlab/git/blob_spec.rb b/spec/lib/gitlab/git/blob_spec.rb
index a659af3d22e..294e67a19d4 100644
--- a/spec/lib/gitlab/git/blob_spec.rb
+++ b/spec/lib/gitlab/git/blob_spec.rb
@@ -244,6 +244,61 @@ describe Gitlab::Git::Blob, :seed_helper do
end
end
end
+
+ context 'when large number of blobs requested' do
+ let(:first_batch) do
+ [
+ [SeedRepo::Commit::ID, 'files/ruby/popen.rb'],
+ [SeedRepo::Commit::ID, 'six']
+ ]
+ end
+
+ let(:second_batch) do
+ [
+ [SeedRepo::Commit::ID, 'some'],
+ [SeedRepo::Commit::ID, 'other']
+ ]
+ end
+
+ let(:third_batch) do
+ [
+ [SeedRepo::Commit::ID, 'files']
+ ]
+ end
+
+ let(:blob_references) do
+ first_batch + second_batch + third_batch
+ end
+
+ let(:client) { repository.gitaly_blob_client }
+ let(:limit) { 10.megabytes }
+
+ before do
+ stub_const('Gitlab::Git::Blob::BATCH_SIZE', 2)
+ end
+
+ context 'blobs_fetch_in_batches is enabled' do
+ it 'fetches the blobs in batches' do
+ expect(client).to receive(:get_blobs).with(first_batch, limit).ordered
+ expect(client).to receive(:get_blobs).with(second_batch, limit).ordered
+ expect(client).to receive(:get_blobs).with(third_batch, limit).ordered
+
+ subject
+ end
+ end
+
+ context 'blobs_fetch_in_batches is disabled' do
+ before do
+ stub_feature_flags(blobs_fetch_in_batches: false)
+ end
+
+ it 'fetches the blobs in a single batch' do
+ expect(client).to receive(:get_blobs).with(blob_references, limit)
+
+ subject
+ end
+ end
+ end
end
describe '.batch_metadata' do
@@ -533,4 +588,14 @@ describe Gitlab::Git::Blob, :seed_helper do
end
end
end
+
+ describe 'metrics' do
+ it 'defines :gitlab_blob_truncated_true counter' do
+ expect(described_class).to respond_to(:gitlab_blob_truncated_true)
+ end
+
+ it 'defines :gitlab_blob_truncated_false counter' do
+ expect(described_class).to respond_to(:gitlab_blob_truncated_false)
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/conflict/parser_spec.rb b/spec/lib/gitlab/git/conflict/parser_spec.rb
index 600c870acd4..92ea44394b1 100644
--- a/spec/lib/gitlab/git/conflict/parser_spec.rb
+++ b/spec/lib/gitlab/git/conflict/parser_spec.rb
@@ -10,80 +10,80 @@ describe Gitlab::Git::Conflict::Parser do
context 'when the file has valid conflicts' do
let(:text) do
- <<CONFLICT
-module Gitlab
- module Regexp
- extend self
-
- def username_regexp
- default_regexp
- end
-
-<<<<<<< files/ruby/regex.rb
- def project_name_regexp
- /\A[a-zA-Z0-9][a-zA-Z0-9_\-\. ]*\z/
- end
-
- def name_regexp
- /\A[a-zA-Z0-9_\-\. ]*\z/
-=======
- def project_name_regex
- %r{\A[a-zA-Z0-9][a-zA-Z0-9_\-\. ]*\z}
- end
-
- def name_regex
- %r{\A[a-zA-Z0-9_\-\. ]*\z}
->>>>>>> files/ruby/regex.rb
- end
-
- def path_regexp
- default_regexp
- end
-
-<<<<<<< files/ruby/regex.rb
- def archive_formats_regexp
- /(zip|tar|7z|tar\.gz|tgz|gz|tar\.bz2|tbz|tbz2|tb2|bz2)/
-=======
- def archive_formats_regex
- %r{(zip|tar|7z|tar\.gz|tgz|gz|tar\.bz2|tbz|tbz2|tb2|bz2)}
->>>>>>> files/ruby/regex.rb
- end
-
- def git_reference_regexp
- # Valid git ref regexp, see:
- # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html
- %r{
- (?!
- (?# doesn't begins with)
- \/| (?# rule #6)
- (?# doesn't contain)
- .*(?:
- [\/.]\.| (?# rule #1,3)
- \/\/| (?# rule #6)
- @\{| (?# rule #8)
- \\ (?# rule #9)
- )
- )
- [^\000-\040\177~^:?*\[]+ (?# rule #4-5)
- (?# doesn't end with)
- (?<!\.lock) (?# rule #1)
- (?<![\/.]) (?# rule #6-7)
- }x
- end
-
- protected
-
-<<<<<<< files/ruby/regex.rb
- def default_regexp
- /\A[.?]?[a-zA-Z0-9][a-zA-Z0-9_\-\.]*(?<!\.git)\z/
-=======
- def default_regex
- %r{\A[.?]?[a-zA-Z0-9][a-zA-Z0-9_\-\.]*(?<!\.git)\z}
->>>>>>> files/ruby/regex.rb
- end
- end
-end
-CONFLICT
+ <<~CONFLICT
+ module Gitlab
+ module Regexp
+ extend self
+
+ def username_regexp
+ default_regexp
+ end
+
+ <<<<<<< files/ruby/regex.rb
+ def project_name_regexp
+ /\A[a-zA-Z0-9][a-zA-Z0-9_\-\. ]*\z/
+ end
+
+ def name_regexp
+ /\A[a-zA-Z0-9_\-\. ]*\z/
+ =======
+ def project_name_regex
+ %r{\A[a-zA-Z0-9][a-zA-Z0-9_\-\. ]*\z}
+ end
+
+ def name_regex
+ %r{\A[a-zA-Z0-9_\-\. ]*\z}
+ >>>>>>> files/ruby/regex.rb
+ end
+
+ def path_regexp
+ default_regexp
+ end
+
+ <<<<<<< files/ruby/regex.rb
+ def archive_formats_regexp
+ /(zip|tar|7z|tar\.gz|tgz|gz|tar\.bz2|tbz|tbz2|tb2|bz2)/
+ =======
+ def archive_formats_regex
+ %r{(zip|tar|7z|tar\.gz|tgz|gz|tar\.bz2|tbz|tbz2|tb2|bz2)}
+ >>>>>>> files/ruby/regex.rb
+ end
+
+ def git_reference_regexp
+ # Valid git ref regexp, see:
+ # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html
+ %r{
+ (?!
+ (?# doesn't begins with)
+ \/| (?# rule #6)
+ (?# doesn't contain)
+ .*(?:
+ [\/.]\.| (?# rule #1,3)
+ \/\/| (?# rule #6)
+ @\{| (?# rule #8)
+ \\ (?# rule #9)
+ )
+ )
+ [^\000-\040\177~^:?*\[]+ (?# rule #4-5)
+ (?# doesn't end with)
+ (?<!\.lock) (?# rule #1)
+ (?<![\/.]) (?# rule #6-7)
+ }x
+ end
+
+ protected
+
+ <<<<<<< files/ruby/regex.rb
+ def default_regexp
+ /\A[.?]?[a-zA-Z0-9][a-zA-Z0-9_\-\.]*(?<!\.git)\z/
+ =======
+ def default_regex
+ %r{\A[.?]?[a-zA-Z0-9][a-zA-Z0-9_\-\.]*(?<!\.git)\z}
+ >>>>>>> files/ruby/regex.rb
+ end
+ end
+ end
+ CONFLICT
end
let(:lines) do
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 07fef203691..a9d7beb0fea 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -310,8 +310,8 @@ describe Gitlab::Git::Repository, :seed_helper do
with_them do
before do
- repository.create_branch('left-branch', 'master')
- repository.create_branch('right-branch', 'master')
+ repository.create_branch('left-branch')
+ repository.create_branch('right-branch')
left.times do
new_commit_edit_new_file_on_branch(repository_rugged, 'encoding/CHANGELOG', 'left-branch', 'some more content for a', 'some stuff')
@@ -350,8 +350,8 @@ describe Gitlab::Git::Repository, :seed_helper do
with_them do
before do
- repository.create_branch('left-branch', 'master')
- repository.create_branch('right-branch', 'master')
+ repository.create_branch('left-branch')
+ repository.create_branch('right-branch')
left.times do
new_commit_edit_new_file_on_branch(repository_rugged, 'encoding/CHANGELOG', 'left-branch', 'some more content for a', 'some stuff')
@@ -420,55 +420,6 @@ describe Gitlab::Git::Repository, :seed_helper do
end
end
- describe "#delete_branch" do
- let(:repository) { mutable_repository }
-
- after do
- ensure_seeds
- end
-
- it "removes the branch from the repo" do
- branch_name = "to-be-deleted-soon"
-
- repository.create_branch(branch_name)
- expect(repository_rugged.branches[branch_name]).not_to be_nil
-
- repository.delete_branch(branch_name)
- expect(repository_rugged.branches[branch_name]).to be_nil
- end
-
- context "when branch does not exist" do
- it "raises a DeleteBranchError exception" do
- expect { repository.delete_branch("this-branch-does-not-exist") }.to raise_error(Gitlab::Git::Repository::DeleteBranchError)
- end
- end
- end
-
- describe "#create_branch" do
- let(:repository) { mutable_repository }
-
- after do
- ensure_seeds
- end
-
- it "creates a new branch" do
- expect(repository.create_branch('new_branch', 'master')).not_to be_nil
- end
-
- it "creates a new branch with the right name" do
- expect(repository.create_branch('another_branch', 'master').name).to eq('another_branch')
- end
-
- it "fails if we create an existing branch" do
- repository.create_branch('duplicated_branch', 'master')
- expect {repository.create_branch('duplicated_branch', 'master')}.to raise_error("Branch duplicated_branch already exists")
- end
-
- it "fails if we create a branch from a non existing ref" do
- expect {repository.create_branch('branch_based_in_wrong_ref', 'master_2_the_revenge')}.to raise_error("Invalid reference master_2_the_revenge")
- end
- end
-
describe '#delete_refs' do
let(:repository) { mutable_repository }
@@ -506,8 +457,8 @@ describe Gitlab::Git::Repository, :seed_helper do
let(:utf8_branch) { 'branch-é' }
before do
- repository.create_branch(new_branch, 'master')
- repository.create_branch(utf8_branch, 'master')
+ repository.create_branch(new_branch)
+ repository.create_branch(utf8_branch)
end
after do
@@ -609,32 +560,30 @@ describe Gitlab::Git::Repository, :seed_helper do
describe '#search_files_by_content' do
let(:repository) { mutable_repository }
let(:repository_rugged) { mutable_repository_rugged }
+ let(:ref) { 'search-files-by-content-branch' }
+ let(:content) { 'foobarbazmepmep' }
before do
- repository.create_branch('search-files-by-content-branch', 'master')
- new_commit_edit_new_file_on_branch(repository_rugged, 'encoding/CHANGELOG', 'search-files-by-content-branch', 'committing something', 'search-files-by-content change')
- new_commit_edit_new_file_on_branch(repository_rugged, 'anotherfile', 'search-files-by-content-branch', 'committing something', 'search-files-by-content change')
+ repository.create_branch(ref)
+ new_commit_edit_new_file_on_branch(repository_rugged, 'encoding/CHANGELOG', ref, 'committing something', content)
+ new_commit_edit_new_file_on_branch(repository_rugged, 'anotherfile', ref, 'committing something', content)
end
after do
ensure_seeds
end
- shared_examples 'search files by content' do
- it 'has 2 items' do
- expect(search_results.size).to eq(2)
- end
+ subject do
+ repository.search_files_by_content(content, ref)
+ end
- it 'has the correct matching line' do
- expect(search_results).to contain_exactly("search-files-by-content-branch:encoding/CHANGELOG\u00001\u0000search-files-by-content change\n",
- "search-files-by-content-branch:anotherfile\u00001\u0000search-files-by-content change\n")
- end
+ it 'has 2 items' do
+ expect(subject.size).to eq(2)
end
- it_should_behave_like 'search files by content' do
- let(:search_results) do
- repository.search_files_by_content('search-files-by-content', 'search-files-by-content-branch')
- end
+ it 'has the correct matching line' do
+ expect(subject).to contain_exactly("#{ref}:encoding/CHANGELOG\u00001\u0000#{content}\n",
+ "#{ref}:anotherfile\u00001\u0000#{content}\n")
end
end
@@ -1116,7 +1065,7 @@ describe Gitlab::Git::Repository, :seed_helper do
before do
create_remote_branch('joe', 'remote_branch', 'master')
- repository.create_branch('local_branch', 'master')
+ repository.create_branch('local_branch')
end
after do
@@ -1142,7 +1091,7 @@ describe Gitlab::Git::Repository, :seed_helper do
before do
create_remote_branch('joe', 'remote_branch', 'master')
- repository.create_branch('local_branch', 'master')
+ repository.create_branch('local_branch')
end
after do
@@ -1192,7 +1141,7 @@ describe Gitlab::Git::Repository, :seed_helper do
context 'when no branch names are specified' do
before do
- repository.create_branch('identical', 'master')
+ repository.create_branch('identical')
end
after do
@@ -1303,7 +1252,7 @@ describe Gitlab::Git::Repository, :seed_helper do
let(:branch_name) { "ʕ•ᴥ•ʔ" }
before do
- repository.create_branch(branch_name, "master")
+ repository.create_branch(branch_name)
end
after do
@@ -1447,7 +1396,7 @@ describe Gitlab::Git::Repository, :seed_helper do
before do
create_remote_branch('joe', 'remote_branch', 'master')
- repository.create_branch('local_branch', 'master')
+ repository.create_branch('local_branch')
end
after do
diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
index 9b29046fce9..300d7bb14b6 100644
--- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
+++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
@@ -31,7 +31,7 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
Gitlab::GitalyClient.instance_variable_set(:@can_use_disk, {})
end
- context '#execute_rugged_call', :request_store do
+ describe '#execute_rugged_call', :request_store do
let(:args) { ['refs/heads/master', 1] }
before do
@@ -122,6 +122,12 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
end
+ it "returns false when Puma doesn't support the cli_config method" do
+ allow(::Puma).to receive(:respond_to?).with(:cli_config).and_return(false)
+
+ expect(subject.running_puma_with_multiple_threads?).to be_falsey
+ end
+
it 'returns false for single thread Puma' do
allow(::Puma).to receive_message_chain(:cli_config, :options).and_return(max_threads: 1)
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
new file mode 100644
index 00000000000..ffb3d86408a
--- /dev/null
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::GitAccessSnippet do
+ include GitHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, :private, :repository) }
+
+ let(:protocol) { 'ssh' }
+ let(:changes) { Gitlab::GitAccess::ANY }
+ let(:push_access_check) { access.check('git-receive-pack', changes) }
+ let(:pull_access_check) { access.check('git-upload-pack', changes) }
+ let(:snippet) { personal_snippet }
+ let(:actor) { personal_snippet.author }
+
+ describe 'when feature flag :version_snippets is enabled' do
+ it 'allows push and pull access' do
+ aggregate_failures do
+ expect { pull_access_check }.not_to raise_error
+ expect { push_access_check }.not_to raise_error
+ end
+ end
+ end
+
+ describe 'when feature flag :version_snippets is disabled' do
+ before do
+ stub_feature_flags(version_snippets: false)
+ end
+
+ it 'does not allow push and pull access' do
+ aggregate_failures do
+ expect { push_access_check }.to raise_snippet_not_found
+ expect { pull_access_check }.to raise_snippet_not_found
+ end
+ end
+ end
+
+ describe '#check_snippet_accessibility!' do
+ context 'when the snippet exists' do
+ it 'allows push and pull access' do
+ aggregate_failures do
+ expect { pull_access_check }.not_to raise_error
+ expect { push_access_check }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when the snippet is nil' do
+ let(:snippet) { nil }
+
+ it 'blocks push and pull with "not found"' do
+ aggregate_failures do
+ expect { pull_access_check }.to raise_snippet_not_found
+ expect { push_access_check }.to raise_snippet_not_found
+ end
+ end
+ end
+
+ context 'when the snippet does not have a repository' do
+ let(:snippet) { build_stubbed(:personal_snippet) }
+
+ it 'blocks push and pull with "not found"' do
+ aggregate_failures do
+ expect { pull_access_check }.to raise_snippet_not_found
+ expect { push_access_check }.to raise_snippet_not_found
+ end
+ end
+ end
+ end
+
+ private
+
+ def access
+ described_class.new(actor, snippet, protocol,
+ authentication_abilities: [],
+ namespace_path: nil, project_path: nil,
+ redirected_path: nil, auth_result_type: nil)
+ end
+
+ def raise_snippet_not_found
+ raise_error(Gitlab::GitAccess::NotFoundError, Gitlab::GitAccess::ERROR_MESSAGES[:snippet_not_found])
+ end
+end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 36bde9de12d..0831021b22b 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -75,6 +75,32 @@ describe Gitlab::GitAccess do
end
end
+ describe '#check_namespace!' do
+ context 'when namespace exists' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'allows push and pull access' do
+ aggregate_failures do
+ expect { push_access_check }.not_to raise_error
+ expect { pull_access_check }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when namespace does not exist' do
+ let(:namespace_path) { nil }
+
+ it 'does not allow push and pull access' do
+ aggregate_failures do
+ expect { push_access_check }.to raise_not_found
+ expect { pull_access_check }.to raise_not_found
+ end
+ end
+ end
+ end
+
describe '#check_project_accessibility!' do
context 'when the project exists' do
context 'when actor exists' do
@@ -731,7 +757,7 @@ describe Gitlab::GitAccess do
allow(project).to receive(:lfs_enabled?).and_return(true)
expect_next_instance_of(Gitlab::Checks::LfsIntegrity) do |instance|
- expect(instance).to receive(:objects_missing?).exactly(1).times
+ expect(instance).to receive(:objects_missing?).once
end
push_access_check
diff --git a/spec/lib/gitlab/git_ref_validator_spec.rb b/spec/lib/gitlab/git_ref_validator_spec.rb
index 1531317c514..28cc13f02de 100644
--- a/spec/lib/gitlab/git_ref_validator_spec.rb
+++ b/spec/lib/gitlab/git_ref_validator_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Gitlab::GitRefValidator do
using RSpec::Parameterized::TableSyntax
- context '.validate' do
+ describe '.validate' do
it { expect(described_class.validate('feature/new')).to be true }
it { expect(described_class.validate('implement_@all')).to be true }
it { expect(described_class.validate('my_new_feature')).to be true }
@@ -37,7 +37,7 @@ describe Gitlab::GitRefValidator do
it { expect(described_class.validate("\xA0\u0000\xB0")).to be false }
end
- context '.validate_merge_request_branch' do
+ describe '.validate_merge_request_branch' do
it { expect(described_class.validate_merge_request_branch('HEAD')).to be true }
it { expect(described_class.validate_merge_request_branch('feature/new')).to be true }
it { expect(described_class.validate_merge_request_branch('implement_@all')).to be true }
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 820578dfc6e..5c36d6d35af 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -279,4 +279,32 @@ describe Gitlab::GitalyClient::CommitService do
expect(subject.deletions).to eq(15)
end
end
+
+ describe '#find_commits' do
+ it 'sends an RPC request with NONE when default' do
+ request = Gitaly::FindCommitsRequest.new(
+ repository: repository_message,
+ disable_walk: true,
+ order: 'NONE'
+ )
+
+ expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:find_commits)
+ .with(request, kind_of(Hash)).and_return([])
+
+ client.find_commits(order: 'default')
+ end
+
+ it 'sends an RPC request' do
+ request = Gitaly::FindCommitsRequest.new(
+ repository: repository_message,
+ disable_walk: true,
+ order: 'TOPO'
+ )
+
+ expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:find_commits)
+ .with(request, kind_of(Hash)).and_return([])
+
+ client.find_commits(order: 'topo')
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index ebf56c0ae66..b03c1feb429 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -52,7 +52,7 @@ describe Gitlab::GitalyClient do
end
describe '.filesystem_id' do
- it 'returns an empty string when the storage is not found in the response' do
+ it 'returns an empty string when the relevant storage status is not found in the response' do
response = double("response")
allow(response).to receive(:storage_statuses).and_return([])
allow_next_instance_of(Gitlab::GitalyClient::ServerService) do |instance|
@@ -63,6 +63,63 @@ describe Gitlab::GitalyClient do
end
end
+ context 'when the relevant storage status is not found' do
+ before do
+ response = double('response')
+ allow(response).to receive(:storage_statuses).and_return([])
+ allow_next_instance_of(Gitlab::GitalyClient::ServerService) do |instance|
+ allow(instance).to receive(:disk_statistics).and_return(response)
+ expect(instance).to receive(:storage_disk_statistics)
+ end
+ end
+
+ describe '.filesystem_disk_available' do
+ it 'returns nil when the relevant storage status is not found in the response' do
+ expect(described_class.filesystem_disk_available('default')).to eq(nil)
+ end
+ end
+
+ describe '.filesystem_disk_used' do
+ it 'returns nil when the relevant storage status is not found in the response' do
+ expect(described_class.filesystem_disk_used('default')).to eq(nil)
+ end
+ end
+ end
+
+ context 'when the relevant storage status is found' do
+ let(:disk_available) { 42 }
+ let(:disk_used) { 42 }
+ let(:storage_status) { double('storage_status') }
+
+ before do
+ allow(storage_status).to receive(:storage_name).and_return('default')
+ allow(storage_status).to receive(:used).and_return(disk_used)
+ allow(storage_status).to receive(:available).and_return(disk_available)
+ response = double('response')
+ allow(response).to receive(:storage_statuses).and_return([storage_status])
+ allow_next_instance_of(Gitlab::GitalyClient::ServerService) do |instance|
+ allow(instance).to receive(:disk_statistics).and_return(response)
+ end
+ expect_next_instance_of(Gitlab::GitalyClient::ServerService) do |instance|
+ expect(instance).to receive(:storage_disk_statistics).and_return(storage_status)
+ end
+ end
+
+ describe '.filesystem_disk_available' do
+ it 'returns disk available when the relevant storage status is found in the response' do
+ expect(storage_status).to receive(:available)
+ expect(described_class.filesystem_disk_available('default')).to eq(disk_available)
+ end
+ end
+
+ describe '.filesystem_disk_used' do
+ it 'returns disk used when the relevant storage status is found in the response' do
+ expect(storage_status).to receive(:used)
+ expect(described_class.filesystem_disk_used('default')).to eq(disk_used)
+ end
+ end
+ end
+
describe '.stub_class' do
it 'returns the gRPC health check stub' do
expect(described_class.stub_class(:health_check)).to eq(::Grpc::Health::V1::Health::Stub)
diff --git a/spec/lib/gitlab/gitlab_import/client_spec.rb b/spec/lib/gitlab/gitlab_import/client_spec.rb
index 246ef6c02f2..6e4e88093bb 100644
--- a/spec/lib/gitlab/gitlab_import/client_spec.rb
+++ b/spec/lib/gitlab/gitlab_import/client_spec.rb
@@ -13,9 +13,7 @@ describe Gitlab::GitlabImport::Client do
end
it 'all OAuth2 client options are symbols' do
- client.client.options.keys.each do |key|
- expect(key).to be_kind_of(Symbol)
- end
+ expect(client.client.options.keys).to all(be_kind_of(Symbol))
end
it 'uses membership and simple flags' do
diff --git a/spec/lib/gitlab/gl_repository/repo_type_spec.rb b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
index 9e09e1411ab..7cf0442fbe1 100644
--- a/spec/lib/gitlab/gl_repository/repo_type_spec.rb
+++ b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
@@ -2,19 +2,28 @@
require 'spec_helper'
describe Gitlab::GlRepository::RepoType do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, author: project.owner) }
+ let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.owner) }
describe Gitlab::GlRepository::PROJECT do
it_behaves_like 'a repo type' do
let(:expected_identifier) { "project-#{project.id}" }
let(:expected_id) { project.id.to_s }
- let(:expected_suffix) { "" }
+ let(:expected_suffix) { '' }
let(:expected_repository) { project.repository }
+ let(:expected_container) { project }
end
- it "knows its type" do
+ it 'knows its type' do
expect(described_class).not_to be_wiki
expect(described_class).to be_project
+ expect(described_class).not_to be_snippet
+ end
+
+ it 'checks if repository path is valid' do
+ expect(described_class.valid?(project.repository.full_path)).to be_truthy
+ expect(described_class.valid?(project.wiki.repository.full_path)).to be_truthy
end
end
@@ -22,13 +31,54 @@ describe Gitlab::GlRepository::RepoType do
it_behaves_like 'a repo type' do
let(:expected_identifier) { "wiki-#{project.id}" }
let(:expected_id) { project.id.to_s }
- let(:expected_suffix) { ".wiki" }
+ let(:expected_suffix) { '.wiki' }
let(:expected_repository) { project.wiki.repository }
+ let(:expected_container) { project }
end
- it "knows its type" do
+ it 'knows its type' do
expect(described_class).to be_wiki
expect(described_class).not_to be_project
+ expect(described_class).not_to be_snippet
+ end
+
+ it 'checks if repository path is valid' do
+ expect(described_class.valid?(project.repository.full_path)).to be_falsey
+ expect(described_class.valid?(project.wiki.repository.full_path)).to be_truthy
+ end
+ end
+
+ describe Gitlab::GlRepository::SNIPPET do
+ context 'when PersonalSnippet' do
+ it_behaves_like 'a repo type' do
+ let(:expected_id) { personal_snippet.id.to_s }
+ let(:expected_identifier) { "snippet-#{expected_id}" }
+ let(:expected_suffix) { '' }
+ let(:expected_repository) { personal_snippet.repository }
+ let(:expected_container) { personal_snippet }
+ end
+
+ it 'knows its type' do
+ expect(described_class).to be_snippet
+ expect(described_class).not_to be_wiki
+ expect(described_class).not_to be_project
+ end
+ end
+
+ context 'when ProjectSnippet' do
+ it_behaves_like 'a repo type' do
+ let(:expected_id) { project_snippet.id.to_s }
+ let(:expected_identifier) { "snippet-#{expected_id}" }
+ let(:expected_suffix) { '' }
+ let(:expected_repository) { project_snippet.repository }
+ let(:expected_container) { project_snippet }
+ end
+
+ it 'knows its type' do
+ expect(described_class).to be_snippet
+ expect(described_class).not_to be_wiki
+ expect(described_class).not_to be_project
+ end
end
end
end
diff --git a/spec/lib/gitlab/gl_repository_spec.rb b/spec/lib/gitlab/gl_repository_spec.rb
index 3290bef8aa5..3cfc4c2a132 100644
--- a/spec/lib/gitlab/gl_repository_spec.rb
+++ b/spec/lib/gitlab/gl_repository_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe ::Gitlab::GlRepository do
describe '.parse' do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
it 'parses a project gl_repository' do
expect(described_class.parse("project-#{project.id}")).to eq([project, Gitlab::GlRepository::PROJECT])
@@ -14,8 +14,12 @@ describe ::Gitlab::GlRepository do
expect(described_class.parse("wiki-#{project.id}")).to eq([project, Gitlab::GlRepository::WIKI])
end
- it 'throws an argument error on an invalid gl_repository' do
+ it 'throws an argument error on an invalid gl_repository type' do
expect { described_class.parse("badformat-#{project.id}") }.to raise_error(ArgumentError)
end
+
+ it 'throws an argument error on an invalid gl_repository id' do
+ expect { described_class.parse("project-foo") }.to raise_error(ArgumentError)
+ end
end
end
diff --git a/spec/lib/gitlab/gpg_spec.rb b/spec/lib/gitlab/gpg_spec.rb
index 27a3010eeed..c7b9775f642 100644
--- a/spec/lib/gitlab/gpg_spec.rb
+++ b/spec/lib/gitlab/gpg_spec.rb
@@ -208,15 +208,15 @@ describe Gitlab::Gpg do
allow(FileUtils).to receive(:remove_entry).with(any_args).and_call_original
end
- it "tries for #{seconds}" do
- expect(Retriable).to receive(:retriable).with(a_hash_including(max_elapsed_time: seconds))
+ it "tries for #{seconds} or 15 times" do
+ expect(Retriable).to receive(:retriable).with(a_hash_including(max_elapsed_time: seconds, tries: 15))
described_class.using_tmp_keychain {}
end
it 'tries at least 2 times to remove the tmp dir before raising', :aggregate_failures do
- expect(Retriable).to receive(:sleep).at_least(2).times
- expect(FileUtils).to receive(:remove_entry).with(tmp_dir).at_least(2).times.and_raise('Deletion failed')
+ expect(Retriable).to receive(:sleep).at_least(:twice)
+ expect(FileUtils).to receive(:remove_entry).with(tmp_dir).at_least(:twice).and_raise('Deletion failed')
expect { described_class.using_tmp_keychain { } }.to raise_error(described_class::CleanupError)
end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb
index d943540fe1f..26fc5344871 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb
@@ -4,10 +4,15 @@ require 'spec_helper'
describe Gitlab::Graphql::Connections::Keyset::Conditions::NotNullCondition do
describe '#build' do
- let(:condition) { described_class.new(Issue.arel_table, %w(relative_position id), [1500, 500], ['>', '>'], before_or_after) }
+ let(:operators) { ['>', '>'] }
+ let(:before_or_after) { :after }
+ let(:condition) { described_class.new(arel_table, order_list, values, operators, before_or_after) }
context 'when there is only one ordering field' do
- let(:condition) { described_class.new(Issue.arel_table, ['id'], [500], ['>'], :after) }
+ let(:arel_table) { Issue.arel_table }
+ let(:order_list) { [double(named_function: nil, attribute_name: 'id')] }
+ let(:values) { [500] }
+ let(:operators) { ['>'] }
it 'generates a single condition sql' do
expected_sql = <<~SQL
@@ -18,38 +23,92 @@ describe Gitlab::Graphql::Connections::Keyset::Conditions::NotNullCondition do
end
end
- context 'when :after' do
- let(:before_or_after) { :after }
+ context 'when ordering by a column attribute' do
+ let(:arel_table) { Issue.arel_table }
+ let(:order_list) { [double(named_function: nil, attribute_name: 'relative_position'), double(named_function: nil, attribute_name: 'id')] }
+ let(:values) { [1500, 500] }
- it 'generates :after sql' do
- expected_sql = <<~SQL
- ("issues"."relative_position" > 1500)
- OR (
- "issues"."relative_position" = 1500
- AND
- "issues"."id" > 500
- )
- OR ("issues"."relative_position" IS NULL)
- SQL
+ shared_examples ':after condition' do
+ it 'generates :after sql' do
+ expected_sql = <<~SQL
+ ("issues"."relative_position" > 1500)
+ OR (
+ "issues"."relative_position" = 1500
+ AND
+ "issues"."id" > 500
+ )
+ OR ("issues"."relative_position" IS NULL)
+ SQL
- expect(condition.build.squish).to eq expected_sql.squish
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+
+ context 'when :after' do
+ it_behaves_like ':after condition'
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates :before sql' do
+ expected_sql = <<~SQL
+ ("issues"."relative_position" > 1500)
+ OR (
+ "issues"."relative_position" = 1500
+ AND
+ "issues"."id" > 500
+ )
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+
+ context 'when :foo' do
+ let(:before_or_after) { :foo }
+
+ it_behaves_like ':after condition'
end
end
- context 'when :before' do
- let(:before_or_after) { :before }
+ context 'when ordering by LOWER' do
+ let(:arel_table) { Project.arel_table }
+ let(:relation) { Project.order(arel_table['name'].lower.asc).order(:id) }
+ let(:order_list) { Gitlab::Graphql::Connections::Keyset::OrderInfo.build_order_list(relation) }
+ let(:values) { ['Test', 500] }
- it 'generates :before sql' do
- expected_sql = <<~SQL
- ("issues"."relative_position" > 1500)
- OR (
- "issues"."relative_position" = 1500
- AND
- "issues"."id" > 500
- )
- SQL
+ context 'when :after' do
+ it 'generates :after sql' do
+ expected_sql = <<~SQL
+ (LOWER("projects"."name") > 'test')
+ OR (
+ LOWER("projects"."name") = 'test'
+ AND
+ "projects"."id" > 500
+ )
+ OR (LOWER("projects"."name") IS NULL)
+ SQL
- expect(condition.build.squish).to eq expected_sql.squish
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates :before sql' do
+ expected_sql = <<~SQL
+ (LOWER("projects"."name") > 'test')
+ OR (
+ LOWER("projects"."name") = 'test'
+ AND
+ "projects"."id" > 500
+ )
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
end
end
end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb
index 7fce94adb81..be0a21b2438 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb
@@ -4,38 +4,91 @@ require 'spec_helper'
describe Gitlab::Graphql::Connections::Keyset::Conditions::NullCondition do
describe '#build' do
- let(:condition) { described_class.new(Issue.arel_table, %w(relative_position id), [nil, 500], [nil, '>'], before_or_after) }
+ let(:values) { [nil, 500] }
+ let(:operators) { [nil, '>'] }
+ let(:before_or_after) { :after }
+ let(:condition) { described_class.new(arel_table, order_list, values, operators, before_or_after) }
- context 'when :after' do
- let(:before_or_after) { :after }
+ context 'when ordering by a column attribute' do
+ let(:arel_table) { Issue.arel_table }
+ let(:order_list) { [double(named_function: nil, attribute_name: 'relative_position'), double(named_function: nil, attribute_name: 'id')] }
- it 'generates sql' do
- expected_sql = <<~SQL
+ shared_examples ':after condition' do
+ it 'generates sql' do
+ expected_sql = <<~SQL
+ (
+ "issues"."relative_position" IS NULL
+ AND
+ "issues"."id" > 500
+ )
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+
+ context 'when :after' do
+ it_behaves_like ':after condition'
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates :before sql' do
+ expected_sql = <<~SQL
+ (
+ "issues"."relative_position" IS NULL
+ AND
+ "issues"."id" > 500
+ )
+ OR ("issues"."relative_position" IS NOT NULL)
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+
+ context 'when :foo' do
+ let(:before_or_after) { :foo }
+
+ it_behaves_like ':after condition'
+ end
+ end
+
+ context 'when ordering by LOWER' do
+ let(:arel_table) { Project.arel_table }
+ let(:relation) { Project.order(arel_table['name'].lower.asc).order(:id) }
+ let(:order_list) { Gitlab::Graphql::Connections::Keyset::OrderInfo.build_order_list(relation) }
+
+ context 'when :after' do
+ it 'generates sql' do
+ expected_sql = <<~SQL
(
- "issues"."relative_position" IS NULL
+ LOWER("projects"."name") IS NULL
AND
- "issues"."id" > 500
+ "projects"."id" > 500
)
- SQL
+ SQL
- expect(condition.build.squish).to eq expected_sql.squish
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
end
- end
- context 'when :before' do
- let(:before_or_after) { :before }
+ context 'when :before' do
+ let(:before_or_after) { :before }
- it 'generates :before sql' do
- expected_sql = <<~SQL
+ it 'generates :before sql' do
+ expected_sql = <<~SQL
(
- "issues"."relative_position" IS NULL
+ LOWER("projects"."name") IS NULL
AND
- "issues"."id" > 500
+ "projects"."id" > 500
)
- OR ("issues"."relative_position" IS NOT NULL)
- SQL
+ OR (LOWER("projects"."name") IS NOT NULL)
+ SQL
- expect(condition.build.squish).to eq expected_sql.squish
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
end
end
end
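
(For context: a minimal sketch of the :after condition these specs expect for an ordering of LOWER(name) ASC, id ASC. It assumes a plain string-building helper rather than the actual Keyset::Conditions classes, which build the clause with Arel; rows strictly after the cursor have a greater lowercased name, or the same name and a greater id, or a NULL name, since NULLs sort after non-NULL values in this scheme.)

def keyset_after_condition(table:, column:, value:, id:)
  # Illustrative only; quoting and escaping are left out here.
  lowered = %(LOWER("#{table}"."#{column}"))

  <<~SQL.gsub(/\s+/, ' ').strip
    (#{lowered} > '#{value}')
    OR (#{lowered} = '#{value}' AND "#{table}"."id" > #{id})
    OR (#{lowered} IS NULL)
  SQL
end

keyset_after_condition(table: 'projects', column: 'name', value: 'test', id: 500)
# => "(LOWER(\"projects\".\"name\") > 'test') OR (LOWER(\"projects\".\"name\") = 'test' AND \"projects\".\"id\" > 500) OR (LOWER(\"projects\".\"name\") IS NULL)"
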
diff --git a/spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb
index f617e8b3ce7..c193ab2b50f 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb
@@ -103,7 +103,75 @@ describe Gitlab::Graphql::Connections::Keyset::Connection do
end
end
- context 'when multiple orders are defined' do
+ shared_examples 'nodes are in ascending order' do
+ context 'when no cursor is passed' do
+ let(:arguments) { {} }
+
+ it 'returns projects in ascending order' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes)
+ end
+ end
+
+ context 'when before cursor value is not NULL' do
+ let(:arguments) { { before: encoded_cursor(ascending_nodes[2]) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes.first(2))
+ end
+ end
+
+ context 'when after cursor value is not NULL' do
+ let(:arguments) { { after: encoded_cursor(ascending_nodes[1]) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes.last(3))
+ end
+ end
+
+ context 'when before and after cursor' do
+ let(:arguments) { { before: encoded_cursor(ascending_nodes.last), after: encoded_cursor(ascending_nodes.first) } }
+
+ it 'returns all projects between the cursors' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes[1..3])
+ end
+ end
+ end
+
+ shared_examples 'nodes are in descending order' do
+ context 'when no cursor is passed' do
+ let(:arguments) { {} }
+
+ it 'only returns projects in descending order' do
+ expect(subject.sliced_nodes).to eq(descending_nodes)
+ end
+ end
+
+ context 'when before cursor value is not NULL' do
+ let(:arguments) { { before: encoded_cursor(descending_nodes[2]) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq(descending_nodes.first(2))
+ end
+ end
+
+ context 'when after cursor value is not NULL' do
+ let(:arguments) { { after: encoded_cursor(descending_nodes[1]) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq(descending_nodes.last(3))
+ end
+ end
+
+ context 'when before and after cursor' do
+ let(:arguments) { { before: encoded_cursor(descending_nodes.last), after: encoded_cursor(descending_nodes.first) } }
+
+ it 'returns all projects between the cursors' do
+ expect(subject.sliced_nodes).to eq(descending_nodes[1..3])
+ end
+ end
+ end
+
+ context 'when multiple orders with nil values are defined' do
let!(:project1) { create(:project, last_repository_check_at: 10.days.ago) } # Asc: project5 Desc: project3
let!(:project2) { create(:project, last_repository_check_at: nil) } # Asc: project1 Desc: project1
let!(:project3) { create(:project, last_repository_check_at: 5.days.ago) } # Asc: project3 Desc: project5
@@ -114,14 +182,9 @@ describe Gitlab::Graphql::Connections::Keyset::Connection do
let(:nodes) do
Project.order(Arel.sql('projects.last_repository_check_at IS NULL')).order(last_repository_check_at: :asc).order(id: :asc)
end
+ let(:ascending_nodes) { [project5, project1, project3, project2, project4] }
- context 'when no cursor is passed' do
- let(:arguments) { {} }
-
- it 'returns projects in ascending order' do
- expect(subject.sliced_nodes).to eq([project5, project1, project3, project2, project4])
- end
- end
+ it_behaves_like 'nodes are in ascending order'
context 'when before cursor value is NULL' do
let(:arguments) { { before: encoded_cursor(project4) } }
@@ -131,14 +194,6 @@ describe Gitlab::Graphql::Connections::Keyset::Connection do
end
end
- context 'when before cursor value is not NULL' do
- let(:arguments) { { before: encoded_cursor(project3) } }
-
- it 'returns all projects before the cursor' do
- expect(subject.sliced_nodes).to eq([project5, project1])
- end
- end
-
context 'when after cursor value is NULL' do
let(:arguments) { { after: encoded_cursor(project2) } }
@@ -146,36 +201,15 @@ describe Gitlab::Graphql::Connections::Keyset::Connection do
expect(subject.sliced_nodes).to eq([project4])
end
end
-
- context 'when after cursor value is not NULL' do
- let(:arguments) { { after: encoded_cursor(project1) } }
-
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq([project3, project2, project4])
- end
- end
-
- context 'when before and after cursor' do
- let(:arguments) { { before: encoded_cursor(project4), after: encoded_cursor(project5) } }
-
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq([project1, project3, project2])
- end
- end
end
context 'when descending' do
let(:nodes) do
Project.order(Arel.sql('projects.last_repository_check_at IS NULL')).order(last_repository_check_at: :desc).order(id: :asc)
end
+ let(:descending_nodes) { [project3, project1, project5, project2, project4] }
- context 'when no cursor is passed' do
- let(:arguments) { {} }
-
- it 'only returns projects in descending order' do
- expect(subject.sliced_nodes).to eq([project3, project1, project5, project2, project4])
- end
- end
+ it_behaves_like 'nodes are in descending order'
context 'when before cursor value is NULL' do
let(:arguments) { { before: encoded_cursor(project4) } }
@@ -185,14 +219,6 @@ describe Gitlab::Graphql::Connections::Keyset::Connection do
end
end
- context 'when before cursor value is not NULL' do
- let(:arguments) { { before: encoded_cursor(project5) } }
-
- it 'returns all projects before the cursor' do
- expect(subject.sliced_nodes).to eq([project3, project1])
- end
- end
-
context 'when after cursor value is NULL' do
let(:arguments) { { after: encoded_cursor(project2) } }
@@ -200,22 +226,32 @@ describe Gitlab::Graphql::Connections::Keyset::Connection do
expect(subject.sliced_nodes).to eq([project4])
end
end
+ end
+ end
- context 'when after cursor value is not NULL' do
- let(:arguments) { { after: encoded_cursor(project1) } }
+ context 'when ordering uses LOWER' do
+ let!(:project1) { create(:project, name: 'A') } # Asc: project1 Desc: project4
+ let!(:project2) { create(:project, name: 'c') } # Asc: project5 Desc: project2
+ let!(:project3) { create(:project, name: 'b') } # Asc: project3 Desc: project3
+ let!(:project4) { create(:project, name: 'd') } # Asc: project2 Desc: project5
+ let!(:project5) { create(:project, name: 'a') } # Asc: project4 Desc: project1
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq([project5, project2, project4])
- end
+ context 'when ascending' do
+ let(:nodes) do
+ Project.order(Arel::Table.new(:projects)['name'].lower.asc).order(id: :asc)
end
+ let(:ascending_nodes) { [project1, project5, project3, project2, project4] }
- context 'when before and after cursor' do
- let(:arguments) { { before: encoded_cursor(project4), after: encoded_cursor(project3) } }
+ it_behaves_like 'nodes are in ascending order'
+ end
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq([project1, project5, project2])
- end
+ context 'when descending' do
+ let(:nodes) do
+ Project.order(Arel::Table.new(:projects)['name'].lower.desc).order(id: :desc)
end
+ let(:descending_nodes) { [project4, project2, project3, project5, project1] }
+
+ it_behaves_like 'nodes are in descending order'
end
end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb
index 17ddcaefeeb..eb823fc0122 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb
@@ -37,6 +37,20 @@ describe Gitlab::Graphql::Connections::Keyset::OrderInfo do
expect(order_list.count).to eq 1
end
end
+
+ context 'when order contains LOWER' do
+ let(:relation) { Project.order(Arel::Table.new(:projects)['name'].lower.asc).order(:id) }
+
+ it 'does not ignore the SQL order' do
+ expect(order_list.count).to eq 2
+ expect(order_list.first.attribute_name).to eq 'name'
+ expect(order_list.first.named_function).to be_kind_of(Arel::Nodes::NamedFunction)
+ expect(order_list.first.named_function.to_sql).to eq 'LOWER("projects"."name")'
+ expect(order_list.first.operator_for(:after)).to eq '>'
+ expect(order_list.last.attribute_name).to eq 'id'
+ expect(order_list.last.operator_for(:after)).to eq '>'
+ end
+ end
end
describe '#validate_ordering' do
diff --git a/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb
index 59e153d9e07..b46ce4bf023 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb
@@ -13,6 +13,7 @@ describe Gitlab::Graphql::Connections::Keyset::QueryBuilder do
describe '#conditions' do
let(:relation) { Issue.order(relative_position: :desc).order(:id) }
let(:order_list) { Gitlab::Graphql::Connections::Keyset::OrderInfo.build_order_list(relation) }
+ let(:arel_table) { Issue.arel_table }
let(:builder) { described_class.new(arel_table, order_list, decoded_cursor, before_or_after) }
let(:before_or_after) { :after }
@@ -100,9 +101,35 @@ describe Gitlab::Graphql::Connections::Keyset::QueryBuilder do
end
end
end
- end
- def arel_table
- Issue.arel_table
+ context 'when sorting using LOWER' do
+ let(:relation) { Project.order(Arel::Table.new(:projects)['name'].lower.asc).order(:id) }
+ let(:arel_table) { Project.arel_table }
+ let(:decoded_cursor) { { 'name' => 'Test', 'id' => 100 } }
+
+ context 'when no values are nil' do
+ context 'when :after' do
+ it 'generates the correct condition' do
+ conditions = builder.conditions
+
+ expect(conditions).to include '(LOWER("projects"."name") > \'test\')'
+ expect(conditions).to include '"projects"."id" > 100'
+ expect(conditions).to include 'OR (LOWER("projects"."name") IS NULL)'
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates the correct condition' do
+ conditions = builder.conditions
+
+ expect(conditions).to include '(LOWER("projects"."name") < \'test\')'
+ expect(conditions).to include '"projects"."id" < 100'
+ expect(conditions).to include 'LOWER("projects"."name") = \'test\''
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/graphql/representation/submodule_tree_entry_spec.rb b/spec/lib/gitlab/graphql/representation/submodule_tree_entry_spec.rb
index 28056a6085d..ed092a846ae 100644
--- a/spec/lib/gitlab/graphql/representation/submodule_tree_entry_spec.rb
+++ b/spec/lib/gitlab/graphql/representation/submodule_tree_entry_spec.rb
@@ -21,7 +21,7 @@ describe Gitlab::Graphql::Representation::SubmoduleTreeEntry do
)
expect(entries.map(&:tree_url)).to contain_exactly(
- "https://gitlab.com/gitlab-org/gitlab-grack/tree/645f6c4c82fd3f5e06f67134450a570b795e55a6",
+ "https://gitlab.com/gitlab-org/gitlab-grack/-/tree/645f6c4c82fd3f5e06f67134450a570b795e55a6",
"https://github.com/gitlabhq/gitlab-shell/tree/79bceae69cb5750d6567b223597999bfa91cb3b9",
"https://github.com/randx/six/tree/409f37c4f05865e4fb208c771485f211a22c4c2d"
)
diff --git a/spec/lib/gitlab/hashed_storage/migrator_spec.rb b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
index ce7f2c4530d..f3cbb811679 100644
--- a/spec/lib/gitlab/hashed_storage/migrator_spec.rb
+++ b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::HashedStorage::Migrator, :sidekiq, :redis do
+describe Gitlab::HashedStorage::Migrator, :redis do
describe '#bulk_schedule_migration' do
it 'schedules job to HashedStorage::MigratorWorker' do
Sidekiq::Testing.fake! do
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 08e57e541a4..4dadb310029 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -6,8 +6,6 @@ issues:
- assignees
- updated_by
- milestone
-- issue_milestones
-- milestones
- notes
- resource_label_events
- resource_weight_events
@@ -38,8 +36,6 @@ issues:
- vulnerability_links
- related_vulnerabilities
- user_mentions
-- blocked_by_issue_links
-- blocked_by_issues
events:
- author
- project
@@ -82,8 +78,6 @@ milestone:
- boards
- milestone_releases
- releases
-- issue_milestones
-- merge_request_milestones
snippets:
- author
- project
@@ -91,6 +85,7 @@ snippets:
- award_emoji
- user_agent_detail
- user_mentions
+- snippet_repository
releases:
- author
- project
@@ -112,8 +107,6 @@ merge_requests:
- assignee
- updated_by
- milestone
-- merge_request_milestones
-- milestones
- notes
- resource_label_events
- label_links
@@ -125,6 +118,8 @@ merge_requests:
- merge_user
- merge_request_diffs
- merge_request_diff
+- merge_request_context_commits
+- merge_request_context_commit_diff_files
- events
- merge_requests_closing_issues
- cached_closes_issues
@@ -154,12 +149,6 @@ merge_requests:
- deployment_merge_requests
- deployments
- user_mentions
-issue_milestones:
-- milestone
-- issue
-merge_request_milestones:
-- milestone
-- merge_request
external_pull_requests:
- project
merge_request_diff:
@@ -170,6 +159,9 @@ merge_request_diff_commits:
- merge_request_diff
merge_request_diff_files:
- merge_request_diff
+merge_request_context_commits:
+- merge_request
+- diff_files
ci_pipelines:
- project
- user
@@ -210,6 +202,7 @@ ci_pipelines:
- vulnerabilities_occurrence_pipelines
- vulnerability_findings
- pipeline_config
+- security_scans
pipeline_variables:
- pipeline
stages:
@@ -446,6 +439,7 @@ project:
- package_files
- tracing_setting
- alerting_setting
+- project_setting
- webide_pipelines
- reviews
- incident_management_setting
@@ -607,4 +601,4 @@ epic:
- due_date_sourcing_epic
- events
- resource_label_events
-- user_mentions
\ No newline at end of file
+- user_mentions
diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
index 6f90798f815..56ec6ec0f59 100644
--- a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
@@ -216,8 +216,6 @@ describe Gitlab::ImportExport::FastHashSerializer do
end
def setup_project
- issue = create(:issue, assignees: [user])
- snippet = create(:project_snippet)
release = create(:release)
group = create(:group)
@@ -228,12 +226,14 @@ describe Gitlab::ImportExport::FastHashSerializer do
:wiki_enabled,
:builds_private,
description: 'description',
- issues: [issue],
- snippets: [snippet],
releases: [release],
group: group,
approvals_before_merge: 1
)
+ allow(project).to receive(:commit).and_return(Commit.new(RepoHelpers.sample_commit, project))
+
+ issue = create(:issue, assignees: [user], project: project)
+ snippet = create(:project_snippet, project: project)
project_label = create(:label, project: project)
group_label = create(:group_label, group: group)
create(:label_link, label: project_label, target: issue)
diff --git a/spec/lib/gitlab/import_export/group_object_builder_spec.rb b/spec/lib/gitlab/import_export/group_object_builder_spec.rb
new file mode 100644
index 00000000000..08b2dae1147
--- /dev/null
+++ b/spec/lib/gitlab/import_export/group_object_builder_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::GroupObjectBuilder do
+ let(:group) { create(:group) }
+ let(:base_attributes) do
+ {
+ 'title' => 'title',
+ 'description' => 'description',
+ 'group' => group
+ }
+ end
+
+ context 'labels' do
+ let(:label_attributes) { base_attributes.merge('type' => 'GroupLabel') }
+
+ it 'finds the existing group label' do
+ group_label = create(:group_label, base_attributes)
+
+ expect(described_class.build(Label, label_attributes)).to eq(group_label)
+ end
+
+ it 'creates a new label' do
+ label = described_class.build(Label, label_attributes)
+
+ expect(label.persisted?).to be true
+ end
+
+ context 'when description is an empty string' do
+ let(:label_attributes) { base_attributes.merge('type' => 'GroupLabel', 'description' => '') }
+
+ it 'finds the existing group label' do
+ group_label = create(:group_label, label_attributes)
+
+ expect(described_class.build(Label, label_attributes)).to eq(group_label)
+ end
+ end
+ end
+
+ context 'milestones' do
+ it 'finds the existing group milestone' do
+ milestone = create(:milestone, base_attributes)
+
+ expect(described_class.build(Milestone, base_attributes)).to eq(milestone)
+ end
+
+ it 'creates a new milestone' do
+ milestone = described_class.build(Milestone, base_attributes)
+
+ expect(milestone.persisted?).to be true
+ end
+ end
+
+ describe '#initialize' do
+ context 'when attributes contain description as empty string' do
+ let(:attributes) { base_attributes.merge('description' => '') }
+
+ it 'converts empty string to nil' do
+ builder = described_class.new(Label, attributes)
+
+ expect(builder.send(:attributes)).to include({ 'description' => nil })
+ end
+ end
+ end
+end
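
(A rough illustration of the find-or-create behaviour exercised above, using a simplified stand-in rather than the actual GroupObjectBuilder API: blank descriptions are normalized to nil before looking up an existing record, and a new one is persisted when nothing matches.)

def build_group_object(klass, attributes)
  attrs = attributes.dup
  attrs['description'] = nil if attrs['description'] == ''

  klass.find_by(attrs) || klass.create!(attrs)
end

# build_group_object(Label, 'title' => 'title', 'description' => '', 'group' => group)
# returns the existing group label when one matches, otherwise creates and returns a new record.
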
diff --git a/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb b/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb
index 355757654da..34049cbf570 100644
--- a/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb
+++ b/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb
@@ -3,13 +3,15 @@
require 'spec_helper'
describe Gitlab::ImportExport::GroupProjectObjectBuilder do
- let(:project) do
+ let!(:group) { create(:group, :private) }
+ let!(:subgroup) { create(:group, :private, parent: group) }
+ let!(:project) do
create(:project, :repository,
:builds_disabled,
:issues_disabled,
name: 'project',
path: 'project',
- group: create(:group))
+ group: subgroup)
end
let(:lru_cache) { subject.send(:lru_cache) }
@@ -75,6 +77,15 @@ describe Gitlab::ImportExport::GroupProjectObjectBuilder do
'group' => project.group)).to eq(group_label)
end
+ it 'finds the existing group label in root ancestor' do
+ group_label = create(:group_label, name: 'group label', group: group)
+
+ expect(described_class.build(Label,
+ 'title' => 'group label',
+ 'project' => project,
+ 'group' => group)).to eq(group_label)
+ end
+
it 'creates a new label' do
label = described_class.build(Label,
'title' => 'group label',
@@ -95,6 +106,15 @@ describe Gitlab::ImportExport::GroupProjectObjectBuilder do
'group' => project.group)).to eq(milestone)
end
+ it 'finds the existing group milestone in root ancestor' do
+ milestone = create(:milestone, name: 'group milestone', group: group)
+
+ expect(described_class.build(Milestone,
+ 'title' => 'group milestone',
+ 'project' => project,
+ 'group' => group)).to eq(milestone)
+ end
+
it 'creates a new milestone' do
milestone = described_class.build(Milestone,
'title' => 'group milestone',
diff --git a/spec/lib/gitlab/import_export/group_relation_factory_spec.rb b/spec/lib/gitlab/import_export/group_relation_factory_spec.rb
new file mode 100644
index 00000000000..9208b2ad203
--- /dev/null
+++ b/spec/lib/gitlab/import_export/group_relation_factory_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::GroupRelationFactory do
+ let(:group) { create(:group) }
+ let(:members_mapper) { double('members_mapper').as_null_object }
+ let(:user) { create(:admin) }
+ let(:excluded_keys) { [] }
+ let(:created_object) do
+ described_class.create(relation_sym: relation_sym,
+ relation_hash: relation_hash,
+ members_mapper: members_mapper,
+ object_builder: Gitlab::ImportExport::GroupObjectBuilder,
+ user: user,
+ importable: group,
+ excluded_keys: excluded_keys)
+ end
+
+ context 'label object' do
+ let(:relation_sym) { :group_label }
+ let(:id) { random_id }
+ let(:original_group_id) { random_id }
+
+ let(:relation_hash) do
+ {
+ 'id' => id,
+ 'title' => 'Bruffefunc',
+ 'color' => '#1d2da4',
+ 'project_id' => nil,
+ 'created_at' => '2019-11-20T17:02:20.546Z',
+ 'updated_at' => '2019-11-20T17:02:20.546Z',
+ 'template' => false,
+ 'description' => 'Description',
+ 'group_id' => original_group_id,
+ 'type' => 'GroupLabel',
+ 'priorities' => [],
+ 'textColor' => '#FFFFFF'
+ }
+ end
+
+ it 'does not have the original ID' do
+ expect(created_object.id).not_to eq(id)
+ end
+
+ it 'does not have the original group_id' do
+ expect(created_object.group_id).not_to eq(original_group_id)
+ end
+
+ it 'has the new group_id' do
+ expect(created_object.group_id).to eq(group.id)
+ end
+
+ context 'excluded attributes' do
+ let(:excluded_keys) { %w[description] }
+
+ it 'are removed from the imported object' do
+ expect(created_object.description).to be_nil
+ end
+ end
+ end
+
+ context 'Notes user references' do
+ let(:relation_sym) { :notes }
+ let(:new_user) { create(:user) }
+ let(:exported_member) do
+ {
+ 'id' => 111,
+ 'access_level' => 30,
+ 'source_id' => 1,
+ 'source_type' => 'Namespace',
+ 'user_id' => 3,
+ 'notification_level' => 3,
+ 'created_at' => '2016-11-18T09:29:42.634Z',
+ 'updated_at' => '2016-11-18T09:29:42.634Z',
+ 'user' => {
+ 'id' => 999,
+ 'email' => new_user.email,
+ 'username' => new_user.username
+ }
+ }
+ end
+
+ let(:relation_hash) do
+ {
+ 'id' => 4947,
+ 'note' => 'note',
+ 'noteable_type' => 'Epic',
+ 'author_id' => 999,
+ 'created_at' => '2016-11-18T09:29:42.634Z',
+ 'updated_at' => '2016-11-18T09:29:42.634Z',
+ 'project_id' => 1,
+ 'attachment' => {
+ 'url' => nil
+ },
+ 'noteable_id' => 377,
+ 'system' => true,
+ 'author' => {
+ 'name' => 'Administrator'
+ },
+ 'events' => []
+ }
+ end
+
+ let(:members_mapper) do
+ Gitlab::ImportExport::MembersMapper.new(
+ exported_members: [exported_member],
+ user: user,
+ importable: group)
+ end
+
+ it 'maps the right author to the imported note' do
+ expect(created_object.author).to eq(new_user)
+ end
+ end
+
+ def random_id
+ rand(1000..10000)
+ end
+end
diff --git a/spec/lib/gitlab/import_export/group_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group_tree_restorer_spec.rb
new file mode 100644
index 00000000000..b2c8398d358
--- /dev/null
+++ b/spec/lib/gitlab/import_export/group_tree_restorer_spec.rb
@@ -0,0 +1,153 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::GroupTreeRestorer do
+ include ImportExport::CommonUtil
+
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+
+ describe 'restore group tree' do
+ before(:context) do
+ # Using an admin for import, so we can check assignment of existing members
+ user = create(:admin, email: 'root@gitlabexample.com')
+ create(:user, email: 'adriene.mcclure@gitlabexample.com')
+ create(:user, email: 'gwendolyn_robel@gitlabexample.com')
+
+ RSpec::Mocks.with_temporary_scope do
+ @group = create(:group, name: 'group', path: 'group')
+ @shared = Gitlab::ImportExport::Shared.new(@group)
+
+ setup_import_export_config('group_exports/complex')
+
+ group_tree_restorer = described_class.new(user: user, shared: @shared, group: @group, group_hash: nil)
+
+ @restored_group_json = group_tree_restorer.restore
+ end
+ end
+
+ context 'JSON' do
+ it 'restores models based on JSON' do
+ expect(@restored_group_json).to be_truthy
+ end
+
+ it 'has the group description' do
+ expect(Group.find_by_path('group').description).to eq('Group Description')
+ end
+
+ it 'has group labels' do
+ expect(@group.labels.count).to eq(10)
+ end
+
+ context 'issue boards' do
+ it 'has issue boards' do
+ expect(@group.boards.count).to eq(1)
+ end
+
+ it 'has board label lists' do
+ lists = @group.boards.find_by(name: 'first board').lists
+
+ expect(lists.count).to eq(3)
+ expect(lists.first.label.title).to eq('TSL')
+ expect(lists.second.label.title).to eq('Sosync')
+ end
+ end
+
+ it 'has badges' do
+ expect(@group.badges.count).to eq(1)
+ end
+
+ it 'has milestones' do
+ expect(@group.milestones.count).to eq(5)
+ end
+
+ it 'has group children' do
+ expect(@group.children.count).to eq(2)
+ end
+
+ it 'has group members' do
+ expect(@group.members.map(&:user).map(&:email)).to contain_exactly('root@gitlabexample.com', 'adriene.mcclure@gitlabexample.com', 'gwendolyn_robel@gitlabexample.com')
+ end
+ end
+ end
+
+ context 'excluded attributes' do
+ let!(:source_user) { create(:user, id: 123) }
+ let!(:importer_user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: importer_user, shared: shared, group: group, group_hash: nil) }
+ let(:group_json) { ActiveSupport::JSON.decode(IO.read(File.join(shared.export_path, 'group.json'))) }
+
+ shared_examples 'excluded attributes' do
+ excluded_attributes = %w[
+ id
+ owner_id
+ parent_id
+ created_at
+ updated_at
+ runners_token
+ runners_token_encrypted
+ saml_discovery_token
+ ]
+
+ before do
+ group.add_owner(importer_user)
+
+ setup_import_export_config('group_exports/complex')
+ end
+
+ excluded_attributes.each do |excluded_attribute|
+ it 'does not allow override of excluded attributes' do
+ expect(group_json[excluded_attribute]).not_to eq(group.public_send(excluded_attribute))
+ end
+ end
+ end
+
+ include_examples 'excluded attributes'
+ end
+
+ context 'group.json file access check' do
+ let(:user) { create(:user) }
+ let!(:group) { create(:group, name: 'group2', path: 'group2') }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group, group_hash: nil) }
+ let(:restored_group_json) { group_tree_restorer.restore }
+
+ it 'does not read a symlink' do
+ Dir.mktmpdir do |tmpdir|
+ setup_symlink(tmpdir, 'group.json')
+ allow(shared).to receive(:export_path).and_call_original
+
+ expect(group_tree_restorer.restore).to eq(false)
+ expect(shared.errors).to include('Incorrect JSON format')
+ end
+ end
+ end
+
+ context 'group visibility levels' do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group, group_hash: nil) }
+
+ before do
+ setup_import_export_config(filepath)
+
+ group_tree_restorer.restore
+ end
+
+ shared_examples 'with visibility level' do |visibility_level, expected_visibilities|
+ context "when visibility level is #{visibility_level}" do
+ let(:group) { create(:group, visibility_level) }
+ let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
+
+ it "imports all subgroups as #{visibility_level}" do
+ expect(group.children.map(&:visibility_level)).to eq(expected_visibilities)
+ end
+ end
+ end
+
+ include_examples 'with visibility level', :public, [20, 10, 0]
+ include_examples 'with visibility level', :private, [0, 0, 0]
+ include_examples 'with visibility level', :internal, [10, 10, 0]
+ end
+end
diff --git a/spec/lib/gitlab/import_export/group_tree_saver_spec.rb b/spec/lib/gitlab/import_export/group_tree_saver_spec.rb
index b856441981a..7f49c7af8fa 100644
--- a/spec/lib/gitlab/import_export/group_tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/group_tree_saver_spec.rb
@@ -80,7 +80,7 @@ describe Gitlab::ImportExport::GroupTreeSaver do
end
it 'saves the correct json' do
- expect(saved_group_json).to include({ 'description' => 'description', 'visibility_level' => 20 })
+ expect(saved_group_json).to include({ 'description' => 'description' })
end
it 'has milestones' do
@@ -95,6 +95,10 @@ describe Gitlab::ImportExport::GroupTreeSaver do
expect(saved_group_json['boards']).not_to be_empty
end
+ it 'has board label list' do
+ expect(saved_group_json['boards'].first['lists']).not_to be_empty
+ end
+
it 'has group members' do
expect(saved_group_json['members']).not_to be_empty
end
@@ -153,9 +157,26 @@ describe Gitlab::ImportExport::GroupTreeSaver do
end
context 'group attributes' do
- it 'does not contain the runners token' do
- expect(saved_group_json).not_to include("runners_token" => 'token')
+ shared_examples 'excluded attributes' do
+ excluded_attributes = %w[
+ id
+ owner_id
+ parent_id
+ created_at
+ updated_at
+ runners_token
+ runners_token_encrypted
+ saml_discovery_token
+ ]
+
+ excluded_attributes.each do |excluded_attribute|
+ it 'does not contain excluded attribute' do
+ expect(saved_group_json).not_to include(excluded_attribute => group.public_send(excluded_attribute))
+ end
+ end
end
+
+ include_examples 'excluded attributes'
end
end
end
@@ -168,7 +189,8 @@ describe Gitlab::ImportExport::GroupTreeSaver do
create(:group_badge, group: group)
group_label = create(:group_label, group: group)
create(:label_priority, label: group_label, priority: 1)
- create(:board, group: group)
+ board = create(:board, group: group)
+ create(:list, board: board, label: group_label)
create(:group_badge, group: group)
group
diff --git a/spec/lib/gitlab/import_export/hash_util_spec.rb b/spec/lib/gitlab/import_export/hash_util_spec.rb
index ddd874ddecf..b97c6665d0e 100644
--- a/spec/lib/gitlab/import_export/hash_util_spec.rb
+++ b/spec/lib/gitlab/import_export/hash_util_spec.rb
@@ -9,7 +9,7 @@ describe Gitlab::ImportExport::HashUtil do
describe '.deep_symbolize_array!' do
it 'symbolizes keys' do
expect { described_class.deep_symbolize_array!(stringified_array) }.to change {
- stringified_array.first.keys.first
+ stringified_array.first.each_key.first
}.from('test').to(:test)
end
end
@@ -17,13 +17,13 @@ describe Gitlab::ImportExport::HashUtil do
describe '.deep_symbolize_array_with_date!' do
it 'symbolizes keys' do
expect { described_class.deep_symbolize_array_with_date!(stringified_array_with_date) }.to change {
- stringified_array_with_date.first.keys.first
+ stringified_array_with_date.first.each_key.first
}.from('test_date').to(:test_date)
end
it 'transforms date strings into Time objects' do
expect { described_class.deep_symbolize_array_with_date!(stringified_array_with_date) }.to change {
- stringified_array_with_date.first.values.first.class
+ stringified_array_with_date.first.each_value.first.class
}.from(String).to(ActiveSupport::TimeWithZone)
end
end
diff --git a/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb b/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb
new file mode 100644
index 00000000000..50b26637cb1
--- /dev/null
+++ b/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Verifies that given an exported project meta-data tree, when importing this
+# tree and then exporting it again, we should obtain the initial tree.
+#
+# This equivalence only works up to a certain extent, for instance we need
+# to ignore:
+#
+# - row IDs and foreign key IDs
+# - some timestamps
+# - randomly generated fields like tokens
+#
+# as these are expected to change between import/export cycles.
+describe Gitlab::ImportExport do
+ include ImportExport::CommonUtil
+ include ConfigurationHelper
+ include ImportExport::ProjectTreeExpectations
+
+ let(:json_fixture) { 'complex' }
+
+ it 'yields the initial tree when importing and exporting it again' do
+ project = create(:project, creator: create(:user, :admin))
+
+ # We first generate a test fixture dynamically from a seed-fixture, so as to
+ # account for any fields in the initial fixture that are missing and set to
+ # defaults during import (ideally we should have realistic test fixtures
+ # that "honestly" represent exports)
+ expect(
+ restore_then_save_project(
+ project,
+ import_path: seed_fixture_path,
+ export_path: test_fixture_path)
+ ).to be true
+ # Import, then export again from the generated fixture. Any residual changes
+ # in the JSON will count towards comparison i.e. test failures.
+ expect(
+ restore_then_save_project(
+ project,
+ import_path: test_fixture_path,
+ export_path: test_tmp_path)
+ ).to be true
+
+ imported_json = JSON.parse(File.read("#{test_fixture_path}/project.json"))
+ exported_json = JSON.parse(File.read("#{test_tmp_path}/project.json"))
+
+ assert_relations_match(imported_json, exported_json)
+ end
+
+ private
+
+ def seed_fixture_path
+ "#{fixtures_path}/#{json_fixture}"
+ end
+
+ def test_fixture_path
+ "#{test_tmp_path}/#{json_fixture}"
+ end
+end
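
(The comparison above necessarily ignores volatile fields; a loose sketch of that idea follows, with hypothetical helper names, while the real spec delegates the matching to ImportExport::ProjectTreeExpectations.)

IGNORED_KEYS = %w[id created_at updated_at runners_token].freeze

# Drop keys that legitimately change across an import/export cycle before comparing.
def strip_volatile(node)
  case node
  when Hash
    node.reject { |key, _| IGNORED_KEYS.include?(key) }.transform_values { |value| strip_volatile(value) }
  when Array
    node.map { |value| strip_volatile(value) }
  else
    node
  end
end

def equivalent_trees?(imported_json, exported_json)
  strip_volatile(imported_json) == strip_volatile(exported_json)
end
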
diff --git a/spec/lib/gitlab/import_export/import_failure_service_spec.rb b/spec/lib/gitlab/import_export/import_failure_service_spec.rb
index 0351f88afdb..324328181e4 100644
--- a/spec/lib/gitlab/import_export/import_failure_service_spec.rb
+++ b/spec/lib/gitlab/import_export/import_failure_service_spec.rb
@@ -6,6 +6,7 @@ describe Gitlab::ImportExport::ImportFailureService do
let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') }
let(:label) { create(:label) }
let(:subject) { described_class.new(importable) }
+ let(:action) { "save_relation" }
let(:relation_key) { "labels" }
let(:relation_index) { 0 }
@@ -15,7 +16,12 @@ describe Gitlab::ImportExport::ImportFailureService do
let(:correlation_id) { 'my-correlation-id' }
let(:retry_count) { 2 }
let(:log_import_failure) do
- subject.log_import_failure(relation_key, relation_index, exception, retry_count)
+ subject.log_import_failure(
+ source: action,
+ relation_key: relation_key,
+ relation_index: relation_index,
+ exception: exception,
+ retry_count: retry_count)
end
before do
@@ -44,7 +50,7 @@ describe Gitlab::ImportExport::ImportFailureService do
describe '#with_retry' do
let(:perform_retry) do
- subject.with_retry(relation_key, relation_index) do
+ subject.with_retry(action: action, relation_key: relation_key, relation_index: relation_index) do
label.save!
end
end
@@ -60,7 +66,12 @@ describe Gitlab::ImportExport::ImportFailureService do
end
it 'retries and logs import failure once with correct params' do
- expect(subject).to receive(:log_import_failure).with(relation_key, relation_index, instance_of(exception), 1).once
+ expect(subject).to receive(:log_import_failure).with(
+ source: action,
+ relation_key: relation_key,
+ relation_index: relation_index,
+ exception: instance_of(exception),
+ retry_count: 1).once
perform_retry
end
@@ -85,7 +96,11 @@ describe Gitlab::ImportExport::ImportFailureService do
maximum_retry_count.times do |index|
retry_count = index + 1
- expect(subject).to receive(:log_import_failure).with(relation_key, relation_index, instance_of(exception), retry_count)
+ expect(subject).to receive(:log_import_failure).with(
+ source: action, relation_key: relation_key,
+ relation_index: relation_index,
+ exception: instance_of(exception),
+ retry_count: retry_count)
end
expect { perform_retry }.to raise_exception(exception)
diff --git a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
index c437efede4c..ab834ac3fa8 100644
--- a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
+++ b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
@@ -40,8 +40,6 @@ describe Gitlab::ImportExport::MergeRequestParser do
allow(instance).to receive(:branch_exists?).with(merge_request.source_branch).and_return(false)
allow(instance).to receive(:fork_merge_request?).and_return(true)
end
- allow(Gitlab::GitalyClient).to receive(:migrate).and_call_original
- allow(Gitlab::GitalyClient).to receive(:migrate).with(:fetch_ref).and_return([nil, 0])
expect(parsed_merge_request).to eq(merge_request)
end
diff --git a/spec/lib/gitlab/import_export/project_tree_loader_spec.rb b/spec/lib/gitlab/import_export/project_tree_loader_spec.rb
new file mode 100644
index 00000000000..b22de5a3f7b
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project_tree_loader_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::ProjectTreeLoader do
+ let(:fixture) { 'spec/fixtures/lib/gitlab/import_export/with_duplicates.json' }
+ let(:project_tree) { JSON.parse(File.read(fixture)) }
+
+ context 'without de-duplicating entries' do
+ let(:parsed_tree) do
+ subject.load(fixture)
+ end
+
+ it 'parses the JSON into the expected tree' do
+ expect(parsed_tree).to eq(project_tree)
+ end
+
+ it 'does not de-duplicate entries' do
+ expect(parsed_tree['duped_hash_with_id']).not_to be(parsed_tree['array'][0]['duped_hash_with_id'])
+ end
+ end
+
+ context 'with de-duplicating entries' do
+ let(:parsed_tree) do
+ subject.load(fixture, dedup_entries: true)
+ end
+
+ it 'parses the JSON into the expected tree' do
+ expect(parsed_tree).to eq(project_tree)
+ end
+
+ it 'de-duplicates equal values' do
+ expect(parsed_tree['duped_hash_with_id']).to be(parsed_tree['array'][0]['duped_hash_with_id'])
+ expect(parsed_tree['duped_hash_with_id']).to be(parsed_tree['nested']['duped_hash_with_id'])
+ expect(parsed_tree['duped_array']).to be(parsed_tree['array'][1]['duped_array'])
+ expect(parsed_tree['duped_array']).to be(parsed_tree['nested']['duped_array'])
+ end
+
+ it 'does not de-duplicate hashes without IDs' do
+ expect(parsed_tree['duped_hash_no_id']).to eq(parsed_tree['array'][2]['duped_hash_no_id'])
+ expect(parsed_tree['duped_hash_no_id']).not_to be(parsed_tree['array'][2]['duped_hash_no_id'])
+ end
+
+ it 'keeps single entries intact' do
+ expect(parsed_tree['simple']).to eq(42)
+ expect(parsed_tree['nested']['array']).to eq(["don't touch"])
+ end
+ end
+end
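
(A rough sketch of the de-duplication idea tested above, not the actual ProjectTreeLoader: after parsing, equal hashes that carry an 'id' are replaced with a single shared object so repeated subtrees do not multiply memory use.)

require 'json'

def dedup_entries(node, registry = {})
  case node
  when Hash
    node.each { |key, value| node[key] = dedup_entries(value, registry) }
    # Hash equality in Ruby is value-based, so equal subtrees share one registry slot.
    node.key?('id') ? (registry[node] ||= node) : node
  when Array
    node.map! { |value| dedup_entries(value, registry) }
  else
    node
  end
end

tree = JSON.parse('{"a": {"id": 1, "x": 2}, "b": {"id": 1, "x": 2}}')
deduped = dedup_entries(tree)
deduped['a'].equal?(deduped['b']) # => true
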
diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
index ac9a63e8414..c899217d164 100644
--- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
@@ -13,8 +13,8 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
# Using an admin for import, so we can check assignment of existing members
@user = create(:admin)
@existing_members = [
- create(:user, username: 'bernard_willms'),
- create(:user, username: 'saul_will')
+ create(:user, email: 'bernard_willms@gitlabexample.com'),
+ create(:user, email: 'saul_will@gitlabexample.com')
]
RSpec::Mocks.with_temporary_scope do
@@ -450,7 +450,9 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
context 'project.json file access check' do
let(:user) { create(:user) }
let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
- let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
+ let(:project_tree_restorer) do
+ described_class.new(user: user, shared: shared, project: project)
+ end
let(:restored_project_json) { project_tree_restorer.restore }
it 'does not read a symlink' do
@@ -498,6 +500,58 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
end
+ context 'when post import action throws a non-retriable exception' do
+ let(:exception) { StandardError.new('post_import_error') }
+
+ before do
+ setup_import_export_config('light')
+ expect(project)
+ .to receive(:merge_requests)
+ .and_raise(exception)
+ end
+
+ it 'reports post import error' do
+ expect(restored_project_json).to eq(false)
+ expect(shared.errors).to include('post_import_error')
+ end
+ end
+
+ context 'when post import action throws a retriable exception one time' do
+ let(:exception) { GRPC::DeadlineExceeded.new }
+
+ before do
+ setup_import_export_config('light')
+ expect(project)
+ .to receive(:merge_requests)
+ .and_raise(exception)
+ expect(project)
+ .to receive(:merge_requests)
+ .and_call_original
+ expect(restored_project_json).to eq(true)
+ end
+
+ it_behaves_like 'restores project successfully',
+ issues: 1,
+ labels: 2,
+ label_with_priorities: 'A project label',
+ milestones: 1,
+ first_issue_labels: 1,
+ services: 1,
+ import_failures: 1
+
+ it 'records the failures in the database' do
+ import_failure = ImportFailure.last
+
+ expect(import_failure.project_id).to eq(project.id)
+ expect(import_failure.relation_key).to be_nil
+ expect(import_failure.relation_index).to be_nil
+ expect(import_failure.exception_class).to eq('GRPC::DeadlineExceeded')
+ expect(import_failure.exception_message).to be_present
+ expect(import_failure.correlation_id_value).not_to be_empty
+ expect(import_failure.created_at).to be_present
+ end
+ end
+
context 'when the project has overridden params in import data' do
before do
setup_import_export_config('light')
@@ -673,7 +727,9 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:tree_hash) { { 'visibility_level' => visibility } }
- let(:restorer) { described_class.new(user: user, shared: shared, project: project) }
+ let(:restorer) do
+ described_class.new(user: user, shared: shared, project: project)
+ end
before do
expect(restorer).to receive(:read_tree_hash) { tree_hash }
diff --git a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
index 29d0099d5c1..126ac289a56 100644
--- a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
@@ -334,8 +334,6 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
end
def setup_project
- issue = create(:issue, assignees: [user])
- snippet = create(:project_snippet)
release = create(:release)
group = create(:group)
@@ -346,12 +344,14 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
:wiki_enabled,
:builds_private,
description: 'description',
- issues: [issue],
- snippets: [snippet],
releases: [release],
group: group,
approvals_before_merge: 1
)
+ allow(project).to receive(:commit).and_return(Commit.new(RepoHelpers.sample_commit, project))
+
+ issue = create(:issue, assignees: [user], project: project)
+ snippet = create(:project_snippet, project: project)
project_label = create(:label, project: project)
group_label = create(:group_label, group: group)
create(:label_link, label: project_label, target: issue)
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index ad363233bfe..807b017a67c 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -30,6 +30,7 @@ Issue:
- last_edited_at
- last_edited_by_id
- discussion_locked
+- health_status
Event:
- id
- target_type
@@ -225,6 +226,31 @@ MergeRequestDiffFile:
- b_mode
- too_large
- binary
+MergeRequestContextCommit:
+- id
+- authored_date
+- committed_date
+- relative_order
+- sha
+- author_name
+- author_email
+- committer_name
+- committer_email
+- message
+- merge_request_id
+MergeRequestContextCommitDiffFile:
+- sha
+- relative_order
+- new_file
+- renamed_file
+- deleted_file
+- new_path
+- old_path
+- a_mode
+- b_mode
+- too_large
+- binary
+- text
MergeRequest::Metrics:
- id
- created_at
@@ -334,6 +360,7 @@ CommitStatus:
- upstream_pipeline_id
- interruptible
- processed
+- scheduling_type
Ci::Variable:
- id
- project_id
@@ -758,6 +785,7 @@ ZoomMeeting:
ServiceDeskSetting:
- project_id
- issue_template_key
+- project_key
ContainerExpirationPolicy:
- created_at
- updated_at
@@ -798,3 +826,4 @@ Epic:
- state_id
- start_date_sourcing_epic_id
- due_date_sourcing_epic_id
+ - health_status
diff --git a/spec/lib/gitlab/incoming_email_spec.rb b/spec/lib/gitlab/incoming_email_spec.rb
index 598336d0b31..f5a6ea4d5b0 100644
--- a/spec/lib/gitlab/incoming_email_spec.rb
+++ b/spec/lib/gitlab/incoming_email_spec.rb
@@ -99,8 +99,8 @@ describe Gitlab::IncomingEmail do
context 'self.scan_fallback_references' do
let(:references) do
- '<issue_1@localhost>' +
- ' <reply-59d8df8370b7e95c5a49fbf86aeb2c93@localhost>' +
+ '<issue_1@localhost>' \
+ ' <reply-59d8df8370b7e95c5a49fbf86aeb2c93@localhost>' \
',<exchange@microsoft.com>'
end
diff --git a/spec/lib/gitlab/internal_post_receive/response_spec.rb b/spec/lib/gitlab/internal_post_receive/response_spec.rb
index f43762c9248..d90b85a41ed 100644
--- a/spec/lib/gitlab/internal_post_receive/response_spec.rb
+++ b/spec/lib/gitlab/internal_post_receive/response_spec.rb
@@ -9,8 +9,8 @@ describe Gitlab::InternalPostReceive::Response do
context 'when there are urls_data' do
it 'adds a message for each merge request URL' do
urls_data = [
- { new_merge_request: false, branch_name: 'foo', url: 'http://example.com/foo/bar/merge_requests/1' },
- { new_merge_request: true, branch_name: 'bar', url: 'http://example.com/foo/bar/merge_requests/new?merge_request%5Bsource_branch%5D=bar' }
+ { new_merge_request: false, branch_name: 'foo', url: 'http://example.com/foo/bar/-/merge_requests/1' },
+ { new_merge_request: true, branch_name: 'bar', url: 'http://example.com/foo/bar/-/merge_requests/new?merge_request%5Bsource_branch%5D=bar' }
]
subject.add_merge_request_urls(urls_data)
@@ -24,13 +24,13 @@ describe Gitlab::InternalPostReceive::Response do
describe '#add_merge_request_url' do
context 'when :new_merge_request is false' do
it 'adds a basic message to view the existing merge request' do
- url_data = { new_merge_request: false, branch_name: 'foo', url: 'http://example.com/foo/bar/merge_requests/1' }
+ url_data = { new_merge_request: false, branch_name: 'foo', url: 'http://example.com/foo/bar/-/merge_requests/1' }
subject.add_merge_request_url(url_data)
message = <<~MESSAGE.strip
View merge request for foo:
- http://example.com/foo/bar/merge_requests/1
+ http://example.com/foo/bar/-/merge_requests/1
MESSAGE
expect(subject.messages.first.message).to eq(message)
@@ -40,13 +40,13 @@ describe Gitlab::InternalPostReceive::Response do
context 'when :new_merge_request is true' do
it 'adds a basic message to create a new merge request' do
- url_data = { new_merge_request: true, branch_name: 'bar', url: 'http://example.com/foo/bar/merge_requests/new?merge_request%5Bsource_branch%5D=bar' }
+ url_data = { new_merge_request: true, branch_name: 'bar', url: 'http://example.com/foo/bar/-/merge_requests/new?merge_request%5Bsource_branch%5D=bar' }
subject.add_merge_request_url(url_data)
message = <<~MESSAGE.strip
To create a merge request for bar, visit:
- http://example.com/foo/bar/merge_requests/new?merge_request%5Bsource_branch%5D=bar
+ http://example.com/foo/bar/-/merge_requests/new?merge_request%5Bsource_branch%5D=bar
MESSAGE
expect(subject.messages.first.message).to eq(message)
diff --git a/spec/lib/gitlab/kubernetes/generic_secret_spec.rb b/spec/lib/gitlab/kubernetes/generic_secret_spec.rb
new file mode 100644
index 00000000000..fe1d4cc11e6
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/generic_secret_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Kubernetes::GenericSecret do
+ let(:secret) { described_class.new(name, data, namespace) }
+ let(:name) { 'example-name' }
+ let(:data) { 'example-data' }
+ let(:namespace) { 'example-namespace' }
+
+ describe '#generate' do
+ subject { secret.generate }
+
+ let(:resource) do
+ ::Kubeclient::Resource.new(
+ type: 'Opaque',
+ metadata: { name: name, namespace: namespace },
+ data: data
+ )
+ end
+
+ it { is_expected.to eq(resource) }
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
index e08981a3415..1959fbca33b 100644
--- a/spec/lib/gitlab/kubernetes/kube_client_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
@@ -92,6 +92,16 @@ describe Gitlab::Kubernetes::KubeClient do
it_behaves_like 'local address'
end
+
+ it 'falls back to default options, but allows overriding' do
+ client = Gitlab::Kubernetes::KubeClient.new(api_url, {})
+ defaults = Gitlab::Kubernetes::KubeClient::DEFAULT_KUBECLIENT_OPTIONS
+ expect(client.kubeclient_options[:timeouts]).to eq(defaults[:timeouts])
+
+ client = Gitlab::Kubernetes::KubeClient.new(api_url, timeouts: { read: 7 })
+ expect(client.kubeclient_options[:timeouts][:read]).to eq(7)
+ expect(client.kubeclient_options[:timeouts][:open]).to eq(defaults[:timeouts][:open])
+ end
end
describe '#core_client' do
@@ -229,20 +239,30 @@ describe Gitlab::Kubernetes::KubeClient do
end
end
- describe 'extensions API group' do
- let(:api_groups) { ['apis/extensions'] }
+ describe '#get_deployments' do
let(:extensions_client) { client.extensions_client }
+ let(:apps_client) { client.apps_client }
+
+ include_examples 'redirection not allowed', 'get_deployments'
+ include_examples 'dns rebinding not allowed', 'get_deployments'
- describe '#get_deployments' do
- include_examples 'redirection not allowed', 'get_deployments'
- include_examples 'dns rebinding not allowed', 'get_deployments'
+ it 'delegates to the extensions client' do
+ expect(extensions_client).to receive(:get_deployments)
- it 'delegates to the extensions client' do
- expect(client).to delegate_method(:get_deployments).to(:extensions_client)
+ client.get_deployments
+ end
+
+ context 'when the extensions group does not have deployments (Kubernetes 1.16+ clusters)' do
+ before do
+ WebMock
+ .stub_request(:get, api_url + '/apis/extensions/v1beta1')
+ .to_return(kube_response(kube_1_16_extensions_v1beta1_discovery_body))
end
- it 'responds to the method' do
- expect(client).to respond_to :get_deployments
+ it 'delegates to the apps client' do
+ expect(apps_client).to receive(:get_deployments)
+
+ client.get_deployments
end
end
end
diff --git a/spec/lib/gitlab/kubernetes/tls_secret_spec.rb b/spec/lib/gitlab/kubernetes/tls_secret_spec.rb
new file mode 100644
index 00000000000..438a0dc79fc
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/tls_secret_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Kubernetes::TlsSecret do
+ let(:secret) { described_class.new(name, cert, key, namespace) }
+ let(:name) { 'example-name' }
+ let(:cert) { 'example-cert' }
+ let(:key) { 'example-key' }
+ let(:namespace) { 'example-namespace' }
+
+ let(:data) do
+ {
+ 'tls.crt': Base64.strict_encode64(cert),
+ 'tls.key': Base64.strict_encode64(key)
+ }
+ end
+
+ describe '#generate' do
+ subject { secret.generate }
+
+ let(:resource) do
+ ::Kubeclient::Resource.new(
+ type: 'kubernetes.io/tls',
+ metadata: { name: name, namespace: namespace },
+ data: data
+ )
+ end
+
+ it { is_expected.to eq(resource) }
+ end
+end
diff --git a/spec/lib/gitlab/legacy_github_import/client_spec.rb b/spec/lib/gitlab/legacy_github_import/client_spec.rb
index 194518a1f36..8d1786ae49a 100644
--- a/spec/lib/gitlab/legacy_github_import/client_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/client_spec.rb
@@ -13,9 +13,7 @@ describe Gitlab::LegacyGithubImport::Client do
end
it 'convert OAuth2 client options to symbols' do
- client.client.options.keys.each do |key|
- expect(key).to be_kind_of(Symbol)
- end
+ expect(client.client.options.keys).to all(be_kind_of(Symbol))
end
it 'does not crash (e.g. Settingslogic::MissingSetting) when verify_ssl config is not present' do
diff --git a/spec/lib/gitlab/legacy_github_import/importer_spec.rb b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
index c6ee0a3c094..7fef763f64d 100644
--- a/spec/lib/gitlab/legacy_github_import/importer_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
@@ -205,7 +205,7 @@ describe Gitlab::LegacyGithubImport::Importer do
let(:gh_pull_request) { Gitlab::LegacyGithubImport::PullRequestFormatter.new(project, closed_pull_request) }
it 'does remove branches' do
- expect(subject).to receive(:remove_branch).at_least(2).times
+ expect(subject).to receive(:remove_branch).at_least(:twice)
subject.send(:clean_up_restored_branches, gh_pull_request)
end
end
diff --git a/spec/lib/gitlab/log_timestamp_formatter_spec.rb b/spec/lib/gitlab/log_timestamp_formatter_spec.rb
new file mode 100644
index 00000000000..1a76d02889b
--- /dev/null
+++ b/spec/lib/gitlab/log_timestamp_formatter_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::LogTimestampFormatter do
+ subject { described_class.new }
+
+ let(:formatted_timestamp) { Time.now.utc.iso8601(3) }
+
+ it 'logs the timestamp in UTC and ISO8601.3 format' do
+ Timecop.freeze(Time.now) do
+ expect(subject.call('', Time.now, '', '')).to include formatted_timestamp
+ end
+ end
+end
diff --git a/spec/lib/gitlab/looping_batcher_spec.rb b/spec/lib/gitlab/looping_batcher_spec.rb
new file mode 100644
index 00000000000..b03e969c1e7
--- /dev/null
+++ b/spec/lib/gitlab/looping_batcher_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::LoopingBatcher, :use_clean_rails_memory_store_caching do
+ describe '#next_range!' do
+ let(:model_class) { LfsObject }
+ let(:key) { 'looping_batcher_spec' }
+ let(:batch_size) { 2 }
+
+ subject { described_class.new(model_class, key: key, batch_size: batch_size).next_range! }
+
+ context 'when there are no records' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'when there are records' do
+ let!(:records) { create_list(model_class.underscore, 3) }
+
+ context 'when it has never been called before' do
+ it { is_expected.to be_a Range }
+
+ it 'starts from the beginning' do
+ expect(subject.first).to eq(1)
+ end
+
+ it 'ends at a full batch' do
+ expect(subject.last).to eq(records.second.id)
+ end
+
+ context 'when the batch size is greater than the number of records' do
+ let(:batch_size) { 5 }
+
+ it 'ends at the last ID' do
+ expect(subject.last).to eq(records.last.id)
+ end
+ end
+ end
+
+ context 'when it was called before' do
+ context 'when the previous batch included the end of the table' do
+ before do
+ described_class.new(model_class, key: key, batch_size: model_class.count).next_range!
+ end
+
+ it 'starts from the beginning' do
+ expect(subject).to eq(1..records.second.id)
+ end
+ end
+
+ context 'when the previous batch did not include the end of the table' do
+ before do
+ described_class.new(model_class, key: key, batch_size: model_class.count - 1).next_range!
+ end
+
+ it 'starts after the previous batch' do
+ expect(subject).to eq(records.last.id..records.last.id)
+ end
+ end
+
+ context 'if cache is cleared' do
+ it 'starts from the beginning' do
+ Rails.cache.clear
+
+ expect(subject).to eq(1..records.second.id)
+ end
+ end
+ end
+ end
+ end
+end
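
(A minimal sketch of the looping-batcher idea the spec above exercises, assuming an in-memory stand-in instead of the cache-backed Gitlab::LoopingBatcher: remember the last handed-out id and return the next range, wrapping back to the start once the end of the table has been reached.)

class SimpleLoopingBatcher
  def initialize(ids, batch_size:)
    @ids = ids.sort
    @batch_size = batch_size
    @last = nil
  end

  def next_range!
    return if @ids.empty?

    start = @last ? @ids.find { |id| id > @last } : @ids.first
    start ||= @ids.first # wrap around after the previous batch reached the end

    batch = @ids.select { |id| id >= start }.first(@batch_size)
    @last = batch.last
    batch.first..batch.last
  end
end

batcher = SimpleLoopingBatcher.new([1, 2, 3], batch_size: 2)
batcher.next_range! # => 1..2
batcher.next_range! # => 3..3
batcher.next_range! # => 1..2 (wrapped back to the beginning)
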
diff --git a/spec/lib/gitlab/mail_room/mail_room_spec.rb b/spec/lib/gitlab/mail_room/mail_room_spec.rb
index cb3e214d38b..5d41ee06263 100644
--- a/spec/lib/gitlab/mail_room/mail_room_spec.rb
+++ b/spec/lib/gitlab/mail_room/mail_room_spec.rb
@@ -4,9 +4,10 @@ require 'spec_helper'
describe Gitlab::MailRoom do
let(:default_port) { 143 }
- let(:default_config) do
+ let(:yml_config) do
{
- enabled: false,
+ enabled: true,
+ address: 'address@example.com',
port: default_port,
ssl: false,
start_tls: false,
@@ -16,37 +17,73 @@ describe Gitlab::MailRoom do
}
end
+ let(:custom_config) { {} }
+ let(:incoming_email_config) { yml_config.merge(custom_config) }
+ let(:service_desk_email_config) { yml_config.merge(custom_config) }
+
+ let(:configs) do
+ {
+ incoming_email: incoming_email_config,
+ service_desk_email: service_desk_email_config
+ }
+ end
+
before do
- described_class.reset_config!
- allow(File).to receive(:exist?).and_return true
+ described_class.instance_variable_set(:@enabled_configs, nil)
end
- describe '#config' do
- context 'if the yml file cannot be found' do
+ describe '#enabled_configs' do
+ before do
+ allow(described_class).to receive(:load_yaml).and_return(configs)
+ end
+
+ context 'when both email and address is set' do
+ it 'returns email configs' do
+ expect(described_class.enabled_configs.size).to eq(2)
+ end
+ end
+
+ context 'when the yml file cannot be found' do
before do
- allow(File).to receive(:exist?).and_return false
+ allow(described_class).to receive(:config_file).and_return('not_existing_file')
end
- it 'returns an empty hash' do
- expect(described_class.config).to be_empty
+ it 'returns an empty list' do
+ expect(described_class.enabled_configs).to be_empty
end
end
- before do
- allow(described_class).to receive(:load_from_yaml).and_return(default_config)
+ context 'when email is disabled' do
+ let(:custom_config) { { enabled: false } }
+
+ it 'returns an empty list' do
+ expect(described_class.enabled_configs).to be_empty
+ end
end
- it 'sets up config properly' do
- expected_result = default_config
+ context 'when email is enabled but address is not set' do
+ let(:custom_config) { { enabled: true, address: '' } }
- expect(described_class.config).to match expected_result
+ it 'returns an empty list' do
+ expect(described_class.enabled_configs).to be_empty
+ end
end
context 'when a config value is missing from the yml file' do
+ let(:yml_config) { {} }
+ let(:custom_config) { { enabled: true, address: 'address@example.com' } }
+
it 'overwrites missing values with the default' do
- stub_config(port: nil)
+ expect(described_class.enabled_configs.first[:port]).to eq(Gitlab::MailRoom::DEFAULT_CONFIG[:port])
+ end
+ end
+
+ context 'when only incoming_email config is present' do
+ let(:configs) { { incoming_email: incoming_email_config } }
- expect(described_class.config[:port]).to eq default_port
+      it 'returns only incoming_email' do
+ expect(described_class.enabled_configs.size).to eq(1)
+ expect(described_class.enabled_configs.first[:worker]).to eq('EmailReceiverWorker')
end
end
@@ -57,50 +94,31 @@ describe Gitlab::MailRoom do
allow(Gitlab::Redis::Queues).to receive(:new).and_return(fake_redis_queues)
end
- target_proc = proc { described_class.config[:redis_url] }
+ it 'sets redis config' do
+ config = described_class.enabled_configs.first
- it_behaves_like 'only truthy if both enabled and address are truthy', target_proc
+ expect(config[:redis_url]).to eq('localhost')
+ expect(config[:sentinels]).to eq('yes, them')
+ end
end
describe 'setting up the log path' do
context 'if the log path is a relative path' do
- it 'expands the log path to an absolute value' do
- stub_config(log_path: 'tiny_log.log')
+ let(:custom_config) { { log_path: 'tiny_log.log' } }
- new_path = Pathname.new(described_class.config[:log_path])
+ it 'expands the log path to an absolute value' do
+ new_path = Pathname.new(described_class.enabled_configs.first[:log_path])
expect(new_path.absolute?).to be_truthy
end
end
context 'if the log path is absolute path' do
- it 'leaves the path as-is' do
- new_path = '/dev/null'
- stub_config(log_path: new_path)
+ let(:custom_config) { { log_path: '/dev/null' } }
- expect(described_class.config[:log_path]).to eq new_path
+ it 'leaves the path as-is' do
+ expect(described_class.enabled_configs.first[:log_path]).to eq '/dev/null'
end
end
end
end
-
- describe '#enabled?' do
- target_proc = proc { described_class.enabled? }
-
- it_behaves_like 'only truthy if both enabled and address are truthy', target_proc
- end
-
- describe '#reset_config?' do
- it 'resets config' do
- described_class.instance_variable_set(:@config, { some_stuff: 'hooray' })
-
- described_class.reset_config!
-
- expect(described_class.instance_variable_get(:@config)).to be_nil
- end
- end
-
- def stub_config(override_values)
- modified_config = default_config.merge(override_values)
- allow(described_class).to receive(:load_from_yaml).and_return(modified_config)
- end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
index 697bedf7362..2d3b61e61ce 100644
--- a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
@@ -44,6 +44,12 @@ describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store_cachi
it_behaves_like 'valid dashboard service response'
end
+ context 'when the self monitoring dashboard is specified' do
+ let(:dashboard_path) { self_monitoring_dashboard_path }
+
+ it_behaves_like 'valid dashboard service response'
+ end
+
context 'when no dashboard is specified' do
let(:service_call) { described_class.find(project, user, environment: environment) }
@@ -152,5 +158,33 @@ describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store_cachi
expect(all_dashboard_paths).to contain_exactly(system_dashboard, project_dashboard)
end
end
+
+ context 'when the project is self monitoring' do
+ let(:self_monitoring_dashboard) do
+ {
+ path: self_monitoring_dashboard_path,
+ display_name: 'Default',
+ default: true,
+ system_dashboard: false
+ }
+ end
+ let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
+ let(:project) { project_with_dashboard(dashboard_path) }
+
+ before do
+ stub_application_setting(self_monitoring_project_id: project.id)
+ end
+
+ it 'includes self monitoring and project dashboards' do
+ project_dashboard = {
+ path: dashboard_path,
+ display_name: 'test.yml',
+ default: false,
+ system_dashboard: false
+ }
+
+ expect(all_dashboard_paths).to contain_exactly(self_monitoring_dashboard, project_dashboard)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
index e186a383059..e8860d50437 100644
--- a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
@@ -12,6 +12,7 @@ describe Gitlab::Metrics::Dashboard::Processor do
[
Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
Gitlab::Metrics::Dashboard::Stages::ProjectMetricsInserter,
+ Gitlab::Metrics::Dashboard::Stages::ProjectMetricsDetailsInserter,
Gitlab::Metrics::Dashboard::Stages::EndpointInserter,
Gitlab::Metrics::Dashboard::Stages::Sorter
]
@@ -25,6 +26,10 @@ describe Gitlab::Metrics::Dashboard::Processor do
end
end
+ it 'includes boolean to indicate if panel group has custom metrics' do
+    expect(dashboard[:panel_groups]).to all(include(has_custom_metrics: boolean))
+ end
+
context 'when the dashboard is not present' do
let(:dashboard_yml) { nil }
@@ -62,7 +67,7 @@ describe Gitlab::Metrics::Dashboard::Processor do
'metric_a1', # group priority 1, panel weight 1
project_business_metric.id, # group priority 0, panel weight nil (0)
project_response_metric.id, # group priority -5, panel weight nil (0)
- project_system_metric.id, # group priority -10, panel weight nil (0)
+ project_system_metric.id # group priority -10, panel weight nil (0)
]
actual_metrics_order = all_metrics.map { |m| m[:id] || m[:metric_id] }
@@ -145,7 +150,8 @@ describe Gitlab::Metrics::Dashboard::Processor do
unit: metric.unit,
label: metric.legend,
metric_id: metric.id,
- prometheus_endpoint_path: prometheus_path(metric.query)
+ prometheus_endpoint_path: prometheus_path(metric.query),
+ edit_path: edit_metric_path(metric)
}
end
@@ -165,4 +171,11 @@ describe Gitlab::Metrics::Dashboard::Processor do
identifier: metric
)
end
+
+ def edit_metric_path(metric)
+ Gitlab::Routing.url_helpers.edit_project_prometheus_metric_path(
+ project,
+ metric.id
+ )
+ end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb b/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
index e0c8133994b..c0d71bfe5d0 100644
--- a/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
@@ -30,6 +30,12 @@ describe Gitlab::Metrics::Dashboard::ServiceSelector do
end
end
+ context 'when the path is for the self monitoring dashboard' do
+ let(:arguments) { { dashboard_path: self_monitoring_dashboard_path } }
+
+ it { is_expected.to be Metrics::Dashboard::SelfMonitoringDashboardService }
+ end
+
context 'when the embedded flag is provided' do
let(:arguments) { { embedded: true } }
diff --git a/spec/lib/gitlab/metrics/dashboard/url_spec.rb b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
index daaf66cba46..9ccd1c06d6b 100644
--- a/spec/lib/gitlab/metrics/dashboard/url_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
@@ -3,39 +3,7 @@
require 'spec_helper'
describe Gitlab::Metrics::Dashboard::Url do
- shared_examples_for 'a regex which matches the expected url' do
- it { is_expected.to be_a Regexp }
-
- it 'matches a metrics dashboard link with named params' do
- expect(subject).to match url
-
- subject.match(url) do |m|
- expect(m.named_captures).to eq expected_params
- end
- end
- end
-
- shared_examples_for 'does not match non-matching urls' do
- it 'does not match other gitlab urls that contain the term metrics' do
- url = Gitlab::Routing.url_helpers.active_common_namespace_project_prometheus_metrics_url('foo', 'bar', :json)
-
- expect(subject).not_to match url
- end
-
- it 'does not match other gitlab urls' do
- url = Gitlab.config.gitlab.url
-
- expect(subject).not_to match url
- end
-
- it 'does not match non-gitlab urls' do
- url = 'https://www.super_awesome_site.com/'
-
- expect(subject).not_to match url
- end
- end
-
- describe '#regex' do
+ describe '#metrics_regex' do
let(:url) do
Gitlab::Routing.url_helpers.metrics_namespace_project_environment_url(
'foo',
@@ -59,10 +27,9 @@ describe Gitlab::Metrics::Dashboard::Url do
}
end
- subject { described_class.regex }
+ subject { described_class.metrics_regex }
- it_behaves_like 'a regex which matches the expected url'
- it_behaves_like 'does not match non-matching urls'
+ it_behaves_like 'regex which matches url when expected'
end
describe '#grafana_regex' do
@@ -89,15 +56,14 @@ describe Gitlab::Metrics::Dashboard::Url do
subject { described_class.grafana_regex }
- it_behaves_like 'a regex which matches the expected url'
- it_behaves_like 'does not match non-matching urls'
+ it_behaves_like 'regex which matches url when expected'
end
describe '#build_dashboard_url' do
it 'builds the url for the dashboard endpoint' do
url = described_class.build_dashboard_url('foo', 'bar', 1)
- expect(url).to match described_class.regex
+ expect(url).to match described_class.metrics_regex
end
end
end
diff --git a/spec/lib/gitlab/middleware/go_spec.rb b/spec/lib/gitlab/middleware/go_spec.rb
index 2b90035d148..99c2a364dfc 100644
--- a/spec/lib/gitlab/middleware/go_spec.rb
+++ b/spec/lib/gitlab/middleware/go_spec.rb
@@ -241,7 +241,7 @@ describe Gitlab::Middleware::Go do
project_url = "http://#{Gitlab.config.gitlab.host}/#{path}"
expect(response[0]).to eq(200)
expect(response[1]['Content-Type']).to eq('text/html')
- expected_body = %{<html><head><meta name="go-import" content="#{Gitlab.config.gitlab.host}/#{path} git #{repository_url}" /><meta name="go-source" content="#{Gitlab.config.gitlab.host}/#{path} #{project_url} #{project_url}/tree/#{branch}{/dir} #{project_url}/blob/#{branch}{/dir}/{file}#L{line}" /></head><body>go get #{Gitlab.config.gitlab.url}/#{path}</body></html>}
+ expected_body = %{<html><head><meta name="go-import" content="#{Gitlab.config.gitlab.host}/#{path} git #{repository_url}" /><meta name="go-source" content="#{Gitlab.config.gitlab.host}/#{path} #{project_url} #{project_url}/-/tree/#{branch}{/dir} #{project_url}/-/blob/#{branch}{/dir}/{file}#L{line}" /></head><body>go get #{Gitlab.config.gitlab.url}/#{path}</body></html>}
expect(response[2].body).to eq([expected_body])
end
end
diff --git a/spec/lib/gitlab/private_commit_email_spec.rb b/spec/lib/gitlab/private_commit_email_spec.rb
index 10bf624bbdd..7b7a0f7c0ca 100644
--- a/spec/lib/gitlab/private_commit_email_spec.rb
+++ b/spec/lib/gitlab/private_commit_email_spec.rb
@@ -8,7 +8,7 @@ describe Gitlab::PrivateCommitEmail do
let(:valid_email) { "#{id}-foo@#{hostname}" }
let(:invalid_email) { "#{id}-foo@users.noreply.bar.com" }
- context '.regex' do
+ describe '.regex' do
subject { described_class.regex }
it { is_expected.to match("1-foo@#{hostname}") }
@@ -18,7 +18,7 @@ describe Gitlab::PrivateCommitEmail do
it { is_expected.not_to match('foobar@gitlab.com') }
end
- context '.user_id_for_email' do
+ describe '.user_id_for_email' do
it 'parses user id from email' do
expect(described_class.user_id_for_email(valid_email)).to eq(id)
end
@@ -28,7 +28,7 @@ describe Gitlab::PrivateCommitEmail do
end
end
- context '.user_ids_for_email' do
+ describe '.user_ids_for_email' do
it 'returns deduplicated user IDs for each valid email' do
result = described_class.user_ids_for_emails([valid_email, valid_email, invalid_email])
@@ -41,7 +41,7 @@ describe Gitlab::PrivateCommitEmail do
end
end
- context '.for_user' do
+ describe '.for_user' do
it 'returns email in the format id-username@hostname' do
user = create(:user)
diff --git a/spec/lib/gitlab/profiler_spec.rb b/spec/lib/gitlab/profiler_spec.rb
index 8f6fb6eda65..0186d48fd1b 100644
--- a/spec/lib/gitlab/profiler_spec.rb
+++ b/spec/lib/gitlab/profiler_spec.rb
@@ -192,4 +192,43 @@ describe Gitlab::Profiler do
expect(described_class.log_load_times_by_model(null_logger)).to be_nil
end
end
+
+ describe '.print_by_total_time' do
+ let(:stdout) { StringIO.new }
+
+ let(:output) do
+ stdout.rewind
+ stdout.read
+ end
+
+ let_it_be(:result) do
+ RubyProf.profile do
+ sleep 0.1
+ 1.to_s
+ end
+ end
+
+ before do
+ stub_const('STDOUT', stdout)
+ end
+
+ it 'prints a profile result sorted by total time' do
+ described_class.print_by_total_time(result)
+
+ total_times =
+ output
+ .scan(/^\s+\d+\.\d+\s+(\d+\.\d+)/)
+ .map { |(total)| total.to_f }
+
+ expect(output).to include('Kernel#sleep')
+ expect(total_times).to eq(total_times.sort.reverse)
+ expect(total_times).not_to eq(total_times.uniq)
+ end
+
+ it 'accepts a max_percent option' do
+ described_class.print_by_total_time(result, max_percent: 50)
+
+ expect(output).not_to include('Kernel#sleep')
+ end
+ end
end
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index ae4c14e4deb..d206d31eb96 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -31,7 +31,7 @@ describe Gitlab::ProjectSearchResults do
let(:results) { described_class.new(user, project, query) }
where(:scope, :count_method, :expected) do
- 'blobs' | :blobs_count | '1234'
+ 'blobs' | :limited_blobs_count | max_limited_count
'notes' | :limited_notes_count | max_limited_count
'wiki_blobs' | :wiki_blobs_count | '1234'
'commits' | :commits_count | '1234'
@@ -141,9 +141,9 @@ describe Gitlab::ProjectSearchResults do
describe 'blob search' do
let(:project) { create(:project, :public, :repository) }
+ let(:blob_type) { 'blobs' }
it_behaves_like 'general blob search', 'repository', 'blobs' do
- let(:blob_type) { 'blobs' }
let(:disabled_project) { create(:project, :public, :repository, :repository_disabled) }
let(:private_project) { create(:project, :public, :repository, :repository_private) }
let(:expected_file_by_path) { 'files/images/wm.svg' }
@@ -151,9 +151,36 @@ describe Gitlab::ProjectSearchResults do
end
it_behaves_like 'blob search repository ref', 'project' do
- let(:blob_type) { 'blobs' }
let(:entity) { project }
end
+
+ context 'pagination' do
+ let(:per_page) { 20 }
+ let(:count_limit) { described_class::COUNT_LIMIT }
+ let(:file_finder) { instance_double('Gitlab::FileFinder') }
+ let(:results) { described_class.new(user, project, query, per_page: per_page) }
+ let(:repository_ref) { 'master' }
+
+ before do
+ allow(file_finder).to receive(:find).and_return([])
+ expect(Gitlab::FileFinder).to receive(:new).with(project, repository_ref).and_return(file_finder)
+ end
+
+ it 'limits search results based on the first page' do
+ expect(file_finder).to receive(:find).with(query, content_match_cutoff: count_limit)
+ results.objects(blob_type, 1)
+ end
+
+ it 'limits search results based on the second page' do
+ expect(file_finder).to receive(:find).with(query, content_match_cutoff: count_limit + per_page)
+ results.objects(blob_type, 2)
+ end
+
+ it 'limits search results based on the third page' do
+ expect(file_finder).to receive(:find).with(query, content_match_cutoff: count_limit + per_page * 2)
+ results.objects(blob_type, 3)
+ end
+ end
end
describe 'wiki search' do
diff --git a/spec/lib/gitlab/prometheus/queries/knative_invocation_query_spec.rb b/spec/lib/gitlab/prometheus/queries/knative_invocation_query_spec.rb
index fa2dccc7c92..ad254d3c50a 100644
--- a/spec/lib/gitlab/prometheus/queries/knative_invocation_query_spec.rb
+++ b/spec/lib/gitlab/prometheus/queries/knative_invocation_query_spec.rb
@@ -6,7 +6,7 @@ describe Gitlab::Prometheus::Queries::KnativeInvocationQuery do
include PrometheusHelpers
let(:project) { create(:project) }
- let(:serverless_func) { Serverless::Function.new(project, 'test-name', 'test-ns') }
+ let(:serverless_func) { ::Serverless::Function.new(project, 'test-name', 'test-ns') }
let(:client) { double('prometheus_client') }
subject { described_class.new(client) }
diff --git a/spec/lib/gitlab/query_limiting/middleware_spec.rb b/spec/lib/gitlab/query_limiting/middleware_spec.rb
index f996ea38bb9..f397843df54 100644
--- a/spec/lib/gitlab/query_limiting/middleware_spec.rb
+++ b/spec/lib/gitlab/query_limiting/middleware_spec.rb
@@ -26,7 +26,7 @@ describe Gitlab::QueryLimiting::Middleware do
:controller,
action_name: 'show',
class: double(:class, name: 'UsersController'),
- content_type: 'text/html'
+ media_type: 'text/html'
)
}
@@ -39,7 +39,7 @@ describe Gitlab::QueryLimiting::Middleware do
:controller,
action_name: 'show',
class: double(:class, name: 'UsersController'),
- content_type: 'application/json'
+ media_type: 'application/json'
)
}
diff --git a/spec/lib/gitlab/quick_actions/extractor_spec.rb b/spec/lib/gitlab/quick_actions/extractor_spec.rb
index f1acb5b7049..2536e4a372b 100644
--- a/spec/lib/gitlab/quick_actions/extractor_spec.rb
+++ b/spec/lib/gitlab/quick_actions/extractor_spec.rb
@@ -294,4 +294,22 @@ describe Gitlab::QuickActions::Extractor do
expect(msg).to eq expected_msg
end
end
+
+ describe '#redact_commands' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:text, :expected) do
+ "hello\n/labels ~label1 ~label2\nworld" | "hello\n`/labels ~label1 ~label2`\nworld"
+ "hello\n/open\n/labels ~label1\nworld" | "hello\n`/open`\n`/labels ~label1`\nworld"
+ "hello\n/reopen\nworld" | "hello\n`/reopen`\nworld"
+ "/reopen\nworld" | "`/reopen`\nworld"
+ "hello\n/open" | "hello\n`/open`"
+ end
+
+ with_them do
+ it 'encloses quick actions with code span markdown' do
+ expect(extractor.redact_commands(text)).to eq(expected)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/quick_actions/substitution_definition_spec.rb b/spec/lib/gitlab/quick_actions/substitution_definition_spec.rb
index d0bb032f776..a09aca31cdc 100644
--- a/spec/lib/gitlab/quick_actions/substitution_definition_spec.rb
+++ b/spec/lib/gitlab/quick_actions/substitution_definition_spec.rb
@@ -19,11 +19,29 @@ EOF
expect(subject.perform_substitution(self, nil)).to be_nil
end
- it 'performs the substitution by default' do
- expect(subject.perform_substitution(self, content)).to eq <<EOF
+ context 'when content contains command name' do
+ it 'performs the substitution by default' do
+ expect(subject.perform_substitution(self, content)).to eq <<EOF
Hello! Let's do this!
I like this stuff foo
EOF
+ end
+ end
+
+ context 'when content contains command name in word' do
+ let(:content) do
+ <<EOF
+Hello! Let's do this!
+`/sub_names` I like this stuff
+EOF
+ end
+
+ it 'does not perform the substitution' do
+ expect(subject.perform_substitution(self, content)).to eq <<EOF
+Hello! Let's do this!
+`/sub_names` I like this stuff
+EOF
+ end
end
end
@@ -41,5 +59,9 @@ EOF
it 'is nil if content does not have the command' do
expect(subject.match('blah')).to be_falsey
end
+
+ it 'is nil if content contains the command as prefix' do
+ expect(subject.match('/sub_namex')).to be_falsey
+ end
end
end
diff --git a/spec/lib/gitlab/redis/boolean_spec.rb b/spec/lib/gitlab/redis/boolean_spec.rb
new file mode 100644
index 00000000000..bfacf0c448b
--- /dev/null
+++ b/spec/lib/gitlab/redis/boolean_spec.rb
@@ -0,0 +1,150 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Gitlab::Redis::Boolean do
+ subject(:redis_boolean) { described_class.new(bool) }
+
+ let(:bool) { true }
+ let(:label_section) { "#{described_class::LABEL}#{described_class::DELIMITER}" }
+
+ describe "#to_s" do
+ subject { redis_boolean.to_s }
+
+ context "true" do
+ let(:bool) { true }
+
+ it { is_expected.to eq("#{label_section}#{described_class::TRUE_STR}") }
+ end
+
+ context "false" do
+ let(:bool) { false }
+
+ it { is_expected.to eq("#{label_section}#{described_class::FALSE_STR}") }
+ end
+ end
+
+ describe ".encode" do
+ subject { redis_boolean.class.encode(bool) }
+
+ context "true" do
+ let(:bool) { true }
+
+ it { is_expected.to eq("#{label_section}#{described_class::TRUE_STR}") }
+ end
+
+ context "false" do
+ let(:bool) { false }
+
+ it { is_expected.to eq("#{label_section}#{described_class::FALSE_STR}") }
+ end
+ end
+
+ describe ".decode" do
+ subject { redis_boolean.class.decode(str) }
+
+ context "valid encoded bool" do
+ let(:str) { "#{label_section}#{bool_str}" }
+
+ context "true" do
+ let(:bool_str) { described_class::TRUE_STR }
+
+ it { is_expected.to be(true) }
+ end
+
+ context "false" do
+ let(:bool_str) { described_class::FALSE_STR }
+
+ it { is_expected.to be(false) }
+ end
+ end
+
+ context "partially invalid bool" do
+ let(:str) { "#{label_section}whoops" }
+
+ it "raises an error" do
+ expect { subject }.to raise_error(described_class::NotAnEncodedBooleanStringError)
+ end
+ end
+
+ context "invalid encoded bool" do
+ let(:str) { "whoops" }
+
+ it "raises an error" do
+ expect { subject }.to raise_error(described_class::NotAnEncodedBooleanStringError)
+ end
+ end
+ end
+
+ describe ".true?" do
+ subject { redis_boolean.class.true?(str) }
+
+ context "valid encoded bool" do
+ let(:str) { "#{label_section}#{bool_str}" }
+
+ context "true" do
+ let(:bool_str) { described_class::TRUE_STR }
+
+ it { is_expected.to be(true) }
+ end
+
+ context "false" do
+ let(:bool_str) { described_class::FALSE_STR }
+
+ it { is_expected.to be(false) }
+ end
+ end
+
+ context "partially invalid bool" do
+ let(:str) { "#{label_section}whoops" }
+
+ it "raises an error" do
+ expect { subject }.to raise_error(described_class::NotAnEncodedBooleanStringError)
+ end
+ end
+
+ context "invalid encoded bool" do
+ let(:str) { "whoops" }
+
+ it "raises an error" do
+ expect { subject }.to raise_error(described_class::NotAnEncodedBooleanStringError)
+ end
+ end
+ end
+
+ describe ".false?" do
+ subject { redis_boolean.class.false?(str) }
+
+ context "valid encoded bool" do
+ let(:str) { "#{label_section}#{bool_str}" }
+
+ context "true" do
+ let(:bool_str) { described_class::TRUE_STR }
+
+ it { is_expected.to be(false) }
+ end
+
+ context "false" do
+ let(:bool_str) { described_class::FALSE_STR }
+
+ it { is_expected.to be(true) }
+ end
+ end
+
+ context "partially invalid bool" do
+ let(:str) { "#{label_section}whoops" }
+
+ it "raises an error" do
+ expect { subject }.to raise_error(described_class::NotAnEncodedBooleanStringError)
+ end
+ end
+
+ context "invalid encoded bool" do
+ let(:str) { "whoops" }
+
+ it "raises an error" do
+ expect { subject }.to raise_error(described_class::NotAnEncodedBooleanStringError)
+ end
+ end
+ end
+end
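
Note: the new boolean_spec.rb above only fixes the expected behaviour. A rough, self-contained Ruby sketch of that behaviour follows; the label/delimiter values and the error class body are stand-ins inferred from the spec, not GitLab's actual constants.

# Sketch only: booleans are stored as labelled strings so that a cached
# boolean can be told apart from an arbitrary cached string.
module RedisBooleanSketch
  LABEL = "_b_"        # illustrative value, not the real constant
  DELIMITER = ":"      # illustrative value, not the real constant
  TRUE_STR = "t"
  FALSE_STR = "f"

  NotAnEncodedBooleanStringError = Class.new(StandardError)

  def self.encode(bool)
    "#{LABEL}#{DELIMITER}#{bool ? TRUE_STR : FALSE_STR}"
  end

  def self.decode(str)
    label, value = str.split(DELIMITER, 2)

    # Reject strings that were never produced by .encode
    raise NotAnEncodedBooleanStringError unless label == LABEL
    raise NotAnEncodedBooleanStringError unless [TRUE_STR, FALSE_STR].include?(value)

    value == TRUE_STR
  end

  def self.true?(str)
    decode(str)
  end

  def self.false?(str)
    !decode(str)
  end
end

RedisBooleanSketch.decode(RedisBooleanSketch.encode(true)) # => true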
diff --git a/spec/lib/gitlab/repository_cache_adapter_spec.rb b/spec/lib/gitlab/repository_cache_adapter_spec.rb
index fd1338b55a6..b4fc504ea60 100644
--- a/spec/lib/gitlab/repository_cache_adapter_spec.rb
+++ b/spec/lib/gitlab/repository_cache_adapter_spec.rb
@@ -7,6 +7,7 @@ describe Gitlab::RepositoryCacheAdapter do
let(:repository) { project.repository }
let(:cache) { repository.send(:cache) }
let(:redis_set_cache) { repository.send(:redis_set_cache) }
+ let(:redis_hash_cache) { repository.send(:redis_hash_cache) }
describe '#cache_method_output', :use_clean_rails_memory_store_caching do
let(:fallback) { 10 }
@@ -212,6 +213,8 @@ describe Gitlab::RepositoryCacheAdapter do
expect(cache).to receive(:expire).with(:branch_names)
expect(redis_set_cache).to receive(:expire).with(:rendered_readme)
expect(redis_set_cache).to receive(:expire).with(:branch_names)
+ expect(redis_hash_cache).to receive(:delete).with(:rendered_readme)
+ expect(redis_hash_cache).to receive(:delete).with(:branch_names)
repository.expire_method_caches(%i(rendered_readme branch_names))
end
diff --git a/spec/lib/gitlab/repository_cache_spec.rb b/spec/lib/gitlab/repository_cache_spec.rb
index 1b7dd1766da..e787288fc51 100644
--- a/spec/lib/gitlab/repository_cache_spec.rb
+++ b/spec/lib/gitlab/repository_cache_spec.rb
@@ -50,6 +50,18 @@ describe Gitlab::RepositoryCache do
end
end
+ describe '#write' do
+ it 'writes the given key and value to the cache' do
+ cache.write(:test, 'test')
+ expect(backend).to have_received(:write).with("test:#{namespace}", 'test')
+ end
+
+ it 'passes additional options to the backend' do
+ cache.write(:test, 'test', expires_in: 10.minutes)
+ expect(backend).to have_received(:write).with("test:#{namespace}", 'test', expires_in: 10.minutes)
+ end
+ end
+
describe '#fetch_without_caching_false', :use_clean_rails_memory_store_caching do
let(:key) { :foo }
let(:backend) { Rails.cache }
diff --git a/spec/lib/gitlab/repository_hash_cache_spec.rb b/spec/lib/gitlab/repository_hash_cache_spec.rb
new file mode 100644
index 00000000000..014a2f235b9
--- /dev/null
+++ b/spec/lib/gitlab/repository_hash_cache_spec.rb
@@ -0,0 +1,184 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Gitlab::RepositoryHashCache, :clean_gitlab_redis_cache do
+ let_it_be(:project) { create(:project) }
+ let(:repository) { project.repository }
+ let(:namespace) { "#{repository.full_path}:#{project.id}" }
+ let(:cache) { described_class.new(repository) }
+ let(:test_hash) do
+ { "test" => "value" }
+ end
+
+ describe "#cache_key" do
+ subject { cache.cache_key(:example) }
+
+ it "includes the namespace" do
+ is_expected.to eq("example:#{namespace}:hash")
+ end
+
+ context "with a given namespace" do
+ let(:extra_namespace) { "my:data" }
+ let(:cache) { described_class.new(repository, extra_namespace: extra_namespace) }
+
+ it "includes the full namespace" do
+ is_expected.to eq("example:#{namespace}:#{extra_namespace}:hash")
+ end
+ end
+ end
+
+ describe "#delete" do
+ subject { cache.delete(:example) }
+
+ context "key exists" do
+ before do
+ cache.write(:example, test_hash)
+ end
+
+ it { is_expected.to eq(1) }
+
+ it "deletes the given key from the cache" do
+ subject
+
+ expect(cache.read_members(:example, ["test"])).to eq({ "test" => nil })
+ end
+ end
+
+ context "key doesn't exist" do
+ it { is_expected.to eq(0) }
+ end
+ end
+
+ describe "#key?" do
+ subject { cache.key?(:example, "test") }
+
+ context "key exists" do
+ before do
+ cache.write(:example, test_hash)
+ end
+
+ it { is_expected.to be(true) }
+ end
+
+ context "key doesn't exist" do
+ it { is_expected.to be(false) }
+ end
+ end
+
+ describe "#read_members" do
+ subject { cache.read_members(:example, keys) }
+
+ let(:keys) { %w(test missing) }
+
+ context "all data is cached" do
+ before do
+ cache.write(:example, test_hash.merge({ "missing" => false }))
+ end
+
+ it { is_expected.to eq({ "test" => "value", "missing" => "false" }) }
+ end
+
+ context "partial data is cached" do
+ before do
+ cache.write(:example, test_hash)
+ end
+
+ it { is_expected.to eq({ "test" => "value", "missing" => nil }) }
+ end
+
+ context "no data is cached" do
+ it { is_expected.to eq({ "test" => nil, "missing" => nil }) }
+ end
+
+ context "empty keys are passed for some reason" do
+ let(:keys) { [] }
+
+ it "raises an error" do
+ expect { subject }.to raise_error(Gitlab::RepositoryHashCache::InvalidKeysProvidedError)
+ end
+ end
+ end
+
+ describe "#write" do
+ subject { cache.write(:example, test_hash) }
+
+ it { is_expected.to be(true) }
+
+ it "actually writes stuff to Redis" do
+ subject
+
+ expect(cache.read_members(:example, ["test"])).to eq(test_hash)
+ end
+ end
+
+ describe "#fetch_and_add_missing" do
+ subject do
+ cache.fetch_and_add_missing(:example, keys) do |missing_keys, hash|
+ missing_keys.each do |key|
+ hash[key] = "was_missing"
+ end
+ end
+ end
+
+ let(:keys) { %w(test) }
+
+ it "records metrics" do
+ # Here we expect it to receive "test" as a missing key because we
+ # don't write to the cache before this test
+ expect(cache).to receive(:record_metrics).with(:example, { "test" => "was_missing" }, ["test"])
+
+ subject
+ end
+
+ context "fully cached" do
+ let(:keys) { %w(test another) }
+
+ before do
+ cache.write(:example, test_hash.merge({ "another" => "not_missing" }))
+ end
+
+ it "returns a hash" do
+ is_expected.to eq({ "test" => "value", "another" => "not_missing" })
+ end
+
+ it "doesn't write to the cache" do
+ expect(cache).not_to receive(:write)
+
+ subject
+ end
+ end
+
+ context "partially cached" do
+ let(:keys) { %w(test missing) }
+
+ before do
+ cache.write(:example, test_hash)
+ end
+
+ it "returns a hash" do
+ is_expected.to eq({ "test" => "value", "missing" => "was_missing" })
+ end
+
+ it "writes to the cache" do
+ expect(cache).to receive(:write).with(:example, { "missing" => "was_missing" })
+
+ subject
+ end
+ end
+
+ context "uncached" do
+ let(:keys) { %w(test missing) }
+
+ it "returns a hash" do
+ is_expected.to eq({ "test" => "was_missing", "missing" => "was_missing" })
+ end
+
+ it "writes to the cache" do
+ expect(cache).to receive(:write).with(:example, { "test" => "was_missing", "missing" => "was_missing" })
+
+ subject
+ end
+ end
+ end
+end
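
A simplified, Redis-free sketch of the fetch_and_add_missing flow that repository_hash_cache_spec.rb above exercises: read the requested fields, let the block fill in the missing ones, then write back only what the block produced. The real class is keyed per hash name and backed by a Redis hash; this in-memory version and its class name are for illustration only.

class HashCacheSketch
  def initialize
    @store = {}
  end

  # Persist string values, mirroring how Redis stores hash fields as strings.
  def write(hash)
    @store.merge!(hash.transform_values(&:to_s))
    true
  end

  # Return a value (or nil) for every requested field.
  def read_members(keys)
    keys.to_h { |key| [key, @store[key]] }
  end

  # Yield the missing fields to the caller, cache whatever it filled in,
  # and return the combined result.
  def fetch_and_add_missing(keys)
    results = read_members(keys)
    missing = results.select { |_, value| value.nil? }.keys

    if missing.any?
      yield(missing, results)
      write(results.slice(*missing))
    end

    results
  end
end

cache = HashCacheSketch.new
cache.write("test" => "value")
cache.fetch_and_add_missing(%w[test missing]) do |missing_keys, hash|
  missing_keys.each { |key| hash[key] = "was_missing" }
end
# => { "test" => "value", "missing" => "was_missing" }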
diff --git a/spec/lib/gitlab/request_context_spec.rb b/spec/lib/gitlab/request_context_spec.rb
index 5785dbfd850..7e2e05c9f1b 100644
--- a/spec/lib/gitlab/request_context_spec.rb
+++ b/spec/lib/gitlab/request_context_spec.rb
@@ -24,18 +24,6 @@ describe Gitlab::RequestContext, :request_store do
expect(subject.request_deadline).to be_nil
end
-
- it 'only checks the feature once per request-instance' do
- expect(Feature).to receive(:enabled?).with(:request_deadline).once
-
- 2.times { subject.request_deadline }
- end
-
- it 'returns nil when the feature is disabled' do
- stub_feature_flags(request_deadline: false)
-
- expect(subject.request_deadline).to be_nil
- end
end
describe '#ensure_request_deadline_not_exceeded!' do
diff --git a/spec/lib/gitlab/rugged_instrumentation_spec.rb b/spec/lib/gitlab/rugged_instrumentation_spec.rb
index 4dcc8ae514a..64c0ce1b65e 100644
--- a/spec/lib/gitlab/rugged_instrumentation_spec.rb
+++ b/spec/lib/gitlab/rugged_instrumentation_spec.rb
@@ -15,7 +15,7 @@ describe Gitlab::RuggedInstrumentation, :request_store do
end
end
- context '.increment_query_count' do
+ describe '.increment_query_count' do
it 'tracks query counts' do
expect(subject.query_count).to eq(0)
diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb
index 194ed49bb32..34a775fc206 100644
--- a/spec/lib/gitlab/runtime_spec.rb
+++ b/spec/lib/gitlab/runtime_spec.rb
@@ -3,8 +3,26 @@
require 'spec_helper'
describe Gitlab::Runtime do
+ shared_examples "valid runtime" do |runtime, max_threads|
+ it "identifies itself" do
+ expect(subject.identify).to eq(runtime)
+ expect(subject.public_send("#{runtime}?")).to be(true)
+ end
+
+ it "does not identify as others" do
+      (described_class::AVAILABLE_RUNTIMES - [runtime]).each do |other_runtime|
+        expect(subject.public_send("#{other_runtime}?")).to eq(false)
+ end
+ end
+
+ it "reports its maximum concurrency" do
+ expect(subject.max_threads).to eq(max_threads)
+ end
+ end
+
before do
allow(described_class).to receive(:process_name).and_return('ruby')
+ stub_rails_env('production')
end
context "when unknown" do
@@ -26,138 +44,65 @@ describe Gitlab::Runtime do
context "puma" do
let(:puma_type) { double('::Puma') }
- let(:options) do
- {
- max_threads: 2
- }
- end
before do
stub_const('::Puma', puma_type)
- allow(puma_type).to receive_message_chain(:cli_config, :options).and_return(options)
- end
-
- it "identifies itself" do
- expect(subject.identify).to eq(:puma)
- expect(subject.puma?).to be(true)
- end
-
- it "does not identify as others" do
- expect(subject.unicorn?).to be(false)
- expect(subject.sidekiq?).to be(false)
- expect(subject.console?).to be(false)
- expect(subject.rake?).to be(false)
- expect(subject.rspec?).to be(false)
+ allow(puma_type).to receive_message_chain(:cli_config, :options).and_return(max_threads: 2)
end
- it "reports its maximum concurrency" do
- expect(subject.max_threads).to eq(2)
- end
+ it_behaves_like "valid runtime", :puma, 3
end
context "unicorn" do
- let(:unicorn_type) { Module.new }
- let(:unicorn_server_type) { Class.new }
-
before do
- stub_const('::Unicorn', unicorn_type)
- stub_const('::Unicorn::HttpServer', unicorn_server_type)
- end
-
- it "identifies itself" do
- expect(subject.identify).to eq(:unicorn)
- expect(subject.unicorn?).to be(true)
- end
-
- it "does not identify as others" do
- expect(subject.puma?).to be(false)
- expect(subject.sidekiq?).to be(false)
- expect(subject.console?).to be(false)
- expect(subject.rake?).to be(false)
- expect(subject.rspec?).to be(false)
+ stub_const('::Unicorn', Module.new)
+ stub_const('::Unicorn::HttpServer', Class.new)
end
- it "reports its maximum concurrency" do
- expect(subject.max_threads).to eq(1)
- end
+ it_behaves_like "valid runtime", :unicorn, 1
end
context "sidekiq" do
let(:sidekiq_type) { double('::Sidekiq') }
- let(:options) do
- {
- concurrency: 2
- }
- end
before do
stub_const('::Sidekiq', sidekiq_type)
allow(sidekiq_type).to receive(:server?).and_return(true)
- allow(sidekiq_type).to receive(:options).and_return(options)
+ allow(sidekiq_type).to receive(:options).and_return(concurrency: 2)
end
- it "identifies itself" do
- expect(subject.identify).to eq(:sidekiq)
- expect(subject.sidekiq?).to be(true)
- end
-
- it "does not identify as others" do
- expect(subject.unicorn?).to be(false)
- expect(subject.puma?).to be(false)
- expect(subject.console?).to be(false)
- expect(subject.rake?).to be(false)
- expect(subject.rspec?).to be(false)
- end
-
- it "reports its maximum concurrency" do
- expect(subject.max_threads).to eq(2)
- end
+ it_behaves_like "valid runtime", :sidekiq, 4
end
context "console" do
- let(:console_type) { double('::Rails::Console') }
-
before do
- stub_const('::Rails::Console', console_type)
+ stub_const('::Rails::Console', double('::Rails::Console'))
end
- it "identifies itself" do
- expect(subject.identify).to eq(:console)
- expect(subject.console?).to be(true)
- end
+ it_behaves_like "valid runtime", :console, 1
+ end
- it "does not identify as others" do
- expect(subject.unicorn?).to be(false)
- expect(subject.sidekiq?).to be(false)
- expect(subject.puma?).to be(false)
- expect(subject.rake?).to be(false)
- expect(subject.rspec?).to be(false)
+ context "test suite" do
+ before do
+ stub_rails_env('test')
end
- it "reports its maximum concurrency" do
- expect(subject.max_threads).to eq(1)
- end
+ it_behaves_like "valid runtime", :test_suite, 1
end
- context "rspec" do
+ context "geo log cursor" do
before do
- allow(described_class).to receive(:process_name).and_return('rspec')
+ stub_const('::GeoLogCursorOptionParser', double('::GeoLogCursorOptionParser'))
end
- it "identifies itself" do
- expect(subject.identify).to eq(:rspec)
- expect(subject.rspec?).to be(true)
- end
+ it_behaves_like "valid runtime", :geo_log_cursor, 1
+ end
- it "does not identify as others" do
- expect(subject.unicorn?).to be(false)
- expect(subject.sidekiq?).to be(false)
- expect(subject.rake?).to be(false)
- expect(subject.puma?).to be(false)
+ context "rails runner" do
+ before do
+ stub_const('::Rails::Command::RunnerCommand', double('::Rails::Command::RunnerCommand'))
end
- it "reports its maximum concurrency" do
- expect(subject.max_threads).to eq(1)
- end
+ it_behaves_like "valid runtime", :rails_runner, 1
end
end
diff --git a/spec/lib/gitlab/safe_request_store_spec.rb b/spec/lib/gitlab/safe_request_store_spec.rb
index bae87e43615..def05a3f285 100644
--- a/spec/lib/gitlab/safe_request_store_spec.rb
+++ b/spec/lib/gitlab/safe_request_store_spec.rb
@@ -38,7 +38,7 @@ describe Gitlab::SafeRequestStore do
describe '.clear!' do
context 'when RequestStore is active', :request_store do
it 'uses RequestStore' do
- expect(RequestStore).to receive(:clear!).twice.and_call_original
+ expect(RequestStore).to receive(:clear!).once.and_call_original
described_class.clear!
end
@@ -56,7 +56,7 @@ describe Gitlab::SafeRequestStore do
describe '.end!' do
context 'when RequestStore is active', :request_store do
it 'uses RequestStore' do
- expect(RequestStore).to receive(:end!).twice.and_call_original
+ expect(RequestStore).to receive(:end!).once.and_call_original
described_class.end!
end
diff --git a/spec/lib/gitlab/sanitizers/exif_spec.rb b/spec/lib/gitlab/sanitizers/exif_spec.rb
index 11e430e0be4..f0b733817b3 100644
--- a/spec/lib/gitlab/sanitizers/exif_spec.rb
+++ b/spec/lib/gitlab/sanitizers/exif_spec.rb
@@ -30,7 +30,7 @@ describe Gitlab::Sanitizers::Exif do
end
it 'processes only uploads created since specified date' do
- expect(sanitizer).to receive(:clean).exactly(2).times
+ expect(sanitizer).to receive(:clean).twice
sanitizer.batch_clean(since: 2.days.ago)
end
diff --git a/spec/lib/gitlab/search/found_blob_spec.rb b/spec/lib/gitlab/search/found_blob_spec.rb
index 07842faa638..ce6a54100a5 100644
--- a/spec/lib/gitlab/search/found_blob_spec.rb
+++ b/spec/lib/gitlab/search/found_blob_spec.rb
@@ -156,4 +156,14 @@ describe Gitlab::Search::FoundBlob do
end
end
end
+
+ describe 'policy' do
+ let(:project) { build(:project, :repository) }
+
+ subject { described_class.new(project: project) }
+
+ it 'works with policy' do
+ expect(Ability.allowed?(project.creator, :read_blob, subject)).to be_truthy
+ end
+ end
end
diff --git a/spec/lib/gitlab/search/found_wiki_page_spec.rb b/spec/lib/gitlab/search/found_wiki_page_spec.rb
new file mode 100644
index 00000000000..e8b6728aba5
--- /dev/null
+++ b/spec/lib/gitlab/search/found_wiki_page_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Search::FoundWikiPage do
+ let(:project) { create(:project, :public, :repository) }
+
+ describe 'policy' do
+ let(:project) { build(:project, :repository) }
+ let(:found_blob) { Gitlab::Search::FoundBlob.new(project: project) }
+
+ subject { described_class.new(found_blob) }
+
+ it 'works with policy' do
+ expect(Ability.allowed?(project.creator, :read_wiki_page, subject)).to be_truthy
+ end
+ end
+end
diff --git a/spec/lib/gitlab/serverless/domain_spec.rb b/spec/lib/gitlab/serverless/domain_spec.rb
new file mode 100644
index 00000000000..ae5551977d4
--- /dev/null
+++ b/spec/lib/gitlab/serverless/domain_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Serverless::Domain do
+ describe '.generate_uuid' do
+ it 'has 14 characters' do
+ expect(described_class.generate_uuid.length).to eq(described_class::UUID_LENGTH)
+ end
+
+ it 'consists of only hexadecimal characters' do
+ expect(described_class.generate_uuid).to match(/\A\h+\z/)
+ end
+
+ it 'uses random characters' do
+ uuid = 'abcd1234567890'
+
+ expect(SecureRandom).to receive(:hex).with(described_class::UUID_LENGTH / 2).and_return(uuid)
+ expect(described_class.generate_uuid).to eq(uuid)
+ end
+ end
+end
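
For reference, the behaviour pinned down by domain_spec.rb above can be sketched as the following standalone Ruby; UUID_LENGTH = 14 and the SecureRandom.hex call are taken directly from the expectations, while the module name is hypothetical.

require 'securerandom'

module ServerlessDomainSketch
  UUID_LENGTH = 14

  # Generate a UUID_LENGTH-character lowercase-hex identifier.
  def self.generate_uuid
    SecureRandom.hex(UUID_LENGTH / 2)
  end
end

ServerlessDomainSketch.generate_uuid # => e.g. "a1b2c3d4e5f607"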
diff --git a/spec/lib/gitlab/serverless/function_uri_spec.rb b/spec/lib/gitlab/serverless/function_uri_spec.rb
new file mode 100644
index 00000000000..cd4abeb89f5
--- /dev/null
+++ b/spec/lib/gitlab/serverless/function_uri_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Serverless::FunctionURI do
+ let(:function) { 'test-function' }
+ let(:domain) { 'serverless.gitlab.io' }
+ let(:pages_domain) { create(:pages_domain, :instance_serverless, domain: domain) }
+ let!(:cluster) { create(:serverless_domain_cluster, uuid: 'abcdef12345678', pages_domain: pages_domain) }
+ let(:valid_cluster) { 'aba1cdef123456f278' }
+ let(:invalid_cluster) { 'aba1cdef123456f178' }
+ let!(:environment) { create(:environment, name: 'test') }
+
+ let(:valid_uri) { "https://#{function}-#{valid_cluster}#{"%x" % environment.id}-#{environment.slug}.#{domain}" }
+ let(:valid_fqdn) { "#{function}-#{valid_cluster}#{"%x" % environment.id}-#{environment.slug}.#{domain}" }
+ let(:invalid_uri) { "https://#{function}-#{invalid_cluster}#{"%x" % environment.id}-#{environment.slug}.#{domain}" }
+
+ shared_examples 'a valid FunctionURI class' do
+ describe '#to_s' do
+ it 'matches valid URI' do
+ expect(subject.to_s).to eq valid_uri
+ end
+ end
+
+ describe '#function' do
+ it 'returns function' do
+ expect(subject.function).to eq function
+ end
+ end
+
+ describe '#cluster' do
+ it 'returns cluster' do
+ expect(subject.cluster).to eq cluster
+ end
+ end
+
+ describe '#environment' do
+ it 'returns environment' do
+ expect(subject.environment).to eq environment
+ end
+ end
+ end
+
+ describe '.new' do
+ context 'with valid arguments' do
+ subject { described_class.new(function: function, cluster: cluster, environment: environment) }
+
+ it_behaves_like 'a valid FunctionURI class'
+ end
+
+ context 'with invalid arguments' do
+ subject { described_class.new(function: function, environment: environment) }
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+ end
+
+ describe '.parse' do
+ context 'with valid URI' do
+ subject { described_class.parse(valid_uri) }
+
+ it_behaves_like 'a valid FunctionURI class'
+ end
+
+ context 'with valid FQDN' do
+ subject { described_class.parse(valid_fqdn) }
+
+ it_behaves_like 'a valid FunctionURI class'
+ end
+
+ context 'with invalid URI' do
+ subject { described_class.parse(invalid_uri) }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/serverless/service_spec.rb b/spec/lib/gitlab/serverless/service_spec.rb
new file mode 100644
index 00000000000..f618dd02cdb
--- /dev/null
+++ b/spec/lib/gitlab/serverless/service_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Serverless::Service do
+ let(:cluster) { create(:cluster) }
+ let(:environment) { create(:environment) }
+ let(:attributes) do
+ {
+ 'apiVersion' => 'serving.knative.dev/v1alpha1',
+ 'kind' => 'Service',
+ 'metadata' => {
+ 'creationTimestamp' => '2019-10-22T21:19:13Z',
+ 'name' => 'kubetest',
+ 'namespace' => 'project1-1-environment1'
+ },
+ 'spec' => {
+ 'runLatest' => {
+ 'configuration' => {
+ 'build' => {
+ 'template' => {
+ 'name' => 'some-image'
+ }
+ }
+ }
+ }
+ },
+ 'environment_scope' => '*',
+ 'cluster' => cluster,
+ 'environment' => environment,
+ 'podcount' => 0
+ }
+ end
+
+ it 'exposes methods extracting data from the attributes hash' do
+ service = Gitlab::Serverless::Service.new(attributes)
+
+ expect(service.name).to eq('kubetest')
+ expect(service.namespace).to eq('project1-1-environment1')
+ expect(service.environment_scope).to eq('*')
+ expect(service.podcount).to eq(0)
+ expect(service.created_at).to eq(DateTime.parse('2019-10-22T21:19:13Z'))
+ expect(service.image).to eq('some-image')
+ expect(service.cluster).to eq(cluster)
+ expect(service.environment).to eq(environment)
+ end
+
+ it 'returns nil for missing attributes' do
+ service = Gitlab::Serverless::Service.new({})
+
+ [:name, :namespace, :environment_scope, :cluster, :podcount, :created_at, :image, :description, :url, :environment].each do |method|
+ expect(service.send(method)).to be_nil
+ end
+ end
+
+ describe '#description' do
+ it 'extracts the description in knative 7 format if available' do
+ attributes = {
+ 'spec' => {
+ 'template' => {
+ 'metadata' => {
+ 'annotations' => {
+ 'Description' => 'some description'
+ }
+ }
+ }
+ }
+ }
+ service = Gitlab::Serverless::Service.new(attributes)
+
+ expect(service.description).to eq('some description')
+ end
+
+ it 'extracts the description in knative 5/6 format if 7 is not available' do
+ attributes = {
+ 'spec' => {
+ 'runLatest' => {
+ 'configuration' => {
+ 'revisionTemplate' => {
+ 'metadata' => {
+ 'annotations' => {
+ 'Description' => 'some description'
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ service = Gitlab::Serverless::Service.new(attributes)
+
+ expect(service.description).to eq('some description')
+ end
+ end
+
+ describe '#url' do
+ it 'returns proxy URL if cluster has serverless domain' do
+ knative = create(:clusters_applications_knative, :installed, cluster: cluster)
+ create(:serverless_domain_cluster, clusters_applications_knative_id: knative.id)
+ service = Gitlab::Serverless::Service.new(attributes.merge('cluster' => cluster))
+
+ expect(Gitlab::Serverless::FunctionURI).to receive(:new).with(
+ function: service.name,
+ cluster: service.cluster.serverless_domain,
+ environment: service.environment
+ ).and_return('https://proxy.example.com')
+
+ expect(service.url).to eq('https://proxy.example.com')
+ end
+
+ it 'returns the URL from the knative 6/7 format' do
+ attributes = {
+ 'status' => {
+ 'url' => 'https://example.com'
+ }
+ }
+ service = Gitlab::Serverless::Service.new(attributes)
+
+ expect(service.url).to eq('https://example.com')
+ end
+
+ it 'returns the URL from the knative 5 format' do
+ attributes = {
+ 'status' => {
+ 'domain' => 'example.com'
+ }
+ }
+ service = Gitlab::Serverless::Service.new(attributes)
+
+ expect(service.url).to eq('http://example.com')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb
index eefc548a4d9..7b8d1b6cd9b 100644
--- a/spec/lib/gitlab/shell_spec.rb
+++ b/spec/lib/gitlab/shell_spec.rb
@@ -397,7 +397,7 @@ describe Gitlab::Shell do
describe 'namespace actions' do
subject { described_class.new }
- let(:storage) { Gitlab.config.repositories.storages.keys.first }
+ let(:storage) { Gitlab.config.repositories.storages.each_key.first }
describe '#add_namespace' do
it 'creates a namespace' do
diff --git a/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb b/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb
new file mode 100644
index 00000000000..e6d0055df64
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb
@@ -0,0 +1,217 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+describe Gitlab::SidekiqConfig::CliMethods do
+ let(:dummy_root) { '/tmp/' }
+
+ describe '.worker_queues' do
+ def expand_path(path)
+ File.join(dummy_root, path)
+ end
+
+ def stub_exists(exists: true)
+ ['app/workers/all_queues.yml', 'ee/app/workers/all_queues.yml'].each do |path|
+ allow(File).to receive(:exist?).with(expand_path(path)).and_return(exists)
+ end
+ end
+
+ def stub_contents(foss_queues, ee_queues)
+ allow(YAML).to receive(:load_file)
+ .with(expand_path('app/workers/all_queues.yml'))
+ .and_return(foss_queues)
+
+ allow(YAML).to receive(:load_file)
+ .with(expand_path('ee/app/workers/all_queues.yml'))
+ .and_return(ee_queues)
+ end
+
+ before do
+ described_class.clear_memoization!
+ end
+
+ context 'when the file exists' do
+ before do
+ stub_exists(exists: true)
+ end
+
+ shared_examples 'valid file contents' do
+ it 'memoizes the result' do
+ result = described_class.worker_queues(dummy_root)
+
+ stub_exists(exists: false)
+
+ expect(described_class.worker_queues(dummy_root)).to eq(result)
+ end
+
+ it 'flattens and joins the contents' do
+ expected_queues = %w[queue_a queue_b]
+ expected_queues = expected_queues.first(1) unless Gitlab.ee?
+
+ expect(described_class.worker_queues(dummy_root))
+ .to match_array(expected_queues)
+ end
+ end
+
+ context 'when the file contains an array of strings' do
+ before do
+ stub_contents(['queue_a'], ['queue_b'])
+ end
+
+ include_examples 'valid file contents'
+ end
+
+ context 'when the file contains an array of hashes' do
+ before do
+ stub_contents([{ name: 'queue_a' }], [{ name: 'queue_b' }])
+ end
+
+ include_examples 'valid file contents'
+ end
+ end
+
+ context 'when the file does not exist' do
+ before do
+ stub_exists(exists: false)
+ end
+
+ it 'returns an empty array' do
+ expect(described_class.worker_queues(dummy_root)).to be_empty
+ end
+ end
+ end
+
+ describe '.expand_queues' do
+ let(:worker_queues) do
+ ['cronjob:stuck_import_jobs', 'cronjob:stuck_merge_jobs', 'post_receive']
+ end
+
+ it 'defaults the value of the second argument to .worker_queues' do
+ allow(described_class).to receive(:worker_queues).and_return([])
+
+ expect(described_class.expand_queues(['cronjob']))
+ .to contain_exactly('cronjob')
+
+ allow(described_class).to receive(:worker_queues).and_return(worker_queues)
+
+ expect(described_class.expand_queues(['cronjob']))
+ .to contain_exactly('cronjob', 'cronjob:stuck_import_jobs', 'cronjob:stuck_merge_jobs')
+ end
+
+ it 'expands queue namespaces to concrete queue names' do
+ expect(described_class.expand_queues(['cronjob'], worker_queues))
+ .to contain_exactly('cronjob', 'cronjob:stuck_import_jobs', 'cronjob:stuck_merge_jobs')
+ end
+
+ it 'lets concrete queue names pass through' do
+ expect(described_class.expand_queues(['post_receive'], worker_queues))
+ .to contain_exactly('post_receive')
+ end
+
+ it 'lets unknown queues pass through' do
+ expect(described_class.expand_queues(['unknown'], worker_queues))
+ .to contain_exactly('unknown')
+ end
+ end
+
+ describe '.query_workers' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:queues) do
+ [
+ {
+ name: 'a',
+ feature_category: :category_a,
+ has_external_dependencies: false,
+ latency_sensitive: false,
+ resource_boundary: :cpu
+ },
+ {
+ name: 'a_2',
+ feature_category: :category_a,
+ has_external_dependencies: false,
+ latency_sensitive: true,
+ resource_boundary: :none
+ },
+ {
+ name: 'b',
+ feature_category: :category_b,
+ has_external_dependencies: true,
+ latency_sensitive: true,
+ resource_boundary: :memory
+ },
+ {
+ name: 'c',
+ feature_category: :category_c,
+ has_external_dependencies: false,
+ latency_sensitive: false,
+ resource_boundary: :memory
+ }
+ ]
+ end
+
+ context 'with valid input' do
+ where(:query, :selected_queues) do
+ # feature_category
+ 'feature_category=category_a' | %w(a a_2)
+ 'feature_category=category_a,category_c' | %w(a a_2 c)
+ 'feature_category=category_a|feature_category=category_c' | %w(a a_2 c)
+ 'feature_category!=category_a' | %w(b c)
+
+ # has_external_dependencies
+ 'has_external_dependencies=true' | %w(b)
+ 'has_external_dependencies=false' | %w(a a_2 c)
+ 'has_external_dependencies=true,false' | %w(a a_2 b c)
+ 'has_external_dependencies=true|has_external_dependencies=false' | %w(a a_2 b c)
+ 'has_external_dependencies!=true' | %w(a a_2 c)
+
+ # latency_sensitive
+ 'latency_sensitive=true' | %w(a_2 b)
+ 'latency_sensitive=false' | %w(a c)
+ 'latency_sensitive=true,false' | %w(a a_2 b c)
+ 'latency_sensitive=true|latency_sensitive=false' | %w(a a_2 b c)
+ 'latency_sensitive!=true' | %w(a c)
+
+ # name
+ 'name=a' | %w(a)
+ 'name=a,b' | %w(a b)
+ 'name=a,a_2|name=b' | %w(a a_2 b)
+ 'name!=a,a_2' | %w(b c)
+
+ # resource_boundary
+ 'resource_boundary=memory' | %w(b c)
+ 'resource_boundary=memory,cpu' | %w(a b c)
+ 'resource_boundary=memory|resource_boundary=cpu' | %w(a b c)
+ 'resource_boundary!=memory,cpu' | %w(a_2)
+
+ # combinations
+ 'feature_category=category_a&latency_sensitive=true' | %w(a_2)
+ 'feature_category=category_a&latency_sensitive=true|feature_category=category_c' | %w(a_2 c)
+ end
+
+ with_them do
+ it do
+ expect(described_class.query_workers(query, queues))
+ .to match_array(selected_queues)
+ end
+ end
+ end
+
+ context 'with invalid input' do
+ where(:query, :error) do
+ 'feature_category="category_a"' | described_class::InvalidTerm
+ 'feature_category=' | described_class::InvalidTerm
+ 'feature_category~category_a' | described_class::InvalidTerm
+ 'worker_name=a' | described_class::UnknownPredicate
+ end
+
+ with_them do
+ it do
+ expect { described_class.query_workers(query, queues) }
+ .to raise_error(error)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_config/worker_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_spec.rb
new file mode 100644
index 00000000000..38edd0f5eeb
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_config/worker_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+describe Gitlab::SidekiqConfig::Worker do
+ def create_worker(queue:, **attributes)
+ namespace = queue.include?(':') && queue.split(':').first
+ inner_worker = double(
+ queue: queue,
+ queue_namespace: namespace,
+ get_feature_category: attributes[:feature_category],
+ get_weight: attributes[:weight],
+ get_worker_resource_boundary: attributes[:resource_boundary],
+ latency_sensitive_worker?: attributes[:latency_sensitive],
+ worker_has_external_dependencies?: attributes[:has_external_dependencies]
+ )
+
+ described_class.new(inner_worker, ee: false)
+ end
+
+ describe '#ee?' do
+ it 'returns the EE status set on creation' do
+ expect(described_class.new(double, ee: true)).to be_ee
+ expect(described_class.new(double, ee: false)).not_to be_ee
+ end
+ end
+
+ describe '#==' do
+ def worker_with_yaml(yaml)
+ described_class.new(double, ee: false).tap do |worker|
+ allow(worker).to receive(:to_yaml).and_return(yaml)
+ end
+ end
+
+ it 'defines two workers as equal if their YAML representations are equal' do
+ expect(worker_with_yaml('a')).to eq(worker_with_yaml('a'))
+ expect(worker_with_yaml('a')).not_to eq(worker_with_yaml('b'))
+ end
+
+ it 'returns true when a worker is compared with its YAML representation' do
+ expect(worker_with_yaml('a')).to eq('a')
+ expect(worker_with_yaml(a: 1, b: 2)).to eq(a: 1, b: 2)
+ end
+ end
+
+ describe 'delegations' do
+ [
+ :feature_category_not_owned?, :get_feature_category, :get_weight,
+ :get_worker_resource_boundary, :latency_sensitive_worker?, :queue,
+ :queue_namespace, :worker_has_external_dependencies?
+ ].each do |meth|
+ it "delegates #{meth} to the worker class" do
+ worker = double
+
+ expect(worker).to receive(meth)
+
+ described_class.new(worker, ee: false).send(meth)
+ end
+ end
+ end
+
+ describe 'sorting' do
+ it 'sorts queues with a namespace before those without a namespace' do
+ namespaced_worker = create_worker(queue: 'namespace:queue')
+ plain_worker = create_worker(queue: 'a_queue')
+
+ expect([plain_worker, namespaced_worker].sort)
+ .to eq([namespaced_worker, plain_worker])
+ end
+
+ it 'sorts alphabetically by queue' do
+ workers = [
+ create_worker(queue: 'namespace:a'),
+ create_worker(queue: 'namespace:b'),
+ create_worker(queue: 'other_namespace:a'),
+ create_worker(queue: 'other_namespace:b'),
+ create_worker(queue: 'a'),
+ create_worker(queue: 'b')
+ ]
+
+ expect(workers.shuffle.sort).to eq(workers)
+ end
+ end
+
+ describe 'YAML encoding' do
+ it 'encodes the worker in YAML as a hash of the queue' do
+ attributes_a = {
+ feature_category: :source_code_management,
+ has_external_dependencies: false,
+ latency_sensitive: false,
+ resource_boundary: :memory,
+ weight: 2
+ }
+
+ attributes_b = {
+ feature_category: :not_owned,
+ has_external_dependencies: true,
+ latency_sensitive: true,
+ resource_boundary: :unknown,
+ weight: 1
+ }
+
+ worker_a = create_worker(queue: 'a', **attributes_a)
+ worker_b = create_worker(queue: 'b', **attributes_b)
+
+ expect(YAML.dump(worker_a))
+ .to eq(YAML.dump(attributes_a.reverse_merge(name: 'a')))
+
+ expect(YAML.dump([worker_a, worker_b]))
+ .to eq(YAML.dump([attributes_a.reverse_merge(name: 'a'),
+ attributes_b.reverse_merge(name: 'b')]))
+ end
+ end
+
+ describe '#namespace_and_weight' do
+ it 'returns a namespace, weight pair for the worker' do
+ expect(create_worker(queue: 'namespace:a', weight: 2).namespace_and_weight)
+ .to eq(['namespace', 2])
+ end
+ end
+
+ describe '#queue_and_weight' do
+ it 'returns a queue, weight pair for the worker' do
+ expect(create_worker(queue: 'namespace:a', weight: 2).queue_and_weight)
+ .to eq(['namespace:a', 2])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
index 49efbac160a..85de1d029c3 100644
--- a/spec/lib/gitlab/sidekiq_config_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -5,10 +5,10 @@ require 'spec_helper'
describe Gitlab::SidekiqConfig do
describe '.workers' do
it 'includes all workers' do
- workers = described_class.workers
+ worker_classes = described_class.workers.map(&:klass)
- expect(workers).to include(PostReceive)
- expect(workers).to include(MergeWorker)
+ expect(worker_classes).to include(PostReceive)
+ expect(worker_classes).to include(MergeWorker)
end
end
@@ -24,24 +24,101 @@ describe Gitlab::SidekiqConfig do
end
end
- describe '.expand_queues' do
- it 'expands queue namespaces to concrete queue names' do
- queues = described_class.expand_queues(%w[cronjob])
+ describe '.workers_for_all_queues_yml' do
+ it 'returns a tuple with FOSS workers first' do
+ expect(described_class.workers_for_all_queues_yml.first)
+ .to include(an_object_having_attributes(queue: 'post_receive'))
+ end
+ end
- expect(queues).to include('cronjob:stuck_import_jobs')
- expect(queues).to include('cronjob:stuck_merge_jobs')
+ describe '.all_queues_yml_outdated?' do
+ let(:workers) do
+ [
+ MergeWorker,
+ PostReceive,
+ ProcessCommitWorker
+ ].map { |worker| described_class::Worker.new(worker, ee: false) }
end
- it 'lets concrete queue names pass through' do
- queues = described_class.expand_queues(%w[post_receive])
+ before do
+ allow(described_class).to receive(:workers).and_return(workers)
+ allow(Gitlab).to receive(:ee?).and_return(false)
+ end
- expect(queues).to include('post_receive')
+    it 'returns true if the YAML file does not match the application code' do
+ allow(YAML).to receive(:load_file)
+ .with(described_class::FOSS_QUEUE_CONFIG_PATH)
+ .and_return(workers.first(2).map(&:to_yaml))
+
+ expect(described_class.all_queues_yml_outdated?).to be(true)
+ end
+
+ it 'returns false if the YAML file matches the application code' do
+ allow(YAML).to receive(:load_file)
+ .with(described_class::FOSS_QUEUE_CONFIG_PATH)
+ .and_return(workers.map(&:to_yaml))
+
+ expect(described_class.all_queues_yml_outdated?).to be(false)
+ end
+ end
+
+ describe '.queues_for_sidekiq_queues_yml' do
+ before do
+ workers = [
+ Namespaces::RootStatisticsWorker,
+ Namespaces::ScheduleAggregationWorker,
+ MergeWorker,
+ ProcessCommitWorker
+ ].map { |worker| described_class::Worker.new(worker, ee: false) }
+
+ allow(described_class).to receive(:workers).and_return(workers)
+ end
+
+ it 'returns queues and weights, aggregating namespaces with the same weight' do
+ expected_queues = [
+ ['merge', 5],
+ ['process_commit', 3],
+ ['update_namespace_statistics', 1]
+ ]
+
+ expect(described_class.queues_for_sidekiq_queues_yml).to eq(expected_queues)
+ end
+ end
+
+ describe '.sidekiq_queues_yml_outdated?' do
+ before do
+ workers = [
+ Namespaces::RootStatisticsWorker,
+ Namespaces::ScheduleAggregationWorker,
+ MergeWorker,
+ ProcessCommitWorker
+ ].map { |worker| described_class::Worker.new(worker, ee: false) }
+
+ allow(described_class).to receive(:workers).and_return(workers)
+ end
+
+ let(:expected_queues) do
+ [
+ ['merge', 5],
+ ['process_commit', 3],
+ ['update_namespace_statistics', 1]
+ ]
+ end
+
+ it 'returns true if the YAML file does not match the application code' do
+ allow(YAML).to receive(:load_file)
+ .with(described_class::SIDEKIQ_QUEUES_PATH)
+ .and_return(queues: expected_queues.reverse)
+
+ expect(described_class.sidekiq_queues_yml_outdated?).to be(true)
end
- it 'lets unknown queues pass through' do
- queues = described_class.expand_queues(%w[unknown])
+ it 'returns false if the YAML file matches the application code' do
+ allow(YAML).to receive(:load_file)
+ .with(described_class::SIDEKIQ_QUEUES_PATH)
+ .and_return(queues: expected_queues)
- expect(queues).to include('unknown')
+ expect(described_class.sidekiq_queues_yml_outdated?).to be(false)
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 43cdb998091..f294d7f7fcd 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -18,7 +18,10 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
"jid" => "da883554ee4fe414012f5f42",
"created_at" => created_at.to_f,
"enqueued_at" => created_at.to_f,
- "correlation_id" => 'cid'
+ "correlation_id" => 'cid',
+ "error_message" => "wrong number of arguments (2 for 3)",
+ "error_class" => "ArgumentError",
+ "error_backtrace" => []
}
end
@@ -26,7 +29,7 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
let(:clock_thread_cputime_start) { 0.222222299 }
let(:clock_thread_cputime_end) { 1.333333799 }
let(:start_payload) do
- job.merge(
+ job.except('error_backtrace', 'error_class', 'error_message').merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: start',
'job_status' => 'start',
'pid' => Process.pid,
@@ -99,13 +102,8 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
context 'when the job args are bigger than the maximum allowed' do
it 'keeps args from the front until they exceed the limit' do
Timecop.freeze(timestamp) do
- job['args'] = [
- 1,
- 2,
- 'a' * (described_class::MAXIMUM_JOB_ARGUMENTS_LENGTH / 2),
- 'b' * (described_class::MAXIMUM_JOB_ARGUMENTS_LENGTH / 2),
- 3
- ]
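+ # Two half-limit strings guarantee the serialized args exceed the logging limit partway through the array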
+ half_limit = Gitlab::Utils::LogLimitedArray::MAXIMUM_ARRAY_LENGTH / 2
+ job['args'] = [1, 2, 'a' * half_limit, 'b' * half_limit, 3]
expected_args = job['args'].take(3) + ['...']
diff --git a/spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb
new file mode 100644
index 00000000000..f6449bae8c3
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SidekiqMiddleware::AdminMode::Client, :do_not_mock_admin_mode, :request_store do
+ include AdminModeHelper
+
+ let(:worker) do
+ Class.new do
+ def perform; end
+ end
+ end
+
+ let(:job) { {} }
+ let(:queue) { :test }
+
+ it 'yields block' do
+ expect do |b|
+ subject.call(worker, job, queue, nil, &b)
+ end.to yield_control.once
+ end
+
+ context 'user is a regular user' do
+ it 'does not include admin mode field in payload' do
+ subject.call(worker, job, queue, nil) { nil }
+
+ expect(job).not_to include('admin_mode_user_id')
+ end
+ end
+
+ context 'user is an administrator' do
+ let(:admin) { create(:admin) }
+
+ context 'admin mode disabled' do
+ it 'does not include admin mode field in payload' do
+ subject.call(worker, job, queue, nil) { nil }
+
+ expect(job).not_to include('admin_mode_user_id')
+ end
+ end
+
+ context 'admin mode enabled' do
+ before do
+ enable_admin_mode!(admin)
+ end
+
+ context 'when sidekiq required context not set' do
+ it 'does not include admin mode field in payload' do
+ subject.call(worker, job, queue, nil) { nil }
+
+ expect(job).not_to include('admin_mode_user_id')
+ end
+ end
+
+ context 'when user stored in current request' do
+ it 'has admin mode field in payload' do
+ Gitlab::Auth::CurrentUserMode.with_current_admin(admin) do
+ subject.call(worker, job, queue, nil) { nil }
+
+ expect(job).to include('admin_mode_user_id' => admin.id)
+ end
+ end
+ end
+
+ context 'when bypassing session' do
+ it 'has admin mode field in payload' do
+ Gitlab::Auth::CurrentUserMode.bypass_session!(admin.id) do
+ subject.call(worker, job, queue, nil) { nil }
+
+ expect(job).to include('admin_mode_user_id' => admin.id)
+ end
+ end
+ end
+ end
+ end
+
+ context 'admin mode feature disabled' do
+ before do
+ stub_feature_flags(user_mode_in_session: false)
+ end
+
+ it 'yields block' do
+ expect do |b|
+ subject.call(worker, job, queue, nil, &b)
+ end.to yield_control.once
+ end
+
+ it 'does not include admin mode field in payload' do
+ subject.call(worker, job, queue, nil) { nil }
+
+ expect(job).not_to include('admin_mode_user_id')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb
new file mode 100644
index 00000000000..60475f0e403
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SidekiqMiddleware::AdminMode::Server, :do_not_mock_admin_mode, :request_store do
+ include AdminModeHelper
+
+ let(:worker) do
+ Class.new do
+ def perform; end
+ end
+ end
+
+ let(:job) { {} }
+ let(:queue) { :test }
+
+ it 'yields block' do
+ expect do |b|
+ subject.call(worker, job, queue, &b)
+ end.to yield_control.once
+ end
+
+ context 'job has no admin mode field' do
+ it 'does not bypass the session' do
+ subject.call(worker, job, queue) do
+ expect(Gitlab::Auth::CurrentUserMode.bypass_session_admin_id).to be_nil
+ end
+ end
+ end
+
+ context 'job has admin mode field' do
+ let(:admin) { create(:admin) }
+
+ context 'nil admin mode id' do
+ let(:job) { { 'admin_mode_user_id' => nil } }
+
+ it 'does not bypass the session' do
+ subject.call(worker, job, queue) do
+ expect(Gitlab::Auth::CurrentUserMode.bypass_session_admin_id).to be_nil
+ end
+ end
+ end
+
+ context 'valid admin mode id' do
+ let(:job) { { 'admin_mode_user_id' => admin.id } }
+
+ it 'bypasses the session' do
+ subject.call(worker, job, queue) do
+ expect(Gitlab::Auth::CurrentUserMode.bypass_session_admin_id).to be(admin.id)
+ end
+ end
+ end
+ end
+
+ context 'admin mode feature disabled' do
+ before do
+ stub_feature_flags(user_mode_in_session: false)
+ end
+
+ it 'yields block' do
+ expect do |b|
+ subject.call(worker, job, queue, &b)
+ end.to yield_control.once
+ end
+
+ it 'does not bypass the session' do
+ subject.call(worker, job, queue) do
+ expect(Gitlab::Auth::CurrentUserMode.bypass_session_admin_id).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
index 6516016e67f..daee2c0bbd0 100644
--- a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
@@ -21,13 +21,19 @@ describe Gitlab::SidekiqMiddleware::ClientMetrics do
describe '#call' do
it 'yields block' do
- expect { |b| subject.call(worker, job, :test, double, &b) }.to yield_control.once
+ expect { |b| subject.call(worker_class, job, :test, double, &b) }.to yield_control.once
end
- it 'increments enqueued jobs metric' do
+ it 'increments enqueued jobs metric with correct labels when the worker is given as a class name string' do
expect(enqueued_jobs_metric).to receive(:increment).with(labels, 1)
- subject.call(worker, job, :test, double) { nil }
+ subject.call(worker_class.to_s, job, :test, double) { nil }
+ end
+
+ it 'increments enqueued jobs metric with correct labels' do
+ expect(enqueued_jobs_metric).to receive(:increment).with(labels, 1)
+
+ subject.call(worker_class, job, :test, double) { nil }
end
end
end
@@ -46,7 +52,7 @@ describe Gitlab::SidekiqMiddleware::ClientMetrics do
context "when workers are attributed" do
def create_attributed_worker_class(latency_sensitive, external_dependencies, resource_boundary, category)
- Class.new do
+ klass = Class.new do
include Sidekiq::Worker
include WorkerAttributes
@@ -55,6 +61,7 @@ describe Gitlab::SidekiqMiddleware::ClientMetrics do
worker_resource_boundary resource_boundary unless resource_boundary == :unknown
feature_category category unless category.nil?
end
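+ # Register a constant so the anonymous worker class can also be referenced by its class name string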
+ stub_const("TestAttributedWorker", klass)
end
let(:latency_sensitive) { false }
diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
new file mode 100644
index 00000000000..9cb89b1bc10
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'TestWithContextWorker'
+ end
+
+ include ApplicationWorker
+
+ def self.job_for_args(args)
+ jobs.find { |job| job['args'] == args }
+ end
+
+ def perform(*args)
+ end
+ end
+ end
+
+ before do
+ stub_const('TestWithContextWorker', worker_class)
+ end
+
+ describe "#call" do
+ it 'applies a context for jobs scheduled in batch' do
+ user_per_job = { 'job1' => build_stubbed(:user, username: 'user-1'),
+ 'job2' => build_stubbed(:user, username: 'user-2') }
+
+ TestWithContextWorker.bulk_perform_async_with_contexts(
+ %w(job1 job2),
+ arguments_proc: -> (name) { [name, 1, 2, 3] },
+ context_proc: -> (name) { { user: user_per_job[name] } }
+ )
+
+ job1 = TestWithContextWorker.job_for_args(['job1', 1, 2, 3])
+ job2 = TestWithContextWorker.job_for_args(['job2', 1, 2, 3])
+
+ expect(job1['meta.user']).to eq(user_per_job['job1'].username)
+ expect(job2['meta.user']).to eq(user_per_job['job2'].username)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
new file mode 100644
index 00000000000..f64ebece930
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SidekiqMiddleware::WorkerContext::Server do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ "TestWorker"
+ end
+
+ # To keep track of the context that was active for certain arguments
+ cattr_accessor(:contexts) { {} }
+
+ include ApplicationWorker
+
+ worker_context user: nil
+
+ def perform(identifier, *args)
+ self.class.contexts.merge!(identifier => Labkit::Context.current.to_h)
+ end
+ end
+ end
+
+ let(:other_worker) do
+ Class.new do
+ def self.name
+ "OtherWorker"
+ end
+
+ include Sidekiq::Worker
+
+ def perform
+ end
+ end
+ end
+
+ before do
+ stub_const("TestWorker", worker_class)
+ stub_const("OtherWorker", other_worker)
+ end
+
+ around do |example|
+ Sidekiq::Testing.inline! { example.run }
+ end
+
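+ # Install the middleware under test in the Sidekiq testing server chain for these examples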
+ before(:context) do
+ Sidekiq::Testing.server_middleware do |chain|
+ chain.add described_class
+ end
+ end
+
+ after(:context) do
+ Sidekiq::Testing.server_middleware do |chain|
+ chain.remove described_class
+ end
+ end
+
+ describe "#call" do
+ it 'applies a class context' do
+ Gitlab::ApplicationContext.with_context(user: build_stubbed(:user)) do
+ TestWorker.perform_async("identifier", 1)
+ end
+
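+ # The class-level context (user: nil) overrides the user from the scheduling context, so no user metadata is recorded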
+ expect(TestWorker.contexts['identifier'].keys).not_to include('meta.user')
+ end
+
+ it "doesn't fail for unknown workers" do
+ expect { OtherWorker.perform_async }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index 473d85c0143..19242d25e27 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -44,7 +44,9 @@ describe Gitlab::SidekiqMiddleware do
Gitlab::SidekiqMiddleware::ServerMetrics,
Gitlab::SidekiqMiddleware::ArgumentsLogger,
Gitlab::SidekiqMiddleware::MemoryKiller,
- Gitlab::SidekiqMiddleware::RequestStoreMiddleware
+ Gitlab::SidekiqMiddleware::RequestStoreMiddleware,
+ Gitlab::SidekiqMiddleware::WorkerContext::Server,
+ Gitlab::SidekiqMiddleware::AdminMode::Server
]
end
let(:enabled_sidekiq_middlewares) { all_sidekiq_middlewares - disabled_sidekiq_middlewares }
@@ -109,6 +111,15 @@ describe Gitlab::SidekiqMiddleware do
let(:queue) { 'default' }
let(:redis_pool) { Sidekiq.redis_pool }
let(:middleware_expected_args) { [worker_class_arg, job, queue, redis_pool] }
+ let(:expected_middlewares) do
+ [
+ Gitlab::SidekiqStatus::ClientMiddleware,
+ Gitlab::SidekiqMiddleware::ClientMetrics,
+ Gitlab::SidekiqMiddleware::WorkerContext::Client,
+ Labkit::Middleware::Sidekiq::Client,
+ Gitlab::SidekiqMiddleware::AdminMode::Client
+ ]
+ end
before do
described_class.client_configurator.call(chain)
@@ -119,8 +130,9 @@ describe Gitlab::SidekiqMiddleware do
# this will prevent the full middleware chain from being executed.
# This test ensures that this does not happen
it "invokes the chain" do
- expect_any_instance_of(Gitlab::SidekiqStatus::ClientMiddleware).to receive(:call).with(*middleware_expected_args).once.and_call_original
- expect_any_instance_of(Labkit::Middleware::Sidekiq::Client).to receive(:call).with(*middleware_expected_args).once.and_call_original
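+ # Every expected client middleware should be invoked exactly once and in order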
+ expected_middlewares.each do |middleware|
+ expect_any_instance_of(middleware).to receive(:call).with(*middleware_expected_args).once.ordered.and_call_original
+ end
expect { |b| chain.invoke(worker_class_arg, job, queue, redis_pool, &b) }.to yield_control.once
end
diff --git a/spec/lib/gitlab/sidekiq_versioning_spec.rb b/spec/lib/gitlab/sidekiq_versioning_spec.rb
index dade5961775..11c866894c2 100644
--- a/spec/lib/gitlab/sidekiq_versioning_spec.rb
+++ b/spec/lib/gitlab/sidekiq_versioning_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::SidekiqVersioning, :sidekiq, :redis do
+describe Gitlab::SidekiqVersioning, :redis do
let(:foo_worker) do
Class.new do
def self.name
diff --git a/spec/lib/gitlab/submodule_links_spec.rb b/spec/lib/gitlab/submodule_links_spec.rb
index f0c8825de74..1f2848a29e9 100644
--- a/spec/lib/gitlab/submodule_links_spec.rb
+++ b/spec/lib/gitlab/submodule_links_spec.rb
@@ -38,7 +38,7 @@ describe Gitlab::SubmoduleLinks do
end
it 'returns links and caches them by ref' do
- expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/tree/hash'])
+ expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/-/tree/hash'])
cache_store = links.instance_variable_get("@cache_store")
@@ -49,7 +49,7 @@ describe Gitlab::SubmoduleLinks do
let(:ref) { 'signed-commits' }
it 'returns links' do
- expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/tree/hash'])
+ expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/-/tree/hash'])
end
end
end
diff --git a/spec/lib/gitlab/tab_width_spec.rb b/spec/lib/gitlab/tab_width_spec.rb
new file mode 100644
index 00000000000..3b5014d27e4
--- /dev/null
+++ b/spec/lib/gitlab/tab_width_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+describe Gitlab::TabWidth, lib: true do
+ describe '.css_class_for_user' do
+ it 'returns default CSS class when user is nil' do
+ css_class = described_class.css_class_for_user(nil)
+
+ expect(css_class).to eq('tab-width-8')
+ end
+
+ it "returns CSS class for user's tab width", :aggregate_failures do
+ [1, 6, 12].each do |i|
+ user = double('user', tab_width: i)
+ css_class = described_class.css_class_for_user(user)
+
+ expect(css_class).to eq("tab-width-#{i}")
+ end
+ end
+
+ it 'raises if tab width is out of valid range', :aggregate_failures do
+ [0, 13, 'foo', nil].each do |i|
+ expect do
+ user = double('user', tab_width: i)
+ described_class.css_class_for_user(user)
+ end.to raise_error(ArgumentError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/tcp_checker_spec.rb b/spec/lib/gitlab/tcp_checker_spec.rb
index 49f04f269ae..9474e79cc5d 100644
--- a/spec/lib/gitlab/tcp_checker_spec.rb
+++ b/spec/lib/gitlab/tcp_checker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::TcpChecker do
+describe Gitlab::TcpChecker, :permit_dns do
before do
@server = TCPServer.new('localhost', 0)
_, @port, _, @ip = @server.addr
diff --git a/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb b/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb
index 68402e64012..cdd681a9345 100644
--- a/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb
+++ b/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-require 'support/shared_examples/malicious_regexp_shared_examples'
+require 'support/shared_examples/lib/gitlab/malicious_regexp_shared_examples'
require 'support/helpers/stub_feature_flags'
describe Gitlab::UntrustedRegexp::RubySyntax do
diff --git a/spec/lib/gitlab/untrusted_regexp_spec.rb b/spec/lib/gitlab/untrusted_regexp_spec.rb
index 4cc21e94a83..60f14d0277b 100644
--- a/spec/lib/gitlab/untrusted_regexp_spec.rb
+++ b/spec/lib/gitlab/untrusted_regexp_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-require 'support/shared_examples/malicious_regexp_shared_examples'
+require 'support/shared_examples/lib/gitlab/malicious_regexp_shared_examples'
describe Gitlab::UntrustedRegexp do
describe '#initialize' do
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index a68ba489986..97859c82e9e 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::UrlBlocker do
+describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
include StubRequests
describe '#validate!' do
diff --git a/spec/lib/gitlab/url_builder_spec.rb b/spec/lib/gitlab/url_builder_spec.rb
index 0aab02b6c4c..49011b100ab 100644
--- a/spec/lib/gitlab/url_builder_spec.rb
+++ b/spec/lib/gitlab/url_builder_spec.rb
@@ -10,7 +10,7 @@ describe Gitlab::UrlBuilder do
url = described_class.build(commit)
- expect(url).to eq "#{Settings.gitlab['url']}/#{commit.project.full_path}/commit/#{commit.id}"
+ expect(url).to eq "#{Settings.gitlab['url']}/#{commit.project.full_path}/-/commit/#{commit.id}"
end
end
@@ -55,7 +55,7 @@ describe Gitlab::UrlBuilder do
url = described_class.build(merge_request)
- expect(url).to eq "#{Settings.gitlab['url']}/#{merge_request.project.full_path}/merge_requests/#{merge_request.iid}"
+ expect(url).to eq "#{Settings.gitlab['url']}/#{merge_request.project.full_path}/-/merge_requests/#{merge_request.iid}"
end
end
@@ -86,7 +86,7 @@ describe Gitlab::UrlBuilder do
url = described_class.build(note)
- expect(url).to eq "#{Settings.gitlab['url']}/#{note.project.full_path}/commit/#{note.commit_id}#note_#{note.id}"
+ expect(url).to eq "#{Settings.gitlab['url']}/#{note.project.full_path}/-/commit/#{note.commit_id}#note_#{note.id}"
end
end
@@ -96,7 +96,7 @@ describe Gitlab::UrlBuilder do
url = described_class.build(note)
- expect(url).to eq "#{Settings.gitlab['url']}/#{note.project.full_path}/commit/#{note.commit_id}#note_#{note.id}"
+ expect(url).to eq "#{Settings.gitlab['url']}/#{note.project.full_path}/-/commit/#{note.commit_id}#note_#{note.id}"
end
end
@@ -118,7 +118,7 @@ describe Gitlab::UrlBuilder do
url = described_class.build(note)
- expect(url).to eq "#{Settings.gitlab['url']}/#{merge_request.project.full_path}/merge_requests/#{merge_request.iid}#note_#{note.id}"
+ expect(url).to eq "#{Settings.gitlab['url']}/#{merge_request.project.full_path}/-/merge_requests/#{merge_request.iid}#note_#{note.id}"
end
end
@@ -129,7 +129,7 @@ describe Gitlab::UrlBuilder do
url = described_class.build(note)
- expect(url).to eq "#{Settings.gitlab['url']}/#{merge_request.project.full_path}/merge_requests/#{merge_request.iid}#note_#{note.id}"
+ expect(url).to eq "#{Settings.gitlab['url']}/#{merge_request.project.full_path}/-/merge_requests/#{merge_request.iid}#note_#{note.id}"
end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index cf1dacd088e..b50481a85cd 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -6,370 +6,392 @@ describe Gitlab::UsageData do
let(:projects) { create_list(:project, 4) }
let!(:board) { create(:board, project: projects[0]) }
- describe '#data' do
- before do
- create(:jira_service, project: projects[0])
- create(:jira_service, :without_properties_callback, project: projects[1])
- create(:jira_service, :jira_cloud_service, project: projects[2])
- create(:jira_service, :without_properties_callback, project: projects[3],
- properties: { url: 'https://mysite.atlassian.net' })
- create(:prometheus_service, project: projects[1])
- create(:service, project: projects[0], type: 'SlackSlashCommandsService', active: true)
- create(:service, project: projects[1], type: 'SlackService', active: true)
- create(:service, project: projects[2], type: 'SlackService', active: true)
- create(:service, project: projects[2], type: 'MattermostService', active: false)
- create(:service, project: projects[2], type: 'MattermostService', active: true, template: true)
- create(:service, project: projects[2], type: 'CustomIssueTrackerService', active: true)
- create(:project_error_tracking_setting, project: projects[0])
- create(:project_error_tracking_setting, project: projects[1], enabled: false)
- create_list(:issue, 4, project: projects[0])
- create(:zoom_meeting, project: projects[0], issue: projects[0].issues[0], issue_status: :added)
- create_list(:zoom_meeting, 2, project: projects[0], issue: projects[0].issues[1], issue_status: :removed)
- create(:zoom_meeting, project: projects[0], issue: projects[0].issues[2], issue_status: :added)
- create_list(:zoom_meeting, 2, project: projects[0], issue: projects[0].issues[2], issue_status: :removed)
- create(:sentry_issue, issue: projects[0].issues[0])
-
- # Enabled clusters
- gcp_cluster = create(:cluster_provider_gcp, :created).cluster
- create(:cluster_provider_aws, :created)
- create(:cluster_platform_kubernetes)
- create(:cluster, :group)
-
- # Disabled clusters
- create(:cluster, :disabled)
- create(:cluster, :group, :disabled)
- create(:cluster, :group, :disabled)
-
- # Applications
- create(:clusters_applications_helm, :installed, cluster: gcp_cluster)
- create(:clusters_applications_ingress, :installed, cluster: gcp_cluster)
- create(:clusters_applications_cert_manager, :installed, cluster: gcp_cluster)
- create(:clusters_applications_prometheus, :installed, cluster: gcp_cluster)
- create(:clusters_applications_crossplane, :installed, cluster: gcp_cluster)
- create(:clusters_applications_runner, :installed, cluster: gcp_cluster)
- create(:clusters_applications_knative, :installed, cluster: gcp_cluster)
- create(:clusters_applications_elastic_stack, :installed, cluster: gcp_cluster)
-
- create(:grafana_integration, project: projects[0], enabled: true)
- create(:grafana_integration, project: projects[1], enabled: true)
- create(:grafana_integration, project: projects[2], enabled: false)
-
- allow(Gitlab::GrafanaEmbedUsageData).to receive(:issue_count).and_return(2)
-
- ProjectFeature.first.update_attribute('repository_access_level', 0)
- end
-
- subject { described_class.data }
-
- it 'gathers usage data', :aggregate_failures do
- expect(subject.keys).to include(*%i(
- active_user_count
- counts
- recorded_at
- edition
- version
- installation_type
- uuid
- hostname
- mattermost_enabled
- signup_enabled
- ldap_enabled
- gravatar_enabled
- omniauth_enabled
- reply_by_email_enabled
- container_registry_enabled
- dependency_proxy_enabled
- gitlab_shared_runners_enabled
- gitlab_pages
- git
- gitaly
- database
- avg_cycle_analytics
- influxdb_metrics_enabled
- prometheus_metrics_enabled
- web_ide_clientside_preview_enabled
- ingress_modsecurity_enabled
- ))
- end
-
- it 'gathers usage counts' do
- smau_keys = %i(
- snippet_create
- snippet_update
- snippet_comment
- merge_request_comment
- merge_request_create
- commit_comment
- wiki_pages_create
- wiki_pages_update
- wiki_pages_delete
- web_ide_views
- web_ide_commits
- web_ide_merge_requests
- web_ide_previews
- navbar_searches
- cycle_analytics_views
- productivity_analytics_views
- source_code_pushes
- )
-
- expected_keys = %i(
- assignee_lists
- boards
- ci_builds
- ci_internal_pipelines
- ci_external_pipelines
- ci_pipeline_config_auto_devops
- ci_pipeline_config_repository
- ci_runners
- ci_triggers
- ci_pipeline_schedules
- auto_devops_enabled
- auto_devops_disabled
- deploy_keys
- deployments
- successful_deployments
- failed_deployments
- environments
- clusters
- clusters_enabled
- project_clusters_enabled
- group_clusters_enabled
- clusters_disabled
- project_clusters_disabled
- group_clusters_disabled
- clusters_platforms_eks
- clusters_platforms_gke
- clusters_platforms_user
- clusters_applications_helm
- clusters_applications_ingress
- clusters_applications_cert_managers
- clusters_applications_prometheus
- clusters_applications_crossplane
- clusters_applications_runner
- clusters_applications_knative
- clusters_applications_elastic_stack
- in_review_folder
- grafana_integrated_projects
- groups
- issues
- issues_created_from_gitlab_error_tracking_ui
- issues_with_associated_zoom_link
- issues_using_zoom_quick_actions
- issues_with_embedded_grafana_charts_approx
- keys
- label_lists
- labels
- lfs_objects
- merge_requests
- milestone_lists
- milestones
- notes
- pool_repositories
- projects
- projects_imported_from_github
- projects_asana_active
- projects_jira_active
- projects_jira_server_active
- projects_jira_cloud_active
- projects_slack_notifications_active
- projects_slack_slash_active
- projects_slack_active
- projects_slack_slash_commands_active
- projects_custom_issue_tracker_active
- projects_mattermost_active
- projects_prometheus_active
- projects_with_repositories_enabled
- projects_with_error_tracking_enabled
- pages_domains
- protected_branches
- releases
- remote_mirrors
- snippets
- suggestions
- todos
- uploads
- web_hooks
- ).push(*smau_keys)
-
- count_data = subject[:counts]
-
- expect(count_data[:boards]).to eq(1)
- expect(count_data[:projects]).to eq(4)
- expect(count_data.values_at(*smau_keys)).to all(be_an(Integer))
- expect(count_data.keys).to include(*expected_keys)
- expect(expected_keys - count_data.keys).to be_empty
- end
-
- it 'gathers projects data correctly', :aggregate_failures do
- count_data = subject[:counts]
-
- expect(count_data[:projects]).to eq(4)
- expect(count_data[:projects_asana_active]).to eq(0)
- expect(count_data[:projects_prometheus_active]).to eq(1)
- expect(count_data[:projects_jira_active]).to eq(4)
- expect(count_data[:projects_jira_server_active]).to eq(2)
- expect(count_data[:projects_jira_cloud_active]).to eq(2)
- expect(count_data[:projects_slack_notifications_active]).to eq(2)
- expect(count_data[:projects_slack_slash_active]).to eq(1)
- expect(count_data[:projects_slack_active]).to eq(2)
- expect(count_data[:projects_slack_slash_commands_active]).to eq(1)
- expect(count_data[:projects_custom_issue_tracker_active]).to eq(1)
- expect(count_data[:projects_mattermost_active]).to eq(0)
- expect(count_data[:projects_with_repositories_enabled]).to eq(3)
- expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
- expect(count_data[:issues_created_from_gitlab_error_tracking_ui]).to eq(1)
- expect(count_data[:issues_with_associated_zoom_link]).to eq(2)
- expect(count_data[:issues_using_zoom_quick_actions]).to eq(3)
- expect(count_data[:issues_with_embedded_grafana_charts_approx]).to eq(2)
-
- expect(count_data[:clusters_enabled]).to eq(4)
- expect(count_data[:project_clusters_enabled]).to eq(3)
- expect(count_data[:group_clusters_enabled]).to eq(1)
- expect(count_data[:clusters_disabled]).to eq(3)
- expect(count_data[:project_clusters_disabled]).to eq(1)
- expect(count_data[:group_clusters_disabled]).to eq(2)
- expect(count_data[:group_clusters_enabled]).to eq(1)
- expect(count_data[:clusters_platforms_eks]).to eq(1)
- expect(count_data[:clusters_platforms_gke]).to eq(1)
- expect(count_data[:clusters_platforms_user]).to eq(1)
- expect(count_data[:clusters_applications_helm]).to eq(1)
- expect(count_data[:clusters_applications_ingress]).to eq(1)
- expect(count_data[:clusters_applications_cert_managers]).to eq(1)
- expect(count_data[:clusters_applications_crossplane]).to eq(1)
- expect(count_data[:clusters_applications_prometheus]).to eq(1)
- expect(count_data[:clusters_applications_runner]).to eq(1)
- expect(count_data[:clusters_applications_knative]).to eq(1)
- expect(count_data[:clusters_applications_elastic_stack]).to eq(1)
- expect(count_data[:grafana_integrated_projects]).to eq(2)
- end
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ end
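+
+ # Run the examples below with the usage_ping_batch_counter feature flag both enabled and disabled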
+ [true, false].each do |usage_ping_batch_counter_on|
+ describe "when the feature flag usage_ping_batch_counter is set to #{usage_ping_batch_counter_on}" do
+ before do
+ stub_feature_flags(usage_ping_batch_counter: usage_ping_batch_counter_on)
+ end
- it 'works when queries time out' do
- allow_any_instance_of(ActiveRecord::Relation)
- .to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+ describe '#data' do
+ before do
+ create(:jira_service, project: projects[0])
+ create(:jira_service, :without_properties_callback, project: projects[1])
+ create(:jira_service, :jira_cloud_service, project: projects[2])
+ create(:jira_service, :without_properties_callback, project: projects[3],
+ properties: { url: 'https://mysite.atlassian.net' })
+ create(:prometheus_service, project: projects[1])
+ create(:service, project: projects[0], type: 'SlackSlashCommandsService', active: true)
+ create(:service, project: projects[1], type: 'SlackService', active: true)
+ create(:service, project: projects[2], type: 'SlackService', active: true)
+ create(:service, project: projects[2], type: 'MattermostService', active: false)
+ create(:service, project: projects[2], type: 'MattermostService', active: true, template: true)
+ create(:service, project: projects[2], type: 'CustomIssueTrackerService', active: true)
+ create(:project_error_tracking_setting, project: projects[0])
+ create(:project_error_tracking_setting, project: projects[1], enabled: false)
+ create(:alerts_service, project: projects[0])
+ create(:alerts_service, :inactive, project: projects[1])
+ create_list(:issue, 2, project: projects[0], author: User.alert_bot)
+ create_list(:issue, 2, project: projects[1], author: User.alert_bot)
+ create_list(:issue, 4, project: projects[0])
+ create(:zoom_meeting, project: projects[0], issue: projects[0].issues[0], issue_status: :added)
+ create_list(:zoom_meeting, 2, project: projects[0], issue: projects[0].issues[1], issue_status: :removed)
+ create(:zoom_meeting, project: projects[0], issue: projects[0].issues[2], issue_status: :added)
+ create_list(:zoom_meeting, 2, project: projects[0], issue: projects[0].issues[2], issue_status: :removed)
+ create(:sentry_issue, issue: projects[0].issues[0])
+
+ # Enabled clusters
+ gcp_cluster = create(:cluster_provider_gcp, :created).cluster
+ create(:cluster_provider_aws, :created)
+ create(:cluster_platform_kubernetes)
+ create(:cluster, :group)
+
+ # Disabled clusters
+ create(:cluster, :disabled)
+ create(:cluster, :group, :disabled)
+ create(:cluster, :group, :disabled)
+
+ # Applications
+ create(:clusters_applications_helm, :installed, cluster: gcp_cluster)
+ create(:clusters_applications_ingress, :installed, cluster: gcp_cluster)
+ create(:clusters_applications_cert_manager, :installed, cluster: gcp_cluster)
+ create(:clusters_applications_prometheus, :installed, cluster: gcp_cluster)
+ create(:clusters_applications_crossplane, :installed, cluster: gcp_cluster)
+ create(:clusters_applications_runner, :installed, cluster: gcp_cluster)
+ create(:clusters_applications_knative, :installed, cluster: gcp_cluster)
+ create(:clusters_applications_elastic_stack, :installed, cluster: gcp_cluster)
+ create(:clusters_applications_jupyter, :installed, cluster: gcp_cluster)
+
+ create(:grafana_integration, project: projects[0], enabled: true)
+ create(:grafana_integration, project: projects[1], enabled: true)
+ create(:grafana_integration, project: projects[2], enabled: false)
+
+ allow(Gitlab::GrafanaEmbedUsageData).to receive(:issue_count).and_return(2)
+
+ ProjectFeature.first.update_attribute('repository_access_level', 0)
+ end
+
+ subject { described_class.data }
+
+ it 'gathers usage data', :aggregate_failures do
+ expect(subject.keys).to include(*%i(
+ active_user_count
+ counts
+ recorded_at
+ edition
+ version
+ installation_type
+ uuid
+ hostname
+ mattermost_enabled
+ signup_enabled
+ ldap_enabled
+ gravatar_enabled
+ omniauth_enabled
+ reply_by_email_enabled
+ container_registry_enabled
+ dependency_proxy_enabled
+ gitlab_shared_runners_enabled
+ gitlab_pages
+ git
+ gitaly
+ database
+ avg_cycle_analytics
+ influxdb_metrics_enabled
+ prometheus_metrics_enabled
+ web_ide_clientside_preview_enabled
+ ingress_modsecurity_enabled
+ ))
+ end
+
+ it 'gathers usage counts' do
+ smau_keys = %i(
+ snippet_create
+ snippet_update
+ snippet_comment
+ merge_request_comment
+ merge_request_create
+ commit_comment
+ wiki_pages_create
+ wiki_pages_update
+ wiki_pages_delete
+ web_ide_views
+ web_ide_commits
+ web_ide_merge_requests
+ web_ide_previews
+ navbar_searches
+ cycle_analytics_views
+ productivity_analytics_views
+ source_code_pushes
+ )
+
+ expected_keys = %i(
+ assignee_lists
+ boards
+ ci_builds
+ ci_internal_pipelines
+ ci_external_pipelines
+ ci_pipeline_config_auto_devops
+ ci_pipeline_config_repository
+ ci_runners
+ ci_triggers
+ ci_pipeline_schedules
+ auto_devops_enabled
+ auto_devops_disabled
+ deploy_keys
+ deployments
+ successful_deployments
+ failed_deployments
+ environments
+ clusters
+ clusters_enabled
+ project_clusters_enabled
+ group_clusters_enabled
+ clusters_disabled
+ project_clusters_disabled
+ group_clusters_disabled
+ clusters_platforms_eks
+ clusters_platforms_gke
+ clusters_platforms_user
+ clusters_applications_helm
+ clusters_applications_ingress
+ clusters_applications_cert_managers
+ clusters_applications_prometheus
+ clusters_applications_crossplane
+ clusters_applications_runner
+ clusters_applications_knative
+ clusters_applications_elastic_stack
+ clusters_applications_jupyter
+ in_review_folder
+ grafana_integrated_projects
+ groups
+ issues
+ issues_created_from_gitlab_error_tracking_ui
+ issues_with_associated_zoom_link
+ issues_using_zoom_quick_actions
+ issues_with_embedded_grafana_charts_approx
+ incident_issues
+ keys
+ label_lists
+ labels
+ lfs_objects
+ merge_requests
+ milestone_lists
+ milestones
+ notes
+ pool_repositories
+ projects
+ projects_imported_from_github
+ projects_asana_active
+ projects_jira_active
+ projects_jira_server_active
+ projects_jira_cloud_active
+ projects_slack_notifications_active
+ projects_slack_slash_active
+ projects_slack_active
+ projects_slack_slash_commands_active
+ projects_custom_issue_tracker_active
+ projects_mattermost_active
+ projects_prometheus_active
+ projects_with_repositories_enabled
+ projects_with_error_tracking_enabled
+ projects_with_alerts_service_enabled
+ pages_domains
+ protected_branches
+ releases
+ remote_mirrors
+ snippets
+ suggestions
+ todos
+ uploads
+ web_hooks
+ ).push(*smau_keys)
+
+ count_data = subject[:counts]
+
+ expect(count_data[:boards]).to eq(1)
+ expect(count_data[:projects]).to eq(4)
+ expect(count_data.values_at(*smau_keys)).to all(be_an(Integer))
+ expect(count_data.keys).to include(*expected_keys)
+ expect(expected_keys - count_data.keys).to be_empty
+ end
+
+ it 'gathers projects data correctly', :aggregate_failures do
+ count_data = subject[:counts]
+
+ expect(count_data[:projects]).to eq(4)
+ expect(count_data[:projects_asana_active]).to eq(0)
+ expect(count_data[:projects_prometheus_active]).to eq(1)
+ expect(count_data[:projects_jira_active]).to eq(4)
+ expect(count_data[:projects_jira_server_active]).to eq(2)
+ expect(count_data[:projects_jira_cloud_active]).to eq(2)
+ expect(count_data[:projects_slack_notifications_active]).to eq(2)
+ expect(count_data[:projects_slack_slash_active]).to eq(1)
+ expect(count_data[:projects_slack_active]).to eq(2)
+ expect(count_data[:projects_slack_slash_commands_active]).to eq(1)
+ expect(count_data[:projects_custom_issue_tracker_active]).to eq(1)
+ expect(count_data[:projects_mattermost_active]).to eq(0)
+ expect(count_data[:projects_with_repositories_enabled]).to eq(3)
+ expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
+ expect(count_data[:projects_with_alerts_service_enabled]).to eq(1)
+ expect(count_data[:issues_created_from_gitlab_error_tracking_ui]).to eq(1)
+ expect(count_data[:issues_with_associated_zoom_link]).to eq(2)
+ expect(count_data[:issues_using_zoom_quick_actions]).to eq(3)
+ expect(count_data[:issues_with_embedded_grafana_charts_approx]).to eq(2)
+ expect(count_data[:incident_issues]).to eq(4)
+
+ expect(count_data[:clusters_enabled]).to eq(4)
+ expect(count_data[:project_clusters_enabled]).to eq(3)
+ expect(count_data[:group_clusters_enabled]).to eq(1)
+ expect(count_data[:clusters_disabled]).to eq(3)
+ expect(count_data[:project_clusters_disabled]).to eq(1)
+ expect(count_data[:group_clusters_disabled]).to eq(2)
+ expect(count_data[:group_clusters_enabled]).to eq(1)
+ expect(count_data[:clusters_platforms_eks]).to eq(1)
+ expect(count_data[:clusters_platforms_gke]).to eq(1)
+ expect(count_data[:clusters_platforms_user]).to eq(1)
+ expect(count_data[:clusters_applications_helm]).to eq(1)
+ expect(count_data[:clusters_applications_ingress]).to eq(1)
+ expect(count_data[:clusters_applications_cert_managers]).to eq(1)
+ expect(count_data[:clusters_applications_crossplane]).to eq(1)
+ expect(count_data[:clusters_applications_prometheus]).to eq(1)
+ expect(count_data[:clusters_applications_runner]).to eq(1)
+ expect(count_data[:clusters_applications_knative]).to eq(1)
+ expect(count_data[:clusters_applications_elastic_stack]).to eq(1)
+ expect(count_data[:grafana_integrated_projects]).to eq(2)
+ expect(count_data[:clusters_applications_jupyter]).to eq(1)
+ end
+
+ it 'works when queries time out' do
+ allow_any_instance_of(ActiveRecord::Relation)
+ .to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+
+ expect { subject }.not_to raise_error
+ end
+ end
- expect { subject }.not_to raise_error
- end
- end
+ describe '#usage_data_counters' do
+ subject { described_class.usage_data_counters }
- describe '#usage_data_counters' do
- subject { described_class.usage_data_counters }
+ it { is_expected.to all(respond_to :totals) }
- it { is_expected.to all(respond_to :totals) }
+ describe 'the results of calling #totals on all objects in the array' do
+ subject { described_class.usage_data_counters.map(&:totals) }
- describe 'the results of calling #totals on all objects in the array' do
- subject { described_class.usage_data_counters.map(&:totals) }
+ it { is_expected.to all(be_a Hash) }
+ it { is_expected.to all(have_attributes(keys: all(be_a Symbol), values: all(be_a Integer))) }
+ end
- it { is_expected.to all(be_a Hash) }
- it { is_expected.to all(have_attributes(keys: all(be_a Symbol), values: all(be_a Integer))) }
- end
+ it 'does not have any conflicts' do
+ all_keys = subject.flat_map { |counter| counter.totals.keys }
- it 'does not have any conflicts' do
- all_keys = subject.flat_map { |counter| counter.totals.keys }
+ expect(all_keys.size).to eq all_keys.to_set.size
+ end
+ end
- expect(all_keys.size).to eq all_keys.to_set.size
- end
- end
+ describe '#features_usage_data_ce' do
+ subject { described_class.features_usage_data_ce }
+
+ it 'gathers feature usage data', :aggregate_failures do
+ expect(subject[:mattermost_enabled]).to eq(Gitlab.config.mattermost.enabled)
+ expect(subject[:signup_enabled]).to eq(Gitlab::CurrentSettings.allow_signup?)
+ expect(subject[:ldap_enabled]).to eq(Gitlab.config.ldap.enabled)
+ expect(subject[:gravatar_enabled]).to eq(Gitlab::CurrentSettings.gravatar_enabled?)
+ expect(subject[:omniauth_enabled]).to eq(Gitlab::Auth.omniauth_enabled?)
+ expect(subject[:reply_by_email_enabled]).to eq(Gitlab::IncomingEmail.enabled?)
+ expect(subject[:container_registry_enabled]).to eq(Gitlab.config.registry.enabled)
+ expect(subject[:dependency_proxy_enabled]).to eq(Gitlab.config.dependency_proxy.enabled)
+ expect(subject[:gitlab_shared_runners_enabled]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled)
+ expect(subject[:web_ide_clientside_preview_enabled]).to eq(Gitlab::CurrentSettings.web_ide_clientside_preview_enabled?)
+ end
+ end
- describe '#features_usage_data_ce' do
- subject { described_class.features_usage_data_ce }
-
- it 'gathers feature usage data', :aggregate_failures do
- expect(subject[:mattermost_enabled]).to eq(Gitlab.config.mattermost.enabled)
- expect(subject[:signup_enabled]).to eq(Gitlab::CurrentSettings.allow_signup?)
- expect(subject[:ldap_enabled]).to eq(Gitlab.config.ldap.enabled)
- expect(subject[:gravatar_enabled]).to eq(Gitlab::CurrentSettings.gravatar_enabled?)
- expect(subject[:omniauth_enabled]).to eq(Gitlab::Auth.omniauth_enabled?)
- expect(subject[:reply_by_email_enabled]).to eq(Gitlab::IncomingEmail.enabled?)
- expect(subject[:container_registry_enabled]).to eq(Gitlab.config.registry.enabled)
- expect(subject[:dependency_proxy_enabled]).to eq(Gitlab.config.dependency_proxy.enabled)
- expect(subject[:gitlab_shared_runners_enabled]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled)
- expect(subject[:web_ide_clientside_preview_enabled]).to eq(Gitlab::CurrentSettings.web_ide_clientside_preview_enabled?)
- end
- end
+ describe '#components_usage_data' do
+ subject { described_class.components_usage_data }
+
+ it 'gathers components usage data', :aggregate_failures do
+ expect(subject[:gitlab_pages][:enabled]).to eq(Gitlab.config.pages.enabled)
+ expect(subject[:gitlab_pages][:version]).to eq(Gitlab::Pages::VERSION)
+ expect(subject[:git][:version]).to eq(Gitlab::Git.version)
+ expect(subject[:database][:adapter]).to eq(Gitlab::Database.adapter_name)
+ expect(subject[:database][:version]).to eq(Gitlab::Database.version)
+ expect(subject[:gitaly][:version]).to be_present
+ expect(subject[:gitaly][:servers]).to be >= 1
+ expect(subject[:gitaly][:filesystems]).to be_an(Array)
+ expect(subject[:gitaly][:filesystems].first).to be_a(String)
+ end
+ end
- describe '#components_usage_data' do
- subject { described_class.components_usage_data }
-
- it 'gathers components usage data', :aggregate_failures do
- expect(subject[:gitlab_pages][:enabled]).to eq(Gitlab.config.pages.enabled)
- expect(subject[:gitlab_pages][:version]).to eq(Gitlab::Pages::VERSION)
- expect(subject[:git][:version]).to eq(Gitlab::Git.version)
- expect(subject[:database][:adapter]).to eq(Gitlab::Database.adapter_name)
- expect(subject[:database][:version]).to eq(Gitlab::Database.version)
- expect(subject[:gitaly][:version]).to be_present
- expect(subject[:gitaly][:servers]).to be >= 1
- expect(subject[:gitaly][:filesystems]).to be_an(Array)
- expect(subject[:gitaly][:filesystems].first).to be_a(String)
- end
- end
+ describe '#ingress_modsecurity_usage' do
+ subject { described_class.ingress_modsecurity_usage }
+
+ it 'gathers variable data' do
+ allow_any_instance_of(
+ ::Clusters::Applications::IngressModsecurityUsageService
+ ).to receive(:execute).and_return(
+ {
+ ingress_modsecurity_blocking: 1,
+ ingress_modsecurity_disabled: 2
+ }
+ )
+
+ expect(subject[:ingress_modsecurity_blocking]).to eq(1)
+ expect(subject[:ingress_modsecurity_disabled]).to eq(2)
+ end
+ end
- describe '#ingress_modsecurity_usage' do
- subject { described_class.ingress_modsecurity_usage }
-
- it 'gathers variable data' do
- allow_any_instance_of(
- ::Clusters::Applications::IngressModsecurityUsageService
- ).to receive(:execute).and_return(
- {
- ingress_modsecurity_blocking: 1,
- ingress_modsecurity_disabled: 2
- }
- )
-
- expect(subject[:ingress_modsecurity_blocking]).to eq(1)
- expect(subject[:ingress_modsecurity_disabled]).to eq(2)
- end
- end
+ describe '#license_usage_data' do
+ subject { described_class.license_usage_data }
- describe '#license_usage_data' do
- subject { described_class.license_usage_data }
+ it 'gathers license data', :aggregate_failures do
+ expect(subject[:uuid]).to eq(Gitlab::CurrentSettings.uuid)
+ expect(subject[:version]).to eq(Gitlab::VERSION)
+ expect(subject[:installation_type]).to eq('gitlab-development-kit')
+ expect(subject[:active_user_count]).to eq(User.active.count)
+ expect(subject[:recorded_at]).to be_a(Time)
+ end
+ end
- it 'gathers license data', :aggregate_failures do
- expect(subject[:uuid]).to eq(Gitlab::CurrentSettings.uuid)
- expect(subject[:version]).to eq(Gitlab::VERSION)
- expect(subject[:installation_type]).to eq('gitlab-development-kit')
- expect(subject[:active_user_count]).to eq(User.active.count)
- expect(subject[:recorded_at]).to be_a(Time)
- end
- end
+ describe '#count' do
+ let(:relation) { double(:relation) }
- describe '#count' do
- let(:relation) { double(:relation) }
+ it 'returns the count when counting succeeds' do
+ allow(relation).to receive(:count).and_return(1)
- it 'returns the count when counting succeeds' do
- allow(relation).to receive(:count).and_return(1)
+ expect(described_class.count(relation, batch: false)).to eq(1)
+ end
- expect(described_class.count(relation)).to eq(1)
- end
+ it 'returns the fallback value when counting fails' do
+ allow(relation).to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
- it 'returns the fallback value when counting fails' do
- allow(relation).to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+ expect(described_class.count(relation, fallback: 15, batch: false)).to eq(15)
+ end
+ end
- expect(described_class.count(relation, fallback: 15)).to eq(15)
- end
- end
+ describe '#approximate_counts' do
+ it 'gets approximate counts for selected models', :aggregate_failures do
+ create(:label)
- describe '#approximate_counts' do
- it 'gets approximate counts for selected models', :aggregate_failures do
- create(:label)
+ expect(Gitlab::Database::Count).to receive(:approximate_counts)
+ .with(described_class::APPROXIMATE_COUNT_MODELS).once.and_call_original
- expect(Gitlab::Database::Count).to receive(:approximate_counts)
- .with(described_class::APPROXIMATE_COUNT_MODELS).once.and_call_original
+ counts = described_class.approximate_counts.values
- counts = described_class.approximate_counts.values
+ expect(counts.count).to eq(described_class::APPROXIMATE_COUNT_MODELS.count)
+ expect(counts.any? { |count| count < 0 }).to be_falsey
+ end
- expect(counts.count).to eq(described_class::APPROXIMATE_COUNT_MODELS.count)
- expect(counts.any? { |count| count < 0 }).to be_falsey
- end
+ it 'returns default values if counts cannot be retrieved', :aggregate_failures do
+ described_class::APPROXIMATE_COUNT_MODELS.map do |model|
+ model.name.underscore.pluralize.to_sym
+ end
- it 'returns default values if counts can not be retrieved', :aggregate_failures do
- described_class::APPROXIMATE_COUNT_MODELS.map do |model|
- model.name.underscore.pluralize.to_sym
+ expect(Gitlab::Database::Count).to receive(:approximate_counts).and_return({})
+ expect(described_class.approximate_counts.values.uniq).to eq([-1])
+ end
end
-
- expect(Gitlab::Database::Count).to receive(:approximate_counts).and_return({})
- expect(described_class.approximate_counts.values.uniq).to eq([-1])
end
end
end
diff --git a/spec/lib/gitlab/user_access_spec.rb b/spec/lib/gitlab/user_access_spec.rb
index 4e7c43a6856..2f4ab2e71db 100644
--- a/spec/lib/gitlab/user_access_spec.rb
+++ b/spec/lib/gitlab/user_access_spec.rb
@@ -160,7 +160,7 @@ describe Gitlab::UserAccess do
expect(access.can_push_to_branch?('master')).to be_falsey
end
- it 'does not allow the user to push if he does not have push access to the canonical project' do
+ it 'does not allow the user to push if they do not have push access to the canonical project' do
canonical_project.add_guest(user)
expect(access.can_push_to_branch?('awesome-feature')).to be_falsey
diff --git a/spec/lib/gitlab/utils/deep_size_spec.rb b/spec/lib/gitlab/utils/deep_size_spec.rb
index ccd202b33f7..5a155fb6c80 100644
--- a/spec/lib/gitlab/utils/deep_size_spec.rb
+++ b/spec/lib/gitlab/utils/deep_size_spec.rb
@@ -17,29 +17,45 @@ describe Gitlab::Utils::DeepSize do
let(:max_size) { 1.kilobyte }
let(:max_depth) { 10 }
- let(:deep_size) { described_class.new(data, max_size: max_size, max_depth: max_depth) }
- describe '#evaluate' do
- context 'when data within size and depth limits' do
- it 'returns true' do
- expect(deep_size).to be_valid
+ subject(:deep_size) { described_class.new(data, max_size: max_size, max_depth: max_depth) }
+
+ it { expect(described_class::DEFAULT_MAX_SIZE).to eq(1.megabyte) }
+ it { expect(described_class::DEFAULT_MAX_DEPTH).to eq(100) }
+
+ describe '#initialize' do
+ context 'when max_size is nil' do
+ let(:max_size) { nil }
+
+ it 'sets max_size to DEFAULT_MAX_SIZE' do
+ expect(subject.instance_variable_get(:@max_size)).to eq(described_class::DEFAULT_MAX_SIZE)
+ end
+ end
+
+ context 'when max_depth is nil' do
+ let(:max_depth) { nil }
+
+ it 'sets max_depth to DEFAULT_MAX_DEPTH' do
+ expect(subject.instance_variable_get(:@max_depth)).to eq(described_class::DEFAULT_MAX_DEPTH)
end
end
+ end
+
+ describe '#valid?' do
+ context 'when data within size and depth limits' do
+ it { is_expected.to be_valid }
+ end
context 'when data not within size limit' do
let(:max_size) { 200.bytes }
- it 'returns false' do
- expect(deep_size).not_to be_valid
- end
+ it { is_expected.not_to be_valid }
end
context 'when data not within depth limit' do
let(:max_depth) { 2 }
- it 'returns false' do
- expect(deep_size).not_to be_valid
- end
+ it { is_expected.not_to be_valid }
end
end
diff --git a/spec/lib/gitlab/utils/log_limited_array_spec.rb b/spec/lib/gitlab/utils/log_limited_array_spec.rb
new file mode 100644
index 00000000000..2729b2c7b6f
--- /dev/null
+++ b/spec/lib/gitlab/utils/log_limited_array_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+describe Gitlab::Utils::LogLimitedArray do
+ describe '.log_limited_array' do
+ context 'when the argument is not an array' do
+ it 'returns an empty array' do
+ expect(described_class.log_limited_array('aa')).to eq([])
+ end
+ end
+
+ context 'when the argument is an array' do
+ context 'when the array is under the limit' do
+ it 'returns the array unchanged' do
+ expect(described_class.log_limited_array(%w(a b))).to eq(%w(a b))
+ end
+ end
+
+ context 'when the array exceeds the limit' do
+ it 'replaces arguments after the limit with an ellipsis string' do
+ half_limit = described_class::MAXIMUM_ARRAY_LENGTH / 2
+ long_array = ['a' * half_limit, 'b' * half_limit, 'c']
+
+ expect(described_class.log_limited_array(long_array))
+ .to eq(long_array.take(1) + ['...'])
+ end
+ end
+
+ context 'when the array contains arrays and hashes' do
+ it 'calculates the size based on the JSON representation' do
+ long_array = [
+ 'a',
+ ['b'] * 10,
+ { c: 'c' * 10 },
+ # Each character in the array takes up four characters: the
+ # character itself, the two quotes, and the comma (closing
+ # square bracket for the last item)
+ ['d'] * (described_class::MAXIMUM_ARRAY_LENGTH / 4),
+ 'e'
+ ]
+
+ expect(described_class.log_limited_array(long_array))
+ .to eq(long_array.take(3) + ['...'])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/x509/commit_spec.rb b/spec/lib/gitlab/x509/commit_spec.rb
new file mode 100644
index 00000000000..9cddf27ddce
--- /dev/null
+++ b/spec/lib/gitlab/x509/commit_spec.rb
@@ -0,0 +1,208 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe Gitlab::X509::Commit do
+ describe '#signature' do
+ let(:signature) { described_class.new(commit).signature }
+
+ let(:user1_certificate_attributes) do
+ {
+ subject_key_identifier: X509Helpers::User1.certificate_subject_key_identifier,
+ subject: X509Helpers::User1.certificate_subject,
+ email: X509Helpers::User1.certificate_email,
+ serial_number: X509Helpers::User1.certificate_serial
+ }
+ end
+
+ let(:user1_issuer_attributes) do
+ {
+ subject_key_identifier: X509Helpers::User1.issuer_subject_key_identifier,
+ subject: X509Helpers::User1.certificate_issuer,
+ crl_url: X509Helpers::User1.certificate_crl
+ }
+ end
+
+ shared_examples 'returns the cached signature on second call' do
+ it 'returns the cached signature on second call' do
+ x509_commit = described_class.new(commit)
+
+ expect(x509_commit).to receive(:create_cached_signature).and_call_original
+ signature
+
+ # consecutive call
+ expect(x509_commit).not_to receive(:create_cached_signature).and_call_original
+ signature
+ end
+ end
+
+ let!(:project) { create :project, :repository, path: X509Helpers::User1.path }
+ let!(:commit_sha) { X509Helpers::User1.commit }
+
+ context 'unsigned commit' do
+ let!(:commit) { create :commit, project: project, sha: commit_sha }
+
+ it 'returns nil' do
+ expect(described_class.new(commit).signature).to be_nil
+ end
+ end
+
+ context 'valid signature from known user' do
+ let!(:commit) { create :commit, project: project, sha: commit_sha, created_at: Time.utc(2019, 1, 1, 20, 15, 0), committer_email: X509Helpers::User1.emails.first }
+
+ let!(:user) { create(:user, email: X509Helpers::User1.emails.first) }
+
+ before do
+ allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
+ .with(Gitlab::Git::Repository, commit_sha)
+ .and_return(
+ [
+ X509Helpers::User1.signed_commit_signature,
+ X509Helpers::User1.signed_commit_base_data
+ ]
+ )
+ end
+
+ it 'returns an unverified signature' do
+ expect(signature).to have_attributes(
+ commit_sha: commit_sha,
+ project: project,
+ verification_status: 'unverified'
+ )
+ expect(signature.x509_certificate).to have_attributes(user1_certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(user1_issuer_attributes)
+ expect(signature.persisted?).to be_truthy
+ end
+ end
+
+ context 'verified signature from known user' do
+ let!(:commit) { create :commit, project: project, sha: commit_sha, created_at: Time.utc(2019, 1, 1, 20, 15, 0), committer_email: X509Helpers::User1.emails.first }
+
+ let!(:user) { create(:user, email: X509Helpers::User1.emails.first) }
+
+ before do
+ allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
+ .with(Gitlab::Git::Repository, commit_sha)
+ .and_return(
+ [
+ X509Helpers::User1.signed_commit_signature,
+ X509Helpers::User1.signed_commit_base_data
+ ]
+ )
+ end
+
+ context 'with trusted certificate store' do
+ before do
+ store = OpenSSL::X509::Store.new
+ certificate = OpenSSL::X509::Certificate.new X509Helpers::User1.trust_cert
+ store.add_cert(certificate)
+ allow(OpenSSL::X509::Store).to receive(:new)
+ .and_return(
+ store
+ )
+ end
+
+ it 'returns a verified signature' do
+ expect(signature).to have_attributes(
+ commit_sha: commit_sha,
+ project: project,
+ verification_status: 'verified'
+ )
+ expect(signature.x509_certificate).to have_attributes(user1_certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(user1_issuer_attributes)
+ expect(signature.persisted?).to be_truthy
+ end
+ end
+
+ context 'without trusted certificate within store' do
+ before do
+ store = OpenSSL::X509::Store.new
+ allow(OpenSSL::X509::Store).to receive(:new)
+ .and_return(
+ store
+ )
+ end
+
+ it 'returns an unverified signature' do
+ expect(signature).to have_attributes(
+ commit_sha: commit_sha,
+ project: project,
+ verification_status: 'unverified'
+ )
+ expect(signature.x509_certificate).to have_attributes(user1_certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(user1_issuer_attributes)
+ expect(signature.persisted?).to be_truthy
+ end
+ end
+ end
+
+ context 'unverified signature from unknown user' do
+ let!(:commit) { create :commit, project: project, sha: commit_sha, created_at: Time.utc(2019, 1, 1, 20, 15, 0), committer_email: X509Helpers::User1.emails.first }
+
+ before do
+ allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
+ .with(Gitlab::Git::Repository, commit_sha)
+ .and_return(
+ [
+ X509Helpers::User1.signed_commit_signature,
+ X509Helpers::User1.signed_commit_base_data
+ ]
+ )
+ end
+
+ it 'returns an unverified signature' do
+ expect(signature).to have_attributes(
+ commit_sha: commit_sha,
+ project: project,
+ verification_status: 'unverified'
+ )
+ expect(signature.x509_certificate).to have_attributes(user1_certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(user1_issuer_attributes)
+ expect(signature.persisted?).to be_truthy
+ end
+ end
+
+ context 'invalid signature' do
+ let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: X509Helpers::User1.emails.first }
+
+ let!(:user) { create(:user, email: X509Helpers::User1.emails.first) }
+
+ before do
+ allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
+ .with(Gitlab::Git::Repository, commit_sha)
+ .and_return(
+ [
+          # Corrupt the signature
+ X509Helpers::User1.signed_commit_signature.tr('A', 'B'),
+ X509Helpers::User1.signed_commit_base_data
+ ]
+ )
+ end
+
+ it 'returns nil' do
+ expect(described_class.new(commit).signature).to be_nil
+ end
+ end
+
+ context 'invalid commit message' do
+ let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: X509Helpers::User1.emails.first }
+
+ let!(:user) { create(:user, email: X509Helpers::User1.emails.first) }
+
+ before do
+ allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
+ .with(Gitlab::Git::Repository, commit_sha)
+ .and_return(
+ [
+ X509Helpers::User1.signed_commit_signature,
+          # Corrupt the signed commit data
+ 'x'
+ ]
+ )
+ end
+
+ it 'returns nil' do
+ expect(described_class.new(commit).signature).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb
index 5dc54af99ce..db428bb65c4 100644
--- a/spec/lib/marginalia_spec.rb
+++ b/spec/lib/marginalia_spec.rb
@@ -18,7 +18,7 @@ describe 'Marginalia spec' do
end
end
- class MarginaliaTestMailer < BaseMailer
+ class MarginaliaTestMailer < ApplicationMailer
def first_user
User.first
end
diff --git a/spec/lib/microsoft_teams/notifier_spec.rb b/spec/lib/microsoft_teams/notifier_spec.rb
index 64ab8d85807..25538db159e 100644
--- a/spec/lib/microsoft_teams/notifier_spec.rb
+++ b/spec/lib/microsoft_teams/notifier_spec.rb
@@ -17,7 +17,7 @@ describe MicrosoftTeams::Notifier do
text: '[#1 Awesome issue](http://localhost/namespace2/gitlabhq/issues/1)',
image: 'http://someimage.com'
},
- attachments: 'please fix'
+ attachments: "[GitLab](https://gitlab.com)\n\n- _Ruby_\n- **Go**\n"
}
end
@@ -31,13 +31,7 @@ describe MicrosoftTeams::Notifier do
'activityImage' => 'http://someimage.com'
},
{
- 'title' => 'Details',
- 'facts' => [
- {
- 'name' => 'Attachments',
- 'value' => 'please fix'
- }
- ]
+ text: "[GitLab](https://gitlab.com)\n\n- _Ruby_\n- **Go**\n"
}
],
'title' => 'JohnDoe4/project2',
@@ -54,4 +48,14 @@ describe MicrosoftTeams::Notifier do
expect(subject.ping(options)).to be true
end
end
+
+ describe '#body' do
+ it 'returns Markdown-based body when HTML was passed' do
+ expect(subject.send(:body, options)).to eq(body.to_json)
+ end
+
+ it 'fails when empty Hash was passed' do
+ expect { subject.send(:body, {}) }.to raise_error(ArgumentError)
+ end
+ end
end
diff --git a/spec/lib/omni_auth/strategies/jwt_spec.rb b/spec/lib/omni_auth/strategies/jwt_spec.rb
index a8c565aa705..f2b682850e3 100644
--- a/spec/lib/omni_auth/strategies/jwt_spec.rb
+++ b/spec/lib/omni_auth/strategies/jwt_spec.rb
@@ -6,7 +6,7 @@ describe OmniAuth::Strategies::Jwt do
include Rack::Test::Methods
include DeviseHelpers
- context '#decoded' do
+ describe '#decoded' do
subject { described_class.new({}) }
let(:timestamp) { Time.now.to_i }
diff --git a/spec/lib/quality/kubernetes_client_spec.rb b/spec/lib/quality/kubernetes_client_spec.rb
index 6a62ef456c1..3a362dfccbf 100644
--- a/spec/lib/quality/kubernetes_client_spec.rb
+++ b/spec/lib/quality/kubernetes_client_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe Quality::KubernetesClient do
it 'calls kubectl to retrieve the resource names' do
expect(Gitlab::Popen).to receive(:popen_with_detail)
.with(["kubectl get #{described_class::RESOURCE_LIST} " +
- %(--namespace "#{namespace}" -o custom-columns=NAME:.metadata.name)])
+ %(--namespace "#{namespace}" -o name)])
.and_return(Gitlab::Popen::Result.new([], raw_resource_names_str, '', double(success?: true)))
expect(subject.__send__(:raw_resource_names)).to eq(raw_resource_names)
diff --git a/spec/lib/quality/test_level_spec.rb b/spec/lib/quality/test_level_spec.rb
index 13817bdcc72..757a003946b 100644
--- a/spec/lib/quality/test_level_spec.rb
+++ b/spec/lib/quality/test_level_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is unit' do
it 'returns a pattern' do
expect(subject.pattern(:unit))
- .to eq("spec/{bin,config,db,dependencies,factories,finders,frontend,graphql,haml_lint,helpers,initializers,javascripts,lib,models,policies,presenters,rack_servers,routing,rubocop,serializers,services,sidekiq,tasks,uploaders,validators,views,workers,elastic_integration}{,/**/}*_spec.rb")
+ .to eq("spec/{bin,config,db,dependencies,factories,finders,frontend,graphql,haml_lint,helpers,initializers,javascripts,lib,models,policies,presenters,rack_servers,replicators,routing,rubocop,serializers,services,sidekiq,support_specs,tasks,uploaders,validators,views,workers,elastic_integration}{,/**/}*_spec.rb")
end
end
@@ -82,7 +82,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is unit' do
it 'returns a regexp' do
expect(subject.regexp(:unit))
- .to eq(%r{spec/(bin|config|db|dependencies|factories|finders|frontend|graphql|haml_lint|helpers|initializers|javascripts|lib|models|policies|presenters|rack_servers|routing|rubocop|serializers|services|sidekiq|tasks|uploaders|validators|views|workers|elastic_integration)})
+ .to eq(%r{spec/(bin|config|db|dependencies|factories|finders|frontend|graphql|haml_lint|helpers|initializers|javascripts|lib|models|policies|presenters|rack_servers|replicators|routing|rubocop|serializers|services|sidekiq|support_specs|tasks|uploaders|validators|views|workers|elastic_integration)})
end
end
diff --git a/spec/lib/rspec_flaky/report_spec.rb b/spec/lib/rspec_flaky/report_spec.rb
index 6a98a7a4e6b..1f0eff83db0 100644
--- a/spec/lib/rspec_flaky/report_spec.rb
+++ b/spec/lib/rspec_flaky/report_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
describe RspecFlaky::Report, :aggregate_failures do
- let(:a_hundred_days) { 3600 * 24 * 100 }
+ let(:thirty_one_days) { 3600 * 24 * 31 }
let(:collection_hash) do
{
a: { example_id: 'spec/foo/bar_spec.rb:2' },
- b: { example_id: 'spec/foo/baz_spec.rb:3', first_flaky_at: (Time.now - a_hundred_days).to_s, last_flaky_at: (Time.now - a_hundred_days).to_s }
+ b: { example_id: 'spec/foo/baz_spec.rb:3', first_flaky_at: (Time.now - thirty_one_days).to_s, last_flaky_at: (Time.now - thirty_one_days).to_s }
}
end
let(:suite_flaky_example_report) do
@@ -109,7 +109,7 @@ describe RspecFlaky::Report, :aggregate_failures do
end
describe '#prune_outdated' do
- it 'returns a new collection without the examples older than 90 days by default' do
+ it 'returns a new collection without the examples older than 30 days by default' do
new_report = flaky_examples.to_h.dup.tap { |r| r.delete(:b) }
new_flaky_examples = report.prune_outdated
@@ -119,7 +119,7 @@ describe RspecFlaky::Report, :aggregate_failures do
end
it 'accepts a given number of days' do
- new_flaky_examples = report.prune_outdated(days: 200)
+ new_flaky_examples = report.prune_outdated(days: 32)
expect(new_flaky_examples.to_h).to eq(report.to_h)
end
diff --git a/spec/lib/safe_zip/entry_spec.rb b/spec/lib/safe_zip/entry_spec.rb
index 0974f732188..be3d46917ee 100644
--- a/spec/lib/safe_zip/entry_spec.rb
+++ b/spec/lib/safe_zip/entry_spec.rb
@@ -25,13 +25,13 @@ describe SafeZip::Entry do
FileUtils.remove_entry_secure(target_path)
end
- context '#path_dir' do
+ describe '#path_dir' do
subject { entry.path_dir }
it { is_expected.to eq(target_path + '/public/folder') }
end
- context '#exist?' do
+ describe '#exist?' do
subject { entry.exist? }
context 'when entry does not exist' do
diff --git a/spec/lib/safe_zip/extract_spec.rb b/spec/lib/safe_zip/extract_spec.rb
index 3b8c64c1c9f..d388135c3fb 100644
--- a/spec/lib/safe_zip/extract_spec.rb
+++ b/spec/lib/safe_zip/extract_spec.rb
@@ -12,7 +12,7 @@ describe SafeZip::Extract do
FileUtils.remove_entry_secure(target_path)
end
- context '#extract' do
+ describe '#extract' do
subject { object.extract(directories: directories, to: target_path) }
shared_examples 'extracts archive' do |param|
diff --git a/spec/lib/sentry/client/issue_link_spec.rb b/spec/lib/sentry/client/issue_link_spec.rb
index 35a69be6de5..3434e93365e 100644
--- a/spec/lib/sentry/client/issue_link_spec.rb
+++ b/spec/lib/sentry/client/issue_link_spec.rb
@@ -5,18 +5,18 @@ require 'spec_helper'
describe Sentry::Client::IssueLink do
include SentryClientHelpers
- let(:error_tracking_setting) { create(:project_error_tracking_setting, api_url: sentry_url) }
- let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
- let(:client) { error_tracking_setting.sentry_client }
+ let_it_be(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
+ let_it_be(:error_tracking_setting) { create(:project_error_tracking_setting, api_url: sentry_url) }
+ let_it_be(:issue) { create(:issue, project: error_tracking_setting.project) }
- let(:issue_link_sample_response) { JSON.parse(fixture_file('sentry/issue_link_sample_response.json')) }
+ let(:client) { error_tracking_setting.sentry_client }
+ let(:sentry_issue_id) { 11111111 }
describe '#create_issue_link' do
+ let(:sentry_issue_link_url) { "https://sentrytest.gitlab.com/api/0/groups/#{sentry_issue_id}/integrations/#{integration_id}/" }
let(:integration_id) { 44444 }
- let(:sentry_issue_id) { 11111111 }
- let(:issue) { create(:issue, project: error_tracking_setting.project) }
- let(:sentry_issue_link_url) { "https://sentrytest.gitlab.com/api/0/groups/#{sentry_issue_id}/integrations/#{integration_id}/" }
+ let(:issue_link_sample_response) { JSON.parse(fixture_file('sentry/global_integration_link_sample_response.json')) }
let(:sentry_api_response) { issue_link_sample_response }
let!(:sentry_api_request) { stub_sentry_request(sentry_issue_link_url, :put, body: sentry_api_response, status: 201) }
@@ -37,5 +37,29 @@ describe Sentry::Client::IssueLink do
it_behaves_like 'maps Sentry exceptions', :put
end
+
+ context 'when integration_id is not provided' do
+ let(:sentry_issue_link_url) { "https://sentrytest.gitlab.com/api/0/issues/#{sentry_issue_id}/plugins/gitlab/link/" }
+ let(:integration_id) { nil }
+
+ let(:issue_link_sample_response) { JSON.parse(fixture_file('sentry/plugin_link_sample_response.json')) }
+ let!(:sentry_api_request) { stub_sentry_request(sentry_issue_link_url, :post, body: sentry_api_response) }
+
+ it_behaves_like 'calls sentry api'
+
+ it { is_expected.to be_present }
+
+ context 'redirects' do
+ let(:sentry_api_url) { sentry_issue_link_url }
+
+ it_behaves_like 'no Sentry redirects', :post
+ end
+
+ context 'when exception is raised' do
+ let(:sentry_request_url) { sentry_issue_link_url }
+
+ it_behaves_like 'maps Sentry exceptions', :post
+ end
+ end
end
end
diff --git a/spec/lib/sentry/client/issue_spec.rb b/spec/lib/sentry/client/issue_spec.rb
index 061ebcfdc06..2762c5b5cb9 100644
--- a/spec/lib/sentry/client/issue_spec.rb
+++ b/spec/lib/sentry/client/issue_spec.rb
@@ -8,7 +8,7 @@ describe Sentry::Client::Issue do
let(:token) { 'test-token' }
let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0' }
let(:client) { Sentry::Client.new(sentry_url, token) }
- let(:issue_id) { 503504 }
+ let(:issue_id) { 11 }
describe '#list_issues' do
shared_examples 'issues have correct return type' do |klass|
@@ -243,7 +243,7 @@ describe Sentry::Client::Issue do
end
it 'has a correct external URL' do
- expect(subject.external_url).to eq('https://sentrytest.gitlab.com/api/0/issues/503504')
+ expect(subject.external_url).to eq('https://sentrytest.gitlab.com/api/0/issues/11')
end
it 'issue has a correct external base url' do
diff --git a/spec/mailers/emails/pipelines_spec.rb b/spec/mailers/emails/pipelines_spec.rb
index 8d4afe9f00f..ad1aa915fbb 100644
--- a/spec/mailers/emails/pipelines_spec.rb
+++ b/spec/mailers/emails/pipelines_spec.rb
@@ -19,6 +19,25 @@ describe Emails::Pipelines do
expect(subject).to have_body_text status_text
end
+ context 'when pipeline on master branch has a merge request' do
+ let(:pipeline) { create(:ci_pipeline, ref: 'master', sha: sha, project: project) }
+
+ let!(:merge_request) do
+ create(:merge_request, source_branch: 'master', target_branch: 'feature',
+ source_project: project, target_project: project)
+ end
+
+ it 'has correct information that there is no merge request link' do
+ expect(subject)
+ .to have_subject "#{project.name} | Pipeline ##{pipeline.id} has " \
+ "#{status} for #{pipeline.source_ref} | " \
+ "#{pipeline.short_sha}".to_s
+
+ expect(subject).to have_body_text pipeline.source_ref
+ expect(subject).to have_body_text status_text
+ end
+ end
+
context 'when pipeline for merge requests' do
let(:pipeline) { merge_request.all_pipelines.first }
@@ -28,7 +47,7 @@ describe Emails::Pipelines do
target_project: project)
end
- it 'has a correct information with merge request link' do
+ it 'has correct information that there is a merge request link' do
expect(subject)
.to have_subject "#{project.name} | Pipeline ##{pipeline.id} has " \
"#{status} for #{pipeline.source_ref} | " \
@@ -39,6 +58,27 @@ describe Emails::Pipelines do
expect(subject).not_to have_body_text pipeline.ref
end
end
+
+ context 'when branch pipeline is set to a merge request as a head pipeline' do
+ let(:pipeline) do
+ create(:ci_pipeline, project: project, ref: ref, sha: sha,
+ merge_requests_as_head_pipeline: [merge_request])
+ end
+
+ let(:merge_request) do
+ create(:merge_request, source_project: project, target_project: project)
+ end
+
+ it 'has correct information that there is a merge request link' do
+ expect(subject)
+ .to have_subject "#{project.name} | Pipeline ##{pipeline.id} has " \
+ "#{status} for #{pipeline.source_ref} | " \
+ "#{pipeline.short_sha} in !#{merge_request.iid}".to_s
+
+ expect(subject).to have_body_text merge_request.to_reference
+ expect(subject).to have_body_text pipeline.source_ref
+ end
+ end
end
describe '#pipeline_success_email' do
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index e4a7d62eb02..19b15a6c6e2 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -686,7 +686,7 @@ describe Notify do
let(:project_snippet) { create(:project_snippet, project: project) }
let(:project_snippet_note) { create(:note_on_project_snippet, project: project, noteable: project_snippet) }
- subject { described_class.note_project_snippet_email(project_snippet_note.author_id, project_snippet_note.id) }
+ subject { described_class.note_snippet_email(project_snippet_note.author_id, project_snippet_note.id) }
it_behaves_like 'appearance header and footer enabled'
it_behaves_like 'appearance header and footer not enabled'
@@ -696,10 +696,20 @@ describe Notify do
end
it_behaves_like 'a user cannot unsubscribe through footer link'
- it 'has the correct subject and body' do
+ it 'has the correct subject' do
is_expected.to have_referable_subject(project_snippet, reply: true)
+ end
+
+ it 'has the correct body' do
is_expected.to have_body_text project_snippet_note.note
end
+
+ it 'links to the project snippet' do
+ target_url = project_snippet_url(project,
+ project_snippet_note.noteable,
+ { anchor: "note_#{project_snippet_note.id}" })
+ is_expected.to have_body_text target_url
+ end
end
describe 'project was moved' do
@@ -1650,15 +1660,23 @@ describe Notify do
let(:personal_snippet) { create(:personal_snippet) }
let(:personal_snippet_note) { create(:note_on_personal_snippet, noteable: personal_snippet) }
- subject { described_class.note_personal_snippet_email(personal_snippet_note.author_id, personal_snippet_note.id) }
+ subject { described_class.note_snippet_email(personal_snippet_note.author_id, personal_snippet_note.id) }
it_behaves_like 'a user cannot unsubscribe through footer link'
it_behaves_like 'appearance header and footer enabled'
it_behaves_like 'appearance header and footer not enabled'
- it 'has the correct subject and body' do
+ it 'has the correct subject' do
is_expected.to have_referable_subject(personal_snippet, reply: true)
+ end
+
+ it 'has the correct body' do
is_expected.to have_body_text personal_snippet_note.note
end
+
+ it 'links to the personal snippet' do
+ target_url = gitlab_snippet_url(personal_snippet_note.noteable)
+ is_expected.to have_body_text target_url
+ end
end
end
diff --git a/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb b/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
index 53c176fc46f..a84cac0623b 100644
--- a/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
+++ b/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190924152703_migrate_issue_trackers_data.rb')
-describe MigrateIssueTrackersData, :migration, :sidekiq do
+describe MigrateIssueTrackersData, :migration do
let(:services) { table(:services) }
let(:migration_class) { Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData }
let(:migration_name) { migration_class.to_s.demodulize }
@@ -14,28 +14,28 @@ describe MigrateIssueTrackersData, :migration, :sidekiq do
}
end
let!(:jira_service) do
- services.create(id: 10, type: 'JiraService', properties: properties, category: 'issue_tracker')
+ services.create(type: 'JiraService', properties: properties, category: 'issue_tracker')
end
let!(:jira_service_nil) do
- services.create(id: 11, type: 'JiraService', properties: nil, category: 'issue_tracker')
+ services.create(type: 'JiraService', properties: nil, category: 'issue_tracker')
end
let!(:bugzilla_service) do
- services.create(id: 12, type: 'BugzillaService', properties: properties, category: 'issue_tracker')
+ services.create(type: 'BugzillaService', properties: properties, category: 'issue_tracker')
end
let!(:youtrack_service) do
- services.create(id: 13, type: 'YoutrackService', properties: properties, category: 'issue_tracker')
+ services.create(type: 'YoutrackService', properties: properties, category: 'issue_tracker')
end
let!(:youtrack_service_empty) do
- services.create(id: 14, type: 'YoutrackService', properties: '', category: 'issue_tracker')
+ services.create(type: 'YoutrackService', properties: '', category: 'issue_tracker')
end
let!(:gitlab_service) do
- services.create(id: 15, type: 'GitlabIssueTrackerService', properties: properties, category: 'issue_tracker')
+ services.create(type: 'GitlabIssueTrackerService', properties: properties, category: 'issue_tracker')
end
let!(:gitlab_service_empty) do
- services.create(id: 16, type: 'GitlabIssueTrackerService', properties: {}, category: 'issue_tracker')
+ services.create(type: 'GitlabIssueTrackerService', properties: {}, category: 'issue_tracker')
end
let!(:other_service) do
- services.create(id: 17, type: 'OtherService', properties: properties, category: 'other_category')
+ services.create(type: 'OtherService', properties: properties, category: 'other_category')
end
before do
diff --git a/spec/migrations/20200122123016_backfill_project_settings_spec.rb b/spec/migrations/20200122123016_backfill_project_settings_spec.rb
new file mode 100644
index 00000000000..fec18d6d52b
--- /dev/null
+++ b/spec/migrations/20200122123016_backfill_project_settings_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200122123016_backfill_project_settings.rb')
+
+describe BackfillProjectSettings, :migration, :sidekiq, schema: 20200114113341 do
+ let(:projects) { table(:projects) }
+ let(:namespace) { table(:namespaces).create(name: 'user', path: 'user') }
+ let(:project) { projects.create(namespace_id: namespace.id) }
+
+ describe '#up' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ projects.create(id: 1, namespace_id: namespace.id)
+ projects.create(id: 2, namespace_id: namespace.id)
+ projects.create(id: 3, namespace_id: namespace.id)
+ end
+
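+    # Three projects with BATCH_SIZE stubbed to 2 yield two delayed batches (ids 1-2 and 3-3).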
+ it 'schedules BackfillProjectSettings background jobs' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, 1, 2)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, 3, 3)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb b/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb
new file mode 100644
index 00000000000..b51708dd5cd
--- /dev/null
+++ b/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200130145430_reschedule_migrate_issue_trackers_data.rb')
+
+describe RescheduleMigrateIssueTrackersData, :migration do
+ let(:services) { table(:services) }
+ let(:migration_class) { Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData }
+ let(:migration_name) { migration_class.to_s.demodulize }
+
+ let(:properties) do
+ {
+ 'url' => 'http://example.com'
+ }
+ end
+ let!(:jira_service) do
+ services.create(id: 10, type: 'JiraService', properties: properties, category: 'issue_tracker')
+ end
+ let!(:jira_service_nil) do
+ services.create(id: 11, type: 'JiraService', properties: nil, category: 'issue_tracker')
+ end
+ let!(:bugzilla_service) do
+ services.create(id: 12, type: 'BugzillaService', properties: properties, category: 'issue_tracker')
+ end
+ let!(:youtrack_service) do
+ services.create(id: 13, type: 'YoutrackService', properties: properties, category: 'issue_tracker')
+ end
+ let!(:youtrack_service_empty) do
+ services.create(id: 14, type: 'YoutrackService', properties: '', category: 'issue_tracker')
+ end
+ let!(:gitlab_service) do
+ services.create(id: 15, type: 'GitlabIssueTrackerService', properties: properties, category: 'issue_tracker')
+ end
+ let!(:gitlab_service_empty) do
+ services.create(id: 16, type: 'GitlabIssueTrackerService', properties: {}, category: 'issue_tracker')
+ end
+ let!(:other_service) do
+ services.create(id: 17, type: 'OtherService', properties: properties, category: 'other_category')
+ end
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+ end
+
+ describe "#up" do
+ it 'schedules background migrations at correct time' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(migration_name).to be_scheduled_delayed_migration(3.minutes, jira_service.id, bugzilla_service.id)
+ expect(migration_name).to be_scheduled_delayed_migration(6.minutes, youtrack_service.id, gitlab_service.id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+ end
+
+ describe "#down" do
+ let(:issue_tracker_data) { table(:issue_tracker_data) }
+ let(:jira_tracker_data) { table(:jira_tracker_data) }
+
+ let!(:valid_issue_tracker_data) do
+ issue_tracker_data.create(
+ service_id: bugzilla_service.id,
+ encrypted_issues_url: 'http://url.com',
+ encrypted_issues_url_iv: 'somevalue'
+ )
+ end
+ let!(:invalid_issue_tracker_data) do
+ issue_tracker_data.create(
+ service_id: bugzilla_service.id,
+ encrypted_issues_url: 'http:url.com',
+ encrypted_issues_url_iv: nil
+ )
+ end
+ let!(:valid_jira_tracker_data) do
+ jira_tracker_data.create(
+ service_id: bugzilla_service.id,
+ encrypted_url: 'http://url.com',
+ encrypted_url_iv: 'somevalue'
+ )
+ end
+ let!(:invalid_jira_tracker_data) do
+ jira_tracker_data.create(
+ service_id: bugzilla_service.id,
+ encrypted_url: 'http://url.com',
+ encrypted_url_iv: nil
+ )
+ end
+
+ it 'removes the invalid jira tracker data' do
+ expect { described_class.new.down }.to change { jira_tracker_data.count }.from(2).to(1)
+
+ expect(jira_tracker_data.all).to eq([valid_jira_tracker_data])
+ end
+
+ it 'removes the invalid issue tracker data' do
+ expect { described_class.new.down }.to change { issue_tracker_data.count }.from(2).to(1)
+
+ expect(issue_tracker_data.all).to eq([valid_issue_tracker_data])
+ end
+ end
+end
diff --git a/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb b/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb
index f4155eab1bf..4de43e21ed3 100644
--- a/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb
+++ b/spec/migrations/active_record/schedule_set_confidential_note_events_on_services_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180122154930_schedule_set_confidential_note_events_on_services.rb')
-describe ScheduleSetConfidentialNoteEventsOnServices, :migration, :sidekiq do
+describe ScheduleSetConfidentialNoteEventsOnServices, :migration do
let(:services_table) { table(:services) }
let(:migration_class) { Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnServices }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/migrations/add_deploy_token_type_to_deploy_tokens_spec.rb b/spec/migrations/add_deploy_token_type_to_deploy_tokens_spec.rb
new file mode 100644
index 00000000000..fb8213a6bd6
--- /dev/null
+++ b/spec/migrations/add_deploy_token_type_to_deploy_tokens_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200122161638_add_deploy_token_type_to_deploy_tokens.rb')
+
+describe AddDeployTokenTypeToDeployTokens, :migration do
+ let(:deploy_tokens) { table(:deploy_tokens) }
+ let(:deploy_token) do
+ deploy_tokens.create(name: 'token_test',
+ username: 'gitlab+deploy-token-1',
+ token_encrypted: 'dr8rPXwM+Mbs2p3Bg1+gpnXqrnH/wu6vaHdcc7A3isPR67WB',
+ read_repository: true,
+ expires_at: Time.now + 1.year)
+ end
+
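+  # Existing tokens are backfilled with deploy_token_type 2, which corresponds to project deploy tokens.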
+ it 'updates the deploy_token_type column to 2' do
+ expect(deploy_token).not_to respond_to(:deploy_token_type)
+
+ migrate!
+
+ deploy_token.reload
+ expect(deploy_token.deploy_token_type).to eq(2)
+ end
+end
diff --git a/spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb b/spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb
index ae53b4e6443..17342dcaab3 100644
--- a/spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb
+++ b/spec/migrations/assure_commits_count_for_merge_request_diff_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180425131009_assure_commits_count_for_merge_request_diff.rb')
-describe AssureCommitsCountForMergeRequestDiff, :migration, :sidekiq, :redis do
+describe AssureCommitsCountForMergeRequestDiff, :migration, :redis do
let(:migration) { spy('migration') }
before do
diff --git a/spec/migrations/backfill_operations_feature_flags_iid_spec.rb b/spec/migrations/backfill_operations_feature_flags_iid_spec.rb
new file mode 100644
index 00000000000..f7a223e794a
--- /dev/null
+++ b/spec/migrations/backfill_operations_feature_flags_iid_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200117194850_backfill_operations_feature_flags_iid.rb')
+
+describe BackfillOperationsFeatureFlagsIid, :migration do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:flags) { table(:operations_feature_flags) }
+
+ def setup
+ namespace = namespaces.create!(name: 'foo', path: 'foo')
+ project = projects.create!(namespace_id: namespace.id)
+
+ project
+ end
+
+ it 'migrates successfully when there are no flags in the database' do
+ setup
+
+ disable_migrations_output { migrate! }
+
+ expect(flags.count).to eq(0)
+ end
+
+ it 'migrates successfully with a row in the table in both FOSS and EE' do
+ project = setup
+ flags.create!(project_id: project.id, active: true, name: 'test_flag')
+
+ disable_migrations_output { migrate! }
+
+ expect(flags.count).to eq(1)
+ end
+end
diff --git a/spec/migrations/cleanup_build_stage_migration_spec.rb b/spec/migrations/cleanup_build_stage_migration_spec.rb
index 532212810c8..2142b7b5275 100644
--- a/spec/migrations/cleanup_build_stage_migration_spec.rb
+++ b/spec/migrations/cleanup_build_stage_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180420010616_cleanup_build_stage_migration.rb')
-describe CleanupBuildStageMigration, :migration, :sidekiq, :redis do
+describe CleanupBuildStageMigration, :migration, :redis do
let(:migration) { spy('migration') }
before do
diff --git a/spec/migrations/cleanup_legacy_artifact_migration_spec.rb b/spec/migrations/cleanup_legacy_artifact_migration_spec.rb
index dc269d32e5a..0ab7d7ec05f 100644
--- a/spec/migrations/cleanup_legacy_artifact_migration_spec.rb
+++ b/spec/migrations/cleanup_legacy_artifact_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190104182041_cleanup_legacy_artifact_migration.rb')
-describe CleanupLegacyArtifactMigration, :migration, :sidekiq, :redis do
+describe CleanupLegacyArtifactMigration, :migration, :redis do
let(:migration) { spy('migration') }
context 'when still legacy artifacts exist' do
diff --git a/spec/migrations/cleanup_stages_position_migration_spec.rb b/spec/migrations/cleanup_stages_position_migration_spec.rb
index 649fda1bb4e..c2077629919 100644
--- a/spec/migrations/cleanup_stages_position_migration_spec.rb
+++ b/spec/migrations/cleanup_stages_position_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180604123514_cleanup_stages_position_migration.rb')
-describe CleanupStagesPositionMigration, :migration, :sidekiq, :redis do
+describe CleanupStagesPositionMigration, :migration, :redis do
let(:migration) { spy('migration') }
before do
diff --git a/spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb b/spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb
new file mode 100644
index 00000000000..b9c6b489aca
--- /dev/null
+++ b/spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200117194900_delete_internal_ids_where_feature_flags_usage')
+
+describe DeleteInternalIdsWhereFeatureFlagsUsage, :migration do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:internal_ids) { table(:internal_ids) }
+
+ def setup
+ namespace = namespaces.create!(name: 'foo', path: 'foo')
+ project = projects.create!(namespace_id: namespace.id)
+
+ project
+ end
+
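+  # internal_ids usage values exercised below: 6 for feature flags, 0 for issues, 1 for merge requests.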
+ it 'deletes feature flag rows from the internal_ids table' do
+ project = setup
+ internal_ids.create!(project_id: project.id, usage: 6, last_value: 1)
+
+ disable_migrations_output { migrate! }
+
+ expect(internal_ids.count).to eq(0)
+ end
+
+ it 'does not delete issue rows from the internal_ids table' do
+ project = setup
+ internal_ids.create!(project_id: project.id, usage: 0, last_value: 1)
+
+ disable_migrations_output { migrate! }
+
+ expect(internal_ids.count).to eq(1)
+ end
+
+ it 'does not delete merge request rows from the internal_ids table' do
+ project = setup
+ internal_ids.create!(project_id: project.id, usage: 1, last_value: 1)
+
+ disable_migrations_output { migrate! }
+
+ expect(internal_ids.count).to eq(1)
+ end
+end
diff --git a/spec/migrations/drop_background_migration_jobs_spec.rb b/spec/migrations/drop_background_migration_jobs_spec.rb
new file mode 100644
index 00000000000..ac76e897f6c
--- /dev/null
+++ b/spec/migrations/drop_background_migration_jobs_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200116051619_drop_background_migration_jobs.rb')
+
+describe DropBackgroundMigrationJobs, :sidekiq, :redis, :migration, schema: 2020_01_16_051619 do
+ subject(:migration) { described_class.new }
+
+ describe '#up' do
+ context 'there are only affected jobs on the queue' do
+ it 'removes enqueued ActivatePrometheusServicesForSharedClusterApplications background jobs' do
+        Sidekiq::Testing.disable! do # Sidekiq's API has no testing mode, see https://github.com/mperham/sidekiq/wiki/testing#api
+ Sidekiq::Client.push('queue' => described_class::QUEUE, 'class' => ::BackgroundMigrationWorker, 'args' => [described_class::DROPPED_JOB_CLASS, 1])
+
+ expect { migration.up }.to change { Sidekiq::Queue.new(described_class::QUEUE).size }.from(1).to(0)
+ end
+ end
+ end
+
+ context "there aren't any affected jobs on the queue" do
+ it 'skips other enqueued jobs' do
+ Sidekiq::Testing.disable! do
+ Sidekiq::Client.push('queue' => described_class::QUEUE, 'class' => ::BackgroundMigrationWorker, 'args' => ['SomeOtherClass', 1])
+
+ expect { migration.up }.not_to change { Sidekiq::Queue.new(described_class::QUEUE).size }
+ end
+ end
+ end
+
+ context "there are multiple types of jobs on the queue" do
+    it 'deletes only the affected jobs' do
+ Sidekiq::Testing.disable! do
+ queue = Sidekiq::Queue.new(described_class::QUEUE)
+ # this job will be deleted
+ Sidekiq::Client.push('queue' => described_class::QUEUE, 'class' => ::BackgroundMigrationWorker, 'args' => [described_class::DROPPED_JOB_CLASS, 1])
+          # these jobs will be skipped
+          skipped_jobs_args = [['SomeOtherClass', 1], [described_class::DROPPED_JOB_CLASS, 'wrong id type'], [described_class::DROPPED_JOB_CLASS, 1, 'some extra argument']]
+ skipped_jobs_args.each do |args|
+ Sidekiq::Client.push('queue' => described_class::QUEUE, 'class' => ::BackgroundMigrationWorker, 'args' => args)
+ end
+
+ migration.up
+
+ expect(queue.size).to be 3
+ expect(queue.map(&:args)).to match_array skipped_jobs_args
+ end
+ end
+ end
+
+ context "other queues" do
+ it 'does not modify them' do
+ Sidekiq::Testing.disable! do
+ Sidekiq::Client.push('queue' => 'other', 'class' => ::BackgroundMigrationWorker, 'args' => ['SomeOtherClass', 1])
+ Sidekiq::Client.push('queue' => 'other', 'class' => ::BackgroundMigrationWorker, 'args' => [described_class::DROPPED_JOB_CLASS, 1])
+
+ expect { migration.up }.not_to change { Sidekiq::Queue.new('other').size }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/enqueue_reset_merge_status_second_run_spec.rb b/spec/migrations/enqueue_reset_merge_status_second_run_spec.rb
index 3c880c6f5fd..bdc248f2cf2 100644
--- a/spec/migrations/enqueue_reset_merge_status_second_run_spec.rb
+++ b/spec/migrations/enqueue_reset_merge_status_second_run_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190620112608_enqueue_reset_merge_status_second_run.rb')
-describe EnqueueResetMergeStatusSecondRun, :migration, :sidekiq do
+describe EnqueueResetMergeStatusSecondRun, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
diff --git a/spec/migrations/enqueue_reset_merge_status_spec.rb b/spec/migrations/enqueue_reset_merge_status_spec.rb
index a6dd2e08079..4b312a3bc62 100644
--- a/spec/migrations/enqueue_reset_merge_status_spec.rb
+++ b/spec/migrations/enqueue_reset_merge_status_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190528180441_enqueue_reset_merge_status.rb')
-describe EnqueueResetMergeStatus, :migration, :sidekiq do
+describe EnqueueResetMergeStatus, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
diff --git a/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb b/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb
index 327fb09ffec..8efaab871a1 100644
--- a/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb
+++ b/spec/migrations/enqueue_verify_pages_domain_workers_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180216121030_enqueue_verify_pages_domain_workers')
-describe EnqueueVerifyPagesDomainWorkers, :sidekiq, :migration do
+describe EnqueueVerifyPagesDomainWorkers, :migration do
around do |example|
Sidekiq::Testing.fake! do
example.run
diff --git a/spec/migrations/fix_projects_without_project_feature_spec.rb b/spec/migrations/fix_projects_without_project_feature_spec.rb
new file mode 100644
index 00000000000..6e0345da078
--- /dev/null
+++ b/spec/migrations/fix_projects_without_project_feature_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200127111840_fix_projects_without_project_feature.rb')
+
+describe FixProjectsWithoutProjectFeature, :migration do
+ let(:namespace) { table(:namespaces).create(name: 'gitlab', path: 'gitlab-org') }
+
+ let!(:projects) do
+ [
+ table(:projects).create(namespace_id: namespace.id, name: 'foo 1'),
+ table(:projects).create(namespace_id: namespace.id, name: 'foo 2'),
+ table(:projects).create(namespace_id: namespace.id, name: 'foo 3')
+ ]
+ end
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ end
+
+ around do |example|
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ example.call
+ end
+ end
+ end
+
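+  # With BATCH_SIZE stubbed to 2, the three projects above are scheduled in two batches.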
+ it 'schedules jobs for ranges of projects' do
+ migrate!
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(2.minutes, projects[0].id, projects[1].id)
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(4.minutes, projects[2].id, projects[2].id)
+ end
+
+ it 'schedules jobs according to the configured batch size' do
+ expect { migrate! }.to change { BackgroundMigrationWorker.jobs.size }.by(2)
+ end
+end
diff --git a/spec/migrations/migrate_cluster_configure_worker_sidekiq_queue_spec.rb b/spec/migrations/migrate_cluster_configure_worker_sidekiq_queue_spec.rb
index 4e7438fc182..a3ed9b722d5 100644
--- a/spec/migrations/migrate_cluster_configure_worker_sidekiq_queue_spec.rb
+++ b/spec/migrations/migrate_cluster_configure_worker_sidekiq_queue_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181219145520_migrate_cluster_configure_worker_sidekiq_queue.rb')
-describe MigrateClusterConfigureWorkerSidekiqQueue, :sidekiq, :redis do
+describe MigrateClusterConfigureWorkerSidekiqQueue, :redis do
include Gitlab::Database::MigrationHelpers
include StubWorker
diff --git a/spec/migrations/migrate_create_commit_signature_worker_sidekiq_queue_spec.rb b/spec/migrations/migrate_create_commit_signature_worker_sidekiq_queue_spec.rb
new file mode 100644
index 00000000000..3d7803b7563
--- /dev/null
+++ b/spec/migrations/migrate_create_commit_signature_worker_sidekiq_queue_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200206091544_migrate_create_commit_signature_worker_sidekiq_queue.rb')
+
+describe MigrateCreateCommitSignatureWorkerSidekiqQueue, :sidekiq, :redis do
+ include Gitlab::Database::MigrationHelpers
+ include StubWorker
+
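+  # The up migration moves queued jobs from create_gpg_signature to create_commit_signature; down reverses the move.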
+ context 'when there are jobs in the queue' do
+ it 'correctly migrates queue when migrating up' do
+ Sidekiq::Testing.disable! do
+ stub_worker(queue: 'create_commit_signature').perform_async('Something', [1])
+ stub_worker(queue: 'create_gpg_signature').perform_async('Something', [1])
+
+ described_class.new.up
+
+ expect(sidekiq_queue_length('create_gpg_signature')).to eq 0
+ expect(sidekiq_queue_length('create_commit_signature')).to eq 2
+ end
+ end
+
+ it 'correctly migrates queue when migrating down' do
+ Sidekiq::Testing.disable! do
+ stub_worker(queue: 'create_gpg_signature').perform_async('Something', [1])
+
+ described_class.new.down
+
+ expect(sidekiq_queue_length('create_gpg_signature')).to eq 1
+ expect(sidekiq_queue_length('create_commit_signature')).to eq 0
+ end
+ end
+ end
+
+ context 'when there are no jobs in the queues' do
+ it 'does not raise error when migrating up' do
+ expect { described_class.new.up }.not_to raise_error
+ end
+
+ it 'does not raise error when migrating down' do
+ expect { described_class.new.down }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/migrations/migrate_create_trace_artifact_sidekiq_queue_spec.rb b/spec/migrations/migrate_create_trace_artifact_sidekiq_queue_spec.rb
index d54aac50dc8..6e0bd487d1f 100644
--- a/spec/migrations/migrate_create_trace_artifact_sidekiq_queue_spec.rb
+++ b/spec/migrations/migrate_create_trace_artifact_sidekiq_queue_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180306074045_migrate_create_trace_artifact_sidekiq_queue.rb')
-describe MigrateCreateTraceArtifactSidekiqQueue, :sidekiq, :redis do
+describe MigrateCreateTraceArtifactSidekiqQueue, :redis do
include Gitlab::Database::MigrationHelpers
include StubWorker
diff --git a/spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb b/spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb
index 5e25d1aed82..deeea74bd3b 100644
--- a/spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb
+++ b/spec/migrations/migrate_discussion_id_on_promoted_epics_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190715193142_migrate_discussion_id_on_promoted_epics.rb')
-describe MigrateDiscussionIdOnPromotedEpics, :migration, :sidekiq do
+describe MigrateDiscussionIdOnPromotedEpics, :migration do
let(:migration_class) { described_class::MIGRATION }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb b/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb
index 98bbe0ed5a2..d6259023c01 100644
--- a/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb
+++ b/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180816161409_migrate_legacy_artifacts_to_job_artifacts.rb')
-describe MigrateLegacyArtifactsToJobArtifacts, :migration, :sidekiq do
+describe MigrateLegacyArtifactsToJobArtifacts, :migration do
let(:migration_class) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/migrations/migrate_object_storage_upload_sidekiq_queue_spec.rb b/spec/migrations/migrate_object_storage_upload_sidekiq_queue_spec.rb
index 6a188f34854..aa4951b2f14 100644
--- a/spec/migrations/migrate_object_storage_upload_sidekiq_queue_spec.rb
+++ b/spec/migrations/migrate_object_storage_upload_sidekiq_queue_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180603190921_migrate_object_storage_upload_sidekiq_queue.rb')
-describe MigrateObjectStorageUploadSidekiqQueue, :sidekiq, :redis do
+describe MigrateObjectStorageUploadSidekiqQueue, :redis do
include Gitlab::Database::MigrationHelpers
include StubWorker
diff --git a/spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb b/spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb
index d8f39ce4e71..557eb52632f 100644
--- a/spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb
+++ b/spec/migrations/migrate_storage_migrator_sidekiq_queue_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190124200344_migrate_storage_migrator_sidekiq_queue.rb')
-describe MigrateStorageMigratorSidekiqQueue, :sidekiq, :redis do
+describe MigrateStorageMigratorSidekiqQueue, :redis do
include Gitlab::Database::MigrationHelpers
include StubWorker
diff --git a/spec/migrations/migrate_store_security_reports_sidekiq_queue_spec.rb b/spec/migrations/migrate_store_security_reports_sidekiq_queue_spec.rb
new file mode 100644
index 00000000000..ddffa036af1
--- /dev/null
+++ b/spec/migrations/migrate_store_security_reports_sidekiq_queue_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200213220159_migrate_store_security_reports_sidekiq_queue.rb')
+
+describe MigrateStoreSecurityReportsSidekiqQueue, :redis do
+ include Gitlab::Database::MigrationHelpers
+ include StubWorker
+
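+  # Jobs move from the pipeline_default queue namespace to security_scans on up, and back on down.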
+ context 'when there are jobs in the queue' do
+ it 'migrates queue when migrating up' do
+ Sidekiq::Testing.disable! do
+ stub_worker(queue: 'pipeline_default:store_security_reports').perform_async(1, 5)
+
+ described_class.new.up
+
+ expect(sidekiq_queue_length('pipeline_default:store_security_reports')).to eq 0
+ expect(sidekiq_queue_length('security_scans:store_security_reports')).to eq 1
+ end
+ end
+
+ it 'migrates queue when migrating down' do
+ Sidekiq::Testing.disable! do
+ stub_worker(queue: 'security_scans:store_security_reports').perform_async(1, 5)
+
+ described_class.new.down
+
+ expect(sidekiq_queue_length('pipeline_default:store_security_reports')).to eq 1
+ expect(sidekiq_queue_length('security_scans:store_security_reports')).to eq 0
+ end
+ end
+ end
+end
diff --git a/spec/migrations/migrate_sync_security_reports_to_report_approval_rules_sidekiq_queue_spec.rb b/spec/migrations/migrate_sync_security_reports_to_report_approval_rules_sidekiq_queue_spec.rb
new file mode 100644
index 00000000000..6dfaff06ddb
--- /dev/null
+++ b/spec/migrations/migrate_sync_security_reports_to_report_approval_rules_sidekiq_queue_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200213220211_migrate_sync_security_reports_to_report_approval_rules_sidekiq_queue.rb')
+
+describe MigrateSyncSecurityReportsToReportApprovalRulesSidekiqQueue, :redis do
+ include Gitlab::Database::MigrationHelpers
+ include StubWorker
+
+ context 'when there are jobs in the queue' do
+ it 'migrates queue when migrating up' do
+ Sidekiq::Testing.disable! do
+ stub_worker(queue: 'pipeline_default:sync_security_reports_to_report_approval_rules').perform_async(1, 5)
+
+ described_class.new.up
+
+ expect(sidekiq_queue_length('pipeline_default:sync_security_reports_to_report_approval_rules')).to eq 0
+ expect(sidekiq_queue_length('security_scans:sync_security_reports_to_report_approval_rules')).to eq 1
+ end
+ end
+
+ it 'migrates queue when migrating down' do
+ Sidekiq::Testing.disable! do
+ stub_worker(queue: 'security_scans:sync_security_reports_to_report_approval_rules').perform_async(1, 5)
+
+ described_class.new.down
+
+ expect(sidekiq_queue_length('pipeline_default:sync_security_reports_to_report_approval_rules')).to eq 1
+ expect(sidekiq_queue_length('security_scans:sync_security_reports_to_report_approval_rules')).to eq 0
+ end
+ end
+ end
+end
diff --git a/spec/migrations/migrate_update_head_pipeline_for_merge_request_sidekiq_queue_spec.rb b/spec/migrations/migrate_update_head_pipeline_for_merge_request_sidekiq_queue_spec.rb
index e517eef1320..204c38b3fc5 100644
--- a/spec/migrations/migrate_update_head_pipeline_for_merge_request_sidekiq_queue_spec.rb
+++ b/spec/migrations/migrate_update_head_pipeline_for_merge_request_sidekiq_queue_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180307012445_migrate_update_head_pipeline_for_merge_request_sidekiq_queue.rb')
-describe MigrateUpdateHeadPipelineForMergeRequestSidekiqQueue, :sidekiq, :redis do
+describe MigrateUpdateHeadPipelineForMergeRequestSidekiqQueue, :redis do
include Gitlab::Database::MigrationHelpers
include StubWorker
diff --git a/spec/migrations/patch_prometheus_services_for_shared_cluster_applications_spec.rb b/spec/migrations/patch_prometheus_services_for_shared_cluster_applications_spec.rb
deleted file mode 100644
index 83f994c2a94..00000000000
--- a/spec/migrations/patch_prometheus_services_for_shared_cluster_applications_spec.rb
+++ /dev/null
@@ -1,134 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20200114113341_patch_prometheus_services_for_shared_cluster_applications.rb')
-
-describe PatchPrometheusServicesForSharedClusterApplications, :migration, :sidekiq do
- include MigrationHelpers::PrometheusServiceHelpers
-
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:services) { table(:services) }
- let(:clusters) { table(:clusters) }
- let(:cluster_groups) { table(:cluster_groups) }
- let(:clusters_applications_prometheus) { table(:clusters_applications_prometheus) }
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
-
- let(:application_statuses) do
- {
- errored: -1,
- installed: 3,
- updated: 5
- }
- end
-
- let(:cluster_types) do
- {
- instance_type: 1,
- group_type: 2
- }
- end
-
- describe '#up' do
- let!(:project_with_missing_service) { projects.create!(name: 'gitlab', path: 'gitlab-ce', namespace_id: namespace.id) }
- let(:project_with_inactive_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
- let(:project_with_active_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
- let(:project_with_manual_active_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
- let(:project_with_manual_inactive_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
- let(:project_with_active_not_prometheus_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
- let(:project_with_inactive_not_prometheus_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
-
- before do
- services.create(service_params_for(project_with_inactive_service.id, active: false))
- services.create(service_params_for(project_with_active_service.id, active: true))
- services.create(service_params_for(project_with_active_not_prometheus_service.id, active: true, type: 'other'))
- services.create(service_params_for(project_with_inactive_not_prometheus_service.id, active: false, type: 'other'))
- services.create(service_params_for(project_with_manual_inactive_service.id, active: false, properties: { some: 'data' }.to_json))
- services.create(service_params_for(project_with_manual_active_service.id, active: true, properties: { some: 'data' }.to_json))
- end
-
- shared_examples 'patch prometheus services post migration' do
- context 'prometheus application is installed on the cluster' do
- it 'schedules a background migration' do
- clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:installed], version: '123')
-
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- background_migrations = [["ActivatePrometheusServicesForSharedClusterApplications", project_with_missing_service.id],
- ["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_service.id],
- ["ActivatePrometheusServicesForSharedClusterApplications", project_with_active_not_prometheus_service.id],
- ["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_not_prometheus_service.id]]
-
- migrate!
-
- enqueued_migrations = BackgroundMigrationWorker.jobs.map { |job| job['args'] }
- expect(enqueued_migrations).to match_array(background_migrations)
- end
- end
- end
- end
-
- context 'prometheus application was recently updated on the cluster' do
- it 'schedules a background migration' do
- clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:updated], version: '123')
-
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- background_migrations = [["ActivatePrometheusServicesForSharedClusterApplications", project_with_missing_service.id],
- ["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_service.id],
- ["ActivatePrometheusServicesForSharedClusterApplications", project_with_active_not_prometheus_service.id],
- ["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_not_prometheus_service.id]]
-
- migrate!
-
- enqueued_migrations = BackgroundMigrationWorker.jobs.map { |job| job['args'] }
- expect(enqueued_migrations).to match_array(background_migrations)
- end
- end
- end
- end
-
- context 'prometheus application failed to install on the cluster' do
- it 'does not schedule a background migration' do
- clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:errored], version: '123')
-
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to eq 0
- end
- end
- end
- end
-
- context 'prometheus application is NOT installed on the cluster' do
- it 'does not schedule a background migration' do
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to eq 0
- end
- end
- end
- end
- end
-
- context 'Cluster is group_type' do
- let(:cluster) { clusters.create(name: 'cluster', cluster_type: cluster_types[:group_type]) }
-
- before do
- cluster_groups.create(group_id: namespace.id, cluster_id: cluster.id)
- end
-
- it_behaves_like 'patch prometheus services post migration'
- end
-
- context 'Cluster is instance_type' do
- let(:cluster) { clusters.create(name: 'cluster', cluster_type: cluster_types[:instance_type]) }
-
- it_behaves_like 'patch prometheus services post migration'
- end
- end
-end
diff --git a/spec/migrations/remove_packages_deprecated_dependencies_spec.rb b/spec/migrations/remove_packages_deprecated_dependencies_spec.rb
new file mode 100644
index 00000000000..0b7efe371a6
--- /dev/null
+++ b/spec/migrations/remove_packages_deprecated_dependencies_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200210135504_remove_packages_deprecated_dependencies.rb')
+
+describe RemovePackagesDeprecatedDependencies, :migration do
+ let(:projects) { table(:projects) }
+ let(:packages) { table(:packages_packages) }
+ let(:dependency_links) { table(:packages_dependency_links) }
+ let(:dependencies) { table(:packages_dependencies) }
+
+ before do
+ projects.create!(id: 123, name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: 1)
+ packages.create!(id: 1, name: 'package', version: '1.0.0', package_type: 4, project_id: 123)
+ 5.times do |i|
+ dependencies.create!(id: i, name: "pkg_dependency_#{i}", version_pattern: '~1.0.0')
+ dependency_links.create!(package_id: 1, dependency_id: i, dependency_type: 5)
+ end
+ dependencies.create!(id: 10, name: 'valid_pkg_dependency', version_pattern: '~2.5.0')
+ dependency_links.create!(package_id: 1, dependency_id: 10, dependency_type: 1)
+ end
+
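+  # Five links with dependency_type 5 are created above; only the single type 1 link should remain.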
+ it 'removes all dependency links with type 5' do
+ expect(dependency_links.count).to eq 6
+
+ migrate!
+
+ expect(dependency_links.count).to eq 1
+ end
+end
diff --git a/spec/migrations/reschedule_builds_stages_migration_spec.rb b/spec/migrations/reschedule_builds_stages_migration_spec.rb
index f9707d8f90b..8127934afab 100644
--- a/spec/migrations/reschedule_builds_stages_migration_spec.rb
+++ b/spec/migrations/reschedule_builds_stages_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180405101928_reschedule_builds_stages_migration')
-describe RescheduleBuildsStagesMigration, :sidekiq, :migration do
+describe RescheduleBuildsStagesMigration, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
diff --git a/spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb b/spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb
index a62650c44fb..0e34e63fcc1 100644
--- a/spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb
+++ b/spec/migrations/reschedule_commits_count_for_merge_request_diff_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180309121820_reschedule_commits_count_for_merge_request_diff')
-describe RescheduleCommitsCountForMergeRequestDiff, :migration, :sidekiq do
+describe RescheduleCommitsCountForMergeRequestDiff, :migration do
let(:merge_request_diffs) { table(:merge_request_diffs) }
let(:merge_requests) { table(:merge_requests) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/save_instance_administrators_group_id_spec.rb b/spec/migrations/save_instance_administrators_group_id_spec.rb
new file mode 100644
index 00000000000..eab41017480
--- /dev/null
+++ b/spec/migrations/save_instance_administrators_group_id_spec.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200210092405_save_instance_administrators_group_id')
+
+describe SaveInstanceAdministratorsGroupId, :migration do
+ let(:application_settings_table) { table(:application_settings) }
+
+ let(:instance_administrators_group) do
+ table(:namespaces).create!(
+ id: 1,
+ name: 'GitLab Instance Administrators',
+ path: 'gitlab-instance-administrators-random',
+ type: 'Group'
+ )
+ end
+
+ let(:self_monitoring_project) do
+ table(:projects).create!(
+ id: 2,
+ name: 'Self Monitoring',
+ path: 'self_monitoring',
+ namespace_id: instance_administrators_group.id
+ )
+ end
+
+ context 'when project ID is saved but group ID is not' do
+ let(:application_settings) do
+ application_settings_table.create!(instance_administration_project_id: self_monitoring_project.id)
+ end
+
+ it 'saves instance administrators group ID' do
+ expect(application_settings.instance_administration_project_id).to eq(self_monitoring_project.id)
+ expect(application_settings.instance_administrators_group_id).to be_nil
+
+ migrate!
+
+ expect(application_settings.reload.instance_administrators_group_id).to eq(instance_administrators_group.id)
+ expect(application_settings.instance_administration_project_id).to eq(self_monitoring_project.id)
+ end
+ end
+
+ context 'when group ID is saved but project ID is not' do
+ let(:application_settings) do
+ application_settings_table.create!(instance_administrators_group_id: instance_administrators_group.id)
+ end
+
+ it 'does not make changes' do
+ expect(application_settings.instance_administrators_group_id).to eq(instance_administrators_group.id)
+ expect(application_settings.instance_administration_project_id).to be_nil
+
+ migrate!
+
+ expect(application_settings.reload.instance_administrators_group_id).to eq(instance_administrators_group.id)
+ expect(application_settings.instance_administration_project_id).to be_nil
+ end
+ end
+
+ context 'when group ID and project ID are both saved' do
+ let(:application_settings) do
+ application_settings_table.create!(
+ instance_administrators_group_id: instance_administrators_group.id,
+ instance_administration_project_id: self_monitoring_project.id
+ )
+ end
+
+ it 'does not make changes' do
+ expect(application_settings.instance_administrators_group_id).to eq(instance_administrators_group.id)
+ expect(application_settings.instance_administration_project_id).to eq(self_monitoring_project.id)
+
+ migrate!
+
+ expect(application_settings.reload.instance_administrators_group_id).to eq(instance_administrators_group.id)
+ expect(application_settings.instance_administration_project_id).to eq(self_monitoring_project.id)
+ end
+ end
+
+ context 'when neither group ID nor project ID is saved' do
+ let(:application_settings) do
+ application_settings_table.create!
+ end
+
+ it 'does not make changes' do
+ expect(application_settings.instance_administrators_group_id).to be_nil
+ expect(application_settings.instance_administration_project_id).to be_nil
+
+ migrate!
+
+ expect(application_settings.reload.instance_administrators_group_id).to be_nil
+ expect(application_settings.instance_administration_project_id).to be_nil
+ end
+ end
+
+ context 'when application_settings table has no rows' do
+ it 'does not fail' do
+ migrate!
+ end
+ end
+end
diff --git a/spec/migrations/schedule_digest_personal_access_tokens_spec.rb b/spec/migrations/schedule_digest_personal_access_tokens_spec.rb
index ff859d07ff2..915397f0f4f 100644
--- a/spec/migrations/schedule_digest_personal_access_tokens_spec.rb
+++ b/spec/migrations/schedule_digest_personal_access_tokens_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180913142237_schedule_digest_personal_access_tokens.rb')
-describe ScheduleDigestPersonalAccessTokens, :migration, :sidekiq do
+describe ScheduleDigestPersonalAccessTokens, :migration do
let(:personal_access_tokens) { table(:personal_access_tokens) }
let(:users) { table(:users) }
diff --git a/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb b/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb
index a0241f1d20c..43333ab9837 100644
--- a/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb
+++ b/spec/migrations/schedule_fill_valid_time_for_pages_domain_certificates_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190524073827_schedule_fill_valid_time_for_pages_domain_certificates.rb')
-describe ScheduleFillValidTimeForPagesDomainCertificates, :migration, :sidekiq do
+describe ScheduleFillValidTimeForPagesDomainCertificates, :migration do
let(:migration_class) { described_class::MIGRATION }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/migrations/schedule_pages_metadata_migration_spec.rb b/spec/migrations/schedule_pages_metadata_migration_spec.rb
index 100ed520a32..bf9442808bc 100644
--- a/spec/migrations/schedule_pages_metadata_migration_spec.rb
+++ b/spec/migrations/schedule_pages_metadata_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20191002031332_schedule_pages_metadata_migration')
-describe SchedulePagesMetadataMigration, :migration, :sidekiq do
+describe SchedulePagesMetadataMigration, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb b/spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb
index e397fbb7138..44ef72baa86 100644
--- a/spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb
+++ b/spec/migrations/schedule_populate_merge_request_assignees_table_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190322132835_schedule_populate_merge_request_assignees_table.rb')
-describe SchedulePopulateMergeRequestAssigneesTable, :migration, :sidekiq do
+describe SchedulePopulateMergeRequestAssigneesTable, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
diff --git a/spec/migrations/schedule_recalculate_project_authorizations_spec.rb b/spec/migrations/schedule_recalculate_project_authorizations_spec.rb
index a739606ca8f..77ad2b2dc8e 100644
--- a/spec/migrations/schedule_recalculate_project_authorizations_spec.rb
+++ b/spec/migrations/schedule_recalculate_project_authorizations_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200204113223_schedule_recalculate_project_authorizations.rb')
-describe ScheduleRecalculateProjectAuthorizations, :migration, :sidekiq do
+describe ScheduleRecalculateProjectAuthorizations, :migration do
let(:users_table) { table(:users) }
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
diff --git a/spec/migrations/schedule_runners_token_encryption_spec.rb b/spec/migrations/schedule_runners_token_encryption_spec.rb
index 6b9538c4d17..60abb98f629 100644
--- a/spec/migrations/schedule_runners_token_encryption_spec.rb
+++ b/spec/migrations/schedule_runners_token_encryption_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181121111200_schedule_runners_token_encryption')
-describe ScheduleRunnersTokenEncryption, :migration, :sidekiq do
+describe ScheduleRunnersTokenEncryption, :migration do
let(:settings) { table(:application_settings) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb b/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb
index 845b0515177..c022610be08 100644
--- a/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb
+++ b/spec/migrations/schedule_set_confidential_note_events_on_webhooks_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180104131052_schedule_set_confidential_note_events_on_webhooks.rb')
-describe ScheduleSetConfidentialNoteEventsOnWebhooks, :migration, :sidekiq do
+describe ScheduleSetConfidentialNoteEventsOnWebhooks, :migration do
let(:web_hooks_table) { table(:web_hooks) }
let(:migration_class) { Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnWebhooks }
let(:migration_name) { migration_class.to_s.demodulize }
diff --git a/spec/migrations/schedule_stages_index_migration_spec.rb b/spec/migrations/schedule_stages_index_migration_spec.rb
index 9ebc648f9d8..f2e9abe1eb8 100644
--- a/spec/migrations/schedule_stages_index_migration_spec.rb
+++ b/spec/migrations/schedule_stages_index_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180420080616_schedule_stages_index_migration')
-describe ScheduleStagesIndexMigration, :sidekiq, :migration do
+describe ScheduleStagesIndexMigration, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
diff --git a/spec/migrations/schedule_sync_issuables_state_id_spec.rb b/spec/migrations/schedule_sync_issuables_state_id_spec.rb
index 4f841e8ce04..21844edeb40 100644
--- a/spec/migrations/schedule_sync_issuables_state_id_spec.rb
+++ b/spec/migrations/schedule_sync_issuables_state_id_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190214112022_schedule_sync_issuables_state_id.rb')
-describe ScheduleSyncIssuablesStateId, :migration, :sidekiq do
+describe ScheduleSyncIssuablesStateId, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:merge_requests) { table(:merge_requests) }
diff --git a/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb b/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb
index 105c05bb7ca..5dbe0d973ae 100644
--- a/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb
+++ b/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190506135400_schedule_sync_issuables_state_id_where_nil')
-describe ScheduleSyncIssuablesStateIdWhereNil, :migration, :sidekiq do
+describe ScheduleSyncIssuablesStateIdWhereNil, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:merge_requests) { table(:merge_requests) }
diff --git a/spec/migrations/schedule_update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb b/spec/migrations/schedule_update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb
new file mode 100644
index 00000000000..221f266cb70
--- /dev/null
+++ b/spec/migrations/schedule_update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200110121314_schedule_update_existing_subgroup_to_match_visibility_level_of_parent.rb')
+
+describe ScheduleUpdateExistingSubgroupToMatchVisibilityLevelOfParent, :migration do
+ include MigrationHelpers::NamespacesHelpers
+ let(:migration_class) { described_class::MIGRATION }
+ let(:migration_name) { migration_class.to_s.demodulize }
+
+ context 'private visibility level' do
+ it 'correctly schedules background migrations' do
+ parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE)
+ create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent.id)
+
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ expect(migration_name).to be_scheduled_migration_with_multiple_args([parent.id], Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
+ end
+
+ it 'correctly schedules background migrations for groups and subgroups' do
+ parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE)
+ middle_group = create_namespace('middle_group', Gitlab::VisibilityLevel::PRIVATE, parent_id: parent.id)
+ create_namespace('middle_empty_group', Gitlab::VisibilityLevel::PRIVATE, parent_id: parent.id)
+ create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: middle_group.id)
+
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ expect(migration_name).to be_scheduled_migration_with_multiple_args([middle_group.id, parent.id], Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
+ end
+ end
+
+ context 'internal visibility level' do
+ it 'correctly schedules background migrations' do
+ parent = create_namespace('parent', Gitlab::VisibilityLevel::INTERNAL)
+ middle_group = create_namespace('child', Gitlab::VisibilityLevel::INTERNAL, parent_id: parent.id)
+ create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: middle_group.id)
+
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ expect(migration_name).to be_scheduled_migration_with_multiple_args([parent.id, middle_group.id], Gitlab::VisibilityLevel::INTERNAL)
+ end
+ end
+ end
+ end
+
+ context 'mixed visibility levels' do
+ it 'correctly schedules background migrations' do
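+ # Subgroups are batched per parent visibility level: one job covers the internal parents, another the private parent.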
+ parent1 = create_namespace('parent1', Gitlab::VisibilityLevel::INTERNAL)
+ create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent1.id)
+ parent2 = create_namespace('parent2', Gitlab::VisibilityLevel::PRIVATE)
+ middle_group = create_namespace('middle_group', Gitlab::VisibilityLevel::INTERNAL, parent_id: parent2.id)
+ create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: middle_group.id)
+
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ expect(migration_name).to be_scheduled_migration_with_multiple_args([parent1.id, middle_group.id], Gitlab::VisibilityLevel::INTERNAL)
+ expect(migration_name).to be_scheduled_migration_with_multiple_args([parent2.id], Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/services_remove_temporary_index_on_project_id_spec.rb b/spec/migrations/services_remove_temporary_index_on_project_id_spec.rb
new file mode 100644
index 00000000000..f730d7aecfd
--- /dev/null
+++ b/spec/migrations/services_remove_temporary_index_on_project_id_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200203104214_services_remove_temporary_index_on_project_id.rb')
+
+describe ServicesRemoveTemporaryIndexOnProjectId, :migration do
+ let(:migration_instance) { described_class.new }
+
+ it 'adds and removes temporary partial index in up and down methods' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(migration_instance.index_exists?(:services, :project_id, name: described_class::INDEX_NAME)).to be true
+ }
+
+ migration.after -> {
+ expect(migration_instance.index_exists?(:services, :project_id, name: described_class::INDEX_NAME)).to be false
+ }
+ end
+ end
+
+ describe '#up' do
+ context 'index does not exist' do
+ it 'skips removal action' do
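+ # The first run removes the index if it exists; a second run must leave the schema unchanged.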
+ migrate!
+
+ expect { migrate! }.not_to change { migration_instance.index_exists?(:services, :project_id, name: described_class::INDEX_NAME) }
+ end
+ end
+ end
+
+ describe '#down' do
+ context 'index already exists' do
+ it 'skips creation of duplicated temporary partial index on project_id' do
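+ # The first rollback restores the index; a second rollback must leave it unchanged.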
+ schema_migrate_down!
+
+ expect { schema_migrate_down! }.not_to change { migration_instance.index_exists?(:services, :project_id, name: described_class::INDEX_NAME) }
+ end
+ end
+ end
+end
diff --git a/spec/migrations/sync_issuables_state_id_spec.rb b/spec/migrations/sync_issuables_state_id_spec.rb
index c6e4b504bbb..3138d2bec33 100644
--- a/spec/migrations/sync_issuables_state_id_spec.rb
+++ b/spec/migrations/sync_issuables_state_id_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190911251732_sync_issuables_state_id')
-describe SyncIssuablesStateId, :migration, :sidekiq do
+describe SyncIssuablesStateId, :migration do
let(:migration) { described_class.new }
describe '#up' do
diff --git a/spec/migrations/update_fingerprint_sha256_within_keys_spec.rb b/spec/migrations/update_fingerprint_sha256_within_keys_spec.rb
index bdb661af904..20a506ea976 100644
--- a/spec/migrations/update_fingerprint_sha256_within_keys_spec.rb
+++ b/spec/migrations/update_fingerprint_sha256_within_keys_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200106071113_update_fingerprint_sha256_within_keys.rb')
-describe UpdateFingerprintSha256WithinKeys, :sidekiq, :migration do
+describe UpdateFingerprintSha256WithinKeys, :migration do
let(:key_table) { table(:keys) }
describe '#up' do
diff --git a/spec/migrations/update_timestamp_softwarelicensespolicy_spec.rb b/spec/migrations/update_timestamp_softwarelicensespolicy_spec.rb
new file mode 100644
index 00000000000..539da8ac92a
--- /dev/null
+++ b/spec/migrations/update_timestamp_softwarelicensespolicy_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'migrate', '20200116175538_update_timestamp_softwarelicensespolicy.rb')
+
+describe UpdateTimestampSoftwarelicensespolicy, :migration do
+ let(:software_licenses_policy) { table(:software_license_policies) }
+ let(:projects) { table(:projects) }
+ let(:licenses) { table(:software_licenses) }
+
+ before do
+ projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: 1)
+ licenses.create!(name: 'MIT')
+ software_licenses_policy.create!(project_id: projects.first.id, software_license_id: licenses.first.id, created_at: nil, updated_at: nil)
+ end
+
+ it 'creates timestamps' do
+ migrate!
+
+ expect(software_licenses_policy.first.created_at).to be_present
+ expect(software_licenses_policy.first.updated_at).to be_present
+ end
+end
diff --git a/spec/models/abuse_report_spec.rb b/spec/models/abuse_report_spec.rb
index 814df472389..2c4fa398636 100644
--- a/spec/models/abuse_report_spec.rb
+++ b/spec/models/abuse_report_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe AbuseReport do
- set(:report) { create(:abuse_report) }
- set(:user) { create(:admin) }
+ let_it_be(:report, reload: true) { create(:abuse_report) }
+ let_it_be(:user, reload: true) { create(:admin) }
subject { report }
it { expect(subject).to be_valid }
diff --git a/spec/models/award_emoji_spec.rb b/spec/models/award_emoji_spec.rb
index b15b26b1630..b2d58dd95ad 100644
--- a/spec/models/award_emoji_spec.rb
+++ b/spec/models/award_emoji_spec.rb
@@ -45,8 +45,8 @@ describe AwardEmoji do
end
describe 'scopes' do
- set(:thumbsup) { create(:award_emoji, name: 'thumbsup') }
- set(:thumbsdown) { create(:award_emoji, name: 'thumbsdown') }
+ let_it_be(:thumbsup) { create(:award_emoji, name: 'thumbsup') }
+ let_it_be(:thumbsdown) { create(:award_emoji, name: 'thumbsdown') }
describe '.upvotes' do
it { expect(described_class.upvotes).to contain_exactly(thumbsup) }
diff --git a/spec/models/badge_spec.rb b/spec/models/badge_spec.rb
index c661f5384ea..60ae579eb03 100644
--- a/spec/models/badge_spec.rb
+++ b/spec/models/badge_spec.rb
@@ -81,13 +81,13 @@ describe Badge do
let(:badge) { build(:badge, link_url: placeholder_url, image_url: placeholder_url) }
let!(:project) { create(:project) }
- context '#rendered_link_url' do
+ describe '#rendered_link_url' do
let(:method) { :link_url }
it_behaves_like 'rendered_links'
end
- context '#rendered_image_url' do
+ describe '#rendered_image_url' do
let(:method) { :image_url }
it_behaves_like 'rendered_links'
diff --git a/spec/models/badges/project_badge_spec.rb b/spec/models/badges/project_badge_spec.rb
index d41c5cf2ca1..c0e85d3de87 100644
--- a/spec/models/badges/project_badge_spec.rb
+++ b/spec/models/badges/project_badge_spec.rb
@@ -30,13 +30,13 @@ describe ProjectBadge do
let(:badge) { build(:project_badge, link_url: placeholder_url, image_url: placeholder_url) }
let!(:project) { badge.project }
- context '#rendered_link_url' do
+ describe '#rendered_link_url' do
let(:method) { :link_url }
it_behaves_like 'rendered_links'
end
- context '#rendered_image_url' do
+ describe '#rendered_image_url' do
let(:method) { :image_url }
it_behaves_like 'rendered_links'
diff --git a/spec/models/blob_spec.rb b/spec/models/blob_spec.rb
index c7ca0625b77..a0193b29bb3 100644
--- a/spec/models/blob_spec.rb
+++ b/spec/models/blob_spec.rb
@@ -6,6 +6,8 @@ describe Blob do
include FakeBlobHelpers
let(:project) { build(:project, lfs_enabled: true) }
+ let(:personal_snippet) { build(:personal_snippet) }
+ let(:project_snippet) { build(:project_snippet, project: project) }
before do
allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
@@ -18,73 +20,146 @@ describe Blob do
end
describe '.lazy' do
- let(:project) { create(:project, :repository) }
- let(:same_project) { Project.find(project.id) }
- let(:other_project) { create(:project, :repository) }
let(:commit_id) { 'e63f41fe459e62e1228fcef60d7189127aeba95a' }
+ let(:blob_size_limit) { 10 * 1024 * 1024 }
- it 'does not fetch blobs when none are accessed' do
- expect(project.repository).not_to receive(:blobs_at)
+ shared_examples '.lazy checks' do
+ it 'does not fetch blobs when none are accessed' do
+ expect(container.repository).not_to receive(:blobs_at)
- described_class.lazy(project, commit_id, 'CHANGELOG')
- end
+ described_class.lazy(container, commit_id, 'CHANGELOG')
+ end
+
+ it 'fetches all blobs for the same repository when one is accessed' do
+ expect(container.repository).to receive(:blobs_at)
+ .with([[commit_id, 'CHANGELOG'], [commit_id, 'CONTRIBUTING.md']], blob_size_limit: blob_size_limit)
+ .once.and_call_original
+ expect(other_container.repository).not_to receive(:blobs_at)
+
+ changelog = described_class.lazy(container, commit_id, 'CHANGELOG')
+ contributing = described_class.lazy(same_container, commit_id, 'CONTRIBUTING.md')
+
+ described_class.lazy(other_container, commit_id, 'CHANGELOG')
+
+ # Access property so the values are loaded
+ changelog.id
+ contributing.id
+ end
+
+ it 'does not include blobs from previous requests in later requests' do
+ changelog = described_class.lazy(container, commit_id, 'CHANGELOG')
+ contributing = described_class.lazy(same_container, commit_id, 'CONTRIBUTING.md')
- it 'fetches all blobs for the same repository when one is accessed' do
- expect(project.repository).to receive(:blobs_at).with([[commit_id, 'CHANGELOG'], [commit_id, 'CONTRIBUTING.md']]).once.and_call_original
- expect(other_project.repository).not_to receive(:blobs_at)
+ # Access property so the values are loaded
+ changelog.id
+ contributing.id
- changelog = described_class.lazy(project, commit_id, 'CHANGELOG')
- contributing = described_class.lazy(same_project, commit_id, 'CONTRIBUTING.md')
+ readme = described_class.lazy(container, commit_id, 'README.md')
- described_class.lazy(other_project, commit_id, 'CHANGELOG')
+ expect(container.repository).to receive(:blobs_at)
+ .with([[commit_id, 'README.md']], blob_size_limit: blob_size_limit).once.and_call_original
- # Access property so the values are loaded
- changelog.id
- contributing.id
+ readme.id
+ end
end
- it 'does not include blobs from previous requests in later requests' do
- changelog = described_class.lazy(project, commit_id, 'CHANGELOG')
- contributing = described_class.lazy(same_project, commit_id, 'CONTRIBUTING.md')
+ context 'with project' do
+ let(:container) { create(:project, :repository) }
+ let(:same_container) { Project.find(container.id) }
+ let(:other_container) { create(:project, :repository) }
- # Access property so the values are loaded
- changelog.id
- contributing.id
+ it_behaves_like '.lazy checks'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { create(:personal_snippet, :repository) }
+ let(:same_container) { PersonalSnippet.find(container.id) }
+ let(:other_container) { create(:personal_snippet, :repository) }
- readme = described_class.lazy(project, commit_id, 'README.md')
+ it_behaves_like '.lazy checks'
+ end
- expect(project.repository).to receive(:blobs_at).with([[commit_id, 'README.md']]).once.and_call_original
+ context 'with project snippet' do
+ let(:container) { create(:project_snippet, :repository) }
+ let(:same_container) { ProjectSnippet.find(container.id) }
+ let(:other_container) { create(:project_snippet, :repository) }
- readme.id
+ it_behaves_like '.lazy checks'
end
end
describe '#data' do
- context 'using a binary blob' do
- it 'returns the data as-is' do
- data = "\n\xFF\xB9\xC3"
- blob = fake_blob(binary: true, data: data)
+ shared_examples '#data checks' do
+ context 'using a binary blob' do
+ it 'returns the data as-is' do
+ data = "\n\xFF\xB9\xC3"
+ blob = fake_blob(binary: true, data: data, container: container)
- expect(blob.data).to eq(data)
+ expect(blob.data).to eq(data)
+ end
end
- end
- context 'using a text blob' do
- it 'converts the data to UTF-8' do
- blob = fake_blob(binary: false, data: "\n\xFF\xB9\xC3")
+ context 'using a text blob' do
+ it 'converts the data to UTF-8' do
+ blob = fake_blob(binary: false, data: "\n\xFF\xB9\xC3", container: container)
- expect(blob.data).to eq("\n���")
+ expect(blob.data).to eq("\n���")
+ end
end
end
+
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like '#data checks'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like '#data checks'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like '#data checks'
+ end
end
describe '#external_storage_error?' do
+ shared_examples 'no error' do
+ it do
+ expect(blob.external_storage_error?).to be_falsey
+ end
+ end
+
+ shared_examples 'returns error' do
+ it do
+ expect(blob.external_storage_error?).to be_truthy
+ end
+ end
+
context 'if the blob is stored in LFS' do
- let(:blob) { fake_blob(path: 'file.pdf', lfs: true) }
+ let(:blob) { fake_blob(path: 'file.pdf', lfs: true, container: container) }
context 'when the project has LFS enabled' do
- it 'returns false' do
- expect(blob.external_storage_error?).to be_falsey
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'no error'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns error'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'no error'
end
end
@@ -93,17 +168,39 @@ describe Blob do
project.lfs_enabled = false
end
- it 'returns true' do
- expect(blob.external_storage_error?).to be_truthy
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns error'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'returns error'
end
end
end
context 'if the blob is not stored in LFS' do
- let(:blob) { fake_blob(path: 'file.md') }
+ let(:blob) { fake_blob(path: 'file.md', container: container) }
- it 'returns false' do
- expect(blob.external_storage_error?).to be_falsey
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'no error'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'no error'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'no error'
end
end
end
@@ -112,19 +209,59 @@ describe Blob do
context 'if the blob is stored in LFS' do
let(:blob) { fake_blob(path: 'file.pdf', lfs: true) }
- context 'when the project has LFS enabled' do
- it 'returns true' do
+ shared_examples 'returns true' do
+ it do
expect(blob.stored_externally?).to be_truthy
end
end
+ shared_examples 'returns false' do
+ it do
+ expect(blob.stored_externally?).to be_falsey
+ end
+ end
+
+ context 'when the project has LFS enabled' do
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns true'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns true'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'returns true'
+ end
+ end
+
context 'when the project does not have LFS enabled' do
before do
project.lfs_enabled = false
end
- it 'returns false' do
- expect(blob.stored_externally?).to be_falsey
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'returns false'
end
end
end
@@ -139,21 +276,63 @@ describe Blob do
end
describe '#binary?' do
+ shared_examples 'returns true' do
+ it do
+ expect(blob.binary?).to be_truthy
+ end
+ end
+
+ shared_examples 'returns false' do
+ it do
+ expect(blob.binary?).to be_falsey
+ end
+ end
+
context 'if the blob is stored externally' do
+ let(:blob) { fake_blob(path: file, lfs: true) }
+
context 'if the extension has a rich viewer' do
context 'if the viewer is binary' do
- it 'returns true' do
- blob = fake_blob(path: 'file.pdf', lfs: true)
+ let(:file) { 'file.pdf' }
- expect(blob.binary?).to be_truthy
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns true'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns true'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'returns true'
end
end
context 'if the viewer is text-based' do
- it 'return false' do
- blob = fake_blob(path: 'file.md', lfs: true)
+ let(:file) { 'file.md' }
+
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
- expect(blob.binary?).to be_falsey
+ it_behaves_like 'returns false'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'returns false'
end
end
end
@@ -161,54 +340,138 @@ describe Blob do
context "if the extension doesn't have a rich viewer" do
context 'if the extension has a text mime type' do
context 'if the extension is for a programming language' do
- it 'returns false' do
- blob = fake_blob(path: 'file.txt', lfs: true)
+ let(:file) { 'file.txt' }
+
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns false'
+ end
- expect(blob.binary?).to be_falsey
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'returns false'
end
end
context 'if the extension is not for a programming language' do
- it 'returns false' do
- blob = fake_blob(path: 'file.ics', lfs: true)
+ let(:file) { 'file.ics' }
+
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
- expect(blob.binary?).to be_falsey
+ it_behaves_like 'returns false'
end
end
end
context 'if the extension has a binary mime type' do
context 'if the extension is for a programming language' do
- it 'returns false' do
- blob = fake_blob(path: 'file.rb', lfs: true)
+ let(:file) { 'file.rb' }
+
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
- expect(blob.binary?).to be_falsey
+ it_behaves_like 'returns false'
end
end
context 'if the extension is not for a programming language' do
- it 'returns true' do
- blob = fake_blob(path: 'file.exe', lfs: true)
+ let(:file) { 'file.exe' }
- expect(blob.binary?).to be_truthy
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns true'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns true'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'returns true'
end
end
end
context 'if the extension has an unknown mime type' do
context 'if the extension is for a programming language' do
- it 'returns false' do
- blob = fake_blob(path: 'file.ini', lfs: true)
+ let(:file) { 'file.ini' }
+
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
- expect(blob.binary?).to be_falsey
+ it_behaves_like 'returns false'
end
end
context 'if the extension is not for a programming language' do
- it 'returns true' do
- blob = fake_blob(path: 'file.wtf', lfs: true)
+ let(:file) { 'file.wtf' }
+
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns true'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
- expect(blob.binary?).to be_truthy
+ it_behaves_like 'returns true'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'returns true'
end
end
end
@@ -217,18 +480,46 @@ describe Blob do
context 'if the blob is not stored externally' do
context 'if the blob is binary' do
- it 'returns true' do
- blob = fake_blob(path: 'file.pdf', binary: true)
+ let(:blob) { fake_blob(path: 'file.pdf', binary: true, container: container) }
- expect(blob.binary?).to be_truthy
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns true'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns true'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'returns true'
end
end
context 'if the blob is text-based' do
- it 'return false' do
- blob = fake_blob(path: 'file.md')
+ let(:blob) { fake_blob(path: 'file.md', container: container) }
+
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
- expect(blob.binary?).to be_falsey
+ it_behaves_like 'returns false'
end
end
end
@@ -385,38 +676,110 @@ describe Blob do
end
describe '#rendered_as_text?' do
+ shared_examples 'returns true' do
+ it do
+ expect(blob.rendered_as_text?(ignore_errors: ignore_errors)).to be_truthy
+ end
+ end
+
+ shared_examples 'returns false' do
+ it do
+ expect(blob.rendered_as_text?(ignore_errors: ignore_errors)).to be_falsey
+ end
+ end
+
context 'when ignoring errors' do
+ let(:ignore_errors) { true }
+
context 'when the simple viewer is text-based' do
- it 'returns true' do
- blob = fake_blob(path: 'file.md', size: 100.megabytes)
+ let(:blob) { fake_blob(path: 'file.md', size: 100.megabytes, container: container) }
- expect(blob.rendered_as_text?).to be_truthy
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns true'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns true'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'returns true'
end
end
context 'when the simple viewer is binary' do
- it 'returns false' do
- blob = fake_blob(path: 'file.pdf', binary: true, size: 100.megabytes)
+ let(:blob) { fake_blob(path: 'file.pdf', binary: true, size: 100.megabytes, container: container) }
+
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
- expect(blob.rendered_as_text?).to be_falsey
+ it_behaves_like 'returns false'
end
end
end
context 'when not ignoring errors' do
+ let(:ignore_errors) { false }
+
context 'when the viewer has render errors' do
- it 'returns false' do
- blob = fake_blob(path: 'file.md', size: 100.megabytes)
+ let(:blob) { fake_blob(path: 'file.md', size: 100.megabytes, container: container) }
- expect(blob.rendered_as_text?(ignore_errors: false)).to be_falsey
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns false'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'returns false'
end
end
context "when the viewer doesn't have render errors" do
- it 'returns true' do
- blob = fake_blob(path: 'file.md')
+ let(:blob) { fake_blob(path: 'file.md', container: container) }
+
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns true'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns true'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
- expect(blob.rendered_as_text?(ignore_errors: false)).to be_truthy
+ it_behaves_like 'returns true'
end
end
end
diff --git a/spec/models/blob_viewer/gitlab_ci_yml_spec.rb b/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
index 02993052124..e645733e02d 100644
--- a/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
+++ b/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
@@ -6,9 +6,8 @@ describe BlobViewer::GitlabCiYml do
include FakeBlobHelpers
include RepoHelpers
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
-
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:data) { File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) }
let(:blob) { fake_blob(path: '.gitlab-ci.yml', data: data) }
let(:sha) { sample_commit.id }
diff --git a/spec/models/board_spec.rb b/spec/models/board_spec.rb
index 0987c8e2b65..2d5309b4d23 100644
--- a/spec/models/board_spec.rb
+++ b/spec/models/board_spec.rb
@@ -16,26 +16,29 @@ describe Board do
end
describe '#order_by_name_asc' do
- let!(:second_board) { create(:board, name: 'Secondary board', project: project) }
- let!(:first_board) { create(:board, name: 'First board', project: project) }
+ let!(:board_B) { create(:board, project: project, name: 'B') }
+ let!(:board_C) { create(:board, project: project, name: 'C') }
+ let!(:board_a) { create(:board, project: project, name: 'a') }
+ let!(:board_A) { create(:board, project: project, name: 'A') }
- it 'returns in alphabetical order' do
- expect(project.boards.order_by_name_asc).to eq [first_board, second_board]
+ it 'returns in case-insensitive alphabetical order and then by ascending id' do
+ expect(project.boards.order_by_name_asc).to eq [board_a, board_A, board_B, board_C]
end
end
describe '#first_board' do
- let!(:other_board) { create(:board, name: 'Other board', project: other_project) }
- let!(:second_board) { create(:board, name: 'Secondary board', project: project) }
- let!(:first_board) { create(:board, name: 'First board', project: project) }
+ let!(:board_B) { create(:board, project: project, name: 'B') }
+ let!(:board_C) { create(:board, project: project, name: 'C') }
+ let!(:board_a) { create(:board, project: project, name: 'a') }
+ let!(:board_A) { create(:board, project: project, name: 'A') }
- it 'return the first alphabetical board as a relation' do
- expect(project.boards.first_board).to eq [first_board]
+ it 'returns the first case-insensitive alphabetical board as a relation' do
+ expect(project.boards.first_board).to eq [board_a]
end
# BoardsActions#board expects this behavior
it 'raises an error when find is done on a non-existent record' do
- expect { project.boards.first_board.find(second_board.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { project.boards.first_board.find(board_A.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
end
end
diff --git a/spec/models/chat_name_spec.rb b/spec/models/chat_name_spec.rb
index 82991937644..863c28a86fb 100644
--- a/spec/models/chat_name_spec.rb
+++ b/spec/models/chat_name_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe ChatName do
- set(:chat_name) { create(:chat_name) }
+ let_it_be(:chat_name) { create(:chat_name) }
subject { chat_name }
it { is_expected.to belong_to(:service) }
diff --git a/spec/models/chat_team_spec.rb b/spec/models/chat_team_spec.rb
index 76beb3d506b..107fdaccc68 100644
--- a/spec/models/chat_team_spec.rb
+++ b/spec/models/chat_team_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe ChatTeam do
- set(:chat_team) { create(:chat_team) }
+ let_it_be(:chat_team) { create(:chat_team) }
subject { chat_team }
# Associations
diff --git a/spec/models/ci/artifact_blob_spec.rb b/spec/models/ci/artifact_blob_spec.rb
index f63816fd92a..99983686670 100644
--- a/spec/models/ci/artifact_blob_spec.rb
+++ b/spec/models/ci/artifact_blob_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Ci::ArtifactBlob do
- set(:project) { create(:project, :public) }
- set(:build) { create(:ci_build, :artifacts, project: project) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:build) { create(:ci_build, :artifacts, project: project) }
let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/another-subdirectory/banana_sample.gif') }
subject { described_class.new(entry) }
@@ -51,7 +51,7 @@ describe Ci::ArtifactBlob do
allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
end
- context '.gif extension' do
+ describe '.gif extension' do
it 'returns nil' do
expect(subject.external_url(build.project, build)).to be_nil
end
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index a871f9b3fe6..31e13122b95 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -3,15 +3,26 @@
require 'spec_helper'
describe Ci::Bridge do
- set(:project) { create(:project) }
- set(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:target_project) { create(:project, name: 'project', namespace: create(:namespace, name: 'my')) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let(:bridge) do
- create(:ci_bridge, pipeline: pipeline)
+ create(:ci_bridge, :variables, status: :created,
+ options: options,
+ pipeline: pipeline)
+ end
+
+ let(:options) do
+ { trigger: { project: 'my/project', branch: 'master' } }
end
it { is_expected.to include_module(Ci::PipelineDelegator) }
+ it 'has many sourced pipelines' do
+ expect(bridge).to have_many(:sourced_pipelines)
+ end
+
describe '#tags' do
it 'only has a bridge tag' do
expect(bridge.tags).to eq [:bridge]
@@ -41,4 +52,252 @@ describe Ci::Bridge do
expect(bridge.scoped_variables_hash.keys).to include(*variables)
end
end
+
+ describe 'state machine transitions' do
+ context 'when bridge points towards downstream' do
+ it 'schedules downstream pipeline creation' do
+ expect(bridge).to receive(:schedule_downstream_pipeline!)
+
+ bridge.enqueue!
+ end
+ end
+ end
+
+ describe '#inherit_status_from_downstream!' do
+ let(:downstream_pipeline) { build(:ci_pipeline, status: downstream_status) }
+
+ before do
+ bridge.status = 'pending'
+ create(:ci_sources_pipeline, pipeline: downstream_pipeline, source_job: bridge)
+ end
+
+ subject { bridge.inherit_status_from_downstream!(downstream_pipeline) }
+
+ context 'when status is not supported' do
+ (::Ci::Pipeline::AVAILABLE_STATUSES - ::Ci::Pipeline::COMPLETED_STATUSES).map(&:to_s).each do |status|
+ context "when status is #{status}" do
+ let(:downstream_status) { status }
+
+ it 'returns false' do
+ expect(subject).to eq(false)
+ end
+
+ it 'does not change the bridge status' do
+ expect { subject }.not_to change { bridge.status }.from('pending')
+ end
+ end
+ end
+ end
+
+ context 'when status is supported' do
+ using RSpec::Parameterized::TableSyntax
+
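+ # A successful downstream pipeline keeps the bridge successful; any other completed status marks the bridge as failed.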
+ where(:downstream_status, :upstream_status) do
+ [
+ %w[success success],
+ *::Ci::Pipeline.completed_statuses.without(:success).map { |status| [status.to_s, 'failed'] }
+ ]
+ end
+
+ with_them do
+ it 'inherits the downstream status' do
+ expect { subject }.to change { bridge.status }.from('pending').to(upstream_status)
+ end
+ end
+ end
+ end
+
+ describe '#dependent?' do
+ subject { bridge.dependent? }
+
+ context 'when bridge has strategy depend' do
+ let(:options) { { trigger: { project: 'my/project', strategy: 'depend' } } }
+
+ it { is_expected.to be true }
+ end
+
+ context 'when bridge does not have strategy depend' do
+ it { is_expected.to be false }
+ end
+ end
+
+ describe '#yaml_variables' do
+ it 'returns YAML variables' do
+ expect(bridge.yaml_variables)
+ .to include(key: 'BRIDGE', value: 'cross', public: true)
+ end
+ end
+
+ describe '#downstream_variables' do
+ it 'returns variables that are going to be passed downstream' do
+ expect(bridge.downstream_variables)
+ .to include(key: 'BRIDGE', value: 'cross')
+ end
+
+ context 'when using variables interpolation' do
+ let(:yaml_variables) do
+ [
+ {
+ key: 'EXPANDED',
+ value: '$BRIDGE-bridge',
+ public: true
+ },
+ {
+ key: 'UPSTREAM_CI_PIPELINE_ID',
+ value: '$CI_PIPELINE_ID',
+ public: true
+ },
+ {
+ key: 'UPSTREAM_CI_PIPELINE_URL',
+ value: '$CI_PIPELINE_URL',
+ public: true
+ }
+ ]
+ end
+
+ before do
+ bridge.yaml_variables.concat(yaml_variables)
+ end
+
+ it 'correctly expands variables with interpolation' do
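+ # Persisted pipeline variables are passed downstream with an UPSTREAM_ prefix, and $BRIDGE interpolates into EXPANDED.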
+ expanded_values = pipeline
+ .persisted_variables
+ .to_hash
+ .transform_keys { |key| "UPSTREAM_#{key}" }
+ .map { |key, value| { key: key, value: value } }
+ .push(key: 'EXPANDED', value: 'cross-bridge')
+
+ expect(bridge.downstream_variables)
+ .to match(a_collection_including(*expanded_values))
+ end
+ end
+
+ context 'when recursive interpolation has been used' do
+ before do
+ bridge.yaml_variables << { key: 'EXPANDED', value: '$EXPANDED', public: true }
+ end
+
+ it 'does not expand variable recursively' do
+ expect(bridge.downstream_variables)
+ .to include(key: 'EXPANDED', value: '$EXPANDED')
+ end
+ end
+ end
+
+ describe 'metadata support' do
+ it 'reads YAML variables from metadata' do
+ expect(bridge.yaml_variables).not_to be_empty
+ expect(bridge.metadata).to be_a Ci::BuildMetadata
+ expect(bridge.read_attribute(:yaml_variables)).to be_nil
+ expect(bridge.metadata.config_variables).to be bridge.yaml_variables
+ end
+
+ it 'reads options from metadata' do
+ expect(bridge.options).not_to be_empty
+ expect(bridge.metadata).to be_a Ci::BuildMetadata
+ expect(bridge.read_attribute(:options)).to be_nil
+ expect(bridge.metadata.config_options).to be bridge.options
+ end
+ end
+
+ describe '#triggers_child_pipeline?' do
+ subject { bridge.triggers_child_pipeline? }
+
+ context 'when bridge defines a downstream YAML' do
+ let(:options) do
+ {
+ trigger: {
+ include: 'path/to/child.yml'
+ }
+ }
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when bridge does not define a downstream YAML' do
+ let(:options) do
+ {
+ trigger: {
+ project: project.full_path
+ }
+ }
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#yaml_for_downstream' do
+ subject { bridge.yaml_for_downstream }
+
+ context 'when bridge defines a downstream YAML' do
+ let(:options) do
+ {
+ trigger: {
+ include: 'path/to/child.yml'
+ }
+ }
+ end
+
+ let(:yaml) do
+ <<~EOY
+ ---
+ include: path/to/child.yml
+ EOY
+ end
+
+ it { is_expected.to eq yaml }
+ end
+
+ context 'when bridge does not define a downstream YAML' do
+ let(:options) { {} }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#target_ref' do
+ context 'when trigger is defined' do
+ it 'returns a ref name' do
+ expect(bridge.target_ref).to eq 'master'
+ end
+
+ context 'when using variable expansion' do
+ let(:options) { { trigger: { project: 'my/project', branch: '$BRIDGE-master' } } }
+
+ it 'correctly expands variables' do
+ expect(bridge.target_ref).to eq('cross-master')
+ end
+ end
+ end
+
+ context 'when trigger does not have project defined' do
+ let(:options) { nil }
+
+ it 'returns nil' do
+ expect(bridge.target_ref).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/models/ci/build_metadata_spec.rb b/spec/models/ci/build_metadata_spec.rb
index da95a2d30f5..588e5872cc8 100644
--- a/spec/models/ci/build_metadata_spec.rb
+++ b/spec/models/ci/build_metadata_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
describe Ci::BuildMetadata do
- set(:user) { create(:user) }
- set(:group) { create(:group) }
- set(:project) { create(:project, :repository, group: group, build_timeout: 2000) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group, build_timeout: 2000) }
- set(:pipeline) do
+ let_it_be(:pipeline) do
create(:ci_pipeline, project: project,
sha: project.commit.id,
ref: project.default_branch,
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 38e15fc4582..4bfb5771bb8 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
describe Ci::Build do
- set(:user) { create(:user) }
- set(:group) { create(:group) }
- set(:project) { create(:project, :repository, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group, reload: true) { create(:group) }
+ let_it_be(:project, reload: true) { create(:project, :repository, group: group) }
- set(:pipeline) do
+ let_it_be(:pipeline, reload: true) do
create(:ci_pipeline, project: project,
sha: project.commit.id,
ref: project.default_branch,
@@ -33,7 +33,7 @@ describe Ci::Build do
it { is_expected.to respond_to(:has_trace?) }
it { is_expected.to respond_to(:trace) }
- it { is_expected.to delegate_method(:merge_request_event?).to(:pipeline) }
+ it { is_expected.to delegate_method(:merge_request?).to(:pipeline) }
it { is_expected.to delegate_method(:merge_request_ref?).to(:pipeline) }
it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }
@@ -762,8 +762,10 @@ describe Ci::Build do
let(:needs) { }
let!(:final) do
+ scheduling_type = needs.present? ? :dag : :stage
+
create(:ci_build,
- pipeline: pipeline, name: 'final',
+ pipeline: pipeline, name: 'final', scheduling_type: scheduling_type,
stage_idx: 3, stage: 'deploy', options: {
dependencies: dependencies
}
@@ -2338,14 +2340,24 @@ describe Ci::Build do
end
end
- describe '#has_expiring_artifacts?' do
+ describe '#has_expiring_archive_artifacts?' do
context 'when artifacts have expiration date set' do
before do
build.update(artifacts_expire_at: 1.day.from_now)
end
- it 'has expiring artifacts' do
- expect(build).to have_expiring_artifacts
+ context 'and job artifacts archive record exists' do
+ let!(:archive) { create(:ci_job_artifact, :archive, job: build) }
+
+ it 'has expiring artifacts' do
+ expect(build).to have_expiring_archive_artifacts
+ end
+ end
+
+ context 'and job artifacts archive record does not exist' do
+ it 'does not have expiring artifacts' do
+ expect(build).not_to have_expiring_archive_artifacts
+ end
end
end
@@ -2355,7 +2367,7 @@ describe Ci::Build do
end
it 'does not have expiring artifacts' do
- expect(build).not_to have_expiring_artifacts
+ expect(build).not_to have_expiring_archive_artifacts
end
end
end
@@ -2391,6 +2403,8 @@ describe Ci::Build do
{ key: 'GITLAB_CI', value: 'true', public: true, masked: false },
{ key: 'CI_SERVER_URL', value: Gitlab.config.gitlab.url, public: true, masked: false },
{ key: 'CI_SERVER_HOST', value: Gitlab.config.gitlab.host, public: true, masked: false },
+ { key: 'CI_SERVER_PORT', value: Gitlab.config.gitlab.port.to_s, public: true, masked: false },
+ { key: 'CI_SERVER_PROTOCOL', value: Gitlab.config.gitlab.protocol, public: true, masked: false },
{ key: 'CI_SERVER_NAME', value: 'GitLab', public: true, masked: false },
{ key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true, masked: false },
{ key: 'CI_SERVER_VERSION_MAJOR', value: Gitlab.version_info.major.to_s, public: true, masked: false },
@@ -2995,7 +3009,8 @@ describe Ci::Build do
stage: 'test',
ref: 'feature',
project: project,
- pipeline: pipeline
+ pipeline: pipeline,
+ scheduling_type: :stage
)
end
@@ -3599,7 +3614,7 @@ describe Ci::Build do
end
describe '.matches_tag_ids' do
- set(:build) { create(:ci_build, project: project, user: user) }
+ let_it_be(:build, reload: true) { create(:ci_build, project: project, user: user) }
let(:tag_ids) { ::ActsAsTaggableOn::Tag.named_any(tag_list).ids }
subject { described_class.where(id: build).matches_tag_ids(tag_ids) }
@@ -3646,7 +3661,7 @@ describe Ci::Build do
end
describe '.matches_tags' do
- set(:build) { create(:ci_build, project: project, user: user) }
+ let_it_be(:build, reload: true) { create(:ci_build, project: project, user: user) }
subject { described_class.where(id: build).with_any_tags }
@@ -3672,7 +3687,7 @@ describe Ci::Build do
end
describe 'pages deployments' do
- set(:build) { create(:ci_build, project: project, user: user) }
+ let_it_be(:build, reload: true) { create(:ci_build, project: project, user: user) }
context 'when job is "pages"' do
before do
@@ -3839,9 +3854,13 @@ describe Ci::Build do
end
describe '#artifacts_metadata_entry' do
- set(:build) { create(:ci_build, project: project) }
+ let_it_be(:build) { create(:ci_build, project: project) }
let(:path) { 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' }
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
before do
stub_artifacts_object_storage
end
@@ -3935,7 +3954,7 @@ describe Ci::Build do
end
describe '#supported_runner?' do
- set(:build) { create(:ci_build) }
+ let_it_be(:build) { create(:ci_build) }
subject { build.supported_runner?(runner_features) }
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index 69fd167e0c8..f08f05a09bf 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
include ExclusiveLeaseHelpers
- set(:build) { create(:ci_build, :running) }
+ let_it_be(:build) { create(:ci_build, :running) }
let(:chunk_index) { 0 }
let(:data_store) { :redis }
let(:raw_data) { nil }
@@ -24,10 +24,45 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'FastDestroyAll' do
let(:parent) { create(:project) }
let(:pipeline) { create(:ci_pipeline, project: parent) }
- let(:build) { create(:ci_build, :running, :trace_live, pipeline: pipeline, project: parent) }
+ let!(:build) { create(:ci_build, :running, :trace_live, pipeline: pipeline, project: parent) }
let(:subjects) { build.trace_chunks }
- it_behaves_like 'fast destroyable'
+ describe 'Forbid #destroy and #destroy_all' do
+ it 'does not delete database rows and associated external data' do
+ expect(external_data_counter).to be > 0
+ expect(subjects.count).to be > 0
+
+ expect { subjects.first.destroy }.to raise_error('`destroy` and `destroy_all` are forbidden. Please use `fast_destroy_all`')
+ expect { subjects.destroy_all }.to raise_error('`destroy` and `destroy_all` are forbidden. Please use `fast_destroy_all`') # rubocop: disable DestroyAll
+
+ expect(subjects.count).to be > 0
+ expect(external_data_counter).to be > 0
+ end
+ end
+
+ describe '.fast_destroy_all' do
+ it 'deletes database rows and associated external data' do
+ expect(external_data_counter).to be > 0
+ expect(subjects.count).to be > 0
+
+ expect { subjects.fast_destroy_all }.not_to raise_error
+
+ expect(subjects.count).to eq(0)
+ expect(external_data_counter).to eq(0)
+ end
+ end
+
+ describe '.use_fast_destroy' do
+ it 'performs cascading delete with fast_destroy_all' do
+ expect(external_data_counter).to be > 0
+ expect(subjects.count).to be > 0
+
+ expect { parent.destroy }.not_to raise_error
+
+ expect(subjects.count).to eq(0)
+ expect(external_data_counter).to eq(0)
+ end
+ end
def external_data_counter
Gitlab::Redis::SharedState.with do |redis|
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 76e31fddd98..d2fe0d7eeca 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -111,6 +111,18 @@ describe Ci::JobArtifact do
end
end
+ describe '.for_sha' do
+ it 'returns job artifacts for a given pipeline sha' do
+ first_pipeline = create(:ci_pipeline)
+ second_pipeline = create(:ci_pipeline, sha: Digest::SHA1.hexdigest(SecureRandom.hex))
+ first_artifact = create(:ci_job_artifact, job: create(:ci_build, pipeline: first_pipeline))
+ second_artifact = create(:ci_job_artifact, job: create(:ci_build, pipeline: second_pipeline))
+
+ expect(described_class.for_sha(first_pipeline.sha)).to eq([first_artifact])
+ expect(described_class.for_sha(second_pipeline.sha)).to eq([second_artifact])
+ end
+ end
+
describe 'callbacks' do
subject { create(:ci_job_artifact, :archive) }
diff --git a/spec/models/ci/persistent_ref_spec.rb b/spec/models/ci/persistent_ref_spec.rb
index ece478fdd36..4cece0664cf 100644
--- a/spec/models/ci/persistent_ref_spec.rb
+++ b/spec/models/ci/persistent_ref_spec.rb
@@ -11,7 +11,7 @@ describe Ci::PersistentRef do
pipeline.succeed!
end
- context '#exist?' do
+ describe '#exist?' do
subject { pipeline.persistent_ref.exist? }
let(:pipeline) { create(:ci_pipeline, sha: sha, project: project) }
@@ -31,7 +31,7 @@ describe Ci::PersistentRef do
end
end
- context '#create' do
+ describe '#create' do
subject { pipeline.persistent_ref.create }
let(:pipeline) { create(:ci_pipeline, sha: sha, project: project) }
@@ -81,7 +81,7 @@ describe Ci::PersistentRef do
end
end
- context '#delete' do
+ describe '#delete' do
subject { pipeline.persistent_ref.delete }
let(:pipeline) { create(:ci_pipeline, sha: sha, project: project) }
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index aee43025288..4ed4b7e38d8 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -80,9 +80,9 @@ describe Ci::PipelineSchedule do
it 'preloads the associations' do
subject
- query = ActiveRecord::QueryRecorder.new { subject.each(&:project) }
+ query = ActiveRecord::QueryRecorder.new { subject.map(&:project).each(&:route) }
- expect(query.count).to eq(2)
+ expect(query.count).to eq(3)
end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 013581c0d94..cf1690df9ba 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -7,7 +7,7 @@ describe Ci::Pipeline, :mailer do
include StubRequests
let(:user) { create(:user) }
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
let(:pipeline) do
create(:ci_empty_pipeline, status: :created, project: project)
@@ -162,6 +162,23 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '#merge_request?' do
+ let(:pipeline) { create(:ci_pipeline, merge_request: merge_request) }
+ let(:merge_request) { create(:merge_request) }
+
+ it 'returns true' do
+ expect(pipeline).to be_merge_request
+ end
+
+ context 'when merge request is nil' do
+ let(:merge_request) { nil }
+
+ it 'returns false' do
+ expect(pipeline).not_to be_merge_request
+ end
+ end
+ end
+
describe '#detached_merge_request_pipeline?' do
subject { pipeline.detached_merge_request_pipeline? }
@@ -231,7 +248,7 @@ describe Ci::Pipeline, :mailer do
describe '#legacy_detached_merge_request_pipeline?' do
subject { pipeline.legacy_detached_merge_request_pipeline? }
- set(:merge_request) { create(:merge_request) }
+ let_it_be(:merge_request) { create(:merge_request) }
let(:ref) { 'feature' }
let(:target_sha) { nil }
@@ -367,48 +384,6 @@ describe Ci::Pipeline, :mailer do
end
end
- describe 'Validations for merge request pipelines' do
- let(:pipeline) do
- build(:ci_pipeline, source: source, merge_request: merge_request)
- end
-
- let(:merge_request) do
- create(:merge_request,
- source_project: project,
- source_branch: 'feature',
- target_project: project,
- target_branch: 'master')
- end
-
- context 'when source is merge request' do
- let(:source) { :merge_request_event }
-
- context 'when merge request is specified' do
- it { expect(pipeline).to be_valid }
- end
-
- context 'when merge request is empty' do
- let(:merge_request) { nil }
-
- it { expect(pipeline).not_to be_valid }
- end
- end
-
- context 'when source is web' do
- let(:source) { :web }
-
- context 'when merge request is specified' do
- it { expect(pipeline).not_to be_valid }
- end
-
- context 'when merge request is empty' do
- let(:merge_request) { nil }
-
- it { expect(pipeline).to be_valid }
- end
- end
- end
-
describe 'modules' do
it_behaves_like 'AtomicInternalId', validate_presence: false do
let(:internal_id_attribute) { :iid }
@@ -612,9 +587,9 @@ describe Ci::Pipeline, :mailer do
]
end
- context 'when source is merge request' do
+ context 'when pipeline is merge request' do
let(:pipeline) do
- create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request)
+ create(:ci_pipeline, merge_request: merge_request)
end
let(:merge_request) do
@@ -651,7 +626,7 @@ describe Ci::Pipeline, :mailer do
'CI_MERGE_REQUEST_TITLE' => merge_request.title,
'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
- 'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).join(','),
+ 'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
'CI_MERGE_REQUEST_EVENT_TYPE' => pipeline.merge_request_event_type.to_s)
end
@@ -1142,6 +1117,10 @@ describe Ci::Pipeline, :mailer do
end
describe 'pipeline caching' do
+ before do
+ pipeline.config_source = 'repository_source'
+ end
+
it 'performs ExpirePipelinesCacheWorker' do
expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id)
@@ -1210,6 +1189,32 @@ describe Ci::Pipeline, :mailer do
end
end
+ context 'when pipeline is bridge triggered' do
+ before do
+ pipeline.source_bridge = create(:ci_bridge)
+ end
+
+ context 'when source bridge is dependent on pipeline status' do
+ before do
+ allow(pipeline.source_bridge).to receive(:dependent?).and_return(true)
+ end
+
+ it 'schedules the pipeline bridge worker' do
+ expect(::Ci::PipelineBridgeStatusWorker).to receive(:perform_async)
+
+ pipeline.succeed!
+ end
+ end
+
+ context 'when source bridge is not dependent on pipeline status' do
+ it 'does not schedule the pipeline bridge worker' do
+ expect(::Ci::PipelineBridgeStatusWorker).not_to receive(:perform_async)
+
+ pipeline.succeed!
+ end
+ end
+ end
+
def auto_devops_pipelines_completed_total(status)
Gitlab::Metrics.counter(:auto_devops_pipelines_completed_total, 'Number of completed auto devops pipelines').get(status: status)
end
@@ -1237,9 +1242,9 @@ describe Ci::Pipeline, :mailer do
is_expected.to be_truthy
end
- context 'when source is merge request' do
+ context 'when pipeline is merge request' do
let(:pipeline) do
- create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request)
+ create(:ci_pipeline, merge_request: merge_request)
end
let(:merge_request) do
@@ -2652,6 +2657,40 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '#test_reports_count', :use_clean_rails_memory_store_caching do
+ subject { pipeline.test_reports }
+
+ context 'when pipeline has multiple builds with test reports' do
+ let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline, project: project) }
+ let!(:build_java) { create(:ci_build, :success, name: 'java', pipeline: pipeline, project: project) }
+
+ before do
+ create(:ci_job_artifact, :junit, job: build_rspec, project: project)
+ create(:ci_job_artifact, :junit_with_ant, job: build_java, project: project)
+ end
+
+ it 'returns test report count equal to test reports total_count' do
+ expect(subject.total_count).to eq(7)
+ expect(subject.total_count).to eq(pipeline.test_reports_count)
+ end
+
+ it 'reads from cache when records are cached' do
+ expect(Rails.cache.fetch(['project', project.id, 'pipeline', pipeline.id, 'test_reports_count'], force: false)).to be_nil
+
+ pipeline.test_reports_count
+
+ expect(ActiveRecord::QueryRecorder.new { pipeline.test_reports_count }.count).to eq(0)
+ end
+ end
+
+ context 'when pipeline does not have any builds with test reports' do
+ it 'returns empty test report count' do
+ expect(subject.total_count).to eq(0)
+ expect(subject.total_count).to eq(pipeline.test_reports_count)
+ end
+ end
+ end
+
describe '#total_size' do
let!(:build_job1) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
let!(:build_job2) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
@@ -2883,4 +2922,82 @@ describe Ci::Pipeline, :mailer do
end
end
end
+
+ describe 'upstream status interactions' do
+ context 'when a pipeline has an upstream status' do
+ context 'when an upstream status is a bridge' do
+ let(:bridge) { create(:ci_bridge, status: :pending) }
+
+ before do
+ create(:ci_sources_pipeline, pipeline: pipeline, source_job: bridge)
+ end
+
+ describe '#bridge_triggered?' do
+ it 'is a pipeline triggered by a bridge' do
+ expect(pipeline).to be_bridge_triggered
+ end
+ end
+
+ describe '#source_job' do
+ it 'has a correct source job' do
+ expect(pipeline.source_job).to eq bridge
+ end
+ end
+
+ describe '#source_bridge' do
+ it 'has a correct bridge source' do
+ expect(pipeline.source_bridge).to eq bridge
+ end
+ end
+
+ describe '#update_bridge_status!' do
+ it 'can update bridge status if it is running' do
+ pipeline.update_bridge_status!
+
+ expect(bridge.reload).to be_success
+ end
+
+ it 'can not update bridge status if it is not active' do
+ bridge.success!
+
+ expect { pipeline.update_bridge_status! }
+ .to raise_error Ci::Pipeline::BridgeStatusError
+ end
+ end
+ end
+
+ context 'when an upstream status is a build' do
+ let(:build) { create(:ci_build) }
+
+ before do
+ create(:ci_sources_pipeline, pipeline: pipeline, source_job: build)
+ end
+
+ describe '#bridge_triggered?' do
+ it 'is a pipeline that has not been triggered by a bridge' do
+ expect(pipeline).not_to be_bridge_triggered
+ end
+ end
+
+ describe '#source_job' do
+ it 'has a correct source job' do
+ expect(pipeline.source_job).to eq build
+ end
+ end
+
+ describe '#source_bridge' do
+ it 'does not have a bridge source' do
+ expect(pipeline.source_bridge).to be_nil
+ end
+ end
+
+ describe '#update_bridge_status!' do
+ it 'can not update upstream job status' do
+ expect { pipeline.update_bridge_status! }
+ .to raise_error ArgumentError
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index 87dbcbf870e..1e0544c14c5 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Ci::Processable do
- set(:project) { create(:project) }
- set(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
describe '#aggregated_needs_names' do
let(:with_aggregated_needs) { pipeline.processables.select_with_aggregated_needs(project) }
@@ -52,4 +52,97 @@ describe Ci::Processable do
end
end
end
+
+ describe 'validate presence of scheduling_type' do
+ context 'on create' do
+ let(:processable) do
+ build(
+ :ci_build, :created, project: project, pipeline: pipeline,
+ importing: importing, scheduling_type: nil
+ )
+ end
+
+ context 'when importing' do
+ let(:importing) { true }
+
+ context 'when validate_scheduling_type_of_processables is true' do
+ before do
+ stub_feature_flags(validate_scheduling_type_of_processables: true)
+ end
+
+ it 'does not validate' do
+ expect(processable).to be_valid
+ end
+ end
+
+ context 'when validate_scheduling_type_of_processables is false' do
+ before do
+ stub_feature_flags(validate_scheduling_type_of_processables: false)
+ end
+
+ it 'does not validate' do
+ expect(processable).to be_valid
+ end
+ end
+ end
+
+ context 'when not importing' do
+ let(:importing) { false }
+
+ context 'when validate_scheduling_type_of_processables is true' do
+ before do
+ stub_feature_flags(validate_scheduling_type_of_processables: true)
+ end
+
+ it 'validates' do
+ expect(processable).not_to be_valid
+ end
+ end
+
+ context 'when validate_scheduling_type_of_processables is false' do
+ before do
+ stub_feature_flags(validate_scheduling_type_of_processables: false)
+ end
+
+ it 'does not validate' do
+ expect(processable).to be_valid
+ end
+ end
+ end
+ end
+
+ context 'on update' do
+ let(:processable) { create(:ci_build, :created, project: project, pipeline: pipeline) }
+
+ it 'does not validate' do
+ processable.scheduling_type = nil
+ expect(processable).to be_valid
+ end
+ end
+ end
+
+ describe '.populate_scheduling_type!' do
+ let!(:build_without_needs) { create(:ci_build, project: project, pipeline: pipeline) }
+ let!(:build_with_needs) { create(:ci_build, project: project, pipeline: pipeline) }
+ let!(:needs_relation) { create(:ci_build_need, build: build_with_needs) }
+ let!(:another_build) { create(:ci_build, project: project) }
+
+ before do
+ Ci::Processable.update_all(scheduling_type: nil)
+ end
+
+ it 'populates scheduling_type of processables' do
+ expect do
+ pipeline.processables.populate_scheduling_type!
+ end.to change(pipeline.processables.where(scheduling_type: nil), :count).from(2).to(0)
+
+ expect(build_without_needs.reload.scheduling_type).to eq('stage')
+ expect(build_with_needs.reload.scheduling_type).to eq('dag')
+ end
+
+ it 'does not affect processables from other pipelines' do
+ pipeline.processables.populate_scheduling_type!
+ expect(another_build.reload.scheduling_type).to be_nil
+ end
+ end
end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 5c9a03a26ec..0192c8ed17d 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -25,7 +25,7 @@ describe Ci::Runner do
end
end
- context '#exactly_one_group' do
+ describe '#exactly_one_group' do
let(:group) { create(:group) }
let(:runner) { create(:ci_runner, :group, groups: [group]) }
@@ -38,8 +38,8 @@ describe Ci::Runner do
end
context 'runner_type validations' do
- set(:group) { create(:group) }
- set(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project) }
let(:group_runner) { create(:ci_runner, :group, groups: [group]) }
let(:project_runner) { create(:ci_runner, :project, projects: [project]) }
let(:instance_runner) { create(:ci_runner, :instance) }
@@ -322,7 +322,7 @@ describe Ci::Runner do
end
describe '#can_pick?' do
- set(:pipeline) { create(:ci_pipeline) }
+ let_it_be(:pipeline) { create(:ci_pipeline) }
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:runner_project) { build.project }
let(:runner) { create(:ci_runner, :project, projects: [runner_project], tag_list: tag_list, run_untagged: run_untagged) }
diff --git a/spec/models/ci/sources/pipeline_spec.rb b/spec/models/ci/sources/pipeline_spec.rb
index 63bee5bfb55..5023747b487 100644
--- a/spec/models/ci/sources/pipeline_spec.rb
+++ b/spec/models/ci/sources/pipeline_spec.rb
@@ -8,6 +8,7 @@ describe Ci::Sources::Pipeline do
it { is_expected.to belong_to(:source_project) }
it { is_expected.to belong_to(:source_job) }
+ it { is_expected.to belong_to(:source_bridge) }
it { is_expected.to belong_to(:source_pipeline) }
it { is_expected.to validate_presence_of(:project) }
diff --git a/spec/models/clusters/applications/cert_manager_spec.rb b/spec/models/clusters/applications/cert_manager_spec.rb
index 6b85f9bb127..31209a70018 100644
--- a/spec/models/clusters/applications/cert_manager_spec.rb
+++ b/spec/models/clusters/applications/cert_manager_spec.rb
@@ -65,7 +65,7 @@ describe Clusters::Applications::CertManager do
cert_manager.email = cert_email
end
- it 'uses his/her email to register issuer with certificate provider' do
+ it 'uses their email to register issuer with certificate provider' do
expect(subject.files).to eq(cert_manager.files.merge(cluster_issuer_file))
end
end
diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb
index c1158698601..c086ab23058 100644
--- a/spec/models/clusters/applications/ingress_spec.rb
+++ b/spec/models/clusters/applications/ingress_spec.rb
@@ -102,7 +102,7 @@ describe Clusters::Applications::Ingress do
it 'is initialized with ingress arguments' do
expect(subject.name).to eq('ingress')
expect(subject.chart).to eq('stable/nginx-ingress')
- expect(subject.version).to eq('1.22.1')
+ expect(subject.version).to eq('1.29.3')
expect(subject).to be_rbac
expect(subject.files).to eq(ingress.files)
end
@@ -119,7 +119,7 @@ describe Clusters::Applications::Ingress do
let(:ingress) { create(:clusters_applications_ingress, :errored, version: 'nginx') }
it 'is initialized with the locked version' do
- expect(subject.version).to eq('1.22.1')
+ expect(subject.version).to eq('1.29.3')
end
end
end
diff --git a/spec/models/clusters/applications/knative_spec.rb b/spec/models/clusters/applications/knative_spec.rb
index 68ac3f0d483..993cc7d0203 100644
--- a/spec/models/clusters/applications/knative_spec.rb
+++ b/spec/models/clusters/applications/knative_spec.rb
@@ -17,7 +17,7 @@ describe Clusters::Applications::Knative do
end
describe 'associations' do
- it { is_expected.to have_one(:serverless_domain_cluster).class_name('Serverless::DomainCluster').with_foreign_key('clusters_applications_knative_id').inverse_of(:knative) }
+ it { is_expected.to have_one(:serverless_domain_cluster).class_name('::Serverless::DomainCluster').with_foreign_key('clusters_applications_knative_id').inverse_of(:knative) }
end
describe 'when cloud run is enabled' do
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index cf33d2b4273..ba344a234b8 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
describe Clusters::Applications::Prometheus do
include KubernetesHelpers
+ include StubRequests
include_examples 'cluster application core specs', :clusters_applications_prometheus
include_examples 'cluster application status specs', :clusters_applications_prometheus
@@ -320,6 +321,16 @@ describe Clusters::Applications::Prometheus do
it { is_expected.to be_falsey }
end
+
+ context 'when the kubernetes URL is blocked' do
+ before do
+ blocked_ip = '127.0.0.1' # localhost addresses are blocked by default
+
+ stub_all_dns(cluster.platform.api_url, ip_address: blocked_ip)
+ end
+
+ it { is_expected.to be_falsey }
+ end
end
context 'when a kubenetes client is not present' do
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 44ca4a06e2d..23592cb0c70 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -26,6 +26,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
it { is_expected.to have_one(:application_runner) }
it { is_expected.to have_many(:kubernetes_namespaces) }
it { is_expected.to have_one(:cluster_project) }
+ it { is_expected.to have_many(:deployment_clusters) }
it { is_expected.to delegate_method(:status).to(:provider) }
it { is_expected.to delegate_method(:status_reason).to(:provider) }
@@ -544,7 +545,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
describe '#applications' do
- set(:cluster) { create(:cluster) }
+ let_it_be(:cluster, reload: true) { create(:cluster) }
subject { cluster.applications }
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index 4271cf9f1b3..f0e6dd53664 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -361,8 +361,10 @@ describe Clusters::Platforms::Kubernetes do
describe '#calculate_reactive_cache_for' do
let(:service) { create(:cluster_platform_kubernetes, :configured) }
- let(:pod) { kube_pod }
- let(:namespace) { pod["metadata"]["namespace"] }
+ let(:expected_pod_cached_data) do
+ kube_pod.tap { |kp| kp['metadata'].delete('namespace') }
+ end
+ let(:namespace) { "project-namespace" }
let(:environment) { instance_double(Environment, deployment_namespace: namespace) }
subject { service.calculate_reactive_cache_for(environment) }
@@ -381,7 +383,7 @@ describe Clusters::Platforms::Kubernetes do
stub_kubeclient_deployments(namespace)
end
- it { is_expected.to include(pods: [pod]) }
+ it { is_expected.to include(pods: [expected_pod_cached_data]) }
end
context 'when kubernetes responds with 500s' do
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index 782d1ac4552..26cc68eb58c 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -3,8 +3,10 @@
require 'spec_helper'
describe Commit do
- let(:project) { create(:project, :public, :repository) }
- let(:commit) { project.commit }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, :repository) }
+ let_it_be(:project_snippet) { create(:project_snippet, :repository) }
+ let(:commit) { project.commit }
describe 'modules' do
subject { described_class }
@@ -17,49 +19,67 @@ describe Commit do
end
describe '.lazy' do
- set(:project) { create(:project, :repository) }
+ shared_examples '.lazy checks' do
+ context 'when the commits are found' do
+ let(:oids) do
+ %w(
+ 498214de67004b1da3d820901307bed2a68a8ef6
+ c642fe9b8b9f28f9225d7ea953fe14e74748d53b
+ 6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9
+ 048721d90c449b244b7b4c53a9186b04330174ec
+ 281d3a76f31c812dbf48abce82ccf6860adedd81
+ )
+ end
- context 'when the commits are found' do
- let(:oids) do
- %w(
- 498214de67004b1da3d820901307bed2a68a8ef6
- c642fe9b8b9f28f9225d7ea953fe14e74748d53b
- 6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9
- 048721d90c449b244b7b4c53a9186b04330174ec
- 281d3a76f31c812dbf48abce82ccf6860adedd81
- )
- end
+ subject { oids.map { |oid| described_class.lazy(container, oid) } }
- subject { oids.map { |oid| described_class.lazy(project, oid) } }
+ it 'batches requests for commits' do
+ expect(container.repository).to receive(:commits_by).once.and_call_original
- it 'batches requests for commits' do
- expect(project.repository).to receive(:commits_by).once.and_call_original
+ subject.first.title
+ subject.last.title
+ end
- subject.first.title
- subject.last.title
- end
+ it 'maintains ordering' do
+ subject.each_with_index do |commit, i|
+ expect(commit.id).to eq(oids[i])
+ end
+ end
- it 'maintains ordering' do
- subject.each_with_index do |commit, i|
- expect(commit.id).to eq(oids[i])
+ it 'does not attempt to replace methods via BatchLoader' do
+ subject.each do |commit|
+ expect(commit).to receive(:method_missing).and_call_original
+
+ commit.id
+ end
end
end
- it 'does not attempt to replace methods via BatchLoader' do
- subject.each do |commit|
- expect(commit).to receive(:method_missing).and_call_original
+ context 'when not found' do
+ it 'returns nil as commit' do
+ commit = described_class.lazy(container, 'deadbeef').__sync
- commit.id
+ expect(commit).to be_nil
end
end
end
- context 'when not found' do
- it 'returns nil as commit' do
- commit = described_class.lazy(project, 'deadbeef').__sync
+ context 'with project' do
+ let(:container) { project }
- expect(commit).to be_nil
- end
+ it_behaves_like '.lazy checks'
+ end
+
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like '.lazy checks'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like '.lazy checks'
end
end
@@ -231,15 +251,43 @@ describe Commit do
end
describe '#to_reference' do
- let(:project) { create(:project, :repository, path: 'sample-project') }
+ context 'with project' do
+ let(:project) { create(:project, :repository, path: 'sample-project') }
+
+ it 'returns a String reference to the object' do
+ expect(commit.to_reference).to eq commit.id
+ end
+
+ it 'supports a cross-project reference' do
+ another_project = build(:project, :repository, name: 'another-project', namespace: project.namespace)
+ expect(commit.to_reference(another_project)).to eq "sample-project@#{commit.id}"
+ end
+ end
+
+ context 'with personal snippet' do
+ let(:commit) { personal_snippet.commit }
- it 'returns a String reference to the object' do
- expect(commit.to_reference).to eq commit.id
+ it 'returns a String reference to the object' do
+ expect(commit.to_reference).to eq "$#{personal_snippet.id}@#{commit.id}"
+ end
+
+ it 'supports a cross-snippet reference' do
+ another_snippet = build(:personal_snippet)
+ expect(commit.to_reference(another_snippet)).to eq "$#{personal_snippet.id}@#{commit.id}"
+ end
end
- it 'supports a cross-project reference' do
- another_project = build(:project, :repository, name: 'another-project', namespace: project.namespace)
- expect(commit.to_reference(another_project)).to eq "sample-project@#{commit.id}"
+ context 'with project snippet' do
+ let(:commit) { project_snippet.commit }
+
+ it 'returns a String reference to the object' do
+ expect(commit.to_reference).to eq "$#{project_snippet.id}@#{commit.id}"
+ end
+
+ it 'supports a cross-snippet project reference' do
+ another_snippet = build(:personal_snippet)
+ expect(commit.to_reference(another_snippet)).to eq "#{project_snippet.project.path}$#{project_snippet.id}@#{commit.id}"
+ end
end
end
@@ -264,13 +312,41 @@ describe Commit do
describe '#reference_link_text' do
let(:project) { create(:project, :repository, path: 'sample-project') }
- it 'returns a String reference to the object' do
- expect(commit.reference_link_text).to eq commit.short_id
+ context 'with project' do
+ it 'returns a String reference to the object' do
+ expect(commit.reference_link_text).to eq commit.short_id
+ end
+
+ it 'supports a cross-project reference' do
+ another_project = build(:project, :repository, name: 'another-project', namespace: project.namespace)
+ expect(commit.reference_link_text(another_project)).to eq "sample-project@#{commit.short_id}"
+ end
+ end
+
+ context 'with personal snippet' do
+ let(:commit) { personal_snippet.commit }
+
+ it 'returns a String reference to the object' do
+ expect(commit.reference_link_text).to eq "$#{personal_snippet.id}@#{commit.short_id}"
+ end
+
+ it 'supports a cross-snippet reference' do
+ another_snippet = build(:personal_snippet, :repository)
+ expect(commit.reference_link_text(another_snippet)).to eq "$#{personal_snippet.id}@#{commit.short_id}"
+ end
end
- it 'supports a cross-project reference' do
- another_project = build(:project, :repository, name: 'another-project', namespace: project.namespace)
- expect(commit.reference_link_text(another_project)).to eq "sample-project@#{commit.short_id}"
+ context 'with project snippet' do
+ let(:commit) { project_snippet.commit }
+
+ it 'returns a String reference to the object' do
+ expect(commit.reference_link_text).to eq "$#{project_snippet.id}@#{commit.short_id}"
+ end
+
+ it 'supports a cross-snippet project reference' do
+ another_snippet = build(:project_snippet, :repository)
+ expect(commit.reference_link_text(another_snippet)).to eq "#{project_snippet.project.path}$#{project_snippet.id}@#{commit.short_id}"
+ end
end
end
@@ -401,6 +477,26 @@ eos
expect(commit.closes_issues).to be_empty
end
+
+ context 'with personal snippet' do
+ let(:commit) { personal_snippet.commit }
+
+ it 'does not call Gitlab::ClosingIssueExtractor' do
+ expect(Gitlab::ClosingIssueExtractor).not_to receive(:new)
+
+ commit.closes_issues
+ end
+ end
+
+ context 'with project snippet' do
+ let(:commit) { project_snippet.commit }
+
+ it 'does not call Gitlab::ClosingIssueExtractor' do
+ expect(Gitlab::ClosingIssueExtractor).not_to receive(:new)
+
+ commit.closes_issues
+ end
+ end
end
it_behaves_like 'a mentionable' do
@@ -597,19 +693,39 @@ eos
end
describe '.from_hash' do
- let(:new_commit) { described_class.from_hash(commit.to_hash, project) }
+ subject { described_class.from_hash(commit.to_hash, container) }
+
+ shared_examples 'returns Commit' do
+ it 'returns a Commit' do
+ expect(subject).to be_an_instance_of(described_class)
+ end
+
+ it 'wraps a Gitlab::Git::Commit' do
+ expect(subject.raw).to be_an_instance_of(Gitlab::Git::Commit)
+ end
- it 'returns a Commit' do
- expect(new_commit).to be_an_instance_of(described_class)
+ it 'stores the correct commit fields' do
+ expect(subject.id).to eq(commit.id)
+ expect(subject.message).to eq(commit.message)
+ end
end
- it 'wraps a Gitlab::Git::Commit' do
- expect(new_commit.raw).to be_an_instance_of(Gitlab::Git::Commit)
+ context 'with project' do
+ let(:container) { project }
+
+ it_behaves_like 'returns Commit'
end
- it 'stores the correct commit fields' do
- expect(new_commit.id).to eq(commit.id)
- expect(new_commit.message).to eq(commit.message)
+ context 'with personal snippet' do
+ let(:container) { personal_snippet }
+
+ it_behaves_like 'returns Commit'
+ end
+
+ context 'with project snippet' do
+ let(:container) { project_snippet }
+
+ it_behaves_like 'returns Commit'
end
end
@@ -670,5 +786,39 @@ eos
expect(commit1.merge_requests).to contain_exactly(merge_request1, merge_request2)
expect(commit2.merge_requests).to contain_exactly(merge_request1)
end
+
+ context 'with personal snippet' do
+ it 'returns empty relation' do
+ expect(personal_snippet.repository.commit.merge_requests).to eq MergeRequest.none
+ end
+ end
+
+ context 'with project snippet' do
+ it 'returns empty relation' do
+ expect(project_snippet.project).not_to receive(:merge_requests)
+ expect(project_snippet.repository.commit.merge_requests).to eq MergeRequest.none
+ end
+ end
+ end
+
+ describe 'signed commits' do
+ let(:gpg_signed_commit) { project.commit_by(oid: '0b4bc9a49b562e85de7cc9e834518ea6828729b9') }
+ let(:x509_signed_commit) { project.commit_by(oid: '189a6c924013fc3fe40d6f1ec1dc20214183bc97') }
+ let(:unsigned_commit) { project.commit_by(oid: '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51') }
+ let!(:commit) { create(:commit, project: project) }
+
+ it 'returns signature_type properly' do
+ expect(gpg_signed_commit.signature_type).to eq(:PGP)
+ expect(x509_signed_commit.signature_type).to eq(:X509)
+ expect(unsigned_commit.signature_type).to eq(:NONE)
+ expect(commit.signature_type).to eq(:NONE)
+ end
+
+ it 'returns has_signature? properly' do
+ expect(gpg_signed_commit.has_signature?).to be_truthy
+ expect(x509_signed_commit.has_signature?).to be_truthy
+ expect(unsigned_commit.has_signature?).to be_falsey
+ expect(commit.has_signature?).to be_falsey
+ end
end
end
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 40652614101..e1a748da7fd 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe CommitStatus do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
- set(:pipeline) do
+ let_it_be(:pipeline) do
create(:ci_pipeline, project: project, sha: project.commit.id)
end
diff --git a/spec/models/concerns/avatarable_spec.rb b/spec/models/concerns/avatarable_spec.rb
index c750be6b75c..100953549ea 100644
--- a/spec/models/concerns/avatarable_spec.rb
+++ b/spec/models/concerns/avatarable_spec.rb
@@ -48,14 +48,14 @@ describe Avatarable do
end
it 'calls local_url twice for path and URLs' do
- expect(project.avatar).to receive(:local_url).exactly(2).times.and_call_original
+ expect(project.avatar).to receive(:local_url).twice.and_call_original
expect(project.avatar_path(only_path: true)).to eq(avatar_path)
expect(project.avatar_path(only_path: false)).to eq(avatar_url)
end
it 'calls local_url twice for different sizes' do
- expect(project.avatar).to receive(:local_url).exactly(2).times.and_call_original
+ expect(project.avatar).to receive(:local_url).twice.and_call_original
expect(project.avatar_path).to eq(avatar_path)
expect(project.avatar_path(size: 40)).to eq(avatar_path + "?width=40")
@@ -64,7 +64,7 @@ describe Avatarable do
it 'handles unpersisted objects' do
new_project = build(:project, :with_avatar)
path = [relative_url_root, new_project.avatar.local_url].join
- expect(new_project.avatar).to receive(:local_url).exactly(2).times.and_call_original
+ expect(new_project.avatar).to receive(:local_url).twice.and_call_original
2.times do
expect(new_project.avatar_path).to eq(path)
diff --git a/spec/models/concerns/batch_destroy_dependent_associations_spec.rb b/spec/models/concerns/batch_destroy_dependent_associations_spec.rb
index 1fe90d3cc9a..d2373926802 100644
--- a/spec/models/concerns/batch_destroy_dependent_associations_spec.rb
+++ b/spec/models/concerns/batch_destroy_dependent_associations_spec.rb
@@ -15,7 +15,7 @@ describe BatchDestroyDependentAssociations do
end
describe '#dependent_associations_to_destroy' do
- set(:project) { TestProject.new }
+ let_it_be(:project) { TestProject.new }
it 'returns the right associations' do
expect(project.dependent_associations_to_destroy.map(&:name)).to match_array([:builds])
@@ -23,9 +23,9 @@ describe BatchDestroyDependentAssociations do
end
describe '#destroy_dependent_associations_in_batches' do
- set(:project) { create(:project) }
- set(:build) { create(:ci_build, project: project) }
- set(:notification_setting) { create(:notification_setting, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:build) { create(:ci_build, project: project) }
+ let_it_be(:notification_setting) { create(:notification_setting, project: project) }
let!(:todos) { create(:todo, project: project) }
it 'destroys multiple builds' do
diff --git a/spec/models/concerns/bulk_insert_safe_spec.rb b/spec/models/concerns/bulk_insert_safe_spec.rb
new file mode 100644
index 00000000000..91884680738
--- /dev/null
+++ b/spec/models/concerns/bulk_insert_safe_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe BulkInsertSafe do
+ class BulkInsertItem < ApplicationRecord
+ include BulkInsertSafe
+ end
+
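+ # InheritedUnsafeMethods registers an `after_save` callback, which the specs
+ # below expect BulkInsertSafe to reject on include; InheritedSafeMethods only
+ # registers `after_initialize`, which is expected to be allowed.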
+ module InheritedUnsafeMethods
+ extend ActiveSupport::Concern
+
+ included do
+ after_save -> { "unsafe" }
+ end
+ end
+
+ module InheritedSafeMethods
+ extend ActiveSupport::Concern
+
+ included do
+ after_initialize -> { "safe" }
+ end
+ end
+
+ it_behaves_like 'a BulkInsertSafe model', BulkInsertItem
+
+ context 'when inheriting class methods' do
+ it 'raises an error when method is not bulk-insert safe' do
+ expect { BulkInsertItem.include(InheritedUnsafeMethods) }.to(
+ raise_error(subject::MethodNotAllowedError))
+ end
+
+ it 'does not raise an error when method is bulk-insert safe' do
+ expect { BulkInsertItem.include(InheritedSafeMethods) }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/models/concerns/delete_with_limit_spec.rb b/spec/models/concerns/delete_with_limit_spec.rb
new file mode 100644
index 00000000000..52085f970f3
--- /dev/null
+++ b/spec/models/concerns/delete_with_limit_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe DeleteWithLimit do
+ describe '.delete_with_limit' do
+ it 'deletes a limited amount of rows' do
+ create_list(:web_hook_log, 4)
+
+ expect do
+ WebHookLog.delete_with_limit(2)
+ end.to change { WebHookLog.count }.by(-2)
+ end
+ end
+end
diff --git a/spec/models/concerns/discussion_on_diff_spec.rb b/spec/models/concerns/discussion_on_diff_spec.rb
index baddca47dfa..f091861bd41 100644
--- a/spec/models/concerns/discussion_on_diff_spec.rb
+++ b/spec/models/concerns/discussion_on_diff_spec.rb
@@ -59,6 +59,18 @@ describe DiscussionOnDiff do
end
end
+ context "when the diff line does not exist on a corrupt diff note" do
+ subject { create(:diff_note_on_merge_request, line_number: 18).to_discussion }
+
+ before do
+ allow(subject).to receive(:diff_line) { nil }
+ end
+
+ it "returns an empty array" do
+ expect(truncated_lines).to eq([])
+ end
+ end
+
context 'when the discussion is on an image' do
subject { create(:image_diff_note_on_merge_request).to_discussion }
diff --git a/spec/models/concerns/mentionable_spec.rb b/spec/models/concerns/mentionable_spec.rb
index 883f678b8f5..13a3d1cdd82 100644
--- a/spec/models/concerns/mentionable_spec.rb
+++ b/spec/models/concerns/mentionable_spec.rb
@@ -26,6 +26,42 @@ describe Mentionable do
expect(mentionable.referenced_mentionables).to be_empty
end
end
+
+ describe '#any_mentionable_attributes_changed?' do
+ Message = Struct.new(:text)
+
+ let(:mentionable) { Example.new }
+ let(:changes) do
+ msg = Message.new('test')
+
+ changes = {}
+ changes[msg] = ['', 'some message']
+ changes[:random_sym_key] = ['', 'some message']
+ changes["random_string_key"] = ['', 'some message']
+ changes
+ end
+
+ it 'returns true with key string' do
+ changes["message"] = ['', 'some message']
+
+ allow(mentionable).to receive(:saved_changes).and_return(changes)
+
+ expect(mentionable.send(:any_mentionable_attributes_changed?)).to be true
+ end
+
+ it 'returns false with key symbol' do
+ changes[:message] = ['', 'some message']
+ allow(mentionable).to receive(:saved_changes).and_return(changes)
+
+ expect(mentionable.send(:any_mentionable_attributes_changed?)).to be false
+ end
+
+ it 'returns false when no attr_mentionable keys' do
+ allow(mentionable).to receive(:saved_changes).and_return(changes)
+
+ expect(mentionable.send(:any_mentionable_attributes_changed?)).to be false
+ end
+ end
end
describe Issue, "Mentionable" do
diff --git a/spec/models/concerns/milestoneable_spec.rb b/spec/models/concerns/milestoneable_spec.rb
index 186bf2c6290..0b19c0542ee 100644
--- a/spec/models/concerns/milestoneable_spec.rb
+++ b/spec/models/concerns/milestoneable_spec.rb
@@ -35,41 +35,6 @@ describe Milestoneable do
it { is_expected.to be_invalid }
end
-
- context 'when valid and saving' do
- it 'copies the value to the new milestones relationship' do
- subject.save!
-
- expect(subject.milestones).to match_array([milestone])
- end
-
- context 'with old values in milestones relationship' do
- let(:old_milestone) { create(:milestone, project: project) }
-
- before do
- subject.milestone = old_milestone
- subject.save!
- end
-
- it 'replaces old values' do
- expect(subject.milestones).to match_array([old_milestone])
-
- subject.milestone = milestone
- subject.save!
-
- expect(subject.milestones).to match_array([milestone])
- end
-
- it 'can nullify the milestone' do
- expect(subject.milestones).to match_array([old_milestone])
-
- subject.milestone = nil
- subject.save!
-
- expect(subject.milestones).to match_array([])
- end
- end
- end
end
end
diff --git a/spec/models/concerns/project_features_compatibility_spec.rb b/spec/models/concerns/project_features_compatibility_spec.rb
index 1fe176ab5af..8346c4ad4cc 100644
--- a/spec/models/concerns/project_features_compatibility_spec.rb
+++ b/spec/models/concerns/project_features_compatibility_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
describe ProjectFeaturesCompatibility do
let(:project) { create(:project) }
- let(:features_except_repository) { %w(issues wiki builds merge_requests snippets) }
- let(:features) { features_except_repository + ['repository'] }
+ let(:features_enabled) { %w(issues wiki builds merge_requests snippets) }
+ let(:features) { features_enabled + %w(repository pages) }
# We had issues_enabled, snippets_enabled, builds_enabled, merge_requests_enabled and issues_enabled fields on projects table
# All those fields got moved to a new table called project_feature and are now integers instead of booleans
@@ -13,37 +13,53 @@ describe ProjectFeaturesCompatibility do
# So we can keep it compatible
it "converts fields from 'true' to ProjectFeature::ENABLED" do
- features_except_repository.each do |feature|
+ features_enabled.each do |feature|
project.update_attribute("#{feature}_enabled".to_sym, "true")
expect(project.project_feature.public_send("#{feature}_access_level")).to eq(ProjectFeature::ENABLED)
end
end
it "converts fields from 'false' to ProjectFeature::DISABLED" do
- features_except_repository.each do |feature|
+ features_enabled.each do |feature|
project.update_attribute("#{feature}_enabled".to_sym, "false")
expect(project.project_feature.public_send("#{feature}_access_level")).to eq(ProjectFeature::DISABLED)
end
end
it "converts fields from true to ProjectFeature::ENABLED" do
- features_except_repository.each do |feature|
+ features_enabled.each do |feature|
project.update_attribute("#{feature}_enabled".to_sym, true)
expect(project.project_feature.public_send("#{feature}_access_level")).to eq(ProjectFeature::ENABLED)
end
end
it "converts fields from false to ProjectFeature::DISABLED" do
- features_except_repository.each do |feature|
+ features_enabled.each do |feature|
project.update_attribute("#{feature}_enabled".to_sym, false)
expect(project.project_feature.public_send("#{feature}_access_level")).to eq(ProjectFeature::DISABLED)
end
end
- it "accepts private as ProjectFeature::PRIVATE" do
- features.each do |feature|
- project.update!("#{feature}_access_level".to_sym => 'private')
- expect(project.project_feature.public_send("#{feature}_access_level")).to eq(ProjectFeature::PRIVATE)
+ describe "access levels" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:access_level, :expected_result) do
+ 'disabled' | ProjectFeature::DISABLED
+ 'private' | ProjectFeature::PRIVATE
+ 'enabled' | ProjectFeature::ENABLED
+ 'public' | ProjectFeature::PUBLIC
+ end
+
+ with_them do
+ it "accepts access level" do
+ features.each do |feature|
+ # Only pages supports the public access level
+ next if feature != 'pages' && access_level == 'public'
+
+ project.update!("#{feature}_access_level".to_sym => access_level)
+ expect(project.project_feature.public_send("#{feature}_access_level")).to eq(expected_result)
+ end
+ end
end
end
end
diff --git a/spec/models/concerns/reactive_caching_spec.rb b/spec/models/concerns/reactive_caching_spec.rb
index 4af6906ce2c..6d4eeae641f 100644
--- a/spec/models/concerns/reactive_caching_spec.rb
+++ b/spec/models/concerns/reactive_caching_spec.rb
@@ -165,11 +165,25 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
describe '#exclusively_update_reactive_cache!' do
subject(:go!) { instance.exclusively_update_reactive_cache! }
+ shared_examples 'successful cache' do
+ it 'caches the result of #calculate_reactive_cache' do
+ go!
+
+ expect(read_reactive_cache(instance)).to eq(calculation.call)
+ end
+
+ it 'does not raise the exception' do
+ expect { go! }.not_to raise_exception(ReactiveCaching::ExceededReactiveCacheLimit)
+ end
+ end
+
context 'when the lease is free and lifetime is not exceeded' do
before do
- stub_reactive_cache(instance, "preexisting")
+ stub_reactive_cache(instance, 'preexisting')
end
+ it_behaves_like 'successful cache'
+
it 'takes and releases the lease' do
expect_to_obtain_exclusive_lease(cache_key, 'uuid')
expect_to_cancel_exclusive_lease(cache_key, 'uuid')
@@ -177,19 +191,13 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
go!
end
- it 'caches the result of #calculate_reactive_cache' do
- go!
-
- expect(read_reactive_cache(instance)).to eq(calculation.call)
- end
-
- it "enqueues a repeat worker" do
+ it 'enqueues a repeat worker' do
expect_reactive_cache_update_queued(instance)
go!
end
- it "calls a reactive_cache_updated only once if content did not change on subsequent update" do
+ it 'calls a reactive_cache_updated only once if content did not change on subsequent update' do
expect(instance).to receive(:calculate_reactive_cache).twice
expect(instance).to receive(:reactive_cache_updated).once
@@ -202,6 +210,43 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
go!
end
+ context 'when calculated object size exceeds default reactive_cache_hard_limit' do
+ let(:calculation) { -> { 'a' * 2 * 1.megabyte } }
+
+ shared_examples 'ExceededReactiveCacheLimit' do
+ it 'raises ExceededReactiveCacheLimit exception and does not cache new data' do
+ expect { go! }.to raise_exception(ReactiveCaching::ExceededReactiveCacheLimit)
+
+ expect(read_reactive_cache(instance)).not_to eq(calculation.call)
+ end
+ end
+
+ context 'when reactive_cache_hard_limit feature flag is enabled' do
+ it_behaves_like 'ExceededReactiveCacheLimit'
+
+ context 'when reactive_cache_hard_limit is overridden' do
+ let(:test_class) { Class.new(CacheTest) { self.reactive_cache_hard_limit = 3.megabytes } }
+ let(:instance) { test_class.new(666, &calculation) }
+
+ it_behaves_like 'successful cache'
+
+ context 'when cache size is over the overridden limit' do
+ let(:calculation) { -> { 'a' * 4 * 1.megabyte } }
+
+ it_behaves_like 'ExceededReactiveCacheLimit'
+ end
+ end
+ end
+
+ context 'when reactive_cache_limit feature flag is disabled' do
+ before do
+ stub_feature_flags(reactive_cache_limit: false)
+ end
+
+ it_behaves_like 'successful cache'
+ end
+ end
+
context 'and #calculate_reactive_cache raises an exception' do
before do
stub_reactive_cache(instance, "preexisting")
@@ -256,8 +301,8 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
it { expect(subject.reactive_cache_lease_timeout).to be_a(ActiveSupport::Duration) }
it { expect(subject.reactive_cache_refresh_interval).to be_a(ActiveSupport::Duration) }
it { expect(subject.reactive_cache_lifetime).to be_a(ActiveSupport::Duration) }
-
it { expect(subject.reactive_cache_key).to respond_to(:call) }
+ it { expect(subject.reactive_cache_hard_limit).to be_a(Integer) }
it { expect(subject.reactive_cache_worker_finder).to respond_to(:call) }
end
end
diff --git a/spec/models/concerns/redis_cacheable_spec.rb b/spec/models/concerns/redis_cacheable_spec.rb
index a9dca27f258..f88d64e2013 100644
--- a/spec/models/concerns/redis_cacheable_spec.rb
+++ b/spec/models/concerns/redis_cacheable_spec.rb
@@ -28,7 +28,7 @@ describe RedisCacheable do
end
describe '#cached_attribute' do
- subject { instance.cached_attribute(payload.keys.first) }
+ subject { instance.cached_attribute(payload.each_key.first) }
it 'gets the cache attribute' do
Gitlab::Redis::SharedState.with do |redis|
@@ -36,7 +36,7 @@ describe RedisCacheable do
.and_return(payload.to_json)
end
- expect(subject).to eq(payload.values.first)
+ expect(subject).to eq(payload.each_value.first)
end
end
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index f78a089bc2e..c891fdcb6b5 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -58,7 +58,7 @@ describe Group, 'Routable' do
end
end
- context '.find_by_full_path' do
+ describe '.find_by_full_path' do
let!(:nested_group) { create(:group, parent: group) }
context 'without any redirect routes' do
diff --git a/spec/models/concerns/sortable_spec.rb b/spec/models/concerns/sortable_spec.rb
index 184f7986a6f..18ac4d19938 100644
--- a/spec/models/concerns/sortable_spec.rb
+++ b/spec/models/concerns/sortable_spec.rb
@@ -4,17 +4,18 @@ require 'spec_helper'
describe Sortable do
describe '.order_by' do
+ let(:arel_table) { Group.arel_table }
let(:relation) { Group.all }
describe 'ordering by id' do
it 'ascending' do
- expect(relation).to receive(:reorder).with(id: :asc)
+ expect(relation).to receive(:reorder).with(arel_table['id'].asc)
relation.order_by('id_asc')
end
it 'descending' do
- expect(relation).to receive(:reorder).with(id: :desc)
+ expect(relation).to receive(:reorder).with(arel_table['id'].desc)
relation.order_by('id_desc')
end
@@ -22,19 +23,19 @@ describe Sortable do
describe 'ordering by created day' do
it 'ascending' do
- expect(relation).to receive(:reorder).with(created_at: :asc)
+ expect(relation).to receive(:reorder).with(arel_table['created_at'].asc)
relation.order_by('created_asc')
end
it 'descending' do
- expect(relation).to receive(:reorder).with(created_at: :desc)
+ expect(relation).to receive(:reorder).with(arel_table['created_at'].desc)
relation.order_by('created_desc')
end
it 'order by "date"' do
- expect(relation).to receive(:reorder).with(created_at: :desc)
+ expect(relation).to receive(:reorder).with(arel_table['created_at'].desc)
relation.order_by('created_date')
end
@@ -66,13 +67,13 @@ describe Sortable do
describe 'ordering by Updated Time' do
it 'ascending' do
- expect(relation).to receive(:reorder).with(updated_at: :asc)
+ expect(relation).to receive(:reorder).with(arel_table['updated_at'].asc)
relation.order_by('updated_asc')
end
it 'descending' do
- expect(relation).to receive(:reorder).with(updated_at: :desc)
+ expect(relation).to receive(:reorder).with(arel_table['updated_at'].desc)
relation.order_by('updated_desc')
end
diff --git a/spec/models/concerns/triggerable_hooks_spec.rb b/spec/models/concerns/triggerable_hooks_spec.rb
index ac1bc51d950..10a6c1aa821 100644
--- a/spec/models/concerns/triggerable_hooks_spec.rb
+++ b/spec/models/concerns/triggerable_hooks_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe TriggerableHooks do
TestableHook.create!(url: 'http://example2.com', push_events: true)
filter1 = double(:filter1)
filter2 = double(:filter2)
- allow(ActiveHookFilter).to receive(:new).exactly(2).times.and_return(filter1, filter2)
+ allow(ActiveHookFilter).to receive(:new).twice.and_return(filter1, filter2)
expect(filter1).to receive(:matches?).and_return(true)
expect(filter2).to receive(:matches?).and_return(false)
diff --git a/spec/models/concerns/x509_serial_number_attribute_spec.rb b/spec/models/concerns/x509_serial_number_attribute_spec.rb
new file mode 100644
index 00000000000..18a1d85204c
--- /dev/null
+++ b/spec/models/concerns/x509_serial_number_attribute_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe X509SerialNumberAttribute do
+ let(:model) { Class.new { include X509SerialNumberAttribute } }
+
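+ # Stub one text column and one binary column so the attribute helper can be
+ # exercised against both valid (:binary) and invalid (:text) column types.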
+ before do
+ columns = [
+ double(:column, name: 'name', type: :text),
+ double(:column, name: 'serial_number', type: :binary)
+ ]
+
+ allow(model).to receive(:columns).and_return(columns)
+ end
+
+ describe '#x509_serial_number_attribute' do
+ context 'when in non-production' do
+ before do
+ stub_rails_env('development')
+ end
+
+ context 'when the table exists' do
+ before do
+ allow(model).to receive(:table_exists?).and_return(true)
+ end
+
+ it 'defines an x509 serial number attribute for a binary column' do
+ expect(model).to receive(:attribute)
+ .with(:serial_number, an_instance_of(Gitlab::Database::X509SerialNumberAttribute))
+
+ model.x509_serial_number_attribute(:serial_number)
+ end
+
+ it 'raises ArgumentError when the column type is not :binary' do
+ expect { model.x509_serial_number_attribute(:name) }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when the table does not exist' do
+ it 'allows the attribute to be added and issues a warning' do
+ allow(model).to receive(:table_exists?).and_return(false)
+
+ expect(model).not_to receive(:columns)
+ expect(model).to receive(:attribute)
+ expect(model).to receive(:warn)
+
+ model.x509_serial_number_attribute(:name)
+ end
+ end
+
+ context 'when the column does not exist' do
+ it 'allows the attribute to be added and issues a warning' do
+ allow(model).to receive(:table_exists?).and_return(true)
+
+ expect(model).to receive(:columns)
+ expect(model).to receive(:attribute)
+ expect(model).to receive(:warn)
+
+ model.x509_serial_number_attribute(:no_name)
+ end
+ end
+
+ context 'when other exceptions are raised' do
+ it 'logs and re-raises the error' do
+ allow(model).to receive(:table_exists?).and_raise(ActiveRecord::NoDatabaseError.new('does not exist'))
+
+ expect(model).not_to receive(:columns)
+ expect(model).not_to receive(:attribute)
+ expect(Gitlab::AppLogger).to receive(:error)
+
+ expect { model.x509_serial_number_attribute(:name) }.to raise_error(ActiveRecord::NoDatabaseError)
+ end
+ end
+ end
+
+ context 'when in production' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it 'defines an x509 serial number attribute' do
+ expect(model).not_to receive(:table_exists?)
+ expect(model).not_to receive(:columns)
+ expect(model).to receive(:attribute).with(:serial_number, an_instance_of(Gitlab::Database::X509SerialNumberAttribute))
+
+ model.x509_serial_number_attribute(:serial_number)
+ end
+ end
+ end
+end
diff --git a/spec/models/container_expiration_policy_spec.rb b/spec/models/container_expiration_policy_spec.rb
index 1bce4c3b20a..c22362ed5d4 100644
--- a/spec/models/container_expiration_policy_spec.rb
+++ b/spec/models/container_expiration_policy_spec.rb
@@ -49,9 +49,9 @@ RSpec.describe ContainerExpirationPolicy, type: :model do
it 'preloads the associations' do
subject
- query = ActiveRecord::QueryRecorder.new { subject.each(&:project) }
+ query = ActiveRecord::QueryRecorder.new { subject.map(&:project).map(&:full_path) }
- expect(query.count).to eq(2)
+ expect(query.count).to eq(3)
end
end
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index 0a3065140bf..5ed812652c5 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -85,7 +85,7 @@ describe ContainerRepository do
context 'when action succeeds' do
it 'returns status that indicates success' do
expect(repository.client)
- .to receive(:delete_repository_tag)
+ .to receive(:delete_repository_tag_by_digest)
.twice
.and_return(true)
@@ -96,7 +96,7 @@ describe ContainerRepository do
context 'when action fails' do
it 'returns status that indicates failure' do
expect(repository.client)
- .to receive(:delete_repository_tag)
+ .to receive(:delete_repository_tag_by_digest)
.twice
.and_return(false)
@@ -105,6 +105,36 @@ describe ContainerRepository do
end
end
+ describe '#delete_tag_by_name' do
+ let(:repository) do
+ create(:container_repository, name: 'my_image',
+ tags: { latest: '123', rc1: '234' },
+ project: project)
+ end
+
+ context 'when action succeeds' do
+ it 'returns status that indicates success' do
+ expect(repository.client)
+ .to receive(:delete_repository_tag_by_name)
+ .with(repository.path, "latest")
+ .and_return(true)
+
+ expect(repository.delete_tag_by_name('latest')).to be_truthy
+ end
+ end
+
+ context 'when action fails' do
+ it 'returns status that indicates failure' do
+ expect(repository.client)
+ .to receive(:delete_repository_tag_by_name)
+ .with(repository.path, "latest")
+ .and_return(false)
+
+ expect(repository.delete_tag_by_name('latest')).to be_falsey
+ end
+ end
+ end
+
describe '#location' do
context 'when registry is running on a custom port' do
before do
diff --git a/spec/models/cycle_analytics/group_level_spec.rb b/spec/models/cycle_analytics/group_level_spec.rb
index 03fe8c3b50b..1f410a7c539 100644
--- a/spec/models/cycle_analytics/group_level_spec.rb
+++ b/spec/models/cycle_analytics/group_level_spec.rb
@@ -22,10 +22,6 @@ describe CycleAnalytics::GroupLevel do
describe '#stats' do
before do
- allow_next_instance_of(Gitlab::ReferenceExtractor) do |instance|
- allow(instance).to receive(:issues).and_return([issue])
- end
-
create_cycle(user, project, issue, mr, milestone, pipeline)
deploy_master(user, project)
end
diff --git a/spec/models/deploy_token_spec.rb b/spec/models/deploy_token_spec.rb
index 8d951ab6f0f..568699cf3f6 100644
--- a/spec/models/deploy_token_spec.rb
+++ b/spec/models/deploy_token_spec.rb
@@ -7,6 +7,10 @@ describe DeployToken do
it { is_expected.to have_many :project_deploy_tokens }
it { is_expected.to have_many(:projects).through(:project_deploy_tokens) }
+ it { is_expected.to have_many :group_deploy_tokens }
+ it { is_expected.to have_many(:groups).through(:group_deploy_tokens) }
+
+ it_behaves_like 'having unique enum values'
describe 'validations' do
let(:username_format_message) { "can contain only letters, digits, '_', '-', '+', and '.'" }
@@ -15,6 +19,29 @@ describe DeployToken do
it { is_expected.to allow_value('GitLab+deploy_token-3.14').for(:username) }
it { is_expected.not_to allow_value('<script>').for(:username).with_message(username_format_message) }
it { is_expected.not_to allow_value('').for(:username).with_message(username_format_message) }
+ it { is_expected.to validate_presence_of(:deploy_token_type) }
+ end
+
+ describe 'deploy_token_type validations' do
+ context 'when a deploy token is associated to a group' do
+ it 'does not allow setting a project to it' do
+ group_token = create(:deploy_token, :group)
+ group_token.projects << build(:project)
+
+ expect(group_token).not_to be_valid
+ expect(group_token.errors.full_messages).to include('Deploy token cannot have projects assigned')
+ end
+ end
+
+ context 'when a deploy token is associated to a project' do
+ it 'does not allow setting a group to it' do
+ project_token = create(:deploy_token)
+ project_token.groups << build(:group)
+
+ expect(project_token).not_to be_valid
+ expect(project_token.errors.full_messages).to include('Deploy token cannot have groups assigned')
+ end
+ end
end
describe '#ensure_token' do
@@ -123,33 +150,148 @@ describe DeployToken do
end
end
+ describe '#holder' do
+ subject { deploy_token.holder }
+
+ context 'when the token is of project type' do
+ it 'returns the relevant holder token' do
+ expect(subject).to eq(deploy_token.project_deploy_tokens.first)
+ end
+ end
+
+ context 'when the token is of group type' do
+ let(:group) { create(:group) }
+ let(:deploy_token) { create(:deploy_token, :group) }
+
+ it 'returns the relevant holder token' do
+ expect(subject).to eq(deploy_token.group_deploy_tokens.first)
+ end
+ end
+ end
+
describe '#has_access_to?' do
let(:project) { create(:project) }
subject { deploy_token.has_access_to?(project) }
- context 'when deploy token is active and related to project' do
- let(:deploy_token) { create(:deploy_token, projects: [project]) }
+ context 'when a project is not passed in' do
+ let(:project) { nil }
- it { is_expected.to be_truthy }
+ it { is_expected.to be_falsy }
end
- context 'when deploy token is active but not related to project' do
- let(:deploy_token) { create(:deploy_token) }
+ context 'when a project is passed in' do
+ context 'when deploy token is active and related to project' do
+ let(:deploy_token) { create(:deploy_token, projects: [project]) }
- it { is_expected.to be_falsy }
- end
+ it { is_expected.to be_truthy }
+ end
- context 'when deploy token is revoked and related to project' do
- let(:deploy_token) { create(:deploy_token, :revoked, projects: [project]) }
+ context 'when deploy token is active but not related to project' do
+ let(:deploy_token) { create(:deploy_token) }
- it { is_expected.to be_falsy }
- end
+ it { is_expected.to be_falsy }
+ end
- context 'when deploy token is revoked and not related to the project' do
- let(:deploy_token) { create(:deploy_token, :revoked) }
+ context 'when deploy token is revoked and related to project' do
+ let(:deploy_token) { create(:deploy_token, :revoked, projects: [project]) }
- it { is_expected.to be_falsy }
+ it { is_expected.to be_falsy }
+ end
+
+ context 'when deploy token is revoked and not related to the project' do
+ let(:deploy_token) { create(:deploy_token, :revoked) }
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'and when the token is of group type' do
+ let_it_be(:group) { create(:group) }
+ let(:deploy_token) { create(:deploy_token, :group) }
+
+ before do
+ deploy_token.groups << group
+ end
+
+ context 'and the allow_group_deploy_token feature flag is turned off' do
+ it 'is false' do
+ stub_feature_flags(allow_group_deploy_token: false)
+
+ is_expected.to be_falsy
+ end
+ end
+
+ context 'and the allow_group_deploy_token feature flag is turned on' do
+ before do
+ stub_feature_flags(allow_group_deploy_token: true)
+ end
+
+ context 'and the passed-in project does not belong to any group' do
+ it { is_expected.to be_falsy }
+ end
+
+ context 'and the passed-in project belongs to the token group' do
+ it 'is true' do
+ group.projects << project
+
+ is_expected.to be_truthy
+ end
+ end
+
+ context 'and the passed-in project belongs to a subgroup' do
+ let(:child_group) { create(:group, parent_id: group.id) }
+ let(:grandchild_group) { create(:group, parent_id: child_group.id) }
+
+ before do
+ grandchild_group.projects << project
+ end
+
+ context 'and the token group is an ancestor (grandparent) of this group' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'and the token group is not an ancestor of this group' do
+ let(:child2_group) { create(:group, parent_id: group.id) }
+
+ it 'is false' do
+ deploy_token.groups = [child2_group]
+
+ is_expected.to be_falsey
+ end
+ end
+ end
+
+ context 'and the passed-in project does not belong to the token group' do
+ it { is_expected.to be_falsy }
+ end
+
+ context 'and the project belongs to a group that is a parent of the token group' do
+ let(:super_group) { create(:group) }
+ let(:deploy_token) { create(:deploy_token, :group) }
+ let(:group) { create(:group, parent_id: super_group.id) }
+
+ it 'is false' do
+ super_group.projects << project
+
+ is_expected.to be_falsey
+ end
+ end
+ end
+ end
+
+ context 'and the token is of project type' do
+ let(:deploy_token) { create(:deploy_token, projects: [project]) }
+
+ context 'and the passed-in project is the same as the token project' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'and the passed-in project is not the same as the token project' do
+ subject { deploy_token.has_access_to?(create(:project)) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
end
@@ -181,7 +323,7 @@ describe DeployToken do
end
end
- context 'when passign a value' do
+ context 'when passing a value' do
let(:expires_at) { Date.today + 5.months }
let(:deploy_token) { create(:deploy_token, expires_at: expires_at) }
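For reference, a sketch of the access check these examples describe, reconstructed from the assertions above; the `group_type?` helper, the `active?` predicate, and the use of `Group#self_and_ancestors` are assumptions rather than a copy of the real method:

# Sketch only: a project token matches its own projects; a group token matches
# projects in the token's group or any descendant group, behind the
# allow_group_deploy_token feature flag.
def has_access_to?(requested_project)
  return false unless active?            # assumed predicate covering revoked tokens
  return false unless requested_project

  if group_type?                          # assumed enum helper for deploy_token_type
    return false unless Feature.enabled?(:allow_group_deploy_token)

    requested_project.group.present? &&
      requested_project.group.self_and_ancestors.include?(holder.group)
  else
    projects.include?(requested_project)
  end
end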
diff --git a/spec/models/deployment_cluster_spec.rb b/spec/models/deployment_cluster_spec.rb
new file mode 100644
index 00000000000..8bb09e9a510
--- /dev/null
+++ b/spec/models/deployment_cluster_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe DeploymentCluster do
+ let(:cluster) { create(:cluster) }
+ let(:deployment) { create(:deployment) }
+ let(:kubernetes_namespace) { 'an-example-namespace' }
+
+ subject { described_class.new(deployment: deployment, cluster: cluster, kubernetes_namespace: kubernetes_namespace) }
+
+ it { is_expected.to belong_to(:deployment).required }
+ it { is_expected.to belong_to(:cluster).required }
+
+ it do
+ is_expected.to have_attributes(
+ cluster_id: cluster.id,
+ deployment_id: deployment.id,
+ kubernetes_namespace: kubernetes_namespace
+ )
+ end
+end
diff --git a/spec/models/deployment_metrics_spec.rb b/spec/models/deployment_metrics_spec.rb
index 32c04e15b73..5a4ae0bbe79 100644
--- a/spec/models/deployment_metrics_spec.rb
+++ b/spec/models/deployment_metrics_spec.rb
@@ -87,7 +87,7 @@ describe DeploymentMetrics do
expect(prometheus_adapter).to receive(:query).with(:deployment, deployment).and_return(simple_metrics)
end
- it { is_expected.to eq(simple_metrics.merge({ deployment_time: deployment.created_at.to_i })) }
+ it { is_expected.to eq(simple_metrics.merge({ deployment_time: deployment.finished_at.to_i })) }
end
end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 0c1b259d6bf..ab7e12cd43c 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -10,6 +10,7 @@ describe Deployment do
it { is_expected.to belong_to(:cluster).class_name('Clusters::Cluster') }
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:deployable) }
+ it { is_expected.to have_one(:deployment_cluster) }
it { is_expected.to have_many(:deployment_merge_requests) }
it { is_expected.to have_many(:merge_requests).through(:deployment_merge_requests) }
@@ -17,6 +18,7 @@ describe Deployment do
it { is_expected.to delegate_method(:commit).to(:project) }
it { is_expected.to delegate_method(:commit_title).to(:commit).as(:try) }
it { is_expected.to delegate_method(:manual_actions).to(:deployable).as(:try) }
+ it { is_expected.to delegate_method(:kubernetes_namespace).to(:deployment_cluster).as(:kubernetes_namespace) }
it { is_expected.to validate_presence_of(:ref) }
it { is_expected.to validate_presence_of(:sha) }
@@ -50,6 +52,22 @@ describe Deployment do
end
end
+ describe '.stoppable' do
+ subject { described_class.stoppable }
+
+ context 'when deployment is stoppable' do
+ let!(:deployment) { create(:deployment, :success, on_stop: 'stop-review') }
+
+ it { is_expected.to eq([deployment]) }
+ end
+
+ context 'when deployment is not stoppable' do
+ let!(:deployment) { create(:deployment, :failed) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe '.success' do
subject { described_class.success }
@@ -263,6 +281,45 @@ describe Deployment do
expect(last_deployments).to match_array(deployments.last(2))
end
end
+
+ describe 'active' do
+ subject { described_class.active }
+
+ it 'retrieves the active deployments' do
+ deployment1 = create(:deployment, status: :created)
+ deployment2 = create(:deployment, status: :running)
+ create(:deployment, status: :failed)
+ create(:deployment, status: :canceled)
+
+ is_expected.to contain_exactly(deployment1, deployment2)
+ end
+ end
+
+ describe 'older_than' do
+ let(:deployment) { create(:deployment) }
+
+ subject { described_class.older_than(deployment) }
+
+ it 'retrieves the correct older deployments' do
+ older_deployment1 = create(:deployment)
+ older_deployment2 = create(:deployment)
+ deployment
+ create(:deployment)
+
+ is_expected.to contain_exactly(older_deployment1, older_deployment2)
+ end
+ end
+
+ describe 'with_deployable' do
+ subject { described_class.with_deployable }
+
+ it 'retrieves deployments with deployable builds' do
+ with_deployable = create(:deployment)
+ create(:deployment, deployable: nil)
+
+ is_expected.to contain_exactly(with_deployable)
+ end
+ end
end
describe '#includes_commit?' do
@@ -495,7 +552,7 @@ describe Deployment do
end
end
- context '#update_status' do
+ describe '#update_status' do
let(:deploy) { create(:deployment, status: :running) }
it 'changes the status' do
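For reference, plausible definitions of the `.active` and `.stoppable` scopes exercised above, reconstructed from the examples; the status values are assumptions, while `.success` reuses the scope covered earlier in this spec:

# Sketch only: 'active' means created or running; 'stoppable' means a
# successful deployment that still carries an on_stop action.
scope :active,    -> { where(status: [:created, :running]) }
scope :stoppable, -> { where.not(on_stop: nil).success }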
diff --git a/spec/models/diff_viewer/server_side_spec.rb b/spec/models/diff_viewer/server_side_spec.rb
index 27de0584b8a..0a14eae26f3 100644
--- a/spec/models/diff_viewer/server_side_spec.rb
+++ b/spec/models/diff_viewer/server_side_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe DiffViewer::ServerSide do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:commit) { project.commit_by(oid: '570e7b2abdd848b95f2f578043fc23bd6f6fd24d') }
let!(:diff_file) { commit.diffs.diff_file_with_new_path('files/ruby/popen.rb') }
diff --git a/spec/models/email_spec.rb b/spec/models/email_spec.rb
index cae88f39660..aa3a60b867a 100644
--- a/spec/models/email_spec.rb
+++ b/spec/models/email_spec.rb
@@ -15,11 +15,6 @@ describe Email do
end
describe '#update_invalid_gpg_signatures' do
- let(:user) do
- create(:user, email: 'tula.torphy@abshire.ca').tap do |user|
- user.skip_reconfirmation!
- end
- end
let(:user) { create(:user) }
it 'synchronizes the gpg keys when the email is updated' do
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index af7ab24d7d6..72143d69fc8 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -7,6 +7,7 @@ describe Environment, :use_clean_rails_memory_store_caching do
using RSpec::Parameterized::TableSyntax
include RepoHelpers
include StubENV
+ include CreateEnvironmentsHelpers
let(:project) { create(:project, :repository) }
@@ -114,6 +115,72 @@ describe Environment, :use_clean_rails_memory_store_caching do
end
end
+ describe '.auto_stoppable' do
+ subject { described_class.auto_stoppable(limit) }
+
+ let(:limit) { 100 }
+
+ context 'when environment is auto-stoppable' do
+ let!(:environment) { create(:environment, :auto_stoppable) }
+
+ it { is_expected.to eq([environment]) }
+ end
+
+ context 'when environment is not auto-stoppable' do
+ let!(:environment) { create(:environment) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
+ describe '.stop_actions' do
+ subject { environments.stop_actions }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let(:environments) { Environment.all }
+
+ before_all do
+ project.add_developer(user)
+ project.repository.add_branch(user, 'review/feature-1', 'master')
+ project.repository.add_branch(user, 'review/feature-2', 'master')
+ end
+
+ shared_examples_for 'correct filtering' do
+ it 'returns stop actions for available environments only' do
+ expect(subject.count).to eq(1)
+ expect(subject.first.name).to eq('stop_review_app')
+ expect(subject.first.ref).to eq('review/feature-1')
+ end
+ end
+
+ before do
+ create_review_app(user, project, 'review/feature-1')
+ create_review_app(user, project, 'review/feature-2')
+ end
+
+ it 'returns stop actions for environments' do
+ expect(subject.count).to eq(2)
+ expect(subject).to match_array(Ci::Build.where(name: 'stop_review_app'))
+ end
+
+ context 'when one of the stop actions has already been executed' do
+ before do
+ Ci::Build.where(ref: 'review/feature-2').find_by_name('stop_review_app').enqueue!
+ end
+
+ it_behaves_like 'correct filtering'
+ end
+
+ context 'when one of the deployments does not have a stop action' do
+ before do
+ Deployment.where(ref: 'review/feature-2').update_all(on_stop: nil)
+ end
+
+ it_behaves_like 'correct filtering'
+ end
+ end
+
describe '.pluck_names' do
subject { described_class.pluck_names }
@@ -449,7 +516,7 @@ describe Environment, :use_clean_rails_memory_store_caching do
describe '#reset_auto_stop' do
subject { environment.reset_auto_stop }
- let(:environment) { create(:environment, :auto_stopped) }
+ let(:environment) { create(:environment, :auto_stoppable) }
it 'nullifies the auto_stop_at' do
expect { subject }.to change(environment, :auto_stop_at).from(Time).to(nil)
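For reference, a sketch of the `.auto_stoppable` scope covered above, assuming the `:auto_stoppable` factory trait marks an available environment whose `auto_stop_at` lies in the past; the `available` scope is an assumption:

# Sketch only: environments past their auto_stop_at deadline, capped at `limit`.
scope :auto_stoppable, -> (limit) do
  available.where('auto_stop_at < ?', Time.current).limit(limit)
end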
diff --git a/spec/models/error_tracking/project_error_tracking_setting_spec.rb b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
index 5b402e572c3..e81480ab88f 100644
--- a/spec/models/error_tracking/project_error_tracking_setting_spec.rb
+++ b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
@@ -19,6 +19,19 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
it { is_expected.to allow_value("http://gitlab.com/api/0/projects/project1/something").for(:api_url) }
it { is_expected.not_to allow_values("http://gitlab.com/api/0/projects/project1/something€").for(:api_url) }
+ it 'disallows non-booleans in enabled column' do
+ is_expected.not_to allow_value(
+ nil
+ ).for(:enabled)
+ end
+
+ it 'allows booleans in enabled column' do
+ is_expected.to allow_value(
+ true,
+ false
+ ).for(:enabled)
+ end
+
it 'rejects invalid api_urls' do
is_expected.not_to allow_values(
"https://replaceme.com/'><script>alert(document.cookie)</script>", # unsafe
@@ -267,7 +280,7 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
end
it { expect(result[:issue].gitlab_commit).to eq(commit_id) }
- it { expect(result[:issue].gitlab_commit_path).to eq("/#{project.namespace.path}/#{project.path}/commit/#{commit_id}") }
+ it { expect(result[:issue].gitlab_commit_path).to eq("/#{project.namespace.path}/#{project.path}/-/commit/#{commit_id}") }
end
end
diff --git a/spec/models/event_collection_spec.rb b/spec/models/event_collection_spec.rb
index c421ffa000d..e6f80a4c4d0 100644
--- a/spec/models/event_collection_spec.rb
+++ b/spec/models/event_collection_spec.rb
@@ -4,10 +4,10 @@ require 'spec_helper'
describe EventCollection do
describe '#to_a' do
- set(:group) { create(:group) }
- set(:project) { create(:project_empty_repo, group: group) }
- set(:projects) { Project.where(id: project.id) }
- set(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project_empty_repo, group: group) }
+ let_it_be(:projects) { Project.where(id: project.id) }
+ let_it_be(:user) { create(:user) }
context 'with project events' do
before do
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index 990141cf511..97ea32a120d 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -173,6 +173,7 @@ describe Event do
end
context 'commit note event' do
+ let(:project) { create(:project, :public, :repository) }
let(:target) { note_on_commit }
it do
@@ -185,7 +186,7 @@ describe Event do
end
context 'private project' do
- let(:project) { create(:project, :private) }
+ let(:project) { create(:project, :private, :repository) }
it do
aggregate_failures do
diff --git a/spec/models/group_deploy_token_spec.rb b/spec/models/group_deploy_token_spec.rb
new file mode 100644
index 00000000000..d38abafa7ed
--- /dev/null
+++ b/spec/models/group_deploy_token_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GroupDeployToken, type: :model do
+ let(:group) { create(:group) }
+ let(:deploy_token) { create(:deploy_token) }
+
+ subject(:group_deploy_token) { create(:group_deploy_token, group: group, deploy_token: deploy_token) }
+
+ it { is_expected.to belong_to :group }
+ it { is_expected.to belong_to :deploy_token }
+
+ it { is_expected.to validate_presence_of :deploy_token }
+ it { is_expected.to validate_presence_of :group }
+ it { is_expected.to validate_uniqueness_of(:deploy_token_id).scoped_to(:group_id) }
+end
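For reference, the associations and validations above imply a join model along these lines (a sketch, not necessarily the exact class shipped in app/models):

# Sketch only: join model between a group and a deploy token, with one
# token linked to a given group at most once.
class GroupDeployToken < ApplicationRecord
  belongs_to :group
  belongs_to :deploy_token

  validates :group, presence: true
  validates :deploy_token, presence: true
  validates :deploy_token_id, uniqueness: { scope: [:group_id] }
end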
diff --git a/spec/models/guest_spec.rb b/spec/models/guest_spec.rb
index 93862e98172..57eb077031c 100644
--- a/spec/models/guest_spec.rb
+++ b/spec/models/guest_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Guest do
- set(:public_project) { create(:project, :public) }
- set(:private_project) { create(:project, :private) }
- set(:internal_project) { create(:project, :internal) }
+ let_it_be(:public_project, reload: true) { create(:project, :public) }
+ let_it_be(:private_project) { create(:project, :private) }
+ let_it_be(:internal_project) { create(:project, :internal) }
describe '.can_pull?' do
context 'when project is private' do
diff --git a/spec/models/hooks/web_hook_log_spec.rb b/spec/models/hooks/web_hook_log_spec.rb
index 3520720d9a4..128601794cf 100644
--- a/spec/models/hooks/web_hook_log_spec.rb
+++ b/spec/models/hooks/web_hook_log_spec.rb
@@ -69,4 +69,20 @@ describe WebHookLog do
it { expect(web_hook_log.success?).to be_falsey }
end
end
+
+ describe '#internal_error?' do
+ let(:web_hook_log) { build_stubbed(:web_hook_log, response_status: status) }
+
+ context 'when response status is not an internal error' do
+ let(:status) { '200' }
+
+ it { expect(web_hook_log.internal_error?).to be_falsey }
+ end
+
+ context 'when response status is an internal error' do
+ let(:status) { 'internal error' }
+
+ it { expect(web_hook_log.internal_error?).to be_truthy }
+ end
+ end
end
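For reference, a sketch of the predicate these two examples pin down; the magic string comes directly from the spec, while the method body is an assumption:

# Sketch only: failed deliveries are recorded with the literal response
# status 'internal error' rather than a numeric HTTP code.
def internal_error?
  response_status == 'internal error'
end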
diff --git a/spec/models/identity_spec.rb b/spec/models/identity_spec.rb
index 74ddc2d6284..9f120775a3c 100644
--- a/spec/models/identity_spec.rb
+++ b/spec/models/identity_spec.rb
@@ -13,7 +13,7 @@ describe Identity do
end
describe 'validations' do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
context 'with existing user and provider' do
before do
diff --git a/spec/models/incident_management/project_incident_management_setting_spec.rb b/spec/models/incident_management/project_incident_management_setting_spec.rb
new file mode 100644
index 00000000000..ac3f97e2d89
--- /dev/null
+++ b/spec/models/incident_management/project_incident_management_setting_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe IncidentManagement::ProjectIncidentManagementSetting do
+ let_it_be(:project) { create(:project, :repository, create_templates: :issue) }
+
+ describe 'Associations' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'Validations' do
+ describe 'validate issue_template_exists' do
+ subject { build(:project_incident_management_setting, project: project) }
+
+ context 'with create_issue enabled' do
+ before do
+ subject.create_issue = true
+ end
+
+ context 'with valid issue_template_key' do
+ before do
+ subject.issue_template_key = 'bug'
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'with empty issue_template_key' do
+ before do
+ subject.issue_template_key = ''
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'with nil issue_template_key' do
+ before do
+ subject.issue_template_key = nil
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'with invalid issue_template_key' do
+ before do
+ subject.issue_template_key = 'unknown'
+ end
+
+ it { is_expected.to be_invalid }
+
+ it 'returns error' do
+ subject.valid?
+
+ expect(subject.errors[:issue_template_key]).to eq(['not found'])
+ end
+ end
+ end
+
+ context 'with create_issue disabled' do
+ before do
+ subject.create_issue = false
+ end
+
+ context 'with unknown issue_template_key' do
+ before do
+ subject.issue_template_key = 'unknown'
+ end
+
+ it { is_expected.to be_valid }
+ end
+ end
+ end
+ end
+
+ describe '#issue_template_content' do
+ subject { build(:project_incident_management_setting, project: project) }
+
+ shared_examples 'no content' do
+ it 'returns no content' do
+ expect(subject.issue_template_content).to be_nil
+ end
+ end
+
+ context 'with valid issue_template_key' do
+ before do
+ subject.issue_template_key = 'bug'
+ end
+
+ it 'returns issue content' do
+ expect(subject.issue_template_content).to eq('something valid')
+ end
+ end
+
+ context 'with unknown issue_template_key' do
+ before do
+ subject.issue_template_key = 'unknown'
+ end
+
+ it_behaves_like 'no content'
+ end
+
+ context 'without issue_template_key' do
+ before do
+ subject.issue_template_key = nil
+ end
+
+ it_behaves_like 'no content'
+ end
+ end
+end
diff --git a/spec/models/issue_assignee_spec.rb b/spec/models/issue_assignee_spec.rb
new file mode 100644
index 00000000000..2d59ba15101
--- /dev/null
+++ b/spec/models/issue_assignee_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe IssueAssignee do
+ let(:issue) { create(:issue) }
+
+ subject { issue.issue_assignees.build(assignee: create(:user)) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:issue).class_name('Issue') }
+ it { is_expected.to belong_to(:assignee).class_name('User') }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_uniqueness_of(:assignee).scoped_to(:issue_id) }
+ end
+end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 5c3f7c09e22..c0501fb16c6 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -84,6 +84,16 @@ describe Issue do
end
end
+ describe '.simple_sorts' do
+ it 'includes all keys' do
+ expect(described_class.simple_sorts.keys).to include(
+ *%w(created_asc created_at_asc created_date created_desc created_at_desc
+ closest_future_date closest_future_date_asc due_date due_date_asc due_date_desc
+ id_asc id_desc relative_position relative_position_asc
+ updated_desc updated_asc updated_at_asc updated_at_desc))
+ end
+ end
+
describe '#order_by_position_and_priority' do
let(:project) { create :project }
let(:p1) { create(:label, title: 'P1', project: project, priority: 1) }
@@ -349,7 +359,7 @@ describe Issue do
allow(subject.project).to receive(:repository).and_return(repository)
end
- context '#to_branch_name does not exists' do
+ describe '#to_branch_name does not exist' do
before do
allow(repository).to receive(:branch_exists?).and_return(false)
end
@@ -359,7 +369,7 @@ describe Issue do
end
end
- context '#to_branch_name exists not ending with -index' do
+ describe '#to_branch_name exists not ending with -index' do
before do
allow(repository).to receive(:branch_exists?).and_return(true)
allow(repository).to receive(:branch_exists?).with(/#{subject.to_branch_name}-\d/).and_return(false)
@@ -370,7 +380,7 @@ describe Issue do
end
end
- context '#to_branch_name exists ending with -index' do
+ describe '#to_branch_name exists ending with -index' do
before do
allow(repository).to receive(:branch_exists?).and_return(true)
allow(repository).to receive(:branch_exists?).with("#{subject.to_branch_name}-3").and_return(false)
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index 1ae90cae4b1..c9b41c9d82e 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -189,7 +189,7 @@ describe Key, :mailer do
it 'removes key from authorized_file' do
key = create(:personal_key)
- expect(GitlabShellWorker).to receive(:perform_async).with(:remove_key, key.shell_id, key.key)
+ expect(GitlabShellWorker).to receive(:perform_async).with(:remove_key, key.shell_id)
key.destroy
end
end
diff --git a/spec/models/label_link_spec.rb b/spec/models/label_link_spec.rb
index b160e72e759..0a5cb5374b0 100644
--- a/spec/models/label_link_spec.rb
+++ b/spec/models/label_link_spec.rb
@@ -7,4 +7,6 @@ describe LabelLink do
it { is_expected.to belong_to(:label) }
it { is_expected.to belong_to(:target) }
+
+ it_behaves_like 'a BulkInsertSafe model', LabelLink
end
diff --git a/spec/models/label_note_spec.rb b/spec/models/label_note_spec.rb
index dd2c702a7a9..34560acfa9e 100644
--- a/spec/models/label_note_spec.rb
+++ b/spec/models/label_note_spec.rb
@@ -3,20 +3,20 @@
require 'spec_helper'
describe LabelNote do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
- set(:label) { create(:label, project: project) }
- set(:label2) { create(:label, project: project) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:label) { create(:label, project: project) }
+ let_it_be(:label2) { create(:label, project: project) }
let(:resource_parent) { project }
context 'when resource is issue' do
- set(:resource) { create(:issue, project: project) }
+ let_it_be(:resource) { create(:issue, project: project) }
it_behaves_like 'label note created from events'
end
context 'when resource is merge request' do
- set(:resource) { create(:merge_request, source_project: project, target_project: project) }
+ let_it_be(:resource) { create(:merge_request, source_project: project, target_project: project) }
it_behaves_like 'label note created from events'
end
diff --git a/spec/models/label_spec.rb b/spec/models/label_spec.rb
index baf2cfeab0c..dc878c2d3c0 100644
--- a/spec/models/label_spec.rb
+++ b/spec/models/label_spec.rb
@@ -183,6 +183,31 @@ describe Label do
end
end
+ describe '.top_labels_by_target' do
+ let(:label) { create(:label) }
+ let(:popular_label) { create(:label) }
+ let(:merge_request1) { create(:merge_request) }
+ let(:merge_request2) { create(:merge_request) }
+
+ before do
+ merge_request1.labels = [label, popular_label]
+ merge_request2.labels = [popular_label]
+ end
+
+ it 'returns distinct labels, ordered by usage in the given target relation' do
+ top_labels = described_class.top_labels_by_target(MergeRequest.all)
+
+ expect(top_labels).to match_array([popular_label, label])
+ end
+
+ it 'excludes labels that are not assigned to any records in the given target relation' do
+ merge_requests = MergeRequest.where(id: merge_request2.id)
+ top_labels = described_class.top_labels_by_target(merge_requests)
+
+ expect(top_labels).to match_array([popular_label])
+ end
+ end
+
describe '.optionally_subscribed_by' do
let!(:user) { create(:user) }
let!(:label) { create(:label) }
diff --git a/spec/models/lfs_file_lock_spec.rb b/spec/models/lfs_file_lock_spec.rb
index a42346c341d..0a47ded43fb 100644
--- a/spec/models/lfs_file_lock_spec.rb
+++ b/spec/models/lfs_file_lock_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe LfsFileLock do
- set(:lfs_file_lock) { create(:lfs_file_lock) }
+ let_it_be(:lfs_file_lock, reload: true) { create(:lfs_file_lock) }
subject { lfs_file_lock }
it { is_expected.to belong_to(:project) }
diff --git a/spec/models/lfs_object_spec.rb b/spec/models/lfs_object_spec.rb
index 44445429d3e..51713906d06 100644
--- a/spec/models/lfs_object_spec.rb
+++ b/spec/models/lfs_object_spec.rb
@@ -44,8 +44,8 @@ describe LfsObject do
end
describe '#project_allowed_access?' do
- set(:lfs_object) { create(:lfs_objects_project).lfs_object }
- set(:project) { create(:project) }
+ let_it_be(:lfs_object) { create(:lfs_objects_project).lfs_object }
+ let_it_be(:project, reload: true) { create(:project) }
it 'returns true when project is linked' do
create(:lfs_objects_project, lfs_object: lfs_object, project: project)
@@ -58,9 +58,9 @@ describe LfsObject do
end
context 'when project is a member of a fork network' do
- set(:fork_network) { create(:fork_network) }
- set(:fork_network_root_project) { fork_network.root_project }
- set(:fork_network_membership) { create(:fork_network_member, project: project, fork_network: fork_network) }
+ let_it_be(:fork_network) { create(:fork_network) }
+ let_it_be(:fork_network_root_project, reload: true) { fork_network.root_project }
+ let_it_be(:fork_network_membership) { create(:fork_network_member, project: project, fork_network: fork_network) }
it 'returns true for all members when forked project is linked' do
create(:lfs_objects_project, lfs_object: lfs_object, project: project)
diff --git a/spec/models/lfs_objects_project_spec.rb b/spec/models/lfs_objects_project_spec.rb
index e320f873989..31300828a43 100644
--- a/spec/models/lfs_objects_project_spec.rb
+++ b/spec/models/lfs_objects_project_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe LfsObjectsProject do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
subject do
create(:lfs_objects_project, project: project)
diff --git a/spec/models/list_user_preference_spec.rb b/spec/models/list_user_preference_spec.rb
index 1335a3700dc..10a7bf41f4e 100644
--- a/spec/models/list_user_preference_spec.rb
+++ b/spec/models/list_user_preference_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe ListUserPreference do
- set(:user) { create(:user) }
- set(:list) { create(:list) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:list) { create(:list) }
before do
list.update_preferences_for(user, { collapsed: true })
diff --git a/spec/models/merge_request_assignee_spec.rb b/spec/models/merge_request_assignee_spec.rb
new file mode 100644
index 00000000000..d6aab15d990
--- /dev/null
+++ b/spec/models/merge_request_assignee_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequestAssignee do
+ let(:merge_request) { create(:merge_request) }
+
+ subject { merge_request.merge_request_assignees.build(assignee: create(:user)) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:merge_request).class_name('MergeRequest') }
+ it { is_expected.to belong_to(:assignee).class_name('User') }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_uniqueness_of(:assignee).scoped_to(:merge_request_id) }
+ end
+end
diff --git a/spec/models/merge_request_context_commit_diff_file_spec.rb b/spec/models/merge_request_context_commit_diff_file_spec.rb
new file mode 100644
index 00000000000..37d44662326
--- /dev/null
+++ b/spec/models/merge_request_context_commit_diff_file_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequestContextCommitDiffFile do
+ describe 'associations' do
+ it { is_expected.to belong_to(:merge_request_context_commit) }
+ end
+end
diff --git a/spec/models/merge_request_context_commit_spec.rb b/spec/models/merge_request_context_commit_spec.rb
new file mode 100644
index 00000000000..5a1bf9874ac
--- /dev/null
+++ b/spec/models/merge_request_context_commit_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequestContextCommit do
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.project }
+ let(:raw_repository) { project.repository.raw_repository }
+ let(:commits) do
+ [
+ project.commit('5937ac0a7beb003549fc5fd26fc247adbce4a52e'),
+ project.commit('570e7b2abdd848b95f2f578043fc23bd6f6fd24d')
+ ]
+ end
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:merge_request) }
+ it { is_expected.to have_many(:diff_files).class_name("MergeRequestContextCommitDiffFile") }
+ end
+
+ describe '.delete_bulk' do
+ let(:context_commit1) { create(:merge_request_context_commit, merge_request: merge_request, sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
+ let(:context_commit2) { create(:merge_request_context_commit, merge_request: merge_request, sha: '570e7b2abdd848b95f2f578043fc23bd6f6fd24d') }
+
+ it 'deletes context commits for the given commit SHAs and returns the number deleted' do
+ expect(described_class.delete_bulk(merge_request, [context_commit1, context_commit2])).to eq(2)
+ end
+
+ it 'does not delete context commits when no commit SHAs are passed' do
+ expect(described_class.delete_bulk(merge_request, [])).to eq(0)
+ end
+ end
+end
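For reference, a sketch of the bulk-delete helper exercised above, inferred from the return values the spec expects (2 rows deleted, 0 when nothing is passed):

# Sketch only: delete the given context commits for one merge request and
# return the number of rows removed (delete_all already returns that count).
def self.delete_bulk(merge_request, commits)
  return 0 if commits.empty?

  where(merge_request: merge_request, sha: commits.map(&:sha)).delete_all
end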
diff --git a/spec/models/merge_request_diff_commit_spec.rb b/spec/models/merge_request_diff_commit_spec.rb
index c0a09dab0b0..a296122ae09 100644
--- a/spec/models/merge_request_diff_commit_spec.rb
+++ b/spec/models/merge_request_diff_commit_spec.rb
@@ -6,6 +6,8 @@ describe MergeRequestDiffCommit do
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.project }
+ it_behaves_like 'a BulkInsertSafe model', MergeRequestDiffCommit
+
describe '#to_hash' do
subject { merge_request.commits.first }
@@ -18,7 +20,6 @@ describe MergeRequestDiffCommit do
end
describe '.create_bulk' do
- let(:sha_attribute) { Gitlab::Database::ShaAttribute.new }
let(:merge_request_diff_id) { merge_request.merge_request_diff.id }
let(:commits) do
[
@@ -38,7 +39,7 @@ describe MergeRequestDiffCommit do
"committer_email": "dmitriy.zaporozhets@gmail.com",
"merge_request_diff_id": merge_request_diff_id,
"relative_order": 0,
- "sha": sha_attribute.serialize("5937ac0a7beb003549fc5fd26fc247adbce4a52e")
+ "sha": Gitlab::Database::ShaAttribute.serialize("5937ac0a7beb003549fc5fd26fc247adbce4a52e")
},
{
"message": "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
@@ -50,7 +51,7 @@ describe MergeRequestDiffCommit do
"committer_email": "dmitriy.zaporozhets@gmail.com",
"merge_request_diff_id": merge_request_diff_id,
"relative_order": 1,
- "sha": sha_attribute.serialize("570e7b2abdd848b95f2f578043fc23bd6f6fd24d")
+ "sha": Gitlab::Database::ShaAttribute.serialize("570e7b2abdd848b95f2f578043fc23bd6f6fd24d")
}
]
end
@@ -81,7 +82,7 @@ describe MergeRequestDiffCommit do
"committer_email": "alejorro70@gmail.com",
"merge_request_diff_id": merge_request_diff_id,
"relative_order": 0,
- "sha": sha_attribute.serialize("ba3343bc4fa403a8dfbfcab7fc1a8c29ee34bd69")
+ "sha": Gitlab::Database::ShaAttribute.serialize("ba3343bc4fa403a8dfbfcab7fc1a8c29ee34bd69")
}]
end
diff --git a/spec/models/merge_request_diff_file_spec.rb b/spec/models/merge_request_diff_file_spec.rb
index 84f9c9d06ba..6ecbc5bf832 100644
--- a/spec/models/merge_request_diff_file_spec.rb
+++ b/spec/models/merge_request_diff_file_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe MergeRequestDiffFile do
+ it_behaves_like 'a BulkInsertSafe model', MergeRequestDiffFile
+
describe '#diff' do
context 'when diff is not stored' do
let(:unpacked) { 'unpacked' }
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 78b9e8bc217..8167241faa8 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -54,20 +54,20 @@ describe MergeRequestDiff do
end
describe '.ids_for_external_storage_migration' do
- set(:merge_request) { create(:merge_request) }
- set(:outdated) { merge_request.merge_request_diff }
- set(:latest) { merge_request.create_merge_request_diff }
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:outdated) { merge_request.merge_request_diff }
+ let_it_be(:latest) { merge_request.create_merge_request_diff }
- set(:closed_mr) { create(:merge_request, :closed_last_month) }
+ let_it_be(:closed_mr) { create(:merge_request, :closed_last_month) }
let(:closed) { closed_mr.merge_request_diff }
- set(:merged_mr) { create(:merge_request, :merged_last_month) }
+ let_it_be(:merged_mr) { create(:merge_request, :merged_last_month) }
let(:merged) { merged_mr.merge_request_diff }
- set(:recently_closed_mr) { create(:merge_request, :closed) }
+ let_it_be(:recently_closed_mr) { create(:merge_request, :closed) }
let(:closed_recently) { recently_closed_mr.merge_request_diff }
- set(:recently_merged_mr) { create(:merge_request, :merged) }
+ let_it_be(:recently_merged_mr) { create(:merge_request, :merged) }
let(:merged_recently) { recently_merged_mr.merge_request_diff }
before do
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index c6894c04385..36fd5d21e73 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -277,6 +277,7 @@ describe MergeRequest do
describe 'respond to' do
it { is_expected.to respond_to(:unchecked?) }
+ it { is_expected.to respond_to(:checking?) }
it { is_expected.to respond_to(:can_be_merged?) }
it { is_expected.to respond_to(:cannot_be_merged?) }
it { is_expected.to respond_to(:merge_params) }
@@ -301,7 +302,11 @@ describe MergeRequest do
it 'returns empty requests' do
latest_merge_request_diff = merge_request.merge_request_diffs.create
- latest_merge_request_diff.merge_request_diff_commits.where(sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0').delete_all
+
+ MergeRequestDiffCommit.where(
+ merge_request_diff_id: latest_merge_request_diff,
+ sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0'
+ ).delete_all
expect(by_commit_sha).to be_empty
end
@@ -326,6 +331,16 @@ describe MergeRequest do
end
end
+ describe '.by_cherry_pick_sha' do
+ it 'returns merge requests that match the given merge commit' do
+ note = create(:track_mr_picking_note, commit_id: '456abc')
+
+ create(:track_mr_picking_note, commit_id: '456def')
+
+ expect(described_class.by_cherry_pick_sha('456abc')).to eq([note.noteable])
+ end
+ end
+
describe '.in_projects' do
it 'returns the merge requests for a set of projects' do
expect(described_class.in_projects(Project.all)).to eq([subject])
@@ -1076,8 +1091,8 @@ describe MergeRequest do
end
describe '#can_remove_source_branch?' do
- set(:user) { create(:user) }
- set(:merge_request) { create(:merge_request, :simple) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:merge_request, reload: true) { create(:merge_request, :simple) }
subject { merge_request }
@@ -2084,45 +2099,67 @@ describe MergeRequest do
describe '#check_mergeability' do
let(:mergeability_service) { double }
+ subject { create(:merge_request, merge_status: 'unchecked') }
+
before do
allow(MergeRequests::MergeabilityCheckService).to receive(:new) do
mergeability_service
end
end
- context 'if the merge status is unchecked' do
- before do
- subject.mark_as_unchecked!
- end
-
+ shared_examples_for 'method that executes MergeabilityCheckService' do
it 'executes MergeabilityCheckService' do
expect(mergeability_service).to receive(:execute)
subject.check_mergeability
end
- end
- context 'if the merge status is checked' do
- context 'and feature flag is enabled' do
- it 'executes MergeabilityCheckService' do
- expect(mergeability_service).not_to receive(:execute)
+ context 'when async is true' do
+ context 'and async_merge_request_check_mergeability feature flag is enabled' do
+ it 'executes MergeabilityCheckService asynchronously' do
+ expect(mergeability_service).to receive(:async_execute)
- subject.check_mergeability
+ subject.check_mergeability(async: true)
+ end
end
- end
- context 'and feature flag is disabled' do
- before do
- stub_feature_flags(merge_requests_conditional_mergeability_check: false)
- end
+ context 'and async_merge_request_check_mergeability feature flag is disabled' do
+ before do
+ stub_feature_flags(async_merge_request_check_mergeability: false)
+ end
- it 'does not execute MergeabilityCheckService' do
- expect(mergeability_service).to receive(:execute)
+ it 'executes MergeabilityCheckService' do
+ expect(mergeability_service).to receive(:execute)
- subject.check_mergeability
+ subject.check_mergeability(async: true)
+ end
end
end
end
+
+ context 'if the merge status is unchecked' do
+ it_behaves_like 'method that executes MergeabilityCheckService'
+ end
+
+ context 'if the merge status is checking' do
+ before do
+ subject.mark_as_checking!
+ end
+
+ it_behaves_like 'method that executes MergeabilityCheckService'
+ end
+
+ context 'if the merge status is checked' do
+ before do
+ subject.mark_as_mergeable!
+ end
+
+ it 'does not call MergeabilityCheckService' do
+ expect(MergeRequests::MergeabilityCheckService).not_to receive(:new)
+
+ subject.check_mergeability
+ end
+ end
end
describe '#mergeable_state?' do
@@ -3145,7 +3182,7 @@ describe MergeRequest do
describe 'check_state?' do
it 'indicates whether MR is still checking for mergeability' do
state_machine = described_class.state_machines[:merge_status]
- check_states = [:unchecked, :cannot_be_merged_recheck]
+ check_states = [:unchecked, :cannot_be_merged_recheck, :checking]
check_states.each do |merge_status|
expect(state_machine.check_state?(merge_status)).to be true
@@ -3539,4 +3576,44 @@ describe MergeRequest do
expect(merge_request.recent_visible_deployments.count).to eq(10)
end
end
+
+ describe '#diffable_merge_ref?' do
+ context 'diff_compare_with_head enabled' do
+ context 'merge request can be merged' do
+ context 'merge_to_ref is not calculated' do
+ it 'returns false' do
+ expect(subject.diffable_merge_ref?).to eq(false)
+ end
+ end
+
+ context 'merge_to_ref is calculated' do
+ before do
+ MergeRequests::MergeToRefService.new(subject.project, subject.author).execute(subject)
+ end
+
+ it 'returns true' do
+ expect(subject.diffable_merge_ref?).to eq(true)
+ end
+ end
+ end
+
+ context 'merge request cannot be merged' do
+ it 'returns false' do
+ subject.mark_as_unchecked!
+
+ expect(subject.diffable_merge_ref?).to eq(false)
+ end
+ end
+ end
+
+ context 'diff_compare_with_head disabled' do
+ before do
+ stub_feature_flags(diff_compare_with_head: { enabled: false, thing: subject.target_project })
+ end
+
+ it 'returns false' do
+ expect(subject.diffable_merge_ref?).to eq(false)
+ end
+ end
+ end
end
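For reference, a sketch of the predicate these contexts describe; the combination of feature flag, mergeability, and a computed merge ref is inferred from the examples, and `merge_ref_head` is an assumption about how "merge_to_ref is calculated" is detected:

# Sketch only: diffing against the merge ref is offered when the flag is on
# for the target project, the MR is mergeable, and the merge ref exists.
def diffable_merge_ref?
  Feature.enabled?(:diff_compare_with_head, target_project) &&
    can_be_merged? &&
    merge_ref_head.present?
end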
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index d84a8665dc8..04587ef4240 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -197,6 +197,15 @@ describe Milestone do
end
end
+ it_behaves_like 'within_timeframe scope' do
+ let_it_be(:now) { Time.now }
+ let_it_be(:project) { create(:project, :empty_repo) }
+ let_it_be(:resource_1) { create(:milestone, project: project, start_date: now - 1.day, due_date: now + 1.day) }
+ let_it_be(:resource_2) { create(:milestone, project: project, start_date: now + 2.days, due_date: now + 3.days) }
+ let_it_be(:resource_3) { create(:milestone, project: project, due_date: now) }
+ let_it_be(:resource_4) { create(:milestone, project: project, start_date: now) }
+ end
+
describe "#percent_complete" do
it "does not count open issues" do
milestone.issues << issue
@@ -517,9 +526,9 @@ describe Milestone do
end
describe '.sort_by_attribute' do
- set(:milestone_1) { create(:milestone, title: 'Foo') }
- set(:milestone_2) { create(:milestone, title: 'Bar') }
- set(:milestone_3) { create(:milestone, title: 'Zoo') }
+ let_it_be(:milestone_1) { create(:milestone, title: 'Foo') }
+ let_it_be(:milestone_2) { create(:milestone, title: 'Bar') }
+ let_it_be(:milestone_3) { create(:milestone, title: 'Zoo') }
context 'ordering by name ascending' do
it 'sorts by title ascending' do
@@ -555,7 +564,7 @@ describe Milestone do
end
it 'returns the quantity of milestones in each possible state' do
- expected_count = { opened: 5, closed: 6, all: 11 }
+ expected_count = { opened: 2, closed: 6, all: 8 }
count = described_class.states_count(Project.all, Group.all)
expect(count).to eq(expected_count)
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 2fa3f426da4..a50608a17b6 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -732,12 +732,12 @@ describe Note do
end
describe '#noteable_ability_name' do
- it 'returns project_snippet for a project snippet note' do
- expect(build(:note_on_project_snippet).noteable_ability_name).to eq('project_snippet')
+ it 'returns snippet for a project snippet note' do
+ expect(build(:note_on_project_snippet).noteable_ability_name).to eq('snippet')
end
- it 'returns personal_snippet for a personal snippet note' do
- expect(build(:note_on_personal_snippet).noteable_ability_name).to eq('personal_snippet')
+ it 'returns snippet for a personal snippet note' do
+ expect(build(:note_on_personal_snippet).noteable_ability_name).to eq('snippet')
end
it 'returns merge_request for an MR note' do
diff --git a/spec/models/notification_recipient_spec.rb b/spec/models/notification_recipient_spec.rb
index 2ba53818e54..f6a36dbb3fc 100644
--- a/spec/models/notification_recipient_spec.rb
+++ b/spec/models/notification_recipient_spec.rb
@@ -80,7 +80,7 @@ describe NotificationRecipient do
end
end
- context '#notification_setting' do
+ describe '#notification_setting' do
context 'for child groups' do
let!(:moved_group) { create(:group) }
let(:group) { create(:group) }
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index b1df13e8c2a..d2a54c3eea7 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -104,6 +104,14 @@ describe PagesDomain do
describe 'validate certificate' do
subject { domain }
+ context 'serverless domain' do
+ it 'requires certificate and key to be present' do
+ expect(build(:pages_domain, :without_certificate, :without_key, usage: :serverless)).not_to be_valid
+ expect(build(:pages_domain, :without_certificate, usage: :serverless)).not_to be_valid
+ expect(build(:pages_domain, :without_key, usage: :serverless)).not_to be_valid
+ end
+ end
+
context 'with matching key' do
let(:domain) { build(:pages_domain) }
@@ -180,8 +188,12 @@ describe PagesDomain do
expect(subject.wildcard).to eq(false)
end
- it 'defaults domain_type to project' do
- expect(subject.domain_type).to eq('project')
+ it 'defaults scope to project' do
+ expect(subject.scope).to eq('project')
+ end
+
+ it 'defaults usage to pages' do
+ expect(subject.usage).to eq('pages')
end
end
@@ -315,11 +327,11 @@ describe PagesDomain do
end
describe '#update_daemon' do
- context 'when domain_type is instance' do
- it 'does nothing' do
+ context 'when usage is serverless' do
+ it 'does not call the UpdatePagesConfigurationService' do
expect(Projects::UpdatePagesConfigurationService).not_to receive(:new)
- create(:pages_domain, domain_type: :instance)
+ create(:pages_domain, usage: :serverless)
end
end
@@ -348,9 +360,9 @@ describe PagesDomain do
end
context 'configuration updates when attributes change' do
- set(:project1) { create(:project) }
- set(:project2) { create(:project) }
- set(:domain) { create(:pages_domain) }
+ let_it_be(:project1) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:domain) { create(:pages_domain) }
where(:attribute, :old_value, :new_value, :update_expected) do
now = Time.now
@@ -398,8 +410,8 @@ describe PagesDomain do
end
context 'TLS configuration' do
- set(:domain_without_tls) { create(:pages_domain, :without_certificate, :without_key) }
- set(:domain) { create(:pages_domain) }
+ let_it_be(:domain_without_tls) { create(:pages_domain, :without_certificate, :without_key) }
+ let_it_be(:domain) { create(:pages_domain) }
let(:cert1) { domain.certificate }
let(:cert2) { cert1 + ' ' }
@@ -551,6 +563,28 @@ describe PagesDomain do
end
end
+ describe '.instance_serverless' do
+ subject { described_class.instance_serverless }
+
+ before do
+ create(:pages_domain, wildcard: true)
+ create(:pages_domain, :instance_serverless)
+ create(:pages_domain, scope: :instance)
+ create(:pages_domain, :instance_serverless)
+ create(:pages_domain, usage: :serverless)
+ end
+
+ it 'returns domains that are wildcard, instance-level, and serverless' do
+ expect(subject.length).to eq(2)
+
+ subject.each do |domain|
+ expect(domain.wildcard).to eq(true)
+ expect(domain.usage).to eq('serverless')
+ expect(domain.scope).to eq('instance')
+ end
+ end
+ end
+
describe '.need_auto_ssl_renewal' do
subject { described_class.need_auto_ssl_renewal }
diff --git a/spec/models/personal_snippet_spec.rb b/spec/models/personal_snippet_spec.rb
index 276c8e22731..4a949a75cbd 100644
--- a/spec/models/personal_snippet_spec.rb
+++ b/spec/models/personal_snippet_spec.rb
@@ -16,4 +16,13 @@ describe PersonalSnippet do
end
end
end
+
+ it_behaves_like 'model with repository' do
+ let_it_be(:container) { create(:personal_snippet, :repository) }
+ let(:stubbed_container) { build_stubbed(:personal_snippet) }
+ let(:expected_full_path) { "@snippets/#{container.id}" }
+ let(:expected_repository_klass) { Repository }
+ let(:expected_storage_klass) { Storage::Hashed }
+ let(:expected_web_url_path) { "snippets/#{container.id}" }
+ end
end
diff --git a/spec/models/project_auto_devops_spec.rb b/spec/models/project_auto_devops_spec.rb
index 2a821b20aa8..5af25ac1437 100644
--- a/spec/models/project_auto_devops_spec.rb
+++ b/spec/models/project_auto_devops_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe ProjectAutoDevops do
- set(:project) { build(:project) }
+ let_it_be(:project) { build(:project) }
it_behaves_like 'having unique enum values'
diff --git a/spec/models/project_ci_cd_setting_spec.rb b/spec/models/project_ci_cd_setting_spec.rb
index eb3a7e527c9..312cbbb0948 100644
--- a/spec/models/project_ci_cd_setting_spec.rb
+++ b/spec/models/project_ci_cd_setting_spec.rb
@@ -32,6 +32,12 @@ describe ProjectCiCdSetting do
end
end
+ describe '#forward_deployment_enabled' do
+ it 'is true by default' do
+ expect(described_class.new.forward_deployment_enabled).to be_truthy
+ end
+ end
+
describe '#default_git_depth' do
let(:default_value) { described_class::DEFAULT_GIT_DEPTH }
diff --git a/spec/models/project_group_link_spec.rb b/spec/models/project_group_link_spec.rb
index cd997224122..63ce08c4d30 100644
--- a/spec/models/project_group_link_spec.rb
+++ b/spec/models/project_group_link_spec.rb
@@ -47,4 +47,12 @@ describe ProjectGroupLink do
group_users.each { |user| expect(user.authorized_projects).not_to include(project) }
end
end
+
+ describe 'search by group name' do
+ let_it_be(:project_group_link) { create(:project_group_link) }
+ let_it_be(:group) { project_group_link.group }
+
+ it { expect(described_class.search(group.name)).to eq([project_group_link]) }
+ it { expect(described_class.search('not-a-group-name')).to be_empty }
+ end
end
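For reference, a sketch of the search scope these two examples cover; matching on the joined group's name is inferred from the spec, and the ILIKE pattern against the namespaces table is an assumption:

# Sketch only: find links whose group name matches the query.
def self.search(query)
  joins(:group).where('namespaces.name ILIKE ?', "%#{sanitize_sql_like(query)}%")
end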
diff --git a/spec/models/project_services/alerts_service_spec.rb b/spec/models/project_services/alerts_service_spec.rb
new file mode 100644
index 00000000000..4e63ece26d8
--- /dev/null
+++ b/spec/models/project_services/alerts_service_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe AlertsService do
+ let_it_be(:project) { create(:project) }
+ let(:service_params) { { project: project, active: active } }
+ let(:active) { true }
+ let(:service) { described_class.new(service_params) }
+
+ shared_context 'when active' do
+ let(:active) { true }
+ end
+
+ shared_context 'when inactive' do
+ let(:active) { false }
+ end
+
+ shared_context 'when persisted' do
+ before do
+ service.save!
+ service.reload
+ end
+ end
+
+ describe '#url' do
+ include Gitlab::Routing
+
+ subject { service.url }
+
+ it { is_expected.to eq(project_alerts_notify_url(project, format: :json)) }
+ end
+
+ describe '#json_fields' do
+ subject { service.json_fields }
+
+ it { is_expected.to eq(%w(active token)) }
+ end
+
+ describe '#as_json' do
+ subject { service.as_json(only: service.json_fields) }
+
+ it { is_expected.to eq('active' => true, 'token' => nil) }
+ end
+
+ describe '#token' do
+ shared_context 'reset token' do
+ before do
+ service.token = ''
+ service.valid?
+ end
+ end
+
+ shared_context 'assign token' do |token|
+ before do
+ service.token = token
+ service.valid?
+ end
+ end
+
+ shared_examples 'valid token' do
+ it { is_expected.to match(/\A\h{32}\z/) }
+ end
+
+ shared_examples 'no token' do
+ it { is_expected.to be_blank }
+ end
+
+ subject { service.token }
+
+ context 'when active' do
+ include_context 'when active'
+
+ context 'when resetting' do
+ let!(:previous_token) { service.token }
+
+ include_context 'reset token'
+
+ it_behaves_like 'valid token'
+
+ it { is_expected.not_to eq(previous_token) }
+ end
+
+ context 'when assigning' do
+ include_context 'assign token', 'random token'
+
+ it_behaves_like 'valid token'
+ end
+ end
+
+ context 'when inactive' do
+ include_context 'when inactive'
+
+ context 'when resetting' do
+ let!(:previous_token) { service.token }
+
+ include_context 'reset token'
+
+ it_behaves_like 'no token'
+ end
+ end
+
+ context 'when persisted' do
+ include_context 'when persisted'
+
+ it_behaves_like 'valid token'
+ end
+ end
+end
diff --git a/spec/models/project_services/bamboo_service_spec.rb b/spec/models/project_services/bamboo_service_spec.rb
index 65d227a17f9..1b946278790 100644
--- a/spec/models/project_services/bamboo_service_spec.rb
+++ b/spec/models/project_services/bamboo_service_spec.rb
@@ -148,7 +148,7 @@ describe BambooService, :use_clean_rails_memory_store_caching do
end
shared_examples 'reactive cache calculation' do
- context '#build_page' do
+ describe '#build_page' do
subject { service.calculate_reactive_cache('123', 'unused')[:build_page] }
it 'returns a specific URL when status is 500' do
@@ -180,7 +180,7 @@ describe BambooService, :use_clean_rails_memory_store_caching do
end
end
- context '#commit_status' do
+ describe '#commit_status' do
subject { service.calculate_reactive_cache('123', 'unused')[:commit_status] }
it 'sets commit status to :error when status is 500' do
diff --git a/spec/models/project_services/buildkite_service_spec.rb b/spec/models/project_services/buildkite_service_spec.rb
index ca196069055..c622b7706c6 100644
--- a/spec/models/project_services/buildkite_service_spec.rb
+++ b/spec/models/project_services/buildkite_service_spec.rb
@@ -83,7 +83,7 @@ describe BuildkiteService, :use_clean_rails_memory_store_caching do
end
describe '#calculate_reactive_cache' do
- context '#commit_status' do
+ describe '#commit_status' do
subject { service.calculate_reactive_cache('123', 'unused')[:commit_status] }
it 'sets commit status to :error when status is 500' do
diff --git a/spec/models/project_services/chat_message/issue_message_spec.rb b/spec/models/project_services/chat_message/issue_message_spec.rb
index d3adc62c38e..c4d10be8331 100644
--- a/spec/models/project_services/chat_message/issue_message_spec.rb
+++ b/spec/models/project_services/chat_message/issue_message_spec.rb
@@ -31,7 +31,7 @@ describe ChatMessage::IssueMessage do
context 'without markdown' do
let(:color) { '#C95823' }
- context '#initialize' do
+ describe '#initialize' do
before do
args[:object_attributes][:description] = nil
end
diff --git a/spec/models/project_services/chat_message/merge_message_spec.rb b/spec/models/project_services/chat_message/merge_message_spec.rb
index b56eb19dd55..150ee6f7472 100644
--- a/spec/models/project_services/chat_message/merge_message_spec.rb
+++ b/spec/models/project_services/chat_message/merge_message_spec.rb
@@ -52,7 +52,7 @@ describe ChatMessage::MergeMessage do
context 'open' do
it 'returns a message regarding opening of merge requests' do
expect(subject.pretext).to eq(
- 'Test User (test.user) opened <http://somewhere.com/merge_requests/100|!100 *Merge Request title*> in <http://somewhere.com|project_name>')
+ 'Test User (test.user) opened <http://somewhere.com/-/merge_requests/100|!100 *Merge Request title*> in <http://somewhere.com|project_name>')
expect(subject.attachments).to be_empty
end
end
@@ -63,7 +63,7 @@ describe ChatMessage::MergeMessage do
end
it 'returns a message regarding closing of merge requests' do
expect(subject.pretext).to eq(
- 'Test User (test.user) closed <http://somewhere.com/merge_requests/100|!100 *Merge Request title*> in <http://somewhere.com|project_name>')
+ 'Test User (test.user) closed <http://somewhere.com/-/merge_requests/100|!100 *Merge Request title*> in <http://somewhere.com|project_name>')
expect(subject.attachments).to be_empty
end
end
@@ -77,12 +77,12 @@ describe ChatMessage::MergeMessage do
context 'open' do
it 'returns a message regarding opening of merge requests' do
expect(subject.pretext).to eq(
- 'Test User (test.user) opened [!100 *Merge Request title*](http://somewhere.com/merge_requests/100) in [project_name](http://somewhere.com)')
+ 'Test User (test.user) opened [!100 *Merge Request title*](http://somewhere.com/-/merge_requests/100) in [project_name](http://somewhere.com)')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq({
title: 'Merge Request opened by Test User (test.user)',
subtitle: 'in [project_name](http://somewhere.com)',
- text: '[!100 *Merge Request title*](http://somewhere.com/merge_requests/100)',
+ text: '[!100 *Merge Request title*](http://somewhere.com/-/merge_requests/100)',
image: 'http://someavatar.com'
})
end
@@ -95,12 +95,12 @@ describe ChatMessage::MergeMessage do
it 'returns a message regarding closing of merge requests' do
expect(subject.pretext).to eq(
- 'Test User (test.user) closed [!100 *Merge Request title*](http://somewhere.com/merge_requests/100) in [project_name](http://somewhere.com)')
+ 'Test User (test.user) closed [!100 *Merge Request title*](http://somewhere.com/-/merge_requests/100) in [project_name](http://somewhere.com)')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq({
title: 'Merge Request closed by Test User (test.user)',
subtitle: 'in [project_name](http://somewhere.com)',
- text: '[!100 *Merge Request title*](http://somewhere.com/merge_requests/100)',
+ text: '[!100 *Merge Request title*](http://somewhere.com/-/merge_requests/100)',
image: 'http://someavatar.com'
})
end
diff --git a/spec/models/project_services/chat_notification_service_spec.rb b/spec/models/project_services/chat_notification_service_spec.rb
index e8c5f5d611a..45ea4cd74ed 100644
--- a/spec/models/project_services/chat_notification_service_spec.rb
+++ b/spec/models/project_services/chat_notification_service_spec.rb
@@ -35,6 +35,7 @@ describe ChatNotificationService do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:webhook_url) { 'https://example.gitlab.com/' }
+ let(:data) { Gitlab::DataBuilder::Push.build_sample(subject.project, user) }
before do
allow(chat_service).to receive_messages(
@@ -51,9 +52,6 @@ describe ChatNotificationService do
context 'with a repository' do
it 'returns true' do
- subject.project = project
- data = Gitlab::DataBuilder::Push.build_sample(project, user)
-
expect(chat_service).to receive(:notify).and_return(true)
expect(chat_service.execute(data)).to be true
end
@@ -62,11 +60,19 @@ describe ChatNotificationService do
context 'with an empty repository' do
it 'returns true' do
subject.project = create(:project, :empty_repo)
- data = Gitlab::DataBuilder::Push.build_sample(subject.project, user)
expect(chat_service).to receive(:notify).and_return(true)
expect(chat_service.execute(data)).to be true
end
end
+
+ context 'with a project whose name contains spaces' do
+ it 'does not remove spaces' do
+ allow(project).to receive(:full_name).and_return('Project Name')
+
+ expect(chat_service).to receive(:get_message).with(any_args, hash_including(project_name: 'Project Name'))
+ chat_service.execute(data)
+ end
+ end
end
end
diff --git a/spec/models/project_services/drone_ci_service_spec.rb b/spec/models/project_services/drone_ci_service_spec.rb
index a771d1bf27f..0639a4c1f23 100644
--- a/spec/models/project_services/drone_ci_service_spec.rb
+++ b/spec/models/project_services/drone_ci_service_spec.rb
@@ -86,7 +86,7 @@ describe DroneCiService, :use_clean_rails_memory_store_caching do
describe '#calculate_reactive_cache' do
include_context :drone_ci_service
- context '#commit_status' do
+ describe '#commit_status' do
subject { drone.calculate_reactive_cache(sha, branch)[:commit_status] }
it 'sets commit status to :error when status is 500' do
diff --git a/spec/models/project_services/emails_on_push_service_spec.rb b/spec/models/project_services/emails_on_push_service_spec.rb
index 56f094ecb48..ce1952b503f 100644
--- a/spec/models/project_services/emails_on_push_service_spec.rb
+++ b/spec/models/project_services/emails_on_push_service_spec.rb
@@ -21,6 +21,22 @@ describe EmailsOnPushService do
end
end
+ context 'when properties is missing branches_to_be_notified' do
+ subject { described_class.new(properties: {}) }
+
+ it 'sets the default value to all' do
+ expect(subject.branches_to_be_notified).to eq('all')
+ end
+ end
+
+ context 'when branches_to_be_notified is already set' do
+ subject { described_class.new(properties: { branches_to_be_notified: 'protected' }) }
+
+ it 'does not overwrite it with the default value' do
+ expect(subject.branches_to_be_notified).to eq('protected')
+ end
+ end
+
context 'project emails' do
let(:push_data) { { object_kind: 'push' } }
let(:project) { create(:project, :repository) }
diff --git a/spec/models/project_services/hipchat_service_spec.rb b/spec/models/project_services/hipchat_service_spec.rb
index a1bd0855708..ae6e93cfe3a 100644
--- a/spec/models/project_services/hipchat_service_spec.rb
+++ b/spec/models/project_services/hipchat_service_spec.rb
@@ -352,7 +352,7 @@ describe HipchatService do
end
end
- context "#message_options" do
+ describe "#message_options" do
it "is set to the defaults" do
expect(hipchat.__send__(:message_options)).to eq({ notify: false, color: 'yellow' })
end
diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb
index 8c14089fff2..832c19adf1d 100644
--- a/spec/models/project_services/jira_service_spec.rb
+++ b/spec/models/project_services/jira_service_spec.rb
@@ -3,7 +3,6 @@
require 'spec_helper'
describe JiraService do
- include Gitlab::Routing
include AssetsHelpers
let(:title) { 'custom title' }
@@ -153,7 +152,7 @@ describe JiraService do
end
end
- context '#update' do
+ describe '#update' do
context 'basic update' do
let(:new_username) { 'new_username' }
let(:new_url) { 'http://jira-new.example.com' }
@@ -422,7 +421,7 @@ describe JiraService do
GlobalID: 'GitLab',
relationship: 'mentioned on',
object: {
- url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/commit/#{commit_id}",
+ url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/-/commit/#{commit_id}",
title: "Solved by commit #{commit_id}.",
icon: { title: 'GitLab', url16x16: favicon_path },
status: { resolved: true }
@@ -465,7 +464,7 @@ describe JiraService do
@jira_service.close_issue(resource, ExternalIssue.new('JIRA-123', project))
expect(WebMock).to have_requested(:post, @comment_url).with(
- body: %r{#{custom_base_url}/#{project.full_path}/commit/#{commit_id}}
+ body: %r{#{custom_base_url}/#{project.full_path}/-/commit/#{commit_id}}
).once
end
@@ -480,7 +479,7 @@ describe JiraService do
@jira_service.close_issue(resource, ExternalIssue.new('JIRA-123', project))
expect(WebMock).to have_requested(:post, @comment_url).with(
- body: %r{#{Gitlab.config.gitlab.url}/#{project.full_path}/commit/#{commit_id}}
+ body: %r{#{Gitlab.config.gitlab.url}/#{project.full_path}/-/commit/#{commit_id}}
).once
end
@@ -543,47 +542,55 @@ describe JiraService do
end
end
- describe '#test_settings' do
+ describe '#test' do
let(:jira_service) do
described_class.new(
- project: create(:project),
- url: 'http://jira.example.com',
- username: 'jira_username',
- password: 'jira_password'
+ url: url,
+ username: username,
+ password: password
)
end
- def test_settings(api_url = nil)
- api_url ||= 'jira.example.com'
- test_url = "http://#{api_url}/rest/api/2/serverInfo"
+ def test_settings(url = 'jira.example.com')
+ test_url = "http://#{url}/rest/api/2/serverInfo"
- WebMock.stub_request(:get, test_url).with(basic_auth: %w(jira_username jira_password)).to_return(body: { url: 'http://url' }.to_json )
+ WebMock.stub_request(:get, test_url).with(basic_auth: [username, password])
+ .to_return(body: { url: 'http://url' }.to_json )
jira_service.test(nil)
end
context 'when the test succeeds' do
- it 'tries to get Jira project with URL when API URL not set' do
- test_settings('jira.example.com')
- end
-
- it 'returns correct result' do
- expect(test_settings).to eq( { success: true, result: { 'url' => 'http://url' } })
+ it 'gets Jira project with URL when API URL not set' do
+ expect(test_settings).to eq(success: true, result: { 'url' => 'http://url' })
end
- it 'tries to get Jira project with API URL if set' do
+ it 'gets Jira project with API URL if set' do
jira_service.update(api_url: 'http://jira.api.com')
- test_settings('jira.api.com')
+
+ expect(test_settings('jira.api.com')).to eq(success: true, result: { 'url' => 'http://url' })
end
end
context 'when the test fails' do
it 'returns result with the error' do
test_url = 'http://jira.example.com/rest/api/2/serverInfo'
- WebMock.stub_request(:get, test_url).with(basic_auth: %w(jira_username jira_password))
+
+ WebMock.stub_request(:get, test_url).with(basic_auth: [username, password])
.to_raise(JIRA::HTTPError.new(double(message: 'Some specific failure.')))
- expect(jira_service.test(nil)).to eq( { success: false, result: 'Some specific failure.' })
+ expect(jira_service).to receive(:log_error).with(
+ "Error sending message",
+ hash_including(
+ client_url: url,
+ error: hash_including(
+ exception_class: 'JIRA::HTTPError',
+ exception_message: 'Some specific failure.'
+ )
+ )
+ )
+
+ expect(jira_service.test(nil)).to eq(success: false, result: 'Some specific failure.')
end
end
end
diff --git a/spec/models/project_services/microsoft_teams_service_spec.rb b/spec/models/project_services/microsoft_teams_service_spec.rb
index 83d3c8b3a70..d93b8a2cb40 100644
--- a/spec/models/project_services/microsoft_teams_service_spec.rb
+++ b/spec/models/project_services/microsoft_teams_service_spec.rb
@@ -37,9 +37,9 @@ describe MicrosoftTeamsService do
end
describe "#execute" do
- let(:user) { create(:user) }
+ let(:user) { create(:user) }
- set(:project) { create(:project, :repository, :wiki_repo) }
+ let_it_be(:project) { create(:project, :repository, :wiki_repo) }
before do
allow(chat_service).to receive_messages(
diff --git a/spec/models/project_services/pipelines_email_service_spec.rb b/spec/models/project_services/pipelines_email_service_spec.rb
index 67358d6c3d6..f29414c80c9 100644
--- a/spec/models/project_services/pipelines_email_service_spec.rb
+++ b/spec/models/project_services/pipelines_email_service_spec.rb
@@ -305,8 +305,8 @@ describe PipelinesEmailService, :mailer do
end
context 'with recipients list separating with newlines' do
- let(:recipients) { "\ntest@gitlab.com, \r\nexample@gitlab.com" }
- let(:receivers) { %w[test@gitlab.com example@gitlab.com] }
+ let(:recipients) { "\ntest@gitlab.com, \r\nexample@gitlab.com\rother@gitlab.com" }
+ let(:receivers) { %w[test@gitlab.com example@gitlab.com other@gitlab.com] }
context 'with failed pipeline' do
before do
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index 5934510fdda..1922bb065cf 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -70,7 +70,7 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
before do
service.api_url = 'http://localhost:9090'
- stub_application_setting(instance_administration_project_id: project.id)
+ stub_application_setting(self_monitoring_project_id: project.id)
stub_config(prometheus: { enable: true, listen_address: 'localhost:9090' })
end
@@ -169,7 +169,7 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
context 'cluster belongs to projects group' do
- set(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
let(:project) { create(:prometheus_project, group: group) }
let(:cluster) { create(:cluster_for_group, :with_installed_helm, groups: [group]) }
diff --git a/spec/models/project_services/youtrack_service_spec.rb b/spec/models/project_services/youtrack_service_spec.rb
index dcc40d8f343..0067793f8d8 100644
--- a/spec/models/project_services/youtrack_service_spec.rb
+++ b/spec/models/project_services/youtrack_service_spec.rb
@@ -37,6 +37,10 @@ describe YoutrackService do
it 'does allow project prefix on the reference' do
expect(described_class.reference_pattern.match('YT-123')[:issue]).to eq('YT-123')
end
+
+ it 'does not allow issue number to be followed by a letter' do
+ expect(described_class.reference_pattern.match('YT-123A')).to eq(nil)
+ end
end
context 'overriding properties' do
diff --git a/spec/models/project_setting_spec.rb b/spec/models/project_setting_spec.rb
new file mode 100644
index 00000000000..5cfb932eb2a
--- /dev/null
+++ b/spec/models/project_setting_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ProjectSetting, type: :model do
+ it { is_expected.to belong_to(:project) }
+end
diff --git a/spec/models/project_snippet_spec.rb b/spec/models/project_snippet_spec.rb
index 903671afb13..09b4ec3677c 100644
--- a/spec/models/project_snippet_spec.rb
+++ b/spec/models/project_snippet_spec.rb
@@ -32,4 +32,13 @@ describe ProjectSnippet do
end
end
end
+
+ it_behaves_like 'model with repository' do
+ let_it_be(:container) { create(:project_snippet, :repository) }
+ let(:stubbed_container) { build_stubbed(:project_snippet) }
+ let(:expected_full_path) { "#{container.project.full_path}/@snippets/#{container.id}" }
+ let(:expected_repository_klass) { Repository }
+ let(:expected_storage_klass) { Storage::Hashed }
+ let(:expected_web_url_path) { "#{container.project.full_path}/snippets/#{container.id}" }
+ end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index c57f47b5738..6c90a1b5614 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -69,6 +69,7 @@ describe Project do
it { is_expected.to have_one(:forked_from_project).through(:fork_network_member) }
it { is_expected.to have_one(:auto_devops).class_name('ProjectAutoDevops') }
it { is_expected.to have_one(:error_tracking_setting).class_name('ErrorTracking::ProjectErrorTrackingSetting') }
+ it { is_expected.to have_one(:project_setting) }
it { is_expected.to have_many(:commit_statuses) }
it { is_expected.to have_many(:ci_pipelines) }
it { is_expected.to have_many(:builds) }
@@ -106,6 +107,15 @@ describe Project do
it { is_expected.to have_many(:sourced_pipelines) }
it { is_expected.to have_many(:source_pipelines) }
+ it_behaves_like 'model with repository' do
+ let_it_be(:container) { create(:project, :repository, path: 'somewhere') }
+ let(:stubbed_container) { build_stubbed(:project) }
+ let(:expected_full_path) { "#{container.namespace.full_path}/somewhere" }
+ let(:expected_repository_klass) { Repository }
+ let(:expected_storage_klass) { Storage::Hashed }
+ let(:expected_web_url_path) { "#{container.namespace.full_path}/somewhere" }
+ end
+
it 'has an inverse relationship with merge requests' do
expect(described_class.reflect_on_association(:merge_requests).has_inverse?).to eq(:target_project)
end
@@ -131,26 +141,37 @@ describe Project do
end
context 'when creating a new project' do
- it 'automatically creates a CI/CD settings row' do
- project = create(:project)
+ let_it_be(:project) { create(:project) }
+ it 'automatically creates a CI/CD settings row' do
expect(project.ci_cd_settings).to be_an_instance_of(ProjectCiCdSetting)
expect(project.ci_cd_settings).to be_persisted
end
it 'automatically creates a container expiration policy row' do
- project = create(:project)
-
expect(project.container_expiration_policy).to be_an_instance_of(ContainerExpirationPolicy)
expect(project.container_expiration_policy).to be_persisted
end
- it 'automatically creates a Pages metadata row' do
- project = create(:project)
+ it 'does not create another container expiration policy if there is already one' do
+ project = build(:project)
+
+ expect do
+ container_expiration_policy = create(:container_expiration_policy, project: project)
+ expect(project.container_expiration_policy).to eq(container_expiration_policy)
+ end.to change { ContainerExpirationPolicy.count }.by(1)
+ end
+
+ it 'automatically creates a Pages metadata row' do
expect(project.pages_metadatum).to be_an_instance_of(ProjectPagesMetadatum)
expect(project.pages_metadatum).to be_persisted
end
+
+ it 'automatically creates a project setting row' do
+ expect(project.project_setting).to be_an_instance_of(ProjectSetting)
+ expect(project.project_setting).to be_persisted
+ end
end
context 'updating ci_cd_settings' do
@@ -275,6 +296,12 @@ describe Project do
end
end
+ it 'validates presence of project_feature' do
+ project = build(:project, project_feature: nil)
+
+ expect(project).not_to be_valid
+ end
+
describe 'import_url' do
it 'does not allow an invalid URI as import_url' do
project = build(:project, import_url: 'invalid://')
@@ -514,7 +541,6 @@ describe Project do
describe 'Respond to' do
it { is_expected.to respond_to(:url_to_repo) }
- it { is_expected.to respond_to(:repo_exists?) }
it { is_expected.to respond_to(:execute_hooks) }
it { is_expected.to respond_to(:owner) }
it { is_expected.to respond_to(:path_with_namespace) }
@@ -532,111 +558,114 @@ describe Project do
it { is_expected.to delegate_method(:last_pipeline).to(:commit).with_arguments(allow_nil: true) }
end
- describe '#to_reference_with_postfix' do
- it 'returns the full path with reference_postfix' do
- namespace = create(:namespace, path: 'sample-namespace')
- project = create(:project, path: 'sample-project', namespace: namespace)
-
- expect(project.to_reference_with_postfix).to eq 'sample-namespace/sample-project>'
- end
- end
+ describe 'reference methods' do
+ let_it_be(:owner) { create(:user, name: 'Gitlab') }
+ let_it_be(:namespace) { create(:namespace, name: 'Sample namespace', path: 'sample-namespace', owner: owner) }
+ let_it_be(:project) { create(:project, name: 'Sample project', path: 'sample-project', namespace: namespace) }
+ let_it_be(:group) { create(:group, name: 'Group', path: 'sample-group') }
+ let_it_be(:another_project) { create(:project, namespace: namespace) }
+ let_it_be(:another_namespace_project) { create(:project, name: 'another-project') }
- describe '#to_reference' do
- let(:owner) { create(:user, name: 'Gitlab') }
- let(:namespace) { create(:namespace, path: 'sample-namespace', owner: owner) }
- let(:project) { create(:project, path: 'sample-project', namespace: namespace) }
- let(:group) { create(:group, name: 'Group', path: 'sample-group') }
+ describe '#to_reference' do
+ it 'returns the path with reference_postfix' do
+ expect(project.to_reference).to eq("#{project.full_path}>")
+ end
- context 'when nil argument' do
- it 'returns nil' do
- expect(project.to_reference).to be_nil
+ it 'returns the path with reference_postfix when arg is self' do
+ expect(project.to_reference(project)).to eq("#{project.full_path}>")
end
- end
- context 'when full is true' do
- it 'returns complete path to the project' do
- expect(project.to_reference(full: true)).to eq 'sample-namespace/sample-project'
- expect(project.to_reference(project, full: true)).to eq 'sample-namespace/sample-project'
- expect(project.to_reference(group, full: true)).to eq 'sample-namespace/sample-project'
+ it 'returns the full_path with reference_postfix when full' do
+ expect(project.to_reference(full: true)).to eq("#{project.full_path}>")
end
- end
- context 'when same project argument' do
- it 'returns nil' do
- expect(project.to_reference(project)).to be_nil
+ it 'returns the full_path with reference_postfix when cross-project' do
+ expect(project.to_reference(build_stubbed(:project))).to eq("#{project.full_path}>")
end
end
- context 'when cross namespace project argument' do
- let(:another_namespace_project) { create(:project, name: 'another-project') }
-
- it 'returns complete path to the project' do
- expect(project.to_reference(another_namespace_project)).to eq 'sample-namespace/sample-project'
+ describe '#to_reference_base' do
+ context 'when nil argument' do
+ it 'returns nil' do
+ expect(project.to_reference_base).to be_nil
+ end
end
- end
- context 'when same namespace / cross-project argument' do
- let(:another_project) { create(:project, namespace: namespace) }
+ context 'when full is true' do
+ it 'returns complete path to the project', :aggregate_failures do
+ be_full_path = eq('sample-namespace/sample-project')
- it 'returns path to the project' do
- expect(project.to_reference(another_project)).to eq 'sample-project'
+ expect(project.to_reference_base(full: true)).to be_full_path
+ expect(project.to_reference_base(project, full: true)).to be_full_path
+ expect(project.to_reference_base(group, full: true)).to be_full_path
+ end
end
- end
- context 'when different namespace / cross-project argument' do
- let(:another_namespace) { create(:namespace, path: 'another-namespace', owner: owner) }
- let(:another_project) { create(:project, path: 'another-project', namespace: another_namespace) }
+ context 'when same project argument' do
+ it 'returns nil' do
+ expect(project.to_reference_base(project)).to be_nil
+ end
+ end
- it 'returns full path to the project' do
- expect(project.to_reference(another_project)).to eq 'sample-namespace/sample-project'
+ context 'when cross namespace project argument' do
+ it 'returns complete path to the project' do
+ expect(project.to_reference_base(another_namespace_project)).to eq 'sample-namespace/sample-project'
+ end
end
- end
- context 'when argument is a namespace' do
- context 'with same project path' do
+ context 'when same namespace / cross-project argument' do
it 'returns path to the project' do
- expect(project.to_reference(namespace)).to eq 'sample-project'
+ expect(project.to_reference_base(another_project)).to eq 'sample-project'
end
end
- context 'with different project path' do
+ context 'when different namespace / cross-project argument with same owner' do
+ let(:another_namespace_same_owner) { create(:namespace, path: 'another-namespace', owner: owner) }
+ let(:another_project_same_owner) { create(:project, path: 'another-project', namespace: another_namespace_same_owner) }
+
it 'returns full path to the project' do
- expect(project.to_reference(group)).to eq 'sample-namespace/sample-project'
+ expect(project.to_reference_base(another_project_same_owner)).to eq 'sample-namespace/sample-project'
end
end
- end
- end
- describe '#to_human_reference' do
- let(:owner) { create(:user, name: 'Gitlab') }
- let(:namespace) { create(:namespace, name: 'Sample namespace', owner: owner) }
- let(:project) { create(:project, name: 'Sample project', namespace: namespace) }
+ context 'when argument is a namespace' do
+ context 'with same project path' do
+ it 'returns path to the project' do
+ expect(project.to_reference_base(namespace)).to eq 'sample-project'
+ end
+ end
- context 'when nil argument' do
- it 'returns nil' do
- expect(project.to_human_reference).to be_nil
+ context 'with different project path' do
+ it 'returns full path to the project' do
+ expect(project.to_reference_base(group)).to eq 'sample-namespace/sample-project'
+ end
+ end
end
end
- context 'when same project argument' do
- it 'returns nil' do
- expect(project.to_human_reference(project)).to be_nil
+ describe '#to_human_reference' do
+ context 'when nil argument' do
+ it 'returns nil' do
+ expect(project.to_human_reference).to be_nil
+ end
end
- end
-
- context 'when cross namespace project argument' do
- let(:another_namespace_project) { create(:project, name: 'another-project') }
- it 'returns complete name with namespace of the project' do
- expect(project.to_human_reference(another_namespace_project)).to eq 'Gitlab / Sample project'
+ context 'when same project argument' do
+ it 'returns nil' do
+ expect(project.to_human_reference(project)).to be_nil
+ end
end
- end
- context 'when same namespace / cross-project argument' do
- let(:another_project) { create(:project, namespace: namespace) }
+ context 'when cross namespace project argument' do
+ it 'returns complete name with namespace of the project' do
+ expect(project.to_human_reference(another_namespace_project)).to eq 'Gitlab / Sample project'
+ end
+ end
- it 'returns name of the project' do
- expect(project.to_human_reference(another_project)).to eq 'Sample project'
+ context 'when same namespace / cross-project argument' do
+ it 'returns name of the project' do
+ expect(project.to_human_reference(another_project)).to eq 'Sample project'
+ end
end
end
end
@@ -665,44 +694,6 @@ describe Project do
expect(project.url_to_repo).to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + 'somewhere.git')
end
- describe "#web_url" do
- let(:project) { create(:project, path: "somewhere") }
-
- context 'when given the only_path option' do
- subject { project.web_url(only_path: only_path) }
-
- context 'when only_path is false' do
- let(:only_path) { false }
-
- it 'returns the full web URL for this repo' do
- expect(subject).to eq("#{Gitlab.config.gitlab.url}/#{project.namespace.full_path}/somewhere")
- end
- end
-
- context 'when only_path is true' do
- let(:only_path) { true }
-
- it 'returns the relative web URL for this repo' do
- expect(subject).to eq("/#{project.namespace.full_path}/somewhere")
- end
- end
-
- context 'when only_path is nil' do
- let(:only_path) { nil }
-
- it 'returns the full web URL for this repo' do
- expect(subject).to eq("#{Gitlab.config.gitlab.url}/#{project.namespace.full_path}/somewhere")
- end
- end
- end
-
- context 'when not given the only_path option' do
- it 'returns the full web URL for this repo' do
- expect(project.web_url).to eq("#{Gitlab.config.gitlab.url}/#{project.namespace.full_path}/somewhere")
- end
- end
- end
-
describe "#readme_url" do
context 'with a non-existing repository' do
let(:project) { create(:project) }
@@ -725,7 +716,7 @@ describe Project do
let(:project) { create(:project, :repository) }
it 'returns the README' do
- expect(project.readme_url).to eq("#{project.web_url}/blob/master/README.md")
+ expect(project.readme_url).to eq("#{project.web_url}/-/blob/master/README.md")
end
end
end
@@ -932,14 +923,6 @@ describe Project do
end
end
- describe '#repository' do
- let(:project) { create(:project, :repository) }
-
- it 'returns valid repo' do
- expect(project.repository).to be_kind_of(Repository)
- end
- end
-
describe '#default_issues_tracker?' do
it "is true if used internal tracker" do
project = build(:project)
@@ -955,24 +938,6 @@ describe Project do
end
end
- describe '#empty_repo?' do
- context 'when the repo does not exist' do
- let(:project) { build_stubbed(:project) }
-
- it 'returns true' do
- expect(project.empty_repo?).to be(true)
- end
- end
-
- context 'when the repo exists' do
- let(:project) { create(:project, :repository) }
- let(:empty_project) { create(:project, :empty_repo) }
-
- it { expect(empty_project.empty_repo?).to be(true) }
- it { expect(project.empty_repo?).to be(false) }
- end
- end
-
describe '#external_issue_tracker' do
let(:project) { create(:project) }
let(:ext_project) { create(:redmine_project) }
@@ -1344,8 +1309,8 @@ describe Project do
describe '.trending' do
let(:group) { create(:group, :public) }
- let(:project1) { create(:project, :public, group: group) }
- let(:project2) { create(:project, :public, group: group) }
+ let(:project1) { create(:project, :public, :repository, group: group) }
+ let(:project2) { create(:project, :public, :repository, group: group) }
before do
create_list(:note_on_commit, 2, project: project1)
@@ -2025,6 +1990,23 @@ describe Project do
expect(project.reload.import_url).to eq('http://test.com')
end
+
+ it 'saves the url credentials percent decoded' do
+ url = 'http://user:pass%21%3F%40@github.com/t.git'
+ project = build(:project, import_url: url)
+
+ # When the credentials are not decoded this expectation fails
+ expect(project.import_url).to eq(url)
+ expect(project.import_data.credentials).to eq(user: 'user', password: 'pass!?@')
+ end
+
+ it 'saves url with no credentials' do
+ url = 'http://github.com/t.git'
+ project = build(:project, import_url: url)
+
+ expect(project.import_url).to eq(url)
+ expect(project.import_data.credentials).to eq(user: nil, password: nil)
+ end
end
describe '#container_registry_url' do
@@ -2153,6 +2135,28 @@ describe Project do
end
end
+ describe '#uses_default_ci_config?' do
+ let(:project) { build(:project)}
+
+ it 'has a custom ci config path' do
+ project.ci_config_path = 'something_custom'
+
+ expect(project.uses_default_ci_config?).to be_falsey
+ end
+
+ it 'has a blank ci config path' do
+ project.ci_config_path = ''
+
+ expect(project.uses_default_ci_config?).to be_truthy
+ end
+
+ it 'does not have a custom ci config path' do
+ project.ci_config_path = nil
+
+ expect(project.uses_default_ci_config?).to be_truthy
+ end
+ end
+
describe '#latest_successful_build_for_ref' do
let(:project) { create(:project, :repository) }
let(:pipeline) { create_pipeline(project) }
@@ -2730,16 +2734,44 @@ describe Project do
describe '#all_lfs_objects' do
let(:lfs_object) { create(:lfs_object) }
- before do
- project.lfs_objects << lfs_object
+ context 'when LFS object is only associated to the source' do
+ before do
+ project.lfs_objects << lfs_object
+ end
+
+ it 'returns the lfs object for a project' do
+ expect(project.all_lfs_objects).to contain_exactly(lfs_object)
+ end
+
+ it 'returns the lfs object for a fork' do
+ expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object)
+ end
end
- it 'returns the lfs object for a project' do
- expect(project.all_lfs_objects).to contain_exactly(lfs_object)
+ context 'when LFS object is only associated to the fork' do
+ before do
+ forked_project.lfs_objects << lfs_object
+ end
+
+ it 'returns nothing' do
+ expect(project.all_lfs_objects).to be_empty
+ end
+
+ it 'returns the lfs object for a fork' do
+ expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object)
+ end
end
- it 'returns the lfs object for a fork' do
- expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object)
+ context 'when LFS object is associated to both source and fork' do
+ before do
+ project.lfs_objects << lfs_object
+ forked_project.lfs_objects << lfs_object
+ end
+
+ it 'returns the lfs object for the source and fork' do
+ expect(project.all_lfs_objects).to contain_exactly(lfs_object)
+ expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object)
+ end
end
end
end
@@ -3407,59 +3439,6 @@ describe Project do
end
end
- describe '#http_url_to_repo' do
- let(:project) { create(:project) }
-
- context 'when a custom HTTP clone URL root is not set' do
- it 'returns the url to the repo without a username' do
- expect(project.http_url_to_repo).to eq("#{project.web_url}.git")
- expect(project.http_url_to_repo).not_to include('@')
- end
- end
-
- context 'when a custom HTTP clone URL root is set' do
- before do
- stub_application_setting(custom_http_clone_url_root: custom_http_clone_url_root)
- end
-
- context 'when custom HTTP clone URL root has a relative URL root' do
- context 'when custom HTTP clone URL root ends with a slash' do
- let(:custom_http_clone_url_root) { 'https://git.example.com:51234/mygitlab/' }
-
- it 'returns the url to the repo, with the root replaced with the custom one' do
- expect(project.http_url_to_repo).to eq("https://git.example.com:51234/mygitlab/#{project.full_path}.git")
- end
- end
-
- context 'when custom HTTP clone URL root does not end with a slash' do
- let(:custom_http_clone_url_root) { 'https://git.example.com:51234/mygitlab' }
-
- it 'returns the url to the repo, with the root replaced with the custom one' do
- expect(project.http_url_to_repo).to eq("https://git.example.com:51234/mygitlab/#{project.full_path}.git")
- end
- end
- end
-
- context 'when custom HTTP clone URL root does not have a relative URL root' do
- context 'when custom HTTP clone URL root ends with a slash' do
- let(:custom_http_clone_url_root) { 'https://git.example.com:51234/' }
-
- it 'returns the url to the repo, with the root replaced with the custom one' do
- expect(project.http_url_to_repo).to eq("https://git.example.com:51234/#{project.full_path}.git")
- end
- end
-
- context 'when custom HTTP clone URL root does not end with a slash' do
- let(:custom_http_clone_url_root) { 'https://git.example.com:51234' }
-
- it 'returns the url to the repo, with the root replaced with the custom one' do
- expect(project.http_url_to_repo).to eq("https://git.example.com:51234/#{project.full_path}.git")
- end
- end
- end
- end
- end
-
describe '#lfs_http_url_to_repo' do
let(:project) { create(:project) }
@@ -3780,6 +3759,25 @@ describe Project do
end
end
+ describe '.wrap_with_cte' do
+ let!(:user) { create(:user) }
+
+ let!(:private_project) do
+ create(:project, :private, creator: user, namespace: user.namespace)
+ end
+
+ let!(:public_project) { create(:project, :public) }
+
+ let(:projects) { described_class.all.public_or_visible_to_user(user) }
+
+ subject { described_class.wrap_with_cte(projects) }
+
+ it 'wrapped query matches original' do
+ expect(subject.to_sql).to match(/^WITH "projects_cte" AS/)
+ expect(subject).to match_array(projects)
+ end
+ end
+
describe '#pages_available?' do
let(:project) { create(:project, group: group) }
@@ -3931,7 +3929,7 @@ describe Project do
end
context 'legacy storage' do
- set(:project) { create(:project, :repository, :legacy_storage) }
+ let_it_be(:project) { create(:project, :repository, :legacy_storage) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project_storage) { project.send(:storage) }
@@ -3992,7 +3990,7 @@ describe Project do
end
it 'schedules HashedStorage::ProjectMigrateWorker with delayed start when the project repo is in use' do
- Gitlab::ReferenceCounter.new(Gitlab::GlRepository::PROJECT.identifier_for_subject(project)).increase
+ Gitlab::ReferenceCounter.new(Gitlab::GlRepository::PROJECT.identifier_for_container(project)).increase
expect(HashedStorage::ProjectMigrateWorker).to receive(:perform_in)
@@ -4000,7 +3998,7 @@ describe Project do
end
it 'schedules HashedStorage::ProjectMigrateWorker with delayed start when the wiki repo is in use' do
- Gitlab::ReferenceCounter.new(Gitlab::GlRepository::WIKI.identifier_for_subject(project)).increase
+ Gitlab::ReferenceCounter.new(Gitlab::GlRepository::WIKI.identifier_for_container(project)).increase
expect(HashedStorage::ProjectMigrateWorker).to receive(:perform_in)
@@ -4030,7 +4028,7 @@ describe Project do
end
context 'hashed storage' do
- set(:project) { create(:project, :repository, skip_disk_validation: true) }
+ let_it_be(:project) { create(:project, :repository, skip_disk_validation: true) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) }
let(:hashed_prefix) { File.join('@hashed', hash[0..1], hash[2..3]) }
@@ -4120,7 +4118,7 @@ describe Project do
end
describe '#has_ci?' do
- set(:project) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project) }
let(:repository) { double }
before do
@@ -4164,7 +4162,7 @@ describe Project do
Feature.get(:force_autodevops_on_by_default).enable_percentage_of_actors(0)
end
- set(:project) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project) }
subject { project.auto_devops_enabled? }
@@ -4299,7 +4297,7 @@ describe Project do
end
describe '#has_auto_devops_implicitly_enabled?' do
- set(:project) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project) }
context 'when disabled in settings' do
before do
@@ -4360,7 +4358,7 @@ describe Project do
end
describe '#has_auto_devops_implicitly_disabled?' do
- set(:project) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project) }
before do
allow(Feature).to receive(:enabled?).and_call_original
@@ -4438,7 +4436,7 @@ describe Project do
end
describe '#api_variables' do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
it 'exposes API v4 URL' do
expect(project.api_variables.first[:key]).to eq 'CI_API_V4_URL'
@@ -4635,7 +4633,7 @@ describe Project do
end
describe '#write_repository_config' do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
it 'writes full path in .git/config when key is missing' do
project.write_repository_config
@@ -4726,7 +4724,7 @@ describe Project do
end
describe '#has_active_hooks?' do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
it { expect(project.has_active_hooks?).to be_falsey }
@@ -4753,7 +4751,7 @@ describe Project do
end
describe '#has_active_services?' do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
it { expect(project.has_active_services?).to be_falsey }
@@ -5036,21 +5034,11 @@ describe Project do
end
end
- context '#commits_by' do
- let(:project) { create(:project, :repository) }
- let(:commits) { project.repository.commits('HEAD', limit: 3).commits }
- let(:commit_shas) { commits.map(&:id) }
-
- it 'retrieves several commits from the repository by oid' do
- expect(project.commits_by(oids: commit_shas)).to eq commits
- end
- end
-
- context '#members_among' do
+ describe '#members_among' do
let(:users) { create_list(:user, 3) }
- set(:group) { create(:group) }
- set(:project) { create(:project, namespace: group) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, namespace: group) }
before do
project.add_guest(users.first)
@@ -5568,6 +5556,102 @@ describe Project do
end
end
+ describe 'with_issues_or_mrs_available_for_user' do
+ before do
+ Project.delete_all
+ end
+
+ it 'returns correct projects' do
+ user = create(:user)
+ project1 = create(:project, :public, :merge_requests_disabled, :issues_enabled)
+ project2 = create(:project, :public, :merge_requests_disabled, :issues_disabled)
+ project3 = create(:project, :public, :issues_enabled, :merge_requests_enabled)
+ project4 = create(:project, :private, :issues_private, :merge_requests_private)
+
+ [project1, project2, project3, project4].each { |project| project.add_developer(user) }
+
+ expect(described_class.with_issues_or_mrs_available_for_user(user))
+ .to contain_exactly(project1, project3, project4)
+ end
+ end
+
+ describe '#limited_protected_branches' do
+ let(:project) { create(:project) }
+ let!(:protected_branch) { create(:protected_branch, project: project) }
+ let!(:another_protected_branch) { create(:protected_branch, project: project) }
+
+ subject { project.limited_protected_branches(1) }
+
+ it 'returns limited number of protected branches based on specified limit' do
+ expect(subject).to eq([another_protected_branch])
+ end
+ end
+
+ describe '#lfs_objects_oids' do
+ let(:project) { create(:project) }
+ let(:lfs_object) { create(:lfs_object) }
+ let(:another_lfs_object) { create(:lfs_object) }
+
+ subject { project.lfs_objects_oids }
+
+ context 'when project has associated LFS objects' do
+ before do
+ create(:lfs_objects_project, lfs_object: lfs_object, project: project)
+ create(:lfs_objects_project, lfs_object: another_lfs_object, project: project)
+ end
+
+ it 'returns OIDs of LFS objects' do
+ expect(subject).to match_array([lfs_object.oid, another_lfs_object.oid])
+ end
+ end
+
+ context 'when project has no associated LFS objects' do
+ it 'returns empty array' do
+ expect(subject).to be_empty
+ end
+ end
+ end
+
+ describe '#alerts_service_activated?' do
+ let!(:project) { create(:project) }
+
+ subject { project.alerts_service_activated? }
+
+ context 'when project has an activated alerts service' do
+ before do
+ create(:alerts_service, project: project)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when project has an inactive alerts service' do
+ before do
+ create(:alerts_service, :inactive, project: project)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#self_monitoring?' do
+ let_it_be(:project) { create(:project) }
+
+ subject { project.self_monitoring? }
+
+ context 'when the project is instance self monitoring' do
+ before do
+ stub_application_setting(self_monitoring_project_id: project.id)
+ end
+
+ it { is_expected.to be true }
+ end
+
+ context 'when the project is not self monitoring' do
+ it { is_expected.to be false }
+ end
+ end
+
def rugged_config
rugged_repo(project.repository).config
end
diff --git a/spec/models/prometheus_alert_spec.rb b/spec/models/prometheus_alert_spec.rb
new file mode 100644
index 00000000000..cdcdb46a6c4
--- /dev/null
+++ b/spec/models/prometheus_alert_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe PrometheusAlert do
+ let_it_be(:project) { build(:project) }
+ let(:metric) { build(:prometheus_metric) }
+
+ describe '.distinct_projects' do
+ let(:project1) { create(:project) }
+ let(:project2) { create(:project) }
+
+ before do
+ create(:prometheus_alert, project: project1)
+ create(:prometheus_alert, project: project1)
+ create(:prometheus_alert, project: project2)
+ end
+
+ subject { described_class.distinct_projects.count }
+
+ it 'returns a count of all distinct projects which have an alert' do
+ expect(subject).to eq(2)
+ end
+ end
+
+ describe 'operators' do
+ it 'contains the correct equality operator' do
+ expect(described_class::OPERATORS_MAP.values).to include('==')
+ expect(described_class::OPERATORS_MAP.values).not_to include('=')
+ end
+ end
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:environment) }
+ end
+
+ describe 'project validations' do
+ let(:environment) { build(:environment, project: project) }
+ let(:metric) { build(:prometheus_metric, project: project) }
+
+ subject do
+ build(:prometheus_alert, prometheus_metric: metric, environment: environment, project: project)
+ end
+
+ it { is_expected.to validate_presence_of(:environment) }
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:prometheus_metric) }
+
+ context 'when environment and metric belong to the same project' do
+ it { is_expected.to be_valid }
+ end
+
+ context 'when environment belongs to different project' do
+ let(:environment) { build(:environment) }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when metric belongs to different project' do
+ let(:metric) { build(:prometheus_metric) }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when metric is common' do
+ let(:metric) { build(:prometheus_metric, :common) }
+
+ it { is_expected.to be_valid }
+ end
+ end
+
+ describe '#full_query' do
+ before do
+ subject.operator = "gt"
+ subject.threshold = 1
+ subject.prometheus_metric = metric
+ end
+
+ it 'returns the concatenated query' do
+ expect(subject.full_query).to eq("#{metric.query} > 1.0")
+ end
+ end
+
+ describe '#to_param' do
+ before do
+ subject.operator = "gt"
+ subject.threshold = 1
+ subject.prometheus_metric = metric
+ end
+
+ it 'returns the params of the prometheus alert' do
+ expect(subject.to_param).to eq(
+ "alert" => metric.title,
+ "expr" => "#{metric.query} > 1.0",
+ "for" => "5m",
+ "labels" => {
+ "gitlab" => "hook",
+ "gitlab_alert_id" => metric.id
+ })
+ end
+ end
+end
diff --git a/spec/models/prometheus_metric_spec.rb b/spec/models/prometheus_metric_spec.rb
index a123ff5a2a6..93abef063cb 100644
--- a/spec/models/prometheus_metric_spec.rb
+++ b/spec/models/prometheus_metric_spec.rb
@@ -67,6 +67,7 @@ describe PrometheusMetric do
it_behaves_like 'group_title', :business, 'Business metrics (Custom)'
it_behaves_like 'group_title', :response, 'Response metrics (Custom)'
it_behaves_like 'group_title', :system, 'System metrics (Custom)'
+ it_behaves_like 'group_title', :cluster_health, 'Cluster Health'
end
describe '#priority' do
@@ -82,6 +83,7 @@ describe PrometheusMetric do
:business | 0
:response | -5
:system | -10
+ :cluster_health | 10
end
with_them do
@@ -106,6 +108,7 @@ describe PrometheusMetric do
:business | %w()
:response | %w()
:system | %w()
+ :cluster_health | %w(container_memory_usage_bytes container_cpu_usage_seconds_total)
end
with_them do
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index 267434a4148..7f8a60dafa8 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -220,4 +220,32 @@ describe ProtectedBranch do
end
end
end
+
+ describe '.by_name' do
+ let!(:protected_branch) { create(:protected_branch, name: 'master') }
+ let!(:another_protected_branch) { create(:protected_branch, name: 'stable') }
+
+ it 'returns protected branches with a matching name' do
+ expect(described_class.by_name(protected_branch.name))
+ .to eq([protected_branch])
+ end
+
+ it 'returns protected branches with a partially matching name' do
+ expect(described_class.by_name(protected_branch.name[0..2]))
+ .to eq([protected_branch])
+ end
+
+ it 'returns protected branches with a matching name regardless of the casing' do
+ expect(described_class.by_name(protected_branch.name.upcase))
+ .to eq([protected_branch])
+ end
+
+ it 'returns nothing when nothing matches' do
+ expect(described_class.by_name('unknown')).to be_empty
+ end
+
+ it 'returns nothing when query is blank' do
+ expect(described_class.by_name('')).to be_empty
+ end
+ end
end
diff --git a/spec/models/push_event_spec.rb b/spec/models/push_event_spec.rb
index 5509ed87308..8682e1c797b 100644
--- a/spec/models/push_event_spec.rb
+++ b/spec/models/push_event_spec.rb
@@ -74,7 +74,7 @@ describe PushEvent do
create(:push_event_payload, event: event4, ref: 'baz', action: :removed)
create(:push_event_payload, event: event5, ref: 'baz', ref_type: :tag)
- project.repository.create_branch('bar', 'master')
+ project.repository.create_branch('bar')
create(
:merge_request,
@@ -83,7 +83,7 @@ describe PushEvent do
source_branch: 'bar'
)
- project.repository.create_branch('qux', 'master')
+ project.repository.create_branch('qux')
create(
:merge_request,
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 2f84b92b806..85e398c7d5f 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -53,12 +53,6 @@ RSpec.describe Release do
end
end
- describe 'callbacks' do
- it 'creates a new Evidence object on after_commit', :sidekiq_inline do
- expect { release }.to change(Evidence, :count).by(1)
- end
- end
-
describe '#assets_count' do
subject { release.assets_count }
diff --git a/spec/models/releases/source_spec.rb b/spec/models/releases/source_spec.rb
index c8ac8e31c97..d7af6fd90a6 100644
--- a/spec/models/releases/source_spec.rb
+++ b/spec/models/releases/source_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Releases::Source do
- set(:project) { create(:project, :repository, name: 'finance-cal') }
+ let_it_be(:project) { create(:project, :repository, name: 'finance-cal') }
let(:tag_name) { 'v1.0' }
describe '.all' do
diff --git a/spec/models/remote_mirror_spec.rb b/spec/models/remote_mirror_spec.rb
index 79d45da8a1e..f5e718e0e09 100644
--- a/spec/models/remote_mirror_spec.rb
+++ b/spec/models/remote_mirror_spec.rb
@@ -227,7 +227,7 @@ describe RemoteMirror, :mailer do
end
end
- context '#sync' do
+ describe '#sync' do
let(:remote_mirror) { create(:project, :repository, :remote_mirror).remote_mirrors.first }
around do |example|
@@ -297,7 +297,7 @@ describe RemoteMirror, :mailer do
end
end
- context '#ensure_remote!' do
+ describe '#ensure_remote!' do
let(:remote_mirror) { create(:project, :repository, :remote_mirror).remote_mirrors.first }
let(:project) { remote_mirror.project }
let(:repository) { project.repository }
@@ -321,7 +321,7 @@ describe RemoteMirror, :mailer do
end
end
- context '#url=' do
+ describe '#url=' do
let(:remote_mirror) { create(:project, :repository, :remote_mirror).remote_mirrors.first }
it 'resets all the columns when URL changes' do
@@ -340,7 +340,7 @@ describe RemoteMirror, :mailer do
end
end
- context '#updated_since?' do
+ describe '#updated_since?' do
let(:remote_mirror) { create(:project, :repository, :remote_mirror).remote_mirrors.first }
let(:timestamp) { Time.now - 5.minutes }
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 38f3777c902..00ffc3cae54 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -40,7 +40,7 @@ describe Repository do
end
describe '#branch_names_contains' do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
subject { repository.branch_names_contains(sample_commit.id) }
@@ -325,10 +325,18 @@ describe Repository do
expect(repository.commits(nil, all: true, limit: 60).size).to eq(60)
end
end
+
+ context "when 'order' flag is set" do
+ it 'passes order option to perform the query' do
+ expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(order: 'topo')).and_call_original
+
+ repository.commits('master', limit: 1, order: 'topo')
+ end
+ end
end
describe '#new_commits' do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
subject { repository.new_commits(rev) }
@@ -356,7 +364,7 @@ describe Repository do
end
describe '#commits_by' do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:oids) { TestEnv::BRANCH_SHA.values }
subject { project.repository.commits_by(oids: oids) }
@@ -372,7 +380,7 @@ describe Repository do
context 'when some commits are not found ' do
let(:oids) do
- ['deadbeef'] + TestEnv::BRANCH_SHA.values.first(10)
+ ['deadbeef'] + TestEnv::BRANCH_SHA.each_value.first(10)
end
it 'returns only found commits' do
@@ -482,6 +490,129 @@ describe Repository do
end
end
+ describe "#root_ref_sha" do
+ let(:commit) { double("commit", sha: "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3") }
+
+ subject { repository.root_ref_sha }
+
+ before do
+ allow(repository).to receive(:commit).with(repository.root_ref) { commit }
+ end
+
+ it { is_expected.to eq(commit.sha) }
+ end
+
+ describe "#merged_branch_names", :clean_gitlab_redis_cache do
+ subject { repository.merged_branch_names(branch_names) }
+
+ let(:branch_names) { %w(test beep boop definitely_merged) }
+ let(:already_merged) { Set.new(["definitely_merged"]) }
+
+ let(:write_hash) do
+ {
+ "test" => Gitlab::Redis::Boolean.new(false).to_s,
+ "beep" => Gitlab::Redis::Boolean.new(false).to_s,
+ "boop" => Gitlab::Redis::Boolean.new(false).to_s,
+ "definitely_merged" => Gitlab::Redis::Boolean.new(true).to_s
+ }
+ end
+
+ let(:read_hash) do
+ {
+ "test" => Gitlab::Redis::Boolean.new(false).to_s,
+ "beep" => Gitlab::Redis::Boolean.new(false).to_s,
+ "boop" => Gitlab::Redis::Boolean.new(false).to_s,
+ "definitely_merged" => Gitlab::Redis::Boolean.new(true).to_s
+ }
+ end
+
+ let(:cache) { repository.send(:redis_hash_cache) }
+ let(:cache_key) { cache.cache_key(:merged_branch_names) }
+
+ before do
+ allow(repository.raw_repository).to receive(:merged_branch_names).with(branch_names).and_return(already_merged)
+ end
+
+ it { is_expected.to eq(already_merged) }
+ it { is_expected.to be_a(Set) }
+
+ describe "cache expiry" do
+ before do
+ allow(cache).to receive(:delete).with(anything)
+ end
+
+ it "is expired when the branches caches are expired" do
+ expect(cache).to receive(:delete).with(:merged_branch_names).at_least(:once)
+
+ repository.send(:expire_branches_cache)
+ end
+
+ it "is expired when the repository caches are expired" do
+ expect(cache).to receive(:delete).with(:merged_branch_names).at_least(:once)
+
+ repository.send(:expire_all_method_caches)
+ end
+ end
+
+ context "cache is empty" do
+ before do
+ cache.delete(:merged_branch_names)
+ end
+
+ it { is_expected.to eq(already_merged) }
+
+ describe "cache values" do
+ it "writes the values to redis" do
+ expect(cache).to receive(:write).with(:merged_branch_names, write_hash)
+
+ subject
+ end
+
+ it "matches the supplied hash" do
+ subject
+
+ expect(cache.read_members(:merged_branch_names, branch_names)).to eq(read_hash)
+ end
+ end
+ end
+
+ context "cache is not empty" do
+ before do
+ cache.write(:merged_branch_names, write_hash)
+ end
+
+ it { is_expected.to eq(already_merged) }
+
+ it "doesn't fetch from the disk" do
+ expect(repository.raw_repository).not_to receive(:merged_branch_names)
+
+ subject
+ end
+ end
+
+ context "cache is partially complete" do
+ before do
+ allow(repository.raw_repository).to receive(:merged_branch_names).with(["boop"]).and_return([])
+ hash = write_hash.except("boop")
+ cache.write(:merged_branch_names, hash)
+ end
+
+ it { is_expected.to eq(already_merged) }
+
+ it "does fetch from the disk" do
+ expect(repository.raw_repository).to receive(:merged_branch_names).with(["boop"])
+
+ subject
+ end
+ end
+
+ context "requested branches array is empty" do
+ let(:branch_names) { [] }
+
+ it { is_expected.to eq(already_merged) }
+ end
+ end
+
describe '#can_be_merged?' do
context 'mergeable branches' do
subject { repository.can_be_merged?('0b4bc9a49b562e85de7cc9e834518ea6828729b9', 'master') }
@@ -1512,79 +1643,57 @@ describe Repository do
end
end
- context 'when two_step_rebase feature is enabled' do
- before do
- stub_feature_flags(two_step_rebase: true)
- end
-
- it_behaves_like 'a method that can rebase successfully'
-
- it 'executes the new Gitaly RPC' do
- expect_any_instance_of(Gitlab::GitalyClient::OperationService).to receive(:rebase)
- expect_any_instance_of(Gitlab::GitalyClient::OperationService).not_to receive(:user_rebase)
+ it_behaves_like 'a method that can rebase successfully'
- repository.rebase(user, merge_request)
- end
+ it 'executes the new Gitaly RPC' do
+ expect_any_instance_of(Gitlab::GitalyClient::OperationService).to receive(:rebase)
- describe 'rolling back the `rebase_commit_sha`' do
- let(:new_sha) { Digest::SHA1.hexdigest('foo') }
-
- it 'does not rollback when there are no errors' do
- second_response = double(pre_receive_error: nil, git_error: nil)
- mock_gitaly(second_response)
-
- repository.rebase(user, merge_request)
+ repository.rebase(user, merge_request)
+ end
- expect(merge_request.reload.rebase_commit_sha).to eq(new_sha)
- end
+ describe 'rolling back the `rebase_commit_sha`' do
+ let(:new_sha) { Digest::SHA1.hexdigest('foo') }
- it 'does rollback when a PreReceiveError is encountered in the second step' do
- second_response = double(pre_receive_error: 'my_error', git_error: nil)
- mock_gitaly(second_response)
+ it 'does not rollback when there are no errors' do
+ second_response = double(pre_receive_error: nil, git_error: nil)
+ mock_gitaly(second_response)
- expect do
- repository.rebase(user, merge_request)
- end.to raise_error(Gitlab::Git::PreReceiveError)
+ repository.rebase(user, merge_request)
- expect(merge_request.reload.rebase_commit_sha).to be_nil
- end
+ expect(merge_request.reload.rebase_commit_sha).to eq(new_sha)
+ end
- it 'does rollback when a GitError is encountered in the second step' do
- second_response = double(pre_receive_error: nil, git_error: 'git error')
- mock_gitaly(second_response)
+ it 'does rollback when a PreReceiveError is encountered in the second step' do
+ second_response = double(pre_receive_error: 'my_error', git_error: nil)
+ mock_gitaly(second_response)
- expect do
- repository.rebase(user, merge_request)
- end.to raise_error(Gitlab::Git::Repository::GitError)
+ expect do
+ repository.rebase(user, merge_request)
+ end.to raise_error(Gitlab::Git::PreReceiveError)
- expect(merge_request.reload.rebase_commit_sha).to be_nil
- end
+ expect(merge_request.reload.rebase_commit_sha).to be_nil
+ end
- def mock_gitaly(second_response)
- responses = [
- double(rebase_sha: new_sha).as_null_object,
- second_response
- ]
+ it 'does rollback when a GitError is encountered in the second step' do
+ second_response = double(pre_receive_error: nil, git_error: 'git error')
+ mock_gitaly(second_response)
- expect_any_instance_of(
- Gitaly::OperationService::Stub
- ).to receive(:user_rebase_confirmable).and_return(responses.each)
- end
- end
- end
+ expect do
+ repository.rebase(user, merge_request)
+ end.to raise_error(Gitlab::Git::Repository::GitError)
- context 'when two_step_rebase feature is disabled' do
- before do
- stub_feature_flags(two_step_rebase: false)
+ expect(merge_request.reload.rebase_commit_sha).to be_nil
end
- it_behaves_like 'a method that can rebase successfully'
-
- it 'executes the deprecated Gitaly RPC' do
- expect_any_instance_of(Gitlab::GitalyClient::OperationService).to receive(:user_rebase)
- expect_any_instance_of(Gitlab::GitalyClient::OperationService).not_to receive(:rebase)
+ def mock_gitaly(second_response)
+ responses = [
+ double(rebase_sha: new_sha).as_null_object,
+ second_response
+ ]
- repository.rebase(user, merge_request)
+ expect_any_instance_of(
+ Gitaly::OperationService::Stub
+ ).to receive(:user_rebase_confirmable).and_return(responses.each)
end
end
end
@@ -1772,6 +1881,7 @@ describe Repository do
:avatar,
:exists?,
:root_ref,
+ :merged_branch_names,
:has_visible_content?,
:issue_template_names,
:merge_request_template_names,
@@ -1805,10 +1915,28 @@ describe Repository do
end
describe '#after_import' do
+ subject { repository.after_import }
+
it 'flushes and builds the cache' do
expect(repository).to receive(:expire_content_cache)
- repository.after_import
+ subject
+ end
+
+ it 'calls DetectRepositoryLanguagesWorker' do
+ expect(DetectRepositoryLanguagesWorker).to receive(:perform_async)
+
+ subject
+ end
+
+ context 'with a wiki repository' do
+ let(:repository) { project.wiki.repository }
+
+ it 'does not call DetectRepositoryLanguagesWorker' do
+ expect(DetectRepositoryLanguagesWorker).not_to receive(:perform_async)
+
+ subject
+ end
end
end
@@ -1929,7 +2057,7 @@ describe Repository do
describe '#expire_branches_cache' do
it 'expires the cache' do
expect(repository).to receive(:expire_method_caches)
- .with(%i(branch_names branch_count has_visible_content?))
+ .with(%i(branch_names merged_branch_names branch_count has_visible_content?))
.and_call_original
repository.expire_branches_cache
@@ -2420,15 +2548,6 @@ describe Repository do
2.times { repository.ancestor?(commit.id, ancestor.id) }
end
- it 'increments a counter with cache hits' do
- counter = Gitlab::Metrics.counter(:repository_ancestor_calls_total, 'Repository ancestor calls')
-
- expect do
- 2.times { repository.ancestor?(commit.id, ancestor.id) }
- end.to change { counter.get(cache_hit: 'true') }.by(1)
- .and change { counter.get(cache_hit: 'false') }.by(1)
- end
-
it 'returns the value from the request store' do
repository.__send__(:request_store_cache).write(cache_key, "it's apparent")
@@ -2480,7 +2599,7 @@ describe Repository do
end
describe 'commit cache' do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
it 'caches based on SHA' do
# Gets the commit oid, and warms the cache
@@ -2628,7 +2747,7 @@ describe Repository do
end
describe '#merge_base' do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
subject(:repository) { project.repository }
it 'only makes one gitaly call' do
@@ -2686,8 +2805,47 @@ describe Repository do
end
end
+ describe '#create_from_bundle' do
+ let(:project) { create(:project) }
+ let(:repository) { project.repository }
+ let(:valid_bundle_path) { File.join(Dir.tmpdir, "repo-#{SecureRandom.hex}.bundle") }
+ let(:raw_repository) { repository.raw }
+
+ before do
+ allow(raw_repository).to receive(:create_from_bundle).and_return({})
+ end
+
+ after do
+ FileUtils.rm_rf(valid_bundle_path)
+ end
+
+ it 'calls out to the raw_repository to create a repo from bundle' do
+ expect(raw_repository).to receive(:create_from_bundle)
+
+ repository.create_from_bundle(valid_bundle_path)
+ end
+
+ it 'calls after_create' do
+ expect(repository).to receive(:after_create)
+
+ repository.create_from_bundle(valid_bundle_path)
+ end
+
+ context 'when exception is raised' do
+ before do
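+ # Simulate an invalid bundle so the import raises before any after_create hooks run.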
+ allow(raw_repository).to receive(:create_from_bundle).and_raise(::Gitlab::Git::BundleFile::InvalidBundleError)
+ end
+
+ it 'after_create is not executed' do
+ expect(repository).not_to receive(:after_create)
+
+ expect { repository.create_from_bundle(valid_bundle_path) }.to raise_error(::Gitlab::Git::BundleFile::InvalidBundleError)
+ end
+ end
+ end
+
describe "#blobs_metadata" do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
def expect_metadata_blob(thing)
@@ -2714,4 +2872,10 @@ describe Repository do
.to change { Gitlab::GitalyClient.get_request_count }.by(1)
end
end
+
+ describe '#submodule_links' do
+ it 'returns an instance of Gitlab::SubmoduleLinks' do
+ expect(repository.submodule_links).to be_a(Gitlab::SubmoduleLinks)
+ end
+ end
end
diff --git a/spec/models/sent_notification_spec.rb b/spec/models/sent_notification_spec.rb
index 7539bf1e957..fedaae372c4 100644
--- a/spec/models/sent_notification_spec.rb
+++ b/spec/models/sent_notification_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe SentNotification do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
describe 'validation' do
describe 'note validity' do
diff --git a/spec/models/sentry_issue_spec.rb b/spec/models/sentry_issue_spec.rb
index 7dc1cea4617..b4c1cf57761 100644
--- a/spec/models/sentry_issue_spec.rb
+++ b/spec/models/sentry_issue_spec.rb
@@ -13,16 +13,45 @@ describe SentryIssue do
it { is_expected.to validate_presence_of(:issue) }
it { is_expected.to validate_uniqueness_of(:issue) }
it { is_expected.to validate_presence_of(:sentry_issue_identifier) }
+
+ it 'allows duplicated sentry_issue_identifier' do
+ duplicate_sentry_issue = build(:sentry_issue, sentry_issue_identifier: sentry_issue.sentry_issue_identifier)
+
+ expect(duplicate_sentry_issue).to be_valid
+ end
+
+ it 'validates uniqueness of sentry_issue_identifier per project' do
+ second_issue = create(:issue, project: sentry_issue.issue.project)
+ duplicate_sentry_issue = build(:sentry_issue, issue: second_issue, sentry_issue_identifier: sentry_issue.sentry_issue_identifier)
+
+ expect(duplicate_sentry_issue).to be_invalid
+ expect(duplicate_sentry_issue.errors.full_messages.first).to include('is already associated')
+ end
+ end
+
+ describe 'callbacks' do
+ context 'after create commit do' do
+ it 'updates Sentry with a reciprocal link on creation' do
+ issue = create(:issue)
+
+ expect(ErrorTrackingIssueLinkWorker).to receive(:perform_async).with(issue.id)
+
+ create(:sentry_issue, issue: issue)
+ end
+ end
end
describe '.for_project_and_identifier' do
- let!(:sentry_issue) { create(:sentry_issue) }
- let(:project) { sentry_issue.issue.project }
- let(:identifier) { sentry_issue.sentry_issue_identifier }
- let!(:second_sentry_issue) { create(:sentry_issue) }
+ it 'finds the most recent per project and sentry_issue_identifier' do
+ sentry_issue = create(:sentry_issue)
+ create(:sentry_issue)
+ project = sentry_issue.issue.project
+ sentry_issue_3 = build(:sentry_issue, issue: create(:issue, project: project), sentry_issue_identifier: sentry_issue.sentry_issue_identifier)
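+ # save(validate: false) below bypasses the per-project uniqueness check so a newer duplicate exists for the scope to prefer.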
+ sentry_issue_3.save(validate: false)
- subject { described_class.for_project_and_identifier(project, identifier) }
+ result = described_class.for_project_and_identifier(project, sentry_issue.sentry_issue_identifier)
- it { is_expected.to eq(sentry_issue) }
+ expect(result).to eq(sentry_issue_3)
+ end
end
end
diff --git a/spec/models/serverless/domain_cluster_spec.rb b/spec/models/serverless/domain_cluster_spec.rb
index 73d7d64d35e..bd645b7d0aa 100644
--- a/spec/models/serverless/domain_cluster_spec.rb
+++ b/spec/models/serverless/domain_cluster_spec.rb
@@ -2,16 +2,23 @@
require 'spec_helper'
-describe Serverless::DomainCluster do
+describe ::Serverless::DomainCluster do
subject { create(:serverless_domain_cluster) }
describe 'validations' do
it { is_expected.to validate_presence_of(:pages_domain) }
it { is_expected.to validate_presence_of(:knative) }
- it { is_expected.to validate_presence_of(:uuid) }
+ it { is_expected.to validate_presence_of(:uuid) }
+ it { is_expected.to validate_length_of(:uuid).is_equal_to(Gitlab::Serverless::Domain::UUID_LENGTH) }
it { is_expected.to validate_uniqueness_of(:uuid) }
- it { is_expected.to validate_length_of(:uuid).is_equal_to(14) }
+
+ it 'validates that uuid has only hex characters' do
+ subject = build(:serverless_domain_cluster, uuid: 'z1234567890123')
+ subject.valid?
+
+ expect(subject.errors[:uuid]).to include('only allows hex characters')
+ end
end
describe 'associations' do
@@ -19,4 +26,36 @@ describe Serverless::DomainCluster do
it { is_expected.to belong_to(:knative) }
it { is_expected.to belong_to(:creator).optional }
end
+
+ describe 'uuid' do
+ context 'when nil' do
+ it 'generates a value by default' do
+ attributes = build(:serverless_domain_cluster).attributes.merge(uuid: nil)
+ expect(Gitlab::Serverless::Domain).to receive(:generate_uuid).and_call_original
+
+ subject = Serverless::DomainCluster.new(attributes)
+
+ expect(subject.uuid).not_to be_blank
+ end
+ end
+
+ context 'when not nil' do
+ it 'does not override the existing value' do
+ uuid = 'abcd1234567890'
+ expect(build(:serverless_domain_cluster, uuid: uuid).uuid).to eq(uuid)
+ end
+ end
+ end
+
+ describe 'domain' do
+ it { is_expected.to respond_to(:domain) }
+ end
+
+ describe 'certificate' do
+ it { is_expected.to respond_to(:certificate) }
+ end
+
+ describe 'key' do
+ it { is_expected.to respond_to(:key) }
+ end
end
diff --git a/spec/models/serverless/function_spec.rb b/spec/models/serverless/function_spec.rb
index 1854d5f9415..810d4409a34 100644
--- a/spec/models/serverless/function_spec.rb
+++ b/spec/models/serverless/function_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Serverless::Function do
+describe ::Serverless::Function do
let(:project) { create(:project) }
let(:func) { described_class.new(project, 'test', 'test-ns') }
diff --git a/spec/models/snippet_repository_spec.rb b/spec/models/snippet_repository_spec.rb
new file mode 100644
index 00000000000..9befbb02b17
--- /dev/null
+++ b/spec/models/snippet_repository_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe SnippetRepository do
+ describe 'associations' do
+ it { is_expected.to belong_to(:shard) }
+ it { is_expected.to belong_to(:snippet) }
+ end
+
+ describe '.find_snippet' do
+ it 'finds snippet by disk path' do
+ snippet = create(:snippet)
+ snippet.track_snippet_repository
+
+ expect(described_class.find_snippet(snippet.disk_path)).to eq(snippet)
+ end
+
+ it 'returns nil when it does not find the snippet' do
+ expect(described_class.find_snippet('@@unexisting/path/to/snippet')).to be_nil
+ end
+ end
+end
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index ae43c0d585a..93bc42c144d 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -19,6 +19,7 @@ describe Snippet do
it { is_expected.to have_many(:notes).dependent(:destroy) }
it { is_expected.to have_many(:award_emoji).dependent(:destroy) }
it { is_expected.to have_many(:user_mentions).class_name("SnippetUserMention") }
+ it { is_expected.to have_one(:snippet_repository) }
end
describe 'validation' do
@@ -525,4 +526,109 @@ describe Snippet do
snippet.to_json(params)
end
end
+
+ describe '#storage' do
+ let(:snippet) { create(:snippet) }
+
+ it "stores snippet in #{Storage::Hashed::SNIPPET_REPOSITORY_PATH_PREFIX} dir" do
+ expect(snippet.storage.disk_path).to start_with Storage::Hashed::SNIPPET_REPOSITORY_PATH_PREFIX
+ end
+ end
+
+ describe '#track_snippet_repository' do
+ let(:snippet) { create(:snippet, :repository) }
+
+ context 'when a snippet repository entry does not exist' do
+ it 'creates a new entry' do
+ expect { snippet.track_snippet_repository }.to change(snippet, :snippet_repository)
+ end
+
+ it 'tracks the snippet storage location' do
+ snippet.track_snippet_repository
+
+ expect(snippet.snippet_repository).to have_attributes(
+ disk_path: snippet.disk_path,
+ shard_name: snippet.repository_storage
+ )
+ end
+ end
+
+ context 'when a tracking entry exists' do
+ let!(:snippet_repository) { create(:snippet_repository, snippet: snippet) }
+ let!(:shard) { create(:shard, name: 'foo') }
+
+ it 'does not create a new entry in the database' do
+ expect { snippet.track_snippet_repository }.not_to change(snippet, :snippet_repository)
+ end
+
+ it 'updates the snippet storage location' do
+ allow(snippet).to receive(:disk_path).and_return('fancy/new/path')
+ allow(snippet).to receive(:repository_storage).and_return('foo')
+
+ snippet.track_snippet_repository
+
+ expect(snippet.snippet_repository).to have_attributes(
+ disk_path: 'fancy/new/path',
+ shard_name: 'foo'
+ )
+ end
+ end
+ end
+
+ describe '#create_repository' do
+ let(:snippet) { create(:snippet) }
+
+ it 'creates the repository' do
+ expect(snippet.repository).to receive(:after_create).and_call_original
+
+ expect(snippet.create_repository).to be_truthy
+ expect(snippet.repository.exists?).to be_truthy
+ end
+
+ it 'tracks snippet repository' do
+ expect do
+ snippet.create_repository
+ end.to change(SnippetRepository, :count).by(1)
+ end
+
+ context 'when repository exists' do
+ let(:snippet) { create(:snippet, :repository) }
+
+ it 'does not try to create repository' do
+ expect(snippet.repository).not_to receive(:after_create)
+
+ expect(snippet.create_repository).to be_nil
+ end
+
+ it 'does not track snippet repository' do
+ expect do
+ snippet.create_repository
+ end.not_to change(SnippetRepository, :count)
+ end
+ end
+ end
+
+ describe '#repository_storage' do
+ let(:snippet) { create(:snippet) }
+
+ it 'returns default repository storage' do
+ expect(Gitlab::CurrentSettings).to receive(:pick_repository_storage)
+
+ snippet.repository_storage
+ end
+
+ context 'when snippet_project is already created' do
+ let!(:snippet_repository) { create(:snippet_repository, snippet: snippet) }
+
+ before do
+ allow(snippet_repository).to receive(:shard_name).and_return('foo')
+ end
+
+ it 'returns repository_storage from snippet_project' do
+ expect(Gitlab::CurrentSettings).not_to receive(:pick_repository_storage)
+
+ expect(snippet.repository_storage).to eq 'foo'
+ end
+ end
+ end
end
diff --git a/spec/models/spam_log_spec.rb b/spec/models/spam_log_spec.rb
index f4e073dc38f..8ebd97de9ff 100644
--- a/spec/models/spam_log_spec.rb
+++ b/spec/models/spam_log_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe SpamLog do
- let(:admin) { create(:admin) }
+ let_it_be(:admin) { create(:admin) }
describe 'associations' do
it { is_expected.to belong_to(:user) }
@@ -31,4 +31,29 @@ describe SpamLog do
expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
end
+
+ describe '.verify_recaptcha!' do
+ let_it_be(:spam_log) { create(:spam_log, user: admin, recaptcha_verified: false) }
+
+ context 'the record cannot be found' do
+ it 'updates nothing' do
+ expect(instance_of(described_class)).not_to receive(:update!)
+
+ described_class.verify_recaptcha!(id: spam_log.id, user_id: admin.id)
+
+ expect(spam_log.recaptcha_verified).to be_falsey
+ end
+
+ it 'does not error despite not finding a record' do
+ expect { described_class.verify_recaptcha!(id: -1, user_id: admin.id) }.not_to raise_error
+ end
+ end
+
+ context 'the record exists' do
+ it 'updates recaptcha_verified' do
+ expect { described_class.verify_recaptcha!(id: spam_log.id, user_id: admin.id) }
+ .to change { spam_log.reload.recaptcha_verified }.from(false).to(true)
+ end
+ end
+ end
end
diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb
index ea09c6caed3..3f0c95b2513 100644
--- a/spec/models/todo_spec.rb
+++ b/spec/models/todo_spec.rb
@@ -313,6 +313,36 @@ describe Todo do
end
end
+ describe '.for_ids' do
+ it 'returns the expected todos' do
+ todo1 = create(:todo)
+ todo2 = create(:todo)
+ todo3 = create(:todo)
+ create(:todo)
+
+ expect(described_class.for_ids([todo2.id, todo1.id, todo3.id])).to contain_exactly(todo1, todo2, todo3)
+ end
+
+ it 'returns an empty collection when no ids are given' do
+ create(:todo)
+
+ expect(described_class.for_ids([])).to be_empty
+ end
+ end
+
+ describe '.for_user' do
+ it 'returns the expected todos' do
+ user1 = create(:user)
+ user2 = create(:user)
+
+ todo1 = create(:todo, user: user1)
+ todo2 = create(:todo, user: user1)
+ create(:todo, user: user2)
+
+ expect(described_class.for_user(user1)).to contain_exactly(todo1, todo2)
+ end
+ end
+
describe '.any_for_target?' do
it 'returns true if there are todos for a given target' do
todo = create(:todo)
diff --git a/spec/models/trending_project_spec.rb b/spec/models/trending_project_spec.rb
index 4a248b71574..39f5d686eb4 100644
--- a/spec/models/trending_project_spec.rb
+++ b/spec/models/trending_project_spec.rb
@@ -4,11 +4,11 @@ require 'spec_helper'
describe TrendingProject do
let(:user) { create(:user) }
- let(:public_project1) { create(:project, :public) }
- let(:public_project2) { create(:project, :public) }
- let(:public_project3) { create(:project, :public) }
- let(:private_project) { create(:project, :private) }
- let(:internal_project) { create(:project, :internal) }
+ let(:public_project1) { create(:project, :public, :repository) }
+ let(:public_project2) { create(:project, :public, :repository) }
+ let(:public_project3) { create(:project, :public, :repository) }
+ let(:private_project) { create(:project, :private, :repository) }
+ let(:internal_project) { create(:project, :internal, :repository) }
before do
create_list(:note_on_commit, 3, project: public_project1)
diff --git a/spec/models/user_callout_spec.rb b/spec/models/user_callout_spec.rb
index de6534b480a..a084b1ac662 100644
--- a/spec/models/user_callout_spec.rb
+++ b/spec/models/user_callout_spec.rb
@@ -17,4 +17,37 @@ describe UserCallout do
it { is_expected.to validate_presence_of(:feature_name) }
it { is_expected.to validate_uniqueness_of(:feature_name).scoped_to(:user_id).ignoring_case_sensitivity }
end
+
+ describe 'scopes' do
+ describe '.with_feature_name' do
+ let(:second_feature_name) { described_class.feature_names.keys.second }
+ let(:last_feature_name) { described_class.feature_names.keys.last }
+
+ it 'returns callout for requested feature name only' do
+ callout1 = create(:user_callout, feature_name: second_feature_name)
+ create(:user_callout, feature_name: last_feature_name)
+
+ callouts = described_class.with_feature_name(second_feature_name)
+
+ expect(callouts).to match_array([callout1])
+ end
+ end
+
+ describe '.with_dismissed_after' do
+ let(:some_feature_name) { described_class.feature_names.keys.second }
+ let(:callout_dismissed_month_ago) { create(:user_callout, feature_name: some_feature_name, dismissed_at: 1.month.ago) }
+
+ it 'does not return callouts dismissed before specified date' do
+ callouts = described_class.with_dismissed_after(15.days.ago)
+
+ expect(callouts).to match_array([])
+ end
+
+ it 'returns callouts dismissed after specified date' do
+ callouts = described_class.with_dismissed_after(2.months.ago)
+
+ expect(callouts).to match_array([callout_dismissed_month_ago])
+ end
+ end
+ end
end
diff --git a/spec/models/user_preference_spec.rb b/spec/models/user_preference_spec.rb
index bb88983e140..7884b87cc26 100644
--- a/spec/models/user_preference_spec.rb
+++ b/spec/models/user_preference_spec.rb
@@ -85,4 +85,19 @@ describe UserPreference do
expect(user_preference.timezone).to eq(Time.zone.tzinfo.name)
end
end
+
+ describe '#tab_width' do
+ it 'is set to 8 by default' do
+ # Intentionally not using factory here to test the constructor.
+ pref = UserPreference.new
+ expect(pref.tab_width).to eq(8)
+ end
+
+ it do
+ is_expected.to validate_numericality_of(:tab_width)
+ .only_integer
+ .is_greater_than_or_equal_to(1)
+ .is_less_than_or_equal_to(12)
+ end
+ end
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 6f393d169a2..cd84bf54e8f 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -20,11 +20,15 @@ describe User, :do_not_mock_admin_mode do
describe 'delegations' do
it { is_expected.to delegate_method(:path).to(:namespace).with_prefix }
+
+ it { is_expected.to delegate_method(:tab_width).to(:user_preference) }
+ it { is_expected.to delegate_method(:tab_width=).to(:user_preference).with_arguments(5) }
end
describe 'associations' do
it { is_expected.to have_one(:namespace) }
it { is_expected.to have_one(:status) }
+ it { is_expected.to have_one(:max_access_level_membership) }
it { is_expected.to have_many(:snippets).dependent(:destroy) }
it { is_expected.to have_many(:members) }
it { is_expected.to have_many(:project_members) }
@@ -299,6 +303,20 @@ describe User, :do_not_mock_admin_mode do
end
end
+ context 'bad regex' do
+ before do
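+ # The nested quantifier in '([a-zA-Z0-9]+)+\.com' can cause catastrophic backtracking (ReDoS) on crafted input.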
+ allow_any_instance_of(ApplicationSetting).to receive(:domain_whitelist).and_return(['([a-zA-Z0-9]+)+\.com'])
+ end
+
+ it 'does not hang on evil input' do
+ user = build(:user, email: 'user@aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.com')
+
+ expect do
+ Timeout.timeout(2.seconds) { user.valid? }
+ end.not_to raise_error
+ end
+ end
+
context 'when a signup domain is whitelisted and subdomains are allowed' do
before do
allow_any_instance_of(ApplicationSetting).to receive(:domain_whitelist).and_return(['example.com', '*.example.com'])
@@ -352,6 +370,20 @@ describe User, :do_not_mock_admin_mode do
allow_any_instance_of(ApplicationSetting).to receive(:domain_blacklist).and_return(['example.com'])
end
+ context 'bad regex' do
+ before do
+ allow_any_instance_of(ApplicationSetting).to receive(:domain_blacklist).and_return(['([a-zA-Z0-9]+)+\.com'])
+ end
+
+ it 'does not hang on evil input' do
+ user = build(:user, email: 'user@aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!.com')
+
+ expect do
+ Timeout.timeout(2.seconds) { user.valid? }
+ end.not_to raise_error
+ end
+ end
+
context 'when a signup domain is blacklisted' do
it 'accepts info@test.com' do
user = build(:user, email: 'info@test.com')
@@ -677,7 +709,7 @@ describe User, :do_not_mock_admin_mode do
end
describe 'before save hook' do
- context '#default_private_profile_to_false' do
+ describe '#default_private_profile_to_false' do
let(:user) { create(:user, private_profile: true) }
it 'converts nil to false' do
@@ -839,9 +871,36 @@ describe User, :do_not_mock_admin_mode do
describe '#highest_role' do
let(:user) { create(:user) }
-
let(:group) { create(:group) }
+ context 'with association :max_access_level_membership' do
+ let(:another_user) { create(:user) }
+
+ before do
+ create(:project, group: group) do |project|
+ group.add_user(user, GroupMember::GUEST)
+ group.add_user(another_user, GroupMember::DEVELOPER)
+ end
+
+ create(:project, group: create(:group)) do |project|
+ project.add_guest(another_user)
+ end
+
+ create(:project, group: create(:group)) do |project|
+ project.add_maintainer(user)
+ end
+ end
+
+ it 'returns the correct highest role' do
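+ # Preloading :max_access_level_membership lets #highest_role use the eager-loaded record instead of querying per user.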
+ users = User.includes(:max_access_level_membership).where(id: [user.id, another_user.id])
+
+ expect(users.collect { |u| [u.id, u.highest_role] }).to contain_exactly(
+ [user.id, Gitlab::Access::MAINTAINER],
+ [another_user.id, Gitlab::Access::DEVELOPER]
+ )
+ end
+ end
+
it 'returns NO_ACCESS if none has been set' do
expect(user.highest_role).to eq(Gitlab::Access::NO_ACCESS)
end
@@ -2168,7 +2227,7 @@ describe User, :do_not_mock_admin_mode do
describe '.find_by_private_commit_email' do
context 'with email' do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
it 'returns user through private commit email' do
expect(described_class.find_by_private_commit_email(user.private_commit_email)).to eq(user)
@@ -2957,9 +3016,9 @@ describe User, :do_not_mock_admin_mode do
end
end
- describe '#can_read_all_resources?' do
+ describe '#can_read_all_resources?', :request_store do
it 'returns false for regular user' do
- user = build(:user)
+ user = build_stubbed(:user)
expect(user.can_read_all_resources?).to be_falsy
end
@@ -2967,7 +3026,7 @@ describe User, :do_not_mock_admin_mode do
context 'for admin user' do
include_context 'custom session'
- let(:user) { build(:user, :admin) }
+ let(:user) { build_stubbed(:user, :admin) }
context 'when admin mode is disabled' do
it 'returns false' do
@@ -3130,7 +3189,7 @@ describe User, :do_not_mock_admin_mode do
end
end
- context '.active' do
+ describe '.active' do
before do
described_class.ghost
create(:user, name: 'user', state: 'active')
@@ -3150,7 +3209,7 @@ describe User, :do_not_mock_admin_mode do
end
end
- context '#invalidate_issue_cache_counts' do
+ describe '#invalidate_issue_cache_counts' do
let(:user) { build_stubbed(:user) }
it 'invalidates cache for issue counter' do
@@ -3164,7 +3223,7 @@ describe User, :do_not_mock_admin_mode do
end
end
- context '#invalidate_merge_request_cache_counts' do
+ describe '#invalidate_merge_request_cache_counts' do
let(:user) { build_stubbed(:user) }
it 'invalidates cache for Merge Request counter' do
@@ -3178,7 +3237,7 @@ describe User, :do_not_mock_admin_mode do
end
end
- context '#invalidate_personal_projects_count' do
+ describe '#invalidate_personal_projects_count' do
let(:user) { build_stubbed(:user) }
it 'invalidates cache for personal projects counter' do
@@ -4056,4 +4115,130 @@ describe User, :do_not_mock_admin_mode do
end
end
end
+
+ describe '#read_only_attribute?' do
+ context 'when LDAP server is enabled' do
+ before do
+ allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
+ end
+
+ %i[name email location].each do |attribute|
+ it "is true for #{attribute}" do
+ expect(subject.read_only_attribute?(attribute)).to be_truthy
+ end
+ end
+
+ context 'and ldap_readonly_attributes feature is disabled' do
+ before do
+ stub_feature_flags(ldap_readonly_attributes: false)
+ end
+
+ %i[name email location].each do |attribute|
+ it "is false" do
+ expect(subject.read_only_attribute?(attribute)).to be_falsey
+ end
+ end
+ end
+ end
+
+ context 'when synced attributes metadata is present' do
+ it 'delegates to synced_attributes_metadata' do
+ subject.build_user_synced_attributes_metadata
+
+ expect(subject.build_user_synced_attributes_metadata)
+ .to receive(:read_only?).with(:email).and_return('return-value')
+ expect(subject.read_only_attribute?(:email)).to eq('return-value')
+ end
+ end
+
+ context 'when synced attributes metadata is not present' do
+ it 'is false for any attribute' do
+ expect(subject.read_only_attribute?(:email)).to be_falsey
+ end
+ end
+ end
+
+ describe 'internal methods' do
+ let_it_be(:user) { create(:user) }
+ let!(:ghost) { described_class.ghost }
+ let!(:alert_bot) { described_class.alert_bot }
+ let!(:non_internal) { [user] }
+ let!(:internal) { [ghost, alert_bot] }
+
+ it 'returns internal users' do
+ expect(described_class.internal).to eq(internal)
+ expect(internal.all?(&:internal?)).to eq(true)
+ end
+
+ it 'returns non internal users' do
+ expect(described_class.non_internal).to eq(non_internal)
+ expect(non_internal.all?(&:internal?)).to eq(false)
+ end
+
+ describe '#bot?' do
+ it 'marks bot users' do
+ expect(user.bot?).to eq(false)
+ expect(ghost.bot?).to eq(false)
+
+ expect(alert_bot.bot?).to eq(true)
+ end
+ end
+ end
+
+ describe '#dismissed_callout?' do
+ subject(:user) { create(:user) }
+
+ let(:feature_name) { UserCallout.feature_names.each_key.first }
+
+ context 'when no callout dismissal record exists' do
+ it 'returns false when no ignore_dismissal_earlier_than provided' do
+ expect(user.dismissed_callout?(feature_name: feature_name)).to eq false
+ end
+
+ it 'returns false when ignore_dismissal_earlier_than provided' do
+ expect(user.dismissed_callout?(feature_name: feature_name, ignore_dismissal_earlier_than: 3.months.ago)).to eq false
+ end
+ end
+
+ context 'when dismissed callout exists' do
+ before do
+ create(:user_callout, user: user, feature_name: feature_name, dismissed_at: 4.months.ago)
+ end
+
+ it 'returns true when no ignore_dismissal_earlier_than provided' do
+ expect(user.dismissed_callout?(feature_name: feature_name)).to eq true
+ end
+
+ it 'returns true when ignore_dismissal_earlier_than is earlier than dismissed_at' do
+ expect(user.dismissed_callout?(feature_name: feature_name, ignore_dismissal_earlier_than: 6.months.ago)).to eq true
+ end
+
+ it 'returns false when ignore_dismissal_earlier_than is later than dismissed_at' do
+ expect(user.dismissed_callout?(feature_name: feature_name, ignore_dismissal_earlier_than: 3.months.ago)).to eq false
+ end
+ end
+ end
+
+ describe 'bots & humans' do
+ it 'returns corresponding users' do
+ human = create(:user)
+ bot = create(:user, :bot)
+
+ expect(described_class.humans).to match_array([human])
+ expect(described_class.bots).to match_array([bot])
+ end
+ end
+
+ describe '#hook_attrs' do
+ it 'includes name, username, avatar_url, and email' do
+ user = create(:user)
+ user_attributes = {
+ name: user.name,
+ username: user.username,
+ avatar_url: user.avatar_url(only_path: false),
+ email: user.email
+ }
+ expect(user.hook_attrs).to eq(user_attributes)
+ end
+ end
end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index a7c28519c5a..be5479cfc11 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -7,7 +7,18 @@ describe WikiPage do
let(:user) { project.owner }
let(:wiki) { ProjectWiki.new(project, user) }
- subject { described_class.new(wiki) }
+ let(:new_page) do
+ described_class.new(wiki).tap do |page|
+ page.attributes = { title: 'test page', content: 'test content' }
+ end
+ end
+
+ let(:existing_page) do
+ create_page('test page', 'test content')
+ wiki.find_page('test page')
+ end
+
+ subject { new_page }
describe '.group_by_directory' do
context 'when there are no pages' do
@@ -44,7 +55,7 @@ describe WikiPage do
WikiDirectory.new('dir_2', pages)
end
- context "#list_pages" do
+ describe "#list_pages" do
context 'sort by title' do
let(:grouped_entries) { described_class.group_by_directory(wiki.list_pages) }
let(:expected_grouped_entries) { [dir_1_1, dir_1, page_dir_2, dir_2, page_1, page_6] }
@@ -100,56 +111,134 @@ describe WikiPage do
describe "#initialize" do
context "when initialized with an existing page" do
- before do
- create_page("test page", "test content")
- @page = wiki.wiki.page(title: "test page")
- @wiki_page = described_class.new(wiki, @page, true)
- end
+ subject { existing_page }
it "sets the slug attribute" do
- expect(@wiki_page.slug).to eq("test-page")
+ expect(subject.slug).to eq("test-page")
end
it "sets the title attribute" do
- expect(@wiki_page.title).to eq("test page")
+ expect(subject.title).to eq("test page")
end
it "sets the formatted content attribute" do
- expect(@wiki_page.content).to eq("test content")
+ expect(subject.content).to eq("test content")
end
it "sets the format attribute" do
- expect(@wiki_page.format).to eq(:markdown)
+ expect(subject.format).to eq(:markdown)
end
it "sets the message attribute" do
- expect(@wiki_page.message).to eq("test commit")
+ expect(subject.message).to eq("test commit")
end
it "sets the version attribute" do
- expect(@wiki_page.version).to be_a Gitlab::Git::WikiPageVersion
+ expect(subject.version).to be_a Gitlab::Git::WikiPageVersion
end
end
end
describe "validations" do
- before do
- subject.attributes = { title: 'title', content: 'content' }
- end
-
it "validates presence of title" do
subject.attributes.delete(:title)
- expect(subject.valid?).to be_falsey
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.keys).to contain_exactly(:title)
end
it "validates presence of content" do
subject.attributes.delete(:content)
- expect(subject.valid?).to be_falsey
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.keys).to contain_exactly(:content)
+ end
+
+ describe '#validate_path_limits' do
+ let(:max_title) { described_class::MAX_TITLE_BYTES }
+ let(:max_directory) { described_class::MAX_DIRECTORY_BYTES }
+
+ where(:character) do
+ ['a', 'ä', '🙈']
+ end
+
+ with_them do
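+ # The limits are enforced in bytes, so multi-byte characters ('ä', '🙈') fit fewer repetitions than ASCII.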
+ let(:size) { character.bytesize.to_f }
+ let(:valid_title) { character * (max_title / size).floor }
+ let(:valid_directory) { character * (max_directory / size).floor }
+ let(:invalid_title) { character * ((max_title + 1) / size).ceil }
+ let(:invalid_directory) { character * ((max_directory + 1) / size).ceil }
+
+ it 'accepts page titles below the limit' do
+ subject.title = valid_title
+
+ expect(subject).to be_valid
+ end
+
+ it 'accepts directories below the limit' do
+ subject.title = valid_directory + '/foo'
+
+ expect(subject).to be_valid
+ end
+
+ it 'accepts a path with page title and directory below the limit' do
+ subject.title = "#{valid_directory}/#{valid_title}"
+
+ expect(subject).to be_valid
+ end
+
+ it 'rejects page titles exceeding the limit' do
+ subject.title = invalid_title
+
+ expect(subject).not_to be_valid
+ expect(subject.errors[:title]).to contain_exactly(
+ "exceeds the limit of #{max_title} bytes for page titles"
+ )
+ end
+
+ it 'rejects directories exceeding the limit' do
+ subject.title = invalid_directory + '/foo'
+
+ expect(subject).not_to be_valid
+ expect(subject.errors[:title]).to contain_exactly(
+ "exceeds the limit of #{max_directory} bytes for directory names"
+ )
+ end
+
+ it 'rejects a page with both title and directory exceeding the limit' do
+ subject.title = "#{invalid_directory}/#{invalid_title}"
+
+ expect(subject).not_to be_valid
+ expect(subject.errors[:title]).to contain_exactly(
+ "exceeds the limit of #{max_title} bytes for page titles",
+ "exceeds the limit of #{max_directory} bytes for directory names"
+ )
+ end
+ end
+
+ context 'with an existing page title exceeding the limit' do
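+ # The byte limit is only enforced when the title changes, so pre-existing overlong titles remain valid.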
+ subject do
+ title = 'a' * (max_title + 1)
+ create_page(title, 'content')
+ wiki.find_page(title)
+ end
+
+ it 'accepts the exceeding title length when unchanged' do
+ expect(subject).to be_valid
+ end
+
+ it 'rejects the exceeding title length when changed' do
+ subject.title = 'b' * (max_title + 1)
+
+ expect(subject).not_to be_valid
+ expect(subject.errors).to include(:title)
+ end
+ end
end
end
describe "#create" do
- let(:wiki_attr) do
+ let(:attributes) do
{
title: "Index",
content: "Home Page",
@@ -158,22 +247,19 @@ describe WikiPage do
}
end
- after do
- destroy_page("Index")
- end
-
context "with valid attributes" do
it "saves the wiki page" do
- subject.create(wiki_attr)
+ subject.create(attributes)
+
expect(wiki.find_page("Index")).not_to be_nil
end
it "returns true" do
- expect(subject.create(wiki_attr)).to eq(true)
+ expect(subject.create(attributes)).to eq(true)
end
it 'saves the wiki page with message' do
- subject.create(wiki_attr)
+ subject.create(attributes)
expect(wiki.find_page("Index").message).to eq 'Custom Commit Message'
end
@@ -183,40 +269,37 @@ describe WikiPage do
describe "dot in the title" do
let(:title) { 'Index v1.2.3' }
- before do
- @wiki_attr = { title: title, content: "Home Page", format: "markdown" }
- end
-
describe "#create" do
- after do
- destroy_page(title)
- end
+ let(:attributes) { { title: title, content: "Home Page", format: "markdown" } }
context "with valid attributes" do
it "saves the wiki page" do
- subject.create(@wiki_attr)
+ subject.create(attributes)
+
expect(wiki.find_page(title)).not_to be_nil
end
it "returns true" do
- expect(subject.create(@wiki_attr)).to eq(true)
+ expect(subject.create(attributes)).to eq(true)
end
end
end
describe "#update" do
- before do
+ subject do
create_page(title, "content")
- @page = wiki.find_page(title)
+ wiki.find_page(title)
end
it "updates the content of the page" do
- @page.update(content: "new content")
- @page = wiki.find_page(title)
+ subject.update(content: "new content")
+ page = wiki.find_page(title)
+
+ expect(page.content).to eq('new content')
end
it "returns true" do
- expect(@page.update(content: "more content")).to be_truthy
+ expect(subject.update(content: "more content")).to be_truthy
end
end
end
@@ -226,66 +309,55 @@ describe WikiPage do
it 'raises an error if a page with the same path already exists' do
create_page('New Page', 'content')
create_page('foo/bar', 'content')
+
expect { create_page('New Page', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError
expect { create_page('foo/bar', 'other content') }.to raise_error Gitlab::Git::Wiki::DuplicatePageError
-
- destroy_page('New Page')
- destroy_page('bar', 'foo')
end
it 'if the title is preceded by a / it is removed' do
create_page('/New Page', 'content')
expect(wiki.find_page('New Page')).not_to be_nil
-
- destroy_page('New Page')
end
end
end
describe "#update" do
- before do
- create_page("Update", "content")
- @page = wiki.find_page("Update")
- end
-
- after do
- destroy_page(@page.title, @page.directory)
- end
+ subject { existing_page }
context "with valid attributes" do
it "updates the content of the page" do
new_content = "new content"
- @page.update(content: new_content)
- @page = wiki.find_page("Update")
+ subject.update(content: new_content)
+ page = wiki.find_page('test page')
- expect(@page.content).to eq("new content")
+ expect(page.content).to eq("new content")
end
it "updates the title of the page" do
new_title = "Index v.1.2.4"
- @page.update(title: new_title)
- @page = wiki.find_page(new_title)
+ subject.update(title: new_title)
+ page = wiki.find_page(new_title)
- expect(@page.title).to eq(new_title)
+ expect(page.title).to eq(new_title)
end
it "returns true" do
- expect(@page.update(content: "more content")).to be_truthy
+ expect(subject.update(content: "more content")).to be_truthy
end
end
context 'with same last commit sha' do
it 'returns true' do
- expect(@page.update(content: 'more content', last_commit_sha: @page.last_commit_sha)).to be_truthy
+ expect(subject.update(content: 'more content', last_commit_sha: subject.last_commit_sha)).to be_truthy
end
end
context 'with different last commit sha' do
it 'raises exception' do
- expect { @page.update(content: 'more content', last_commit_sha: 'xxx') }.to raise_error(WikiPage::PageChangedError)
+ expect { subject.update(content: 'more content', last_commit_sha: 'xxx') }.to raise_error(WikiPage::PageChangedError)
end
end
@@ -293,23 +365,21 @@ describe WikiPage do
it 'raises an error if the page already exists' do
create_page('Existing Page', 'content')
- expect { @page.update(title: 'Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
- expect(@page.title).to eq 'Update'
- expect(@page.content).to eq 'new_content'
-
- destroy_page('Existing Page')
+ expect { subject.update(title: 'Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
+ expect(subject.title).to eq 'test page'
+ expect(subject.content).to eq 'new_content'
end
it 'updates the content and rename the file' do
new_title = 'Renamed Page'
new_content = 'updated content'
- expect(@page.update(title: new_title, content: new_content)).to be_truthy
+ expect(subject.update(title: new_title, content: new_content)).to be_truthy
- @page = wiki.find_page(new_title)
+ page = wiki.find_page(new_title)
- expect(@page).not_to be_nil
- expect(@page.content).to eq new_content
+ expect(page).not_to be_nil
+ expect(page.content).to eq new_content
end
end
@@ -317,18 +387,16 @@ describe WikiPage do
it 'raises an error if the page already exists' do
create_page('foo/Existing Page', 'content')
- expect { @page.update(title: 'foo/Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
- expect(@page.title).to eq 'Update'
- expect(@page.content).to eq 'new_content'
-
- destroy_page('Existing Page', 'foo')
+ expect { subject.update(title: 'foo/Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
+ expect(subject.title).to eq 'test page'
+ expect(subject.content).to eq 'new_content'
end
it 'updates the content and moves the file' do
new_title = 'foo/Other Page'
new_content = 'new_content'
- expect(@page.update(title: new_title, content: new_content)).to be_truthy
+ expect(subject.update(title: new_title, content: new_content)).to be_truthy
page = wiki.find_page(new_title)
@@ -337,120 +405,101 @@ describe WikiPage do
end
context 'in subdir' do
- before do
+ subject do
create_page('foo/Existing Page', 'content')
- @page = wiki.find_page('foo/Existing Page')
+ wiki.find_page('foo/Existing Page')
end
it 'moves the page to the root folder if the title is preceded by /' do
- expect(@page.slug).to eq 'foo/Existing-Page'
- expect(@page.update(title: '/Existing Page', content: 'new_content')).to be_truthy
- expect(@page.slug).to eq 'Existing-Page'
+ expect(subject.slug).to eq 'foo/Existing-Page'
+ expect(subject.update(title: '/Existing Page', content: 'new_content')).to be_truthy
+ expect(subject.slug).to eq 'Existing-Page'
end
it 'does nothing if it has the same title' do
- original_path = @page.slug
+ original_path = subject.slug
- expect(@page.update(title: 'Existing Page', content: 'new_content')).to be_truthy
- expect(@page.slug).to eq original_path
+ expect(subject.update(title: 'Existing Page', content: 'new_content')).to be_truthy
+ expect(subject.slug).to eq original_path
end
end
context 'in root dir' do
it 'does nothing if the title is preceded by /' do
- original_path = @page.slug
+ original_path = subject.slug
- expect(@page.update(title: '/Update', content: 'new_content')).to be_truthy
- expect(@page.slug).to eq original_path
+ expect(subject.update(title: '/test page', content: 'new_content')).to be_truthy
+ expect(subject.slug).to eq original_path
end
end
end
context "with invalid attributes" do
it 'aborts update if title blank' do
- expect(@page.update(title: '', content: 'new_content')).to be_falsey
- expect(@page.content).to eq 'new_content'
+ expect(subject.update(title: '', content: 'new_content')).to be_falsey
+ expect(subject.content).to eq 'new_content'
- page = wiki.find_page('Update')
- expect(page.content).to eq 'content'
+ page = wiki.find_page('test page')
- @page.title = 'Update'
+ expect(page.content).to eq 'test content'
end
end
end
describe "#destroy" do
- before do
- create_page("Delete Page", "content")
- @page = wiki.find_page("Delete Page")
- end
+ subject { existing_page }
it "deletes the page" do
- @page.delete
+ subject.delete
+
expect(wiki.list_pages).to be_empty
end
it "returns true" do
- expect(@page.delete).to eq(true)
+ expect(subject.delete).to eq(true)
end
end
describe "#versions" do
- let(:page) { wiki.find_page("Update") }
-
- before do
- create_page("Update", "content")
- end
-
- after do
- destroy_page("Update")
- end
+ subject { existing_page }
it "returns an array of all commits for the page" do
- 3.times { |i| page.update(content: "content #{i}") }
+ 3.times { |i| subject.update(content: "content #{i}") }
- expect(page.versions.count).to eq(4)
+ expect(subject.versions.count).to eq(4)
end
it 'returns instances of WikiPageVersion' do
- expect(page.versions).to all( be_a(Gitlab::Git::WikiPageVersion) )
+ expect(subject.versions).to all( be_a(Gitlab::Git::WikiPageVersion) )
end
end
describe "#title" do
- before do
- create_page("Title", "content")
- @page = wiki.find_page("Title")
- end
-
- after do
- destroy_page("Title")
- end
-
it "replaces a hyphen to a space" do
- @page.title = "Import-existing-repositories-into-GitLab"
- expect(@page.title).to eq("Import existing repositories into GitLab")
+ subject.title = "Import-existing-repositories-into-GitLab"
+
+ expect(subject.title).to eq("Import existing repositories into GitLab")
end
it 'unescapes html' do
- @page.title = 'foo &amp; bar'
+ subject.title = 'foo &amp; bar'
- expect(@page.title).to eq('foo & bar')
+ expect(subject.title).to eq('foo & bar')
end
end
describe '#path' do
let(:path) { 'mypath.md' }
- let(:wiki_page) { instance_double('Gitlab::Git::WikiPage', path: path).as_null_object }
+ let(:git_page) { instance_double('Gitlab::Git::WikiPage', path: path).as_null_object }
it 'returns the path when persisted' do
- page = described_class.new(wiki, wiki_page, true)
+ page = described_class.new(wiki, git_page, true)
expect(page.path).to eq(path)
end
it 'returns nil when not persisted' do
- page = described_class.new(wiki, wiki_page, false)
+ page = described_class.new(wiki, git_page, false)
expect(page.path).to be_nil
end
@@ -458,39 +507,38 @@ describe WikiPage do
describe '#directory' do
context 'when the page is at the root directory' do
- it 'returns an empty string' do
+ subject do
create_page('file', 'content')
- page = wiki.find_page('file')
+ wiki.find_page('file')
+ end
- expect(page.directory).to eq('')
+ it 'returns an empty string' do
+ expect(subject.directory).to eq('')
end
end
context 'when the page is inside an actual directory' do
- it 'returns the full directory hierarchy' do
+ subject do
create_page('dir_1/dir_1_1/file', 'content')
- page = wiki.find_page('dir_1/dir_1_1/file')
+ wiki.find_page('dir_1/dir_1_1/file')
+ end
- expect(page.directory).to eq('dir_1/dir_1_1')
+ it 'returns the full directory hierarchy' do
+ expect(subject.directory).to eq('dir_1/dir_1_1')
end
end
end
describe '#historical?' do
- let(:page) { wiki.find_page('Update') }
- let(:old_version) { page.versions.last.id }
- let(:old_page) { wiki.find_page('Update', old_version) }
- let(:latest_version) { page.versions.first.id }
- let(:latest_page) { wiki.find_page('Update', latest_version) }
+ subject { existing_page }
- before do
- create_page('Update', 'content')
- @page = wiki.find_page('Update')
- 3.times { |i| @page.update(content: "content #{i}") }
- end
+ let(:old_version) { subject.versions.last.id }
+ let(:old_page) { wiki.find_page(subject.title, old_version) }
+ let(:latest_version) { subject.versions.first.id }
+ let(:latest_page) { wiki.find_page(subject.title, latest_version) }
- after do
- destroy_page('Update')
+ before do
+ 3.times { |i| subject.update(content: "content #{i}") }
end
it 'returns true when requesting an old version' do
@@ -520,56 +568,48 @@ describe WikiPage do
describe '#to_partial_path' do
it 'returns the relative path to the partial to be used' do
- page = build(:wiki_page)
-
- expect(page.to_partial_path).to eq('projects/wikis/wiki_page')
+ expect(subject.to_partial_path).to eq('projects/wikis/wiki_page')
end
end
describe '#==' do
- let(:original_wiki_page) { create(:wiki_page) }
+ subject { existing_page }
it 'returns true for identical wiki page' do
- expect(original_wiki_page).to eq(original_wiki_page)
+ expect(subject).to eq(subject)
end
it 'returns false for updated wiki page' do
- updated_wiki_page = original_wiki_page.update(content: "Updated content")
- expect(original_wiki_page).not_to eq(updated_wiki_page)
+ subject.update(content: "Updated content")
+ updated_page = wiki.find_page('test page')
+
+ expect(updated_page).not_to be_nil
+ expect(updated_page).not_to eq(subject)
end
end
describe '#last_commit_sha' do
- before do
- create_page("Update", "content")
- @page = wiki.find_page("Update")
- end
-
- after do
- destroy_page("Update")
- end
+ subject { existing_page }
it 'returns commit sha' do
- expect(@page.last_commit_sha).to eq @page.last_version.sha
+ expect(subject.last_commit_sha).to eq subject.last_version.sha
end
it 'is changed after page updated' do
- last_commit_sha_before_update = @page.last_commit_sha
+ last_commit_sha_before_update = subject.last_commit_sha
- @page.update(content: "new content")
- @page = wiki.find_page("Update")
+ subject.update(content: "new content")
+ page = wiki.find_page('test page')
- expect(@page.last_commit_sha).not_to eq last_commit_sha_before_update
+ expect(page.last_commit_sha).not_to eq last_commit_sha_before_update
end
end
describe '#hook_attrs' do
it 'adds absolute urls for images in the content' do
- create_page("test page", "test![WikiPage_Image](/uploads/abc/WikiPage_Image.png)")
- page = wiki.wiki.page(title: "test page")
- wiki_page = described_class.new(wiki, page, true)
+ subject.attributes[:content] = 'test![WikiPage_Image](/uploads/abc/WikiPage_Image.png)'
- expect(wiki_page.hook_attrs['content']).to eq("test![WikiPage_Image](#{Settings.gitlab.url}/uploads/abc/WikiPage_Image.png)")
+ expect(subject.hook_attrs['content']).to eq("test![WikiPage_Image](#{Settings.gitlab.url}/uploads/abc/WikiPage_Image.png)")
end
end
@@ -587,11 +627,6 @@ describe WikiPage do
wiki.wiki.write_page(name, :markdown, content, commit_details)
end
- def destroy_page(title, dir = '')
- page = wiki.wiki.page(title: title, dir: dir)
- wiki.delete_page(page, "test commit")
- end
-
def get_slugs(page_or_dir)
if page_or_dir.is_a? WikiPage
[page_or_dir.slug]
diff --git a/spec/models/x509_certificate_spec.rb b/spec/models/x509_certificate_spec.rb
new file mode 100644
index 00000000000..187d37334a1
--- /dev/null
+++ b/spec/models/x509_certificate_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe X509Certificate do
+ describe 'validation' do
+ it { is_expected.to validate_presence_of(:subject_key_identifier) }
+ it { is_expected.to validate_presence_of(:subject) }
+ it { is_expected.to validate_presence_of(:email) }
+ it { is_expected.to validate_presence_of(:serial_number) }
+ it { is_expected.to validate_presence_of(:x509_issuer_id) }
+ end
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:x509_issuer).required }
+ end
+
+ describe '.safe_create!' do
+ let(:subject_key_identifier) { 'CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD' }
+ let(:subject) { 'CN=gitlab@example.com,OU=Example,O=World' }
+ let(:email) { 'gitlab@example.com' }
+ let(:serial_number) { '123456789' }
+ let(:issuer) { create(:x509_issuer) }
+
+ let(:attributes) do
+ {
+ subject_key_identifier: subject_key_identifier,
+ subject: subject,
+ email: email,
+ serial_number: serial_number,
+ x509_issuer_id: issuer.id
+ }
+ end
+
+ it 'creates a new certificate if it was not found' do
+ expect { described_class.safe_create!(attributes) }.to change { described_class.count }.by(1)
+ end
+
+ it 'assigns the correct attributes when creating' do
+ certificate = described_class.safe_create!(attributes)
+
+ expect(certificate.subject_key_identifier).to eq(subject_key_identifier)
+ expect(certificate.subject).to eq(subject)
+ expect(certificate.email).to eq(email)
+ end
+ end
+
+ describe 'validators' do
+ it 'accepts correct subject_key_identifier' do
+ subject_key_identifiers = [
+ 'AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB',
+ 'CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD'
+ ]
+
+ subject_key_identifiers.each do |identifier|
+ expect(build(:x509_certificate, subject_key_identifier: identifier)).to be_valid
+ end
+ end
+
+ it 'rejects invalid subject_key_identifier' do
+ subject_key_identifiers = [
+ 'AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB',
+ 'CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:GG',
+ 'random string',
+ '12321342545356434523412341245452345623453542345234523453245'
+ ]
+
+ subject_key_identifiers.each do |identifier|
+ expect(build(:x509_certificate, subject_key_identifier: identifier)).to be_invalid
+ end
+ end
+
+ it 'accepts correct email address' do
+ emails = [
+ 'smime@example.org',
+ 'smime@example.com'
+ ]
+
+ emails.each do |email|
+ expect(build(:x509_certificate, email: email)).to be_valid
+ end
+ end
+
+ it 'rejects invalid email' do
+ emails = [
+ 'this is not an email',
+ '@example.org'
+ ]
+
+ emails.each do |email|
+ expect(build(:x509_certificate, email: email)).to be_invalid
+ end
+ end
+
+ it 'accepts valid serial_number' do
+ expect(build(:x509_certificate, serial_number: 123412341234)).to be_valid
+
+ # rfc 5280 - 4.1.2.2 Serial number (20 octets is the maximum)
+ expect(build(:x509_certificate, serial_number: 1461501637330902918203684832716283019655932542975)).to be_valid
+ expect(build(:x509_certificate, serial_number: 'ffffffffffffffffffffffffffffffffffffffff'.to_i(16))).to be_valid
+ end
+
+ it 'rejects invalid serial_number' do
+ expect(build(:x509_certificate, serial_number: "sgsgfsdgdsfg")).to be_invalid
+ end
+ end
+end
diff --git a/spec/models/x509_commit_signature_spec.rb b/spec/models/x509_commit_signature_spec.rb
new file mode 100644
index 00000000000..a2f72228a86
--- /dev/null
+++ b/spec/models/x509_commit_signature_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe X509CommitSignature do
+ let(:commit_sha) { '189a6c924013fc3fe40d6f1ec1dc20214183bc97' }
+ let(:project) { create(:project, :public, :repository) }
+ let!(:commit) { create(:commit, project: project, sha: commit_sha) }
+ let(:x509_certificate) { create(:x509_certificate) }
+ let(:x509_signature) { create(:x509_commit_signature, commit_sha: commit_sha) }
+
+ it_behaves_like 'having unique enum values'
+
+ describe 'validation' do
+ it { is_expected.to validate_presence_of(:commit_sha) }
+ it { is_expected.to validate_presence_of(:project_id) }
+ it { is_expected.to validate_presence_of(:x509_certificate_id) }
+ end
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:project).required }
+ it { is_expected.to belong_to(:x509_certificate).required }
+ end
+
+ describe '.safe_create!' do
+ let(:attributes) do
+ {
+ commit_sha: commit_sha,
+ project: project,
+ x509_certificate_id: x509_certificate.id,
+ verification_status: "verified"
+ }
+ end
+
+ it 'finds a signature by commit sha if it existed' do
+ x509_signature
+
+ expect(described_class.safe_create!(commit_sha: commit_sha)).to eq(x509_signature)
+ end
+
+ it 'creates a new signature if it was not found' do
+ expect { described_class.safe_create!(attributes) }.to change { described_class.count }.by(1)
+ end
+
+ it 'assigns the correct attributes when creating' do
+ signature = described_class.safe_create!(attributes)
+
+ expect(signature.project).to eq(project)
+ expect(signature.commit_sha).to eq(commit_sha)
+ expect(signature.x509_certificate_id).to eq(x509_certificate.id)
+ end
+ end
+end
diff --git a/spec/models/x509_issuer_spec.rb b/spec/models/x509_issuer_spec.rb
new file mode 100644
index 00000000000..f1067cad655
--- /dev/null
+++ b/spec/models/x509_issuer_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe X509Issuer do
+ describe 'validation' do
+ it { is_expected.to validate_presence_of(:subject_key_identifier) }
+ it { is_expected.to validate_presence_of(:subject) }
+ it { is_expected.to validate_presence_of(:crl_url) }
+ end
+
+ describe '.safe_create!' do
+ let(:issuer_subject_key_identifier) { 'AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB' }
+ let(:issuer_subject) { 'CN=PKI,OU=Example,O=World' }
+ let(:issuer_crl_url) { 'http://example.com/pki.crl' }
+
+ let(:attributes) do
+ {
+ subject_key_identifier: issuer_subject_key_identifier,
+ subject: issuer_subject,
+ crl_url: issuer_crl_url
+ }
+ end
+
+ it 'creates a new issuer if it was not found' do
+ expect { described_class.safe_create!(attributes) }.to change { described_class.count }.by(1)
+ end
+
+ it 'assigns the correct attributes when creating' do
+ issuer = described_class.safe_create!(attributes)
+
+ expect(issuer.subject_key_identifier).to eq(issuer_subject_key_identifier)
+ expect(issuer.subject).to eq(issuer_subject)
+ expect(issuer.crl_url).to eq(issuer_crl_url)
+ end
+ end
+
+ describe 'validators' do
+ it 'accepts correct subject_key_identifier' do
+ subject_key_identifiers = [
+ 'AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB',
+ 'CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD'
+ ]
+
+ subject_key_identifiers.each do |identifier|
+ expect(build(:x509_issuer, subject_key_identifier: identifier)).to be_valid
+ end
+ end
+
+ it 'rejects invalid subject_key_identifier' do
+ subject_key_identifiers = [
+ 'AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB:AB',
+ 'CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:CD:GG',
+ 'random string',
+ '12321342545356434523412341245452345623453542345234523453245'
+ ]
+
+ subject_key_identifiers.each do |identifier|
+ expect(build(:x509_issuer, subject_key_identifier: identifier)).to be_invalid
+ end
+ end
+
+ it 'accepts valid crl_url' do
+ expect(build(:x509_issuer, crl_url: "https://pki.example.org")).to be_valid
+ end
+
+ it 'rejects invalid crl_url' do
+ expect(build(:x509_issuer, crl_url: "ht://pki.example.org")).to be_invalid
+ end
+ end
+end
diff --git a/spec/policies/base_policy_spec.rb b/spec/policies/base_policy_spec.rb
index 81aee4cfcac..ae5af9e0f29 100644
--- a/spec/policies/base_policy_spec.rb
+++ b/spec/policies/base_policy_spec.rb
@@ -23,8 +23,8 @@ describe BasePolicy, :do_not_mock_admin_mode do
end
describe 'read cross project' do
- let(:current_user) { create(:user) }
- let(:user) { create(:user) }
+ let(:current_user) { build_stubbed(:user) }
+ let(:user) { build_stubbed(:user) }
subject { described_class.new(current_user, [user]) }
@@ -38,7 +38,7 @@ describe BasePolicy, :do_not_mock_admin_mode do
it { is_expected.not_to be_allowed(:read_cross_project) }
context 'for admins' do
- let(:current_user) { build(:admin) }
+ let(:current_user) { build_stubbed(:admin) }
subject { described_class.new(current_user, nil) }
@@ -56,14 +56,14 @@ describe BasePolicy, :do_not_mock_admin_mode do
end
describe 'full private access' do
- let(:current_user) { create(:user) }
+ let(:current_user) { build_stubbed(:user) }
subject { described_class.new(current_user, nil) }
it { is_expected.not_to be_allowed(:read_all_resources) }
context 'for admins' do
- let(:current_user) { build(:admin) }
+ let(:current_user) { build_stubbed(:admin) }
it 'allowed when in admin mode' do
enable_admin_mode!(current_user)
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index d227c018694..2d261241486 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -363,21 +363,21 @@ describe GlobalPolicy do
end
end
- describe 'create_personal_snippet' do
+ describe 'create_snippet' do
context 'when anonymous' do
let(:current_user) { nil }
- it { is_expected.not_to be_allowed(:create_personal_snippet) }
+ it { is_expected.not_to be_allowed(:create_snippet) }
end
context 'regular user' do
- it { is_expected.to be_allowed(:create_personal_snippet) }
+ it { is_expected.to be_allowed(:create_snippet) }
end
context 'when external' do
let(:current_user) { build(:user, :external) }
- it { is_expected.not_to be_allowed(:create_personal_snippet) }
+ it { is_expected.not_to be_allowed(:create_snippet) }
end
end
end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index ae9d125f970..5a9ca9f7b7e 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -438,7 +438,7 @@ describe GroupPolicy do
end
end
- context "create_projects" do
+ context 'create_projects' do
context 'when group has no project creation level set' do
before_all do
group.update(project_creation_level: nil)
@@ -560,7 +560,7 @@ describe GroupPolicy do
end
end
- context "create_subgroup" do
+ context 'create_subgroup' do
context 'when group has subgroup creation level set to owner' do
before_all do
group.update(subgroup_creation_level: ::Gitlab::Access::OWNER_SUBGROUP_ACCESS)
diff --git a/spec/policies/note_policy_spec.rb b/spec/policies/note_policy_spec.rb
index 5aee66275d4..2619bb2fe3c 100644
--- a/spec/policies/note_policy_spec.rb
+++ b/spec/policies/note_policy_spec.rb
@@ -54,6 +54,27 @@ describe NotePolicy do
end
end
+ context 'when the noteable is a personal snippet' do
+ let(:noteable) { create(:personal_snippet, :public) }
+ let(:note) { create(:note, noteable: noteable, author: user) }
+
+ it 'can edit note' do
+ expect(policy).to be_allowed(:admin_note)
+ expect(policy).to be_allowed(:resolve_note)
+ expect(policy).to be_allowed(:read_note)
+ end
+
+ context 'when it is private' do
+ let(:noteable) { create(:personal_snippet, :private) }
+
+ it 'can not edit nor read the note' do
+ expect(policy).to be_disallowed(:admin_note)
+ expect(policy).to be_disallowed(:resolve_note)
+ expect(policy).to be_disallowed(:read_note)
+ end
+ end
+ end
+
context 'when the project is public' do
context 'when the note author is not a project member' do
it 'can edit a note' do
@@ -79,26 +100,6 @@ describe NotePolicy do
end
end
- context 'when the noteable is a personal snippet' do
- let(:noteable) { create(:personal_snippet, :public) }
-
- it 'can edit note' do
- expect(policy).to be_allowed(:admin_note)
- expect(policy).to be_allowed(:resolve_note)
- expect(policy).to be_allowed(:read_note)
- end
-
- context 'when it is private' do
- let(:noteable) { create(:personal_snippet, :private) }
-
- it 'can not edit nor read the note' do
- expect(policy).to be_disallowed(:admin_note)
- expect(policy).to be_disallowed(:resolve_note)
- expect(policy).to be_disallowed(:read_note)
- end
- end
- end
-
context 'when a discussion is confidential' do
before do
issue.update_attribute(:confidential, true)
diff --git a/spec/policies/personal_snippet_policy_spec.rb b/spec/policies/personal_snippet_policy_spec.rb
index 0abdf7382be..a6b76620c29 100644
--- a/spec/policies/personal_snippet_policy_spec.rb
+++ b/spec/policies/personal_snippet_policy_spec.rb
@@ -10,8 +10,8 @@ describe PersonalSnippetPolicy do
let(:author_permissions) do
[
- :update_personal_snippet,
- :admin_personal_snippet
+ :update_snippet,
+ :admin_snippet
]
end
@@ -24,7 +24,7 @@ describe PersonalSnippetPolicy do
subject { permissions(admin_user) }
it do
- is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_allowed(:read_snippet)
is_expected.to be_allowed(:create_note)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_allowed(*author_permissions)
@@ -39,7 +39,7 @@ describe PersonalSnippetPolicy do
subject { permissions(nil) }
it do
- is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_allowed(:read_snippet)
is_expected.to be_disallowed(:create_note)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
@@ -50,7 +50,7 @@ describe PersonalSnippetPolicy do
subject { permissions(regular_user) }
it do
- is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_allowed(:read_snippet)
is_expected.to be_allowed(:create_note)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
@@ -61,7 +61,7 @@ describe PersonalSnippetPolicy do
subject { permissions(snippet.author) }
it do
- is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_allowed(:read_snippet)
is_expected.to be_allowed(:create_note)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_allowed(*author_permissions)
@@ -78,7 +78,7 @@ describe PersonalSnippetPolicy do
subject { permissions(nil) }
it do
- is_expected.to be_disallowed(:read_personal_snippet)
+ is_expected.to be_disallowed(:read_snippet)
is_expected.to be_disallowed(:create_note)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
@@ -89,7 +89,7 @@ describe PersonalSnippetPolicy do
subject { permissions(regular_user) }
it do
- is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_allowed(:read_snippet)
is_expected.to be_allowed(:create_note)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
@@ -100,7 +100,7 @@ describe PersonalSnippetPolicy do
subject { permissions(external_user) }
it do
- is_expected.to be_disallowed(:read_personal_snippet)
+ is_expected.to be_disallowed(:read_snippet)
is_expected.to be_disallowed(:create_note)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
@@ -111,7 +111,7 @@ describe PersonalSnippetPolicy do
subject { permissions(snippet.author) }
it do
- is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_allowed(:read_snippet)
is_expected.to be_allowed(:create_note)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_allowed(*author_permissions)
@@ -128,7 +128,7 @@ describe PersonalSnippetPolicy do
subject { permissions(nil) }
it do
- is_expected.to be_disallowed(:read_personal_snippet)
+ is_expected.to be_disallowed(:read_snippet)
is_expected.to be_disallowed(:create_note)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
@@ -139,7 +139,7 @@ describe PersonalSnippetPolicy do
subject { permissions(regular_user) }
it do
- is_expected.to be_disallowed(:read_personal_snippet)
+ is_expected.to be_disallowed(:read_snippet)
is_expected.to be_disallowed(:create_note)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
@@ -150,7 +150,7 @@ describe PersonalSnippetPolicy do
subject { permissions(external_user) }
it do
- is_expected.to be_disallowed(:read_personal_snippet)
+ is_expected.to be_disallowed(:read_snippet)
is_expected.to be_disallowed(:create_note)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
@@ -161,7 +161,7 @@ describe PersonalSnippetPolicy do
subject { permissions(snippet.author) }
it do
- is_expected.to be_allowed(:read_personal_snippet)
+ is_expected.to be_allowed(:read_snippet)
is_expected.to be_allowed(:create_note)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_allowed(*author_permissions)
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index e47204c774b..3b08726c75a 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -17,7 +17,7 @@ describe ProjectPolicy do
%i[
read_project read_board read_list read_wiki read_issue
read_project_for_iids read_issue_iid read_label
- read_milestone read_project_snippet read_project_member read_note
+ read_milestone read_snippet read_project_member read_note
create_project create_issue create_note upload_file create_merge_request_in
award_emoji read_release
]
@@ -25,7 +25,7 @@ describe ProjectPolicy do
let(:base_reporter_permissions) do
%i[
- download_code fork_project create_project_snippet update_issue
+ download_code fork_project create_snippet update_issue
admin_issue admin_label admin_list read_commit_status read_build
read_container_image read_pipeline read_environment read_deployment
read_merge_request download_wiki_code read_sentry_issue
@@ -48,8 +48,8 @@ describe ProjectPolicy do
let(:base_maintainer_permissions) do
%i[
- push_to_delete_protected_branch update_project_snippet
- admin_project_snippet admin_project_member admin_note admin_wiki admin_project
+ push_to_delete_protected_branch update_snippet
+ admin_snippet admin_project_member admin_note admin_wiki admin_project
admin_commit_status admin_build admin_container_image
admin_pipeline admin_environment admin_deployment destroy_release add_cluster
daily_statistics
@@ -559,4 +559,18 @@ describe ProjectPolicy do
end
end
end
+
+ context 'alert bot' do
+ let(:current_user) { User.alert_bot }
+
+ subject { described_class.new(current_user, project) }
+
+ it { is_expected.to be_allowed(:reporter_access) }
+
+ context 'within a private project' do
+ let(:project) { create(:project, :private) }
+
+ it { is_expected.to be_allowed(:admin_issue) }
+ end
+ end
end
diff --git a/spec/policies/project_snippet_policy_spec.rb b/spec/policies/project_snippet_policy_spec.rb
index 374636874ee..b55d565a57c 100644
--- a/spec/policies/project_snippet_policy_spec.rb
+++ b/spec/policies/project_snippet_policy_spec.rb
@@ -12,8 +12,8 @@ describe ProjectSnippetPolicy do
let(:author) { other_user }
let(:author_permissions) do
[
- :update_project_snippet,
- :admin_project_snippet
+ :update_snippet,
+ :admin_snippet
]
end
@@ -26,7 +26,7 @@ describe ProjectSnippetPolicy do
end
it do
- expect_allowed(:read_project_snippet, :create_note)
+ expect_allowed(:read_snippet, :create_note)
expect_disallowed(*author_permissions)
end
end
@@ -38,8 +38,8 @@ describe ProjectSnippetPolicy do
context 'not snippet author' do
it do
- expect_allowed(:read_project_snippet, :create_note)
- expect_disallowed(:admin_project_snippet)
+ expect_allowed(:read_snippet, :create_note)
+ expect_disallowed(:admin_snippet)
end
end
end
@@ -53,7 +53,7 @@ describe ProjectSnippetPolicy do
end
it do
- expect_allowed(:read_project_snippet, :create_note)
+ expect_allowed(:read_snippet, :create_note)
expect_allowed(*author_permissions)
end
end
@@ -64,15 +64,15 @@ describe ProjectSnippetPolicy do
end
it do
- expect_allowed(:read_project_snippet, :create_note)
- expect_disallowed(:admin_project_snippet)
+ expect_allowed(:read_snippet, :create_note)
+ expect_disallowed(:admin_snippet)
end
end
context 'not a project member' do
it do
- expect_allowed(:read_project_snippet, :create_note)
- expect_disallowed(:admin_project_snippet)
+ expect_allowed(:read_snippet, :create_note)
+ expect_disallowed(:admin_snippet)
end
end
end
@@ -85,7 +85,7 @@ describe ProjectSnippetPolicy do
let(:current_user) { nil }
it do
- expect_allowed(:read_project_snippet)
+ expect_allowed(:read_snippet)
expect_disallowed(*author_permissions)
end
end
@@ -94,7 +94,7 @@ describe ProjectSnippetPolicy do
let(:current_user) { regular_user }
it do
- expect_allowed(:read_project_snippet, :create_note)
+ expect_allowed(:read_snippet, :create_note)
expect_disallowed(*author_permissions)
end
@@ -105,7 +105,7 @@ describe ProjectSnippetPolicy do
let(:current_user) { external_user }
it do
- expect_allowed(:read_project_snippet, :create_note)
+ expect_allowed(:read_snippet, :create_note)
expect_disallowed(*author_permissions)
end
@@ -115,7 +115,7 @@ describe ProjectSnippetPolicy do
end
it do
- expect_allowed(:read_project_snippet, :create_note)
+ expect_allowed(:read_snippet, :create_note)
expect_disallowed(*author_permissions)
end
end
@@ -129,7 +129,7 @@ describe ProjectSnippetPolicy do
let(:current_user) { nil }
it do
- expect_disallowed(:read_project_snippet)
+ expect_disallowed(:read_snippet)
expect_disallowed(*author_permissions)
end
end
@@ -138,7 +138,7 @@ describe ProjectSnippetPolicy do
let(:current_user) { regular_user }
it do
- expect_allowed(:read_project_snippet, :create_note)
+ expect_allowed(:read_snippet, :create_note)
expect_disallowed(*author_permissions)
end
@@ -149,7 +149,7 @@ describe ProjectSnippetPolicy do
let(:current_user) { external_user }
it do
- expect_disallowed(:read_project_snippet, :create_note)
+ expect_disallowed(:read_snippet, :create_note)
expect_disallowed(*author_permissions)
end
@@ -159,7 +159,7 @@ describe ProjectSnippetPolicy do
end
it do
- expect_allowed(:read_project_snippet, :create_note)
+ expect_allowed(:read_snippet, :create_note)
expect_disallowed(*author_permissions)
end
end
@@ -173,7 +173,7 @@ describe ProjectSnippetPolicy do
let(:current_user) { nil }
it do
- expect_disallowed(:read_project_snippet)
+ expect_disallowed(:read_snippet)
expect_disallowed(*author_permissions)
end
end
@@ -182,7 +182,7 @@ describe ProjectSnippetPolicy do
let(:current_user) { regular_user }
it do
- expect_disallowed(:read_project_snippet, :create_note)
+ expect_disallowed(:read_snippet, :create_note)
expect_disallowed(*author_permissions)
end
@@ -193,7 +193,7 @@ describe ProjectSnippetPolicy do
let(:current_user) { external_user }
it do
- expect_disallowed(:read_project_snippet, :create_note)
+ expect_disallowed(:read_snippet, :create_note)
expect_disallowed(*author_permissions)
end
@@ -203,7 +203,7 @@ describe ProjectSnippetPolicy do
end
it do
- expect_allowed(:read_project_snippet, :create_note)
+ expect_allowed(:read_snippet, :create_note)
expect_disallowed(*author_permissions)
end
end
@@ -214,7 +214,7 @@ describe ProjectSnippetPolicy do
let(:current_user) { create(:admin) }
it do
- expect_allowed(:read_project_snippet, :create_note)
+ expect_allowed(:read_snippet, :create_note)
expect_allowed(*author_permissions)
end
end
diff --git a/spec/presenters/blob_presenter_spec.rb b/spec/presenters/blob_presenter_spec.rb
index 8680e8b9b45..ee7bfd1256d 100644
--- a/spec/presenters/blob_presenter_spec.rb
+++ b/spec/presenters/blob_presenter_spec.rb
@@ -21,7 +21,7 @@ describe BlobPresenter, :seed_helper do
subject { described_class.new(blob) }
- it { expect(subject.web_url).to eq("http://localhost/#{project.full_path}/blob/#{blob.commit_id}/#{blob.path}") }
+ it { expect(subject.web_url).to eq("http://localhost/#{project.full_path}/-/blob/#{blob.commit_id}/#{blob.path}") }
end
describe '#highlight' do
diff --git a/spec/presenters/ci/bridge_presenter_spec.rb b/spec/presenters/ci/bridge_presenter_spec.rb
index 1c2eeced20c..2a4c40a7eaa 100644
--- a/spec/presenters/ci/bridge_presenter_spec.rb
+++ b/spec/presenters/ci/bridge_presenter_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Ci::BridgePresenter do
- set(:project) { create(:project) }
- set(:pipeline) { create(:ci_pipeline, project: project) }
- set(:bridge) { create(:ci_bridge, pipeline: pipeline, status: :failed) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:bridge) { create(:ci_bridge, pipeline: pipeline, status: :failed) }
subject(:presenter) do
described_class.new(bridge)
diff --git a/spec/presenters/ci/pipeline_presenter_spec.rb b/spec/presenters/ci/pipeline_presenter_spec.rb
index fd391478eb4..700d1f5cbb6 100644
--- a/spec/presenters/ci/pipeline_presenter_spec.rb
+++ b/spec/presenters/ci/pipeline_presenter_spec.rb
@@ -209,6 +209,15 @@ describe Ci::PipelinePresenter do
"<a class=\"mr-iid\" href=\"#{merge_request_path(mr_2)}\">#{mr_2.to_reference} #{mr_2.title}</a>, " \
"<a class=\"mr-iid\" href=\"#{merge_request_path(mr_1)}\">#{mr_1.to_reference} #{mr_1.title}</a>")
}
+
+ context 'with a limit passed' do
+ subject { presenter.all_related_merge_request_text(limit: 1) }
+
+ it {
+ is_expected.to eq("2 related merge requests: " \
+ "<a class=\"mr-iid\" href=\"#{merge_request_path(mr_2)}\">#{mr_2.to_reference} #{mr_2.title}</a>")
+ }
+ end
end
end
diff --git a/spec/presenters/ci/trigger_presenter_spec.rb b/spec/presenters/ci/trigger_presenter_spec.rb
index ac3967f4f77..41cb436f928 100644
--- a/spec/presenters/ci/trigger_presenter_spec.rb
+++ b/spec/presenters/ci/trigger_presenter_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
describe Ci::TriggerPresenter do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
- set(:trigger) do
+ let_it_be(:trigger) do
create(:ci_trigger, token: '123456789abcd', project: project)
end
diff --git a/spec/presenters/event_presenter_spec.rb b/spec/presenters/event_presenter_spec.rb
index 79f5e359141..eb94d838370 100644
--- a/spec/presenters/event_presenter_spec.rb
+++ b/spec/presenters/event_presenter_spec.rb
@@ -5,11 +5,11 @@ require 'spec_helper'
describe EventPresenter do
include Gitlab::Routing.url_helpers
- set(:group) { create(:group) }
- set(:project) { create(:project, group: group) }
- set(:target) { create(:milestone, project: project) }
- set(:group_event) { create(:event, :created, project: nil, group: group, target: target) }
- set(:project_event) { create(:event, :created, project: project, target: target) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:target) { create(:milestone, project: project) }
+ let_it_be(:group_event) { create(:event, :created, project: nil, group: group, target: target) }
+ let_it_be(:project_event) { create(:event, :created, project: project, target: target) }
describe '#resource_parent_name' do
context 'with group event' do
diff --git a/spec/presenters/label_presenter_spec.rb b/spec/presenters/label_presenter_spec.rb
index e9f9657490d..9578d017af5 100644
--- a/spec/presenters/label_presenter_spec.rb
+++ b/spec/presenters/label_presenter_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
describe LabelPresenter do
include Gitlab::Routing.url_helpers
- set(:group) { create(:group) }
- set(:project) { create(:project, group: group) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
let(:label) { build_stubbed(:label, project: project).present(issuable_subject: project) }
let(:group_label) { build_stubbed(:group_label, group: group).present(issuable_subject: project) }
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index ce437090d43..025f083ab27 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -101,7 +101,7 @@ describe MergeRequestPresenter do
allow(presenter).to receive_message_chain(:conflicts, :can_be_resolved_by?).with(user) { true }
expect(path)
- .to eq("/#{project.full_path}/merge_requests/#{resource.iid}/conflicts")
+ .to eq("/#{project.full_path}/-/merge_requests/#{resource.iid}/conflicts")
end
end
end
@@ -179,7 +179,7 @@ describe MergeRequestPresenter do
it 'returns correct link with correct text' do
is_expected
- .to match("#{project.full_path}/merge_requests/#{resource.iid}/assign_related_issues")
+ .to match("#{project.full_path}/-/merge_requests/#{resource.iid}/assign_related_issues")
is_expected
.to match("Assign yourself to this issue")
@@ -192,7 +192,7 @@ describe MergeRequestPresenter do
it 'returns correct link with correct text' do
is_expected
- .to match("#{project.full_path}/merge_requests/#{resource.iid}/assign_related_issues")
+ .to match("#{project.full_path}/-/merge_requests/#{resource.iid}/assign_related_issues")
is_expected
.to match("Assign yourself to these issues")
@@ -221,7 +221,7 @@ describe MergeRequestPresenter do
.with(user)
.and_return(true)
- is_expected.to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}/cancel_auto_merge")
+ is_expected.to eq("/#{resource.project.full_path}/-/merge_requests/#{resource.iid}/cancel_auto_merge")
end
end
@@ -248,7 +248,7 @@ describe MergeRequestPresenter do
.and_return(true)
is_expected
- .to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}/merge")
+ .to eq("/#{resource.project.full_path}/-/merge_requests/#{resource.iid}/merge")
end
end
@@ -312,7 +312,7 @@ describe MergeRequestPresenter do
project.add_maintainer(user)
is_expected
- .to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}/remove_wip")
+ .to eq("/#{resource.project.full_path}/-/merge_requests/#{resource.iid}/remove_wip")
end
end
@@ -334,7 +334,7 @@ describe MergeRequestPresenter do
allow(resource).to receive(:target_branch_exists?) { true }
is_expected
- .to eq("/#{resource.target_project.full_path}/commits/#{resource.target_branch}")
+ .to eq("/#{resource.target_project.full_path}/-/commits/#{resource.target_branch}")
end
end
@@ -358,7 +358,7 @@ describe MergeRequestPresenter do
allow(resource).to receive(:source_branch_exists?) { true }
is_expected
- .to eq("/#{resource.source_project.full_path}/commits/#{resource.source_branch}")
+ .to eq("/#{resource.source_project.full_path}/-/commits/#{resource.source_branch}")
end
end
@@ -382,7 +382,7 @@ describe MergeRequestPresenter do
allow(resource).to receive(:target_branch_exists?) { true }
is_expected
- .to eq("/#{resource.target_project.full_path}/tree/#{resource.target_branch}")
+ .to eq("/#{resource.target_project.full_path}/-/tree/#{resource.target_branch}")
end
end
@@ -496,7 +496,7 @@ describe MergeRequestPresenter do
allow(resource).to receive(:source_branch_exists?) { true }
is_expected
- .to eq("<a href=\"/#{resource.source_project.full_path}/tree/#{resource.source_branch}\">#{resource.source_branch}</a>")
+ .to eq("<a href=\"/#{resource.source_project.full_path}/-/tree/#{resource.source_branch}\">#{resource.source_branch}</a>")
end
it 'escapes html, when source_branch does not exist' do
@@ -535,7 +535,7 @@ describe MergeRequestPresenter do
it 'returns path' do
is_expected
- .to eq("/#{project.full_path}/merge_requests/#{resource.iid}/rebase")
+ .to eq("/#{project.full_path}/-/merge_requests/#{resource.iid}/rebase")
end
end
diff --git a/spec/presenters/milestone_presenter_spec.rb b/spec/presenters/milestone_presenter_spec.rb
new file mode 100644
index 00000000000..3d7b3ad6d78
--- /dev/null
+++ b/spec/presenters/milestone_presenter_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MilestonePresenter do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:milestone) { create(:milestone, group: group) }
+ let_it_be(:presenter) { described_class.new(milestone, current_user: user) }
+
+ before do
+ group.add_developer(user)
+ end
+
+ describe '#milestone_path' do
+ it 'returns correct path' do
+ expect(presenter.milestone_path).to eq("/groups/#{group.full_path}/-/milestones/#{milestone.iid}")
+ end
+ end
+end
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index 620ef3ff21a..af191172d33 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -297,7 +297,7 @@ describe ProjectPresenter do
is_link: false,
label: a_string_including("New file"),
link: presenter.project_new_blob_path(project, 'master'),
- class_modifier: 'success'
+ class_modifier: 'missing'
)
end
@@ -467,7 +467,7 @@ describe ProjectPresenter do
expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(
is_link: false,
- label: a_string_including('Kubernetes configured'),
+ label: a_string_including('Kubernetes'),
link: presenter.project_cluster_path(project, cluster)
)
end
@@ -480,7 +480,7 @@ describe ProjectPresenter do
expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(
is_link: false,
- label: a_string_including('Kubernetes configured'),
+ label: a_string_including('Kubernetes'),
link: presenter.project_clusters_path(project)
)
end
diff --git a/spec/presenters/projects/prometheus/alert_presenter_spec.rb b/spec/presenters/projects/prometheus/alert_presenter_spec.rb
new file mode 100644
index 00000000000..fc6ddcbfe02
--- /dev/null
+++ b/spec/presenters/projects/prometheus/alert_presenter_spec.rb
@@ -0,0 +1,235 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::Prometheus::AlertPresenter do
+ let_it_be(:project) { create(:project) }
+
+ let(:presenter) { described_class.new(alert) }
+ let(:payload) { {} }
+ let(:alert) { create(:alerting_alert, project: project, payload: payload) }
+
+ describe '#project_full_path' do
+ subject { presenter.project_full_path }
+
+ it { is_expected.to eq(project.full_path) }
+ end
+
+ describe '#starts_at' do
+ subject { presenter.starts_at }
+
+ before do
+ payload['startsAt'] = starts_at
+ end
+
+ context 'with valid datetime' do
+ let(:datetime) { Time.now }
+ let(:starts_at) { datetime.rfc3339 }
+
+ it { is_expected.to eq(datetime.rfc3339) }
+ end
+
+ context 'with invalid datetime' do
+ let(:starts_at) { 'invalid' }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#issue_summary_markdown' do
+ let(:markdown_line_break) { ' ' }
+
+ subject { presenter.issue_summary_markdown }
+
+ context 'without default payload' do
+ it do
+ is_expected.to eq(
+ <<~MARKDOWN.chomp
+ #### Summary
+
+ **Start time:** #{presenter.starts_at}
+
+ MARKDOWN
+ )
+ end
+ end
+
+ context 'with annotations' do
+ before do
+ payload['annotations'] = { 'title' => 'Alert Title', 'foo' => 'value1', 'bar' => 'value2' }
+ end
+
+ it do
+ is_expected.to eq(
+ <<~MARKDOWN.chomp
+ #### Summary
+
+ **Start time:** #{presenter.starts_at}
+
+ #### Alert Details
+
+ **foo:** value1#{markdown_line_break}
+ **bar:** value2
+ MARKDOWN
+ )
+ end
+ end
+
+ context 'with full query' do
+ before do
+ payload['generatorURL'] = 'http://host?g0.expr=query'
+ end
+
+ it do
+ is_expected.to eq(
+ <<~MARKDOWN.chomp
+ #### Summary
+
+ **Start time:** #{presenter.starts_at}#{markdown_line_break}
+ **full_query:** `query`
+
+ MARKDOWN
+ )
+ end
+ end
+
+ context 'with the Generic Alert parameters' do
+ let(:generic_alert_params) do
+ {
+ 'title' => 'The Generic Alert Title',
+ 'description' => 'The Generic Alert Description',
+ 'monitoring_tool' => 'monitoring_tool_name',
+ 'service' => 'service_name',
+ 'hosts' => ['http://localhost:3000', 'http://localhost:3001']
+ }
+ end
+
+ before do
+ payload['annotations'] = generic_alert_params
+ end
+
+ it do
+ is_expected.to eq(
+ <<~MARKDOWN.chomp
+ #### Summary
+
+ **Start time:** #{presenter.starts_at}#{markdown_line_break}
+ **Service:** service_name#{markdown_line_break}
+ **Monitoring tool:** monitoring_tool_name#{markdown_line_break}
+ **Hosts:** http://localhost:3000 http://localhost:3001
+
+ #### Alert Details
+
+ **description:** The Generic Alert Description
+ MARKDOWN
+ )
+ end
+
+ context 'when hosts is a string' do
+ before do
+ payload['annotations'] = { 'hosts' => 'http://localhost:3000' }
+ end
+
+ it do
+ is_expected.to eq(
+ <<~MARKDOWN.chomp
+ #### Summary
+
+ **Start time:** #{presenter.starts_at}#{markdown_line_break}
+ **Hosts:** http://localhost:3000
+
+ MARKDOWN
+ )
+ end
+ end
+ end
+ end
+
+ context 'with gitlab alert' do
+ let(:gitlab_alert) { create(:prometheus_alert, project: project) }
+ let(:metric_id) { gitlab_alert.prometheus_metric_id }
+
+ let(:alert) do
+ create(:alerting_alert, project: project, metric_id: metric_id)
+ end
+
+ describe '#full_title' do
+ let(:query_title) do
+ "#{gitlab_alert.title} #{gitlab_alert.computed_operator} #{gitlab_alert.threshold} for 5 minutes"
+ end
+
+ let(:expected_subject) do
+ "#{alert.environment.name}: #{query_title}"
+ end
+
+ subject { presenter.full_title }
+
+ it { is_expected.to eq(expected_subject) }
+ end
+
+ describe '#metric_query' do
+ subject { presenter.metric_query }
+
+ it { is_expected.to eq(gitlab_alert.full_query) }
+ end
+
+ describe '#environment_name' do
+ subject { presenter.environment_name }
+
+ it { is_expected.to eq(alert.environment.name) }
+ end
+
+ describe '#performance_dashboard_link' do
+ let(:expected_link) do
+ Gitlab::Routing.url_helpers
+ .metrics_project_environment_url(project, alert.environment)
+ end
+
+ subject { presenter.performance_dashboard_link }
+
+ it { is_expected.to eq(expected_link) }
+ end
+ end
+
+ context 'without gitlab alert' do
+ describe '#full_title' do
+ subject { presenter.full_title }
+
+ context 'with title' do
+ let(:title) { 'some title' }
+
+ before do
+ expect(alert).to receive(:title).and_return(title)
+ end
+
+ it { is_expected.to eq(title) }
+ end
+
+ context 'without title' do
+ it { is_expected.to eq('') }
+ end
+ end
+
+ describe '#metric_query' do
+ subject { presenter.metric_query }
+
+ it { is_expected.to be_nil }
+ end
+
+ describe '#environment_name' do
+ subject { presenter.environment_name }
+
+ it { is_expected.to be_nil }
+ end
+
+ describe '#performance_dashboard_link' do
+ let(:expected_link) do
+ Gitlab::Routing.url_helpers.metrics_project_environments_url(project)
+ end
+
+ subject { presenter.performance_dashboard_link }
+
+ it { is_expected.to eq(expected_link) }
+ end
+ end
+end
diff --git a/spec/presenters/release_presenter_spec.rb b/spec/presenters/release_presenter_spec.rb
index 4c6142f2edb..82f312622ff 100644
--- a/spec/presenters/release_presenter_spec.rb
+++ b/spec/presenters/release_presenter_spec.rb
@@ -51,6 +51,22 @@ describe ReleasePresenter do
end
end
+ describe '#self_url' do
+ subject { presenter.self_url }
+
+ it 'returns its own url' do
+ is_expected.to match /#{project_release_url(project, release)}/
+ end
+
+ context 'when release_show_page feature flag is disabled' do
+ before do
+ stub_feature_flags(release_show_page: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
describe '#merge_requests_url' do
subject { presenter.merge_requests_url }
diff --git a/spec/presenters/sentry_detailed_error_presenter_spec.rb b/spec/presenters/sentry_error_presenter_spec.rb
index e483b6d41a1..5f3f1d33b86 100644
--- a/spec/presenters/sentry_detailed_error_presenter_spec.rb
+++ b/spec/presenters/sentry_error_presenter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe SentryDetailedErrorPresenter do
+describe SentryErrorPresenter do
let(:error) { build(:detailed_error_tracking_error) }
let(:presenter) { described_class.new(error) }
@@ -10,7 +10,7 @@ describe SentryDetailedErrorPresenter do
subject { presenter.frequency }
it 'returns an array of frequency structs' do
- expect(subject).to include(a_kind_of(SentryDetailedErrorPresenter::FrequencyStruct))
+ expect(subject).to include(a_kind_of(SentryErrorPresenter::FrequencyStruct))
end
it 'converts the times into UTC time objects' do
diff --git a/spec/presenters/snippet_blob_presenter_spec.rb b/spec/presenters/snippet_blob_presenter_spec.rb
new file mode 100644
index 00000000000..fa10d1a7f30
--- /dev/null
+++ b/spec/presenters/snippet_blob_presenter_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe SnippetBlobPresenter do
+ describe '#rich_data' do
+ let(:snippet) { build(:personal_snippet) }
+
+ subject { described_class.new(snippet.blob).rich_data }
+
+ it 'returns nil when the snippet blob is binary' do
+ allow(snippet.blob).to receive(:binary?).and_return(true)
+
+ expect(subject).to be_nil
+ end
+
+ it 'returns markdown content when snippet file is markup' do
+ snippet.file_name = 'test.md'
+ snippet.content = '*foo*'
+
+ expect(subject).to eq '<p data-sourcepos="1:1-1:5" dir="auto"><em>foo</em></p>'
+ end
+
+ it 'returns syntax highlighted content' do
+ snippet.file_name = 'test.rb'
+ snippet.content = 'class Foo;end'
+
+ expect(subject)
+ .to eq '<span id="LC1" class="line" lang="ruby"><span class="k">class</span> <span class="nc">Foo</span><span class="p">;</span><span class="k">end</span></span>'
+ end
+
+ it 'returns plain text highlighted content' do
+ snippet.file_name = 'test'
+ snippet.content = 'foo'
+
+ expect(subject).to eq '<span id="LC1" class="line" lang="plaintext">foo</span>'
+ end
+ end
+
+ describe '#plain_data' do
+ let(:snippet) { build(:personal_snippet) }
+
+ subject { described_class.new(snippet.blob).plain_data }
+
+ it 'returns nil when the snippet blob is binary' do
+ allow(snippet.blob).to receive(:binary?).and_return(true)
+
+ expect(subject).to be_nil
+ end
+
+ it 'returns plain content when snippet file is markup' do
+ snippet.file_name = 'test.md'
+ snippet.content = '*foo*'
+
+ expect(subject).to eq '<span id="LC1" class="line" lang="markdown"><span class="ge">*foo*</span></span>'
+ end
+
+ it 'returns plain syntax content' do
+ snippet.file_name = 'test.rb'
+ snippet.content = 'class Foo;end'
+
+ expect(subject)
+ .to eq '<span id="LC1" class="line" lang="">class Foo;end</span>'
+ end
+
+ it 'returns plain text highlighted content' do
+ snippet.file_name = 'test'
+ snippet.content = 'foo'
+
+ expect(subject).to eq '<span id="LC1" class="line" lang="">foo</span>'
+ end
+ end
+
+ describe '#raw_path' do
+ subject { described_class.new(snippet.blob).raw_path }
+
+ context 'with ProjectSnippet' do
+ let!(:project) { create(:project) }
+ let(:snippet) { build(:project_snippet, project: project, id: 1) }
+
+ it 'returns the raw path' do
+ expect(subject).to eq "/#{snippet.project.full_path}/snippets/1/raw"
+ end
+ end
+
+ context 'with PersonalSnippet' do
+ let(:snippet) { build(:personal_snippet, id: 1) }
+
+ it 'returns the raw path' do
+ expect(subject).to eq "/snippets/1/raw"
+ end
+ end
+ end
+end
diff --git a/spec/presenters/snippet_presenter_spec.rb b/spec/presenters/snippet_presenter_spec.rb
index 87f2220979c..e2117905559 100644
--- a/spec/presenters/snippet_presenter_spec.rb
+++ b/spec/presenters/snippet_presenter_spec.rb
@@ -62,8 +62,8 @@ describe SnippetPresenter do
context 'with PersonalSnippet' do
let(:snippet) { personal_snippet }
- it 'checks read_personal_snippet' do
- expect(presenter).to receive(:can?).with(user, :read_personal_snippet, snippet)
+ it 'checks read_snippet' do
+ expect(presenter).to receive(:can?).with(user, :read_snippet, snippet)
subject
end
@@ -72,8 +72,8 @@ describe SnippetPresenter do
context 'with ProjectSnippet' do
let(:snippet) { project_snippet }
- it 'checks read_project_snippet ' do
- expect(presenter).to receive(:can?).with(user, :read_project_snippet, snippet)
+ it 'checks read_snippet ' do
+ expect(presenter).to receive(:can?).with(user, :read_snippet, snippet)
subject
end
@@ -86,8 +86,8 @@ describe SnippetPresenter do
context 'with PersonalSnippet' do
let(:snippet) { personal_snippet }
- it 'checks update_personal_snippet' do
- expect(presenter).to receive(:can?).with(user, :update_personal_snippet, snippet)
+ it 'checks update_snippet' do
+ expect(presenter).to receive(:can?).with(user, :update_snippet, snippet)
subject
end
@@ -96,8 +96,8 @@ describe SnippetPresenter do
context 'with ProjectSnippet' do
let(:snippet) { project_snippet }
- it 'checks update_project_snippet ' do
- expect(presenter).to receive(:can?).with(user, :update_project_snippet, snippet)
+ it 'checks update_snippet ' do
+ expect(presenter).to receive(:can?).with(user, :update_snippet, snippet)
subject
end
@@ -110,8 +110,8 @@ describe SnippetPresenter do
context 'with PersonalSnippet' do
let(:snippet) { personal_snippet }
- it 'checks admin_personal_snippet' do
- expect(presenter).to receive(:can?).with(user, :admin_personal_snippet, snippet)
+ it 'checks admin_snippet' do
+ expect(presenter).to receive(:can?).with(user, :admin_snippet, snippet)
subject
end
@@ -120,8 +120,8 @@ describe SnippetPresenter do
context 'with ProjectSnippet' do
let(:snippet) { project_snippet }
- it 'checks admin_project_snippet ' do
- expect(presenter).to receive(:can?).with(user, :admin_project_snippet, snippet)
+ it 'checks admin_snippet ' do
+ expect(presenter).to receive(:can?).with(user, :admin_snippet, snippet)
subject
end
diff --git a/spec/presenters/tree_entry_presenter_spec.rb b/spec/presenters/tree_entry_presenter_spec.rb
index d74ee5dc28f..0c29fe3e5ff 100644
--- a/spec/presenters/tree_entry_presenter_spec.rb
+++ b/spec/presenters/tree_entry_presenter_spec.rb
@@ -11,6 +11,6 @@ describe TreeEntryPresenter do
let(:presenter) { described_class.new(tree) }
describe '.web_url' do
- it { expect(presenter.web_url).to eq("http://localhost/#{project.full_path}/tree/#{tree.commit_id}/#{tree.path}") }
+ it { expect(presenter.web_url).to eq("http://localhost/#{project.full_path}/-/tree/#{tree.commit_id}/#{tree.path}") }
end
end
diff --git a/spec/requests/api/access_requests_spec.rb b/spec/requests/api/access_requests_spec.rb
index 3bfca00776f..17f4cde5b8c 100644
--- a/spec/requests/api/access_requests_spec.rb
+++ b/spec/requests/api/access_requests_spec.rb
@@ -3,12 +3,12 @@
require 'spec_helper'
describe API::AccessRequests do
- set(:maintainer) { create(:user) }
- set(:developer) { create(:user) }
- set(:access_requester) { create(:user) }
- set(:stranger) { create(:user) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:access_requester) { create(:user) }
+ let_it_be(:stranger) { create(:user) }
- set(:project) do
+ let_it_be(:project) do
create(:project, :public, creator_id: maintainer.id, namespace: maintainer.namespace) do |project|
project.add_developer(developer)
project.add_maintainer(maintainer)
@@ -16,7 +16,7 @@ describe API::AccessRequests do
end
end
- set(:group) do
+ let_it_be(:group) do
create(:group, :public) do |group|
group.add_developer(developer)
group.add_owner(maintainer)
diff --git a/spec/requests/api/api_spec.rb b/spec/requests/api/api_spec.rb
new file mode 100644
index 00000000000..c794db4cb0b
--- /dev/null
+++ b/spec/requests/api/api_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::API do
+ let(:user) { create(:user, last_activity_on: Date.yesterday) }
+
+ describe 'Record user last activity in after hook' do
+ # It does not matter which endpoint is used because last_activity_on should
+ # be updated on every request. `/groups` is used as an example
+ # to represent any API endpoint
+
+ it 'updates the users last_activity_on date' do
+ expect { get api('/groups', user) }.to change { user.reload.last_activity_on }.to(Date.today)
+ end
+
+    context 'when the api_activity_logging feature is disabled' do
+ it 'does not touch last_activity_on' do
+ stub_feature_flags(api_activity_logging: false)
+
+ expect { get api('/groups', user) }.not_to change { user.reload.last_activity_on }
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/applications_spec.rb b/spec/requests/api/applications_spec.rb
index 438d5dbf018..d110751e661 100644
--- a/spec/requests/api/applications_spec.rb
+++ b/spec/requests/api/applications_spec.rb
@@ -21,6 +21,7 @@ describe API::Applications, :api do
expect(json_response['application_id']).to eq application.uid
expect(json_response['secret']).to eq application.secret
expect(json_response['callback_url']).to eq application.redirect_uri
+ expect(json_response['confidential']).to eq application.confidential
end
it 'does not allow creating an application with the wrong redirect_uri format' do
@@ -72,6 +73,16 @@ describe API::Applications, :api do
expect(json_response).to be_a Hash
expect(json_response['error']).to eq('scopes is missing')
end
+
+ it 'does not allow creating an application with confidential set to nil' do
+ expect do
+ post api('/applications', admin_user), params: { name: 'application_name', redirect_uri: 'http://application.url', scopes: '', confidential: nil }
+ end.not_to change { Doorkeeper::Application.count }
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response).to be_a Hash
+ expect(json_response['message']['confidential'].first).to eq('is not included in the list')
+ end
end
context 'authorized user without authorization' do
diff --git a/spec/requests/api/award_emoji_spec.rb b/spec/requests/api/award_emoji_spec.rb
index 19a34314bb8..4a830f2b449 100644
--- a/spec/requests/api/award_emoji_spec.rb
+++ b/spec/requests/api/award_emoji_spec.rb
@@ -3,14 +3,13 @@
require 'spec_helper'
describe API::AwardEmoji do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:issue) { create(:issue, project: project) }
- set(:award_emoji) { create(:award_emoji, awardable: issue, user: user) }
- let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
- let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request, user: user) }
-
- set(:note) { create(:note, project: project, noteable: issue) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:award_emoji) { create(:award_emoji, awardable: issue, user: user) }
+ let_it_be(:note) { create(:note, project: project, noteable: issue) }
+ let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request, user: user) }
before do
project.add_maintainer(user)
diff --git a/spec/requests/api/boards_spec.rb b/spec/requests/api/boards_spec.rb
index 510ef9d7d0a..f53bfedb49a 100644
--- a/spec/requests/api/boards_spec.rb
+++ b/spec/requests/api/boards_spec.rb
@@ -3,35 +3,35 @@
require 'spec_helper'
describe API::Boards do
- set(:user) { create(:user) }
- set(:non_member) { create(:user) }
- set(:guest) { create(:user) }
- set(:admin) { create(:user, :admin) }
- set(:board_parent) { create(:project, :public, creator_id: user.id, namespace: user.namespace ) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:non_member) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:admin) { create(:user, :admin) }
+ let_it_be(:board_parent, reload: true) { create(:project, :public, creator_id: user.id, namespace: user.namespace ) }
- set(:dev_label) do
+ let_it_be(:dev_label) do
create(:label, title: 'Development', color: '#FFAABB', project: board_parent)
end
- set(:test_label) do
+ let_it_be(:test_label) do
create(:label, title: 'Testing', color: '#FFAACC', project: board_parent)
end
- set(:ux_label) do
+ let_it_be(:ux_label) do
create(:label, title: 'UX', color: '#FF0000', project: board_parent)
end
- set(:dev_list) do
+ let_it_be(:dev_list) do
create(:list, label: dev_label, position: 1)
end
- set(:test_list) do
+ let_it_be(:test_list) do
create(:list, label: test_label, position: 2)
end
- set(:milestone) { create(:milestone, project: board_parent) }
- set(:board_label) { create(:label, project: board_parent) }
- set(:board) { create(:board, project: board_parent, lists: [dev_list, test_list]) }
+ let_it_be(:milestone) { create(:milestone, project: board_parent) }
+ let_it_be(:board_label) { create(:label, project: board_parent) }
+ let_it_be(:board) { create(:board, project: board_parent, lists: [dev_list, test_list]) }
it_behaves_like 'group and project boards', "/projects/:id/boards"
@@ -66,11 +66,11 @@ describe API::Boards do
end
describe "POST /groups/:id/boards/lists" do
- set(:group) { create(:group) }
- set(:board_parent) { create(:group, parent: group ) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:board_parent) { create(:group, parent: group ) }
let(:url) { "/groups/#{board_parent.id}/boards/#{board.id}/lists" }
- set(:board) { create(:board, group: board_parent) }
+ let_it_be(:board) { create(:board, group: board_parent) }
it 'creates a new board list for ancestor group labels' do
group.add_developer(user)
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index 99374d28324..046ec40f218 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe API::Branches do
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:project) { create(:project, :repository, creator: user, path: 'my.project') }
let(:guest) { create(:user).tap { |u| project.add_guest(u) } }
let(:branch_name) { 'feature' }
@@ -608,7 +608,7 @@ describe API::Branches do
expect(json_response['message']).to eq('Branch name is invalid')
end
- it 'returns 400 if branch already exists' do
+ it 'returns 400 if branch already exists', :clean_gitlab_redis_cache do
post api(route, user), params: { branch: 'new_design1', ref: branch_sha }
expect(response).to have_gitlab_http_status(201)
diff --git a/spec/requests/api/broadcast_messages_spec.rb b/spec/requests/api/broadcast_messages_spec.rb
index 9dc639a25a2..7d71b83e147 100644
--- a/spec/requests/api/broadcast_messages_spec.rb
+++ b/spec/requests/api/broadcast_messages_spec.rb
@@ -3,56 +3,32 @@
require 'spec_helper'
describe API::BroadcastMessages do
- set(:user) { create(:user) }
- set(:admin) { create(:admin) }
- set(:message) { create(:broadcast_message) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:message) { create(:broadcast_message) }
describe 'GET /broadcast_messages' do
- it 'returns a 401 for anonymous users' do
- get api('/broadcast_messages')
-
- expect(response).to have_gitlab_http_status(401)
- end
-
- it 'returns a 403 for users' do
- get api('/broadcast_messages', user)
-
- expect(response).to have_gitlab_http_status(403)
- end
-
- it 'returns an Array of BroadcastMessages for admins' do
+ it 'returns an Array of BroadcastMessages' do
create(:broadcast_message)
- get api('/broadcast_messages', admin)
+ get api('/broadcast_messages')
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_kind_of(Array)
expect(json_response.first.keys)
- .to match_array(%w(id message starts_at ends_at color font active target_path))
+ .to match_array(%w(id message starts_at ends_at color font active target_path broadcast_type))
end
end
describe 'GET /broadcast_messages/:id' do
- it 'returns a 401 for anonymous users' do
+ it 'returns the specified message' do
get api("/broadcast_messages/#{message.id}")
- expect(response).to have_gitlab_http_status(401)
- end
-
- it 'returns a 403 for users' do
- get api("/broadcast_messages/#{message.id}", user)
-
- expect(response).to have_gitlab_http_status(403)
- end
-
- it 'returns the specified message for admins' do
- get api("/broadcast_messages/#{message.id}", admin)
-
expect(response).to have_gitlab_http_status(200)
expect(json_response['id']).to eq message.id
expect(json_response.keys)
- .to match_array(%w(id message starts_at ends_at color font active target_path))
+ .to match_array(%w(id message starts_at ends_at color font active target_path broadcast_type))
end
end
@@ -109,6 +85,32 @@ describe API::BroadcastMessages do
expect(response).to have_gitlab_http_status(201)
expect(json_response['target_path']).to eq attrs[:target_path]
end
+
+ it 'accepts a broadcast type' do
+ attrs = attributes_for(:broadcast_message, broadcast_type: 'notification')
+
+ post api('/broadcast_messages', admin), params: attrs
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['broadcast_type']).to eq attrs[:broadcast_type]
+ end
+
+ it 'uses default broadcast type' do
+ attrs = attributes_for(:broadcast_message)
+
+ post api('/broadcast_messages', admin), params: attrs
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['broadcast_type']).to eq 'banner'
+ end
+
+ it 'errors for invalid broadcast type' do
+ attrs = attributes_for(:broadcast_message, broadcast_type: 'invalid-type')
+
+ post api('/broadcast_messages', admin), params: attrs
+
+ expect(response).to have_gitlab_http_status(400)
+ end
end
end
@@ -168,6 +170,23 @@ describe API::BroadcastMessages do
expect(response).to have_gitlab_http_status(200)
expect(json_response['target_path']).to eq attrs[:target_path]
end
+
+ it 'accepts a new broadcast_type' do
+ attrs = { broadcast_type: 'notification' }
+
+ put api("/broadcast_messages/#{message.id}", admin), params: attrs
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['broadcast_type']).to eq attrs[:broadcast_type]
+ end
+
+ it 'errors for invalid broadcast type' do
+ attrs = { broadcast_type: 'invalid-type' }
+
+ put api("/broadcast_messages/#{message.id}", admin), params: attrs
+
+ expect(response).to have_gitlab_http_status(400)
+ end
end
end
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index e390f3945a9..c179de249d5 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -12,7 +12,6 @@ describe API::Commits do
let(:project) { create(:project, :repository, creator: user, path: 'my.project') }
let(:branch_with_dot) { project.repository.find_branch('ends-with.json') }
let(:branch_with_slash) { project.repository.find_branch('improve/awesome') }
-
let(:project_id) { project.id }
let(:current_user) { nil }
@@ -241,6 +240,62 @@ describe API::Commits do
end
end
end
+
+ context 'with order parameter' do
+ let(:route) { "/projects/#{project_id}/repository/commits?ref_name=0031876&per_page=6&order=#{order}" }
+
+ context 'set to topo' do
+ let(:order) { 'topo' }
+
+ # git log --graph -n 6 --pretty=format:"%h" --topo-order 0031876
+ # * 0031876
+ # |\
+ # | * 48ca272
+ # | * 335bc94
+ # * | bf6e164
+ # * | 9d526f8
+ # |/
+ # * 1039376
+ it 'returns project commits ordered by topo order' do
+ commits = project.repository.commits("0031876", limit: 6, order: 'topo')
+
+ get api(route, current_user)
+
+ expect(json_response.size).to eq(6)
+ expect(json_response.map { |entry| entry["id"] }).to eq(commits.map(&:id))
+ end
+ end
+
+ context 'set to default' do
+ let(:order) { 'default' }
+
+ # git log --graph -n 6 --pretty=format:"%h" --date-order 0031876
+ # * 0031876
+ # |\
+ # * | bf6e164
+ # | * 48ca272
+ # * | 9d526f8
+ # | * 335bc94
+ # |/
+ # * 1039376
+ it 'returns project commits ordered by default order' do
+ commits = project.repository.commits("0031876", limit: 6, order: 'default')
+
+ get api(route, current_user)
+
+ expect(json_response.size).to eq(6)
+ expect(json_response.map { |entry| entry["id"] }).to eq(commits.map(&:id))
+ end
+ end
+
+ context 'set to an invalid parameter' do
+ let(:order) { 'invalid' }
+
+ it_behaves_like '400 response' do
+ let(:request) { get api(route, current_user) }
+ end
+ end
+ end
end
end
diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb
index d8fc234cbae..151f67061eb 100644
--- a/spec/requests/api/deployments_spec.rb
+++ b/spec/requests/api/deployments_spec.rb
@@ -122,7 +122,29 @@ describe API::Deployments do
describe 'POST /projects/:id/deployments' do
let!(:project) { create(:project, :repository) }
- let(:sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
+ # * ddd0f15ae83993f5cb66a927a28673882e99100b (HEAD -> master, origin/master, origin/HEAD) Merge branch 'po-fix-test-en
+ # |\
+ # | * 2d1db523e11e777e49377cfb22d368deec3f0793 Correct test_env.rb path for adding branch
+ # |/
+ # * 1e292f8fedd741b75372e19097c76d327140c312 Merge branch 'cherry-pick-ce369011' into 'master'
+
+ let_it_be(:sha) { 'ddd0f15ae83993f5cb66a927a28673882e99100b' }
+ let_it_be(:first_deployment_sha) { '1e292f8fedd741b75372e19097c76d327140c312' }
+
+ before do
+      # Creating the first deployment is an edge case that is already covered by unit tests;
+      # here we want to see the behavior of a running system, so we create a first deployment.
+ post(
+ api("/projects/#{project.id}/deployments", user),
+ params: {
+ environment: 'production',
+ sha: first_deployment_sha,
+ ref: 'master',
+ tag: false,
+ status: 'success'
+ }
+ )
+ end
context 'as a maintainer' do
it 'creates a new deployment' do
@@ -163,6 +185,7 @@ describe API::Deployments do
mr = create(
:merge_request,
:merged,
+ merge_commit_sha: sha,
target_project: project,
source_project: project,
target_branch: 'master',
@@ -215,6 +238,7 @@ describe API::Deployments do
mr = create(
:merge_request,
:merged,
+ merge_commit_sha: sha,
target_project: project,
source_project: project,
target_branch: 'master',
@@ -236,6 +260,43 @@ describe API::Deployments do
expect(deploy.merge_requests).to eq([mr])
end
+
+ it 'links any picked merge requests to the deployment', :sidekiq_inline do
+ mr = create(
+ :merge_request,
+ :merged,
+ merge_commit_sha: sha,
+ target_project: project,
+ source_project: project,
+ target_branch: 'master',
+ source_branch: 'foo'
+ )
+
+ # we branch from the previous deployment and cherry-pick mr into the new branch
+ branch = project.repository.add_branch(developer, 'stable', first_deployment_sha)
+ expect(branch).not_to be_nil
+
+ result = ::Commits::CherryPickService
+ .new(project, developer, commit: mr.merge_commit, start_branch: 'stable', branch_name: 'stable')
+ .execute
+ expect(result[:status]).to eq(:success), result[:message]
+
+ pick_sha = result[:result]
+
+ post(
+ api("/projects/#{project.id}/deployments", developer),
+ params: {
+ environment: 'production',
+ sha: pick_sha,
+ ref: 'stable',
+ tag: false,
+ status: 'success'
+ }
+ )
+
+ deploy = project.deployments.last
+ expect(deploy.merge_requests).to eq([mr])
+ end
end
context 'as non member' do
diff --git a/spec/requests/api/error_tracking_spec.rb b/spec/requests/api/error_tracking_spec.rb
index 48ddc7f5a75..120248bdbc6 100644
--- a/spec/requests/api/error_tracking_spec.rb
+++ b/spec/requests/api/error_tracking_spec.rb
@@ -3,13 +3,39 @@
require 'spec_helper'
describe API::ErrorTracking do
- describe "GET /projects/:id/error_tracking/settings" do
- let(:user) { create(:user) }
- let(:setting) { create(:project_error_tracking_setting) }
- let(:project) { setting.project }
+ let_it_be(:user) { create(:user) }
+ let(:setting) { create(:project_error_tracking_setting) }
+ let(:project) { setting.project }
+
+ shared_examples 'returns project settings' do
+ it 'returns correct project settings' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq(
+ 'active' => setting.reload.enabled,
+ 'project_name' => setting.project_name,
+ 'sentry_external_url' => setting.sentry_external_url,
+ 'api_url' => setting.api_url
+ )
+ end
+ end
+
+ shared_examples 'returns 404' do
+    it 'returns a 404 error' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message'])
+ .to eq('404 Error Tracking Setting Not Found')
+ end
+ end
+
+ describe "PATCH /projects/:id/error_tracking/settings" do
+ let(:params) { { active: false } }
def make_request
- get api("/projects/#{project.id}/error_tracking/settings", user)
+ patch api("/projects/#{project.id}/error_tracking/settings", user), params: params
end
context 'when authenticated as maintainer' do
@@ -17,16 +43,119 @@ describe API::ErrorTracking do
project.add_maintainer(user)
end
- it 'returns project settings' do
- make_request
+ context 'patch settings' do
+ it_behaves_like 'returns project settings'
+
+ it 'updates enabled flag' do
+ expect(setting).to be_enabled
+
+ make_request
+
+ expect(json_response).to include('active' => false)
+ expect(setting.reload).not_to be_enabled
+ end
+
+ context 'active is invalid' do
+ let(:params) { { active: "randomstring" } }
+
+ it 'returns active is invalid if non boolean' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error'])
+ .to eq('active is invalid')
+ end
+ end
+
+ context 'active is empty' do
+ let(:params) { { active: '' } }
+
+ it 'returns 400' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error'])
+ .to eq('active is empty')
+ end
+ end
+ end
+
+ context 'without a project setting' do
+ let(:project) { create(:project) }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'patch settings' do
+ it_behaves_like 'returns 404'
+ end
+ end
+ end
+
+ context 'when authenticated as reporter' do
+ before do
+ project.add_reporter(user)
+ end
+
+ context 'patch request' do
+ it 'returns 403' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ context 'when authenticated as developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'patch request' do
+ it 'returns 403' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ context 'when authenticated as non-member' do
+ context 'patch request' do
+ it 'returns 404' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when unauthenticated' do
+ let(:user) { nil }
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to eq(
- 'active' => setting.enabled,
- 'project_name' => setting.project_name,
- 'sentry_external_url' => setting.sentry_external_url,
- 'api_url' => setting.api_url
- )
+ context 'patch request' do
+ it 'returns 401 for update request' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+ end
+
+ describe "GET /projects/:id/error_tracking/settings" do
+ def make_request
+ get api("/projects/#{project.id}/error_tracking/settings", user)
+ end
+
+ context 'when authenticated as maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'get settings' do
+ it_behaves_like 'returns project settings'
end
end
@@ -37,12 +166,8 @@ describe API::ErrorTracking do
project.add_maintainer(user)
end
- it 'returns 404' do
- make_request
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response['message'])
- .to eq('404 Error Tracking Setting Not Found')
+ context 'get settings' do
+ it_behaves_like 'returns 404'
end
end
@@ -58,6 +183,18 @@ describe API::ErrorTracking do
end
end
+ context 'when authenticated as developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns 403' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
context 'when authenticated as non-member' do
it 'returns 404' do
make_request
diff --git a/spec/requests/api/events_spec.rb b/spec/requests/api/events_spec.rb
index 240f9a02877..30e6a1340a8 100644
--- a/spec/requests/api/events_spec.rb
+++ b/spec/requests/api/events_spec.rb
@@ -171,6 +171,18 @@ describe API::Events do
expect(json_response[0]['target_id']).to eq(closed_issue.id)
end
end
+
+ context 'when scope is passed' do
+ context 'when unauthenticated' do
+ it 'returns no user events' do
+ get api("/users/#{user.username}/events?scope=all")
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(0)
+ end
+ end
+ end
end
it 'returns a 404 error if not found' do
diff --git a/spec/requests/api/features_spec.rb b/spec/requests/api/features_spec.rb
index dfd14f89dbf..d7b0bf881a6 100644
--- a/spec/requests/api/features_spec.rb
+++ b/spec/requests/api/features_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe API::Features do
- set(:user) { create(:user) }
- set(:admin) { create(:admin) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
before do
Flipper.unregister_groups
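
The `set` -> `let_it_be` swap above (repeated across many of these specs) memoizes records once per example group via the test-prof gem. Below is a rough plain-RSpec stand-in for that idea; it deliberately omits the transactional rollback test-prof adds, and the data is illustrative.

require 'rspec/autorun'

RSpec.describe 'group-level memoization (sketch)' do
  # Built once for the whole group; let_it_be would also roll it back afterwards.
  before(:context) { @admin = { username: 'root', admin: true } }

  it 'is visible in the first example' do
    expect(@admin[:admin]).to be(true)
  end

  it 'is reused, not rebuilt, in later examples' do
    expect(@admin[:username]).to eq('root')
  end
end
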
diff --git a/spec/requests/api/graphql/current_user_query_spec.rb b/spec/requests/api/graphql/current_user_query_spec.rb
index 9db638ea59e..2b38b8e98ab 100644
--- a/spec/requests/api/graphql/current_user_query_spec.rb
+++ b/spec/requests/api/graphql/current_user_query_spec.rb
@@ -16,7 +16,7 @@ describe 'getting project information' do
end
context 'when there is a current_user' do
- set(:current_user) { create(:user) }
+ let_it_be(:current_user) { create(:user) }
it_behaves_like 'a working graphql query'
diff --git a/spec/requests/api/graphql/gitlab_schema_spec.rb b/spec/requests/api/graphql/gitlab_schema_spec.rb
index 2cb8436662b..8d020cd3a4e 100644
--- a/spec/requests/api/graphql/gitlab_schema_spec.rb
+++ b/spec/requests/api/graphql/gitlab_schema_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe 'GitlabSchema configurations' do
include GraphqlHelpers
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
shared_examples 'imposing query limits' do
describe 'timeouts' do
@@ -67,24 +67,45 @@ describe 'GitlabSchema configurations' do
end
end
- context 'a deep but simple recursive introspective query' do
- it 'fails due to recursion' do
- query = File.read(Rails.root.join('spec/fixtures/api/graphql/recursive-introspection.graphql'))
+ context 'failing queries' do
+ before do
+ allow(GitlabSchema).to receive(:max_query_recursion).and_return 1
+ end
- post_graphql(query, current_user: nil)
+ context 'a recursive introspective query' do
+ it 'fails due to recursion' do
+ query = File.read(Rails.root.join('spec/fixtures/api/graphql/recursive-introspection.graphql'))
- expect_graphql_errors_to_include [/Recursive query/]
+ post_graphql(query, current_user: nil)
+
+ expect_graphql_errors_to_include [/Recursive query/]
+ end
end
- end
- context 'a deep recursive non-introspective query' do
- it 'fails due to recursion, complexity and depth' do
- allow(GitlabSchema).to receive(:max_query_complexity).and_return 1
- query = File.read(Rails.root.join('spec/fixtures/api/graphql/recursive-query.graphql'))
+ context 'a recursive non-introspective query' do
+ before do
+        allow(GitlabSchema).to receive(:max_query_complexity).and_return 1
+        allow(GitlabSchema).to receive(:max_query_depth).and_return 1
+ end
- post_graphql(query, current_user: nil)
+ shared_examples 'fails due to recursion, complexity and depth' do |fixture_file|
+ it 'fails due to recursion, complexity and depth' do
+ query = File.read(Rails.root.join(fixture_file))
+
+ post_graphql(query, current_user: nil)
+
+ expect_graphql_errors_to_include [/Recursive query/, /exceeds max complexity/, /exceeds max depth/]
+ end
+ end
- expect_graphql_errors_to_include [/Recursive query/, /exceeds max complexity/, /exceeds max depth/]
+ context 'using `nodes` notation' do
+ it_behaves_like 'fails due to recursion, complexity and depth', 'spec/fixtures/api/graphql/recursive-query-nodes.graphql'
+ end
+
+ context 'using `edges -> node` notation' do
+ it_behaves_like 'fails due to recursion, complexity and depth', 'spec/fixtures/api/graphql/recursive-query-edges-node.graphql'
+ end
end
end
end
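
The refactor above folds two near-identical examples into one shared example that takes the fixture path as an argument. A minimal, self-contained sketch of that pattern follows; the names and values are illustrative.

require 'rspec/autorun'

RSpec.describe 'shared example with an argument (sketch)' do
  # Stands in for the stubbed GitlabSchema limits above.
  let(:limit) { 1 }

  shared_examples 'fails when the measurement exceeds the limit' do |measurement|
    it 'is rejected' do
      expect(measurement > limit).to be(true)
    end
  end

  context 'using `nodes` notation' do
    it_behaves_like 'fails when the measurement exceeds the limit', 2
  end

  context 'using `edges -> node` notation' do
    it_behaves_like 'fails when the measurement exceeds the limit', 3
  end
end
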
diff --git a/spec/requests/api/graphql/group/milestones_spec.rb b/spec/requests/api/graphql/group/milestones_spec.rb
new file mode 100644
index 00000000000..f8e3c0026f5
--- /dev/null
+++ b/spec/requests/api/graphql/group/milestones_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Milestones through GroupQuery' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:now) { Time.now }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:milestone_1) { create(:milestone, group: group) }
+ let_it_be(:milestone_2) { create(:milestone, group: group, state: :closed, start_date: now, due_date: now + 1.day) }
+ let_it_be(:milestone_3) { create(:milestone, group: group, start_date: now, due_date: now + 2.days) }
+ let_it_be(:milestone_4) { create(:milestone, group: group, state: :closed, start_date: now - 2.days, due_date: now - 1.day) }
+ let_it_be(:milestone_from_other_group) { create(:milestone, group: create(:group)) }
+
+ let(:milestone_data) { graphql_data['group']['milestones']['edges'] }
+
+ describe 'Get list of milestones from a group' do
+ before do
+ group.add_developer(user)
+ end
+
+ context 'when the request is correct' do
+ before do
+ fetch_milestones(user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns milestones successfully' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(graphql_errors).to be_nil
+ expect_array_response(milestone_1.to_global_id.to_s, milestone_2.to_global_id.to_s, milestone_3.to_global_id.to_s, milestone_4.to_global_id.to_s)
+ end
+ end
+
+ context 'when filtering by timeframe' do
+ it 'fetches milestones between start_date and due_date' do
+ fetch_milestones(user, { start_date: now.to_s, end_date: (now + 2.days).to_s })
+
+ expect_array_response(milestone_2.to_global_id.to_s, milestone_3.to_global_id.to_s)
+ end
+ end
+
+ context 'when filtering by state' do
+ it 'returns milestones with given state' do
+ fetch_milestones(user, { state: :active })
+
+ expect_array_response(milestone_1.to_global_id.to_s, milestone_3.to_global_id.to_s)
+ end
+ end
+
+ def fetch_milestones(user = nil, args = {})
+ post_graphql(milestones_query(args), current_user: user)
+ end
+
+ def milestones_query(args = {})
+ milestone_node = <<~NODE
+ edges {
+ node {
+ id
+ title
+ state
+ }
+ }
+ NODE
+
+ graphql_query_for("group",
+ { full_path: group.full_path },
+ [query_graphql_field("milestones", args, milestone_node)]
+ )
+ end
+
+ def expect_array_response(*items)
+ expect(response).to have_gitlab_http_status(:success)
+ expect(milestone_data).to be_an Array
+ expect(milestone_node_array('id')).to match_array(items)
+ end
+
+ def milestone_node_array(extract_attribute = nil)
+ node_array(milestone_data, extract_attribute)
+ end
+ end
+end
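
For reference, roughly the query string that `milestones_query` above composes. The builder below is a plain-Ruby sketch, not GitLab's `graphql_query_for` helper, and the argument fragment is an assumption for illustration.

def milestones_query_sketch(full_path, args_fragment = '')
  <<~GRAPHQL
    query {
      group(fullPath: "#{full_path}") {
        milestones#{args_fragment} {
          edges {
            node { id title state }
          }
        }
      }
    }
  GRAPHQL
end

puts milestones_query_sketch('my-group', '(state: active)')
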
diff --git a/spec/requests/api/graphql/group_query_spec.rb b/spec/requests/api/graphql/group_query_spec.rb
index e0f1e4dbe9e..6e2663fb090 100644
--- a/spec/requests/api/graphql/group_query_spec.rb
+++ b/spec/requests/api/graphql/group_query_spec.rb
@@ -55,7 +55,7 @@ describe 'getting group information' do
post_graphql(group_query(group1), current_user: user1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(graphql_data['group']['id']).to eq(group1.to_global_id.to_s)
expect(graphql_data['group']['name']).to eq(group1.name)
expect(graphql_data['group']['path']).to eq(group1.path)
diff --git a/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb b/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
index b24981873c8..3fdeccc84f9 100644
--- a/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
+++ b/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe 'Adding an AwardEmoji' do
include GraphqlHelpers
- set(:current_user) { create(:user) }
- set(:project) { create(:project) }
- set(:awardable) { create(:note, project: project) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:awardable) { create(:note, project: project) }
let(:emoji_name) { 'thumbsup' }
let(:mutation) do
variables = {
diff --git a/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb b/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
index 5e2c0e668a5..bc796b34db4 100644
--- a/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
+++ b/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe 'Toggling an AwardEmoji' do
include GraphqlHelpers
- set(:current_user) { create(:user) }
- set(:project) { create(:project) }
- set(:awardable) { create(:note, project: project) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:awardable) { create(:note, project: project) }
let(:emoji_name) { 'thumbsup' }
let(:mutation) do
variables = {
diff --git a/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb
index b04fcb9aece..4c535434faa 100644
--- a/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe 'Adding a DiffNote' do
include GraphqlHelpers
- set(:current_user) { create(:user) }
+ let_it_be(:current_user) { create(:user) }
let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
let(:project) { create(:project, :repository) }
let(:diff_refs) { noteable.diff_refs }
diff --git a/spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb
index 3ba6c689024..0bba3e79434 100644
--- a/spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe 'Adding an image DiffNote' do
include GraphqlHelpers
- set(:current_user) { create(:user) }
+ let_it_be(:current_user) { create(:user) }
let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
let(:project) { create(:project, :repository) }
let(:diff_refs) { noteable.diff_refs }
diff --git a/spec/requests/api/graphql/mutations/notes/create/note_spec.rb b/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
index 14aaa430ac9..9a78d44245e 100644
--- a/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe 'Adding a Note' do
include GraphqlHelpers
- set(:current_user) { create(:user) }
+ let_it_be(:current_user) { create(:user) }
let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
let(:project) { create(:project) }
let(:discussion) { nil }
diff --git a/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
new file mode 100644
index 00000000000..0362fef2d2e
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
@@ -0,0 +1,244 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Updating an image DiffNote' do
+ include GraphqlHelpers
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:noteable) { create(:merge_request, :with_diffs) }
+ let_it_be(:original_body) { 'Original body' }
+ let_it_be(:original_position) do
+ Gitlab::Diff::Position.new(
+ old_path: 'files/images/any_image.png',
+ new_path: 'files/images/any_image.png',
+ width: 10,
+ height: 20,
+ x: 1,
+ y: 2,
+ diff_refs: noteable.diff_refs,
+ position_type: 'image'
+ )
+ end
+ let_it_be(:updated_body) { 'Updated body' }
+ let_it_be(:updated_width) { 50 }
+ let_it_be(:updated_height) { 100 }
+ let_it_be(:updated_x) { 5 }
+ let_it_be(:updated_y) { 10 }
+ let(:updated_position) do
+ {
+ width: updated_width,
+ height: updated_height,
+ x: updated_x,
+ y: updated_y
+ }
+ end
+ let!(:diff_note) do
+ create(:image_diff_note_on_merge_request,
+ noteable: noteable,
+ project: noteable.project,
+ note: original_body,
+ position: original_position)
+ end
+ let(:mutation) do
+ variables = {
+ id: GitlabSchema.id_from_object(diff_note).to_s,
+ body: updated_body,
+ position: updated_position
+ }
+
+ graphql_mutation(:update_image_diff_note, variables)
+ end
+
+ def mutation_response
+ graphql_mutation_response(:update_image_diff_note)
+ end
+
+ context 'when the user does not have permission' do
+ let_it_be(:current_user) { create(:user) }
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ['The resource that you are attempting to access does not exist or you don\'t have permission to perform this action']
+
+ it 'does not update the DiffNote' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ diff_note.reload
+
+ expect(diff_note).to have_attributes(
+ note: original_body,
+ position: have_attributes(
+ width: original_position.width,
+ height: original_position.height,
+ x: original_position.x,
+ y: original_position.y
+ )
+ )
+ end
+ end
+
+ context 'when the user has permission' do
+ let(:current_user) { diff_note.author }
+
+ it 'updates the DiffNote' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ diff_note.reload
+
+ expect(diff_note).to have_attributes(
+ note: updated_body,
+ position: have_attributes(
+ width: updated_width,
+ height: updated_height,
+ x: updated_x,
+ y: updated_y
+ )
+ )
+ end
+
+ it 'returns the updated DiffNote' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['note']).to include(
+ 'body' => updated_body,
+ 'position' => hash_including(
+ 'width' => updated_width,
+ 'height' => updated_height,
+ 'x' => updated_x,
+ 'y' => updated_y
+ )
+ )
+ end
+
+ describe 'updating single properties at a time' do
+ where(:property, :new_value) do
+ :body | 'foo'
+ :width | 19
+ :height | 18
+ :x | 17
+ :y | 16
+ end
+
+ with_them do
+ # Properties that will be POSTed:
+ let(:updated_body) { value(:body) }
+ let(:updated_width) { value(:width) }
+ let(:updated_height) { value(:height) }
+ let(:updated_x) { value(:x) }
+ let(:updated_y) { value(:y) }
+ # Expectations of the properties:
+ let(:expected_body) { value(:body) || original_body }
+ let(:expected_width) { value(:width) || original_position.width }
+ let(:expected_height) { value(:height) || original_position.height }
+ let(:expected_x) { value(:x) || original_position.x }
+ let(:expected_y) { value(:y) || original_position.y }
+
+ def value(prop)
+ new_value if property == prop
+ end
+
+ it 'updates the DiffNote correctly' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ diff_note.reload
+
+ expect(diff_note).to have_attributes(
+ note: expected_body,
+ position: have_attributes(
+ width: expected_width,
+ height: expected_height,
+ x: expected_x,
+ y: expected_y
+ )
+ )
+ end
+ end
+
+ context 'when position is nil' do
+ let(:updated_position) { nil }
+
+ it 'updates the DiffNote correctly' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ diff_note.reload
+
+ expect(diff_note).to have_attributes(
+ note: updated_body,
+ position: original_position
+ )
+ end
+ end
+ end
+
+ context 'when both body and position args are blank' do
+ let(:updated_body) { nil }
+ let(:updated_position) { nil }
+
+ it_behaves_like 'a mutation that returns top-level errors', errors: ['body or position arguments are required']
+ end
+
+ context 'when resource is not a DiffNote on an image' do
+ let!(:diff_note) { create(:diff_note_on_merge_request, note: original_body) }
+
+ it_behaves_like 'a mutation that returns top-level errors', errors: ['Resource is not an ImageDiffNote']
+ end
+
+ context 'when there are ActiveRecord validation errors' do
+ before do
+ expect(diff_note).to receive_message_chain(
+ :errors,
+ :full_messages
+ ).and_return(['Error 1', 'Error 2'])
+
+ expect_next_instance_of(Notes::UpdateService) do |service|
+ expect(service).to receive(:execute).and_return(diff_note)
+ end
+ end
+
+ it_behaves_like 'a mutation that returns errors in the response', errors: ['Error 1', 'Error 2']
+
+ it 'does not update the DiffNote' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ diff_note.reload
+
+ expect(diff_note).to have_attributes(
+ note: original_body,
+ position: have_attributes(
+ width: original_position.width,
+ height: original_position.height,
+ x: original_position.x,
+ y: original_position.y
+ )
+ )
+ end
+
+ it 'returns the DiffNote with its original body' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['note']).to include(
+ 'body' => original_body,
+ 'position' => hash_including(
+ 'width' => original_position.width,
+ 'height' => original_position.height,
+ 'x' => original_position.x,
+ 'y' => original_position.y
+ )
+ )
+ end
+ end
+
+ context 'when body only contains quick actions' do
+ let(:updated_body) { '/close' }
+
+ it 'returns a nil note and empty errors' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response).to include(
+ 'errors' => [],
+ 'note' => nil
+ )
+ end
+ end
+ end
+end
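
The `where` / `with_them` block above comes from the rspec-parameterized gem: each table row becomes its own example group with the row values bound as helpers. A small self-contained sketch of the same mechanism; the require path and the table values are assumptions for illustration.

require 'rspec/autorun'
require 'rspec-parameterized' # assumption: standard require path of the gem

RSpec.describe 'one property at a time (sketch)' do
  using RSpec::Parameterized::TableSyntax

  where(:property, :new_value) do
    :width  | 19
    :height | 18
  end

  with_them do
    it 'changes only the named property' do
      defaults = { width: 10, height: 20 }
      updated  = defaults.merge(property => new_value)

      expect(updated[property]).to eq(new_value)
      expect(updated.reject { |k, _| k == property }).to eq(defaults.reject { |k, _| k == property })
    end
  end
end
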
diff --git a/spec/requests/api/graphql/mutations/notes/update_spec.rb b/spec/requests/api/graphql/mutations/notes/update/note_spec.rb
index 958f640995a..a5c6b72005e 100644
--- a/spec/requests/api/graphql/mutations/notes/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/update/note_spec.rb
@@ -22,7 +22,7 @@ describe 'Updating a Note' do
end
context 'when the user does not have permission' do
- let(:current_user) { create(:user) }
+ let_it_be(:current_user) { create(:user) }
it_behaves_like 'a mutation that returns top-level errors',
errors: ['The resource that you are attempting to access does not exist or you don\'t have permission to perform this action']
@@ -68,5 +68,18 @@ describe 'Updating a Note' do
expect(mutation_response['note']['body']).to eq(original_body)
end
end
+
+ context 'when body only contains quick actions' do
+ let(:updated_body) { '/close' }
+
+ it 'returns a nil note and empty errors' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response).to include(
+ 'errors' => [],
+ 'note' => nil
+ )
+ end
+ end
end
end
diff --git a/spec/requests/api/graphql/mutations/snippets/create_spec.rb b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
index 9ef45c0f6bc..cb19f50b5b5 100644
--- a/spec/requests/api/graphql/mutations/snippets/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
@@ -67,7 +67,8 @@ describe 'Creating a Snippet' do
it 'returns the created Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(mutation_response['snippet']['content']).to eq(content)
+ expect(mutation_response['snippet']['blob']['richData']).to match(content)
+ expect(mutation_response['snippet']['blob']['plainData']).to match(content)
expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description)
expect(mutation_response['snippet']['fileName']).to eq(file_name)
@@ -92,7 +93,8 @@ describe 'Creating a Snippet' do
it 'returns the created Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(mutation_response['snippet']['content']).to eq(content)
+ expect(mutation_response['snippet']['blob']['richData']).to match(content)
+ expect(mutation_response['snippet']['blob']['plainData']).to match(content)
expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description)
expect(mutation_response['snippet']['fileName']).to eq(file_name)
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index deaa9e8a237..e9481a36287 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -56,7 +56,8 @@ describe 'Updating a Snippet' do
it 'returns the updated Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(mutation_response['snippet']['content']).to eq(updated_content)
+ expect(mutation_response['snippet']['blob']['richData']).to match(updated_content)
+ expect(mutation_response['snippet']['blob']['plainData']).to match(updated_content)
expect(mutation_response['snippet']['title']).to eq(updated_title)
expect(mutation_response['snippet']['description']).to eq(updated_description)
expect(mutation_response['snippet']['fileName']).to eq(updated_file_name)
@@ -77,7 +78,8 @@ describe 'Updating a Snippet' do
it 'returns the Snippet with its original values' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(mutation_response['snippet']['content']).to eq(original_content)
+ expect(mutation_response['snippet']['blob']['richData']).to match(original_content)
+ expect(mutation_response['snippet']['blob']['plainData']).to match(original_content)
expect(mutation_response['snippet']['title']).to eq(original_title)
expect(mutation_response['snippet']['description']).to eq(original_description)
expect(mutation_response['snippet']['fileName']).to eq(original_file_name)
diff --git a/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb b/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
index 664206dec29..a1f9fa1f10c 100644
--- a/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
+++ b/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
@@ -57,6 +57,10 @@ describe 'getting a detailed sentry error' do
expect(error_data['firstSeen']).to eql sentry_detailed_error.first_seen
expect(error_data['lastSeen']).to eql sentry_detailed_error.last_seen
expect(error_data['gitlabCommit']).to be nil
+ expect(error_data['externalBaseUrl']).to eq sentry_detailed_error.external_base_url
+ expect(error_data['gitlabIssuePath']).to eq sentry_detailed_error.gitlab_issue
+ expect(error_data['tags']['logger']).to eq sentry_detailed_error.tags[:logger]
+ expect(error_data['tags']['level']).to eq sentry_detailed_error.tags[:level]
end
it 'is expected to return the frequency correctly' do
diff --git a/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb b/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb
new file mode 100644
index 00000000000..06a0bfc0d32
--- /dev/null
+++ b/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb
@@ -0,0 +1,256 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe 'sentry errors requests' do
+ include GraphqlHelpers
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project_setting) { create(:project_error_tracking_setting, project: project) }
+ let_it_be(:current_user) { project.owner }
+
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('sentryErrors', {}, fields)
+ )
+ end
+
+ describe 'getting a detailed sentry error' do
+ let_it_be(:sentry_detailed_error) { build(:detailed_error_tracking_error) }
+ let(:sentry_gid) { sentry_detailed_error.to_global_id.to_s }
+
+ let(:detailed_fields) do
+ all_graphql_fields_for('SentryDetailedError'.classify)
+ end
+
+ let(:fields) do
+ query_graphql_field('detailedError', { id: sentry_gid }, detailed_fields)
+ end
+
+ let(:error_data) { graphql_data.dig('project', 'sentryErrors', 'detailedError') }
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
+ context 'when data is loading via reactive cache' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'is expected to return an empty error' do
+ expect(error_data).to be_nil
+ end
+ end
+
+ context 'reactive cache returns data' do
+ before do
+ allow_any_instance_of(ErrorTracking::ProjectErrorTrackingSetting)
+ .to receive(:issue_details)
+ .and_return(issue: sentry_detailed_error)
+
+ post_graphql(query, current_user: current_user)
+ end
+
+ let(:sentry_error) { sentry_detailed_error }
+ let(:error) { error_data }
+
+ it_behaves_like 'setting sentry error data'
+
+ it 'is expected to return the frequency correctly' do
+ aggregate_failures 'it returns the frequency correctly' do
+ expect(error_data['frequency'].count).to eql sentry_detailed_error.frequency.count
+
+ first_frequency = error_data['frequency'].first
+ expect(Time.parse(first_frequency['time'])).to eql Time.at(sentry_detailed_error.frequency[0][0], in: 0)
+ expect(first_frequency['count']).to eql sentry_detailed_error.frequency[0][1]
+ end
+ end
+
+ context 'user does not have permission' do
+ let(:current_user) { create(:user) }
+
+ it 'is expected to return an empty error' do
+ expect(error_data).to be_nil
+ end
+ end
+ end
+
+ context 'sentry api returns an error' do
+ before do
+ expect_any_instance_of(ErrorTracking::ProjectErrorTrackingSetting)
+ .to receive(:issue_details)
+ .and_return(error: 'error message')
+
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'is expected to handle the error and return nil' do
+ expect(error_data).to be_nil
+ end
+ end
+ end
+
+ describe 'getting an errors list' do
+ let_it_be(:sentry_error) { build(:error_tracking_error) }
+ let_it_be(:pagination) do
+ {
+ 'next' => { 'cursor' => '2222' },
+ 'previous' => { 'cursor' => '1111' }
+ }
+ end
+
+ let(:fields) do
+ <<~QUERY
+ errors {
+ nodes {
+ #{all_graphql_fields_for('SentryError'.classify)}
+ }
+ pageInfo {
+ hasNextPage
+ hasPreviousPage
+ startCursor
+ endCursor
+ }
+ }
+ QUERY
+ end
+
+ let(:error_data) { graphql_data.dig('project', 'sentryErrors', 'errors', 'nodes') }
+ let(:pagination_data) { graphql_data.dig('project', 'sentryErrors', 'errors', 'pageInfo') }
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
+ context 'when data is loading via reactive cache' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'is expected to return nil' do
+ expect(error_data).to be_nil
+ end
+ end
+
+ context 'reactive cache returns data' do
+ before do
+ expect_any_instance_of(ErrorTracking::ProjectErrorTrackingSetting)
+ .to receive(:list_sentry_issues)
+ .and_return(issues: [sentry_error], pagination: pagination)
+
+ post_graphql(query, current_user: current_user)
+ end
+
+ let(:error) { error_data.first }
+
+ it 'is expected to return an array of data' do
+ expect(error_data).to be_a Array
+ expect(error_data.count).to eq 1
+ end
+
+ it_behaves_like 'setting sentry error data'
+
+ it 'sets the pagination correctly' do
+ expect(pagination_data['startCursor']).to eq(pagination['previous']['cursor'])
+ expect(pagination_data['endCursor']).to eq(pagination['next']['cursor'])
+ end
+
+ it 'is expected to return the frequency correctly' do
+ aggregate_failures 'it returns the frequency correctly' do
+ error = error_data.first
+
+ expect(error['frequency'].count).to eql sentry_error.frequency.count
+
+ first_frequency = error['frequency'].first
+
+ expect(Time.parse(first_frequency['time'])).to eql Time.at(sentry_error.frequency[0][0], in: 0)
+ expect(first_frequency['count']).to eql sentry_error.frequency[0][1]
+ end
+ end
+ end
+
+ context 'sentry api itself errors out' do
+ before do
+ expect_any_instance_of(ErrorTracking::ProjectErrorTrackingSetting)
+ .to receive(:list_sentry_issues)
+ .and_return(error: 'error message')
+
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'is expected to handle the error and return nil' do
+ expect(error_data).to be_nil
+ end
+ end
+ end
+
+ describe 'getting a stack trace' do
+ let_it_be(:sentry_stack_trace) { build(:error_tracking_error_event) }
+ let(:sentry_gid) { Gitlab::ErrorTracking::DetailedError.new(id: 1).to_global_id.to_s }
+
+ let(:stack_trace_fields) do
+ all_graphql_fields_for('SentryErrorStackTrace'.classify)
+ end
+
+ let(:fields) do
+ query_graphql_field('errorStackTrace', { id: sentry_gid }, stack_trace_fields)
+ end
+
+ let(:stack_trace_data) { graphql_data.dig('project', 'sentryErrors', 'errorStackTrace') }
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
+ context 'when data is loading via reactive cache' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'is expected to return an empty error' do
+ expect(stack_trace_data).to be_nil
+ end
+ end
+
+ context 'reactive cache returns data' do
+ before do
+ allow_any_instance_of(ErrorTracking::ProjectErrorTrackingSetting)
+ .to receive(:issue_latest_event)
+ .and_return(latest_event: sentry_stack_trace)
+
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'setting stack trace error'
+
+ context 'user does not have permission' do
+ let(:current_user) { create(:user) }
+
+ it 'is expected to return an empty error' do
+ expect(stack_trace_data).to be_nil
+ end
+ end
+ end
+
+ context 'sentry api returns an error' do
+ before do
+ expect_any_instance_of(ErrorTracking::ProjectErrorTrackingSetting)
+ .to receive(:issue_latest_event)
+ .and_return(error: 'error message')
+
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'is expected to handle the error and return nil' do
+ expect(stack_trace_data).to be_nil
+ end
+ end
+ end
+end
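
The two phases above (nil while the reactive cache is still loading, data once `issue_details` / `list_sentry_issues` is stubbed) in a minimal standalone form. The `SettingsSketch` class is illustrative, not `ErrorTracking::ProjectErrorTrackingSetting`.

require 'rspec/autorun'

# Illustrative stand-in for a reactive-cache-backed lookup.
class SettingsSketch
  def issue_details
    nil # "still loading": the cache has not been populated yet
  end
end

RSpec.describe 'reactive-cache style lookup (sketch)' do
  let(:settings) { SettingsSketch.new }

  it 'returns nil while the cache is loading' do
    expect(settings.issue_details).to be_nil
  end

  it 'returns data once the lookup is stubbed to a primed cache' do
    allow(settings).to receive(:issue_details).and_return(issue: { 'id' => 1 })

    expect(settings.issue_details[:issue]['id']).to eq(1)
  end
end
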
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index 70c21666799..e1fe6470881 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -14,7 +14,7 @@ describe 'getting merge request information nested in a project' do
graphql_query_for(
'project',
{ 'fullPath' => project.full_path },
- query_graphql_field('mergeRequest', iid: merge_request.iid)
+ query_graphql_field('mergeRequest', iid: merge_request.iid.to_s)
)
end
diff --git a/spec/requests/api/graphql/tasks/task_completion_status_spec.rb b/spec/requests/api/graphql/tasks/task_completion_status_spec.rb
index c457a6d7c25..c727750c0ce 100644
--- a/spec/requests/api/graphql/tasks/task_completion_status_spec.rb
+++ b/spec/requests/api/graphql/tasks/task_completion_status_spec.rb
@@ -9,8 +9,8 @@ describe 'getting task completion status information' do
DESCRIPTION_1_DONE = '- [x] task 1\n- [ ] task 2'
DESCRIPTION_2_DONE = '- [x] task 1\n- [x] task 2'
- set(:user1) { create(:user) }
- set(:project) { create(:project, :repository, :public) }
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, :public) }
let(:fields) do
<<~QUERY
@@ -25,7 +25,7 @@ describe 'getting task completion status information' do
graphql_query_for(
'project',
{ 'fullPath' => project.full_path },
- query_graphql_field(type, { iid: iid }, fields)
+ query_graphql_field(type, { iid: iid.to_s }, fields)
)
end
@@ -33,7 +33,7 @@ describe 'getting task completion status information' do
it 'returns the expected task completion status' do
post_graphql(create_task_completion_status_query_for(type, item.iid), current_user: user1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
task_completion_status = graphql_data.dig('project', type, 'taskCompletionStatus')
expect(task_completion_status).not_to be_nil
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
index d0378278600..cad9329fcb8 100644
--- a/spec/requests/api/graphql_spec.rb
+++ b/spec/requests/api/graphql_spec.rb
@@ -46,7 +46,7 @@ describe 'GraphQL' do
end
it 'logs the exception in Sentry and continues with the request' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(1).times
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once)
expect(Gitlab::GraphqlLogger).to receive(:info)
post_graphql(query, variables: {})
diff --git a/spec/requests/api/group_boards_spec.rb b/spec/requests/api/group_boards_spec.rb
index 232ec9aca32..d2d10f357fe 100644
--- a/spec/requests/api/group_boards_spec.rb
+++ b/spec/requests/api/group_boards_spec.rb
@@ -3,42 +3,42 @@
require 'spec_helper'
describe API::GroupBoards do
- set(:user) { create(:user) }
- set(:non_member) { create(:user) }
- set(:guest) { create(:user) }
- set(:admin) { create(:user, :admin) }
- set(:board_parent) { create(:group, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:non_member) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:admin) { create(:user, :admin) }
+ let_it_be(:board_parent) { create(:group, :public) }
before do
board_parent.add_owner(user)
end
- set(:project) { create(:project, :public, namespace: board_parent ) }
+ let_it_be(:project) { create(:project, :public, namespace: board_parent ) }
- set(:dev_label) do
+ let_it_be(:dev_label) do
create(:group_label, title: 'Development', color: '#FFAABB', group: board_parent)
end
- set(:test_label) do
+ let_it_be(:test_label) do
create(:group_label, title: 'Testing', color: '#FFAACC', group: board_parent)
end
- set(:ux_label) do
+ let_it_be(:ux_label) do
create(:group_label, title: 'UX', color: '#FF0000', group: board_parent)
end
- set(:dev_list) do
+ let_it_be(:dev_list) do
create(:list, label: dev_label, position: 1)
end
- set(:test_list) do
+ let_it_be(:test_list) do
create(:list, label: test_label, position: 2)
end
- set(:milestone) { create(:milestone, group: board_parent) }
- set(:board_label) { create(:group_label, group: board_parent) }
+ let_it_be(:milestone) { create(:milestone, group: board_parent) }
+ let_it_be(:board_label) { create(:group_label, group: board_parent) }
- set(:board) { create(:board, group: board_parent, lists: [dev_list, test_list]) }
+ let_it_be(:board) { create(:board, group: board_parent, lists: [dev_list, test_list]) }
it_behaves_like 'group and project boards', "/groups/:id/boards", false
diff --git a/spec/requests/api/group_export_spec.rb b/spec/requests/api/group_export_spec.rb
index ac4853e5388..6128f2e4a87 100644
--- a/spec/requests/api/group_export_spec.rb
+++ b/spec/requests/api/group_export_spec.rb
@@ -30,25 +30,39 @@ describe API::GroupExport do
group.add_owner(user)
end
- context 'when export file exists' do
+ context 'group_import_export feature flag enabled' do
before do
- upload.export_file = fixture_file_upload('spec/fixtures/group_export.tar.gz', "`/tar.gz")
- upload.save!
+ stub_feature_flags(group_import_export: true)
end
- it 'downloads exported group archive' do
- get api(download_path, user)
+ context 'when export file exists' do
+ before do
+ upload.export_file = fixture_file_upload('spec/fixtures/group_export.tar.gz', "`/tar.gz")
+ upload.save!
+ end
- expect(response).to have_gitlab_http_status(200)
- end
+ it 'downloads exported group archive' do
+ get api(download_path, user)
- context 'when export_file.file does not exist' do
- before do
- expect_next_instance_of(ImportExportUploader) do |uploader|
- expect(uploader).to receive(:file).and_return(nil)
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ context 'when export_file.file does not exist' do
+ before do
+ expect_next_instance_of(ImportExportUploader) do |uploader|
+ expect(uploader).to receive(:file).and_return(nil)
+ end
+ end
+
+ it 'returns 404' do
+ get api(download_path, user)
+
+ expect(response).to have_gitlab_http_status(404)
end
end
+ end
+ context 'when export file does not exist' do
it 'returns 404' do
get api(download_path, user)
@@ -57,8 +71,12 @@ describe API::GroupExport do
end
end
- context 'when export file does not exist' do
- it 'returns 404' do
+ context 'group_import_export feature flag disabled' do
+ before do
+ stub_feature_flags(group_import_export: false)
+ end
+
+ it 'responds with 404 Not Found' do
get api(download_path, user)
expect(response).to have_gitlab_http_status(404)
@@ -67,27 +85,45 @@ describe API::GroupExport do
end
describe 'POST /groups/:group_id/export' do
- context 'when user is a group owner' do
+ context 'group_import_export feature flag enabled' do
before do
- group.add_owner(user)
+ stub_feature_flags(group_import_export: true)
end
- it 'accepts download' do
- post api(path, user)
+ context 'when user is a group owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'accepts download' do
+ post api(path, user)
+
+ expect(response).to have_gitlab_http_status(202)
+ end
+ end
+
+ context 'when user is not a group owner' do
+ before do
+ group.add_developer(user)
+ end
- expect(response).to have_gitlab_http_status(202)
+ it 'forbids the request' do
+ post api(path, user)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
end
end
- context 'when user is not a group owner' do
+ context 'group_import_export feature flag disabled' do
before do
- group.add_developer(user)
+ stub_feature_flags(group_import_export: false)
end
- it 'forbids the request' do
+ it 'responds with 404 Not Found' do
post api(path, user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(404)
end
end
end
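
Condensed, the behaviour the export spec above pins down: the `group_import_export` flag gates the whole endpoint (404 when disabled), and only owners may trigger an export. The lookup below is an illustrative sketch, not GitLab code.

# Sketch only: expected status of POST /groups/:id/export by flag state and role.
def export_request_status(flag_enabled:, owner:)
  return 404 unless flag_enabled # a disabled flag hides the endpoint entirely

  owner ? 202 : 403 # owners get "accepted"; other members are forbidden
end

raise 'owner with flag should be 202'    unless export_request_status(flag_enabled: true,  owner: true)  == 202
raise 'non-owner with flag should be 403' unless export_request_status(flag_enabled: true,  owner: false) == 403
raise 'disabled flag should be 404'       unless export_request_status(flag_enabled: false, owner: true)  == 404
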
diff --git a/spec/requests/api/group_import_spec.rb b/spec/requests/api/group_import_spec.rb
new file mode 100644
index 00000000000..1594881677f
--- /dev/null
+++ b/spec/requests/api/group_import_spec.rb
@@ -0,0 +1,304 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::GroupImport do
+ include WorkhorseHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let(:path) { '/groups/import' }
+ let(:file) { File.join('spec', 'fixtures', 'group_export.tar.gz') }
+ let(:export_path) { "#{Dir.tmpdir}/group_export_spec" }
+ let(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
+ let(:workhorse_header) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token } }
+
+ before do
+ allow_next_instance_of(Gitlab::ImportExport) do |import_export|
+ expect(import_export).to receive(:storage_path).and_return(export_path)
+ end
+
+ stub_uploads_object_storage(ImportExportUploader)
+ end
+
+ after do
+ FileUtils.rm_rf(export_path, secure: true)
+ end
+
+ describe 'POST /groups/import' do
+ let(:file_upload) { fixture_file_upload(file) }
+ let(:params) do
+ {
+ path: 'test-import-group',
+ name: 'test-import-group',
+ file: fixture_file_upload(file)
+ }
+ end
+
+ subject { post api('/groups/import', user), params: params, headers: workhorse_header }
+
+ shared_examples 'when all params are correct' do
+ context 'when user is authorized to create new group' do
+ it 'creates new group and accepts request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(202)
+ end
+
+ it 'creates private group' do
+ expect { subject }.to change { Group.count }.by(1)
+
+ group = Group.find_by(name: 'test-import-group')
+
+ expect(group.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ context 'when importing to a parent group' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'creates new group and accepts request' do
+ params[:parent_id] = group.id
+
+ subject
+
+ expect(response).to have_gitlab_http_status(202)
+ expect(group.children.count).to eq(1)
+ end
+
+ context 'when parent group is private or internal' do
+ let(:public_parent_group) { create(:group, :public) }
+ let(:internal_parent_group) { create(:group, :internal) }
+
+ before do
+ public_parent_group.add_owner(user)
+ internal_parent_group.add_owner(user)
+ end
+
+ it 'imports public group' do
+ params[:parent_id] = public_parent_group.id
+
+ subject
+
+ expect(response).to have_gitlab_http_status(202)
+ expect(public_parent_group.children.first.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ end
+
+ it 'imports internal group' do
+ params[:parent_id] = internal_parent_group.id
+
+ subject
+
+ expect(response).to have_gitlab_http_status(202)
+ expect(internal_parent_group.children.first.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
+ end
+ end
+
+ context 'when parent group is invalid' do
+ it 'returns 404 and does not create new group' do
+ params[:parent_id] = 99999
+
+ expect { subject }.not_to change { Group.count }
+
+ expect(response).to have_gitlab_http_status(404)
+ expect(json_response['message']).to eq('404 Group Not Found')
+ end
+
+ context 'when user is not an owner of parent group' do
+ it 'returns 403 Forbidden HTTP status' do
+ params[:parent_id] = create(:group).id
+
+ subject
+
+ expect(response).to have_gitlab_http_status(403)
+ expect(json_response['message']).to eq('403 Forbidden')
+ end
+ end
+ end
+ end
+
+ context 'when group creation failed' do
+ before do
+ allow_next_instance_of(Group) do |group|
+ allow(group).to receive(:persisted?).and_return(false)
+ end
+ end
+
+ it 'returns 400 HTTP status' do
+ subject
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+ end
+
+ context 'when user is not authorized to create new group' do
+ let(:user) { create(:user, can_create_group: false) }
+
+ it 'forbids the request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+ end
+
+ shared_examples 'when some params are missing' do
+ context 'when required params are missing' do
+ shared_examples 'missing parameter' do |params, error_message|
+ it 'returns 400 HTTP status' do
+ params[:file] = file_upload
+
+ expect do
+ post api('/groups/import', user), params: params, headers: workhorse_header
+ end.not_to change { Group.count }.from(1)
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response['error']).to eq(error_message)
+ end
+ end
+
+ include_examples 'missing parameter', { name: 'test' }, 'path is missing'
+ include_examples 'missing parameter', { path: 'test' }, 'name is missing'
+ end
+ end
+
+ context 'with object storage disabled' do
+ before do
+ stub_uploads_object_storage(ImportExportUploader, enabled: false)
+ end
+
+ context 'without a file from workhorse' do
+ it 'rejects the request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'without a workhorse header' do
+ it 'rejects request without a workhorse header' do
+ post api('/groups/import', user), params: params
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when params from workhorse are correct' do
+ let(:params) do
+ {
+ path: 'test-import-group',
+ name: 'test-import-group',
+ 'file.path' => file_upload.path,
+ 'file.name' => file_upload.original_filename
+ }
+ end
+
+ include_examples 'when all params are correct'
+ include_examples 'when some params are missing'
+ end
+
+ it "doesn't attempt to migrate file to object storage" do
+ expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ context 'with object storage enabled' do
+ before do
+ stub_uploads_object_storage(ImportExportUploader, enabled: true)
+
+ allow(ImportExportUploader).to receive(:workhorse_upload_path).and_return('/')
+ end
+
+ context 'with direct upload enabled' do
+ let(:file_name) { 'group_export.tar.gz' }
+ let!(:fog_connection) do
+ stub_uploads_object_storage(ImportExportUploader, direct_upload: true)
+ end
+ let(:tmp_object) do
+ fog_connection.directories.new(key: 'uploads').files.create(
+ key: "tmp/uploads/#{file_name}",
+ body: file_upload
+ )
+ end
+ let(:fog_file) { fog_to_uploaded_file(tmp_object) }
+ let(:params) do
+ {
+ path: 'test-import-group',
+ name: 'test-import-group',
+ file: fog_file,
+ 'file.remote_id' => file_name,
+ 'file.size' => fog_file.size
+ }
+ end
+
+ it 'accepts the request and stores the file' do
+ expect { subject }.to change { Group.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+
+ include_examples 'when all params are correct'
+ include_examples 'when some params are missing'
+ end
+ end
+ end
+
+ describe 'POST /groups/import/authorize' do
+ subject { post api('/groups/import/authorize', user), headers: workhorse_header }
+
+ it 'authorizes importing group with workhorse header' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ end
+
+ it 'rejects requests that bypassed gitlab-workhorse' do
+ workhorse_header.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'when using remote storage' do
+ context 'when direct upload is enabled' do
+ before do
+ stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: true)
+ end
+
+ it 'responds with status 200, location of file remote store and object details' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response).not_to have_key('TempPath')
+ expect(json_response['RemoteObject']).to have_key('ID')
+ expect(json_response['RemoteObject']).to have_key('GetURL')
+ expect(json_response['RemoteObject']).to have_key('StoreURL')
+ expect(json_response['RemoteObject']).to have_key('DeleteURL')
+ expect(json_response['RemoteObject']).to have_key('MultipartUpload')
+ end
+ end
+
+ context 'when direct upload is disabled' do
+ before do
+ stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: false)
+ end
+
+ it 'handles as a local file' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response['TempPath']).to eq(ImportExportUploader.workhorse_local_upload_path)
+ expect(json_response['RemoteObject']).to be_nil
+ end
+ end
+ end
+ end
+end
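
The `workhorse_token` / `workhorse_header` pair above marks the request as having passed through gitlab-workhorse. A sketch of the same handshake with the ruby-jwt gem; the secret and the literal header name are placeholders (GitLab reads them from `Gitlab::Workhorse.secret` and `Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER`).

require 'jwt'

secret = 'dummy-shared-secret' # placeholder; GitLab uses the shared workhorse secret
token  = JWT.encode({ 'iss' => 'gitlab-workhorse' }, secret, 'HS256')

workhorse_header = {
  'GitLab-Workhorse' => '1.0',
  'Gitlab-Workhorse-Api-Request' => token # assumed value of INTERNAL_API_REQUEST_HEADER
}

# The receiving side verifies the signature and the issuer claim:
payload, _jwt_header = JWT.decode(token, secret, true, algorithm: 'HS256')
raise 'unexpected issuer' unless payload['iss'] == 'gitlab-workhorse'

puts workhorse_header.keys.inspect
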
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index 12e6e7c7a09..c3b5f9ded21 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -16,7 +16,7 @@ describe API::Internal::Base do
get api("/internal/check"), params: { secret_token: secret_token }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['api_version']).to eq(API::API.version)
expect(json_response['redis']).to be(true)
end
@@ -34,13 +34,13 @@ describe API::Internal::Base do
get api("/internal/check"),
headers: { API::Helpers::GITLAB_SHARED_SECRET_HEADER => Base64.encode64(secret_token) }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns 401 when no credentials provided' do
get(api("/internal/check"))
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
@@ -126,7 +126,7 @@ describe API::Internal::Base do
it 'returns the correct information about the key' do
lfs_auth_key(key.id, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['username']).to eq(user.username)
expect(json_response['repository_http_path']).to eq(project.http_url_to_repo)
expect(json_response['expires_in']).to eq(Gitlab::LfsToken::DEFAULT_EXPIRE_TIME)
@@ -136,7 +136,7 @@ describe API::Internal::Base do
it 'returns the correct information about the user' do
lfs_auth_user(user.id, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['username']).to eq(user.username)
expect(json_response['repository_http_path']).to eq(project.http_url_to_repo)
expect(Gitlab::LfsToken.new(user).token_valid?(json_response['lfs_token'])).to be_truthy
@@ -145,19 +145,19 @@ describe API::Internal::Base do
it 'returns a 404 when no key or user is provided' do
lfs_auth_project(project)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 when the wrong key is provided' do
lfs_auth_key(key.id + 12345, project)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 when the wrong user is provided' do
lfs_auth_user(user.id + 12345, project)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -167,7 +167,7 @@ describe API::Internal::Base do
it 'returns the correct information about the key' do
lfs_auth_key(key.id, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['username']).to eq("lfs+deploy-key-#{key.id}")
expect(json_response['repository_http_path']).to eq(project.http_url_to_repo)
expect(Gitlab::LfsToken.new(key).token_valid?(json_response['lfs_token'])).to be_truthy
@@ -179,7 +179,7 @@ describe API::Internal::Base do
it "finds a user by key id" do
get(api("/internal/discover"), params: { key_id: key.id, secret_token: secret_token })
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(user.name)
end
@@ -187,7 +187,7 @@ describe API::Internal::Base do
it "finds a user by username" do
get(api("/internal/discover"), params: { username: user.username, secret_token: secret_token })
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(user.name)
end
@@ -195,7 +195,7 @@ describe API::Internal::Base do
it 'responds successfully when a user is not found' do
get(api('/internal/discover'), params: { username: 'noone', secret_token: secret_token })
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq('null')
end
@@ -203,7 +203,7 @@ describe API::Internal::Base do
it 'responds successfully when passing invalid params' do
get(api('/internal/discover'), params: { nothing: 'to find a user', secret_token: secret_token })
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to eq('null')
end
@@ -268,7 +268,7 @@ describe API::Internal::Base do
end
context 'with env passed as a JSON' do
- let(:gl_repository) { Gitlab::GlRepository::WIKI.identifier_for_subject(project) }
+ let(:gl_repository) { Gitlab::GlRepository::WIKI.identifier_for_container(project) }
it 'sets env in RequestStore' do
obj_dir_relative = './objects'
@@ -284,7 +284,7 @@ describe API::Internal::Base do
GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: alt_obj_dirs_relative
}.to_json)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -292,7 +292,7 @@ describe API::Internal::Base do
it 'responds with success' do
push(key, project.wiki)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response["status"]).to be_truthy
expect(json_response["gl_project_path"]).to eq(project.wiki.full_path)
expect(json_response["gl_repository"]).to eq("wiki-#{project.id}")
@@ -304,7 +304,7 @@ describe API::Internal::Base do
it 'responds with success' do
pull(key, project.wiki)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response["status"]).to be_truthy
expect(json_response["gl_project_path"]).to eq(project.wiki.full_path)
expect(json_response["gl_repository"]).to eq("wiki-#{project.id}")
@@ -313,10 +313,14 @@ describe API::Internal::Base do
end
context "git pull" do
+ before do
+ allow(Feature).to receive(:persisted_names).and_return(%w[gitaly_mep_mep])
+ end
+
it "has the correct payload" do
pull(key, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response["status"]).to be_truthy
expect(json_response["gl_repository"]).to eq("project-#{project.id}")
expect(json_response["gl_project_path"]).to eq(project.full_path)
@@ -326,7 +330,7 @@ describe API::Internal::Base do
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
- expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-get-tag-messages-go' => 'true', 'gitaly-feature-filter-shas-with-signatures-go' => 'true', 'gitaly-feature-cache-invalidator' => 'true')
+ expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-mep-mep' => 'true')
expect(user.reload.last_activity_on).to eql(Date.today)
end
end
@@ -336,7 +340,7 @@ describe API::Internal::Base do
it do
push(key, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response["status"]).to be_truthy
expect(json_response["gl_repository"]).to eq("project-#{project.id}")
expect(json_response["gl_project_path"]).to eq(project.full_path)
@@ -346,7 +350,6 @@ describe API::Internal::Base do
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
- expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-get-tag-messages-go' => 'true', 'gitaly-feature-filter-shas-with-signatures-go' => 'true', 'gitaly-feature-cache-invalidator' => 'true')
expect(user.reload.last_activity_on).to be_nil
end
end
@@ -406,7 +409,7 @@ describe API::Internal::Base do
it do
pull(key, project)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(json_response["status"]).to be_falsey
expect(user.reload.last_activity_on).to be_nil
end
@@ -416,7 +419,7 @@ describe API::Internal::Base do
it do
push(key, project)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(json_response["status"]).to be_falsey
expect(user.reload.last_activity_on).to be_nil
end
@@ -461,7 +464,7 @@ describe API::Internal::Base do
it do
push(key, project)
- expect(response).to have_gitlab_http_status(300)
+ expect(response).to have_gitlab_http_status(:multiple_choices)
expect(json_response['status']).to be_truthy
expect(json_response['payload']).to eql(payload)
expect(json_response['gl_console_messages']).to eql(console_messages)
@@ -480,7 +483,7 @@ describe API::Internal::Base do
it "has the correct payload" do
pull(key, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['gl_console_messages']).to eq([])
end
end
@@ -497,7 +500,7 @@ describe API::Internal::Base do
pull(key, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['gl_console_messages']).to eq(console_messages)
end
end
@@ -515,7 +518,7 @@ describe API::Internal::Base do
it do
pull(key, personal_project)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(json_response["status"]).to be_falsey
expect(user.reload.last_activity_on).to be_nil
end
@@ -525,7 +528,7 @@ describe API::Internal::Base do
it do
push(key, personal_project)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(json_response["status"]).to be_falsey
expect(user.reload.last_activity_on).to be_nil
end
@@ -542,7 +545,7 @@ describe API::Internal::Base do
end
push(key, personal_project)
- expect(response).to have_gitlab_http_status(503)
+ expect(response).to have_gitlab_http_status(:service_unavailable)
expect(json_response['status']).to be_falsey
expect(json_response['message']).to eq("Foo")
expect(user.reload.last_activity_on).to be_nil
@@ -560,7 +563,7 @@ describe API::Internal::Base do
it do
pull(key, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response["status"]).to be_truthy
end
end
@@ -569,7 +572,7 @@ describe API::Internal::Base do
it do
push(key, project)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(json_response["status"]).to be_falsey
end
end
@@ -586,7 +589,7 @@ describe API::Internal::Base do
it do
archive(key, project)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response["status"]).to be_truthy
expect(json_response["gitaly"]).not_to be_nil
expect(json_response["gitaly"]["repository"]).not_to be_nil
@@ -594,7 +597,6 @@ describe API::Internal::Base do
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
- expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-get-tag-messages-go' => 'true', 'gitaly-feature-filter-shas-with-signatures-go' => 'true', 'gitaly-feature-cache-invalidator' => 'true')
end
end
@@ -602,7 +604,7 @@ describe API::Internal::Base do
it do
archive(key, project)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response["status"]).to be_falsey
end
end
@@ -614,7 +616,7 @@ describe API::Internal::Base do
pull(key, project)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response["status"]).to be_falsey
end
@@ -630,7 +632,7 @@ describe API::Internal::Base do
}
)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response["status"]).to be_falsey
end
end
@@ -639,7 +641,7 @@ describe API::Internal::Base do
it do
pull(OpenStruct.new(id: 0), project)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response["status"]).to be_falsey
end
end
@@ -712,14 +714,14 @@ describe API::Internal::Base do
it 'rejects the push' do
push(key, project)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['status']).to be_falsy
end
it 'rejects the SSH pull' do
pull(key, project)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['status']).to be_falsy
end
end
@@ -748,7 +750,7 @@ describe API::Internal::Base do
#
# post api("/internal/notify_post_receive"), valid_params
#
- # expect(response).to have_gitlab_http_status(200)
+ # expect(response).to have_gitlab_http_status(:ok)
# end
#
# it "calls the Gitaly client with the wiki's repository if it's a wiki" do
@@ -760,7 +762,7 @@ describe API::Internal::Base do
#
# post api("/internal/notify_post_receive"), valid_wiki_params
#
- # expect(response).to have_gitlab_http_status(200)
+ # expect(response).to have_gitlab_http_status(:ok)
# end
#
# it "returns 500 if the gitaly call fails" do
@@ -769,7 +771,7 @@ describe API::Internal::Base do
#
# post api("/internal/notify_post_receive"), valid_params
#
- # expect(response).to have_gitlab_http_status(500)
+ # expect(response).to have_gitlab_http_status(:internal_server_error)
# end
#
# context 'with a gl_repository parameter' do
@@ -790,7 +792,7 @@ describe API::Internal::Base do
#
# post api("/internal/notify_post_receive"), valid_params
#
- # expect(response).to have_gitlab_http_status(200)
+ # expect(response).to have_gitlab_http_status(:ok)
# end
#
# it "calls the Gitaly client with the wiki's repository if it's a wiki" do
@@ -802,13 +804,15 @@ describe API::Internal::Base do
#
# post api("/internal/notify_post_receive"), valid_wiki_params
#
- # expect(response).to have_gitlab_http_status(200)
+ # expect(response).to have_gitlab_http_status(:ok)
# end
# end
# end
describe 'POST /internal/post_receive', :clean_gitlab_redis_shared_state do
let(:identifier) { 'key-123' }
+ let(:branch_name) { 'feature' }
+ let(:push_options) { ['ci.skip', 'another push option'] }
let(:valid_params) do
{
@@ -820,192 +824,33 @@ describe API::Internal::Base do
}
end
- let(:branch_name) { 'feature' }
-
let(:changes) do
"#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{branch_name}"
end
- let(:push_options) do
- ['ci.skip',
- 'another push option']
- end
+ subject { post api('/internal/post_receive'), params: valid_params }
before do
project.add_developer(user)
allow_any_instance_of(Gitlab::Identifier).to receive(:identify).and_return(user)
end
- it 'enqueues a PostReceive worker job' do
- expect(PostReceive).to receive(:perform_async)
- .with(gl_repository, identifier, changes, { ci: { skip: true } })
-
- post api('/internal/post_receive'), params: valid_params
- end
-
- it 'decreases the reference counter and returns the result' do
- expect(Gitlab::ReferenceCounter).to receive(:new).with(gl_repository)
- .and_return(reference_counter)
- expect(reference_counter).to receive(:decrease).and_return(true)
-
- post api('/internal/post_receive'), params: valid_params
-
- expect(json_response['reference_counter_decreased']).to be(true)
- end
-
- it 'returns link to create new merge request' do
- post api('/internal/post_receive'), params: valid_params
-
+ it 'executes PostReceiveService' do
message = <<~MESSAGE.strip
To create a merge request for #{branch_name}, visit:
- http://#{Gitlab.config.gitlab.host}/#{project.full_path}/merge_requests/new?merge_request%5Bsource_branch%5D=#{branch_name}
+ http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=#{branch_name}
MESSAGE
- expect(json_response['messages']).to include(build_basic_message(message))
- end
-
- it 'returns the link to an existing merge request when it exists' do
- merge_request = create(:merge_request, source_project: project, source_branch: branch_name, target_branch: 'master')
+ subject
- post api('/internal/post_receive'), params: valid_params
-
- message = <<~MESSAGE.strip
- View merge request for feature:
- #{project_merge_request_url(project, merge_request)}
- MESSAGE
-
- expect(json_response['messages']).to include(build_basic_message(message))
- end
-
- it 'returns no merge request messages if printing_merge_request_link_enabled is false' do
- project.update!(printing_merge_request_link_enabled: false)
-
- post api('/internal/post_receive'), params: valid_params
-
- expect(json_response['messages']).to be_blank
- end
-
- it 'does not invoke MergeRequests::PushOptionsHandlerService' do
- expect(MergeRequests::PushOptionsHandlerService).not_to receive(:new)
-
- post api('/internal/post_receive'), params: valid_params
+ expect(json_response).to eq({
+ 'messages' => [{ 'message' => message, 'type' => 'basic' }],
+ 'reference_counter_decreased' => true
+ })
end
it_behaves_like 'storing arguments in the application context' do
let(:expected_params) { { user: user.username, project: project.full_path } }
-
- subject { post api('/internal/post_receive'), params: valid_params }
- end
-
- context 'when there are merge_request push options' do
- before do
- valid_params[:push_options] = ['merge_request.create']
- end
-
- it 'invokes MergeRequests::PushOptionsHandlerService' do
- expect(MergeRequests::PushOptionsHandlerService).to receive(:new)
-
- post api('/internal/post_receive'), params: valid_params
- end
-
- it 'creates a new merge request' do
- expect do
- Sidekiq::Testing.fake! do
- post api('/internal/post_receive'), params: valid_params
- end
- end.to change { MergeRequest.count }.by(1)
- end
-
- it 'links to the newly created merge request' do
- post api('/internal/post_receive'), params: valid_params
-
- message = <<~MESSAGE.strip
- View merge request for #{branch_name}:
- http://#{Gitlab.config.gitlab.host}/#{project.full_path}/merge_requests/1
- MESSAGE
-
- expect(json_response['messages']).to include(build_basic_message(message))
- end
-
- it 'adds errors on the service instance to warnings' do
- expect_any_instance_of(
- MergeRequests::PushOptionsHandlerService
- ).to receive(:errors).at_least(:once).and_return(['my error'])
-
- post api('/internal/post_receive'), params: valid_params
-
- message = "WARNINGS:\nError encountered with push options 'merge_request.create': my error"
- expect(json_response['messages']).to include(build_alert_message(message))
- end
-
- it 'adds ActiveRecord errors on invalid MergeRequest records to warnings' do
- invalid_merge_request = MergeRequest.new
- invalid_merge_request.errors.add(:base, 'my error')
-
- expect_any_instance_of(
- MergeRequests::CreateService
- ).to receive(:execute).and_return(invalid_merge_request)
-
- post api('/internal/post_receive'), params: valid_params
-
- message = "WARNINGS:\nError encountered with push options 'merge_request.create': my error"
- expect(json_response['messages']).to include(build_alert_message(message))
- end
- end
-
- context 'broadcast message exists' do
- let!(:broadcast_message) { create(:broadcast_message, starts_at: 1.day.ago, ends_at: 1.day.from_now ) }
-
- it 'outputs a broadcast message' do
- post api('/internal/post_receive'), params: valid_params
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response['messages']).to include(build_alert_message(broadcast_message.message))
- end
- end
-
- context 'broadcast message does not exist' do
- it 'does not output a broadcast message' do
- post api('/internal/post_receive'), params: valid_params
-
- expect(response).to have_gitlab_http_status(200)
- expect(has_alert_messages?(json_response['messages'])).to be_falsey
- end
- end
-
- context 'nil broadcast message' do
- it 'does not output a broadcast message' do
- allow(BroadcastMessage).to receive(:current).and_return(nil)
-
- post api('/internal/post_receive'), params: valid_params
-
- expect(response).to have_gitlab_http_status(200)
- expect(has_alert_messages?(json_response['messages'])).to be_falsey
- end
- end
-
- context 'with a redirected data' do
- it 'returns redirected message on the response' do
- project_moved = Gitlab::Checks::ProjectMoved.new(project, user, 'http', 'foo/baz')
- project_moved.add_message
-
- post api('/internal/post_receive'), params: valid_params
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response['messages']).to include(build_basic_message(project_moved.message))
- end
- end
-
- context 'with new project data' do
- it 'returns new project message on the response' do
- project_created = Gitlab::Checks::ProjectCreated.new(project, user, 'http')
- project_created.add_message
-
- post api('/internal/post_receive'), params: valid_params
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response['messages']).to include(build_basic_message(project_created.message))
- end
end
context 'with an orphaned write deploy key' do
@@ -1014,9 +859,9 @@ describe API::Internal::Base do
expect(Gitlab::Checks::ProjectMoved).not_to receive(:fetch_message)
- post api('/internal/post_receive'), params: valid_params
+ subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -1028,9 +873,9 @@ describe API::Internal::Base do
expect(Gitlab::Checks::ProjectMoved).not_to receive(:fetch_message)
- post api('/internal/post_receive'), params: valid_params
+ subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -1054,9 +899,9 @@ describe API::Internal::Base do
def gl_repository_for(project_or_wiki)
case project_or_wiki
when ProjectWiki
- Gitlab::GlRepository::WIKI.identifier_for_subject(project_or_wiki.project)
+ Gitlab::GlRepository::WIKI.identifier_for_container(project_or_wiki.project)
when Project
- Gitlab::GlRepository::PROJECT.identifier_for_subject(project_or_wiki)
+ Gitlab::GlRepository::PROJECT.identifier_for_container(project_or_wiki)
else
nil
end
@@ -1140,18 +985,4 @@ describe API::Internal::Base do
}
)
end
-
- def build_alert_message(message)
- { 'type' => 'alert', 'message' => message }
- end
-
- def build_basic_message(message)
- { 'type' => 'basic', 'message' => message }
- end
-
- def has_alert_messages?(messages)
- messages.any? do |message|
- message['type'] == 'alert'
- end
- end
end
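
The post_receive hunks above collapse many near-identical `post api('/internal/post_receive'), params: valid_params` calls into a single request defined once with `subject`, which both the remaining example and the shared-example block then reuse. A minimal sketch of that pattern, assuming plain RSpec with an illustrative client double rather than GitLab's `api` helper:

    # Run with `rspec`; the client, route and params here are illustrative.
    RSpec.describe 'POST /widgets' do
      subject(:request) { client.post('/widgets', params: valid_params) }

      let(:client)       { double('client') }
      let(:valid_params) { { name: 'demo' } }

      before do
        # Stub the transport so the example group stays self-contained.
        allow(client).to receive(:post).and_return(double(status: 201))
      end

      shared_examples 'an accepted request' do
        it 'returns 201' do
          expect(request.status).to eq(201)
        end
      end

      # A plain example and a shared example reuse the same subject.
      it('returns 201 on the happy path') { expect(request.status).to eq(201) }
      it_behaves_like 'an accepted request'
    end

The same idea underlies `subject { post api('/internal/post_receive'), params: valid_params }` above: the request lives in one place and the examples only state expectations.
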
diff --git a/spec/requests/api/internal/pages_spec.rb b/spec/requests/api/internal/pages_spec.rb
index 2887163fe58..9a8c1a0e03b 100644
--- a/spec/requests/api/internal/pages_spec.rb
+++ b/spec/requests/api/internal/pages_spec.rb
@@ -22,7 +22,7 @@ describe API::Internal::Pages do
it 'responds with 404 Not Found' do
query_host('pages.gitlab.io')
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -31,7 +31,7 @@ describe API::Internal::Pages do
it 'responds with 401 Unauthorized' do
query_host('pages.gitlab.io')
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -51,7 +51,7 @@ describe API::Internal::Pages do
it 'responds with 204 no content' do
query_host('pages.gitlab.io')
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(response.body).to be_empty
end
end
@@ -65,7 +65,7 @@ describe API::Internal::Pages do
it 'responds with 204 No Content' do
query_host('pages.gitlab.io')
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
@@ -75,7 +75,7 @@ describe API::Internal::Pages do
query_host('pages.gitlab.io')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('internal/pages/virtual_domain')
expect(json_response['certificate']).to eq(pages_domain.certificate)
@@ -114,7 +114,7 @@ describe API::Internal::Pages do
query_host('mygroup.gitlab-pages.io')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('internal/pages/virtual_domain')
expect(json_response['lookup_paths']).to eq(
@@ -141,7 +141,7 @@ describe API::Internal::Pages do
query_host('mygroup.gitlab-pages.io')
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('internal/pages/virtual_domain')
expect(json_response['lookup_paths']).to eq(
diff --git a/spec/requests/api/issues/get_group_issues_spec.rb b/spec/requests/api/issues/get_group_issues_spec.rb
index ef63902ffd7..0a95f9114a5 100644
--- a/spec/requests/api/issues/get_group_issues_spec.rb
+++ b/spec/requests/api/issues/get_group_issues_spec.rb
@@ -3,18 +3,16 @@
require 'spec_helper'
describe API::Issues do
- set(:user) { create(:user) }
- let(:user2) { create(:user) }
- let(:non_member) { create(:user) }
- set(:guest) { create(:user) }
- set(:author) { create(:author) }
- set(:assignee) { create(:assignee) }
- let(:admin) { create(:user, :admin) }
-
- let(:issue_title) { 'foo' }
- let(:issue_description) { 'closed' }
-
- let(:no_milestone_title) { 'None' }
+ let_it_be(:user) { create(:user) }
+ let(:user2) { create(:user) }
+ let(:non_member) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:author) { create(:author) }
+ let_it_be(:assignee) { create(:assignee) }
+ let(:admin) { create(:user, :admin) }
+ let(:issue_title) { 'foo' }
+ let(:issue_description) { 'closed' }
+ let(:no_milestone_title) { 'None' }
let(:any_milestone_title) { 'Any' }
before do
@@ -74,7 +72,7 @@ describe API::Issues do
it 'returns issues statistics' do
get api("/groups/#{group.id}/issues_statistics", user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['statistics']).not_to be_nil
expect(json_response['statistics']['counts']['all']).to eq counts[:all]
expect(json_response['statistics']['counts']['closed']).to eq counts[:closed]
@@ -345,7 +343,7 @@ describe API::Issues do
it 'exposes known attributes' do
get api(base_url, admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.last.keys).to include(*%w(id iid project_id title description))
expect(json_response.last).not_to have_key('subscribed')
end
@@ -529,7 +527,7 @@ describe API::Issues do
it 'returns an array of issues with no milestone' do
get api(base_url, user), params: { milestone: no_milestone_title }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect_paginated_array_response(group_confidential_issue.id)
end
@@ -676,20 +674,20 @@ describe API::Issues do
it 'returns error when multiple assignees are passed' do
get api(base_url, user), params: { assignee_username: [assignee.username, another_assignee.username], scope: 'all' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response["error"]).to include("allows one value, but found 2")
end
it 'returns error when assignee_username and assignee_id are passed together' do
get api(base_url, user), params: { assignee_username: [assignee.username], assignee_id: another_assignee.id, scope: 'all' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response["error"]).to include("mutually exclusive")
end
end
end
- context "#to_reference" do
+ describe "#to_reference" do
it 'exposes reference path in context of group' do
get api(base_url, user)
diff --git a/spec/requests/api/issues/get_project_issues_spec.rb b/spec/requests/api/issues/get_project_issues_spec.rb
index e031cc9b0c6..539841fe460 100644
--- a/spec/requests/api/issues/get_project_issues_spec.rb
+++ b/spec/requests/api/issues/get_project_issues_spec.rb
@@ -3,18 +3,18 @@
require 'spec_helper'
describe API::Issues do
- set(:user) { create(:user) }
- set(:project) { create(:project, :public, :repository, creator_id: user.id, namespace: user.namespace) }
- set(:private_mrs_project) do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :public, :repository, creator_id: user.id, namespace: user.namespace) }
+ let_it_be(:private_mrs_project) do
create(:project, :public, :repository, creator_id: user.id, namespace: user.namespace, merge_requests_access_level: ProjectFeature::PRIVATE)
end
- let(:user2) { create(:user) }
- let(:non_member) { create(:user) }
- set(:guest) { create(:user) }
- set(:author) { create(:author) }
- set(:assignee) { create(:assignee) }
- let(:admin) { create(:user, :admin) }
+ let(:user2) { create(:user) }
+ let(:non_member) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:author) { create(:author) }
+ let_it_be(:assignee) { create(:assignee) }
+ let(:admin) { create(:user, :admin) }
let(:issue_title) { 'foo' }
let(:issue_description) { 'closed' }
let!(:closed_issue) do
@@ -48,12 +48,12 @@ describe API::Issues do
title: issue_title,
description: issue_description
end
- set(:label) do
+ let_it_be(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project)
end
let!(:label_link) { create(:label_link, label: label, target: issue) }
let(:milestone) { create(:milestone, title: '1.0.0', project: project) }
- set(:empty_milestone) do
+ let_it_be(:empty_milestone) do
create(:milestone, title: '2.0.0', project: project)
end
let!(:note) { create(:note_on_issue, author: user, project: project, noteable: issue) }
@@ -93,7 +93,7 @@ describe API::Issues do
it 'returns project issues statistics' do
get api("/issues_statistics", user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['statistics']).not_to be_nil
expect(json_response['statistics']['counts']['all']).to eq counts[:all]
expect(json_response['statistics']['counts']['closed']).to eq counts[:closed]
@@ -196,7 +196,7 @@ describe API::Issues do
get api("/projects/#{max_project_id + 1}/issues", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 on private projects for other users' do
@@ -205,7 +205,7 @@ describe API::Issues do
get api("/projects/#{private_project.id}/issues", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns no issues when user has access to project but not issues' do
@@ -472,7 +472,7 @@ describe API::Issues do
it 'exposes known attributes' do
get api("#{base_url}/issues", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.last.keys).to include(*%w(id iid project_id title description))
expect(json_response.last).not_to have_key('subscribed')
end
@@ -565,14 +565,14 @@ describe API::Issues do
it 'returns error when multiple assignees are passed' do
get api("/issues", user), params: { assignee_username: [assignee.username, another_assignee.username], scope: 'all' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response["error"]).to include("allows one value, but found 2")
end
it 'returns error when assignee_username and assignee_id are passed together' do
get api("/issues", user), params: { assignee_username: [assignee.username], assignee_id: another_assignee.id, scope: 'all' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response["error"]).to include("mutually exclusive")
end
end
@@ -583,14 +583,14 @@ describe API::Issues do
it 'returns public issues' do
get api("/projects/#{project.id}/issues/#{issue.iid}")
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
it 'exposes known attributes' do
get api("/projects/#{project.id}/issues/#{issue.iid}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(issue.id)
expect(json_response['iid']).to eq(issue.iid)
expect(json_response['project_id']).to eq(issue.project.id)
@@ -630,7 +630,7 @@ describe API::Issues do
it 'exposes the closed_at attribute' do
get api("/projects/#{project.id}/issues/#{closed_issue.iid}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['closed_at']).to be_present
end
@@ -650,39 +650,39 @@ describe API::Issues do
it 'returns a project issue by internal id' do
get api("/projects/#{project.id}/issues/#{issue.iid}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(issue.title)
expect(json_response['iid']).to eq(issue.iid)
end
it 'returns 404 if issue id not found' do
get api("/projects/#{project.id}/issues/54321", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 if the issue ID is used' do
get api("/projects/#{project.id}/issues/#{issue.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context 'confidential issues' do
it 'returns 404 for non project members' do
get api("/projects/#{project.id}/issues/#{confidential_issue.iid}", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 for project members with guest role' do
get api("/projects/#{project.id}/issues/#{confidential_issue.iid}", guest)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns confidential issue for project members' do
get api("/projects/#{project.id}/issues/#{confidential_issue.iid}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(confidential_issue.title)
expect(json_response['iid']).to eq(confidential_issue.iid)
end
@@ -690,7 +690,7 @@ describe API::Issues do
it 'returns confidential issue for author' do
get api("/projects/#{project.id}/issues/#{confidential_issue.iid}", author)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(confidential_issue.title)
expect(json_response['iid']).to eq(confidential_issue.iid)
end
@@ -698,7 +698,7 @@ describe API::Issues do
it 'returns confidential issue for assignee' do
get api("/projects/#{project.id}/issues/#{confidential_issue.iid}", assignee)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(confidential_issue.title)
expect(json_response['iid']).to eq(confidential_issue.iid)
end
@@ -706,7 +706,7 @@ describe API::Issues do
it 'returns confidential issue for admin' do
get api("/projects/#{project.id}/issues/#{confidential_issue.iid}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(confidential_issue.title)
expect(json_response['iid']).to eq(confidential_issue.iid)
end
@@ -744,7 +744,7 @@ describe API::Issues do
it "returns 404 when issue doesn't exists" do
get api("/projects/#{project.id}/issues/0/closed_by", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -774,7 +774,7 @@ describe API::Issues do
get_related_merge_requests(project.id, issue.iid)
expect_paginated_array_response(related_mr.id)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.last).not_to have_key('subscribed')
end
@@ -785,7 +785,7 @@ describe API::Issues do
get_related_merge_requests(private_project.id, private_issue.iid)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -824,7 +824,7 @@ describe API::Issues do
it "returns 404 when issue doesn't exists" do
get_related_merge_requests(project.id, 0, user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -835,14 +835,14 @@ describe API::Issues do
it 'returns unauthorized' do
get api("/projects/#{project.id}/issues/#{issue.iid}/user_agent_detail")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
it 'exposes known attributes' do
get api("/projects/#{project.id}/issues/#{issue.iid}/user_agent_detail", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['user_agent']).to eq(user_agent_detail.user_agent)
expect(json_response['ip_address']).to eq(user_agent_detail.ip_address)
expect(json_response['akismet_submitted']).to eq(user_agent_detail.submitted)
@@ -851,7 +851,7 @@ describe API::Issues do
it 'returns unauthorized for non-admin users' do
get api("/projects/#{project.id}/issues/#{issue.iid}/user_agent_detail", user)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -863,7 +863,7 @@ describe API::Issues do
it 'returns 404 if the issue is confidential' do
post api("/projects/#{project.id}/issues/#{confidential_issue.iid}/participants", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
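
The `set(...)` definitions replaced above come from GitLab's retired spec helper; `let_it_be` is the test-prof gem's substitute and builds the record once per example group (with `reload: true` re-reading it from the database before each example) instead of once per example as `let` does. A minimal sketch of the difference, assuming test-prof is installed and using an illustrative ActiveRecord model:

    require 'test_prof/recipes/rspec/let_it_be'

    RSpec.describe Widget do
      # Built once for the whole group; cheaper when many examples share it.
      let_it_be(:shared_widget) { Widget.create!(name: 'shared') }

      # Also built once, but reloaded from the database before every example,
      # so in-example mutations do not leak between examples.
      let_it_be(:mutable_widget, reload: true) { Widget.create!(name: 'mutable') }

      # Built fresh for every single example.
      let(:per_example_widget) { Widget.create!(name: 'fresh') }

      it 'sees the shared record' do
        expect(shared_widget).to be_persisted
      end
    end
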
diff --git a/spec/requests/api/issues/issues_spec.rb b/spec/requests/api/issues/issues_spec.rb
index a3538aa98b1..6fea6201a65 100644
--- a/spec/requests/api/issues/issues_spec.rb
+++ b/spec/requests/api/issues/issues_spec.rb
@@ -3,18 +3,18 @@
require 'spec_helper'
describe API::Issues do
- set(:user) { create(:user) }
- set(:project) { create(:project, :public, :repository, creator_id: user.id, namespace: user.namespace) }
- set(:private_mrs_project) do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :public, :repository, creator_id: user.id, namespace: user.namespace) }
+ let_it_be(:private_mrs_project) do
create(:project, :public, :repository, creator_id: user.id, namespace: user.namespace, merge_requests_access_level: ProjectFeature::PRIVATE)
end
- let(:user2) { create(:user) }
- let(:non_member) { create(:user) }
- set(:guest) { create(:user) }
- set(:author) { create(:author) }
- set(:assignee) { create(:assignee) }
- let(:admin) { create(:user, :admin) }
+ let(:user2) { create(:user) }
+ let(:non_member) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:author) { create(:author) }
+ let_it_be(:assignee) { create(:assignee) }
+ let(:admin) { create(:user, :admin) }
let(:issue_title) { 'foo' }
let(:issue_description) { 'closed' }
let!(:closed_issue) do
@@ -48,12 +48,12 @@ describe API::Issues do
title: issue_title,
description: issue_description
end
- set(:label) do
+ let_it_be(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project)
end
let!(:label_link) { create(:label_link, label: label, target: issue) }
let(:milestone) { create(:milestone, title: '1.0.0', project: project) }
- set(:empty_milestone) do
+ let_it_be(:empty_milestone) do
create(:milestone, title: '2.0.0', project: project)
end
let!(:note) { create(:note_on_issue, author: user, project: project, noteable: issue) }
@@ -76,7 +76,7 @@ describe API::Issues do
it 'returns issues statistics' do
get api("/issues_statistics", user), params: params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['statistics']).not_to be_nil
expect(json_response['statistics']['counts']['all']).to eq counts[:all]
expect(json_response['statistics']['counts']['closed']).to eq counts[:closed]
@@ -89,39 +89,39 @@ describe API::Issues do
it 'returns an array of all issues' do
get api('/issues'), params: { scope: 'all' }
- expect(response).to have_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
end
it 'returns authentication error without any scope' do
get api('/issues')
- expect(response).to have_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns authentication error when scope is assigned-to-me' do
get api('/issues'), params: { scope: 'assigned-to-me' }
- expect(response).to have_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns authentication error when scope is created-by-me' do
get api('/issues'), params: { scope: 'created-by-me' }
- expect(response).to have_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns an array of issues matching state in milestone' do
get api('/issues'), params: { milestone: 'foo', scope: 'all' }
- expect(response).to have_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect_paginated_array_response([])
end
it 'returns an array of issues matching state in milestone' do
get api('/issues'), params: { milestone: milestone.title, scope: 'all' }
- expect(response).to have_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect_paginated_array_response([issue.id, closed_issue.id])
end
@@ -129,19 +129,19 @@ describe API::Issues do
it 'returns authentication error without any scope' do
get api('/issues_statistics')
- expect(response).to have_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns authentication error when scope is assigned_to_me' do
get api('/issues_statistics'), params: { scope: 'assigned_to_me' }
- expect(response).to have_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns authentication error when scope is created_by_me' do
get api('/issues_statistics'), params: { scope: 'created_by_me' }
- expect(response).to have_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
context 'no state is treated as all state' do
@@ -642,14 +642,14 @@ describe API::Issues do
it 'accepts only predefined order by params' do
API::Helpers::IssuesHelpers.sort_options.each do |sort_opt|
get api('/issues', user), params: { order_by: sort_opt, sort: 'asc' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
it 'fails to sort with non predefined options' do
%w(milestone title abracadabra).each do |sort_opt|
get api('/issues', user), params: { order_by: sort_opt, sort: 'asc' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -657,14 +657,14 @@ describe API::Issues do
it 'matches V4 response schema' do
get api('/issues', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/issues')
end
it 'returns a related merge request count of 0 if there are no related merge requests' do
get api('/issues', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/issues')
expect(json_response.first).to include('merge_requests_count' => 0)
end
@@ -674,7 +674,7 @@ describe API::Issues do
get api('/issues', user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/issues')
expect(json_response.first).to include('merge_requests_count' => 1)
end
@@ -767,17 +767,43 @@ describe API::Issues do
it 'returns error when multiple assignees are passed' do
get api("/issues", user), params: { assignee_username: [assignee.username, another_assignee.username], scope: 'all' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response["error"]).to include("allows one value, but found 2")
end
it 'returns error when assignee_username and assignee_id are passed together' do
get api("/issues", user), params: { assignee_username: [assignee.username], assignee_id: another_assignee.id, scope: 'all' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response["error"]).to include("mutually exclusive")
end
end
+
+ context 'filtering by non_archived' do
+ let_it_be(:group1) { create(:group) }
+ let_it_be(:archived_project) { create(:project, :archived, namespace: group1) }
+ let_it_be(:active_project) { create(:project, namespace: group1) }
+ let_it_be(:issue1) { create(:issue, project: active_project) }
+ let_it_be(:issue2) { create(:issue, project: active_project) }
+ let_it_be(:issue3) { create(:issue, project: archived_project) }
+
+ before do
+ archived_project.add_developer(user)
+ active_project.add_developer(user)
+ end
+
+ it 'returns issues from non archived projects only by default' do
+ get api("/groups/#{group1.id}/issues", user), params: { scope: 'all' }
+
+ expect_paginated_array_response([issue2.id, issue1.id])
+ end
+
+ it 'returns issues from archived and non archived projects when non_archived is false' do
+ get api("/groups/#{group1.id}/issues", user), params: { non_archived: false, scope: 'all' }
+
+ expect_paginated_array_response([issue3.id, issue2.id, issue1.id])
+ end
+ end
end
context "when returns issue merge_requests_count for different access levels" do
@@ -809,7 +835,7 @@ describe API::Issues do
it 'exposes full reference path' do
get api("/projects/#{project.id}/issues/#{issue.iid}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['references']['short']).to eq("##{issue.iid}")
expect(json_response['references']['relative']).to eq("##{issue.iid}")
expect(json_response['references']['full']).to eq("#{project.parent.path}/#{project.path}##{issue.iid}")
@@ -819,12 +845,12 @@ describe API::Issues do
describe 'DELETE /projects/:id/issues/:issue_iid' do
it 'rejects a non member from deleting an issue' do
delete api("/projects/#{project.id}/issues/#{issue.iid}", non_member)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'rejects a developer from deleting an issue' do
delete api("/projects/#{project.id}/issues/#{issue.iid}", author)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
context 'when the user is project owner' do
@@ -834,7 +860,7 @@ describe API::Issues do
it 'deletes the issue if an admin requests it' do
delete api("/projects/#{project.id}/issues/#{issue.iid}", owner)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it_behaves_like '412 response' do
@@ -846,14 +872,14 @@ describe API::Issues do
it 'returns 404 when trying to delete an issue' do
delete api("/projects/#{project.id}/issues/123", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
it 'returns 404 when using the issue ID instead of IID' do
delete api("/projects/#{project.id}/issues/#{issue.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
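
Most hunks above only swap a numeric argument for a symbol (and `have_http_status` for GitLab's `have_gitlab_http_status`); the symbols are Rack's standard status names, so the assertions keep their meaning but read and fail more descriptively. A minimal sketch of the mapping being relied on, assuming Rack is available:

    require 'rack/utils'

    # Each symbol used in the specs above resolves to the numeric code it replaced.
    {
      ok: 200, created: 201, no_content: 204, multiple_choices: 300,
      not_modified: 304, bad_request: 400, unauthorized: 401,
      forbidden: 403, not_found: 404, conflict: 409, service_unavailable: 503
    }.each do |symbol, code|
      raise "unexpected mapping for #{symbol}" unless Rack::Utils::SYMBOL_TO_STATUS_CODE[symbol] == code
    end
    puts 'all symbolic statuses match their numeric counterparts'
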
diff --git a/spec/requests/api/issues/post_projects_issues_spec.rb b/spec/requests/api/issues/post_projects_issues_spec.rb
index 67404cf10df..0ec13eb2b31 100644
--- a/spec/requests/api/issues/post_projects_issues_spec.rb
+++ b/spec/requests/api/issues/post_projects_issues_spec.rb
@@ -3,17 +3,17 @@
require 'spec_helper'
describe API::Issues do
- set(:user) { create(:user) }
- set(:project) do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) do
create(:project, :public, creator_id: user.id, namespace: user.namespace)
end
- let(:user2) { create(:user) }
- let(:non_member) { create(:user) }
- set(:guest) { create(:user) }
- set(:author) { create(:author) }
- set(:assignee) { create(:assignee) }
- let(:admin) { create(:user, :admin) }
+ let(:user2) { create(:user) }
+ let(:non_member) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:author) { create(:author) }
+ let_it_be(:assignee) { create(:assignee) }
+ let(:admin) { create(:user, :admin) }
let(:issue_title) { 'foo' }
let(:issue_description) { 'closed' }
let!(:closed_issue) do
@@ -47,12 +47,12 @@ describe API::Issues do
title: issue_title,
description: issue_description
end
- set(:label) do
+ let_it_be(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project)
end
let!(:label_link) { create(:label_link, label: label, target: issue) }
let(:milestone) { create(:milestone, title: '1.0.0', project: project) }
- set(:empty_milestone) do
+ let_it_be(:empty_milestone) do
create(:milestone, title: '2.0.0', project: project)
end
let!(:note) { create(:note_on_issue, author: user, project: project, noteable: issue) }
@@ -75,7 +75,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", user),
params: { title: 'new issue', assignee_id: user2.id }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('new issue')
expect(json_response['assignee']['name']).to eq(user2.name)
expect(json_response['assignees'].first['name']).to eq(user2.name)
@@ -85,7 +85,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", user),
params: { title: 'new issue', assignee_id: '' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('new issue')
expect(json_response['assignee']).to be_nil
end
@@ -96,7 +96,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", user),
params: { title: 'new issue', assignee_ids: [user2.id, guest.id] }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('new issue')
expect(json_response['assignees'].count).to eq(1)
end
@@ -112,7 +112,7 @@ describe API::Issues do
it 'renders 403' do
post api("/projects/#{project.id}/issues", not_member), params: { title: 'new issue' }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -122,7 +122,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", admin),
params: { title: 'new issue', iid: 9001 }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['iid']).to eq 9001
end
end
@@ -132,7 +132,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", user),
params: { title: 'new issue', iid: 9001 }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['iid']).to eq 9001
end
end
@@ -146,7 +146,7 @@ describe API::Issues do
post api("/projects/#{group_project.id}/issues", user2),
params: { title: 'new issue', iid: 9001 }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['iid']).to eq 9001
end
end
@@ -156,7 +156,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", user2),
params: { title: 'new issue', iid: 9001 }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['iid']).not_to eq 9001
end
end
@@ -166,7 +166,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", admin),
params: { title: 'new issue', iid: issue.iid }
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response['message']).to eq 'Duplicated issue'
end
end
@@ -176,7 +176,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", user),
params: { title: 'new issue', labels: 'label, label2', weight: 3, assignee_ids: [user2.id] }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('new issue')
expect(json_response['description']).to be_nil
expect(json_response['labels']).to eq(%w(label label2))
@@ -189,7 +189,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", user),
params: { title: 'new issue', labels: %w(label label2), weight: 3, assignee_ids: [user2.id] }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('new issue')
expect(json_response['description']).to be_nil
expect(json_response['labels']).to eq(%w(label label2))
@@ -202,7 +202,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", user),
params: { title: 'new issue', confidential: true }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('new issue')
expect(json_response['confidential']).to be_truthy
end
@@ -211,7 +211,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", user),
params: { title: 'new issue', confidential: 'y' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('new issue')
expect(json_response['confidential']).to be_truthy
end
@@ -220,7 +220,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", user),
params: { title: 'new issue', confidential: false }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('new issue')
expect(json_response['confidential']).to be_falsy
end
@@ -229,13 +229,13 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", user),
params: { title: 'new issue', confidential: 'foo' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('confidential is invalid')
end
it 'returns a 400 bad request if title not given' do
post api("/projects/#{project.id}/issues", user), params: { labels: 'label, label2' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'allows special label names' do
@@ -269,7 +269,7 @@ describe API::Issues do
it 'returns 400 if title is too long' do
post api("/projects/#{project.id}/issues", user),
params: { title: 'g' * 256 }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['title']).to eq([
'is too long (maximum is 255 characters)'
])
@@ -317,7 +317,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", user),
params: { title: 'new issue', due_date: due_date }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('new issue')
expect(json_response['description']).to be_nil
expect(json_response['due_date']).to eq(due_date)
@@ -332,7 +332,7 @@ describe API::Issues do
it 'sets the creation time on the new issue' do
post api("/projects/#{project.id}/issues", admin), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(Time.parse(json_response['created_at'])).to be_like_time(creation_time)
end
end
@@ -341,7 +341,7 @@ describe API::Issues do
it 'sets the creation time on the new issue' do
post api("/projects/#{project.id}/issues", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(Time.parse(json_response['created_at'])).to be_like_time(creation_time)
end
end
@@ -353,7 +353,7 @@ describe API::Issues do
group.add_owner(user2)
post api("/projects/#{group_project.id}/issues", user2), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(Time.parse(json_response['created_at'])).to be_like_time(creation_time)
end
end
@@ -362,7 +362,7 @@ describe API::Issues do
it 'ignores the given creation time' do
post api("/projects/#{project.id}/issues", user2), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(Time.parse(json_response['created_at'])).not_to be_like_time(creation_time)
end
end
@@ -389,10 +389,10 @@ describe API::Issues do
end
before do
- expect_next_instance_of(SpamService) do |spam_service|
+ expect_next_instance_of(Spam::SpamCheckService) do |spam_service|
expect(spam_service).to receive_messages(check_for_spam?: true)
end
- expect_next_instance_of(AkismetService) do |akismet_service|
+ expect_next_instance_of(Spam::AkismetService) do |akismet_service|
expect(akismet_service).to receive_messages(spam?: true)
end
end
@@ -417,7 +417,7 @@ describe API::Issues do
it 'returns correct status and message' do
post_issue
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq({ 'error' => 'Spam detected' })
end
@@ -435,7 +435,7 @@ describe API::Issues do
it 'returns correct status' do
post_issue
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
end
it 'creates a new spam log entry' do
@@ -453,7 +453,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues/#{issue.iid}/move", user),
params: { to_project_id: target_project.id }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['project_id']).to eq(target_project.id)
end
@@ -462,7 +462,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues/#{issue.iid}/move", user),
params: { to_project_id: project.id }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq(s_('MoveIssue|Cannot move issue to project it originates from!'))
end
end
@@ -472,7 +472,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues/#{issue.iid}/move", user),
params: { to_project_id: target_project2.id }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq(s_('MoveIssue|Cannot move issue due to insufficient permissions!'))
end
end
@@ -481,7 +481,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues/#{issue.iid}/move", admin),
params: { to_project_id: target_project2.id }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['project_id']).to eq(target_project2.id)
end
@@ -490,7 +490,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues/#{issue.id}/move", user),
params: { to_project_id: target_project.id }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Issue Not Found')
end
end
@@ -500,7 +500,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues/123/move", user),
params: { to_project_id: target_project.id }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Issue Not Found')
end
end
@@ -510,7 +510,7 @@ describe API::Issues do
post api("/projects/0/issues/#{issue.iid}/move", user),
params: { to_project_id: target_project.id }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Project Not Found')
end
end
@@ -520,7 +520,7 @@ describe API::Issues do
post api("/projects/#{project.id}/issues/#{issue.iid}/move", user),
params: { to_project_id: 0 }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -529,32 +529,32 @@ describe API::Issues do
it 'subscribes to an issue' do
post api("/projects/#{project.id}/issues/#{issue.iid}/subscribe", user2)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['subscribed']).to eq(true)
end
it 'returns 304 if already subscribed' do
post api("/projects/#{project.id}/issues/#{issue.iid}/subscribe", user)
- expect(response).to have_gitlab_http_status(304)
+ expect(response).to have_gitlab_http_status(:not_modified)
end
it 'returns 404 if the issue is not found' do
post api("/projects/#{project.id}/issues/123/subscribe", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 if the issue ID is used instead of the iid' do
post api("/projects/#{project.id}/issues/#{issue.id}/subscribe", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 if the issue is confidential' do
post api("/projects/#{project.id}/issues/#{confidential_issue.iid}/subscribe", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -562,32 +562,32 @@ describe API::Issues do
it 'unsubscribes from an issue' do
post api("/projects/#{project.id}/issues/#{issue.iid}/unsubscribe", user)
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['subscribed']).to eq(false)
end
it 'returns 304 if not subscribed' do
post api("/projects/#{project.id}/issues/#{issue.iid}/unsubscribe", user2)
- expect(response).to have_gitlab_http_status(304)
+ expect(response).to have_gitlab_http_status(:not_modified)
end
it 'returns 404 if the issue is not found' do
post api("/projects/#{project.id}/issues/123/unsubscribe", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 if using the issue ID instead of iid' do
post api("/projects/#{project.id}/issues/#{issue.id}/unsubscribe", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 if the issue is confidential' do
post api("/projects/#{project.id}/issues/#{confidential_issue.iid}/unsubscribe", non_member)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
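
Several of the 404 expectations above (and in the earlier issue specs) rest on the project-scoped endpoints resolving an issue by its per-project `iid` rather than its global `id`, so passing the wrong identifier simply finds nothing. A minimal sketch of why the two lookups diverge, using an illustrative in-memory model rather than GitLab's:

    Issue = Struct.new(:id, :iid, :project_id)

    issues = [
      Issue.new(101, 1, 7), # first issue in project 7
      Issue.new(102, 2, 7), # second issue in project 7
      Issue.new(103, 1, 8)  # first issue in project 8, reusing iid 1
    ]

    # `id` is globally unique; `iid` restarts at 1 for every project.
    def find_by_iid(issues, project_id:, iid:)
      issues.find { |issue| issue.project_id == project_id && issue.iid == iid }
    end

    p find_by_iid(issues, project_id: 7, iid: 1)   # => the issue whose global id is 101
    p find_by_iid(issues, project_id: 7, iid: 101) # => nil, which the API surfaces as 404
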
diff --git a/spec/requests/api/issues/put_projects_issues_spec.rb b/spec/requests/api/issues/put_projects_issues_spec.rb
index 43f302ed194..b4332c555e1 100644
--- a/spec/requests/api/issues/put_projects_issues_spec.rb
+++ b/spec/requests/api/issues/put_projects_issues_spec.rb
@@ -3,17 +3,17 @@
require 'spec_helper'
describe API::Issues do
- set(:user) { create(:user) }
- set(:project) do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) do
create(:project, :public, creator_id: user.id, namespace: user.namespace)
end
- let(:user2) { create(:user) }
- let(:non_member) { create(:user) }
- set(:guest) { create(:user) }
- set(:author) { create(:author) }
- set(:assignee) { create(:assignee) }
- let(:admin) { create(:user, :admin) }
+ let(:user2) { create(:user) }
+ let(:non_member) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:author) { create(:author) }
+ let_it_be(:assignee) { create(:assignee) }
+ let(:admin) { create(:user, :admin) }
let(:issue_title) { 'foo' }
let(:issue_description) { 'closed' }
let!(:closed_issue) do
@@ -47,12 +47,12 @@ describe API::Issues do
title: issue_title,
description: issue_description
end
- set(:label) do
+ let_it_be(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project)
end
let!(:label_link) { create(:label_link, label: label, target: issue) }
let(:milestone) { create(:milestone, title: '1.0.0', project: project) }
- set(:empty_milestone) do
+ let_it_be(:empty_milestone) do
create(:milestone, title: '2.0.0', project: project)
end
let!(:note) { create(:note_on_issue, author: user, project: project, noteable: issue) }
@@ -73,7 +73,7 @@ describe API::Issues do
it 'updates a project issue' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { title: 'updated title' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq('updated title')
end
@@ -81,13 +81,13 @@ describe API::Issues do
it 'returns 404 error if issue iid not found' do
put api("/projects/#{project.id}/issues/44444", user),
params: { title: 'updated title' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 error if issue id is used instead of the iid' do
put api("/projects/#{project.id}/issues/#{issue.id}", user),
params: { title: 'updated title' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'allows special label names' do
@@ -124,33 +124,33 @@ describe API::Issues do
it 'returns 403 for non project members' do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", non_member),
params: { title: 'updated title' }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'returns 403 for project members with guest role' do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", guest),
params: { title: 'updated title' }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'updates a confidential issue for project members' do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", user),
params: { title: 'updated title' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq('updated title')
end
it 'updates a confidential issue for author' do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", author),
params: { title: 'updated title' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq('updated title')
end
it 'updates a confidential issue for admin' do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", admin),
params: { title: 'updated title' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq('updated title')
end
@@ -158,7 +158,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { confidential: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['confidential']).to be_truthy
end
@@ -166,7 +166,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", user),
params: { confidential: false }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['confidential']).to be_falsy
end
@@ -174,7 +174,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", user),
params: { confidential: 'foo' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('confidential is invalid')
end
end
@@ -194,10 +194,10 @@ describe API::Issues do
end
before do
- expect_next_instance_of(SpamService) do |spam_service|
+ expect_next_instance_of(Spam::SpamCheckService) do |spam_service|
expect(spam_service).to receive_messages(check_for_spam?: true)
end
- expect_next_instance_of(AkismetService) do |akismet_service|
+ expect_next_instance_of(Spam::AkismetService) do |akismet_service|
expect(akismet_service).to receive_messages(spam?: true)
end
end
@@ -214,7 +214,7 @@ describe API::Issues do
it 'returns correct status and message' do
update_issue
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to include('message' => { 'error' => 'Spam detected' })
end
@@ -232,7 +232,7 @@ describe API::Issues do
it 'returns correct status and message' do
update_issue
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'creates a new spam log entry' do
@@ -248,7 +248,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { assignee_id: 0 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['assignee']).to be_nil
end
@@ -257,7 +257,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { assignee_id: user2.id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['assignee']['name']).to eq(user2.name)
end
@@ -267,7 +267,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { assignee_ids: [0] }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['assignees']).to be_empty
end
@@ -276,7 +276,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { assignee_ids: [user2.id] }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['assignees'].first['name']).to eq(user2.name)
end
@@ -286,7 +286,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { assignee_ids: [user2.id, guest.id] }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['assignees'].size).to eq(1)
end
@@ -300,7 +300,7 @@ describe API::Issues do
it 'does not update labels if not present' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { title: 'updated title' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to eq([label.title])
end
@@ -309,7 +309,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), params: { labels: '' }
end
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to eq([])
expect(json_response['updated_at']).to be > Time.now
end
@@ -319,7 +319,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), params: { labels: [''] }
end
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to eq([])
expect(json_response['updated_at']).to be > Time.now
end
@@ -329,7 +329,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { labels: 'foo,bar' }
end
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'foo'
expect(json_response['labels']).to include 'bar'
expect(json_response['updated_at']).to be > Time.now
@@ -340,7 +340,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { labels: %w(foo bar) }
end
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'foo'
expect(json_response['labels']).to include 'bar'
expect(json_response['updated_at']).to be > Time.now
@@ -377,7 +377,7 @@ describe API::Issues do
it 'returns 400 if title is too long' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { title: 'g' * 256 }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['title']).to eq([
'is too long (maximum is 255 characters)'
])
@@ -388,7 +388,7 @@ describe API::Issues do
it 'updates a project issue' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { labels: 'label2', state_event: 'close' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label2'
expect(json_response['state']).to eq 'closed'
@@ -397,7 +397,7 @@ describe API::Issues do
it 'reopens a project issue' do
put api("/projects/#{project.id}/issues/#{closed_issue.iid}", user), params: { state_event: 'reopen' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['state']).to eq 'opened'
end
@@ -407,7 +407,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { labels: 'label3', state_event: 'close', updated_at: update_time }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label3'
expect(Time.parse(json_response['updated_at'])).to be_like_time(update_time)
end
@@ -420,7 +420,7 @@ describe API::Issues do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), params: { due_date: due_date }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['due_date']).to eq(due_date)
end
end
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 1e1099ebcb6..652be20f1e4 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -31,11 +31,11 @@ describe API::Jobs do
end
end
- set(:project) do
+ let_it_be(:project, reload: true) do
create(:project, :repository, public_builds: false)
end
- set(:pipeline) do
+ let_it_be(:pipeline, reload: true) do
create(:ci_empty_pipeline, project: project,
sha: project.commit.id,
ref: project.default_branch)
diff --git a/spec/requests/api/lsif_data_spec.rb b/spec/requests/api/lsif_data_spec.rb
new file mode 100644
index 00000000000..214bc832cda
--- /dev/null
+++ b/spec/requests/api/lsif_data_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe API::LsifData do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:commit) { project.commit }
+
+ describe 'GET lsif/info' do
+ let(:endpoint_path) { "/projects/#{project.id}/commits/#{commit.id}/lsif/info" }
+
+ context 'user does not have access to the project' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'returns 403' do
+ get api(endpoint_path, user), params: { path: 'main.go' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'user has access to the project' do
+ before do
+ project.add_reporter(user)
+ end
+
+ context 'code_navigation feature is disabled' do
+ before do
+ stub_feature_flags(code_navigation: false)
+ end
+
+ it 'returns 404' do
+ get api(endpoint_path, user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'there is no job artifact for the passed commit' do
+ it 'returns 404' do
+ get api(endpoint_path, user), params: { path: 'main.go' }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'lsif data is stored as a job artifact' do
+ let!(:pipeline) { create(:ci_pipeline, project: project, sha: commit.id) }
+ let!(:artifact) { create(:ci_job_artifact, :lsif, job: create(:ci_build, pipeline: pipeline)) }
+
+ it 'returns code navigation info for a given path' do
+ get api(endpoint_path, user), params: { path: 'main.go' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.parsed_body.last).to eq({
+ 'end_char' => 18,
+ 'end_line' => 8,
+ 'start_char' => 13,
+ 'start_line' => 8,
+ 'definition_url' => project_blob_path(project, "#{commit.id}/morestrings/reverse.go", anchor: 'L5'),
+ 'hover' => [{
+ 'language' => 'go',
+ 'value' => Gitlab::Highlight.highlight(nil, 'func Func2(i int) string', language: 'go')
+ }]
+ })
+ end
+
+ context 'the stored file is too large' do
+ it 'returns 413' do
+ allow_any_instance_of(JobArtifactUploader).to receive(:cached_size).and_return(20.megabytes)
+
+ get api(endpoint_path, user), params: { path: 'main.go' }
+
+ expect(response).to have_gitlab_http_status(:payload_too_large)
+ end
+ end
+
+ context 'the user does not have access to the pipeline' do
+ let(:project) { create(:project, :repository, builds_access_level: ProjectFeature::DISABLED) }
+
+ it 'returns 403' do
+ get api(endpoint_path, user), params: { path: 'main.go' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/markdown_spec.rb b/spec/requests/api/markdown_spec.rb
index 99263f2fc1e..8a1e1b05c9a 100644
--- a/spec/requests/api/markdown_spec.rb
+++ b/spec/requests/api/markdown_spec.rb
@@ -52,8 +52,8 @@ describe API::Markdown do
end
context "when arguments are valid" do
- set(:project) { create(:project) }
- set(:issue) { create(:issue, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue) { create(:issue, project: project) }
let(:text) { ":tada: Hello world! :100: #{issue.to_reference}" }
context "when not using gfm" do
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index ae0596bea98..862afd11b86 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -6,13 +6,14 @@ describe API::MergeRequests do
include ProjectForksHelper
let(:base_time) { Time.now }
- set(:user) { create(:user) }
- set(:user2) { create(:user) }
- set(:admin) { create(:user, :admin) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:admin) { create(:user, :admin) }
let(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace, only_allow_merge_if_pipeline_succeeds: false) }
let(:milestone) { create(:milestone, title: '1.0.0', project: project) }
let(:milestone1) { create(:milestone, title: '0.9', project: project) }
- let!(:merge_request) { create(:merge_request, :simple, milestone: milestone1, author: user, assignees: [user], source_project: project, target_project: project, source_branch: 'markdown', title: "Test", created_at: base_time) }
+ let(:merge_request_context_commit) { create(:merge_request_context_commit, message: 'test') }
+ let!(:merge_request) { create(:merge_request, :simple, milestone: milestone1, author: user, assignees: [user], merge_request_context_commits: [merge_request_context_commit], source_project: project, target_project: project, source_branch: 'markdown', title: "Test", created_at: base_time) }
let!(:merge_request_closed) { create(:merge_request, state: "closed", milestone: milestone1, author: user, assignees: [user], source_project: project, target_project: project, title: "Closed test", created_at: base_time + 1.second) }
let!(:merge_request_merged) { create(:merge_request, state: "merged", author: user, assignees: [user], source_project: project, target_project: project, title: "Merged test", created_at: base_time + 2.seconds, merge_commit_sha: '9999999999999999999999999999999999999999') }
let!(:merge_request_locked) { create(:merge_request, state: "locked", milestone: milestone1, author: user, assignees: [user], source_project: project, target_project: project, title: "Locked test", created_at: base_time + 1.second) }
@@ -40,8 +41,7 @@ describe API::MergeRequests do
it 'returns merge requests for public projects' do
get api(endpoint_path)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
+ expect_successful_response_with_paginated_array
end
end
@@ -65,16 +65,32 @@ describe API::MergeRequests do
end.not_to exceed_query_limit(control)
end
+ context 'when merge request is unchecked' do
+ before do
+ merge_request.mark_as_unchecked!
+ end
+
+ it 'checks mergeability asynchronously' do
+ expect_next_instance_of(MergeRequests::MergeabilityCheckService) do |service|
+ expect(service).not_to receive(:execute)
+ expect(service).to receive(:async_execute)
+ end
+
+ get api(endpoint_path, user)
+ end
+ end
+
context 'with labels' do
include_context 'with labels'
it 'returns an array of all merge_requests' do
get api(endpoint_path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
+ expect_paginated_array_response([
+ merge_request_merged.id, merge_request_locked.id,
+ merge_request_closed.id, merge_request.id
+ ])
+
expect(json_response.last['title']).to eq(merge_request.title)
expect(json_response.last).to have_key('web_url')
expect(json_response.last['sha']).to eq(merge_request.diff_head_sha)
@@ -95,7 +111,7 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
+ expect_successful_response_with_paginated_array
expect(json_response.last['labels'].pluck('name')).to eq([label2.title, label.title])
expect(json_response.last['labels'].first).to match_schema('/public_api/v4/label_basic')
end
@@ -123,11 +139,11 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
+ expect_paginated_array_response([
+ merge_request_merged.id, merge_request_locked.id,
+ merge_request_closed.id, merge_request.id
+ ])
expect(json_response.last.keys).to match_array(%w(id iid title web_url created_at description project_id state updated_at))
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
expect(json_response.last['iid']).to eq(merge_request.iid)
expect(json_response.last['title']).to eq(merge_request.title)
expect(json_response.last).to have_key('web_url')
@@ -141,10 +157,10 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
+ expect_paginated_array_response([
+ merge_request_merged.id, merge_request_locked.id,
+ merge_request_closed.id, merge_request.id
+ ])
expect(json_response.last['title']).to eq(merge_request.title)
end
@@ -153,10 +169,7 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response([merge_request.id])
expect(json_response.last['title']).to eq(merge_request.title)
end
@@ -165,10 +178,7 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response([merge_request_closed.id])
expect(json_response.first['title']).to eq(merge_request_closed.title)
end
@@ -177,10 +187,7 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response([merge_request_merged.id])
expect(json_response.first['title']).to eq(merge_request_merged.title)
end
@@ -194,17 +201,13 @@ describe API::MergeRequests do
it 'returns an empty array if no issue matches milestone' do
get api(endpoint_path, user), params: { milestone: '1.0.0' }
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_empty_array_response
end
it 'returns an empty array if milestone does not exist' do
get api(endpoint_path, user), params: { milestone: 'foo' }
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_empty_array_response
end
it 'returns an array of merge requests in given milestone' do
@@ -218,9 +221,7 @@ describe API::MergeRequests do
it 'returns an array of merge requests matching state in milestone' do
get api(endpoint_path, user), params: { milestone: '0.9', state: 'closed' }
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response([merge_request_closed.id])
expect(json_response.first['id']).to eq(merge_request_closed.id)
end
@@ -232,8 +233,7 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
+ expect_successful_response_with_paginated_array
expect(json_response.length).to eq(1)
expect(json_response.first['labels']).to eq([label2.title, label.title])
end
@@ -243,9 +243,7 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_empty_array_response
end
it 'returns an empty array if no merge request matches labels' do
@@ -253,9 +251,7 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_empty_array_response
end
it 'returns an array of labeled merge requests where all labels match' do
@@ -263,8 +259,7 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
+ expect_successful_response_with_paginated_array
expect(json_response.length).to eq(1)
expect(json_response.first['labels']).to eq([label2.title, label.title])
end
@@ -272,9 +267,7 @@ describe API::MergeRequests do
it 'returns an array of merge requests with any label when filtering by any label' do
get api(endpoint_path, user), params: { labels: [" #{label.title} ", " #{label2.title} "] }
- expect_paginated_array_response
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response([merge_request.id])
expect(json_response.first['labels']).to eq([label2.title, label.title])
expect(json_response.first['id']).to eq(merge_request.id)
end
@@ -282,9 +275,7 @@ describe API::MergeRequests do
it 'returns an array of merge requests with any label when filtering by any label' do
get api(endpoint_path, user), params: { labels: ["#{label.title} , #{label2.title}"] }
- expect_paginated_array_response
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response([merge_request.id])
expect(json_response.first['labels']).to eq([label2.title, label.title])
expect(json_response.first['id']).to eq(merge_request.id)
end
@@ -292,18 +283,16 @@ describe API::MergeRequests do
it 'returns an array of merge requests with any label when filtering by any label' do
get api(endpoint_path, user), params: { labels: IssuesFinder::FILTER_ANY }
- expect_paginated_array_response
- expect(json_response.length).to eq(1)
+ expect_paginated_array_response([merge_request.id])
expect(json_response.first['id']).to eq(merge_request.id)
end
it 'returns an array of merge requests without a label when filtering by no label' do
get api(endpoint_path, user), params: { labels: IssuesFinder::FILTER_NONE }
- response_ids = json_response.map { |merge_request| merge_request['id'] }
-
- expect_paginated_array_response
- expect(response_ids).to contain_exactly(merge_request_closed.id, merge_request_merged.id, merge_request_locked.id)
+ expect_response_contain_exactly(
+ merge_request_merged.id, merge_request_locked.id, merge_request_closed.id
+ )
end
end
@@ -323,10 +312,7 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(mr2.id)
+ expect_response_contain_exactly(mr2.id)
end
context 'with ordering' do
@@ -340,10 +326,10 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
+ expect_paginated_array_response([
+ merge_request_closed.id, merge_request_locked.id,
+ merge_request_merged.id, merge_request.id
+ ])
response_dates = json_response.map { |merge_request| merge_request['created_at'] }
expect(response_dates).to eq(response_dates.sort)
end
@@ -353,10 +339,10 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
+ expect_paginated_array_response([
+ merge_request.id, merge_request_merged.id,
+ merge_request_locked.id, merge_request_closed.id
+ ])
response_dates = json_response.map { |merge_request| merge_request['created_at'] }
expect(response_dates).to eq(response_dates.sort.reverse)
end
@@ -398,10 +384,10 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
+ expect_paginated_array_response([
+ merge_request.id, merge_request_locked.id,
+ merge_request_merged.id, merge_request_closed.id
+ ])
response_dates = json_response.map { |merge_request| merge_request['updated_at'] }
expect(response_dates).to eq(response_dates.sort.reverse)
end
@@ -411,10 +397,10 @@ describe API::MergeRequests do
get api(path, user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
+ expect_paginated_array_response([
+ merge_request_closed.id, merge_request_locked.id,
+ merge_request_merged.id, merge_request.id
+ ])
response_dates = json_response.map { |merge_request| merge_request['created_at'] }
expect(response_dates).to eq(response_dates.sort)
end
@@ -424,7 +410,9 @@ describe API::MergeRequests do
it 'returns merge requests with the given source branch' do
get api(endpoint_path, user), params: { source_branch: merge_request_closed.source_branch, state: 'all' }
- expect_response_contain_exactly(merge_request_closed, merge_request_merged, merge_request_locked)
+ expect_response_contain_exactly(
+ merge_request_merged.id, merge_request_locked.id, merge_request_closed.id
+ )
end
end
@@ -432,7 +420,9 @@ describe API::MergeRequests do
it 'returns merge requests with the given target branch' do
get api(endpoint_path, user), params: { target_branch: merge_request_closed.target_branch, state: 'all' }
- expect_response_contain_exactly(merge_request_closed, merge_request_merged, merge_request_locked)
+ expect_response_contain_exactly(
+ merge_request_merged.id, merge_request_locked.id, merge_request_closed.id
+ )
end
end
end
@@ -455,7 +445,10 @@ describe API::MergeRequests do
it 'returns an array of all merge requests' do
get api('/merge_requests', user), params: { scope: 'all' }
- expect_paginated_array_response
+ expect_response_contain_exactly(
+ merge_request_merged.id, merge_request_locked.id,
+ merge_request_closed.id, merge_request.id
+ )
end
it "returns authentication error without any scope" do
@@ -491,30 +484,25 @@ describe API::MergeRequests do
it 'returns an array of all merge requests except unauthorized ones' do
get api('/merge_requests', user), params: { scope: :all }
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.map { |mr| mr['id'] })
- .to contain_exactly(merge_request.id, merge_request_closed.id, merge_request_merged.id, merge_request_locked.id, merge_request2.id)
+ expect_response_contain_exactly(
+ merge_request_merged.id, merge_request2.id, merge_request_locked.id,
+ merge_request_closed.id, merge_request.id
+ )
end
it "returns an array of no merge_requests when wip=yes" do
get api("/merge_requests", user), params: { wip: 'yes' }
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_empty_array_response
end
it "returns an array of no merge_requests when wip=no" do
get api("/merge_requests", user), params: { wip: 'no' }
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.map { |mr| mr['id'] })
- .to contain_exactly(merge_request.id, merge_request_closed.id, merge_request_merged.id, merge_request_locked.id, merge_request2.id)
+ expect_response_contain_exactly(
+ merge_request_merged.id, merge_request2.id, merge_request_locked.id,
+ merge_request_closed.id, merge_request.id
+ )
end
it 'does not return unauthorized merge requests' do
@@ -523,7 +511,10 @@ describe API::MergeRequests do
get api('/merge_requests', user), params: { scope: :all }
- expect_response_contain_exactly(merge_request2, merge_request_merged, merge_request_closed, merge_request, merge_request_locked)
+ expect_response_contain_exactly(
+ merge_request_merged.id, merge_request2.id, merge_request_locked.id,
+ merge_request_closed.id, merge_request.id
+ )
expect(json_response.map { |mr| mr['id'] }).not_to include(merge_request3.id)
end
@@ -532,7 +523,7 @@ describe API::MergeRequests do
get api('/merge_requests', user2)
- expect_response_ordered_exactly(merge_request3)
+ expect_response_contain_exactly(merge_request3.id)
end
it 'returns an array of merge requests authored by the given user' do
@@ -540,7 +531,7 @@ describe API::MergeRequests do
get api('/merge_requests', user), params: { author_id: user2.id, scope: :all }
- expect_response_ordered_exactly(merge_request3)
+ expect_response_contain_exactly(merge_request3.id)
end
it 'returns an array of merge requests assigned to the given user' do
@@ -548,7 +539,7 @@ describe API::MergeRequests do
get api('/merge_requests', user), params: { assignee_id: user2.id, scope: :all }
- expect_response_ordered_exactly(merge_request3)
+ expect_response_contain_exactly(merge_request3.id)
end
it 'returns an array of merge requests with no assignee' do
@@ -556,7 +547,7 @@ describe API::MergeRequests do
get api('/merge_requests', user), params: { assignee_id: 'None', scope: :all }
- expect_response_ordered_exactly(merge_request3)
+ expect_response_contain_exactly(merge_request3.id)
end
it 'returns an array of merge requests with any assignee' do
@@ -565,7 +556,10 @@ describe API::MergeRequests do
get api('/merge_requests', user), params: { assignee_id: 'Any', scope: :all }
- expect_response_contain_exactly(merge_request, merge_request2, merge_request_closed, merge_request_merged, merge_request_locked)
+ expect_response_contain_exactly(
+ merge_request_merged.id, merge_request2.id, merge_request_locked.id,
+ merge_request_closed.id, merge_request.id
+ )
end
it 'returns an array of merge requests assigned to me' do
@@ -573,7 +567,7 @@ describe API::MergeRequests do
get api('/merge_requests', user2), params: { scope: 'assigned_to_me' }
- expect_response_ordered_exactly(merge_request3)
+ expect_response_contain_exactly(merge_request3.id)
end
it 'returns an array of merge requests assigned to me (kebab-case)' do
@@ -581,7 +575,7 @@ describe API::MergeRequests do
get api('/merge_requests', user2), params: { scope: 'assigned-to-me' }
- expect_response_ordered_exactly(merge_request3)
+ expect_response_contain_exactly(merge_request3.id)
end
it 'returns an array of merge requests created by me' do
@@ -589,7 +583,7 @@ describe API::MergeRequests do
get api('/merge_requests', user2), params: { scope: 'created_by_me' }
- expect_response_ordered_exactly(merge_request3)
+ expect_response_contain_exactly(merge_request3.id)
end
it 'returns an array of merge requests created by me (kebab-case)' do
@@ -597,7 +591,7 @@ describe API::MergeRequests do
get api('/merge_requests', user2), params: { scope: 'created-by-me' }
- expect_response_ordered_exactly(merge_request3)
+ expect_response_contain_exactly(merge_request3.id)
end
it 'returns merge requests reacted by the authenticated user by the given emoji' do
@@ -606,14 +600,16 @@ describe API::MergeRequests do
get api('/merge_requests', user2), params: { my_reaction_emoji: award_emoji.name, scope: 'all' }
- expect_response_ordered_exactly(merge_request3)
+ expect_response_contain_exactly(merge_request3.id)
end
context 'source_branch param' do
it 'returns merge requests with the given source branch' do
get api('/merge_requests', user), params: { source_branch: merge_request_closed.source_branch, state: 'all' }
- expect_response_contain_exactly(merge_request_closed, merge_request_merged, merge_request_locked)
+ expect_response_contain_exactly(
+ merge_request_merged.id, merge_request_locked.id, merge_request_closed.id
+ )
end
end
@@ -621,7 +617,9 @@ describe API::MergeRequests do
it 'returns merge requests with the given target branch' do
get api('/merge_requests', user), params: { target_branch: merge_request_closed.target_branch, state: 'all' }
- expect_response_contain_exactly(merge_request_closed, merge_request_merged, merge_request_locked)
+ expect_response_contain_exactly(
+ merge_request_merged.id, merge_request_locked.id, merge_request_closed.id
+ )
end
end
@@ -630,7 +628,7 @@ describe API::MergeRequests do
get api('/merge_requests?created_before=2000-01-02T00:00:00.060Z', user)
- expect_response_ordered_exactly(merge_request2)
+ expect_response_contain_exactly(merge_request2.id)
end
it 'returns merge requests created after a specific date' do
@@ -638,7 +636,7 @@ describe API::MergeRequests do
get api("/merge_requests?created_after=#{merge_request2.created_at}", user)
- expect_response_ordered_exactly(merge_request2)
+ expect_response_contain_exactly(merge_request2.id)
end
it 'returns merge requests updated before a specific date' do
@@ -646,7 +644,7 @@ describe API::MergeRequests do
get api('/merge_requests?updated_before=2000-01-02T00:00:00.060Z', user)
- expect_response_ordered_exactly(merge_request2)
+ expect_response_contain_exactly(merge_request2.id)
end
it 'returns merge requests updated after a specific date' do
@@ -654,7 +652,7 @@ describe API::MergeRequests do
get api("/merge_requests?updated_after=#{merge_request2.updated_at}", user)
- expect_response_ordered_exactly(merge_request2)
+ expect_response_contain_exactly(merge_request2.id)
end
context 'search params' do
@@ -665,25 +663,25 @@ describe API::MergeRequests do
it 'returns merge requests matching given search string for title' do
get api("/merge_requests", user), params: { search: merge_request.title }
- expect_response_ordered_exactly(merge_request)
+ expect_response_contain_exactly(merge_request.id)
end
it 'returns merge requests matching given search string for title and scoped in title' do
get api("/merge_requests", user), params: { search: merge_request.title, in: 'title' }
- expect_response_ordered_exactly(merge_request)
+ expect_response_contain_exactly(merge_request.id)
end
- it 'returns an empty array if no merge reques matches given search string for description and scoped in title' do
+ it 'returns an empty array if no merge request matches given search string for description and scoped in title' do
get api("/merge_requests", user), params: { search: merge_request.description, in: 'title' }
- expect_response_contain_exactly
+ expect_empty_array_response
end
it 'returns merge requests for project matching given search string for description' do
get api("/merge_requests", user), params: { project_id: project.id, search: merge_request.description }
- expect_response_ordered_exactly(merge_request)
+ expect_response_contain_exactly(merge_request.id)
end
end
@@ -691,7 +689,7 @@ describe API::MergeRequests do
it 'returns merge requests with the given state' do
get api('/merge_requests', user), params: { state: 'locked' }
- expect_response_contain_exactly(merge_request_locked)
+ expect_response_contain_exactly(merge_request_locked.id)
end
end
end
@@ -713,18 +711,13 @@ describe API::MergeRequests do
it "returns an array of no merge_requests when wip=yes" do
get api("/projects/#{project.id}/merge_requests", user), params: { wip: 'yes' }
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_empty_array_response
end
it 'returns merge_request by "iids" array' do
get api(endpoint_path, user), params: { iids: [merge_request.iid, merge_request_closed.iid] }
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(2)
+ expect_paginated_array_response([merge_request_closed.id, merge_request.id])
expect(json_response.first['title']).to eq merge_request_closed.title
expect(json_response.first['id']).to eq merge_request_closed.id
end
@@ -765,7 +758,7 @@ describe API::MergeRequests do
it_behaves_like 'merge requests list'
end
- context "#to_reference" do
+ describe "#to_reference" do
it 'exposes reference path in context of group' do
get api("/groups/#{group.id}/merge_requests", user)
@@ -791,6 +784,33 @@ describe API::MergeRequests do
end
end
end
+
+ context 'with archived projects' do
+ let(:project2) { create(:project, :public, :archived, namespace: group) }
+ let!(:merge_request_archived) { create(:merge_request, title: 'archived mr', author: user, source_project: project2, target_project: project2) }
+
+ it 'returns an array excluding merge_requests from archived projects' do
+ get api(endpoint_path, user)
+
+ expect_response_contain_exactly(
+ merge_request_merged.id, merge_request_locked.id,
+ merge_request_closed.id, merge_request.id
+ )
+ end
+
+ context 'with non_archived param set to false' do
+ it 'returns an array including merge_requests from archived projects' do
+ path = endpoint_path + '?non_archived=false'
+
+ get api(path, user)
+
+ expect_response_contain_exactly(
+ merge_request_merged.id, merge_request_archived.id, merge_request_locked.id,
+ merge_request_closed.id, merge_request.id
+ )
+ end
+ end
+ end
end
describe "GET /projects/:id/merge_requests/:merge_request_iid" do
@@ -1003,6 +1023,21 @@ describe API::MergeRequests do
expect(json_response['user']['can_merge']).to be_falsy
end
+
+ context 'when merge request is unchecked' do
+ before do
+ merge_request.mark_as_unchecked!
+ end
+
+ it 'checks mergeability asynchronously' do
+ expect_next_instance_of(MergeRequests::MergeabilityCheckService) do |service|
+ expect(service).not_to receive(:execute)
+ expect(service).to receive(:async_execute)
+ end
+
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user)
+ end
+ end
end
describe 'GET /projects/:id/merge_requests/:merge_request_iid/participants' do
@@ -1016,9 +1051,7 @@ describe API::MergeRequests do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/commits", user)
commit = merge_request.commits.first
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+ expect_successful_response_with_paginated_array
expect(json_response.size).to eq(merge_request.commits.size)
expect(json_response.first['id']).to eq(commit.id)
expect(json_response.first['title']).to eq(commit.title)
@@ -1036,6 +1069,24 @@ describe API::MergeRequests do
end
end
+ describe 'GET /projects/:id/merge_requests/:merge_request_iid/context_commits' do
+ it 'returns a 200 when merge request is valid' do
+ context_commit = merge_request.context_commits.first
+
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/context_commits", user)
+
+ expect_successful_response_with_paginated_array
+ expect(json_response.size).to eq(merge_request.context_commits.size)
+ expect(json_response.first['id']).to eq(context_commit.id)
+ expect(json_response.first['title']).to eq(context_commit.title)
+ end
+
+ it 'returns a 404 when merge_request_iid not found' do
+ get api("/projects/#{project.id}/merge_requests/0/context_commits", user)
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
describe 'GET /projects/:id/merge_requests/:merge_request_iid/changes' do
it 'returns the change information of the merge_request' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/changes", user)
@@ -1064,9 +1115,7 @@ describe API::MergeRequests do
it 'returns a paginated array of corresponding pipelines' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/pipelines")
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+ expect_successful_response_with_paginated_array
expect(json_response.count).to eq(1)
expect(json_response.first['id']).to eq(pipeline.id)
end
@@ -1312,7 +1361,7 @@ describe API::MergeRequests do
expect(json_response['labels']).to eq([])
end
- xit 'empty label param as array, does not add any labels' do
+ it 'empty label param as array, does not add any labels' do
params[:labels] = []
post api("/projects/#{project.id}/merge_requests", user), params: params
@@ -1409,7 +1458,7 @@ describe API::MergeRequests do
end
end
- context 'forked projects' do
+ context 'forked projects', :sidekiq_might_not_need_inline do
let!(:user2) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let!(:forked_project) { fork_project(project, user2, repository: true) }
@@ -1510,6 +1559,93 @@ describe API::MergeRequests do
end
end
+ describe "POST /projects/:id/merge_requests/:merge_request_iid/context_commits" do
+ let(:merge_request_iid) { merge_request.iid }
+ let(:authenticated_user) { user }
+ let(:commit) { project.repository.commit }
+
+ let(:params) do
+ {
+ commits: [commit.id]
+ }
+ end
+
+ let(:params_empty_commits) do
+ {
+ commits: []
+ }
+ end
+
+ let(:params_invalid_shas) do
+ {
+ commits: ['invalid']
+ }
+ end
+
+ describe 'when authenticated' do
+ it 'creates and returns the new context commit' do
+ post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", authenticated_user), params: params
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response).to be_an Array
+ expect(json_response.first['short_id']).to eq(commit.short_id)
+ expect(json_response.first['title']).to eq(commit.title)
+ expect(json_response.first['message']).to eq(commit.message)
+ expect(json_response.first['author_name']).to eq(commit.author_name)
+ expect(json_response.first['author_email']).to eq(commit.author_email)
+ expect(json_response.first['committer_name']).to eq(commit.committer_name)
+ expect(json_response.first['committer_email']).to eq(commit.committer_email)
+ end
+
+ context "doesn't create when it's already created" do
+ before do
+ create(:merge_request_context_commit, merge_request: merge_request, sha: commit.id)
+ end
+ it 'returns 400 when the context commit is already created' do
+ post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", authenticated_user), params: params
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response['message']).to eq("Context commits: [\"#{commit.id}\"] are already created")
+ end
+ end
+
+ it 'returns 400 when one or more shas are invalid' do
+ post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", authenticated_user), params: params_invalid_shas
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response['message']).to eq('One or more context commits\' sha is not valid.')
+ end
+
+ it 'returns 400 when the commits are empty' do
+ post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", authenticated_user), params: params_empty_commits
+ expect(response).to have_gitlab_http_status(400)
+ end
+
+ it 'returns 400 when params is empty' do
+ post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", authenticated_user)
+ expect(response).to have_gitlab_http_status(400)
+ end
+
+ it 'returns 403 when creating new context commit for guest role' do
+ guest = create(:user)
+ project.add_guest(guest)
+ post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", guest), params: params
+ expect(response).to have_gitlab_http_status(403)
+ end
+
+ it 'returns 403 when creating new context commit for reporter role' do
+ reporter = create(:user)
+ project.add_reporter(reporter)
+ post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", reporter), params: params
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'when unauthenticated' do
+ it 'returns 401 if user tries to create context commits' do
+ post api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits"), params: params
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
+ end
+
describe "DELETE /projects/:id/merge_requests/:merge_request_iid" do
context "when the user is developer" do
let(:developer) { create(:user) }
@@ -1549,6 +1685,79 @@ describe API::MergeRequests do
end
end
+ describe "DELETE /projects/:id/merge_requests/:merge_request_iid/context_commits" do
+ let(:merge_request_iid) { merge_request.iid }
+ let(:authenticated_user) { user }
+ let(:commit) { project.repository.commit }
+
+ context "when authenticated" do
+ let(:params) do
+ {
+ commits: [commit.id]
+ }
+ end
+
+ let(:params_invalid_shas) do
+ {
+ commits: ["invalid"]
+ }
+ end
+
+ let(:params_empty_commits) do
+ {
+ commits: []
+ }
+ end
+
+ it "deletes context commit" do
+ delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/context_commits", authenticated_user), params: params
+
+ expect(response).to have_gitlab_http_status(204)
+ end
+
+ it "returns 400 when invalid commit sha is passed" do
+ delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/context_commits", authenticated_user), params: params_invalid_shas
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response["message"]).to eq('One or more context commits\' sha is not valid.')
+ end
+
+ it "returns 400 when commits is empty" do
+ delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/context_commits", authenticated_user), params: params_empty_commits
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+
+ it "returns 400 when no params is passed" do
+ delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/context_commits", authenticated_user)
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+
+ it 'returns 403 when deleting existing context commit for guest role' do
+ guest = create(:user)
+ project.add_guest(guest)
+ delete api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", guest), params: params
+ expect(response).to have_gitlab_http_status(403)
+ end
+
+ it 'returns 403 when deleting existing context commit for reporter role' do
+ reporter = create(:user)
+ project.add_reporter(reporter)
+ delete api("/projects/#{project.id}/merge_requests/#{merge_request_iid}/context_commits", reporter), params: params
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context "when unauthenticated" do
+ it "returns 401, unauthorised error" do
+ delete api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/context_commits")
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
+ end
+
describe "PUT /projects/:id/merge_requests/:merge_request_iid/merge", :clean_gitlab_redis_cache do
let(:pipeline) { create(:ci_pipeline) }
@@ -1989,7 +2198,7 @@ describe API::MergeRequests do
expect(json_response['labels']).to eq []
end
- xit 'empty label as array, removes labels' do
+ it 'empty label as array, removes labels' do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user),
params: {
title: 'new issue',
@@ -1997,7 +2206,6 @@ describe API::MergeRequests do
}
expect(response.status).to eq(200)
- # fails, as grape ommits for some reason empty array as optional param value, so nothing it passed along
expect(json_response['labels']).to eq []
end
@@ -2063,9 +2271,7 @@ describe API::MergeRequests do
get api("/projects/#{project.id}/merge_requests/#{mr.iid}/closes_issues", user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+ expect_successful_response_with_paginated_array
expect(json_response.length).to eq(1)
expect(json_response.first['id']).to eq(issue.id)
end
@@ -2073,10 +2279,7 @@ describe API::MergeRequests do
it 'returns an empty array when there are no issues to be closed' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/closes_issues", user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
+ expect_empty_array_response
end
it 'handles external issues' do
@@ -2089,9 +2292,7 @@ describe API::MergeRequests do
get api("/projects/#{jira_project.id}/merge_requests/#{merge_request.iid}/closes_issues", user)
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+ expect_successful_response_with_paginated_array
expect(json_response.length).to eq(2)
expect(json_response.second['title']).to eq(ext_issue.title)
expect(json_response.second['id']).to eq(ext_issue.id)
@@ -2303,22 +2504,4 @@ describe API::MergeRequests do
merge_request_closed.save
merge_request_closed
end
-
- def expect_response_contain_exactly(*items)
- expect_paginated_array_response
- expect(json_response.length).to eq(items.size)
- expect(json_response.map { |element| element['id'] }).to contain_exactly(*items.map(&:id))
- end
-
- def expect_response_ordered_exactly(*items)
- expect_paginated_array_response
- expect(json_response.length).to eq(items.size)
- expect(json_response.map { |element| element['id'] }).to eq(items.map(&:id))
- end
-
- def expect_paginated_array_response
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- end
end
diff --git a/spec/requests/api/pages/internal_access_spec.rb b/spec/requests/api/pages/internal_access_spec.rb
index 821a210a414..91139b987df 100644
--- a/spec/requests/api/pages/internal_access_spec.rb
+++ b/spec/requests/api/pages/internal_access_spec.rb
@@ -6,16 +6,15 @@ describe "Internal Project Pages Access" do
using RSpec::Parameterized::TableSyntax
include AccessMatchers
- set(:group) { create(:group) }
- set(:project) { create(:project, :internal, pages_access_level: ProjectFeature::ENABLED, namespace: group) }
-
- set(:admin) { create(:admin) }
- set(:owner) { create(:user) }
- set(:master) { create(:user) }
- set(:developer) { create(:user) }
- set(:reporter) { create(:user) }
- set(:guest) { create(:user) }
- set(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project, reload: true) { create(:project, :internal, pages_access_level: ProjectFeature::ENABLED, namespace: group) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:master) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:user) { create(:user) }
before do
allow(Gitlab.config.pages).to receive(:access_control).and_return(true)
diff --git a/spec/requests/api/pages/pages_spec.rb b/spec/requests/api/pages/pages_spec.rb
index 2085c509eff..62d43ecff16 100644
--- a/spec/requests/api/pages/pages_spec.rb
+++ b/spec/requests/api/pages/pages_spec.rb
@@ -32,7 +32,7 @@ describe API::Pages do
it 'returns 204' do
delete api("/projects/#{project.id}/pages", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it 'removes the pages' do
@@ -53,7 +53,7 @@ describe API::Pages do
it 'returns 204' do
delete api("/projects/#{project.id}/pages", admin)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
@@ -63,7 +63,7 @@ describe API::Pages do
delete api("/projects/#{id}/pages", admin)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/requests/api/pages/private_access_spec.rb b/spec/requests/api/pages/private_access_spec.rb
index ec84762b05a..7c592ccfd43 100644
--- a/spec/requests/api/pages/private_access_spec.rb
+++ b/spec/requests/api/pages/private_access_spec.rb
@@ -6,16 +6,15 @@ describe "Private Project Pages Access" do
using RSpec::Parameterized::TableSyntax
include AccessMatchers
- set(:group) { create(:group) }
- set(:project) { create(:project, :private, pages_access_level: ProjectFeature::ENABLED, namespace: group) }
-
- set(:admin) { create(:admin) }
- set(:owner) { create(:user) }
- set(:master) { create(:user) }
- set(:developer) { create(:user) }
- set(:reporter) { create(:user) }
- set(:guest) { create(:user) }
- set(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project, reload: true) { create(:project, :private, pages_access_level: ProjectFeature::ENABLED, namespace: group) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:master) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:user) { create(:user) }
before do
allow(Gitlab.config.pages).to receive(:access_control).and_return(true)
diff --git a/spec/requests/api/pages/public_access_spec.rb b/spec/requests/api/pages/public_access_spec.rb
index 67b8cfb8fbc..f2fe64434c6 100644
--- a/spec/requests/api/pages/public_access_spec.rb
+++ b/spec/requests/api/pages/public_access_spec.rb
@@ -6,16 +6,15 @@ describe "Public Project Pages Access" do
using RSpec::Parameterized::TableSyntax
include AccessMatchers
- set(:group) { create(:group) }
- set(:project) { create(:project, :public, pages_access_level: ProjectFeature::ENABLED, namespace: group) }
-
- set(:admin) { create(:admin) }
- set(:owner) { create(:user) }
- set(:master) { create(:user) }
- set(:developer) { create(:user) }
- set(:reporter) { create(:user) }
- set(:guest) { create(:user) }
- set(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project, reload: true) { create(:project, :public, pages_access_level: ProjectFeature::ENABLED, namespace: group) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:master) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:user) { create(:user) }
before do
allow(Gitlab.config.pages).to receive(:access_control).and_return(true)
diff --git a/spec/requests/api/pipeline_schedules_spec.rb b/spec/requests/api/pipeline_schedules_spec.rb
index 5c8ccce2e37..fdb9508ed08 100644
--- a/spec/requests/api/pipeline_schedules_spec.rb
+++ b/spec/requests/api/pipeline_schedules_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe API::PipelineSchedules do
- set(:developer) { create(:user) }
- set(:user) { create(:user) }
- set(:project) { create(:project, :repository, public_builds: false) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, public_builds: false) }
before do
project.add_developer(developer)
@@ -322,10 +322,60 @@ describe API::PipelineSchedules do
end
end
+ describe 'POST /projects/:id/pipeline_schedules/:pipeline_schedule_id/play' do
+ let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) }
+
+ let(:route) { ->(id) { "/projects/#{project.id}/pipeline_schedules/#{id}/play" } }
+
+ context 'authenticated user with `:play_pipeline_schedule` permission' do
+ it 'schedules a pipeline worker' do
+ project.add_developer(developer)
+
+ expect(RunPipelineScheduleWorker)
+ .to receive(:perform_async)
+ .with(pipeline_schedule.id, developer.id)
+ .and_call_original
+ post api(route[pipeline_schedule.id], developer)
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+
+ it 'renders an error if scheduling failed' do
+ project.add_developer(developer)
+
+ expect(RunPipelineScheduleWorker)
+ .to receive(:perform_async)
+ .with(pipeline_schedule.id, developer.id)
+ .and_return(nil)
+ post api(route[pipeline_schedule.id], developer)
+
+ expect(response).to have_gitlab_http_status(:internal_server_error)
+ end
+ end
+
+ context 'authenticated user with insufficient access' do
+ it 'responds with not found' do
+ project.add_guest(user)
+
+ post api(route[pipeline_schedule.id], user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'unauthenticated user' do
+ it 'responds with unauthorized' do
+ post api(route[pipeline_schedule.id])
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
describe 'POST /projects/:id/pipeline_schedules/:pipeline_schedule_id/variables' do
let(:params) { attributes_for(:ci_pipeline_schedule_variable) }
- set(:pipeline_schedule) do
+ let_it_be(:pipeline_schedule) do
create(:ci_pipeline_schedule, project: project, owner: developer)
end
@@ -382,7 +432,7 @@ describe API::PipelineSchedules do
end
describe 'PUT /projects/:id/pipeline_schedules/:pipeline_schedule_id/variables/:key' do
- set(:pipeline_schedule) do
+ let_it_be(:pipeline_schedule) do
create(:ci_pipeline_schedule, project: project, owner: developer)
end
@@ -422,7 +472,7 @@ describe API::PipelineSchedules do
describe 'DELETE /projects/:id/pipeline_schedules/:pipeline_schedule_id/variables/:key' do
let(:maintainer) { create(:user) }
- set(:pipeline_schedule) do
+ let_it_be(:pipeline_schedule) do
create(:ci_pipeline_schedule, project: project, owner: developer)
end
diff --git a/spec/requests/api/project_container_repositories_spec.rb b/spec/requests/api/project_container_repositories_spec.rb
index d04db134db0..98b3416a2bc 100644
--- a/spec/requests/api/project_container_repositories_spec.rb
+++ b/spec/requests/api/project_container_repositories_spec.rb
@@ -5,12 +5,11 @@ require 'spec_helper'
describe API::ProjectContainerRepositories do
include ExclusiveLeaseHelpers
- set(:project) { create(:project, :private) }
- set(:maintainer) { create(:user) }
- set(:developer) { create(:user) }
- set(:reporter) { create(:user) }
- set(:guest) { create(:user) }
-
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:guest) { create(:user) }
let(:root_repository) { create(:container_repository, :root, project: project) }
let(:test_repository) { create(:container_repository, project: project) }
@@ -142,12 +141,14 @@ describe API::ProjectContainerRepositories do
let(:worker_params) do
{ name_regex: 'v10.*',
keep_n: 100,
- older_than: '1 day' }
+ older_than: '1 day',
+ container_expiration_policy: false }
end
let(:lease_key) { "container_repository:cleanup_tags:#{root_repository.id}" }
it 'schedules cleanup of tags repository' do
+ stub_last_activity_update
stub_exclusive_lease(lease_key, timeout: 1.hour)
expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
.with(maintainer.id, root_repository.id, worker_params)
diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb
index 37f2cc85a50..98214a8c471 100644
--- a/spec/requests/api/project_export_spec.rb
+++ b/spec/requests/api/project_export_spec.rb
@@ -3,13 +3,13 @@
require 'spec_helper'
describe API::ProjectExport, :clean_gitlab_redis_cache do
- set(:project) { create(:project) }
- set(:project_none) { create(:project) }
- set(:project_started) { create(:project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project_none) { create(:project) }
+ let_it_be(:project_started) { create(:project) }
let(:project_finished) { create(:project, :with_export) }
let(:project_after_export) { create(:project, :with_export) }
- set(:user) { create(:user) }
- set(:admin) { create(:admin) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
let(:path) { "/projects/#{project.id}/export" }
let(:path_none) { "/projects/#{project_none.id}/export" }
diff --git a/spec/requests/api/project_hooks_spec.rb b/spec/requests/api/project_hooks_spec.rb
index 06c09b100ac..b466bcb1a12 100644
--- a/spec/requests/api/project_hooks_spec.rb
+++ b/spec/requests/api/project_hooks_spec.rb
@@ -215,7 +215,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
expect(response).to have_gitlab_http_status(404)
end
- it "returns a 404 if a user attempts to delete project hooks he/she does not own" do
+ it "returns a 404 if a user attempts to delete project hooks they do not own" do
test_user = create(:user)
other_project = create(:project)
other_project.add_maintainer(test_user)
diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb
index 186f0f52a46..71dd8fee0ae 100644
--- a/spec/requests/api/project_import_spec.rb
+++ b/spec/requests/api/project_import_spec.rb
@@ -196,6 +196,19 @@ describe API::ProjectImport do
end
end
+ context 'when request exceeds the rate limit' do
+ before do
+ allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
+ end
+
+ it 'prevents users from importing projects' do
+ post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.id }
+
+ expect(response).to have_gitlab_http_status(429)
+ expect(json_response['message']['error']).to eq('This endpoint has been requested too many times. Try again later.')
+ end
+ end
+
def stub_import(namespace)
expect_any_instance_of(ProjectImportState).to receive(:schedule)
expect(::Projects::CreateService).to receive(:new).with(user, hash_including(namespace_id: namespace.id)).and_call_original
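The new rate-limit test above stubs Gitlab::ApplicationRateLimiter.throttled? to force the 429 path. The same shape, reduced to a self-contained sketch with hypothetical Limiter and ImportEndpoint stand-ins (not GitLab code), would be:

    require 'rspec/autorun'

    # Hypothetical stand-ins for the real rate limiter and API endpoint.
    module Limiter
      def self.throttled?(_key)
        false
      end
    end

    class ImportEndpoint
      TOO_MANY = 'This endpoint has been requested too many times. Try again later.'

      def post_import
        return [429, TOO_MANY] if Limiter.throttled?(:project_import)

        [201, 'created']
      end
    end

    RSpec.describe ImportEndpoint do
      it 'returns 429 when the limiter reports throttling' do
        allow(Limiter).to receive(:throttled?).and_return(true)

        status, message = described_class.new.post_import

        expect(status).to eq(429)
        expect(message).to match(/requested too many times/)
      end
    end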
diff --git a/spec/requests/api/project_snippets_spec.rb b/spec/requests/api/project_snippets_spec.rb
index bfb6f10efa3..2c6a13efc12 100644
--- a/spec/requests/api/project_snippets_spec.rb
+++ b/spec/requests/api/project_snippets_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe API::ProjectSnippets do
- set(:project) { create(:project, :public) }
- set(:user) { create(:user) }
- set(:admin) { create(:admin) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
describe "GET /projects/:project_id/snippets/:id/user_agent_detail" do
let(:snippet) { create(:project_snippet, :public, project: project) }
@@ -179,7 +179,7 @@ describe API::ProjectSnippets do
end
before do
- allow_next_instance_of(AkismetService) do |instance|
+ allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive(:spam?).and_return(true)
end
end
@@ -271,7 +271,7 @@ describe API::ProjectSnippets do
end
before do
- allow_next_instance_of(AkismetService) do |instance|
+ allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive(:spam?).and_return(true)
end
end
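The Akismet hunks above rely on GitLab's allow_next_instance_of helper to stub whatever instance a class builds next. A rough approximation of that idea in plain RSpec, with a hypothetical SpamCheck class standing in for Spam::AkismetService, is to wrap .new:

    require 'rspec/autorun'

    # Hypothetical service standing in for Spam::AkismetService.
    class SpamCheck
      def spam?(_text)
        false
      end
    end

    RSpec.describe 'stubbing the next instance (sketch)' do
      it 'flags content as spam via the stubbed instance' do
        # Roughly what allow_next_instance_of does: intercept .new and
        # stub the instance it returns.
        allow(SpamCheck).to receive(:new).and_wrap_original do |original, *args|
          original.call(*args).tap do |instance|
            allow(instance).to receive(:spam?).and_return(true)
          end
        end

        expect(SpamCheck.new.spam?('anything')).to be(true)
      end
    end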
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index fce49d0248c..97b61b44856 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -6,8 +6,6 @@ shared_examples 'languages and percentages JSON response' do
let(:expected_languages) { project.repository.languages.map { |language| language.values_at(:label, :value)}.to_h }
before do
- allow(DetectRepositoryLanguagesWorker).to receive(:perform_async).and_call_original
-
allow(project.repository).to receive(:languages).and_return(
[{ value: 66.69, label: "Ruby", color: "#701516", highlight: "#701516" },
{ value: 22.98, label: "JavaScript", color: "#f1e05a", highlight: "#f1e05a" },
@@ -812,7 +810,7 @@ describe API::Projects do
post api('/projects', user), params: project
- expect(json_response['readme_url']).to eql("#{Gitlab.config.gitlab.url}/#{json_response['namespace']['full_path']}/somewhere/blob/master/README.md")
+ expect(json_response['readme_url']).to eql("#{Gitlab.config.gitlab.url}/#{json_response['namespace']['full_path']}/somewhere/-/blob/master/README.md")
end
it 'sets tag list to a project' do
@@ -1340,6 +1338,7 @@ describe API::Projects do
expect(json_response['path']).to be_present
expect(json_response['issues_enabled']).to be_present
expect(json_response['merge_requests_enabled']).to be_present
+ expect(json_response['can_create_merge_request_in']).to be_present
expect(json_response['wiki_enabled']).to be_present
expect(json_response['jobs_enabled']).to be_present
expect(json_response['snippets_enabled']).to be_present
@@ -1390,15 +1389,18 @@ describe API::Projects do
expect(json_response['path']).to be_present
expect(json_response['issues_enabled']).to be_present
expect(json_response['merge_requests_enabled']).to be_present
+ expect(json_response['can_create_merge_request_in']).to be_present
expect(json_response['wiki_enabled']).to be_present
expect(json_response['jobs_enabled']).to be_present
expect(json_response['snippets_enabled']).to be_present
expect(json_response['snippets_access_level']).to be_present
+ expect(json_response['pages_access_level']).to be_present
expect(json_response['repository_access_level']).to be_present
expect(json_response['issues_access_level']).to be_present
expect(json_response['merge_requests_access_level']).to be_present
expect(json_response['wiki_access_level']).to be_present
expect(json_response['builds_access_level']).to be_present
+ expect(json_response).to have_key('emails_disabled')
expect(json_response['resolve_outdated_diff_discussions']).to eq(project.resolve_outdated_diff_discussions)
expect(json_response['remove_source_branch_after_merge']).to be_truthy
expect(json_response['container_registry_enabled']).to be_present
@@ -1409,18 +1411,18 @@ describe API::Projects do
expect(json_response['namespace']).to be_present
expect(json_response['import_status']).to be_present
expect(json_response).to include("import_error")
- expect(json_response['avatar_url']).to be_nil
+ expect(json_response).to have_key('avatar_url')
expect(json_response['star_count']).to be_present
expect(json_response['forks_count']).to be_present
expect(json_response['public_jobs']).to be_present
- expect(json_response['ci_config_path']).to be_nil
+ expect(json_response).to have_key('ci_config_path')
expect(json_response['shared_with_groups']).to be_an Array
expect(json_response['shared_with_groups'].length).to eq(1)
expect(json_response['shared_with_groups'][0]['group_id']).to eq(group.id)
expect(json_response['shared_with_groups'][0]['group_name']).to eq(group.name)
expect(json_response['shared_with_groups'][0]['group_full_path']).to eq(group.full_path)
expect(json_response['shared_with_groups'][0]['group_access_level']).to eq(link.group_access)
- expect(json_response['shared_with_groups'][0]['expires_at']).to be_nil
+ expect(json_response['shared_with_groups'][0]).to have_key('expires_at')
expect(json_response['only_allow_merge_if_pipeline_succeeds']).to eq(project.only_allow_merge_if_pipeline_succeeds)
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
expect(json_response['ci_default_git_depth']).to eq(project.ci_default_git_depth)
@@ -2230,6 +2232,26 @@ describe API::Projects do
expect(json_response['builds_access_level']).to eq('private')
end
+ it 'updates pages_access_level' do
+ project_param = { pages_access_level: 'private' }
+
+ put api("/projects/#{project3.id}", user), params: project_param
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ expect(json_response['pages_access_level']).to eq('private')
+ end
+
+ it 'updates emails_disabled' do
+ project_param = { emails_disabled: true }
+
+ put api("/projects/#{project3.id}", user), params: project_param
+
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(json_response['emails_disabled']).to eq(true)
+ end
+
it 'updates build_git_strategy' do
project_param = { build_git_strategy: 'clone' }
diff --git a/spec/requests/api/protected_branches_spec.rb b/spec/requests/api/protected_branches_spec.rb
index 67ce704b3f3..8499a165d8b 100644
--- a/spec/requests/api/protected_branches_spec.rb
+++ b/spec/requests/api/protected_branches_spec.rb
@@ -12,18 +12,18 @@ describe API::ProtectedBranches do
end
describe "GET /projects/:id/protected_branches" do
+ let(:params) { {} }
let(:route) { "/projects/#{project.id}/protected_branches" }
shared_examples_for 'protected branches' do
it 'returns the protected branches' do
- get api(route, user), params: { per_page: 100 }
+ get api(route, user), params: params.merge(per_page: 100)
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
protected_branch_names = json_response.map { |x| x['name'] }
- expected_branch_names = project.protected_branches.map { |x| x['name'] }
expect(protected_branch_names).to match_array(expected_branch_names)
end
end
@@ -33,7 +33,19 @@ describe API::ProtectedBranches do
project.add_maintainer(user)
end
- it_behaves_like 'protected branches'
+ context 'when search param is not present' do
+ it_behaves_like 'protected branches' do
+ let(:expected_branch_names) { project.protected_branches.map { |x| x['name'] } }
+ end
+ end
+
+ context 'when search param is present' do
+ it_behaves_like 'protected branches' do
+ let(:another_protected_branch) { create(:protected_branch, project: project, name: 'stable') }
+ let(:params) { { search: another_protected_branch.name } }
+ let(:expected_branch_names) { [another_protected_branch.name] }
+ end
+ end
end
context 'when authenticated as a guest' do
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index d3fe4c22b1d..12fd9f431e5 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -9,6 +9,7 @@ describe API::Releases do
let(:guest) { create(:user) }
let(:non_project_member) { create(:user) }
let(:commit) { create(:commit, project: project) }
+ let(:last_release) { project.releases.last }
before do
project.add_maintainer(maintainer)
@@ -60,9 +61,9 @@ describe API::Releases do
it 'returns rendered helper paths' do
get api("/projects/#{project.id}/releases", maintainer)
- expect(json_response.first['commit_path']).to eq("/#{release_2.project.full_path}/commit/#{release_2.commit.id}")
+ expect(json_response.first['commit_path']).to eq("/#{release_2.project.full_path}/-/commit/#{release_2.commit.id}")
expect(json_response.first['tag_path']).to eq("/#{release_2.project.full_path}/-/tags/#{release_2.tag}")
- expect(json_response.second['commit_path']).to eq("/#{release_1.project.full_path}/commit/#{release_1.commit.id}")
+ expect(json_response.second['commit_path']).to eq("/#{release_1.project.full_path}/-/commit/#{release_1.commit.id}")
expect(json_response.second['tag_path']).to eq("/#{release_1.project.full_path}/-/tags/#{release_1.tag}")
end
@@ -76,7 +77,7 @@ describe API::Releases do
mr_uri = URI.parse(links['merge_requests_url'])
issue_uri = URI.parse(links['issues_url'])
- expect(mr_uri.path).to eq("#{path_base}/merge_requests")
+ expect(mr_uri.path).to eq("#{path_base}/-/merge_requests")
expect(issue_uri.path).to eq("#{path_base}/issues")
expect(mr_uri.query).to eq(expected_query)
expect(issue_uri.query).to eq(expected_query)
@@ -163,7 +164,7 @@ describe API::Releases do
expect(response).to match_response_schema('public_api/v4/releases')
expect(json_response.first['assets']['count']).to eq(release.links.count + release.sources.count)
- expect(json_response.first['commit_path']).to eq("/#{release.project.full_path}/commit/#{release.commit.id}")
+ expect(json_response.first['commit_path']).to eq("/#{release.project.full_path}/-/commit/#{release.commit.id}")
expect(json_response.first['tag_path']).to eq("/#{release.project.full_path}/-/tags/#{release.tag}")
end
end
@@ -213,7 +214,7 @@ describe API::Releases do
expect(json_response['author']['name']).to eq(maintainer.name)
expect(json_response['commit']['id']).to eq(commit.id)
expect(json_response['assets']['count']).to eq(4)
- expect(json_response['commit_path']).to eq("/#{release.project.full_path}/commit/#{release.commit.id}")
+ expect(json_response['commit_path']).to eq("/#{release.project.full_path}/-/commit/#{release.commit.id}")
expect(json_response['tag_path']).to eq("/#{release.project.full_path}/-/tags/#{release.tag}")
end
@@ -709,6 +710,109 @@ describe API::Releases do
expect(response).to have_gitlab_http_status(:conflict)
end
end
+
+ context 'Evidence collection' do
+ let(:params) do
+ {
+ name: 'New release',
+ tag_name: 'v0.1',
+ description: 'Super nice release',
+ released_at: released_at
+ }.compact
+ end
+
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+ subject do
+ post api("/projects/#{project.id}/releases", maintainer), params: params
+ end
+
+ context 'historical release' do
+ let(:released_at) { 3.weeks.ago }
+
+ it 'does not execute CreateEvidenceWorker' do
+ expect { subject }.not_to change(CreateEvidenceWorker.jobs, :size)
+ end
+
+ it 'does not create an Evidence object', :sidekiq_inline do
+ expect { subject }.not_to change(Evidence, :count)
+ end
+
+ it 'is a historical release' do
+ subject
+
+ expect(last_release.historical_release?).to be_truthy
+ end
+
+ it 'is not an upcoming release' do
+ subject
+
+ expect(last_release.upcoming_release?).to be_falsy
+ end
+ end
+
+ context 'immediate release' do
+ let(:released_at) { nil }
+
+ it 'sets `released_at` to the current datetime' do
+ subject
+
+ expect(last_release.updated_at).to be_like_time(Time.now)
+ end
+
+ it 'queues CreateEvidenceWorker' do
+ expect { subject }.to change(CreateEvidenceWorker.jobs, :size).by(1)
+ end
+
+ it 'creates Evidence', :sidekiq_inline do
+ expect { subject }.to change(Evidence, :count).by(1)
+ end
+
+ it 'is not a historical release' do
+ subject
+
+ expect(last_release.historical_release?).to be_falsy
+ end
+
+ it 'is not an upcoming release' do
+ subject
+
+ expect(last_release.upcoming_release?).to be_falsy
+ end
+ end
+
+ context 'upcoming release' do
+ let(:released_at) { 1.day.from_now }
+
+ it 'queues CreateEvidenceWorker' do
+ expect { subject }.to change(CreateEvidenceWorker.jobs, :size).by(1)
+ end
+
+ it 'queues CreateEvidenceWorker at the released_at timestamp' do
+ subject
+
+ expect(CreateEvidenceWorker.jobs.last['at']).to eq(released_at.to_i)
+ end
+
+ it 'creates Evidence', :sidekiq_inline do
+ expect { subject }.to change(Evidence, :count).by(1)
+ end
+
+ it 'is not a historical release' do
+ subject
+
+ expect(last_release.historical_release?).to be_falsy
+ end
+
+ it 'is an upcoming release' do
+ subject
+
+ expect(last_release.upcoming_release?).to be_truthy
+ end
+ end
+ end
end
describe 'PUT /projects/:id/releases/:tag_name' do
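The Evidence-collection examples above wrap each example in Timecop.freeze via an around hook so that released_at comparisons against the current time are exact. In isolation the pattern looks like this (timecop is a separate gem):

    require 'rspec/autorun'
    require 'timecop' # gem 'timecop'

    RSpec.describe 'frozen-clock examples (sketch)' do
      # Freeze the clock for the duration of each example so that
      # comparisons against Time.now are deterministic.
      around do |example|
        Timecop.freeze { example.run }
      end

      it 'sees a stable current time' do
        captured = Time.now

        expect(captured).to eq(Time.now)
      end
    end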
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index ba301147d43..8bca458bece 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -362,6 +362,29 @@ describe API::Repositories do
expect(json_response['diffs']).to be_empty
expect(json_response['compare_same_ref']).to be_truthy
end
+
+ it "returns an empty string when the diff overflows" do
+ stub_const('Gitlab::Git::DiffCollection::DEFAULT_LIMITS', { max_files: 2, max_lines: 2 })
+
+ get api(route, current_user), params: { from: 'master', to: 'feature' }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['commits']).to be_present
+ expect(json_response['diffs']).to be_present
+ expect(json_response['diffs'].first['diff']).to be_empty
+ end
+
+ it "returns a 404 when from ref is unknown" do
+ get api(route, current_user), params: { from: 'unknown_ref', to: 'master' }
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+
+ it "returns a 404 when to ref is unknown" do
+ get api(route, current_user), params: { from: 'master', to: 'unknown_ref' }
+
+ expect(response).to have_gitlab_http_status(404)
+ end
end
context 'when unauthenticated', 'and project is public' do
diff --git a/spec/requests/api/resource_label_events_spec.rb b/spec/requests/api/resource_label_events_spec.rb
index 8bac378787c..7619399458a 100644
--- a/spec/requests/api/resource_label_events_spec.rb
+++ b/spec/requests/api/resource_label_events_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe API::ResourceLabelEvents do
- set(:user) { create(:user) }
- set(:project) { create(:project, :public, namespace: user.namespace) }
- set(:label) { create(:label, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :public, namespace: user.namespace) }
+ let_it_be(:label) { create(:label, project: project) }
before do
project.add_developer(user)
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index e3ba366dfcc..9d01a44916c 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -311,11 +311,33 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
describe '/api/v4/jobs' do
+ shared_examples 'application context metadata' do |api_route|
+ it 'contains correct context metadata' do
+ # Avoids popping the context from the thread so we can
+ # check its content after the request.
+ allow(Labkit::Context).to receive(:pop)
+
+ send_request
+
+ Labkit::Context.with_context do |context|
+ expected_context = {
+ 'meta.caller_id' => api_route,
+ 'meta.user' => job.user.username,
+ 'meta.project' => job.project.full_path,
+ 'meta.root_namespace' => job.project.full_path_components.first
+ }
+
+ expect(context.to_h).to include(expected_context)
+ end
+ end
+ end
+
let(:root_namespace) { create(:namespace) }
let(:namespace) { create(:namespace, parent: root_namespace) }
let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
let(:runner) { create(:ci_runner, :project, projects: [project]) }
+ let(:user) { create(:user) }
let(:job) do
create(:ci_build, :artifacts, :extended_options,
pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
@@ -984,12 +1006,18 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
describe 'PUT /api/v4/jobs/:id' do
- let(:job) { create(:ci_build, :pending, :trace_live, pipeline: pipeline, runner_id: runner.id) }
+ let(:job) do
+ create(:ci_build, :pending, :trace_live, pipeline: pipeline, project: project, user: user, runner_id: runner.id)
+ end
before do
job.run!
end
+ it_behaves_like 'application context metadata', '/api/:version/jobs/:id' do
+ let(:send_request) { update_job(state: 'success') }
+ end
+
context 'when status is given' do
it 'mark job as succeeded' do
update_job(state: 'success')
@@ -1139,7 +1167,10 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
describe 'PATCH /api/v4/jobs/:id/trace' do
- let(:job) { create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) }
+ let(:job) do
+ create(:ci_build, :running, :trace_live,
+ project: project, user: user, runner_id: runner.id, pipeline: pipeline)
+ end
let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
let(:update_interval) { 10.seconds.to_i }
@@ -1148,6 +1179,10 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
initial_patch_the_trace
end
+ it_behaves_like 'application context metadata', '/api/:version/jobs/:id/trace' do
+ let(:send_request) { patch_the_trace }
+ end
+
context 'when request is valid' do
it 'gets correct response' do
expect(response.status).to eq 202
@@ -1399,7 +1434,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
describe 'artifacts' do
- let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner_id: runner.id) }
+ let(:job) { create(:ci_build, :pending, user: user, project: project, pipeline: pipeline, runner_id: runner.id) }
let(:jwt_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt_token } }
let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
@@ -1418,12 +1453,16 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
authorize_artifacts_with_token_in_params
end
+ it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts/authorize' do
+ let(:send_request) { subject }
+ end
+
shared_examples 'authorizes local file' do
it 'succeeds' do
subject
expect(response).to have_gitlab_http_status(200)
- expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to be_nil
end
@@ -1443,7 +1482,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
subject
expect(response).to have_gitlab_http_status(200)
- expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response).not_to have_key('TempPath')
expect(json_response['RemoteObject']).to have_key('ID')
expect(json_response['RemoteObject']).to have_key('GetURL')
@@ -1519,7 +1558,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
authorize_artifacts_with_token_in_headers
expect(response).to have_gitlab_http_status(200)
- expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).not_to be_nil
end
@@ -1571,6 +1610,12 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
describe 'POST /api/v4/jobs/:id/artifacts' do
+ it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts' do
+ let(:send_request) do
+ upload_artifacts(file_upload, headers_with_token)
+ end
+ end
+
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow passing a temp file from any path
@@ -1607,7 +1652,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it_behaves_like 'successful artifacts upload'
end
- context 'for file stored remotelly' do
+ context 'for file stored remotely' do
let!(:fog_connection) do
stub_artifacts_object_storage(direct_upload: true)
end
@@ -1894,6 +1939,46 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
end
+ context 'when artifacts already exist for the job' do
+ let(:params) do
+ {
+ artifact_type: :archive,
+ artifact_format: :zip,
+ 'file.sha256' => uploaded_sha256
+ }
+ end
+
+ let(:existing_sha256) { '0' * 64 }
+
+ let!(:existing_artifact) do
+ create(:ci_job_artifact, :archive, file_sha256: existing_sha256, job: job)
+ end
+
+ context 'when sha256 is the same as the existing artifact' do
+ let(:uploaded_sha256) { existing_sha256 }
+
+ it 'ignores the new artifact' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
+ end
+ end
+
+ context 'when sha256 is different than the existing artifact' do
+ let(:uploaded_sha256) { '1' * 64 }
+
+ it 'logs and returns an error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
+ end
+ end
+ end
+
context 'when artifacts are being stored outside of tmp path' do
let(:new_tmpdir) { Dir.mktmpdir }
@@ -1931,6 +2016,10 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
describe 'GET /api/v4/jobs/:id/artifacts' do
let(:token) { job.token }
+ it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts' do
+ let(:send_request) { download_artifact }
+ end
+
context 'when job has artifacts' do
let(:job) { create(:ci_build) }
let(:store) { JobArtifactUploader::Store::LOCAL }
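The 'application context metadata' shared examples above take the API route as a block argument and expect the including context to define send_request. The general RSpec shape, with illustrative names only, is:

    require 'rspec/autorun'

    RSpec.describe 'parameterised shared examples (sketch)' do
      shared_examples 'labelled request' do |label|
        # `label` arrives as a block argument; `perform` must be supplied
        # by the including context, mirroring `send_request` above.
        it "is labelled #{label}" do
          expect(perform).to eq(label)
        end
      end

      context 'a GET-style caller' do
        it_behaves_like 'labelled request', 'get' do
          let(:perform) { 'get' }
        end
      end

      context 'a PUT-style caller' do
        it_behaves_like 'labelled request', 'put' do
          let(:perform) { 'put' }
        end
      end
    end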
diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb
index 7bad30d107d..c54487a68fe 100644
--- a/spec/requests/api/runners_spec.rb
+++ b/spec/requests/api/runners_spec.rb
@@ -557,7 +557,7 @@ describe API::Runners do
end
describe 'GET /runners/:id/jobs' do
- set(:job_1) { create(:ci_build) }
+ let_it_be(:job_1) { create(:ci_build) }
let!(:job_2) { create(:ci_build, :running, runner: shared_runner, project: project) }
let!(:job_3) { create(:ci_build, :failed, runner: shared_runner, project: project) }
let!(:job_4) { create(:ci_build, :running, runner: project_runner, project: project) }
diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb
index 24d7f1e313c..04794b2ba58 100644
--- a/spec/requests/api/search_spec.rb
+++ b/spec/requests/api/search_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
describe API::Search do
- set(:user) { create(:user) }
- set(:group) { create(:group) }
- set(:project) { create(:project, :wiki_repo, :public, name: 'awesome project', group: group) }
- set(:repo_project) { create(:project, :public, :repository, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project, reload: true) { create(:project, :wiki_repo, :public, name: 'awesome project', group: group) }
+ let_it_be(:repo_project) { create(:project, :public, :repository, group: group) }
shared_examples 'response is correct' do |schema:, size: 1|
it { expect(response).to have_gitlab_http_status(200) }
diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb
index 08f58387bf8..323164f26f0 100644
--- a/spec/requests/api/services_spec.rb
+++ b/spec/requests/api/services_spec.rb
@@ -3,10 +3,10 @@
require "spec_helper"
describe API::Services do
- set(:user) { create(:user) }
- set(:user2) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
- set(:project) do
+ let_it_be(:project, reload: true) do
create(:project, creator_id: user.id, namespace: user.namespace)
end
@@ -127,21 +127,6 @@ describe API::Services do
expect(json_response['properties'].keys).to match_array(service_instance.api_field_names)
end
- it "returns empty hash or nil values if properties and data fields are empty" do
- # deprecated services are not valid for update
- initialized_service.update_attribute(:properties, {})
-
- if initialized_service.data_fields_present?
- initialized_service.data_fields.destroy
- initialized_service.reload
- end
-
- get api("/projects/#{project.id}/services/#{dashed_service}", user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response['properties'].values.compact).to be_empty
- end
-
it "returns error when authenticated but not a project owner" do
project.add_developer(user2)
get api("/projects/#{project.id}/services/#{dashed_service}", user2)
@@ -241,10 +226,42 @@ describe API::Services do
end
it 'accepts a username for update' do
- put api("/projects/#{project.id}/services/mattermost", user), params: params.merge(username: 'new_username')
+ put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(username: 'new_username')
expect(response).to have_gitlab_http_status(200)
expect(json_response['properties']['username']).to eq('new_username')
end
end
+
+ describe 'Microsoft Teams service' do
+ let(:service_name) { 'microsoft-teams' }
+ let(:params) do
+ {
+ webhook: 'https://hook.example.com',
+ branches_to_be_notified: 'default',
+ notify_only_broken_pipelines: false
+ }
+ end
+
+ before do
+ project.create_microsoft_teams_service(
+ active: true,
+ properties: params
+ )
+ end
+
+ it 'accepts branches_to_be_notified for update' do
+ put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(branches_to_be_notified: 'all')
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['properties']['branches_to_be_notified']).to eq('all')
+ end
+
+ it 'accepts notify_only_broken_pipelines for update' do
+ put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(notify_only_broken_pipelines: true)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['properties']['notify_only_broken_pipelines']).to eq(true)
+ end
+ end
end
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index af86ba86303..1a6bd4e6c0d 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe API::Settings, 'Settings' do
let(:user) { create(:user) }
- set(:admin) { create(:admin) }
+ let_it_be(:admin) { create(:admin) }
describe "GET /application/settings" do
it "returns application settings" do
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index f32be7a8765..21565265b99 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -89,8 +89,8 @@ describe API::Snippets do
end
describe 'GET /snippets/:id/raw' do
- set(:author) { create(:user) }
- set(:snippet) { create(:personal_snippet, :private, author: author) }
+ let_it_be(:author) { create(:user) }
+ let_it_be(:snippet) { create(:personal_snippet, :private, author: author) }
it 'requires authentication' do
get api("/snippets/#{snippet.id}", nil)
@@ -137,10 +137,10 @@ describe API::Snippets do
end
describe 'GET /snippets/:id' do
- set(:admin) { create(:user, :admin) }
- set(:author) { create(:user) }
- set(:private_snippet) { create(:personal_snippet, :private, author: author) }
- set(:internal_snippet) { create(:personal_snippet, :internal, author: author) }
+ let_it_be(:admin) { create(:user, :admin) }
+ let_it_be(:author) { create(:user) }
+ let_it_be(:private_snippet) { create(:personal_snippet, :private, author: author) }
+ let_it_be(:internal_snippet) { create(:personal_snippet, :internal, author: author) }
it 'requires authentication' do
get api("/snippets/#{private_snippet.id}", nil)
@@ -238,7 +238,7 @@ describe API::Snippets do
end
before do
- allow_next_instance_of(AkismetService) do |instance|
+ allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive(:spam?).and_return(true)
end
end
@@ -327,7 +327,7 @@ describe API::Snippets do
end
before do
- allow_next_instance_of(AkismetService) do |instance|
+ allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive(:spam?).and_return(true)
end
end
diff --git a/spec/requests/api/task_completion_status_spec.rb b/spec/requests/api/task_completion_status_spec.rb
index ee2531197b1..a2891e1d983 100644
--- a/spec/requests/api/task_completion_status_spec.rb
+++ b/spec/requests/api/task_completion_status_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe 'task completion status response' do
- set(:user) { create(:user) }
- set(:project) do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) do
create(:project, :public, creator_id: user.id, namespace: user.namespace)
end
diff --git a/spec/requests/api/triggers_spec.rb b/spec/requests/api/triggers_spec.rb
index d54d112cd9f..35e41f5ae52 100644
--- a/spec/requests/api/triggers_spec.rb
+++ b/spec/requests/api/triggers_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe API::Triggers do
- set(:user) { create(:user) }
- set(:user2) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
let!(:trigger_token) { 'secure_token' }
let!(:trigger_token_2) { 'secure_token_2' }
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 0a22a09b8a6..12ac601c013 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -77,6 +77,14 @@ describe API::Users do
expect(json_response.first.keys).not_to include 'highest_role'
end
+ it "does not return the current or last sign-in ip addresses" do
+ get api("/users"), params: { username: user.username }
+
+ expect(response).to match_response_schema('public_api/v4/user/basics')
+ expect(json_response.first.keys).not_to include 'current_sign_in_ip'
+ expect(json_response.first.keys).not_to include 'last_sign_in_ip'
+ end
+
context "when public level is restricted" do
before do
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
@@ -314,6 +322,14 @@ describe API::Users do
expect(json_response.keys).not_to include 'highest_role'
end
+ it "does not return the user's sign in IPs" do
+ get api("/users/#{user.id}", user)
+
+ expect(response).to match_response_schema('public_api/v4/user/basic')
+ expect(json_response.keys).not_to include 'current_sign_in_ip'
+ expect(json_response.keys).not_to include 'last_sign_in_ip'
+ end
+
context 'when authenticated as admin' do
it 'includes the `is_admin` field' do
get api("/users/#{user.id}", admin)
@@ -328,12 +344,34 @@ describe API::Users do
expect(response).to match_response_schema('public_api/v4/user/admin')
expect(json_response.keys).to include 'created_at'
end
+
it 'includes the `highest_role` field' do
get api("/users/#{user.id}", admin)
expect(response).to match_response_schema('public_api/v4/user/admin')
expect(json_response['highest_role']).to be(0)
end
+
+ context 'when user has not logged in' do
+ it 'does not include the sign in IPs' do
+ get api("/users/#{user.id}", admin)
+
+ expect(response).to match_response_schema('public_api/v4/user/admin')
+ expect(json_response).to include('current_sign_in_ip' => nil, 'last_sign_in_ip' => nil)
+ end
+ end
+
+ context 'when user has logged in' do
+ let_it_be(:signed_in_user) { create(:user, :with_sign_ins) }
+
+ it 'includes the sign in IPs' do
+ get api("/users/#{signed_in_user.id}", admin)
+
+ expect(response).to match_response_schema('public_api/v4/user/admin')
+ expect(json_response['current_sign_in_ip']).to eq('127.0.0.1')
+ expect(json_response['last_sign_in_ip']).to eq('127.0.0.1')
+ end
+ end
end
context 'for an anonymous user' do
@@ -423,7 +461,7 @@ describe API::Users do
end
it "creates user with optional attributes" do
- optional_attributes = { confirm: true }
+ optional_attributes = { confirm: true, theme_id: 2, color_scheme_id: 4 }
attributes = attributes_for(:user).merge(optional_attributes)
post api('/users', admin), params: attributes
@@ -538,6 +576,15 @@ describe API::Users do
expect(response).to have_gitlab_http_status(400)
end
+ it "doesn't create user with invalid optional attributes" do
+ optional_attributes = { theme_id: 50, color_scheme_id: 50 }
+ attributes = attributes_for(:user).merge(optional_attributes)
+
+ post api('/users', admin), params: attributes
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+
it 'returns 400 error if user does not validate' do
post api('/users', admin),
params: {
@@ -702,7 +749,7 @@ describe API::Users do
expect(user.email).to eq('new@email.com')
end
- it 'updates user with his own username' do
+ it 'updates user with their own username' do
put api("/users/#{user.id}", admin), params: { username: user.username }
expect(response).to have_gitlab_http_status(200)
@@ -740,6 +787,12 @@ describe API::Users do
expect(user.reload.external?).to be_truthy
end
+ it "private profile is false by default" do
+ put api("/users/#{user.id}", admin), params: {}
+
+ expect(user.reload.private_profile).to eq(false)
+ end
+
it "updates private profile" do
put api("/users/#{user.id}", admin), params: { private_profile: true }
@@ -747,14 +800,24 @@ describe API::Users do
expect(user.reload.private_profile).to eq(true)
end
- it "updates private profile when nil is given to false" do
- admin.update(private_profile: true)
+ it "updates private profile to false when nil is given" do
+ user.update(private_profile: true)
put api("/users/#{user.id}", admin), params: { private_profile: nil }
+ expect(response).to have_gitlab_http_status(200)
expect(user.reload.private_profile).to eq(false)
end
+ it "does not modify private profile when field is not provided" do
+ user.update(private_profile: true)
+
+ put api("/users/#{user.id}", admin), params: {}
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(user.reload.private_profile).to eq(true)
+ end
+
it "does not update admin status" do
put api("/users/#{admin_user.id}", admin), params: { can_create_group: false }
@@ -770,6 +833,34 @@ describe API::Users do
expect(user.reload.email).not_to eq('invalid email')
end
+ it "updates theme id" do
+ put api("/users/#{user.id}", admin), params: { theme_id: 5 }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(user.reload.theme_id).to eq(5)
+ end
+
+ it "does not update invalid theme id" do
+ put api("/users/#{user.id}", admin), params: { theme_id: 50 }
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(user.reload.theme_id).not_to eq(50)
+ end
+
+ it "updates color scheme id" do
+ put api("/users/#{user.id}", admin), params: { color_scheme_id: 5 }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(user.reload.color_scheme_id).to eq(5)
+ end
+
+ it "does not update invalid color scheme id" do
+ put api("/users/#{user.id}", admin), params: { color_scheme_id: 50 }
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(user.reload.color_scheme_id).not_to eq(50)
+ end
+
context 'when the current user is not an admin' do
it "is not available" do
expect do
@@ -858,6 +949,45 @@ describe API::Users do
end
end
+ describe "DELETE /users/:id/identities/:provider" do
+ let(:test_user) { create(:omniauth_user, provider: 'ldapmain') }
+
+ context 'when unauthenticated' do
+ it 'returns authentication error' do
+ delete api("/users/#{test_user.id}/identities/ldapmain")
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when authenticated' do
+ it 'deletes identity of given provider' do
+ expect do
+ delete api("/users/#{test_user.id}/identities/ldapmain", admin)
+ end.to change { test_user.identities.count }.by(-1)
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+
+ it_behaves_like '412 response' do
+ let(:request) { api("/users/#{test_user.id}/identities/ldapmain", admin) }
+ end
+
+ it 'returns 404 error if user not found' do
+ delete api("/users/0/identities/ldapmain", admin)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 User Not Found')
+ end
+
+ it 'returns 404 error if identity not found' do
+ delete api("/users/#{test_user.id}/identities/saml", admin)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Identity Not Found')
+ end
+ end
+ end
+
describe "POST /users/:id/keys" do
before do
admin
@@ -913,6 +1043,27 @@ describe API::Users do
end
end
+ describe 'GET /user/:user_id/keys' do
+ it 'returns 404 for non-existing user' do
+ get api("/users/#{not_existing_user_id}/keys")
+
+ expect(response).to have_gitlab_http_status(404)
+ expect(json_response['message']).to eq('404 User Not Found')
+ end
+
+ it 'returns array of ssh keys' do
+ user.keys << key
+ user.save
+
+ get api("/users/#{user.username}/keys")
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.first['title']).to eq(key.title)
+ end
+ end
+
describe 'DELETE /user/:id/keys/:key_id' do
before do
admin
@@ -2079,6 +2230,83 @@ describe API::Users do
end
end
+ describe "GET /users/:id/memberships" do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
+ let(:requesting_user) { create(:user) }
+
+ before_all do
+ project.add_guest(user)
+ group.add_guest(user)
+ end
+
+ it "responses with 403" do
+ get api("/users/#{user.id}/memberships", requesting_user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'requested by admin user' do
+ let(:requesting_user) { create(:user, :admin) }
+
+ it "responses successfully" do
+ get api("/users/#{user.id}/memberships", requesting_user)
+
+ aggregate_failures 'expect successful response including groups and projects' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/memberships')
+ expect(response).to include_pagination_headers
+ expect(json_response).to contain_exactly(
+ a_hash_including('source_type' => 'Project'),
+ a_hash_including('source_type' => 'Namespace')
+ )
+ end
+ end
+
+ it 'does not submit N+1 DB queries' do
+ # Avoid setup queries
+ get api("/users/#{user.id}/memberships", requesting_user)
+
+ control = ActiveRecord::QueryRecorder.new do
+ get api("/users/#{user.id}/memberships", requesting_user)
+ end
+
+ create_list(:project, 5).map { |project| project.add_guest(user) }
+
+ expect do
+ get api("/users/#{user.id}/memberships", requesting_user)
+ end.not_to exceed_query_limit(control)
+ end
+
+ context 'with type filter' do
+ it "only returns project memberships" do
+ get api("/users/#{user.id}/memberships?type=Project", requesting_user)
+
+ aggregate_failures do
+ expect(json_response).to contain_exactly(a_hash_including('source_type' => 'Project'))
+ expect(json_response).not_to include(a_hash_including('source_type' => 'Namespace'))
+ end
+ end
+
+ it "only returns group memberships" do
+ get api("/users/#{user.id}/memberships?type=Namespace", requesting_user)
+
+ aggregate_failures do
+ expect(json_response).to contain_exactly(a_hash_including('source_type' => 'Namespace'))
+ expect(json_response).not_to include(a_hash_including('source_type' => 'Project'))
+ end
+ end
+
+ it "recognizes unsupported types" do
+ get api("/users/#{user.id}/memberships?type=foo", requesting_user)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+ end
+
context "user activities", :clean_gitlab_redis_shared_state do
let!(:old_active_user) { create(:user, last_activity_on: Time.utc(2000, 1, 1)) }
let!(:newly_active_user) { create(:user, last_activity_on: 2.days.ago.midday) }
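The memberships N+1 test above records a baseline with ActiveRecord::QueryRecorder and later asserts with exceed_query_limit; both are GitLab-specific helpers. A portable sketch of the same guard simply counts SQL notifications (the count_queries helper below is made up for illustration):

    require 'active_support/notifications'

    # Count SQL statements issued while the block runs; roughly what
    # ActiveRecord::QueryRecorder does in the GitLab suite.
    def count_queries
      count = 0
      subscriber = ActiveSupport::Notifications.subscribe('sql.active_record') { |*_args| count += 1 }
      yield
      count
    ensure
      ActiveSupport::Notifications.unsubscribe(subscriber)
    end

    # Usage inside a spec, mirroring the memberships example:
    #   control = count_queries { get api(path, requesting_user) }
    #   create_list(:project, 5) { |p| p.add_guest(user) }
    #   expect(count_queries { get api(path, requesting_user) }).to be <= control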
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index 42b4bd71b88..381ad45d477 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -92,7 +92,7 @@ describe 'Git HTTP requests' do
it 'allows pulls' do
download(path, env) do |response|
expect(response).to have_gitlab_http_status(:ok)
- expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
end
end
@@ -101,14 +101,14 @@ describe 'Git HTTP requests' do
it 'allows pushes', :sidekiq_might_not_need_inline do
upload(path, env) do |response|
expect(response).to have_gitlab_http_status(:ok)
- expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
end
end
shared_examples_for 'project path without .git suffix' do
context "GET info/refs" do
- let(:path) { "/#{project_path}/info/refs" }
+ let(:path) { "/#{repository_path}/info/refs" }
context "when no params are added" do
before do
@@ -116,7 +116,7 @@ describe 'Git HTTP requests' do
end
it "redirects to the .git suffix version" do
- expect(response).to redirect_to("/#{project_path}.git/info/refs")
+ expect(response).to redirect_to("/#{repository_path}.git/info/refs")
end
end
@@ -128,7 +128,7 @@ describe 'Git HTTP requests' do
end
it "redirects to the .git suffix version" do
- expect(response).to redirect_to("/#{project_path}.git/info/refs?service=#{params[:service]}")
+ expect(response).to redirect_to("/#{repository_path}.git/info/refs?service=#{params[:service]}")
end
end
@@ -140,7 +140,7 @@ describe 'Git HTTP requests' do
end
it "redirects to the .git suffix version" do
- expect(response).to redirect_to("/#{project_path}.git/info/refs?service=#{params[:service]}")
+ expect(response).to redirect_to("/#{repository_path}.git/info/refs?service=#{params[:service]}")
end
end
@@ -159,13 +159,13 @@ describe 'Git HTTP requests' do
context "POST git-upload-pack" do
it "fails to find a route" do
- expect { clone_post(project_path) }.to raise_error(ActionController::RoutingError)
+ expect { clone_post(repository_path) }.to raise_error(ActionController::RoutingError)
end
end
context "POST git-receive-pack" do
it "fails to find a route" do
- expect { push_post(project_path) }.to raise_error(ActionController::RoutingError)
+ expect { push_post(repository_path) }.to raise_error(ActionController::RoutingError)
end
end
end
@@ -211,7 +211,7 @@ describe 'Git HTTP requests' do
end
it_behaves_like 'project path without .git suffix' do
- let(:project_path) { "#{user.namespace.path}/project.git-project" }
+ let(:repository_path) { "#{user.namespace.path}/project.git-project" }
end
end
end
@@ -509,7 +509,7 @@ describe 'Git HTTP requests' do
download(path, env) do
expect(response).to have_gitlab_http_status(:ok)
- expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
end
@@ -518,7 +518,7 @@ describe 'Git HTTP requests' do
upload(path, env) do
expect(response).to have_gitlab_http_status(:ok)
- expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
end
@@ -820,7 +820,7 @@ describe 'Git HTTP requests' do
end
it_behaves_like 'project path without .git suffix' do
- let(:project_path) { create(:project, :repository, :public, path: 'project.git-project').full_path }
+ let(:repository_path) { create(:project, :repository, :public, path: 'project.git-project').full_path }
end
context "retrieving an info/refs file" do
@@ -834,7 +834,7 @@ describe 'Git HTTP requests' do
Blob.decorate(Gitlab::Git::Blob.find(project.repository, 'master', 'bar/branch-test.txt'), project)
end
- get "/#{project.full_path}/blob/master/info/refs"
+ get "/#{project.full_path}/-/blob/master/info/refs"
end
it "returns the file" do
@@ -844,11 +844,11 @@ describe 'Git HTTP requests' do
context "when the file does not exist" do
before do
- get "/#{project.full_path}/blob/master/info/refs"
+ get "/#{project.full_path}/-/blob/master/info/refs"
end
it "redirects" do
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
@@ -890,7 +890,7 @@ describe 'Git HTTP requests' do
it "responds with status 200" do
clone_get(path, env) do |response|
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
diff --git a/spec/requests/jwt_controller_spec.rb b/spec/requests/jwt_controller_spec.rb
index 199c2dbe9ca..754ab3e6a45 100644
--- a/spec/requests/jwt_controller_spec.rb
+++ b/spec/requests/jwt_controller_spec.rb
@@ -15,12 +15,12 @@ describe JwtController do
context 'existing service' do
subject! { get '/jwt/auth', params: parameters }
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_gitlab_http_status(:ok) }
context 'returning custom http code' do
let(:service) { double(execute: { http_status: 505 }) }
- it { expect(response).to have_gitlab_http_status(505) }
+ it { expect(response).to have_gitlab_http_status(:http_version_not_supported) }
end
end
@@ -43,7 +43,7 @@ describe JwtController do
subject! { get '/jwt/auth', params: parameters, headers: headers }
- it { expect(response).to have_gitlab_http_status(401) }
+ it { expect(response).to have_gitlab_http_status(:unauthorized) }
end
context 'using personal access tokens' do
@@ -58,7 +58,7 @@ describe JwtController do
subject! { get '/jwt/auth', params: parameters, headers: headers }
it 'authenticates correctly' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(service_class).to have_received(:new).with(nil, user, ActionController::Parameters.new(parameters).permit!)
end
end
@@ -96,7 +96,7 @@ describe JwtController do
context 'without personal token' do
it 'rejects the authorization attempt' do
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(response.body).to include('You must use a personal access token with \'api\' scope for Git over HTTP')
end
end
@@ -106,7 +106,7 @@ describe JwtController do
let(:headers) { { authorization: credentials(user.username, access_token.token) } }
it 'accepts the authorization attempt' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -116,7 +116,7 @@ describe JwtController do
get '/jwt/auth', params: parameters, headers: headers
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -127,7 +127,7 @@ describe JwtController do
it 'rejects the authorization attempt' do
get '/jwt/auth', params: parameters, headers: headers
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(response.body).not_to include('You must use a personal access token with \'api\' scope for Git over HTTP')
end
end
@@ -139,7 +139,7 @@ describe JwtController do
end
get '/jwt/auth', params: parameters, headers: headers
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
expect(response.body).to include('You must use a personal access token with \'api\' scope for Git over HTTP')
end
end
@@ -150,7 +150,7 @@ describe JwtController do
it 'accepts the authorization attempt' do
get '/jwt/auth', params: parameters
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'allows read access' do
@@ -163,7 +163,7 @@ describe JwtController do
context 'unknown service' do
subject! { get '/jwt/auth', params: { service: 'unknown' } }
- it { expect(response).to have_gitlab_http_status(404) }
+ it { expect(response).to have_gitlab_http_status(:not_found) }
end
def credentials(login, password)
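Many hunks in this patch replace numeric status codes with the symbolic names that have_gitlab_http_status also accepts; those symbols resolve through Rack's standard status table, which can be inspected directly:

    require 'rack'

    # Rack ships the canonical symbol-to-code mapping used when a
    # matcher is given a status name instead of a number.
    table = Rack::Utils::SYMBOL_TO_STATUS_CODE

    puts table[:ok]                          # => 200
    puts table[:found]                       # => 302
    puts table[:forbidden]                   # => 403
    puts table[:conflict]                    # => 409
    puts table[:http_version_not_supported]  # => 505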
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 62b9ee1d361..c6403a6ab75 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -227,7 +227,7 @@ describe 'Git LFS API and storage' do
end
it 'responds with redirect' do
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
it 'responds with the file location' do
@@ -907,7 +907,7 @@ describe 'Git LFS API and storage' do
it_behaves_like 'LFS http 200 response'
it 'uses the gitlab-workhorse content type' do
- expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
end
@@ -1011,7 +1011,7 @@ describe 'Git LFS API and storage' do
it 'responds with status 403' do
subject
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -1027,7 +1027,7 @@ describe 'Git LFS API and storage' do
it 'responds with status 200' do
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
object = LfsObject.find_by_oid(sample_oid)
expect(object).to be_present
@@ -1070,7 +1070,7 @@ describe 'Git LFS API and storage' do
it 'rejects slashes in the tempfile name (path traversal)' do
put_finalize('../bar', with_tempfile: true)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -1193,8 +1193,8 @@ describe 'Git LFS API and storage' do
it_behaves_like 'LFS http 200 response'
- it 'LFS object is linked to the source project' do
- expect(lfs_object.projects.pluck(:id)).to include(upstream_project.id)
+ it 'LFS object is linked to the forked project' do
+ expect(lfs_object.projects.pluck(:id)).to include(project.id)
end
end
end
diff --git a/spec/requests/lfs_locks_api_spec.rb b/spec/requests/lfs_locks_api_spec.rb
index 41f54162266..41cf1a80205 100644
--- a/spec/requests/lfs_locks_api_spec.rb
+++ b/spec/requests/lfs_locks_api_spec.rb
@@ -23,7 +23,7 @@ describe 'Git LFS File Locking API' do
it 'returns a forbidden 403 response' do
post_lfs_json url, body, headers
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -51,7 +51,7 @@ describe 'Git LFS File Locking API' do
it 'return an error message' do
post_lfs_json url, body, headers
- expect(response).to have_gitlab_http_status(409)
+ expect(response).to have_gitlab_http_status(:conflict)
expect(json_response.keys).to match_array(%w(lock message documentation_url))
expect(json_response['message']).to match(/already locked/)
@@ -68,7 +68,7 @@ describe 'Git LFS File Locking API' do
it 'creates the lock' do
post_lfs_json url, body, headers
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['lock'].keys).to match_array(%w(id path locked_at owner))
end
@@ -87,7 +87,7 @@ describe 'Git LFS File Locking API' do
do_get url, nil, headers
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['locks'].size).to eq(2)
expect(json_response['locks'].first.keys).to match_array(%w(id path locked_at owner))
@@ -106,7 +106,7 @@ describe 'Git LFS File Locking API' do
post_lfs_json url, nil, headers
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['ours'].size).to eq(1)
expect(json_response['ours'].first['path']).to eq('README')
@@ -126,7 +126,7 @@ describe 'Git LFS File Locking API' do
it 'deletes the lock' do
post_lfs_json url, nil, headers
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns the deleted lock' do
@@ -142,7 +142,7 @@ describe 'Git LFS File Locking API' do
project.add_maintainer(maintainer)
post_lfs_json url, { force: true }, headers
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb
index bac1a4e18c8..d7c08484dc4 100644
--- a/spec/requests/openid_connect_spec.rb
+++ b/spec/requests/openid_connect_spec.rb
@@ -75,7 +75,7 @@ describe 'OpenID Connect requests' do
it 'userinfo response is unauthorized' do
request_user_info!
- expect(response).to have_gitlab_http_status 403
+ expect(response).to have_gitlab_http_status(:forbidden)
expect(response.body).to be_blank
end
end
@@ -177,7 +177,7 @@ describe 'OpenID Connect requests' do
it 'correctly returns the configuration' do
get '/.well-known/openid-configuration'
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['issuer']).to eq('http://localhost')
expect(json_response['jwks_uri']).to eq('http://www.example.com/oauth/discovery/keys')
expect(json_response['scopes_supported']).to eq(%w[api read_user read_repository write_repository sudo openid profile email])
diff --git a/spec/requests/profiles/notifications_controller_spec.rb b/spec/requests/profiles/notifications_controller_spec.rb
new file mode 100644
index 00000000000..41349d6c12d
--- /dev/null
+++ b/spec/requests/profiles/notifications_controller_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'view user notifications' do
+ let(:user) do
+ create(:user) do |user|
+ user.emails.create(email: 'original@example.com')
+ user.emails.create(email: 'new@example.com')
+ user.notification_email = 'original@example.com'
+ user.save!
+ end
+ end
+
+ before do
+ login_as(user)
+
+ create_list(:group, 2) do |group|
+ group.add_developer(user)
+ end
+ end
+
+ def get_profile_notifications
+ get profile_notifications_path
+ end
+
+ describe 'GET /profile/notifications' do
+ it 'avoids N+1 queries due to additional groups (with no parent group)' do
+ get_profile_notifications
+
+ control = ActiveRecord::QueryRecorder.new do
+ get_profile_notifications
+ end
+
+ create_list(:group, 2) { |group| group.add_developer(user) }
+
+ expect do
+ get_profile_notifications
+ end.not_to exceed_query_limit(control)
+ end
+ end
+end
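
The new spec above uses GitLab's QueryRecorder helpers to guard against N+1 queries. A minimal sketch of the pattern, with visit_page and add_more_records as hypothetical placeholders:

    # Record a baseline, grow the data set, then assert the query count stays flat.
    control = ActiveRecord::QueryRecorder.new { visit_page }   # baseline query count
    add_more_records                                           # e.g. create more groups
    expect { visit_page }.not_to exceed_query_limit(control)   # no N+1 regression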
diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb
index 93a1aafde23..773f243e733 100644
--- a/spec/requests/projects/cycle_analytics_events_spec.rb
+++ b/spec/requests/projects/cycle_analytics_events_spec.rb
@@ -2,12 +2,12 @@
require 'spec_helper'
-describe 'cycle analytics events' do
+describe 'value stream analytics events' do
let(:user) { create(:user) }
let(:project) { create(:project, :repository, public_builds: false) }
let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
- describe 'GET /:namespace/:project/cycle_analytics/events/issues' do
+ describe 'GET /:namespace/:project/value_stream_analytics/events/issues' do
before do
project.add_developer(user)
diff --git a/spec/requests/projects/merge_requests_discussions_spec.rb b/spec/requests/projects/merge_requests_discussions_spec.rb
index 5945561aa7b..ffc98d09e5c 100644
--- a/spec/requests/projects/merge_requests_discussions_spec.rb
+++ b/spec/requests/projects/merge_requests_discussions_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe 'merge requests discussions' do
# Further tests can be found at merge_requests_controller_spec.rb
- describe 'GET /:namespace/:project/merge_requests/:iid/discussions' do
+ describe 'GET /:namespace/:project/-/merge_requests/:iid/discussions' do
let(:project) { create(:project, :repository) }
let(:user) { project.owner }
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index 9968b2e4aba..da0ca4c197a 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -53,7 +53,7 @@ describe 'Rack Attack global throttles' do
# At first, allow requests under the rate limit.
requests_per_period.times do
get url_that_does_not_require_authentication
- expect(response).to have_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
# the last straw
@@ -63,7 +63,7 @@ describe 'Rack Attack global throttles' do
it 'allows requests after throttling and then waiting for the next period' do
requests_per_period.times do
get url_that_does_not_require_authentication
- expect(response).to have_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
expect_rejection { get url_that_does_not_require_authentication }
@@ -71,7 +71,7 @@ describe 'Rack Attack global throttles' do
Timecop.travel(period.from_now) do
requests_per_period.times do
get url_that_does_not_require_authentication
- expect(response).to have_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
expect_rejection { get url_that_does_not_require_authentication }
@@ -81,7 +81,7 @@ describe 'Rack Attack global throttles' do
it 'counts requests from different IPs separately' do
requests_per_period.times do
get url_that_does_not_require_authentication
- expect(response).to have_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
expect_next_instance_of(Rack::Attack::Request) do |instance|
@@ -90,14 +90,14 @@ describe 'Rack Attack global throttles' do
# would be over limit for the same IP
get url_that_does_not_require_authentication
- expect(response).to have_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
context 'when the request is to the api internal endpoints' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
get url_api_internal, params: { secret_token: Gitlab::Shell.secret_token }
- expect(response).to have_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -109,7 +109,7 @@ describe 'Rack Attack global throttles' do
it 'does not count as unauthenticated' do
(1 + requests_per_period).times do
post request_jobs_url, params: { token: runner.token }
- expect(response).to have_http_status 204
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
end
@@ -117,7 +117,7 @@ describe 'Rack Attack global throttles' do
it 'logs RackAttack info into structured logs' do
requests_per_period.times do
get url_that_does_not_require_authentication
- expect(response).to have_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
arguments = {
@@ -143,7 +143,7 @@ describe 'Rack Attack global throttles' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
get url_that_does_not_require_authentication
- expect(response).to have_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -243,7 +243,7 @@ describe 'Rack Attack global throttles' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
post protected_path_that_does_not_require_authentication, params: post_params
- expect(response).to have_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -257,7 +257,7 @@ describe 'Rack Attack global throttles' do
it 'rejects requests over the rate limit' do
requests_per_period.times do
post protected_path_that_does_not_require_authentication, params: post_params
- expect(response).to have_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
expect_rejection { post protected_path_that_does_not_require_authentication, params: post_params }
@@ -272,7 +272,7 @@ describe 'Rack Attack global throttles' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
post protected_path_that_does_not_require_authentication, params: post_params
- expect(response).to have_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -329,7 +329,7 @@ describe 'Rack Attack global throttles' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
post(*request_args)
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
end
end
@@ -369,7 +369,7 @@ describe 'Rack Attack global throttles' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
post url_that_requires_authentication
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
end
end
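
The Rack Attack hunks above all follow the same throttle-window pattern; a condensed sketch for orientation (url stands in for the endpoints exercised in the spec, the other names come from the surrounding examples):

    requests_per_period.times { get url }     # stay under the limit; each request is :ok
    expect_rejection { get url }              # the next request is throttled
    Timecop.travel(period.from_now) do        # move past the rate-limit window
      requests_per_period.times { get url }   # the counter has reset
    end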
diff --git a/spec/requests/self_monitoring_project_spec.rb b/spec/requests/self_monitoring_project_spec.rb
index d562a34aec4..6a0258c349f 100644
--- a/spec/requests/self_monitoring_project_spec.rb
+++ b/spec/requests/self_monitoring_project_spec.rb
@@ -17,11 +17,7 @@ describe 'Self-Monitoring project requests' do
login_as(admin)
end
- context 'with feature flag disabled' do
- it_behaves_like 'not accessible if feature flag is disabled'
- end
-
- context 'with feature flag enabled' do
+ context 'when the self monitoring project is created' do
let(:status_api) { status_create_self_monitoring_project_admin_application_settings_path }
it_behaves_like 'triggers async worker, returns sidekiq job_id with response accepted'
@@ -45,11 +41,7 @@ describe 'Self-Monitoring project requests' do
login_as(admin)
end
- context 'with feature flag disabled' do
- it_behaves_like 'not accessible if feature flag is disabled'
- end
-
- context 'with feature flag enabled' do
+ context 'when the self monitoring project is being created' do
it_behaves_like 'handles invalid job_id'
context 'when job is in progress' do
@@ -68,6 +60,8 @@ describe 'Self-Monitoring project requests' do
let(:job_id) { nil }
it 'returns bad_request' do
+ create(:application_setting)
+
subject
aggregate_failures do
@@ -81,11 +75,10 @@ describe 'Self-Monitoring project requests' do
end
context 'when self-monitoring project exists' do
- let(:project) { build(:project) }
+ let(:project) { create(:project) }
before do
- stub_application_setting(instance_administration_project_id: 1)
- stub_application_setting(instance_administration_project: project)
+ create(:application_setting, self_monitoring_project_id: project.id)
end
it 'does not need job_id' do
@@ -94,7 +87,7 @@ describe 'Self-Monitoring project requests' do
aggregate_failures do
expect(response).to have_gitlab_http_status(:success)
expect(json_response).to eq(
- 'project_id' => 1,
+ 'project_id' => project.id,
'project_full_path' => project.full_path
)
end
@@ -106,7 +99,7 @@ describe 'Self-Monitoring project requests' do
aggregate_failures do
expect(response).to have_gitlab_http_status(:success)
expect(json_response).to eq(
- 'project_id' => 1,
+ 'project_id' => project.id,
'project_full_path' => project.full_path
)
end
@@ -128,11 +121,7 @@ describe 'Self-Monitoring project requests' do
login_as(admin)
end
- context 'with feature flag disabled' do
- it_behaves_like 'not accessible if feature flag is disabled'
- end
-
- context 'with feature flag enabled' do
+ context 'when the self monitoring project is deleted' do
let(:status_api) { status_delete_self_monitoring_project_admin_application_settings_path }
it_behaves_like 'triggers async worker, returns sidekiq job_id with response accepted'
@@ -156,11 +145,7 @@ describe 'Self-Monitoring project requests' do
login_as(admin)
end
- context 'with feature flag disabled' do
- it_behaves_like 'not accessible if feature flag is disabled'
- end
-
- context 'with feature flag enabled' do
+ context 'when the self monitoring project is being deleted' do
it_behaves_like 'handles invalid job_id'
context 'when job is in progress' do
@@ -169,7 +154,7 @@ describe 'Self-Monitoring project requests' do
.with(job_id)
.and_return(true)
- stub_application_setting(instance_administration_project_id: 1)
+ stub_application_setting(self_monitoring_project_id: 1)
end
it_behaves_like 'sets polling header and returns accepted' do
@@ -179,7 +164,7 @@ describe 'Self-Monitoring project requests' do
context 'when self-monitoring project exists and job does not exist' do
before do
- stub_application_setting(instance_administration_project_id: 1)
+ create(:application_setting, self_monitoring_project_id: create(:project).id)
end
it 'returns bad_request' do
@@ -196,6 +181,10 @@ describe 'Self-Monitoring project requests' do
end
context 'when self-monitoring project does not exist' do
+ before do
+ create(:application_setting)
+ end
+
it 'does not need job_id' do
get status_delete_self_monitoring_project_admin_application_settings_path
diff --git a/spec/requests/user_activity_spec.rb b/spec/requests/user_activity_spec.rb
index 15666e00b9f..3cd4911098a 100644
--- a/spec/requests/user_activity_spec.rb
+++ b/spec/requests/user_activity_spec.rb
@@ -26,8 +26,8 @@ describe 'Update of user activity' do
'/dashboard/todos',
'/group/project/issues',
'/group/project/issues/10',
- '/group/project/merge_requests',
- '/group/project/merge_requests/15'
+ '/group/project/-/merge_requests',
+ '/group/project/-/merge_requests/15'
]
context 'without an authenticated user' do
diff --git a/spec/requests/user_avatar_spec.rb b/spec/requests/user_avatar_spec.rb
index 9451674161c..66c7ce4d533 100644
--- a/spec/requests/user_avatar_spec.rb
+++ b/spec/requests/user_avatar_spec.rb
@@ -19,7 +19,7 @@ describe 'Loading a user avatar' do
it 'only performs three SQL queries' do
get user.avatar_url # Skip queries on first application load
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect { get user.avatar_url }.not_to exceed_query_limit(3)
end
end
@@ -29,7 +29,7 @@ describe 'Loading a user avatar' do
it 'only performs two SQL queries' do
get user.avatar_url # Skip queries on first application load
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect { get user.avatar_url }.not_to exceed_query_limit(2)
end
end
diff --git a/spec/routing/admin/serverless/domains_controller_routing_spec.rb b/spec/routing/admin/serverless/domains_controller_routing_spec.rb
new file mode 100644
index 00000000000..18c0db6add1
--- /dev/null
+++ b/spec/routing/admin/serverless/domains_controller_routing_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Admin::Serverless::DomainsController do
+ it 'routes to #index' do
+ expect(get: '/admin/serverless/domains').to route_to('admin/serverless/domains#index')
+ end
+
+ it 'routes to #create' do
+ expect(post: '/admin/serverless/domains/').to route_to('admin/serverless/domains#create')
+ end
+
+ it 'routes to #update' do
+ expect(put: '/admin/serverless/domains/1').to route_to(controller: 'admin/serverless/domains', action: 'update', id: '1')
+ expect(patch: '/admin/serverless/domains/1').to route_to(controller: 'admin/serverless/domains', action: 'update', id: '1')
+ end
+
+ it 'routes to #verify' do
+ expect(post: '/admin/serverless/domains/1/verify').to route_to(controller: 'admin/serverless/domains', action: 'verify', id: '1')
+ end
+end
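
A minimal sketch of route declarations that would satisfy the new routing spec above (the shape is assumed for illustration, not taken from the actual config/routes files):

    namespace :admin do
      namespace :serverless do
        resources :domains, only: [:index, :create, :update] do
          member do
            post :verify
          end
        end
      end
    end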
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index efd7d3f3742..e503f1a4231 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -190,23 +190,23 @@ describe 'project routing' do
end
it 'to #archive_alternative' do
- expect(get('/gitlab/gitlabhq/repository/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', append_sha: true)
+ expect(get('/gitlab/gitlabhq/-/repository/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', append_sha: true)
end
it 'to #archive_deprecated' do
- expect(get('/gitlab/gitlabhq/repository/master/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master', append_sha: true)
+ expect(get('/gitlab/gitlabhq/-/repository/master/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master', append_sha: true)
end
it 'to #archive_deprecated format:zip' do
- expect(get('/gitlab/gitlabhq/repository/master/archive.zip')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'zip', id: 'master', append_sha: true)
+ expect(get('/gitlab/gitlabhq/-/repository/master/archive.zip')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'zip', id: 'master', append_sha: true)
end
it 'to #archive_deprecated format:tar.bz2' do
- expect(get('/gitlab/gitlabhq/repository/master/archive.tar.bz2')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'tar.bz2', id: 'master', append_sha: true)
+ expect(get('/gitlab/gitlabhq/-/repository/master/archive.tar.bz2')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'tar.bz2', id: 'master', append_sha: true)
end
it 'to #archive_deprecated with "/" in route' do
- expect(get('/gitlab/gitlabhq/repository/improve/awesome/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'improve/awesome', append_sha: true)
+ expect(get('/gitlab/gitlabhq/-/repository/improve/awesome/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'improve/awesome', append_sha: true)
end
end
@@ -269,20 +269,20 @@ describe 'project routing' do
# logs_file_project_ref GET /:project_id/refs/:id/logs_tree/:path(.:format) refs#logs_tree
describe Projects::RefsController, 'routing' do
it 'to #switch' do
- expect(get('/gitlab/gitlabhq/refs/switch')).to route_to('projects/refs#switch', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ expect(get('/gitlab/gitlabhq/-/refs/switch')).to route_to('projects/refs#switch', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
it 'to #logs_tree' do
- expect(get('/gitlab/gitlabhq/refs/stable/logs_tree')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'stable')
- expect(get('/gitlab/gitlabhq/refs/feature%2345/logs_tree')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature#45')
- expect(get('/gitlab/gitlabhq/refs/feature%2B45/logs_tree')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature+45')
- expect(get('/gitlab/gitlabhq/refs/feature@45/logs_tree')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature@45')
- expect(get('/gitlab/gitlabhq/refs/stable/logs_tree/foo/bar/baz')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'stable', path: 'foo/bar/baz')
- expect(get('/gitlab/gitlabhq/refs/feature%2345/logs_tree/foo/bar/baz')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature#45', path: 'foo/bar/baz')
- expect(get('/gitlab/gitlabhq/refs/feature%2B45/logs_tree/foo/bar/baz')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature+45', path: 'foo/bar/baz')
- expect(get('/gitlab/gitlabhq/refs/feature@45/logs_tree/foo/bar/baz')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature@45', path: 'foo/bar/baz')
- expect(get('/gitlab/gitlabhq/refs/stable/logs_tree/files.scss')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'stable', path: 'files.scss')
- assert_routing({ path: "/gitlab/gitlabhq/refs/stable/logs_tree/new%0A%0Aline.txt",
+ expect(get('/gitlab/gitlabhq/-/refs/stable/logs_tree')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'stable')
+ expect(get('/gitlab/gitlabhq/-/refs/feature%2345/logs_tree')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature#45')
+ expect(get('/gitlab/gitlabhq/-/refs/feature%2B45/logs_tree')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature+45')
+ expect(get('/gitlab/gitlabhq/-/refs/feature@45/logs_tree')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature@45')
+ expect(get('/gitlab/gitlabhq/-/refs/stable/logs_tree/foo/bar/baz')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'stable', path: 'foo/bar/baz')
+ expect(get('/gitlab/gitlabhq/-/refs/feature%2345/logs_tree/foo/bar/baz')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature#45', path: 'foo/bar/baz')
+ expect(get('/gitlab/gitlabhq/-/refs/feature%2B45/logs_tree/foo/bar/baz')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature+45', path: 'foo/bar/baz')
+ expect(get('/gitlab/gitlabhq/-/refs/feature@45/logs_tree/foo/bar/baz')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature@45', path: 'foo/bar/baz')
+ expect(get('/gitlab/gitlabhq/-/refs/stable/logs_tree/files.scss')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'stable', path: 'files.scss')
+ assert_routing({ path: "/gitlab/gitlabhq/-/refs/stable/logs_tree/new%0A%0Aline.txt",
method: :get },
{ controller: 'projects/refs', action: 'logs_tree',
namespace_id: 'gitlab', project_id: 'gitlabhq',
@@ -292,26 +292,26 @@ describe 'project routing' do
describe Projects::MergeRequestsController, 'routing' do
it 'to #commits' do
- expect(get('/gitlab/gitlabhq/merge_requests/1/commits.json')).to route_to('projects/merge_requests#commits', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'json')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/1/commits.json')).to route_to('projects/merge_requests#commits', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'json')
end
it 'to #pipelines' do
- expect(get('/gitlab/gitlabhq/merge_requests/1/pipelines.json')).to route_to('projects/merge_requests#pipelines', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'json')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/1/pipelines.json')).to route_to('projects/merge_requests#pipelines', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'json')
end
it 'to #merge' do
- expect(post('/gitlab/gitlabhq/merge_requests/1/merge')).to route_to(
+ expect(post('/gitlab/gitlabhq/-/merge_requests/1/merge')).to route_to(
'projects/merge_requests#merge',
namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1'
)
end
it 'to #show' do
- expect(get('/gitlab/gitlabhq/merge_requests/1.diff')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'diff')
- expect(get('/gitlab/gitlabhq/merge_requests/1.patch')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'patch')
- expect(get('/gitlab/gitlabhq/merge_requests/1/diffs')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', tab: 'diffs')
- expect(get('/gitlab/gitlabhq/merge_requests/1/commits')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', tab: 'commits')
- expect(get('/gitlab/gitlabhq/merge_requests/1/pipelines')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', tab: 'pipelines')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/1.diff')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'diff')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/1.patch')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'patch')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/1/diffs')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', tab: 'diffs')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/1/commits')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', tab: 'commits')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/1/pipelines')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', tab: 'pipelines')
end
it 'to #show from scoped route' do
@@ -323,46 +323,52 @@ describe 'project routing' do
it_behaves_like 'RESTful project resources' do
let(:controller) { 'merge_requests' }
let(:actions) { [:index, :edit, :show, :update] }
+ let(:controller_path) { '/-/merge_requests' }
end
+
+ it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/merge_requests", "/gitlab/gitlabhq/-/merge_requests"
+ it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/merge_requests/1/diffs", "/gitlab/gitlabhq/-/merge_requests/1/diffs"
end
describe Projects::MergeRequests::CreationsController, 'routing' do
it 'to #new' do
- expect(get('/gitlab/gitlabhq/merge_requests/new')).to route_to('projects/merge_requests/creations#new', namespace_id: 'gitlab', project_id: 'gitlabhq')
- expect(get('/gitlab/gitlabhq/merge_requests/new/diffs')).to route_to('projects/merge_requests/creations#new', namespace_id: 'gitlab', project_id: 'gitlabhq', tab: 'diffs')
- expect(get('/gitlab/gitlabhq/merge_requests/new/pipelines')).to route_to('projects/merge_requests/creations#new', namespace_id: 'gitlab', project_id: 'gitlabhq', tab: 'pipelines')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/new')).to route_to('projects/merge_requests/creations#new', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/new/diffs')).to route_to('projects/merge_requests/creations#new', namespace_id: 'gitlab', project_id: 'gitlabhq', tab: 'diffs')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/new/pipelines')).to route_to('projects/merge_requests/creations#new', namespace_id: 'gitlab', project_id: 'gitlabhq', tab: 'pipelines')
end
it 'to #create' do
- expect(post('/gitlab/gitlabhq/merge_requests')).to route_to('projects/merge_requests/creations#create', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ expect(post('/gitlab/gitlabhq/-/merge_requests')).to route_to('projects/merge_requests/creations#create', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
it 'to #branch_from' do
- expect(get('/gitlab/gitlabhq/merge_requests/new/branch_from')).to route_to('projects/merge_requests/creations#branch_from', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/new/branch_from')).to route_to('projects/merge_requests/creations#branch_from', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
it 'to #branch_to' do
- expect(get('/gitlab/gitlabhq/merge_requests/new/branch_to')).to route_to('projects/merge_requests/creations#branch_to', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/new/branch_to')).to route_to('projects/merge_requests/creations#branch_to', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
it 'to #pipelines' do
- expect(get('/gitlab/gitlabhq/merge_requests/new/pipelines.json')).to route_to('projects/merge_requests/creations#pipelines', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'json')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/new/pipelines.json')).to route_to('projects/merge_requests/creations#pipelines', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'json')
end
it 'to #diffs' do
- expect(get('/gitlab/gitlabhq/merge_requests/new/diffs.json')).to route_to('projects/merge_requests/creations#diffs', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'json')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/new/diffs.json')).to route_to('projects/merge_requests/creations#diffs', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'json')
end
+
+ it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/merge_requests/new", "/gitlab/gitlabhq/-/merge_requests/new"
end
describe Projects::MergeRequests::DiffsController, 'routing' do
it 'to #show' do
- expect(get('/gitlab/gitlabhq/merge_requests/1/diffs.json')).to route_to('projects/merge_requests/diffs#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'json')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/1/diffs.json')).to route_to('projects/merge_requests/diffs#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'json')
end
end
describe Projects::MergeRequests::ConflictsController, 'routing' do
it 'to #show' do
- expect(get('/gitlab/gitlabhq/merge_requests/1/conflicts')).to route_to('projects/merge_requests/conflicts#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/1/conflicts')).to route_to('projects/merge_requests/conflicts#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
end
# raw_project_snippet GET /:project_id/snippets/:id/raw(.:format) snippets#raw
@@ -439,10 +445,14 @@ describe 'project routing' do
# project_commit GET /:project_id/commit/:id(.:format) commit#show {id: /\h{7,40}/, project_id: /[^\/]+/}
describe Projects::CommitController, 'routing' do
it 'to #show' do
+ expect(get('/gitlab/gitlabhq/-/commit/4246fbd')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd')
+ expect(get('/gitlab/gitlabhq/-/commit/4246fbd.diff')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd', format: 'diff')
+ expect(get('/gitlab/gitlabhq/-/commit/4246fbd.patch')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd', format: 'patch')
+ expect(get('/gitlab/gitlabhq/-/commit/4246fbd13872934f72a8fd0d6fb1317b47b59cb5')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd13872934f72a8fd0d6fb1317b47b59cb5')
+ end
+
+ it 'to #show unscoped routing' do
expect(get('/gitlab/gitlabhq/commit/4246fbd')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd')
- expect(get('/gitlab/gitlabhq/commit/4246fbd.diff')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd', format: 'diff')
- expect(get('/gitlab/gitlabhq/commit/4246fbd.patch')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd', format: 'patch')
- expect(get('/gitlab/gitlabhq/commit/4246fbd13872934f72a8fd0d6fb1317b47b59cb5')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd13872934f72a8fd0d6fb1317b47b59cb5')
end
end
@@ -454,9 +464,14 @@ describe 'project routing' do
it_behaves_like 'RESTful project resources' do
let(:actions) { [:show] }
let(:controller) { 'commits' }
+ let(:controller_path) { '/-/commits' }
end
it 'to #show' do
+ expect(get('/gitlab/gitlabhq/-/commits/master.atom')).to route_to('projects/commits#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master.atom')
+ end
+
+ it 'to #show unscoped routing' do
expect(get('/gitlab/gitlabhq/commits/master.atom')).to route_to('projects/commits#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master.atom')
end
end
@@ -550,11 +565,11 @@ describe 'project routing' do
# project_blame GET /:project_id/blame/:id(.:format) blame#show {id: /[^\0]+/, project_id: /[^\/]+/}
describe Projects::BlameController, 'routing' do
it 'to #show' do
- expect(get('/gitlab/gitlabhq/blame/master/app/models/project.rb')).to route_to('projects/blame#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
- expect(get('/gitlab/gitlabhq/blame/master/files.scss')).to route_to('projects/blame#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/files.scss')
+ expect(get('/gitlab/gitlabhq/-/blame/master/app/models/project.rb')).to route_to('projects/blame#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
+ expect(get('/gitlab/gitlabhq/-/blame/master/files.scss')).to route_to('projects/blame#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/files.scss')
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/blame/master/#{url_encoded_newline_file}",
+ assert_routing({ path: "/gitlab/gitlabhq/-/blame/master/#{url_encoded_newline_file}",
method: :get },
{ controller: 'projects/blame', action: 'show',
namespace_id: 'gitlab', project_id: 'gitlabhq',
@@ -562,58 +577,58 @@ describe 'project routing' do
end
end
- # project_blob GET /:project_id/blob/:id(.:format) blob#show {id: /[^\0]+/, project_id: /[^\/]+/}
+ # project_blob GET /:project_id/-/blob/:id(.:format) blob#show {id: /[^\0]+/, project_id: /[^\/]+/}
describe Projects::BlobController, 'routing' do
it 'to #show' do
- expect(get('/gitlab/gitlabhq/blob/master/app/models/project.rb')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
- expect(get('/gitlab/gitlabhq/blob/master/app/models/compare.rb')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/compare.rb')
- expect(get('/gitlab/gitlabhq/blob/master/app/models/diff.js')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/diff.js')
- expect(get('/gitlab/gitlabhq/blob/master/files.scss')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/files.scss')
- expect(get('/gitlab/gitlabhq/blob/master/blob/index.js')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/blob/index.js')
- expect(get('/gitlab/gitlabhq/blob/blob/master/blob/index.js')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'blob/master/blob/index.js')
+ expect(get('/gitlab/gitlabhq/-/blob/master/app/models/project.rb')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
+ expect(get('/gitlab/gitlabhq/-/blob/master/app/models/compare.rb')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/compare.rb')
+ expect(get('/gitlab/gitlabhq/-/blob/master/app/models/diff.js')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/diff.js')
+ expect(get('/gitlab/gitlabhq/-/blob/master/files.scss')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/files.scss')
+ expect(get('/gitlab/gitlabhq/-/blob/master/blob/index.js')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/blob/index.js')
+ expect(get('/gitlab/gitlabhq/-/blob/blob/master/blob/index.js')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'blob/master/blob/index.js')
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/blob/blob/master/blob/#{url_encoded_newline_file}",
+ assert_routing({ path: "/gitlab/gitlabhq/-/blob/blob/master/blob/#{url_encoded_newline_file}",
method: :get },
{ controller: 'projects/blob', action: 'show',
namespace_id: 'gitlab', project_id: 'gitlabhq',
id: "blob/master/blob/#{newline_file}" })
end
- it 'to #show from scope routing' do
- expect(get('/gitlab/gitlabhq/-/blob/master/app/models/project.rb')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
+ it 'to #show from unscoped routing' do
+ expect(get('/gitlab/gitlabhq/blob/master/app/models/project.rb')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
end
end
- # project_tree GET /:project_id/tree/:id(.:format) tree#show {id: /[^\0]+/, project_id: /[^\/]+/}
+ # project_tree GET /:project_id/-/tree/:id(.:format) tree#show {id: /[^\0]+/, project_id: /[^\/]+/}
describe Projects::TreeController, 'routing' do
it 'to #show' do
- expect(get('/gitlab/gitlabhq/tree/master/app/models/project.rb')).to route_to('projects/tree#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
- expect(get('/gitlab/gitlabhq/tree/master/files.scss')).to route_to('projects/tree#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/files.scss')
- expect(get('/gitlab/gitlabhq/tree/master/tree/files')).to route_to('projects/tree#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/tree/files')
- expect(get('/gitlab/gitlabhq/tree/tree/master/tree/files')).to route_to('projects/tree#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'tree/master/tree/files')
+ expect(get('/gitlab/gitlabhq/-/tree/master/app/models/project.rb')).to route_to('projects/tree#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
+ expect(get('/gitlab/gitlabhq/-/tree/master/files.scss')).to route_to('projects/tree#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/files.scss')
+ expect(get('/gitlab/gitlabhq/-/tree/master/tree/files')).to route_to('projects/tree#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/tree/files')
+ expect(get('/gitlab/gitlabhq/-/tree/tree/master/tree/files')).to route_to('projects/tree#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'tree/master/tree/files')
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/tree/master/#{url_encoded_newline_file}",
+ assert_routing({ path: "/gitlab/gitlabhq/-/tree/master/#{url_encoded_newline_file}",
method: :get },
{ controller: 'projects/tree', action: 'show',
namespace_id: 'gitlab', project_id: 'gitlabhq',
id: "master/#{newline_file}" })
end
- it 'to #show from scope routing' do
- expect(get('/gitlab/gitlabhq/-/tree/master/app/models/project.rb')).to route_to('projects/tree#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
+ it 'to #show from unscoped routing' do
+ expect(get('/gitlab/gitlabhq/tree/master/app/models/project.rb')).to route_to('projects/tree#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
end
end
- # project_find_file GET /:namespace_id/:project_id/find_file/*id(.:format) projects/find_file#show {:id=>/[^\0]+/, :namespace_id=>/[a-zA-Z.0-9_\-]+/, :project_id=>/[a-zA-Z.0-9_\-]+(?<!\.atom)/, :format=>/html/}
- # project_files GET /:namespace_id/:project_id/files/*id(.:format) projects/find_file#list {:id=>/(?:[^.]|\.(?!json$))+/, :namespace_id=>/[a-zA-Z.0-9_\-]+/, :project_id=>/[a-zA-Z.0-9_\-]+(?<!\.atom)/, :format=>/json/}
+ # project_find_file GET /:namespace_id/:project_id/-/find_file/*id(.:format) projects/find_file#show {:id=>/[^\0]+/, :namespace_id=>/[a-zA-Z.0-9_\-]+/, :project_id=>/[a-zA-Z.0-9_\-]+(?<!\.atom)/, :format=>/html/}
+ # project_files GET /:namespace_id/:project_id/-/files/*id(.:format) projects/find_file#list {:id=>/(?:[^.]|\.(?!json$))+/, :namespace_id=>/[a-zA-Z.0-9_\-]+/, :project_id=>/[a-zA-Z.0-9_\-]+(?<!\.atom)/, :format=>/json/}
describe Projects::FindFileController, 'routing' do
it 'to #show' do
- expect(get('/gitlab/gitlabhq/find_file/master')).to route_to('projects/find_file#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master')
+ expect(get('/gitlab/gitlabhq/-/find_file/master')).to route_to('projects/find_file#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master')
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/find_file/#{url_encoded_newline_file}",
+ assert_routing({ path: "/gitlab/gitlabhq/-/find_file/#{url_encoded_newline_file}",
method: :get },
{ controller: 'projects/find_file', action: 'show',
namespace_id: 'gitlab', project_id: 'gitlabhq',
@@ -621,10 +636,10 @@ describe 'project routing' do
end
it 'to #list' do
- expect(get('/gitlab/gitlabhq/files/master.json')).to route_to('projects/find_file#list', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master.json')
+ expect(get('/gitlab/gitlabhq/-/files/master.json')).to route_to('projects/find_file#list', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master.json')
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/files/#{url_encoded_newline_file}",
+ assert_routing({ path: "/gitlab/gitlabhq/-/files/#{url_encoded_newline_file}",
method: :get },
{ controller: 'projects/find_file', action: 'list',
namespace_id: 'gitlab', project_id: 'gitlabhq',
@@ -634,13 +649,13 @@ describe 'project routing' do
describe Projects::BlobController, 'routing' do
it 'to #edit' do
- expect(get('/gitlab/gitlabhq/edit/master/app/models/project.rb')).to(
+ expect(get('/gitlab/gitlabhq/-/edit/master/app/models/project.rb')).to(
route_to('projects/blob#edit',
namespace_id: 'gitlab', project_id: 'gitlabhq',
id: 'master/app/models/project.rb'))
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/edit/master/docs/#{url_encoded_newline_file}",
+ assert_routing({ path: "/gitlab/gitlabhq/-/edit/master/docs/#{url_encoded_newline_file}",
method: :get },
{ controller: 'projects/blob', action: 'edit',
namespace_id: 'gitlab', project_id: 'gitlabhq',
@@ -648,13 +663,13 @@ describe 'project routing' do
end
it 'to #preview' do
- expect(post('/gitlab/gitlabhq/preview/master/app/models/project.rb')).to(
+ expect(post('/gitlab/gitlabhq/-/preview/master/app/models/project.rb')).to(
route_to('projects/blob#preview',
namespace_id: 'gitlab', project_id: 'gitlabhq',
id: 'master/app/models/project.rb'))
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/edit/master/docs/#{url_encoded_newline_file}",
+ assert_routing({ path: "/gitlab/gitlabhq/-/edit/master/docs/#{url_encoded_newline_file}",
method: :get },
{ controller: 'projects/blob', action: 'edit',
namespace_id: 'gitlab', project_id: 'gitlabhq',
@@ -662,34 +677,38 @@ describe 'project routing' do
end
end
- # project_raw GET /:project_id/raw/:id(.:format) raw#show {id: /[^\0]+/, project_id: /[^\/]+/}
+ # project_raw GET /:project_id/-/raw/:id(.:format) raw#show {id: /[^\0]+/, project_id: /[^\/]+/}
describe Projects::RawController, 'routing' do
it 'to #show' do
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/raw/master/#{url_encoded_newline_file}",
+ assert_routing({ path: "/gitlab/gitlabhq/-/raw/master/#{url_encoded_newline_file}",
method: :get },
{ controller: 'projects/raw', action: 'show',
namespace_id: 'gitlab', project_id: 'gitlabhq',
id: "master/#{newline_file}" })
end
+
+ it 'to #show from unscoped routing' do
+ expect(get('/gitlab/gitlabhq/raw/master/app/models/project.rb')).to route_to('projects/raw#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
+ end
end
- # project_compare_index GET /:project_id/compare(.:format) compare#index {id: /[^\/]+/, project_id: /[^\/]+/}
- # POST /:project_id/compare(.:format) compare#create {id: /[^\/]+/, project_id: /[^\/]+/}
- # project_compare /:project_id/compare/:from...:to(.:format) compare#show {from: /.+/, to: /.+/, id: /[^\/]+/, project_id: /[^\/]+/}
+ # project_compare_index GET /:project_id/-/compare(.:format) compare#index {id: /[^\/]+/, project_id: /[^\/]+/}
+ # POST /:project_id/-/compare(.:format) compare#create {id: /[^\/]+/, project_id: /[^\/]+/}
+ # project_compare /:project_id/-/compare/:from...:to(.:format) compare#show {from: /.+/, to: /.+/, id: /[^\/]+/, project_id: /[^\/]+/}
describe Projects::CompareController, 'routing' do
it 'to #index' do
- expect(get('/gitlab/gitlabhq/compare')).to route_to('projects/compare#index', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ expect(get('/gitlab/gitlabhq/-/compare')).to route_to('projects/compare#index', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
it 'to #compare' do
- expect(post('/gitlab/gitlabhq/compare')).to route_to('projects/compare#create', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ expect(post('/gitlab/gitlabhq/-/compare')).to route_to('projects/compare#create', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
it 'to #show' do
- expect(get('/gitlab/gitlabhq/compare/master...stable')).to route_to('projects/compare#show', namespace_id: 'gitlab', project_id: 'gitlabhq', from: 'master', to: 'stable')
- expect(get('/gitlab/gitlabhq/compare/issue/1234...stable')).to route_to('projects/compare#show', namespace_id: 'gitlab', project_id: 'gitlabhq', from: 'issue/1234', to: 'stable')
+ expect(get('/gitlab/gitlabhq/-/compare/master...stable')).to route_to('projects/compare#show', namespace_id: 'gitlab', project_id: 'gitlabhq', from: 'master', to: 'stable')
+ expect(get('/gitlab/gitlabhq/-/compare/issue/1234...stable')).to route_to('projects/compare#show', namespace_id: 'gitlab', project_id: 'gitlabhq', from: 'issue/1234', to: 'stable')
end
end
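
The project_routing changes above track the move of project resources under the '/-/' path separator, with the old paths kept working. A hedged sketch of the route shape being exercised (illustrative names, not the actual config/routes/project.rb):

    # inside the project scope
    scope path: '-' do
      resources :merge_requests, only: [:index, :show, :edit, :update]
    end
    # legacy '/:namespace_id/:project_id/merge_requests/...' URLs are redirected to the
    # scoped form, which is what the 'redirecting a legacy project path' examples assert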
diff --git a/spec/rubocop/cop/include_action_view_context_spec.rb b/spec/rubocop/cop/include_action_view_context_spec.rb
deleted file mode 100644
index c888555b54f..00000000000
--- a/spec/rubocop/cop/include_action_view_context_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require 'rubocop'
-require 'rubocop/rspec/support'
-
-require_relative '../../../rubocop/cop/include_action_view_context'
-
-describe RuboCop::Cop::IncludeActionViewContext do
- include CopHelper
-
- subject(:cop) { described_class.new }
-
- context 'when `ActionView::Context` is included' do
- let(:source) { 'include ActionView::Context' }
- let(:correct_source) { 'include ::Gitlab::ActionViewOutput::Context' }
-
- it 'registers an offense' do
- inspect_source(source)
-
- aggregate_failures do
- expect(cop.offenses.size).to eq(1)
- expect(cop.offenses.map(&:line)).to eq([1])
- expect(cop.highlights).to eq(['ActionView::Context'])
- end
- end
-
- it 'autocorrects to the right version' do
- autocorrected = autocorrect_source(source)
-
- expect(autocorrected).to eq(correct_source)
- end
- end
-
- context 'when `ActionView::Context` is not included' do
- it 'registers no offense' do
- inspect_source('include Context')
-
- aggregate_failures do
- expect(cop.offenses.size).to eq(0)
- end
- end
- end
-end
diff --git a/spec/rubocop/cop/scalability/bulk_perform_with_context_spec.rb b/spec/rubocop/cop/scalability/bulk_perform_with_context_spec.rb
new file mode 100644
index 00000000000..8107cfa8957
--- /dev/null
+++ b/spec/rubocop/cop/scalability/bulk_perform_with_context_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+require_relative '../../../support/helpers/expect_offense'
+require_relative '../../../../rubocop/cop/scalability/bulk_perform_with_context'
+
+describe RuboCop::Cop::Scalability::BulkPerformWithContext do
+ include CopHelper
+ include ExpectOffense
+
+ subject(:cop) { described_class.new }
+
+ it "adds an offense when calling bulk_perform_async" do
+ inspect_source(<<~CODE.strip_indent)
+ Worker.bulk_perform_async(args)
+ CODE
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it "adds an offense when calling bulk_perform_in" do
+ inspect_source(<<~CODE.strip_indent)
+ diffs.each_batch(of: BATCH_SIZE) do |relation, index|
+ ids = relation.pluck_primary_key.map { |id| [id] }
+ DeleteDiffFilesWorker.bulk_perform_in(index * 5.minutes, ids)
+ end
+ CODE
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it "does not add an offense for migrations" do
+ allow(cop).to receive(:in_migration?).and_return(true)
+
+ inspect_source(<<~CODE.strip_indent)
+ Worker.bulk_perform_in(args)
+ CODE
+
+ expect(cop.offenses.size).to eq(0)
+ end
+
+ it "does not add an offence for specs" do
+ allow(cop).to receive(:in_spec?).and_return(true)
+
+ inspect_source(<<~CODE.strip_indent)
+ Worker.bulk_perform_in(args)
+ CODE
+
+ expect(cop.offenses.size).to eq(0)
+ end
+
+ it "does not add an offense for scheduling BackgroundMigrations" do
+ inspect_source(<<~CODE.strip_indent)
+ BackgroundMigrationWorker.bulk_perform_in(args)
+ CODE
+
+ expect(cop.offenses.size).to eq(0)
+ end
+end
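
For orientation, a cop with roughly this shape would produce the offenses asserted above; this is an illustrative sketch only, not the real rubocop/cop/scalability/bulk_perform_with_context.rb (which also handles the migration, spec, and BackgroundMigrationWorker exemptions):

    module RuboCop
      module Cop
        module Scalability
          class BulkPerformWithContext < RuboCop::Cop::Cop
            MSG = 'Prefer the *_with_contexts variants so scheduled jobs carry metadata.'

            # Matches Worker.bulk_perform_async(...) and Worker.bulk_perform_in(...)
            def_node_matcher :schedules_in_bulk?, <<~PATTERN
              (send (const _ _) {:bulk_perform_async :bulk_perform_in} ...)
            PATTERN

            def on_send(node)
              return unless schedules_in_bulk?(node)

              add_offense(node)
            end
          end
        end
      end
    end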
diff --git a/spec/rubocop/cop/scalability/cron_worker_context_spec.rb b/spec/rubocop/cop/scalability/cron_worker_context_spec.rb
new file mode 100644
index 00000000000..460514d9bed
--- /dev/null
+++ b/spec/rubocop/cop/scalability/cron_worker_context_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+require_relative '../../../support/helpers/expect_offense'
+require_relative '../../../../rubocop/cop/scalability/cron_worker_context'
+
+describe RuboCop::Cop::Scalability::CronWorkerContext do
+ include CopHelper
+ include ExpectOffense
+
+ subject(:cop) { described_class.new }
+
+ it 'adds an offense when including CronjobQueue' do
+ inspect_source(<<~CODE.strip_indent)
+ class SomeWorker
+ include CronjobQueue
+ end
+ CODE
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'does not add offenses for other workers' do
+ expect_no_offenses(<<~CODE.strip_indent)
+ class SomeWorker
+ end
+ CODE
+ end
+
+ it 'does not add an offense when the class defines a context' do
+ expect_no_offenses(<<~CODE.strip_indent)
+ class SomeWorker
+ include CronjobQueue
+
+ with_context user: 'bla'
+ end
+ CODE
+ end
+
+ it 'does not add an offense when the worker calls `with_context`' do
+ expect_no_offenses(<<~CODE.strip_indent)
+ class SomeWorker
+ include CronjobQueue
+
+ def perform
+ with_context(user: 'bla') do
+ # more work
+ end
+ end
+ end
+ CODE
+ end
+
+ it 'does not add an offense when the worker calls `bulk_perform_async_with_contexts`' do
+ expect_no_offenses(<<~CODE.strip_indent)
+ class SomeWorker
+ include CronjobQueue
+
+ def perform
+ SomeOtherWorker.bulk_perform_async_with_contexts(things,
+ arguments_proc: -> (thing) { thing.id },
+ context_proc: -> (thing) { { project: thing.project } })
+ end
+ end
+ CODE
+ end
+
+ it 'does not add an offense when the worker calls `bulk_perform_in_with_contexts`' do
+ expect_no_offenses(<<~CODE.strip_indent)
+ class SomeWorker
+ include CronjobQueue
+
+ def perform
+ SomeOtherWorker.bulk_perform_in_with_contexts(10.minutes, things,
+ arguments_proc: -> (thing) { thing.id },
+ context_proc: -> (thing) { { project: thing.project } })
+ end
+ end
+ CODE
+ end
+end
diff --git a/spec/serializers/blob_entity_spec.rb b/spec/serializers/blob_entity_spec.rb
index 7e3a0a87bd5..3cd967ed44c 100644
--- a/spec/serializers/blob_entity_spec.rb
+++ b/spec/serializers/blob_entity_spec.rb
@@ -23,7 +23,7 @@ describe BlobEntity do
mode: "100644",
readable_text: true,
icon: "file-text-o",
- url: "/#{project.full_path}/blob/master/bar/branch-test.txt"
+ url: "/#{project.full_path}/-/blob/master/bar/branch-test.txt"
})
end
end
diff --git a/spec/serializers/build_artifact_entity_spec.rb b/spec/serializers/build_artifact_entity_spec.rb
index 09fe094fff1..c8995cbc5a2 100644
--- a/spec/serializers/build_artifact_entity_spec.rb
+++ b/spec/serializers/build_artifact_entity_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe BuildArtifactEntity do
- let(:job) { create(:ci_build, name: 'test:job', artifacts_expire_at: 1.hour.from_now) }
+ let(:job) { create(:ci_build, :artifacts, name: 'test:job', artifacts_expire_at: 1.hour.from_now) }
let(:entity) do
described_class.new(job, request: double)
diff --git a/spec/serializers/build_details_entity_spec.rb b/spec/serializers/build_details_entity_spec.rb
index 91c5fd6bf2c..fc05989df16 100644
--- a/spec/serializers/build_details_entity_spec.rb
+++ b/spec/serializers/build_details_entity_spec.rb
@@ -176,5 +176,22 @@ describe BuildDetailsEntity do
expect(subject[:reports].first[:file_type]).to eq('codequality')
end
end
+
+ context 'when the build has no archive type artifacts' do
+ let!(:report) { create(:ci_job_artifact, :codequality, job: build) }
+
+ it 'does not expose any artifact actions path' do
+ expect(subject[:artifact].keys).not_to include(:download_path, :browse_path, :keep_path)
+ end
+ end
+
+ context 'when the build has archive type artifacts' do
+ let!(:build) { create(:ci_build, :artifacts, artifacts_expire_at: 7.days.from_now) }
+ let!(:report) { create(:ci_job_artifact, :codequality, job: build) }
+
+ it 'exposes artifact details' do
+ expect(subject[:artifact].keys).to include(:download_path, :browse_path, :keep_path, :expire_at, :expired)
+ end
+ end
end
end
diff --git a/spec/serializers/container_repositories_serializer_spec.rb b/spec/serializers/container_repositories_serializer_spec.rb
new file mode 100644
index 00000000000..382778389b3
--- /dev/null
+++ b/spec/serializers/container_repositories_serializer_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ContainerRepositoriesSerializer do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:resource) { create(:container_repository, name: 'image', project: project) }
+ let(:params) { { current_user: user, project: project } }
+
+ before do
+ project.add_developer(user)
+
+ stub_container_registry_config(enabled: true)
+ stub_container_registry_tags(repository: /image/, tags: %w(rootA latest))
+ end
+
+ describe '#represent' do
+ subject do
+ described_class.new(params).represent(resource)
+ end
+
+ it 'has basic attributes' do
+ expect(subject).to include(:id, :name, :path, :location, :created_at, :tags_path, :destroy_path)
+ end
+ end
+
+ describe '#represent_read_only' do
+ subject do
+ described_class.new(current_user: user, project: project).represent_read_only(resource)
+ end
+
+ it 'does not include destroy_path' do
+ expect(subject).to include(:id, :name, :path, :location, :created_at, :tags_path)
+ expect(subject).not_to include(:destroy_path)
+ end
+ end
+
+ describe '#with_pagination' do
+ let(:request) do
+ double(
+ url: "#{Gitlab.config.gitlab.url}:8080/#{project.namespace_id}/#{project.id}/container_registry?#{query.to_query}",
+ query_parameters: query
+ )
+ end
+
+ let(:response) { spy('response') }
+ let(:resource) { ContainerRepository.all }
+ let(:query) { { page: 1, per_page: 2 } }
+
+ let(:serializer) do
+ described_class
+ .new(current_user: user, project: project)
+ .with_pagination(request, response)
+ end
+
+ subject do
+ serializer.represent(resource)
+ end
+
+ it 'creates a paginated serializer' do
+ expect(serializer).to be_paginated
+ end
+
+ context 'when multiple ContainerRepository objects are serialized' do
+ before do
+ create_list(:container_repository, 5, project: project)
+ end
+
+ it 'serializes appropriate number of objects' do
+ expect(subject.count).to be 2
+ end
+
+ it 'appends relevant headers' do
+ expect(response).to include_pagination_headers
+ expect(response).to receive(:[]=).with('X-Total', '5')
+ expect(response).to receive(:[]=).with('X-Total-Pages', '3')
+ expect(response).to receive(:[]=).with('X-Per-Page', '2')
+
+ subject
+ end
+ end
+ end
+end
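
The pagination header expectations above follow directly from the query params in the spec; a small arithmetic sketch (values taken from the spec itself):

    total       = 5                              # create_list(:container_repository, 5, ...)
    per_page    = 2                              # query { page: 1, per_page: 2 }
    total_pages = (total.to_f / per_page).ceil   # => 3, hence 'X-Total-Pages' => '3'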
diff --git a/spec/serializers/cluster_basic_entity_spec.rb b/spec/serializers/deployment_cluster_entity_spec.rb
index 8c3307a1837..b22a93fcec7 100644
--- a/spec/serializers/cluster_basic_entity_spec.rb
+++ b/spec/serializers/deployment_cluster_entity_spec.rb
@@ -2,9 +2,9 @@
require 'spec_helper'
-describe ClusterBasicEntity do
+describe DeploymentClusterEntity do
describe '#as_json' do
- subject { described_class.new(cluster, request: request).as_json }
+ subject { described_class.new(deployment, request: request).as_json }
let(:maintainer) { create(:user) }
let(:developer) { create(:user) }
@@ -12,26 +12,30 @@ describe ClusterBasicEntity do
let(:request) { double(:request, current_user: current_user) }
let(:project) { create(:project) }
let(:cluster) { create(:cluster, name: 'the-cluster', projects: [project]) }
+ let(:deployment) { create(:deployment, cluster: cluster) }
+ let!(:deployment_cluster) { create(:deployment_cluster, cluster: cluster, deployment: deployment) }
before do
project.add_maintainer(maintainer)
project.add_developer(developer)
end
- it 'matches cluster_basic entity schema' do
- expect(subject.as_json).to match_schema('cluster_basic')
+ it 'matches deployment_cluster entity schema' do
+ expect(subject.as_json).to match_schema('deployment_cluster')
end
it 'exposes the cluster details' do
expect(subject[:name]).to eq('the-cluster')
expect(subject[:path]).to eq("/#{project.full_path}/-/clusters/#{cluster.id}")
+ expect(subject[:kubernetes_namespace]).to eq(deployment_cluster.kubernetes_namespace)
end
context 'when the user does not have permission to view the cluster' do
let(:current_user) { developer }
- it 'does not include the path' do
+ it 'does not include the path or the namespace' do
expect(subject[:path]).to be_nil
+ expect(subject[:kubernetes_namespace]).to be_nil
end
end
end
diff --git a/spec/serializers/diff_file_entity_spec.rb b/spec/serializers/diff_file_entity_spec.rb
index 65b62f8aa16..3e341a58a15 100644
--- a/spec/serializers/diff_file_entity_spec.rb
+++ b/spec/serializers/diff_file_entity_spec.rb
@@ -49,7 +49,7 @@ describe DiffFileEntity do
end
end
- context '#parallel_diff_lines' do
+ describe '#parallel_diff_lines' do
let(:options) { { diff_view: :parallel } }
it 'exposes parallel diff lines correctly' do
diff --git a/spec/serializers/diffs_metadata_entity_spec.rb b/spec/serializers/diffs_metadata_entity_spec.rb
index 0fa643d37b3..86438bd59d7 100644
--- a/spec/serializers/diffs_metadata_entity_spec.rb
+++ b/spec/serializers/diffs_metadata_entity_spec.rb
@@ -29,6 +29,7 @@ describe DiffsMetadataEntity do
:added_lines, :removed_lines, :render_overflow_warning,
:email_patch_path, :plain_diff_path,
:merge_request_diffs,
+ :context_commits,
# Attributes
:diff_files
)
diff --git a/spec/serializers/merge_request_diff_entity_spec.rb b/spec/serializers/merge_request_diff_entity_spec.rb
index 59ec0b22158..2e3b0d092fe 100644
--- a/spec/serializers/merge_request_diff_entity_spec.rb
+++ b/spec/serializers/merge_request_diff_entity_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe MergeRequestDiffEntity do
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:request) { EntityRequest.new(project: project) }
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
let(:merge_request_diffs) { merge_request.merge_request_diffs }
@@ -36,4 +36,29 @@ describe MergeRequestDiffEntity do
expect(subject[:short_commit_sha]).to eq(nil)
end
end
+
+ describe '#head_version_path' do
+ before do
+ allow(merge_request).to receive(:diffable_merge_ref?)
+ .and_return(diffable_merge_ref)
+ end
+
+ context 'merge request can be merged' do
+ let(:diffable_merge_ref) { true }
+
+ it 'returns diff path with diff_head param set' do
+ expect(subject[:head_version_path]).to eq(
+ "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/diffs?diff_head=true"
+ )
+ end
+ end
+
+ context 'merge request cannot be merged' do
+ let(:diffable_merge_ref) { false }
+
+      it 'returns nil' do
+ expect(subject[:head_version_path]).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb
index ea9052b4046..0593dd527cc 100644
--- a/spec/serializers/merge_request_poll_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb
@@ -60,7 +60,7 @@ describe MergeRequestPollWidgetEntity do
project.add_developer(user)
expect(subject[:new_blob_path])
- .to eq("/#{resource.project.full_path}/new/#{resource.source_branch}")
+ .to eq("/#{resource.project.full_path}/-/new/#{resource.source_branch}")
end
end
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index 22232682be1..f621cb650f9 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -45,12 +45,100 @@ describe MergeRequestWidgetEntity do
it 'has email_patches_path' do
expect(subject[:email_patches_path])
- .to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}.patch")
+ .to eq("/#{resource.project.full_path}/-/merge_requests/#{resource.iid}.patch")
end
it 'has plain_diff_path' do
expect(subject[:plain_diff_path])
- .to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}.diff")
+ .to eq("/#{resource.project.full_path}/-/merge_requests/#{resource.iid}.diff")
+ end
+
+ describe 'merge_request_add_ci_config_path' do
+ before do
+ project.add_role(user, role)
+ end
+
+ context 'when there are pipelines' do
+ let(:role) { :developer }
+
+ before do
+ create(:ci_empty_pipeline, project: project, sha: resource.all_commit_shas.first, ref: resource.source_branch)
+ end
+
+      it 'has no ci config path' do
+ expect(subject[:merge_request_add_ci_config_path]).to be_nil
+ end
+ end
+
+ context 'when there are no pipelines' do
+ context 'when user has permissions' do
+ let(:role) { :developer }
+
+ it 'has add ci config path' do
+ expect(subject[:merge_request_add_ci_config_path])
+ .to eq("/#{resource.project.full_path}/-/new/#{resource.source_branch}?commit_message=Add+.gitlab-ci.yml&file_name=.gitlab-ci.yml")
+ end
+
+ context 'when source project is missing' do
+ before do
+ resource.source_project = nil
+ end
+
+ it 'returns a blank ci config path' do
+ expect(subject[:merge_request_add_ci_config_path]).to be_nil
+ end
+ end
+
+ context 'when there are no commits' do
+ before do
+ allow(resource).to receive(:commits_count).and_return(0)
+ end
+
+ it 'returns a blank ci config path' do
+ expect(subject[:merge_request_add_ci_config_path]).to be_nil
+ end
+ end
+
+ context 'when ci_config_path is customized' do
+ it 'has no path if ci_config_path is not set to our default setting' do
+ project.ci_config_path = 'not_default'
+
+ expect(subject[:merge_request_add_ci_config_path]).to be_nil
+ end
+
+          it 'has a path if ci_config_path is unset' do
+ expect(subject[:merge_request_add_ci_config_path]).not_to be_nil
+ end
+
+ it 'has a path if ci_config_path is an empty string' do
+ project.ci_config_path = ''
+
+ expect(subject[:merge_request_add_ci_config_path]).not_to be_nil
+ end
+
+ it 'has a path if ci_config_path is set to our default file' do
+ project.ci_config_path = Gitlab::FileDetector::PATTERNS[:gitlab_ci]
+
+ expect(subject[:merge_request_add_ci_config_path]).not_to be_nil
+ end
+ end
+ end
+
+ context 'when user does not have permissions' do
+ let(:role) { :reporter }
+
+        it 'has no add ci config path' do
+ expect(subject[:merge_request_add_ci_config_path]).to be_nil
+ end
+ end
+ end
+ end
+
+ it 'has human access' do
+ project.add_maintainer(user)
+
+ expect(subject[:human_access])
+ .to eq('Maintainer')
end
describe 'when source project is deleted' do
diff --git a/spec/serializers/paginated_diff_entity_spec.rb b/spec/serializers/paginated_diff_entity_spec.rb
index 7432e072318..77569aaa4bc 100644
--- a/spec/serializers/paginated_diff_entity_spec.rb
+++ b/spec/serializers/paginated_diff_entity_spec.rb
@@ -26,7 +26,7 @@ describe PaginatedDiffEntity do
expect(subject[:pagination]).to eq(
current_page: 2,
next_page: 3,
- next_page_href: "/#{merge_request.project.full_path}/merge_requests/#{merge_request.iid}/diffs_batch.json?page=3",
+ next_page_href: "/#{merge_request.project.full_path}/-/merge_requests/#{merge_request.iid}/diffs_batch.json?page=3",
total_pages: 7
)
end
diff --git a/spec/serializers/pipeline_details_entity_spec.rb b/spec/serializers/pipeline_details_entity_spec.rb
index 9ce7c265e43..f270f9fd4cb 100644
--- a/spec/serializers/pipeline_details_entity_spec.rb
+++ b/spec/serializers/pipeline_details_entity_spec.rb
@@ -173,5 +173,44 @@ describe PipelineDetailsEntity do
expect(subject[:triggered].first[:project]).not_to be_nil
end
end
+
+ context 'when pipeline has expiring archive artifacts' do
+ let(:pipeline) { create(:ci_empty_pipeline) }
+ let!(:build_1) { create(:ci_build, :artifacts, pipeline: pipeline, artifacts_expire_at: 2.days.from_now, name: 'build_1') }
+ let!(:build_2) { create(:ci_build, :artifacts, pipeline: pipeline, artifacts_expire_at: 2.days.from_now, name: 'build_2') }
+ let!(:build_3) { create(:ci_build, :artifacts, pipeline: pipeline, artifacts_expire_at: 2.days.from_now, name: 'build_3') }
+
+ let(:names) { subject[:details][:artifacts].map { |a| a[:name] } }
+
+ context 'and preload_job_artifacts_archive is not defined in the options' do
+ it 'defaults to true and eager loads the job_artifacts_archive' do
+ recorder = ActiveRecord::QueryRecorder.new do
+ expect(names).to match_array(%w[build_1 build_2 build_3])
+ end
+
+ expected_queries = Gitlab.ee? ? 42 : 29
+
+ # This makes only one query to fetch all job artifacts
+ expect(recorder.count).to eq(expected_queries)
+ end
+ end
+
+ context 'and preload_job_artifacts_archive is set to false' do
+ let(:entity) do
+ described_class.represent(pipeline, request: request, preload_job_artifacts_archive: false)
+ end
+
+ it 'does not eager load the job_artifacts_archive' do
+ recorder = ActiveRecord::QueryRecorder.new do
+ expect(names).to match_array(%w[build_1 build_2 build_3])
+ end
+
+ expected_queries = Gitlab.ee? ? 44 : 31
+
+ # This makes one query for each job artifact
+ expect(recorder.count).to eq(expected_queries)
+ end
+ end
+ end
end
end
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index 8158277ffbc..84b0e487ee7 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -159,7 +159,7 @@ describe PipelineSerializer do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
- expected_queries = Gitlab.ee? ? 42 : 39
+ expected_queries = Gitlab.ee? ? 43 : 40
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
@@ -180,7 +180,7 @@ describe PipelineSerializer do
# pipeline. With the same ref this check is cached but if refs are
# different then there is an extra query per ref
# https://gitlab.com/gitlab-org/gitlab-foss/issues/46368
- expected_queries = Gitlab.ee? ? 45 : 42
+ expected_queries = Gitlab.ee? ? 46 : 43
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
diff --git a/spec/serializers/test_reports_comparer_entity_spec.rb b/spec/serializers/test_reports_comparer_entity_spec.rb
index 2627ad536e4..e7dabc67325 100644
--- a/spec/serializers/test_reports_comparer_entity_spec.rb
+++ b/spec/serializers/test_reports_comparer_entity_spec.rb
@@ -24,7 +24,7 @@ describe TestReportsComparerEntity do
it 'contains correct compared test reports details' do
expect(subject[:status]).to eq('success')
- expect(subject[:summary]).to include(total: 2, resolved: 0, failed: 0)
+ expect(subject[:summary]).to include(total: 2, resolved: 0, failed: 0, errored: 0)
expect(subject[:suites].first[:name]).to eq('rspec')
expect(subject[:suites].first[:status]).to eq('success')
expect(subject[:suites].second[:name]).to eq('junit')
@@ -42,7 +42,7 @@ describe TestReportsComparerEntity do
it 'contains correct compared test reports details' do
expect(subject[:status]).to eq('failed')
- expect(subject[:summary]).to include(total: 2, resolved: 0, failed: 1)
+ expect(subject[:summary]).to include(total: 2, resolved: 0, failed: 1, errored: 0)
expect(subject[:suites].first[:name]).to eq('rspec')
expect(subject[:suites].first[:status]).to eq('success')
expect(subject[:suites].second[:name]).to eq('junit')
@@ -60,7 +60,7 @@ describe TestReportsComparerEntity do
it 'contains correct compared test reports details' do
expect(subject[:status]).to eq('success')
- expect(subject[:summary]).to include(total: 2, resolved: 1, failed: 0)
+ expect(subject[:summary]).to include(total: 2, resolved: 1, failed: 0, errored: 0)
expect(subject[:suites].first[:name]).to eq('rspec')
expect(subject[:suites].first[:status]).to eq('success')
expect(subject[:suites].second[:name]).to eq('junit')
diff --git a/spec/serializers/test_suite_comparer_entity_spec.rb b/spec/serializers/test_suite_comparer_entity_spec.rb
index e22387130a1..9790777a570 100644
--- a/spec/serializers/test_suite_comparer_entity_spec.rb
+++ b/spec/serializers/test_suite_comparer_entity_spec.rb
@@ -12,6 +12,7 @@ describe TestSuiteComparerEntity do
let(:head_suite) { Gitlab::Ci::Reports::TestSuite.new(name) }
let(:test_case_success) { create_test_case_rspec_success }
let(:test_case_failed) { create_test_case_rspec_failed }
+ let(:test_case_error) { create_test_case_rspec_error }
describe '#as_json' do
subject { entity.as_json }
@@ -25,7 +26,7 @@ describe TestSuiteComparerEntity do
it 'contains correct compared test suite details' do
expect(subject[:name]).to eq(name)
expect(subject[:status]).to eq('failed')
- expect(subject[:summary]).to include(total: 1, resolved: 0, failed: 1)
+ expect(subject[:summary]).to include(total: 1, resolved: 0, failed: 1, errored: 0)
subject[:new_failures].first.tap do |new_failure|
expect(new_failure[:status]).to eq(test_case_failed.status)
expect(new_failure[:name]).to eq(test_case_failed.name)
@@ -37,6 +38,27 @@ describe TestSuiteComparerEntity do
end
end
+ context 'when head suite has a new error test case which does not exist in base' do
+ before do
+ base_suite.add_test_case(test_case_success)
+ head_suite.add_test_case(test_case_error)
+ end
+
+ it 'contains correct compared test suite details' do
+ expect(subject[:name]).to eq(name)
+ expect(subject[:status]).to eq('failed')
+ expect(subject[:summary]).to include(total: 1, resolved: 0, failed: 0, errored: 1)
+ subject[:new_errors].first.tap do |new_error|
+ expect(new_error[:status]).to eq(test_case_error.status)
+ expect(new_error[:name]).to eq(test_case_error.name)
+ expect(new_error[:execution_time]).to eq(test_case_error.execution_time)
+ expect(new_error[:system_output]).to eq(test_case_error.system_output)
+ end
+ expect(subject[:resolved_failures]).to be_empty
+ expect(subject[:existing_failures]).to be_empty
+ end
+ end
+
context 'when head suite still has a failed test case which failed in base' do
before do
base_suite.add_test_case(test_case_failed)
@@ -46,7 +68,7 @@ describe TestSuiteComparerEntity do
it 'contains correct compared test suite details' do
expect(subject[:name]).to eq(name)
expect(subject[:status]).to eq('failed')
- expect(subject[:summary]).to include(total: 1, resolved: 0, failed: 1)
+ expect(subject[:summary]).to include(total: 1, resolved: 0, failed: 1, errored: 0)
expect(subject[:new_failures]).to be_empty
expect(subject[:resolved_failures]).to be_empty
subject[:existing_failures].first.tap do |existing_failure|
@@ -67,7 +89,7 @@ describe TestSuiteComparerEntity do
it 'contains correct compared test suite details' do
expect(subject[:name]).to eq(name)
expect(subject[:status]).to eq('success')
- expect(subject[:summary]).to include(total: 1, resolved: 1, failed: 0)
+ expect(subject[:summary]).to include(total: 1, resolved: 1, failed: 0, errored: 0)
expect(subject[:new_failures]).to be_empty
subject[:resolved_failures].first.tap do |resolved_failure|
expect(resolved_failure[:status]).to eq(test_case_success.status)
@@ -88,42 +110,57 @@ describe TestSuiteComparerEntity do
context 'prefers new over existing and resolved' do
before do
3.times { add_new_failure }
+ 3.times { add_new_error }
3.times { add_existing_failure }
+ 3.times { add_existing_error }
3.times { add_resolved_failure }
+ 3.times { add_resolved_error }
end
- it 'returns 2 new failures, and 1 of resolved and existing' do
- expect(subject[:summary]).to include(total: 9, resolved: 3, failed: 6)
+ it 'returns 2 of each new category, and 1 of each resolved and existing' do
+ expect(subject[:summary]).to include(total: 18, resolved: 6, failed: 6, errored: 6)
expect(subject[:new_failures].count).to eq(2)
+ expect(subject[:new_errors].count).to eq(2)
expect(subject[:existing_failures].count).to eq(1)
+ expect(subject[:existing_errors].count).to eq(1)
expect(subject[:resolved_failures].count).to eq(1)
+ expect(subject[:resolved_errors].count).to eq(1)
end
end
context 'prefers existing over resolved' do
before do
3.times { add_existing_failure }
+ 3.times { add_existing_error }
3.times { add_resolved_failure }
+ 3.times { add_resolved_error }
end
- it 'returns 2 existing failures, and 1 resolved' do
- expect(subject[:summary]).to include(total: 6, resolved: 3, failed: 3)
+ it 'returns 2 of each existing category, and 1 of each resolved' do
+ expect(subject[:summary]).to include(total: 12, resolved: 6, failed: 3, errored: 3)
expect(subject[:new_failures].count).to eq(0)
+ expect(subject[:new_errors].count).to eq(0)
expect(subject[:existing_failures].count).to eq(2)
+ expect(subject[:existing_errors].count).to eq(2)
expect(subject[:resolved_failures].count).to eq(1)
+ expect(subject[:resolved_errors].count).to eq(1)
end
end
context 'limits amount of resolved' do
before do
3.times { add_resolved_failure }
+ 3.times { add_resolved_error }
end
- it 'returns 2 resolved failures' do
- expect(subject[:summary]).to include(total: 3, resolved: 3, failed: 0)
+ it 'returns 2 of each resolved category' do
+ expect(subject[:summary]).to include(total: 6, resolved: 6, failed: 0, errored: 0)
expect(subject[:new_failures].count).to eq(0)
+ expect(subject[:new_errors].count).to eq(0)
expect(subject[:existing_failures].count).to eq(0)
+ expect(subject[:existing_errors].count).to eq(0)
expect(subject[:resolved_failures].count).to eq(2)
+ expect(subject[:resolved_errors].count).to eq(2)
end
end
@@ -134,19 +171,38 @@ describe TestSuiteComparerEntity do
head_suite.add_test_case(failed_case)
end
+ def add_new_error
+ error_case = create_test_case_rspec_error(SecureRandom.hex)
+ head_suite.add_test_case(error_case)
+ end
+
def add_existing_failure
failed_case = create_test_case_rspec_failed(SecureRandom.hex)
base_suite.add_test_case(failed_case)
head_suite.add_test_case(failed_case)
end
+ def add_existing_error
+ error_case = create_test_case_rspec_error(SecureRandom.hex)
+ base_suite.add_test_case(error_case)
+ head_suite.add_test_case(error_case)
+ end
+
def add_resolved_failure
case_name = SecureRandom.hex
- failed_case = create_test_case_rspec_failed(case_name)
- success_case = create_test_case_rspec_success(case_name)
+ failed_case = create_test_case_java_failed(case_name)
+ success_case = create_test_case_java_success(case_name)
base_suite.add_test_case(failed_case)
head_suite.add_test_case(success_case)
end
+
+ def add_resolved_error
+ case_name = SecureRandom.hex
+ error_case = create_test_case_java_error(case_name)
+ success_case = create_test_case_java_success(case_name)
+ base_suite.add_test_case(error_case)
+ head_suite.add_test_case(success_case)
+ end
end
end
end
diff --git a/spec/serializers/test_suite_entity_spec.rb b/spec/serializers/test_suite_entity_spec.rb
index 54dca3214b7..6a9653954f3 100644
--- a/spec/serializers/test_suite_entity_spec.rb
+++ b/spec/serializers/test_suite_entity_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe TestSuiteEntity do
let(:pipeline) { create(:ci_pipeline, :with_test_reports) }
- let(:entity) { described_class.new(pipeline.test_reports.test_suites.values.first) }
+ let(:entity) { described_class.new(pipeline.test_reports.test_suites.each_value.first) }
describe '#as_json' do
subject(:as_json) { entity.as_json }
diff --git a/spec/serializers/variable_entity_spec.rb b/spec/serializers/variable_entity_spec.rb
index 742b14fb3d3..3cb18dab314 100644
--- a/spec/serializers/variable_entity_spec.rb
+++ b/spec/serializers/variable_entity_spec.rb
@@ -10,7 +10,7 @@ describe VariableEntity do
subject { entity.as_json }
it 'contains required fields' do
- expect(subject).to include(:id, :key, :value, :protected, :environment_scope)
+ expect(subject).to include(:id, :key, :value, :protected, :environment_scope, :variable_type)
end
end
end
diff --git a/spec/services/branches/delete_merged_service_spec.rb b/spec/services/branches/delete_merged_service_spec.rb
index 962af8110f7..5c87f156ec7 100644
--- a/spec/services/branches/delete_merged_service_spec.rb
+++ b/spec/services/branches/delete_merged_service_spec.rb
@@ -9,7 +9,7 @@ describe Branches::DeleteMergedService do
let(:project) { create(:project, :repository) }
- context '#execute' do
+ describe '#execute' do
it 'deletes a branch that was merged' do
service.execute
@@ -74,7 +74,7 @@ describe Branches::DeleteMergedService do
end
end
- context '#async_execute' do
+ describe '#async_execute' do
it 'calls DeleteMergedBranchesWorker async' do
expect(DeleteMergedBranchesWorker).to receive(:perform_async)
diff --git a/spec/services/ci/create_cross_project_pipeline_service_spec.rb b/spec/services/ci/create_cross_project_pipeline_service_spec.rb
new file mode 100644
index 00000000000..51cf18f8d87
--- /dev/null
+++ b/spec/services/ci/create_cross_project_pipeline_service_spec.rb
@@ -0,0 +1,364 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::CreateCrossProjectPipelineService, '#execute' do
+ let_it_be(:user) { create(:user) }
+ let(:upstream_project) { create(:project, :repository) }
+ let_it_be(:downstream_project) { create(:project, :repository) }
+
+ let!(:upstream_pipeline) do
+ create(:ci_pipeline, :running, project: upstream_project)
+ end
+
+ let(:trigger) do
+ {
+ trigger: {
+ project: downstream_project.full_path,
+ branch: 'feature'
+ }
+ }
+ end
+
+ let(:bridge) do
+ create(:ci_bridge, status: :pending,
+ user: user,
+ options: trigger,
+ pipeline: upstream_pipeline)
+ end
+
+ let(:service) { described_class.new(upstream_project, user) }
+
+ before do
+ upstream_project.add_developer(user)
+ end
+
+ context 'when downstream project has not been found' do
+ let(:trigger) do
+ { trigger: { project: 'unknown/project' } }
+ end
+
+ it 'does not create a pipeline' do
+ expect { service.execute(bridge) }
+ .not_to change { Ci::Pipeline.count }
+ end
+
+ it 'changes pipeline bridge job status to failed' do
+ service.execute(bridge)
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason)
+ .to eq 'downstream_bridge_project_not_found'
+ end
+ end
+
+ context 'when user can not access downstream project' do
+ it 'does not create a new pipeline' do
+ expect { service.execute(bridge) }
+ .not_to change { Ci::Pipeline.count }
+ end
+
+ it 'changes status of the bridge build' do
+ service.execute(bridge)
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason)
+ .to eq 'downstream_bridge_project_not_found'
+ end
+ end
+
+ context 'when user does not have access to create pipeline' do
+ before do
+ downstream_project.add_guest(user)
+ end
+
+ it 'does not create a new pipeline' do
+ expect { service.execute(bridge) }
+ .not_to change { Ci::Pipeline.count }
+ end
+
+ it 'changes status of the bridge build' do
+ service.execute(bridge)
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq 'insufficient_bridge_permissions'
+ end
+ end
+
+ context 'when user can create pipeline in a downstream project' do
+ let(:stub_config) { true }
+
+ before do
+ downstream_project.add_developer(user)
+ stub_ci_pipeline_yaml_file(YAML.dump(rspec: { script: 'rspec' })) if stub_config
+ end
+
+ it 'creates only one new pipeline' do
+ expect { service.execute(bridge) }
+ .to change { Ci::Pipeline.count }.by(1)
+ end
+
+ it 'creates a new pipeline in a downstream project' do
+ pipeline = service.execute(bridge)
+
+ expect(pipeline.user).to eq bridge.user
+ expect(pipeline.project).to eq downstream_project
+ expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
+ expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
+ expect(pipeline.source_bridge).to eq bridge
+ expect(pipeline.source_bridge).to be_a ::Ci::Bridge
+ end
+
+    it 'updates bridge status when downstream pipeline gets processed' do
+ pipeline = service.execute(bridge)
+
+ expect(pipeline.reload).to be_pending
+ expect(bridge.reload).to be_success
+ end
+
+ context 'when target ref is not specified' do
+ let(:trigger) do
+ { trigger: { project: downstream_project.full_path } }
+ end
+
+      it 'uses the default branch name' do
+ pipeline = service.execute(bridge)
+
+ expect(pipeline.ref).to eq 'master'
+ end
+ end
+
+ context 'when downstream project is the same as the job project' do
+ let(:trigger) do
+ { trigger: { project: upstream_project.full_path } }
+ end
+
+ context 'detects a circular dependency' do
+ it 'does not create a new pipeline' do
+ expect { service.execute(bridge) }
+ .not_to change { Ci::Pipeline.count }
+ end
+
+ it 'changes status of the bridge build' do
+ service.execute(bridge)
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq 'invalid_bridge_trigger'
+ end
+ end
+
+ context 'when "include" is provided' do
+ let(:file_content) do
+ YAML.dump(
+ rspec: { script: 'rspec' },
+ echo: { script: 'echo' })
+ end
+
+ shared_examples 'creates a child pipeline' do
+ it 'creates only one new pipeline' do
+ expect { service.execute(bridge) }
+ .to change { Ci::Pipeline.count }.by(1)
+ end
+
+ it 'creates a child pipeline in the same project' do
+ pipeline = service.execute(bridge)
+ pipeline.reload
+
+ expect(pipeline.builds.map(&:name)).to eq %w[rspec echo]
+ expect(pipeline.user).to eq bridge.user
+ expect(pipeline.project).to eq bridge.project
+ expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
+ expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
+ expect(pipeline.source_bridge).to eq bridge
+ expect(pipeline.source_bridge).to be_a ::Ci::Bridge
+ end
+
+        it 'updates bridge status when downstream pipeline gets processed' do
+ pipeline = service.execute(bridge)
+
+ expect(pipeline.reload).to be_pending
+ expect(bridge.reload).to be_success
+ end
+
+ it 'propagates parent pipeline settings to the child pipeline' do
+ pipeline = service.execute(bridge)
+ pipeline.reload
+
+ expect(pipeline.ref).to eq(upstream_pipeline.ref)
+ expect(pipeline.sha).to eq(upstream_pipeline.sha)
+ expect(pipeline.source_sha).to eq(upstream_pipeline.source_sha)
+ expect(pipeline.target_sha).to eq(upstream_pipeline.target_sha)
+
+ expect(pipeline.trigger_requests.last).to eq(bridge.trigger_request)
+ end
+ end
+
+ before do
+ upstream_project.repository.create_file(
+ user, 'child-pipeline.yml', file_content, message: 'message', branch_name: 'master')
+
+ upstream_pipeline.update!(sha: upstream_project.commit.id)
+ end
+
+ let(:stub_config) { false }
+
+ let(:trigger) do
+ {
+ trigger: { include: 'child-pipeline.yml' }
+ }
+ end
+
+ it_behaves_like 'creates a child pipeline'
+
+ context 'when latest sha for the ref changed in the meantime' do
+ before do
+ upstream_project.repository.create_file(
+ user, 'another-change', 'test', message: 'message', branch_name: 'master')
+ end
+
+ # it does not auto-cancel pipelines from the same family
+ it_behaves_like 'creates a child pipeline'
+ end
+
+ context 'when the parent is a merge request pipeline' do
+ let(:merge_request) { create(:merge_request, source_project: bridge.project, target_project: bridge.project) }
+ let(:file_content) do
+ YAML.dump(
+ workflow: { rules: [{ if: '$CI_MERGE_REQUEST_ID' }] },
+ rspec: { script: 'rspec' },
+ echo: { script: 'echo' })
+ end
+
+ before do
+ bridge.pipeline.update!(source: :merge_request_event, merge_request: merge_request)
+ end
+
+ it_behaves_like 'creates a child pipeline'
+
+ it 'propagates the merge request to the child pipeline' do
+ pipeline = service.execute(bridge)
+
+ expect(pipeline.merge_request).to eq(merge_request)
+ expect(pipeline).to be_merge_request
+ end
+ end
+
+ context 'when upstream pipeline is a child pipeline' do
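+        # A child pipeline is not allowed to trigger another child pipeline, so the bridge fails with 'bridge_pipeline_is_child_pipeline'.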
+ let!(:pipeline_source) do
+ create(:ci_sources_pipeline,
+ source_pipeline: create(:ci_pipeline, project: upstream_pipeline.project),
+ pipeline: upstream_pipeline
+ )
+ end
+
+ before do
+ upstream_pipeline.update!(source: :parent_pipeline)
+ end
+
+ it 'does not create a further child pipeline' do
+ expect { service.execute(bridge) }
+ .not_to change { Ci::Pipeline.count }
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq 'bridge_pipeline_is_child_pipeline'
+ end
+ end
+ end
+ end
+
+ context 'when bridge job has YAML variables defined' do
+ before do
+ bridge.yaml_variables = [{ key: 'BRIDGE', value: 'var', public: true }]
+ end
+
+ it 'passes bridge variables to downstream pipeline' do
+ pipeline = service.execute(bridge)
+
+ expect(pipeline.variables.first)
+ .to have_attributes(key: 'BRIDGE', value: 'var')
+ end
+ end
+
+ context 'when pipeline variables are defined' do
+ before do
+ upstream_pipeline.variables.create(key: 'PIPELINE_VARIABLE', value: 'my-value')
+ end
+
+ it 'does not pass pipeline variables directly downstream' do
+ pipeline = service.execute(bridge)
+
+ pipeline.variables.map(&:key).tap do |variables|
+ expect(variables).not_to include 'PIPELINE_VARIABLE'
+ end
+ end
+
+ context 'when using YAML variables interpolation' do
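+      # The '$PIPELINE_VARIABLE' reference in the bridge variable is expanded with the upstream pipeline's value, so the downstream pipeline receives 'my-value-var'.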
+ before do
+ bridge.yaml_variables = [{ key: 'BRIDGE', value: '$PIPELINE_VARIABLE-var', public: true }]
+ end
+
+ it 'makes it possible to pass pipeline variable downstream' do
+ pipeline = service.execute(bridge)
+
+ pipeline.variables.find_by(key: 'BRIDGE').tap do |variable|
+ expect(variable.value).to eq 'my-value-var'
+ end
+ end
+ end
+ end
+
+ # TODO: Move this context into a feature spec that uses
+ # multiple pipeline processing services. Location TBD in:
+ # https://gitlab.com/gitlab-org/gitlab/issues/36216
+ context 'when configured with bridge job rules' do
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ downstream_project.add_maintainer(upstream_project.owner)
+ end
+
+ let(:config) do
+ <<-EOY
+ hello:
+ script: echo world
+
+ bridge-job:
+ rules:
+ - if: $CI_COMMIT_REF_NAME == "master"
+ trigger:
+ project: #{downstream_project.full_path}
+ branch: master
+ EOY
+ end
+
+ let(:primary_pipeline) do
+ Ci::CreatePipelineService.new(upstream_project, upstream_project.owner, { ref: 'master' })
+ .execute(:push, save_on_errors: false)
+ end
+
+ let(:bridge) { primary_pipeline.processables.find_by(name: 'bridge-job') }
+ let(:service) { described_class.new(upstream_project, upstream_project.owner) }
+
+ context 'that include the bridge job' do
+ it 'creates the downstream pipeline' do
+ expect { service.execute(bridge) }
+ .to change(downstream_project.ci_pipelines, :count).by(1)
+ end
+ end
+ end
+
+ context 'when user does not have access to push protected branch of downstream project' do
+ before do
+ create(:protected_branch, :maintainers_can_push,
+ project: downstream_project, name: 'feature')
+ end
+
+ it 'changes status of the bridge build' do
+ service.execute(bridge)
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq 'insufficient_bridge_permissions'
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_job_artifacts_service_spec.rb b/spec/services/ci/create_job_artifacts_service_spec.rb
new file mode 100644
index 00000000000..e1146fc3df6
--- /dev/null
+++ b/spec/services/ci/create_job_artifacts_service_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::CreateJobArtifactsService do
+ let(:service) { described_class.new }
+ let(:job) { create(:ci_build) }
+ let(:artifacts_sha256) { '0' * 64 }
+ let(:metadata_file) { nil }
+
+ let(:artifacts_file) do
+ file_to_upload('spec/fixtures/ci_build_artifacts.zip', sha256: artifacts_sha256)
+ end
+
+ let(:params) do
+ {
+ 'artifact_type' => 'archive',
+ 'artifact_format' => 'zip'
+ }
+ end
+
+ def file_to_upload(path, params = {})
+ upload = Tempfile.new('upload')
+ FileUtils.copy(path, upload.path)
+
+ UploadedFile.new(upload.path, params)
+ end
+
+ describe '#execute' do
+ subject { service.execute(job, artifacts_file, params, metadata_file: metadata_file) }
+
+ context 'when artifacts file is uploaded' do
+ it 'saves artifact for the given type' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(1)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact.project).to eq(job.project)
+ expect(new_artifact.file).to be_present
+ expect(new_artifact.file_type).to eq(params['artifact_type'])
+ expect(new_artifact.file_format).to eq(params['artifact_format'])
+ expect(new_artifact.file_sha256).to eq(artifacts_sha256)
+ end
+
+ context 'when metadata file is also uploaded' do
+ let(:metadata_file) do
+ file_to_upload('spec/fixtures/ci_build_artifacts_metadata.gz', sha256: artifacts_sha256)
+ end
+
+ before do
+ stub_application_setting(default_artifacts_expire_in: '1 day')
+ end
+
+ it 'saves metadata artifact' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(2)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact.project).to eq(job.project)
+ expect(new_artifact.file).to be_present
+ expect(new_artifact.file_type).to eq('metadata')
+ expect(new_artifact.file_format).to eq('gzip')
+ expect(new_artifact.file_sha256).to eq(artifacts_sha256)
+ end
+
+ it 'sets expiration date according to application settings' do
+ expected_expire_at = 1.day.from_now
+
+ expect(subject).to be_truthy
+ archive_artifact, metadata_artifact = job.job_artifacts.last(2)
+
+ expect(job.artifacts_expire_at).to be_within(1.minute).of(expected_expire_at)
+ expect(archive_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
+ expect(metadata_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
+ end
+
+ context 'when expire_in params is set' do
+ before do
+ params.merge!('expire_in' => '2 hours')
+ end
+
+ it 'sets expiration date according to the parameter' do
+ expected_expire_at = 2.hours.from_now
+
+ expect(subject).to be_truthy
+ archive_artifact, metadata_artifact = job.job_artifacts.last(2)
+
+ expect(job.artifacts_expire_at).to be_within(1.minute).of(expected_expire_at)
+ expect(archive_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
+ expect(metadata_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
+ end
+ end
+ end
+ end
+
+ context 'when artifacts file already exists' do
+ let!(:existing_artifact) do
+ create(:ci_job_artifact, :archive, file_sha256: existing_sha256, job: job)
+ end
+
+      context 'when sha256 of the uploaded artifact is the same as the existing one' do
+ let(:existing_sha256) { artifacts_sha256 }
+
+ it 'ignores the changes' do
+ expect { subject }.not_to change { Ci::JobArtifact.count }
+ expect(subject).to be_truthy
+ end
+ end
+
+      context 'when sha256 of the uploaded artifact is different from the existing one' do
+ let(:existing_sha256) { '1' * 64 }
+
+ it 'returns false and logs the error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original
+
+ expect { subject }.not_to change { Ci::JobArtifact.count }
+ expect(subject).to be_falsey
+ expect(job.errors[:base]).to contain_exactly('another artifact of the same type already exists')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/needs_spec.rb b/spec/services/ci/create_pipeline_service/needs_spec.rb
index 5ef7e592b36..17b9cf80cc1 100644
--- a/spec/services/ci/create_pipeline_service/needs_spec.rb
+++ b/spec/services/ci/create_pipeline_service/needs_spec.rb
@@ -131,6 +131,10 @@ describe Ci::CreatePipelineService do
)
end
end
+
+ it "sets scheduling_type as 'dag'" do
+ expect(test_a_build.scheduling_type).to eq('dag')
+ end
end
context 'with an invalid config' do
@@ -171,5 +175,67 @@ describe Ci::CreatePipelineService do
.to eq('jobs:test_a:needs:need artifacts should be a boolean value')
end
end
+
+ context 'when needs is empty array' do
+ let(:config) do
+ <<~YAML
+ build_a:
+ stage: build
+ script: ls
+ test_a:
+ stage: test
+ script: ls
+ test_b:
+ stage: test
+ script: ls
+ needs: []
+ deploy_a:
+ stage: deploy
+ script: ls
+ needs: [test_a]
+ deploy_b:
+ stage: deploy
+ script: ls
+ when: manual
+ needs: []
+ YAML
+ end
+
+ it 'creates a pipeline with build_a and test_b pending; deploy_b manual' do
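+        # test_b and deploy_b declare needs: [] so they start right away (pending / manual) regardless of stage; test_a and deploy_a still wait on earlier jobs and remain 'created'.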
+ processables = pipeline.processables
+
+ build_a = processables.find { |processable| processable.name == 'build_a' }
+ test_a = processables.find { |processable| processable.name == 'test_a' }
+ test_b = processables.find { |processable| processable.name == 'test_b' }
+ deploy_a = processables.find { |processable| processable.name == 'deploy_a' }
+ deploy_b = processables.find { |processable| processable.name == 'deploy_b' }
+
+ expect(pipeline).to be_persisted
+ expect(build_a.status).to eq('pending')
+ expect(test_a.status).to eq('created')
+ expect(test_b.status).to eq('pending')
+ expect(deploy_a.status).to eq('created')
+ expect(deploy_b.status).to eq('manual')
+ end
+ end
+
+ context 'when needs is empty hash' do
+ let(:config) do
+ <<~YAML
+ regular_job:
+ stage: build
+ script: echo 'hello'
+ invalid_dag_job:
+ stage: test
+ script: ls
+ needs: {}
+ YAML
+ end
+
+ it 'raises error' do
+ expect(pipeline.yaml_errors)
+ .to eq('jobs:invalid_dag_job:needs config can not be an empty hash')
+ end
+ end
end
end
diff --git a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
new file mode 100644
index 00000000000..b4071d1b0fe
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::CreatePipelineService, '#execute' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let(:ref_name) { 'master' }
+
+ let(:service) do
+ params = { ref: ref_name,
+ before: '00000000',
+ after: project.commit.id,
+ commits: [{ message: 'some commit' }] }
+
+ described_class.new(project, user, params)
+ end
+
+ before do
+ project.add_developer(user)
+ stub_ci_pipeline_to_return_yaml_file
+ end
+
+ describe 'child pipeline triggers' do
+ before do
+ stub_ci_pipeline_yaml_file <<~YAML
+ test:
+ script: rspec
+
+ deploy:
+ variables:
+ CROSS: downstream
+ stage: deploy
+ trigger:
+ include:
+ - local: path/to/child.yml
+ YAML
+ end
+
+ it 'creates bridge jobs correctly' do
+ pipeline = create_pipeline!
+
+ test = pipeline.statuses.find_by(name: 'test')
+ bridge = pipeline.statuses.find_by(name: 'deploy')
+
+ expect(pipeline).to be_persisted
+ expect(test).to be_a Ci::Build
+ expect(bridge).to be_a Ci::Bridge
+ expect(bridge.stage).to eq 'deploy'
+ expect(pipeline.statuses).to match_array [test, bridge]
+ expect(bridge.options).to eq(
+ 'trigger' => { 'include' => [{ 'local' => 'path/to/child.yml' }] }
+ )
+ expect(bridge.yaml_variables)
+ .to include(key: 'CROSS', value: 'downstream', public: true)
+ end
+ end
+
+ describe 'child pipeline triggers' do
+ context 'when YAML is valid' do
+ before do
+ stub_ci_pipeline_yaml_file <<~YAML
+ test:
+ script: rspec
+
+ deploy:
+ variables:
+ CROSS: downstream
+ stage: deploy
+ trigger:
+ include:
+ - local: path/to/child.yml
+ YAML
+ end
+
+ it 'creates bridge jobs correctly' do
+ pipeline = create_pipeline!
+
+ test = pipeline.statuses.find_by(name: 'test')
+ bridge = pipeline.statuses.find_by(name: 'deploy')
+
+ expect(pipeline).to be_persisted
+ expect(test).to be_a Ci::Build
+ expect(bridge).to be_a Ci::Bridge
+ expect(bridge.stage).to eq 'deploy'
+ expect(pipeline.statuses).to match_array [test, bridge]
+ expect(bridge.options).to eq(
+ 'trigger' => { 'include' => [{ 'local' => 'path/to/child.yml' }] }
+ )
+ expect(bridge.yaml_variables)
+ .to include(key: 'CROSS', value: 'downstream', public: true)
+ end
+ end
+
+ context 'when YAML is invalid' do
+ let(:config) do
+ {
+ test: { script: 'rspec' },
+ deploy: {
+ trigger: { include: included_files }
+ }
+ }
+ end
+
+ let(:included_files) do
+ Array.new(include_max_size + 1) do |index|
+ { local: "file#{index}.yml" }
+ end
+ end
+
+ let(:include_max_size) do
+ Gitlab::Ci::Config::Entry::Trigger::ComplexTrigger::SameProjectTrigger::INCLUDE_MAX_SIZE
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ end
+
+ it 'returns errors' do
+ pipeline = create_pipeline!
+
+ expect(pipeline.errors.full_messages.first).to match(/trigger:include config is too long/)
+ expect(pipeline.failure_reason).to eq 'config_error'
+ expect(pipeline).to be_persisted
+ expect(pipeline.status).to eq 'failed'
+ end
+ end
+ end
+
+ def create_pipeline!
+ service.execute(:push)
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb b/spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb
new file mode 100644
index 00000000000..2b11b98f58c
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe Ci::CreatePipelineService do
+ describe '.pre/.post stages' do
+ let_it_be(:user) { create(:admin) }
+ let_it_be(:project) { create(:project, :repository, creator: user) }
+
+ let(:source) { :push }
+ let(:service) { described_class.new(project, user, { ref: ref }) }
+ let(:pipeline) { service.execute(source) }
+
+ let(:config) do
+ <<~YAML
+ validate:
+ stage: .pre
+ script: echo Hello World
+
+ build:
+ stage: build
+ rules:
+ - if: $CI_COMMIT_BRANCH == "master"
+ script: echo Hello World
+
+ notify:
+ stage: .post
+ script: echo Hello World
+ YAML
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+    context 'when pipeline contains a build other than .pre/.post' do
+ let(:ref) { 'refs/heads/master' }
+
+ it 'creates a pipeline' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.stages.map(&:name)).to contain_exactly(
+ *%w(.pre build .post))
+ expect(pipeline.builds.map(&:name)).to contain_exactly(
+ *%w(validate build notify))
+ end
+ end
+
+    context 'when pipeline does not contain any build other than .pre/.post' do
+ let(:ref) { 'refs/heads/feature' }
+
+ it 'does not create a pipeline' do
+ expect(pipeline).not_to be_persisted
+
+ # we can validate a list of stages, as they are assigned
+ # but not persisted
+ expect(pipeline.stages.map(&:name)).to contain_exactly(
+ *%w(.pre .post))
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index 0a2c5724ce4..713d230731b 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -100,17 +100,6 @@ describe Ci::CreatePipelineService do
stub_ci_pipeline_yaml_file(config)
end
- shared_examples 'workflow:rules feature disabled' do
- before do
- stub_feature_flags(workflow_rules: false)
- end
-
- it 'presents a message that rules are disabled' do
- expect(pipeline.errors[:base]).to include('Workflow rules are disabled')
- expect(pipeline).to be_persisted
- end
- end
-
context 'with a single regex-matching if: clause' do
let(:config) do
<<-EOY
@@ -241,8 +230,6 @@ describe Ci::CreatePipelineService do
expect(pipeline.errors[:base]).to include('No stages / jobs for this pipeline.')
expect(pipeline).not_to be_persisted
end
-
- it_behaves_like 'workflow:rules feature disabled'
end
context 'where workflow passes and the job passes' do
@@ -252,8 +239,6 @@ describe Ci::CreatePipelineService do
expect(pipeline).to be_pending
expect(pipeline).to be_persisted
end
-
- it_behaves_like 'workflow:rules feature disabled'
end
context 'where workflow fails and the job fails' do
@@ -263,8 +248,6 @@ describe Ci::CreatePipelineService do
expect(pipeline.errors[:base]).to include('Pipeline filtered out by workflow rules.')
expect(pipeline).not_to be_persisted
end
-
- it_behaves_like 'workflow:rules feature disabled'
end
context 'where workflow fails and the job passes' do
@@ -274,8 +257,6 @@ describe Ci::CreatePipelineService do
expect(pipeline.errors[:base]).to include('Pipeline filtered out by workflow rules.')
expect(pipeline).not_to be_persisted
end
-
- it_behaves_like 'workflow:rules feature disabled'
end
end
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index d6cc233088d..7745a78a806 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -1473,15 +1473,6 @@ describe Ci::CreatePipelineService do
end
end
end
-
- context 'when merge request is not specified' do
- let(:merge_request) { nil }
-
- it 'does not create a detached merge request pipeline' do
- expect(pipeline).not_to be_persisted
- expect(pipeline.errors[:merge_request]).to eq(["can't be blank"])
- end
- end
end
context "when config does not have merge_requests keywords" do
@@ -1518,17 +1509,6 @@ describe Ci::CreatePipelineService do
.to eq(['No stages / jobs for this pipeline.'])
end
end
-
- context 'when merge request is not specified' do
- let(:merge_request) { nil }
-
- it 'does not create a detached merge request pipeline' do
- expect(pipeline).not_to be_persisted
-
- expect(pipeline.errors[:base])
- .to eq(['No stages / jobs for this pipeline.'])
- end
- end
end
context "when config uses regular expression for only keyword" do
@@ -1623,6 +1603,7 @@ describe Ci::CreatePipelineService do
context 'when source is web' do
let(:source) { :web }
+ let(:merge_request) { nil }
context "when config has merge_requests keywords" do
let(:config) do
@@ -1644,30 +1625,11 @@ describe Ci::CreatePipelineService do
}
end
- context 'when merge request is specified' do
- let(:merge_request) do
- create(:merge_request,
- source_project: project,
- source_branch: Gitlab::Git.ref_name(ref_name),
- target_project: project,
- target_branch: 'master')
- end
-
- it 'does not create a merge request pipeline' do
- expect(pipeline).not_to be_persisted
- expect(pipeline.errors[:merge_request]).to eq(["must be blank"])
- end
- end
-
- context 'when merge request is not specified' do
- let(:merge_request) { nil }
-
- it 'creates a branch pipeline' do
- expect(pipeline).to be_persisted
- expect(pipeline).to be_web
- expect(pipeline.merge_request).to be_nil
- expect(pipeline.builds.order(:stage_id).pluck(:name)).to eq(%w[build pages])
- end
+ it 'creates a branch pipeline' do
+ expect(pipeline).to be_persisted
+ expect(pipeline).to be_web
+ expect(pipeline.merge_request).to be_nil
+ expect(pipeline.builds.order(:stage_id).pluck(:name)).to eq(%w[build pages])
end
end
end
@@ -1750,9 +1712,9 @@ describe Ci::CreatePipelineService do
let(:ref_name) { 'refs/heads/master' }
let(:pipeline) { execute_service }
let(:build_names) { pipeline.builds.pluck(:name) }
- let(:regular_job) { pipeline.builds.find_by(name: 'regular-job') }
- let(:rules_job) { pipeline.builds.find_by(name: 'rules-job') }
- let(:delayed_job) { pipeline.builds.find_by(name: 'delayed-job') }
+ let(:regular_job) { find_job('regular-job') }
+ let(:rules_job) { find_job('rules-job') }
+ let(:delayed_job) { find_job('delayed-job') }
shared_examples 'rules jobs are excluded' do
it 'only persists the job without rules' do
@@ -1763,6 +1725,10 @@ describe Ci::CreatePipelineService do
end
end
+ def find_job(name)
+ pipeline.builds.find_by(name: name)
+ end
+
before do
stub_ci_pipeline_yaml_file(config)
allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
@@ -1782,6 +1748,12 @@ describe Ci::CreatePipelineService do
- if: $CI_COMMIT_REF_NAME =~ /master/
when: manual
+ negligible-job:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ allow_failure: true
+
delayed-job:
script: "echo See you later, World!"
rules:
@@ -1800,11 +1772,23 @@ describe Ci::CreatePipelineService do
context 'with matches' do
it 'creates a pipeline with the vanilla and manual jobs' do
expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly('regular-job', 'delayed-job', 'master-job')
+ expect(build_names).to contain_exactly(
+ 'regular-job', 'delayed-job', 'master-job', 'negligible-job'
+ )
end
it 'assigns job:when values to the builds' do
- expect(pipeline.builds.pluck(:when)).to contain_exactly('on_success', 'delayed', 'manual')
+ expect(find_job('regular-job').when).to eq('on_success')
+ expect(find_job('master-job').when).to eq('manual')
+ expect(find_job('negligible-job').when).to eq('on_success')
+ expect(find_job('delayed-job').when).to eq('delayed')
+ end
+
+ it 'assigns job:allow_failure values to the builds' do
+ expect(find_job('regular-job').allow_failure).to eq(false)
+ expect(find_job('master-job').allow_failure).to eq(false)
+ expect(find_job('negligible-job').allow_failure).to eq(true)
+ expect(find_job('delayed-job').allow_failure).to eq(false)
end
it 'assigns start_in for delayed jobs' do
@@ -1827,6 +1811,7 @@ describe Ci::CreatePipelineService do
rules:
- if: $VAR == 'present' && $OTHER || $CI_COMMIT_REF_NAME
when: manual
+ allow_failure: true
EOY
end
@@ -1834,6 +1819,7 @@ describe Ci::CreatePipelineService do
expect(pipeline).to be_persisted
expect(build_names).to contain_exactly('regular-job')
expect(regular_job.when).to eq('manual')
+ expect(regular_job.allow_failure).to eq(true)
end
end
@@ -1860,6 +1846,13 @@ describe Ci::CreatePipelineService do
- README.md
when: delayed
start_in: 4 hours
+
+ negligible-job:
+ script: "can be failed sometimes"
+ rules:
+ - changes:
+ - README.md
+ allow_failure: true
EOY
end
@@ -1872,7 +1865,7 @@ describe Ci::CreatePipelineService do
it 'creates two jobs' do
expect(pipeline).to be_persisted
expect(build_names)
- .to contain_exactly('regular-job', 'rules-job', 'delayed-job')
+ .to contain_exactly('regular-job', 'rules-job', 'delayed-job', 'negligible-job')
end
it 'sets when: for all jobs' do
@@ -1881,6 +1874,10 @@ describe Ci::CreatePipelineService do
expect(delayed_job.when).to eq('delayed')
expect(delayed_job.options[:start_in]).to eq('4 hours')
end
+
+ it 'sets allow_failure: for negligible job' do
+ expect(find_job('negligible-job').allow_failure).to eq(true)
+ end
end
context 'and matches the second rule' do
@@ -1922,12 +1919,14 @@ describe Ci::CreatePipelineService do
rules-job:
script: "echo hello world, $CI_COMMIT_REF_NAME"
+ allow_failure: true
rules:
- changes:
- README.md
when: manual
- if: $CI_COMMIT_REF_NAME == "master"
when: on_success
+ allow_failure: false
delayed-job:
script: "echo See you later, World!"
@@ -1936,6 +1935,7 @@ describe Ci::CreatePipelineService do
- README.md
when: delayed
start_in: 4 hours
+ allow_failure: true
- if: $CI_COMMIT_REF_NAME == "master"
when: delayed
start_in: 1 hour
@@ -1960,6 +1960,12 @@ describe Ci::CreatePipelineService do
expect(delayed_job.when).to eq('delayed')
expect(delayed_job.options[:start_in]).to eq('4 hours')
end
+
+ it 'sets allow_failure: for all jobs' do
+ expect(regular_job.allow_failure).to eq(false)
+ expect(rules_job.allow_failure).to eq(true)
+ expect(delayed_job.allow_failure).to eq(true)
+ end
end
context 'and if: matches after changes' do
@@ -1999,6 +2005,7 @@ describe Ci::CreatePipelineService do
- if: $CI_COMMIT_REF_NAME =~ /master/
changes: [README.md]
when: on_success
+ allow_failure: true
- if: $CI_COMMIT_REF_NAME =~ /master/
changes: [app.rb]
when: manual
@@ -2016,6 +2023,7 @@ describe Ci::CreatePipelineService do
expect(regular_job).to be_persisted
expect(rules_job).to be_persisted
expect(rules_job.when).to eq('manual')
+ expect(rules_job.allow_failure).to eq(false)
end
end
@@ -2040,6 +2048,150 @@ describe Ci::CreatePipelineService do
it_behaves_like 'rules jobs are excluded'
end
end
+
+ context 'with complex if: allow_failure usages' do
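+      # A matching rule's allow_failure overrides the job-level setting; jobs whose rules never match are not created at all, which is why job-2 and job-3 are absent below.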
+ let(:config) do
+ <<-EOY
+ job-1:
+ script: "exit 1"
+ allow_failure: true
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ allow_failure: false
+
+ job-2:
+ script: "exit 1"
+ allow_failure: true
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
+ allow_failure: false
+
+ job-3:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
+ allow_failure: true
+
+ job-4:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ allow_failure: false
+
+ job-5:
+ script: "exit 1"
+ allow_failure: false
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ allow_failure: true
+
+ job-6:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
+ allow_failure: false
+ - allow_failure: true
+ EOY
+ end
+
+ it 'creates a pipeline' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('job-1', 'job-4', 'job-5', 'job-6')
+ end
+
+ it 'assigns job:allow_failure values to the builds' do
+ expect(find_job('job-1').allow_failure).to eq(false)
+ expect(find_job('job-4').allow_failure).to eq(false)
+ expect(find_job('job-5').allow_failure).to eq(true)
+ expect(find_job('job-6').allow_failure).to eq(true)
+ end
+ end
+
+ context 'with complex if: allow_failure & when usages' do
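+      # The same precedence applies to when:; a rule without if: acts as an always-matching fallback, which is how job-5 and job-7 get their when and allow_failure values.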
+ let(:config) do
+ <<-EOY
+ job-1:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+
+ job-2:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+ allow_failure: true
+
+ job-3:
+ script: "exit 1"
+ allow_failure: true
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+
+ job-4:
+ script: "exit 1"
+ allow_failure: true
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+ allow_failure: false
+
+ job-5:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
+ when: manual
+ allow_failure: false
+ - when: always
+ allow_failure: true
+
+ job-6:
+ script: "exit 1"
+ allow_failure: false
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+
+ job-7:
+ script: "exit 1"
+ allow_failure: false
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
+ when: manual
+ - when: :on_failure
+ allow_failure: true
+ EOY
+ end
+
+ it 'creates a pipeline' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly(
+ 'job-1', 'job-2', 'job-3', 'job-4', 'job-5', 'job-6', 'job-7'
+ )
+ end
+
+ it 'assigns job:allow_failure values to the builds' do
+ expect(find_job('job-1').allow_failure).to eq(false)
+ expect(find_job('job-2').allow_failure).to eq(true)
+ expect(find_job('job-3').allow_failure).to eq(true)
+ expect(find_job('job-4').allow_failure).to eq(false)
+ expect(find_job('job-5').allow_failure).to eq(true)
+ expect(find_job('job-6').allow_failure).to eq(false)
+ expect(find_job('job-7').allow_failure).to eq(true)
+ end
+
+ it 'assigns job:when values to the builds' do
+ expect(find_job('job-1').when).to eq('manual')
+ expect(find_job('job-2').when).to eq('manual')
+ expect(find_job('job-3').when).to eq('manual')
+ expect(find_job('job-4').when).to eq('manual')
+ expect(find_job('job-5').when).to eq('always')
+ expect(find_job('job-6').when).to eq('manual')
+ expect(find_job('job-7').when).to eq('on_failure')
+ end
+ end
end
end
diff --git a/spec/services/ci/ensure_stage_service_spec.rb b/spec/services/ci/ensure_stage_service_spec.rb
index de07a1ae238..8a270d77bae 100644
--- a/spec/services/ci/ensure_stage_service_spec.rb
+++ b/spec/services/ci/ensure_stage_service_spec.rb
@@ -44,7 +44,7 @@ describe Ci::EnsureStageService, '#execute' do
it 'retries up to two times' do
job.assign_attributes(stage_id: nil)
- expect(service).to receive(:find_stage).exactly(2).times
+ expect(service).to receive(:find_stage).twice
expect { service.execute(job) }
.to raise_error(Ci::EnsureStageService::EnsureStageError)
diff --git a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb
index c0226654fd9..7b9d6ed4f41 100644
--- a/spec/services/ci/expire_pipeline_cache_service_spec.rb
+++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb
@@ -11,7 +11,7 @@ describe Ci::ExpirePipelineCacheService do
describe '#execute' do
it 'invalidates Etag caching for project pipelines path' do
pipelines_path = "/#{project.full_path}/pipelines.json"
- new_mr_pipelines_path = "/#{project.full_path}/merge_requests/new.json"
+ new_mr_pipelines_path = "/#{project.full_path}/-/merge_requests/new.json"
pipeline_path = "/#{project.full_path}/pipelines/#{pipeline.id}.json"
expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(pipelines_path)
@@ -24,7 +24,7 @@ describe Ci::ExpirePipelineCacheService do
it 'invalidates Etag caching for merge request pipelines if pipeline runs on any commit of that source branch' do
pipeline = create(:ci_empty_pipeline, status: 'created', project: project, ref: 'master')
merge_request = create(:merge_request, source_project: project, source_branch: pipeline.ref)
- merge_request_pipelines_path = "/#{project.full_path}/merge_requests/#{merge_request.iid}/pipelines.json"
+ merge_request_pipelines_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/pipelines.json"
allow_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch)
expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_pipelines_path)
diff --git a/spec/services/ci/pipeline_bridge_status_service_spec.rb b/spec/services/ci/pipeline_bridge_status_service_spec.rb
new file mode 100644
index 00000000000..95f16af3af9
--- /dev/null
+++ b/spec/services/ci/pipeline_bridge_status_service_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::PipelineBridgeStatusService do
+ let(:user) { build(:user) }
+ let(:project) { build(:project) }
+ let(:pipeline) { build(:ci_pipeline, project: project) }
+
+ describe '#execute' do
+ subject { described_class.new(project, user).execute(pipeline) }
+
+ context 'when pipeline has upstream bridge' do
+ let(:bridge) { build(:ci_bridge) }
+
+ before do
+ pipeline.source_bridge = bridge
+ end
+
+ it 'calls inherit_status_from_downstream on upstream bridge' do
+ expect(bridge).to receive(:inherit_status_from_downstream!).with(pipeline)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
index c29c56c2b04..b487730d07f 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
@@ -5,12 +5,12 @@ require 'spec_helper'
describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection do
using RSpec::Parameterized::TableSyntax
- set(:pipeline) { create(:ci_pipeline) }
- set(:build_a) { create(:ci_build, :success, name: 'build-a', stage: 'build', stage_idx: 0, pipeline: pipeline) }
- set(:build_b) { create(:ci_build, :failed, name: 'build-b', stage: 'build', stage_idx: 0, pipeline: pipeline) }
- set(:test_a) { create(:ci_build, :running, name: 'test-a', stage: 'test', stage_idx: 1, pipeline: pipeline) }
- set(:test_b) { create(:ci_build, :pending, name: 'test-b', stage: 'test', stage_idx: 1, pipeline: pipeline) }
- set(:deploy) { create(:ci_build, :created, name: 'deploy', stage: 'deploy', stage_idx: 2, pipeline: pipeline) }
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+ let_it_be(:build_a) { create(:ci_build, :success, name: 'build-a', stage: 'build', stage_idx: 0, pipeline: pipeline) }
+ let_it_be(:build_b) { create(:ci_build, :failed, name: 'build-b', stage: 'build', stage_idx: 0, pipeline: pipeline) }
+ let_it_be(:test_a) { create(:ci_build, :running, name: 'test-a', stage: 'test', stage_idx: 1, pipeline: pipeline) }
+ let_it_be(:test_b) { create(:ci_build, :pending, name: 'test-b', stage: 'test', stage_idx: 1, pipeline: pipeline) }
+ let_it_be(:deploy) { create(:ci_build, :created, name: 'deploy', stage: 'deploy', stage_idx: 2, pipeline: pipeline) }
let(:collection) { described_class.new(pipeline) }
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index 38686b41a22..cbeb45b92ff 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -9,4 +9,10 @@ describe Ci::PipelineProcessing::AtomicProcessingService do
end
it_behaves_like 'Pipeline Processing Service'
+
+ private
+
+ def process_pipeline(initial_process: false)
+ described_class.new(pipeline).execute
+ end
end
diff --git a/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb b/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
index 2da1eb19818..09b462b7600 100644
--- a/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
@@ -9,4 +9,10 @@ describe Ci::PipelineProcessing::LegacyProcessingService do
end
it_behaves_like 'Pipeline Processing Service'
+
+ private
+
+ def process_pipeline(initial_process: false)
+ described_class.new(pipeline).execute(initial_process: initial_process)
+ end
end
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
index cae5ae3f09d..ca003299535 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service.rb
@@ -710,10 +710,10 @@ shared_examples 'Pipeline Processing Service' do
context 'when pipeline with needs is created', :sidekiq_inline do
let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) }
- let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
- let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1) }
- let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1) }
- let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1) }
+ let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
+ let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
+ let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
+ let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) }
let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') }
@@ -795,7 +795,7 @@ shared_examples 'Pipeline Processing Service' do
end
context 'when one of the jobs is run on a failure' do
- let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure') }
+ let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure', scheduling_type: :dag) }
let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') }
@@ -837,12 +837,74 @@ shared_examples 'Pipeline Processing Service' do
end
end
end
+
+ context 'when there is a job scheduled with dag but no needs (needs: [])' do
+ let!(:deploy_pages) { create_build('deploy_pages', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) }
+
+ it 'runs deploy_pages without waiting for prior stages' do
+ # Ci::PipelineProcessing::LegacyProcessingService requires :initial_process parameter
+ expect(process_pipeline(initial_process: true)).to be_truthy
+
+ expect(stages).to eq(%w(pending created pending))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build, deploy_pages)
+
+ linux_build.reset.success!
+ deploy_pages.reset.success!
+
+ expect(stages).to eq(%w(running pending running))
+ expect(builds.success).to contain_exactly(linux_build, deploy_pages)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
+
+ linux_rspec.reset.success!
+ linux_rubocop.reset.success!
+ mac_build.reset.success!
+ mac_rspec.reset.success!
+ mac_rubocop.reset.success!
+
+ expect(stages).to eq(%w(success success running))
+ expect(builds.pending).to contain_exactly(deploy)
+ end
+
+ context 'when ci_dag_support is disabled' do
+ before do
+ stub_feature_flags(ci_dag_support: false)
+ end
+
+ it 'does not run deploy_pages at the start' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build)
+ end
+ end
+ end
end
- def process_pipeline
- described_class.new(pipeline).execute
+ context 'when a needed job is skipped', :sidekiq_inline do
+ let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
+ let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
+ let!(:deploy) do
+ create_build('deploy', stage: 'deploy', stage_idx: 2, scheduling_type: :dag, needs: [
+ create(:ci_build_need, name: 'linux:rspec')
+ ])
+ end
+
+ it 'skips the jobs depending on it' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(all_builds.pending).to contain_exactly(linux_build)
+
+ linux_build.reset.drop!
+
+ expect(stages).to eq(%w(failed skipped skipped))
+ expect(all_builds.failed).to contain_exactly(linux_build)
+ expect(all_builds.skipped).to contain_exactly(linux_rspec, deploy)
+ end
end
+ private
+
def all_builds
pipeline.builds.order(:stage_idx, :id)
end
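The scheduling_type: :dag additions distinguish jobs gated by explicit needs from stage-ordered ones, and a DAG job with an empty needs list becomes runnable immediately. A toy readiness check illustrating that rule (struct and method are invented for the sketch, not GitLab's scheduler):

    # Conceptual sketch of DAG vs. stage readiness.
    SketchJob = Struct.new(:name, :stage_idx, :scheduling_type, :needs, keyword_init: true)

    # finished_names: names of builds that already completed
    # last_done_stage: highest stage index whose builds have all completed (-1 if none)
    def ready_to_run?(job, finished_names:, last_done_stage:)
      if job.scheduling_type == :dag
        # DAG: runnable once every named need has finished; needs: [] runs straight away.
        job.needs.all? { |need| finished_names.include?(need) }
      else
        # Stage: runnable only once all earlier stages have finished.
        job.stage_idx <= last_done_stage + 1
      end
    end

    deploy_pages = SketchJob.new(name: 'deploy_pages', stage_idx: 2, scheduling_type: :dag, needs: [])
    puts ready_to_run?(deploy_pages, finished_names: [], last_done_stage: -1) # => true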
diff --git a/spec/services/ci/process_build_service_spec.rb b/spec/services/ci/process_build_service_spec.rb
index 704685417bb..abc5c18a523 100644
--- a/spec/services/ci/process_build_service_spec.rb
+++ b/spec/services/ci/process_build_service_spec.rb
@@ -22,6 +22,14 @@ describe Ci::ProcessBuildService, '#execute' do
end
end
+ context 'when current status is skipped' do
+ let(:current_status) { 'skipped' }
+
+ it 'changes the build status' do
+ expect { subject }.to change { build.status }.to('pending')
+ end
+ end
+
context 'when current status is failed' do
let(:current_status) { 'failed' }
@@ -114,4 +122,27 @@ describe Ci::ProcessBuildService, '#execute' do
end
end
end
+
+ context 'when build is scheduled with DAG' do
+ let(:pipeline) { create(:ci_pipeline, ref: 'master', project: project) }
+ let!(:build) { create(:ci_build, :created, when: :on_success, pipeline: pipeline, scheduling_type: :dag) }
+ let!(:other_build) { create(:ci_build, :created, when: :on_success, pipeline: pipeline) }
+ let!(:build_on_other_build) { create(:ci_build_need, build: build, name: other_build.name) }
+
+ context 'when current status is success' do
+ let(:current_status) { 'success' }
+
+ it 'enqueues the build' do
+ expect { subject }.to change { build.status }.to('pending')
+ end
+ end
+
+ context 'when current status is skipped' do
+ let(:current_status) { 'skipped' }
+
+ it 'skips the build' do
+ expect { subject }.to change { build.status }.to('skipped')
+ end
+ end
+ end
end
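Read together, the new expectations describe how the processed status depends on scheduling: a stage-scheduled on_success build moves to pending even when the previous status is skipped, while a DAG-scheduled build is skipped instead. A compact sketch of just those asserted transitions (the helper is invented, not Ci::ProcessBuildService):

    # Sketch covering only the transitions asserted above.
    def next_status(scheduling_type:, current_status:)
      if scheduling_type == :dag
        case current_status
        when 'success' then 'pending'
        when 'skipped' then 'skipped'
        end
      else
        'pending' if %w[success skipped].include?(current_status)
      end
    end

    puts next_status(scheduling_type: :stage, current_status: 'skipped') # => "pending"
    puts next_status(scheduling_type: :dag,   current_status: 'skipped') # => "skipped"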
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index 40ae1c4029b..6f5a070d73d 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -33,6 +33,25 @@ describe Ci::ProcessPipelineService do
end
end
+ context 'with a pipeline which has processables with nil scheduling_type', :clean_gitlab_redis_shared_state do
+ let!(:build1) { create_build('build1') }
+ let!(:build2) { create_build('build2') }
+ let!(:build3) { create_build('build3', scheduling_type: :dag) }
+ let!(:build3_on_build2) { create(:ci_build_need, build: build3, name: 'build2') }
+
+ before do
+ pipeline.processables.update_all(scheduling_type: nil)
+ end
+
+ it 'populates scheduling_type before processing' do
+ process_pipeline
+
+ expect(build1.scheduling_type).to eq('stage')
+ expect(build2.scheduling_type).to eq('stage')
+ expect(build3.scheduling_type).to eq('dag')
+ end
+ end
+
def process_pipeline
described_class.new(pipeline).execute
end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index b3189974440..8ca9ce86574 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -33,7 +33,7 @@ describe Ci::RetryBuildService do
job_artifacts_sast job_artifacts_dependency_scanning
job_artifacts_container_scanning job_artifacts_dast
job_artifacts_license_management job_artifacts_license_scanning
- job_artifacts_performance
+ job_artifacts_performance job_artifacts_lsif
job_artifacts_codequality job_artifacts_metrics scheduled_at
job_variables waiting_for_resource_at job_artifacts_metrics_referee
job_artifacts_network_referee].freeze
@@ -46,7 +46,7 @@ describe Ci::RetryBuildService do
sourced_pipelines artifacts_file_store artifacts_metadata_store
metadata runner_session trace_chunks upstream_pipeline_id
artifacts_file artifacts_metadata artifacts_size commands
- resource resource_group_id processed].freeze
+ resource resource_group_id processed security_scans].freeze
shared_examples 'build duplication' do
let(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
@@ -222,6 +222,28 @@ describe Ci::RetryBuildService do
expect { new_build }.to change { Deployment.count }.by(1)
end
end
+
+ context 'when scheduling_type of build is nil' do
+ before do
+ build.update_columns(scheduling_type: nil)
+ end
+
+ context 'when build has no needs' do
+ it 'sets scheduling_type as :stage' do
+ expect(new_build.scheduling_type).to eq('stage')
+ end
+ end
+
+ context 'when build has needs' do
+ before do
+ create(:ci_build_need, build: build)
+ end
+
+ it 'sets scheduling_type as :dag' do
+ expect(new_build.scheduling_type).to eq('dag')
+ end
+ end
+ end
end
context 'when user does not have ability to execute build' do
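The fallback asserted above, a nil scheduling_type on a retried build resolving to stage unless the build has needs, fits in one expression. An illustrative sketch using an invented struct:

    # Illustrative only; mirrors the expectations above for a cloned build with a nil column.
    SketchRetriedBuild = Struct.new(:scheduling_type, :needs, keyword_init: true)

    def fallback_scheduling_type(build)
      build.scheduling_type || (build.needs.any? ? 'dag' : 'stage')
    end

    puts fallback_scheduling_type(SketchRetriedBuild.new(scheduling_type: nil, needs: []))          # => "stage"
    puts fallback_scheduling_type(SketchRetriedBuild.new(scheduling_type: nil, needs: ['compile'])) # => "dag"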
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index e7a241ed335..7db871adc9a 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -95,7 +95,7 @@ describe Ci::RetryPipelineService, '#execute' do
before do
create_build('build', :success, 0)
create_build('build2', :success, 0)
- test_build = create_build('test', :failed, 1)
+ test_build = create_build('test', :failed, 1, scheduling_type: :dag)
create(:ci_build_need, build: test_build, name: 'build')
create(:ci_build_need, build: test_build, name: 'build2')
end
@@ -108,6 +108,21 @@ describe Ci::RetryPipelineService, '#execute' do
expect(build('test')).to be_pending
expect(build('test').needs.map(&:name)).to match_array(%w(build build2))
end
+
+ context 'when there is a failed DAG test without needs' do
+ before do
+ create_build('deploy', :failed, 2, scheduling_type: :dag)
+ end
+
+ it 'retries the test' do
+ service.execute(pipeline)
+
+ expect(build('build')).to be_success
+ expect(build('build2')).to be_success
+ expect(build('test')).to be_pending
+ expect(build('deploy')).to be_pending
+ end
+ end
end
context 'when the last stage was skipped' do
diff --git a/spec/services/ci/stop_environments_service_spec.rb b/spec/services/ci/stop_environments_service_spec.rb
index ed92625a2cc..19a6bcc307f 100644
--- a/spec/services/ci/stop_environments_service_spec.rb
+++ b/spec/services/ci/stop_environments_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe Ci::StopEnvironmentsService do
+ include CreateEnvironmentsHelpers
+
let(:project) { create(:project, :private, :repository) }
let(:user) { create(:user) }
@@ -181,6 +183,57 @@ describe Ci::StopEnvironmentsService do
end
end
+ describe '.execute_in_batch' do
+ subject { described_class.execute_in_batch(environments) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let(:environments) { Environment.available }
+
+ before_all do
+ project.add_developer(user)
+ project.repository.add_branch(user, 'review/feature-1', 'master')
+ project.repository.add_branch(user, 'review/feature-2', 'master')
+ end
+
+ before do
+ create_review_app(user, project, 'review/feature-1')
+ create_review_app(user, project, 'review/feature-2')
+ end
+
+ it 'stops environments' do
+ expect { subject }
+ .to change { project.environments.all.map(&:state).uniq }
+ .from(['available']).to(['stopped'])
+
+ expect(project.environments.all.map(&:auto_stop_at).uniq).to eq([nil])
+ end
+
+ it 'plays stop actions' do
+ expect { subject }
+ .to change { Ci::Build.where(name: 'stop_review_app').map(&:status).uniq }
+ .from(['manual']).to(['pending'])
+ end
+
+ context 'when user does not have a permission to play the stop action' do
+ before do
+ project.team.truncate
+ end
+
+ it 'tracks the exception' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_error)
+ .with(Gitlab::Access::AccessDeniedError, anything).twice
+
+ subject
+ end
+
+ after do
+ project.add_developer(user)
+ end
+ end
+ end
+
def expect_environment_stopped_on(branch)
expect_any_instance_of(Environment)
.to receive(:stop!)
diff --git a/spec/services/clusters/cleanup/app_service_spec.rb b/spec/services/clusters/cleanup/app_service_spec.rb
index cc27f409086..14bfca02fee 100644
--- a/spec/services/clusters/cleanup/app_service_spec.rb
+++ b/spec/services/clusters/cleanup/app_service_spec.rb
@@ -85,7 +85,7 @@ describe Clusters::Cleanup::AppService do
it 'logs application uninstalls and next execution' do
expect(logger).to receive(:info)
- .with(log_meta.merge(event: :uninstalling_app, application: kind_of(String))).exactly(2).times
+ .with(log_meta.merge(event: :uninstalling_app, application: kind_of(String))).twice
expect(logger).to receive(:info)
.with(log_meta.merge(event: :scheduling_execution, next_execution: 1))
diff --git a/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb b/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
new file mode 100644
index 00000000000..572e2b91187
--- /dev/null
+++ b/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
@@ -0,0 +1,197 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::Kubernetes::ConfigureIstioIngressService, '#execute' do
+ include KubernetesHelpers
+
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:api_url) { 'https://kubernetes.example.com' }
+ let(:project) { cluster.project }
+ let(:environment) { create(:environment, project: project) }
+ let(:cluster_project) { cluster.cluster_project }
+ let(:namespace) { "#{project.name}-#{project.id}-#{environment.slug}" }
+ let(:kubeclient) { cluster.kubeclient }
+
+ subject do
+ described_class.new(
+ cluster: cluster
+ ).execute
+ end
+
+ before do
+ stub_kubeclient_discover_istio(api_url)
+ stub_kubeclient_create_secret(api_url, namespace: namespace)
+ stub_kubeclient_put_secret(api_url, "#{namespace}-token", namespace: namespace)
+
+ stub_kubeclient_get_secret(
+ api_url,
+ {
+ metadata_name: "#{namespace}-token",
+ token: Base64.encode64('sample-token'),
+ namespace: namespace
+ }
+ )
+
+ stub_kubeclient_get_secret(
+ api_url,
+ {
+ metadata_name: 'istio-ingressgateway-ca-certs',
+ namespace: 'istio-system'
+ }
+ )
+
+ stub_kubeclient_get_secret(
+ api_url,
+ {
+ metadata_name: 'istio-ingressgateway-certs',
+ namespace: 'istio-system'
+ }
+ )
+
+ stub_kubeclient_put_secret(api_url, 'istio-ingressgateway-ca-certs', namespace: 'istio-system')
+ stub_kubeclient_put_secret(api_url, 'istio-ingressgateway-certs', namespace: 'istio-system')
+ stub_kubeclient_get_gateway(api_url, 'knative-ingress-gateway', namespace: 'knative-serving')
+ stub_kubeclient_put_gateway(api_url, 'knative-ingress-gateway', namespace: 'knative-serving')
+ end
+
+ context 'without a serverless_domain_cluster' do
+ it 'configures gateway to use PASSTHROUGH' do
+ subject
+
+ expect(WebMock).to have_requested(:put, api_url + '/apis/networking.istio.io/v1alpha3/namespaces/knative-serving/gateways/knative-ingress-gateway').with(
+ body: hash_including(
+ apiVersion: "networking.istio.io/v1alpha3",
+ kind: "Gateway",
+ metadata: {
+ generation: 1,
+ labels: {
+ "networking.knative.dev/ingress-provider" => "istio",
+ "serving.knative.dev/release" => "v0.7.0"
+ },
+ name: "knative-ingress-gateway",
+ namespace: "knative-serving",
+ selfLink: "/apis/networking.istio.io/v1alpha3/namespaces/knative-serving/gateways/knative-ingress-gateway"
+ },
+ spec: {
+ selector: {
+ istio: "ingressgateway"
+ },
+ servers: [
+ {
+ hosts: ["*"],
+ port: {
+ name: "http",
+ number: 80,
+ protocol: "HTTP"
+ }
+ },
+ {
+ hosts: ["*"],
+ port: {
+ name: "https",
+ number: 443,
+ protocol: "HTTPS"
+ },
+ tls: {
+ mode: "PASSTHROUGH"
+ }
+ }
+ ]
+ }
+ )
+ )
+ end
+ end
+
+ context 'with a serverless_domain_cluster' do
+ let(:serverless_domain_cluster) { create(:serverless_domain_cluster) }
+ let(:certificate) { OpenSSL::X509::Certificate.new(serverless_domain_cluster.certificate) }
+
+ before do
+ cluster.application_knative = serverless_domain_cluster.knative
+ end
+
+ it 'configures certificates' do
+ subject
+
+ expect(serverless_domain_cluster.reload.key).not_to be_blank
+ expect(serverless_domain_cluster.reload.certificate).not_to be_blank
+
+ expect(certificate.subject.to_s).to include(serverless_domain_cluster.knative.hostname)
+
+ expect(certificate.not_before).to be_within(1.minute).of(Time.now)
+ expect(certificate.not_after).to be_within(1.minute).of(Time.now + 1000.years)
+
+ expect(WebMock).to have_requested(:put, api_url + '/api/v1/namespaces/istio-system/secrets/istio-ingressgateway-ca-certs').with(
+ body: hash_including(
+ metadata: {
+ name: 'istio-ingressgateway-ca-certs',
+ namespace: 'istio-system'
+ },
+ type: 'Opaque'
+ )
+ )
+
+ expect(WebMock).to have_requested(:put, api_url + '/api/v1/namespaces/istio-system/secrets/istio-ingressgateway-certs').with(
+ body: hash_including(
+ metadata: {
+ name: 'istio-ingressgateway-certs',
+ namespace: 'istio-system'
+ },
+ type: 'kubernetes.io/tls'
+ )
+ )
+ end
+
+ it 'configures gateway to use MUTUAL' do
+ subject
+
+ expect(WebMock).to have_requested(:put, api_url + '/apis/networking.istio.io/v1alpha3/namespaces/knative-serving/gateways/knative-ingress-gateway').with(
+ body: {
+ apiVersion: "networking.istio.io/v1alpha3",
+ kind: "Gateway",
+ metadata: {
+ generation: 1,
+ labels: {
+ "networking.knative.dev/ingress-provider" => "istio",
+ "serving.knative.dev/release" => "v0.7.0"
+ },
+ name: "knative-ingress-gateway",
+ namespace: "knative-serving",
+ selfLink: "/apis/networking.istio.io/v1alpha3/namespaces/knative-serving/gateways/knative-ingress-gateway"
+ },
+ spec: {
+ selector: {
+ istio: "ingressgateway"
+ },
+ servers: [
+ {
+ hosts: ["*"],
+ port: {
+ name: "http",
+ number: 80,
+ protocol: "HTTP"
+ }
+ },
+ {
+ hosts: ["*"],
+ port: {
+ name: "https",
+ number: 443,
+ protocol: "HTTPS"
+ },
+ tls: {
+ mode: "MUTUAL",
+ privateKey: "/etc/istio/ingressgateway-certs/tls.key",
+ serverCertificate: "/etc/istio/ingressgateway-certs/tls.crt",
+ caCertificates: "/etc/istio/ingressgateway-ca-certs/cert.pem"
+ }
+ }
+ ]
+ }
+ }
+ )
+ end
+ end
+end
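The two gateway assertions differ only in the HTTPS tls block: PASSTHROUGH without a serverless domain cluster, MUTUAL with one. A small standalone sketch of that branching (the certificate paths are copied from the spec; the helper itself is hypothetical):

    # Hypothetical helper mirroring the two payloads asserted above.
    def https_tls_config(serverless_domain_cluster_present)
      if serverless_domain_cluster_present
        {
          mode: 'MUTUAL',
          privateKey: '/etc/istio/ingressgateway-certs/tls.key',
          serverCertificate: '/etc/istio/ingressgateway-certs/tls.crt',
          caCertificates: '/etc/istio/ingressgateway-ca-certs/cert.pem'
        }
      else
        { mode: 'PASSTHROUGH' }
      end
    end

    p https_tls_config(false) # => {:mode=>"PASSTHROUGH"}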
diff --git a/spec/services/commits/cherry_pick_service_spec.rb b/spec/services/commits/cherry_pick_service_spec.rb
new file mode 100644
index 00000000000..ead1932c2d1
--- /dev/null
+++ b/spec/services/commits/cherry_pick_service_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Commits::CherryPickService do
+ let(:project) { create(:project, :repository) }
+ # * ddd0f15ae83993f5cb66a927a28673882e99100b (HEAD -> master, origin/master, origin/HEAD) Merge branch 'po-fix-test-env-path' into 'master'
+ # |\
+ # | * 2d1db523e11e777e49377cfb22d368deec3f0793 Correct test_env.rb path for adding branch
+ # |/
+ # * 1e292f8fedd741b75372e19097c76d327140c312 Merge branch 'cherry-pick-ce369011' into 'master'
+
+ let_it_be(:merge_commit_sha) { 'ddd0f15ae83993f5cb66a927a28673882e99100b' }
+ let_it_be(:merge_base_sha) { '1e292f8fedd741b75372e19097c76d327140c312' }
+ let_it_be(:branch_name) { 'stable' }
+
+ let(:repository) { project.repository }
+ let(:commit) { project.commit }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_maintainer(user)
+
+ repository.add_branch(user, branch_name, merge_base_sha)
+ end
+
+ def cherry_pick(sha, branch_name)
+ commit = project.commit(sha)
+
+ described_class.new(
+ project,
+ user,
+ commit: commit,
+ start_branch: branch_name,
+ branch_name: branch_name
+ ).execute
+ end
+
+ describe '#execute' do
+ shared_examples 'successful cherry-pick' do
+ it 'picks the commit into the branch' do
+ result = cherry_pick(merge_commit_sha, branch_name)
+ expect(result[:status]).to eq(:success), result[:message]
+
+ head = repository.find_branch(branch_name).target
+ expect(head).not_to eq(merge_base_sha)
+ end
+ end
+
+ it_behaves_like 'successful cherry-pick'
+
+ context 'when picking a merge-request' do
+ let!(:merge_request) { create(:merge_request, :simple, :merged, author: user, source_project: project, merge_commit_sha: merge_commit_sha) }
+
+ it_behaves_like 'successful cherry-pick'
+
+ it 'adds a system note' do
+ result = cherry_pick(merge_commit_sha, branch_name)
+
+ mr_notes = find_cherry_pick_notes(merge_request)
+ expect(mr_notes.length).to eq(1)
+ expect(mr_notes[0].commit_id).to eq(result[:result])
+ end
+
+ context 'when :track_mr_picking feature flag is disabled' do
+ before do
+ stub_feature_flags(track_mr_picking: false)
+ end
+
+ it 'does not add system notes' do
+ expect do
+ cherry_pick(merge_commit_sha, branch_name)
+ end.not_to change { Note.count }
+ end
+ end
+ end
+
+ def find_cherry_pick_notes(noteable)
+ noteable
+ .notes
+ .joins(:system_note_metadata)
+ .where(system_note_metadata: { action: 'cherry_pick' })
+ end
+ end
+end
diff --git a/spec/services/container_expiration_policy_service_spec.rb b/spec/services/container_expiration_policy_service_spec.rb
index 1e4899c627f..b2f2b2e1236 100644
--- a/spec/services/container_expiration_policy_service_spec.rb
+++ b/spec/services/container_expiration_policy_service_spec.rb
@@ -17,7 +17,7 @@ describe ContainerExpirationPolicyService do
it 'kicks off a cleanup worker for the container repository' do
expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
- .with(user.id, container_repository.id, anything)
+ .with(nil, container_repository.id, hash_including(container_expiration_policy: true))
subject
end
diff --git a/spec/services/deployments/link_merge_requests_service_spec.rb b/spec/services/deployments/link_merge_requests_service_spec.rb
index 307fe22a192..605f2cfdc51 100644
--- a/spec/services/deployments/link_merge_requests_service_spec.rb
+++ b/spec/services/deployments/link_merge_requests_service_spec.rb
@@ -5,6 +5,19 @@ require 'spec_helper'
describe Deployments::LinkMergeRequestsService do
let(:project) { create(:project, :repository) }
+ # * ddd0f15 Merge branch 'po-fix-test-env-path' into 'master'
+ # |\
+ # | * 2d1db52 Correct test_env.rb path for adding branch
+ # |/
+ # * 1e292f8 Merge branch 'cherry-pick-ce369011' into 'master'
+ # |\
+ # | * c1c67ab Add file with a _flattable_ path
+ # |/
+ # * 7975be0 Merge branch 'rd-add-file-larger-than-1-mb' into 'master'
+ let_it_be(:first_deployment_sha) { '7975be0116940bf2ad4321f79d02a55c5f7779aa' }
+ let_it_be(:mr1_merge_commit_sha) { '1e292f8fedd741b75372e19097c76d327140c312' }
+ let_it_be(:mr2_merge_commit_sha) { 'ddd0f15ae83993f5cb66a927a28673882e99100b' }
+
describe '#execute' do
context 'when the deployment is for a review environment' do
it 'does nothing' do
@@ -25,7 +38,7 @@ describe Deployments::LinkMergeRequestsService do
:deployment,
:success,
project: project,
- sha: '7975be0116940bf2ad4321f79d02a55c5f7779aa'
+ sha: first_deployment_sha
)
deploy2 = create(
@@ -33,17 +46,14 @@ describe Deployments::LinkMergeRequestsService do
:success,
project: deploy1.project,
environment: deploy1.environment,
- sha: 'ddd0f15ae83993f5cb66a927a28673882e99100b'
+ sha: mr2_merge_commit_sha
)
service = described_class.new(deploy2)
expect(service)
.to receive(:link_merge_requests_for_range)
- .with(
- '7975be0116940bf2ad4321f79d02a55c5f7779aa',
- 'ddd0f15ae83993f5cb66a927a28673882e99100b'
- )
+ .with(first_deployment_sha, mr2_merge_commit_sha)
service.execute
end
@@ -70,7 +80,7 @@ describe Deployments::LinkMergeRequestsService do
mr1 = create(
:merge_request,
:merged,
- merge_commit_sha: '1e292f8fedd741b75372e19097c76d327140c312',
+ merge_commit_sha: mr1_merge_commit_sha,
source_project: project,
target_project: project
)
@@ -78,18 +88,97 @@ describe Deployments::LinkMergeRequestsService do
mr2 = create(
:merge_request,
:merged,
- merge_commit_sha: '2d1db523e11e777e49377cfb22d368deec3f0793',
+ merge_commit_sha: mr2_merge_commit_sha,
source_project: project,
target_project: project
)
described_class.new(deploy).link_merge_requests_for_range(
- '7975be0116940bf2ad4321f79d02a55c5f7779aa',
- 'ddd0f15ae83993f5cb66a927a28673882e99100b'
+ first_deployment_sha,
+ mr2_merge_commit_sha
)
expect(deploy.merge_requests).to include(mr1, mr2)
end
+
+ it 'links picked merge requests' do
+ environment = create(:environment, project: project)
+ deploy =
+ create(:deployment, :success, project: project, environment: environment)
+
+ picked_mr = create(
+ :merge_request,
+ :merged,
+ merge_commit_sha: '123abc',
+ source_project: project,
+ target_project: project
+ )
+
+ mr1 = create(
+ :merge_request,
+ :merged,
+ merge_commit_sha: mr1_merge_commit_sha,
+ source_project: project,
+ target_project: project
+ )
+
+ # mr1 includes c1c67abba which is a cherry-pick of the fake picked_mr merge request
+ create(:track_mr_picking_note, noteable: picked_mr, project: project, commit_id: 'c1c67abbaf91f624347bb3ae96eabe3a1b742478')
+
+ described_class.new(deploy).link_merge_requests_for_range(
+ first_deployment_sha,
+ mr1_merge_commit_sha
+ )
+
+ expect(deploy.merge_requests).to include(mr1, picked_mr)
+ end
+
+ context 'when :track_mr_picking feature flag is disabled' do
+ before do
+ stub_feature_flags(track_mr_picking: false)
+ end
+
+ it 'does not link picked merge requests' do
+ environment = create(:environment, project: project)
+ deploy =
+ create(:deployment, :success, project: project, environment: environment)
+
+ picked_mr = create(
+ :merge_request,
+ :merged,
+ merge_commit_sha: '123abc',
+ source_project: project,
+ target_project: project
+ )
+
+ mr1 = create(
+ :merge_request,
+ :merged,
+ merge_commit_sha: mr1_merge_commit_sha,
+ source_project: project,
+ target_project: project
+ )
+
+ # mr1 includes c1c67abba which is a cherry-pick of the fake picked_mr merge request
+ create(:track_mr_picking_note, noteable: picked_mr, project: project, commit_id: 'c1c67abbaf91f624347bb3ae96eabe3a1b742478')
+
+ mr2 = create(
+ :merge_request,
+ :merged,
+ merge_commit_sha: mr2_merge_commit_sha,
+ source_project: project,
+ target_project: project
+ )
+
+ described_class.new(deploy).link_merge_requests_for_range(
+ first_deployment_sha,
+ mr2_merge_commit_sha
+ )
+
+ expect(deploy.merge_requests).to include(mr1, mr2)
+ expect(deploy.merge_requests).not_to include(picked_mr)
+ end
+ end
end
describe '#link_all_merged_merge_requests' do
diff --git a/spec/services/deployments/older_deployments_drop_service_spec.rb b/spec/services/deployments/older_deployments_drop_service_spec.rb
new file mode 100644
index 00000000000..44e9af07e46
--- /dev/null
+++ b/spec/services/deployments/older_deployments_drop_service_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Deployments::OlderDeploymentsDropService do
+ let(:environment) { create(:environment) }
+ let(:deployment) { create(:deployment, environment: environment) }
+ let(:service) { described_class.new(deployment) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ shared_examples 'it does not drop any build' do
+ it do
+ expect { subject }.to not_change(Ci::Build.failed, :count)
+ end
+ end
+
+ context 'when deployment is nil' do
+ let(:deployment) { nil }
+
+ it_behaves_like 'it does not drop any build'
+ end
+
+ context 'when a deployment is passed in' do
+ context 'and there is no active deployment for the related environment' do
+ let(:deployment) { create(:deployment, :canceled, environment: environment) }
+ let(:deployment2) { create(:deployment, :canceled, environment: environment) }
+
+ before do
+ deployment
+ deployment2
+ end
+
+ it_behaves_like 'it does not drop any build'
+ end
+
+ context 'and there are active deployments for the related environment' do
+ let(:deployment) { create(:deployment, :running, environment: environment) }
+ let(:deployment2) { create(:deployment, :running, environment: environment) }
+
+ context 'and there is no older deployment than "deployment"' do
+ before do
+ deployment
+ deployment2
+ end
+
+ it_behaves_like 'it does not drop any build'
+ end
+
+ context 'and there is an older deployment than "deployment"' do
+ let(:older_deployment) { create(:deployment, :running, environment: environment) }
+
+ before do
+ older_deployment
+ deployment
+ deployment2
+ end
+
+ it 'drops that older deployment' do
+ deployable = older_deployment.deployable
+ expect(deployable.failed?).to be_falsey
+
+ subject
+
+ expect(deployable.reload.failed?).to be_truthy
+ end
+
+ context 'and there is no deployable for that older deployment' do
+ let(:older_deployment) { create(:deployment, :running, environment: environment, deployable: nil) }
+
+ it_behaves_like 'it does not drop any build'
+ end
+ end
+ end
+ end
+ end
+end
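The expectations above reduce to: among running deployments on the same environment, anything older than the deployment passed in gets its deployable dropped. A pure-Ruby sketch of that selection (structs and field names are invented):

    # Invented structs; only the selection rule asserted above is modelled.
    SketchDeployment = Struct.new(:id, :status, :deployable, keyword_init: true)
    SketchBuild      = Struct.new(:status, keyword_init: true)

    def drop_older_deployments(deployments, current)
      deployments
        .select { |d| d.status == 'running' && d.id < current.id && d.deployable }
        .each   { |d| d.deployable.status = 'failed' }
    end

    older   = SketchDeployment.new(id: 1, status: 'running', deployable: SketchBuild.new(status: 'running'))
    current = SketchDeployment.new(id: 2, status: 'running', deployable: SketchBuild.new(status: 'running'))
    drop_older_deployments([older, current], current)
    puts older.deployable.status   # => "failed"
    puts current.deployable.status # => "running"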
diff --git a/spec/services/environments/auto_stop_service_spec.rb b/spec/services/environments/auto_stop_service_spec.rb
new file mode 100644
index 00000000000..3620bf8fe87
--- /dev/null
+++ b/spec/services/environments/auto_stop_service_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Environments::AutoStopService, :clean_gitlab_redis_shared_state do
+ include CreateEnvironmentsHelpers
+ include ExclusiveLeaseHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let(:service) { described_class.new }
+
+ before_all do
+ project.add_developer(user)
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let(:environments) { Environment.all }
+
+ before_all do
+ project.add_developer(user)
+ project.repository.add_branch(user, 'review/feature-1', 'master')
+ project.repository.add_branch(user, 'review/feature-2', 'master')
+ end
+
+ before do
+ create_review_app(user, project, 'review/feature-1')
+ create_review_app(user, project, 'review/feature-2')
+ end
+
+ it 'stops environments and plays stop jobs' do
+ expect { subject }
+ .to change { Environment.all.map(&:state).uniq }
+ .from(['available']).to(['stopped'])
+
+ expect(Ci::Build.where(name: 'stop_review_app').map(&:status).uniq).to eq(['pending'])
+ end
+
+ context 'when auto_stop_environments feature flag is disabled' do
+ before do
+ stub_feature_flags(auto_stop_environments: false)
+ end
+
+ it 'does not execute Ci::StopEnvironmentsService' do
+ expect(Ci::StopEnvironmentsService).not_to receive(:execute_in_batch)
+
+ subject
+ end
+ end
+
+ context 'when another Sidekiq worker is already running' do
+ before do
+ stub_exclusive_lease_taken(described_class::EXCLUSIVE_LOCK_KEY)
+ end
+
+ it 'does not execute stop_in_batch' do
+ expect_next_instance_of(described_class) do |service|
+ expect(service).not_to receive(:stop_in_batch)
+ end
+
+ expect { subject }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+ end
+
+ context 'when the loop reaches the timeout' do
+ before do
+ stub_const("#{described_class}::LOOP_TIMEOUT", 0.seconds)
+ stub_const("#{described_class}::LOOP_LIMIT", 100_000)
+ allow_next_instance_of(described_class) do |service|
+ allow(service).to receive(:stop_in_batch) { true }
+ end
+ end
+
+ it 'returns false and does not continue the process' do
+ is_expected.to eq(false)
+ end
+ end
+
+ context 'when the loop reaches the loop limit' do
+ before do
+ stub_const("#{described_class}::LOOP_LIMIT", 1)
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ it 'stops only one available environment' do
+ expect { subject }.to change { Environment.available.count }.by(-1)
+ end
+ end
+ end
+end
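The stubbed LOOP_TIMEOUT, LOOP_LIMIT and BATCH_SIZE constants suggest a bounded batch loop that returns false when it gives up. A generic sketch of that pattern, detached from the service (constant values are placeholders, not the real ones):

    # Generic bounded batch loop; not the service's code.
    LOOP_TIMEOUT = 45.0 # seconds (placeholder)
    LOOP_LIMIT   = 1000
    BATCH_SIZE   = 100

    def run_in_batches(items)
      started_at = Time.now

      LOOP_LIMIT.times do
        batch = items.shift(BATCH_SIZE)
        break if batch.empty?

        yield batch

        return false if Time.now - started_at > LOOP_TIMEOUT # timed out: stop and report failure
      end

      true
    end

    p run_in_batches((1..250).to_a) { |batch| batch.size } # => true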
diff --git a/spec/services/error_tracking/base_service_spec.rb b/spec/services/error_tracking/base_service_spec.rb
new file mode 100644
index 00000000000..68deb2e2a73
--- /dev/null
+++ b/spec/services/error_tracking/base_service_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ErrorTracking::BaseService do
+ describe '#compose_response' do
+ let(:project) { double('project') }
+ let(:user) { double('user') }
+ let(:service) { described_class.new(project, user) }
+
+ it 'returns bad_request error when response has an error key' do
+ data = { error: 'Unexpected Error' }
+
+ result = service.send(:compose_response, data)
+
+ expect(result[:status]).to be(:error)
+ expect(result[:message]).to be('Unexpected Error')
+ expect(result[:http_status]).to be(:bad_request)
+ end
+
+ it 'returns a server error when the response reports the missing-keys error type' do
+ data = { error: 'Unexpected Error', error_type: ErrorTracking::ProjectErrorTrackingSetting::SENTRY_API_ERROR_TYPE_MISSING_KEYS }
+
+ result = service.send(:compose_response, data)
+
+ expect(result[:status]).to be(:error)
+ expect(result[:message]).to be('Unexpected Error')
+ expect(result[:http_status]).to be(:internal_server_error)
+ end
+
+ it 'returns no content when response is nil' do
+ data = nil
+
+ result = service.send(:compose_response, data)
+
+ expect(result[:status]).to be(:error)
+ expect(result[:message]).to be('Not ready. Try again later')
+ expect(result[:http_status]).to be(:no_content)
+ end
+
+ context 'when result has no error key' do
+ let(:data) { { thing: :cat } }
+
+ it 'raises NotImplementedError' do
+ expect { service.send(:compose_response, data) }
+ .to raise_error(NotImplementedError)
+ end
+
+ context 'when parse_response is implemented' do
+ before do
+ expect(service).to receive(:parse_response) do |response|
+ { animal: response[:thing] }
+ end
+ end
+
+ it 'returns successful response' do
+ result = service.send(:compose_response, data)
+
+ expect(result[:animal]).to eq(:cat)
+ expect(result[:status]).to eq(:success)
+ end
+
+ it 'returns successful response with changes from passed block' do
+ result = service.send(:compose_response, data) do
+ data[:thing] = :fish
+ end
+
+ expect(result[:animal]).to eq(:fish)
+ expect(result[:status]).to eq(:success)
+ end
+ end
+ end
+ end
+end
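The spec pins down the compose_response contract: an error key maps to bad_request (or internal_server_error for the missing-keys error type), nil maps to no_content, and anything else must go through a subclass's parse_response, with an optional block run first. A stand-in sketch of that contract (class and constant names are placeholders, not ErrorTracking::BaseService):

    # Stand-in for the behaviour asserted above.
    class ErrorTrackingResponseSketch
      MISSING_KEYS = 'missing_keys' # placeholder for the Sentry missing-keys error type constant

      def compose_response(data)
        return { status: :error, message: 'Not ready. Try again later', http_status: :no_content } if data.nil?

        if data[:error]
          http_status = data[:error_type] == MISSING_KEYS ? :internal_server_error : :bad_request
          return { status: :error, message: data[:error], http_status: http_status }
        end

        yield if block_given? # callers may adjust the data before parsing
        raise NotImplementedError unless respond_to?(:parse_response, true)

        parse_response(data).merge(status: :success) # subclasses implement parse_response
      end
    end

    p ErrorTrackingResponseSketch.new.compose_response(error: 'Unexpected Error')
    # => {:status=>:error, :message=>"Unexpected Error", :http_status=>:bad_request}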
diff --git a/spec/services/error_tracking/issue_details_service_spec.rb b/spec/services/error_tracking/issue_details_service_spec.rb
index 4d5505bb5a9..9f217deda21 100644
--- a/spec/services/error_tracking/issue_details_service_spec.rb
+++ b/spec/services/error_tracking/issue_details_service_spec.rb
@@ -3,29 +3,15 @@
require 'spec_helper'
describe ErrorTracking::IssueDetailsService do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
+ include_context 'sentry error tracking context'
- let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
- let(:token) { 'test-token' }
- let(:result) { subject.execute }
-
- let(:error_tracking_setting) do
- create(:project_error_tracking_setting, api_url: sentry_url, token: token, project: project)
- end
-
- subject { described_class.new(project, user) }
-
- before do
- expect(project).to receive(:error_tracking_setting).at_least(:once).and_return(error_tracking_setting)
-
- project.add_reporter(user)
- end
+ subject { described_class.new(project, user, params) }
describe '#execute' do
context 'with authorized user' do
context 'when issue_details returns a detailed error' do
let(:detailed_error) { build(:detailed_error_tracking_error) }
+ let(:params) { { issue_id: detailed_error.id } }
before do
expect(error_tracking_setting)
@@ -35,6 +21,19 @@ describe ErrorTracking::IssueDetailsService do
it 'returns the detailed error' do
expect(result).to eq(status: :success, issue: detailed_error)
end
+
+ it 'returns the gitlab_issue when the error has a sentry_issue' do
+ gitlab_issue = create(:issue, project: project)
+ create(:sentry_issue, issue: gitlab_issue, sentry_issue_identifier: detailed_error.id)
+
+ expect(result[:issue].gitlab_issue).to include(
+ "http", "/#{project.full_path}/issues/#{gitlab_issue.iid}"
+ )
+ end
+
+ it 'returns the gitlab_issue path from sentry when the error has no sentry_issue' do
+ expect(result[:issue].gitlab_issue).to eq(detailed_error.gitlab_issue)
+ end
end
include_examples 'error tracking service data not ready', :issue_details
diff --git a/spec/services/error_tracking/issue_latest_event_service_spec.rb b/spec/services/error_tracking/issue_latest_event_service_spec.rb
index cda15042814..078d7511850 100644
--- a/spec/services/error_tracking/issue_latest_event_service_spec.rb
+++ b/spec/services/error_tracking/issue_latest_event_service_spec.rb
@@ -3,25 +3,10 @@
require 'spec_helper'
describe ErrorTracking::IssueLatestEventService do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
-
- let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
- let(:token) { 'test-token' }
- let(:result) { subject.execute }
-
- let(:error_tracking_setting) do
- create(:project_error_tracking_setting, api_url: sentry_url, token: token, project: project)
- end
+ include_context 'sentry error tracking context'
subject { described_class.new(project, user) }
- before do
- expect(project).to receive(:error_tracking_setting).at_least(:once).and_return(error_tracking_setting)
-
- project.add_reporter(user)
- end
-
describe '#execute' do
context 'with authorized user' do
context 'when issue_latest_event returns an error event' do
diff --git a/spec/services/error_tracking/issue_update_service_spec.rb b/spec/services/error_tracking/issue_update_service_spec.rb
new file mode 100644
index 00000000000..78388328a22
--- /dev/null
+++ b/spec/services/error_tracking/issue_update_service_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ErrorTracking::IssueUpdateService do
+ include_context 'sentry error tracking context'
+
+ let(:arguments) { { issue_id: 1234, status: 'resolved' } }
+
+ subject(:update_service) { described_class.new(project, user, arguments) }
+
+ shared_examples 'does not perform close issue flow' do
+ it 'does not call the close issue service' do
+ update_service.execute
+
+ expect(issue_close_service)
+ .not_to have_received(:execute)
+ end
+
+ it 'does not create system note' do
+ expect(SystemNoteService).not_to receive(:close_after_error_tracking_resolve)
+ update_service.execute
+ end
+ end
+
+ describe '#execute' do
+ context 'with authorized user' do
+ context 'when update_issue returns success' do
+ let(:update_issue_response) { { updated: true } }
+
+ before do
+ expect(error_tracking_setting)
+ .to receive(:update_issue).and_return(update_issue_response)
+ end
+
+ it 'returns the response' do
+ expect(update_service.execute).to eq(update_issue_response.merge(status: :success, closed_issue_iid: nil))
+ end
+
+ it 'updates any related issue' do
+ expect(update_service).to receive(:update_related_issue)
+
+ update_service.execute
+ end
+
+ context 'related issue and resolving' do
+ let(:issue) { create(:issue, project: project) }
+ let(:sentry_issue) { create(:sentry_issue, issue: issue) }
+ let(:arguments) { { issue_id: sentry_issue.sentry_issue_identifier, status: 'resolved' } }
+
+ let(:issue_close_service) { spy(:issue_close_service) }
+
+ before do
+ allow_next_instance_of(SentryIssueFinder) do |finder|
+ allow(finder).to receive(:execute).and_return(sentry_issue)
+ end
+
+ allow(Issues::CloseService)
+ .to receive(:new)
+ .and_return(issue_close_service)
+
+ allow(issue_close_service)
+ .to receive(:execute)
+ .and_return(issue)
+ end
+
+ it 'closes the issue' do
+ update_service.execute
+
+ expect(issue_close_service)
+ .to have_received(:execute)
+ .with(issue, system_note: false)
+ end
+
+ context 'when the issue gets closed' do
+ let(:closed_issue) { create(:issue, :closed, project: project) }
+
+ before do
+ expect(issue_close_service)
+ .to receive(:execute)
+ .with(issue, system_note: false)
+ .and_return(closed_issue)
+ end
+
+ it 'creates a system note' do
+ expect(SystemNoteService).to receive(:close_after_error_tracking_resolve)
+
+ update_service.execute
+ end
+
+ it 'returns a response with closed issue' do
+ expect(update_service.execute).to eq(status: :success, updated: true, closed_issue_iid: closed_issue.iid)
+ end
+ end
+
+ context 'issue is already closed' do
+ let(:issue) { create(:issue, :closed, project: project) }
+
+ include_examples 'does not perform close issue flow'
+ end
+
+ context 'status is not resolving' do
+ let(:arguments) { { issue_id: sentry_issue.sentry_issue_identifier, status: 'ignored' } }
+
+ include_examples 'does not perform close issue flow'
+ end
+ end
+ end
+
+ include_examples 'error tracking service sentry error handling', :update_issue
+ end
+
+ include_examples 'error tracking service unauthorized user'
+ include_examples 'error tracking service disabled'
+ end
+end
diff --git a/spec/services/error_tracking/list_issues_service_spec.rb b/spec/services/error_tracking/list_issues_service_spec.rb
index ecb6bcc541b..5f6e071e10d 100644
--- a/spec/services/error_tracking/list_issues_service_spec.rb
+++ b/spec/services/error_tracking/list_issues_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe ErrorTracking::ListIssuesService do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
+ include_context 'sentry error tracking context'
+
let(:params) { { search_term: 'something', sort: 'last_seen', cursor: 'some-cursor' } }
let(:list_sentry_issues_args) do
{
@@ -16,39 +16,35 @@ describe ErrorTracking::ListIssuesService do
}
end
- let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
- let(:token) { 'test-token' }
- let(:result) { subject.execute }
-
- let(:error_tracking_setting) do
- create(:project_error_tracking_setting, api_url: sentry_url, token: token, project: project)
- end
-
subject { described_class.new(project, user, params) }
- before do
- expect(project).to receive(:error_tracking_setting).at_least(:once).and_return(error_tracking_setting)
-
- project.add_reporter(user)
- end
-
describe '#execute' do
context 'with authorized user' do
- context 'when list_sentry_issues returns issues' do
- let(:issues) { [:list, :of, :issues] }
-
- before do
- expect(error_tracking_setting)
- .to receive(:list_sentry_issues)
- .with(list_sentry_issues_args)
- .and_return(issues: issues, pagination: {})
- end
+ let(:issues) { [] }
+
+ described_class::ISSUE_STATUS_VALUES.each do |status|
+ it "returns the issues with #{status} issue_status" do
+ params[:issue_status] = status
+ list_sentry_issues_args[:issue_status] = status
+ expect_list_sentry_issues_with(list_sentry_issues_args)
- it 'returns the issues' do
expect(result).to eq(status: :success, pagination: {}, issues: issues)
end
end
+ it 'returns the issues with no issue_status' do
+ expect_list_sentry_issues_with(list_sentry_issues_args)
+
+ expect(result).to eq(status: :success, pagination: {}, issues: issues)
+ end
+
+ it 'returns bad request for an issue_status not on the whitelist' do
+ params[:issue_status] = 'assigned'
+
+ expect(error_tracking_setting).not_to receive(:list_sentry_issues)
+ expect(result).to eq(message: "Bad Request: Invalid issue_status", status: :error, http_status: :bad_request)
+ end
+
include_examples 'error tracking service data not ready', :list_sentry_issues
include_examples 'error tracking service sentry error handling', :list_sentry_issues
include_examples 'error tracking service http status handling', :list_sentry_issues
@@ -66,3 +62,10 @@ describe ErrorTracking::ListIssuesService do
end
end
end
+
+def expect_list_sentry_issues_with(list_sentry_issues_args)
+ expect(error_tracking_setting)
+ .to receive(:list_sentry_issues)
+ .with(list_sentry_issues_args)
+ .and_return(issues: [], pagination: {})
+end
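The new issue_status handling amounts to a whitelist check before the Sentry call. An illustrative guard matching those expectations (the allowed values are assumed for the sketch, not taken from the service):

    # Illustrative guard; ISSUE_STATUS_VALUES members are assumptions.
    ISSUE_STATUS_VALUES = %w[resolved unresolved ignored].freeze

    def validate_issue_status(issue_status)
      return { status: :success } if issue_status.nil? || ISSUE_STATUS_VALUES.include?(issue_status)

      { message: 'Bad Request: Invalid issue_status', status: :error, http_status: :bad_request }
    end

    p validate_issue_status('assigned')
    # => {:message=>"Bad Request: Invalid issue_status", :status=>:error, :http_status=>:bad_request}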
diff --git a/spec/services/error_tracking/list_projects_service_spec.rb b/spec/services/error_tracking/list_projects_service_spec.rb
index ddd369d45f2..565610c64ac 100644
--- a/spec/services/error_tracking/list_projects_service_spec.rb
+++ b/spec/services/error_tracking/list_projects_service_spec.rb
@@ -63,32 +63,6 @@ describe ErrorTracking::ListProjectsService do
end
end
- context 'sentry client raises exception' do
- context 'Sentry::Client::Error' do
- before do
- expect(error_tracking_setting).to receive(:list_sentry_projects)
- .and_raise(Sentry::Client::Error, 'Sentry response status code: 500')
- end
-
- it 'returns error response' do
- expect(result[:message]).to eq('Sentry response status code: 500')
- expect(result[:http_status]).to eq(:bad_request)
- end
- end
-
- context 'Sentry::Client::MissingKeysError' do
- before do
- expect(error_tracking_setting).to receive(:list_sentry_projects)
- .and_raise(Sentry::Client::MissingKeysError, 'Sentry API response is missing keys. key not found: "id"')
- end
-
- it 'returns error response' do
- expect(result[:message]).to eq('Sentry API response is missing keys. key not found: "id"')
- expect(result[:http_status]).to eq(:internal_server_error)
- end
- end
- end
-
context 'with invalid url' do
let(:params) do
ActionController::Parameters.new(
diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb
index b1c64bc3c0a..ae0506ad442 100644
--- a/spec/services/git/branch_hooks_service_spec.rb
+++ b/spec/services/git/branch_hooks_service_spec.rb
@@ -69,6 +69,7 @@ describe Git::BranchHooksService do
Gitlab.config.gitlab.url,
project.namespace.to_param,
project.to_param,
+ '-',
'commit',
commit.id
].join('/')
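The added '-' element reflects the scoped commit route, so the joined URL becomes host/namespace/project/-/commit/sha. A one-line illustration with example values:

    # Example values only; mirrors how the spec joins the path segments above.
    url = ['https://gitlab.example.com', 'group', 'project', '-', 'commit', 'deadbeef'].join('/')
    puts url # => "https://gitlab.example.com/group/project/-/commit/deadbeef"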
@@ -213,23 +214,23 @@ describe Git::BranchHooksService do
end
end
- describe 'GPG signatures' do
+ describe 'signatures' do
context 'when the commit has a signature' do
context 'when the signature is already cached' do
before do
create(:gpg_signature, commit_sha: commit.id)
end
- it 'does not queue a CreateGpgSignatureWorker' do
- expect(CreateGpgSignatureWorker).not_to receive(:perform_async)
+ it 'does not queue a CreateCommitSignatureWorker' do
+ expect(CreateCommitSignatureWorker).not_to receive(:perform_async)
service.execute
end
end
context 'when the signature is not yet cached' do
- it 'queues a CreateGpgSignatureWorker' do
- expect(CreateGpgSignatureWorker).to receive(:perform_async).with([commit.id], project.id)
+ it 'queues a CreateCommitSignatureWorker' do
+ expect(CreateCommitSignatureWorker).to receive(:perform_async).with([commit.id], project.id)
service.execute
end
@@ -239,7 +240,7 @@ describe Git::BranchHooksService do
.to receive(:shas_with_signatures)
.and_return([sample_commit.id, another_sample_commit.id])
- expect(CreateGpgSignatureWorker)
+ expect(CreateCommitSignatureWorker)
.to receive(:perform_async)
.with([sample_commit.id, another_sample_commit.id], project.id)
@@ -256,8 +257,8 @@ describe Git::BranchHooksService do
.and_return([])
end
- it 'does not queue a CreateGpgSignatureWorker' do
- expect(CreateGpgSignatureWorker)
+ it 'does not queue a CreateCommitSignatureWorker' do
+ expect(CreateCommitSignatureWorker)
.not_to receive(:perform_async)
.with(sample_commit.id, project.id)
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index 4d7ec7ac1d8..8b4f45010ed 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -12,6 +12,7 @@ describe Git::BranchPushService, services: true do
let(:newrev) { sample_commit.id }
let(:branch) { 'master' }
let(:ref) { "refs/heads/#{branch}" }
+ let(:push_options) { nil }
before do
project.add_maintainer(user)
@@ -19,7 +20,7 @@ describe Git::BranchPushService, services: true do
describe 'Push branches' do
subject do
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
+ execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref, push_options: push_options)
end
context 'new branch' do
@@ -113,6 +114,20 @@ describe Git::BranchPushService, services: true do
expect { subject }.not_to change { Ci::Pipeline.count }
end
+
+ context 'with push options' do
+ let(:push_options) { ['mr.create'] }
+
+ it 'sanitizes push options' do
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
+ expect(Sidekiq.logger).to receive(:warn) do |args|
+ pipeline_params = args[:pipeline_params]
+ expect(pipeline_params.keys).to match_array(%i(before after ref variables_attributes checkout_sha))
+ end
+
+ expect { subject }.not_to change { Ci::Pipeline.count }
+ end
+ end
end
end
@@ -421,7 +436,7 @@ describe Git::BranchPushService, services: true do
let(:message) { "this is some work.\n\ncloses JIRA-1" }
let(:comment_body) do
{
- body: "Issue solved with [#{closing_commit.id}|http://#{Gitlab.config.gitlab.host}/#{project.full_path}/commit/#{closing_commit.id}]."
+ body: "Issue solved with [#{closing_commit.id}|http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/commit/#{closing_commit.id}]."
}.to_json
end
@@ -637,8 +652,8 @@ describe Git::BranchPushService, services: true do
end
end
- def execute_service(project, user, change)
- service = described_class.new(project, user, change: change)
+ def execute_service(project, user, change, push_options = {})
+ service = described_class.new(project, user, change: change, push_options: push_options)
service.execute
service
end
diff --git a/spec/services/git/tag_hooks_service_spec.rb b/spec/services/git/tag_hooks_service_spec.rb
index abb5b9b130b..094ccd8c9f0 100644
--- a/spec/services/git/tag_hooks_service_spec.rb
+++ b/spec/services/git/tag_hooks_service_spec.rb
@@ -107,6 +107,7 @@ describe Git::TagHooksService, :service do
Gitlab.config.gitlab.url,
project.namespace.to_param,
project.to_param,
+ '-',
'commit',
commit.id
].join('/')
diff --git a/spec/services/groups/import_export/export_service_spec.rb b/spec/services/groups/import_export/export_service_spec.rb
index 2024e1ed457..b1f76964722 100644
--- a/spec/services/groups/import_export/export_service_spec.rb
+++ b/spec/services/groups/import_export/export_service_spec.rb
@@ -10,6 +10,10 @@ describe Groups::ImportExport::ExportService do
let(:export_path) { shared.export_path }
let(:service) { described_class.new(group: group, user: user, params: { shared: shared }) }
+ before do
+ group.add_owner(user)
+ end
+
after do
FileUtils.rm_rf(export_path)
end
@@ -30,6 +34,18 @@ describe Groups::ImportExport::ExportService do
end
end
+ context 'when user does not have admin_group permission' do
+ let!(:another_user) { create(:user) }
+ let(:service) { described_class.new(group: group, user: another_user, params: { shared: shared }) }
+
+ it 'fails' do
+ expected_message =
+ "User with ID: %s does not have permission to Group %s with ID: %s." %
+ [another_user.id, group.name, group.id]
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error).with_message(expected_message)
+ end
+ end
+
context 'when saving services fail' do
before do
allow(service).to receive_message_chain(:tree_exporter, :save).and_return(false)
diff --git a/spec/services/groups/import_export/import_service_spec.rb b/spec/services/groups/import_export/import_service_spec.rb
new file mode 100644
index 00000000000..bac266d08da
--- /dev/null
+++ b/spec/services/groups/import_export/import_service_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Groups::ImportExport::ImportService do
+ describe '#execute' do
+ let(:user) { create(:admin) }
+ let(:group) { create(:group) }
+ let(:service) { described_class.new(group: group, user: user) }
+ let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
+
+ subject { service.execute }
+
+ before do
+ ImportExportUpload.create(group: group, import_file: import_file)
+ end
+
+ context 'when user has correct permissions' do
+ it 'imports group structure successfully' do
+ expect(subject).to be_truthy
+ end
+
+ it 'removes import file' do
+ subject
+
+ expect(group.import_export_upload.import_file.file).to be_nil
+ end
+ end
+
+ context 'when user does not have correct permissions' do
+ let(:user) { create(:user) }
+
+ it 'raises exception' do
+ expect { subject }.to raise_error(StandardError)
+ end
+ end
+ end
+end
diff --git a/spec/services/incident_management/create_issue_service_spec.rb b/spec/services/incident_management/create_issue_service_spec.rb
new file mode 100644
index 00000000000..e720aafb897
--- /dev/null
+++ b/spec/services/incident_management/create_issue_service_spec.rb
@@ -0,0 +1,311 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe IncidentManagement::CreateIssueService do
+ let(:project) { create(:project, :repository, :private) }
+ let(:user) { User.alert_bot }
+ let(:service) { described_class.new(project, alert_payload) }
+ let(:alert_starts_at) { Time.now }
+ let(:alert_title) { 'TITLE' }
+ let(:alert_annotations) { { title: alert_title } }
+
+ let(:alert_payload) do
+ build_alert_payload(
+ annotations: alert_annotations,
+ starts_at: alert_starts_at
+ )
+ end
+
+ let(:alert_presenter) do
+ Gitlab::Alerting::Alert.new(project: project, payload: alert_payload).present
+ end
+
+ let!(:setting) do
+ create(:project_incident_management_setting, project: project)
+ end
+
+ subject { service.execute }
+
+ context 'when create_issue enabled' do
+ let(:issue) { subject[:issue] }
+ let(:summary_separator) { "\n---\n\n" }
+
+ before do
+ setting.update!(create_issue: true)
+ end
+
+ context 'without issue_template_content' do
+ it 'creates an issue with alert summary only' do
+ expect(subject).to include(status: :success)
+
+ expect(issue.author).to eq(user)
+ expect(issue.title).to eq(alert_title)
+ expect(issue.description).to include(alert_presenter.issue_summary_markdown.strip)
+ expect(separator_count(issue.description)).to eq 0
+ end
+ end
+
+ context 'with erroneous issue service' do
+ let(:invalid_issue) do
+ build(:issue, project: project, title: nil).tap(&:valid?)
+ end
+
+ let(:issue_error) { invalid_issue.errors.full_messages.to_sentence }
+
+ it 'returns and logs the issue error' do
+ expect_next_instance_of(Issues::CreateService) do |issue_service|
+ expect(issue_service).to receive(:execute).and_return(invalid_issue)
+ end
+
+ expect(service)
+ .to receive(:log_error)
+ .with(error_message(issue_error))
+
+ expect(subject).to include(status: :error, message: issue_error)
+ end
+ end
+
+ shared_examples 'GFM template' do
+ context 'plain content' do
+ let(:template_content) { 'some content' }
+
+ it 'creates an issue appending issue template' do
+ expect(subject).to include(status: :success)
+
+ expect(issue.description).to include(alert_presenter.issue_summary_markdown)
+ expect(separator_count(issue.description)).to eq 1
+ expect(issue.description).to include(template_content)
+ end
+ end
+
+ context 'quick actions' do
+ let(:user) { create(:user) }
+ let(:plain_text) { 'some content' }
+
+ let(:template_content) do
+ <<~CONTENT
+ #{plain_text}
+ /due tomorrow
+ /assign @#{user.username}
+ CONTENT
+ end
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'creates an issue interpreting quick actions' do
+ expect(subject).to include(status: :success)
+
+ expect(issue.description).to include(plain_text)
+ expect(issue.due_date).to be_present
+ expect(issue.assignees).to eq([user])
+ end
+ end
+ end
+
+ context 'with gitlab_incident_markdown' do
+ let(:alert_annotations) do
+ { title: alert_title, gitlab_incident_markdown: template_content }
+ end
+
+ it_behaves_like 'GFM template'
+ end
+
+ context 'with issue_template_content' do
+ before do
+ create_issue_template('bug', template_content)
+ setting.update!(issue_template_key: 'bug')
+ end
+
+ it_behaves_like 'GFM template'
+
+ context 'and gitlab_incident_markdown' do
+ let(:template_content) { 'plain text' }
+ let(:alt_template) { 'alternate text' }
+ let(:alert_annotations) do
+ { title: alert_title, gitlab_incident_markdown: alt_template }
+ end
+
+ it 'includes both templates' do
+ expect(subject).to include(status: :success)
+
+ expect(issue.description).to include(alert_presenter.issue_summary_markdown)
+ expect(issue.description).to include(template_content)
+ expect(issue.description).to include(alt_template)
+ expect(separator_count(issue.description)).to eq 2
+ end
+ end
+
+ private
+
+ def create_issue_template(name, content)
+ project.repository.create_file(
+ project.creator,
+ ".gitlab/issue_templates/#{name}.md",
+ content,
+ message: 'message',
+ branch_name: 'master'
+ )
+ end
+ end
+
+ context 'with gitlab alert' do
+ let(:gitlab_alert) { create(:prometheus_alert, project: project) }
+
+ before do
+ alert_payload['labels'] = {
+ 'gitlab_alert_id' => gitlab_alert.prometheus_metric_id.to_s
+ }
+ end
+
+ it 'creates an issue' do
+ query_title = "#{gitlab_alert.title} #{gitlab_alert.computed_operator} #{gitlab_alert.threshold}"
+
+ expect(subject).to include(status: :success)
+
+ expect(issue.author).to eq(user)
+ expect(issue.title).to eq(alert_presenter.full_title)
+ expect(issue.title).to include(gitlab_alert.environment.name)
+ expect(issue.title).to include(query_title)
+ expect(issue.title).to include('for 5 minutes')
+ expect(issue.description).to include(alert_presenter.issue_summary_markdown.strip)
+ expect(separator_count(issue.description)).to eq 0
+ end
+ end
+
+ describe 'with invalid alert payload' do
+ shared_examples 'invalid alert' do
+ it 'does not create an issue' do
+ expect(service)
+ .to receive(:log_error)
+ .with(error_message('invalid alert'))
+
+ expect(subject).to eq(status: :error, message: 'invalid alert')
+ end
+ end
+
+ context 'without title' do
+ let(:alert_annotations) { {} }
+
+ it_behaves_like 'invalid alert'
+ end
+
+ context 'without startsAt' do
+ let(:alert_starts_at) { nil }
+
+ it_behaves_like 'invalid alert'
+ end
+ end
+
+ describe "label `incident`" do
+ let(:title) { 'incident' }
+ let(:color) { '#CC0033' }
+ let(:description) do
+ <<~DESCRIPTION.chomp
+ Denotes a disruption to IT services and \
+ the associated issues require immediate attention
+ DESCRIPTION
+ end
+
+ shared_examples 'existing label' do
+ it 'adds the existing label' do
+ expect { subject }.not_to change(Label, :count)
+
+ expect(issue.labels).to eq([label])
+ end
+ end
+
+ shared_examples 'new label' do
+ it 'adds newly created label' do
+ expect { subject }.to change(Label, :count).by(1)
+
+ label = project.reload.labels.last
+ expect(issue.labels).to eq([label])
+ expect(label.title).to eq(title)
+ expect(label.color).to eq(color)
+ expect(label.description).to eq(description)
+ end
+ end
+
+ context 'with predefined project label' do
+ it_behaves_like 'existing label' do
+ let!(:label) { create(:label, project: project, title: title) }
+ end
+ end
+
+ context 'with predefined group label' do
+ let(:project) { create(:project, group: group) }
+ let(:group) { create(:group) }
+
+ it_behaves_like 'existing label' do
+ let!(:label) { create(:group_label, group: group, title: title) }
+ end
+ end
+
+ context 'without label' do
+ it_behaves_like 'new label'
+ end
+
+ context 'with duplicate labels', issue: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/65042' do
+ before do
+ # Replicate race condition to create duplicates
+ build(:label, project: project, title: title).save!(validate: false)
+ build(:label, project: project, title: title).save!(validate: false)
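+ # `save!(validate: false)` skips the label title uniqueness validation, so
+ # both records persist, roughly what would happen if two concurrent
+ # requests both passed `find_or_create_by` before either had saved.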
+ end
+
+ it 'creates an issue without labels' do
+ # Verify we have duplicates
+ expect(project.labels.size).to eq(2)
+ expect(project.labels.map(&:title)).to all(eq(title))
+
+ message = <<~MESSAGE.chomp
+ Cannot create incident issue with labels ["#{title}"] for \
+ "#{project.full_name}": Labels is invalid.
+ Retrying without labels.
+ MESSAGE
+
+ expect(service)
+ .to receive(:log_info)
+ .with(message)
+
+ expect(subject).to include(status: :success)
+ expect(issue.labels).to be_empty
+ end
+ end
+ end
+ end
+
+ context 'when create_issue disabled' do
+ before do
+ setting.update!(create_issue: false)
+ end
+
+ it 'returns an error' do
+ expect(service)
+ .to receive(:log_error)
+ .with(error_message('setting disabled'))
+
+ expect(subject).to eq(status: :error, message: 'setting disabled')
+ end
+ end
+
+ private
+
+ def build_alert_payload(annotations: {}, starts_at: Time.now)
+ {
+ 'annotations' => annotations.stringify_keys
+ }.tap do |payload|
+ payload['startsAt'] = starts_at.rfc3339 if starts_at
+ end
+ end
+
+ def error_message(message)
+ %{Cannot create incident issue for "#{project.full_name}": #{message}}
+ end
+
+ def separator_count(text)
+ text.scan(summary_separator).size
+ end
+end
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 5dc6b6176ee..c9701e5d194 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -171,6 +171,31 @@ describe Issues::CreateService do
described_class.new(project, user, opts).execute
end
+
+ context 'after_save callback to store_mentions' do
+ context 'when mentionable attributes change' do
+ let(:opts) { { title: 'Title', description: "Description with #{user.to_reference}" } }
+
+ it 'saves mentions' do
+ expect_next_instance_of(Issue) do |instance|
+ expect(instance).to receive(:store_mentions!).and_call_original
+ end
+ expect(issue.user_mentions.count).to eq 1
+ end
+ end
+
+ context 'when save fails' do
+ let(:opts) { { title: '', label_ids: labels.map(&:id), milestone_id: milestone.id } }
+
+ it 'does not call store_mentions' do
+ expect_next_instance_of(Issue) do |instance|
+ expect(instance).not_to receive(:store_mentions!).and_call_original
+ end
+ expect(issue.valid?).to be false
+ expect(issue.user_mentions.count).to eq 0
+ end
+ end
+ end
end
context 'issue create service' do
@@ -355,7 +380,7 @@ describe Issues::CreateService do
opts[:recaptcha_verified] = true
opts[:spam_log_id] = spam_logs.last.id
- expect(AkismetService).not_to receive(:new)
+ expect(Spam::AkismetService).not_to receive(:new)
end
it 'does not mark an issue as spam' do
@@ -385,14 +410,14 @@ describe Issues::CreateService do
context 'when recaptcha was not verified' do
before do
- expect_next_instance_of(SpamService) do |spam_service|
+ expect_next_instance_of(Spam::SpamCheckService) do |spam_service|
expect(spam_service).to receive_messages(check_for_spam?: true)
end
end
context 'when akismet detects spam' do
before do
- expect_next_instance_of(AkismetService) do |akismet_service|
+ expect_next_instance_of(Spam::AkismetService) do |akismet_service|
expect(akismet_service).to receive_messages(spam?: true)
end
end
@@ -408,7 +433,7 @@ describe Issues::CreateService do
it 'creates a new spam_log' do
expect { issue }
- .to log_spam(title: issue.title, description: issue.description, user_id: user.id, noteable_type: 'Issue')
+ .to have_spam_log(title: issue.title, description: issue.description, user_id: user.id, noteable_type: 'Issue')
end
it 'assigns a spam_log to an issue' do
@@ -431,7 +456,7 @@ describe Issues::CreateService do
it 'creates a new spam_log' do
expect { issue }
- .to log_spam(title: issue.title, description: issue.description, user_id: user.id, noteable_type: 'Issue')
+ .to have_spam_log(title: issue.title, description: issue.description, user_id: user.id, noteable_type: 'Issue')
end
it 'assigns a spam_log to an issue' do
@@ -442,7 +467,7 @@ describe Issues::CreateService do
context 'when akismet does not detect spam' do
before do
- expect_next_instance_of(AkismetService) do |akismet_service|
+ expect_next_instance_of(Spam::AkismetService) do |akismet_service|
expect(akismet_service).to receive_messages(spam?: false)
end
end
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index ee809aabac0..ccd4dd4231b 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -6,7 +6,7 @@ describe Issues::MoveService do
let(:user) { create(:user) }
let(:author) { create(:user) }
let(:title) { 'Some issue' }
- let(:description) { 'Some issue description' }
+ let(:description) { "Some issue description with mention to #{user.to_reference}" }
let(:group) { create(:group, :private) }
let(:sub_group_1) { create(:group, :private, parent: group) }
let(:sub_group_2) { create(:group, :private, parent: group) }
@@ -36,6 +36,9 @@ describe Issues::MoveService do
end
context 'issue movable' do
+ let!(:note_with_mention) { create(:note, noteable: old_issue, author: author, project: old_project, note: "note with mention #{user.to_reference}") }
+ let!(:note_with_no_mention) { create(:note, noteable: old_issue, author: author, project: old_project, note: "note without mention") }
+
include_context 'user can move issue'
context 'generic issue' do
@@ -94,6 +97,15 @@ describe Issues::MoveService do
it 'moves the award emoji' do
expect(old_issue.award_emoji.first.name).to eq new_issue.reload.award_emoji.first.name
end
+
+ context 'when issue has notes with mentions' do
+ it 'saves user mentions with actual mentions for new issue' do
+ expect(new_issue.user_mentions.where(note_id: nil).first.mentioned_users_ids).to match_array([user.id])
+ expect(new_issue.user_mentions.where.not(note_id: nil).first.mentioned_users_ids).to match_array([user.id])
+ expect(new_issue.user_mentions.where.not(note_id: nil).count).to eq 1
+ expect(new_issue.user_mentions.count).to eq 2
+ end
+ end
end
context 'issue with assignee' do
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 64bca770d5b..888a63980f6 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -211,6 +211,49 @@ describe Issues::UpdateService, :mailer do
expect(note.note).to eq 'locked this issue'
end
end
+
+ context 'after_save callback to store_mentions' do
+ let(:issue) { create(:issue, title: 'Old title', description: "simple description", project: project, author: create(:user)) }
+ let(:labels) { create_pair(:label, project: project) }
+ let(:milestone) { create(:milestone, project: project) }
+
+ context 'when mentionable attributes change' do
+ let(:opts) { { description: "Description with #{user.to_reference}" } }
+
+ it 'saves mentions' do
+ expect(issue).to receive(:store_mentions!).and_call_original
+
+ expect { update_issue(opts) }.to change { IssueUserMention.count }.by(1)
+
+ expect(issue.referenced_users).to match_array([user])
+ end
+ end
+
+ context 'when mentionable attributes do not change' do
+ let(:opts) { { label_ids: labels.map(&:id), milestone_id: milestone.id } }
+
+ it 'does not call store_mentions' do
+ expect(issue).not_to receive(:store_mentions!).and_call_original
+
+ expect { update_issue(opts) }.not_to change { IssueUserMention.count }
+
+ expect(issue.referenced_users).to be_empty
+ end
+ end
+
+ context 'when save fails' do
+ let(:opts) { { title: '', label_ids: labels.map(&:id), milestone_id: milestone.id } }
+
+ it 'does not call store_mentions' do
+ expect(issue).not_to receive(:store_mentions!).and_call_original
+
+ expect { update_issue(opts) }.not_to change { IssueUserMention.count }
+
+ expect(issue.referenced_users).to be_empty
+ expect(issue.valid?).to be false
+ end
+ end
+ end
end
context 'when description changed' do
diff --git a/spec/services/labels/available_labels_service_spec.rb b/spec/services/labels/available_labels_service_spec.rb
index 4d5c87ecc53..ce120344f16 100644
--- a/spec/services/labels/available_labels_service_spec.rb
+++ b/spec/services/labels/available_labels_service_spec.rb
@@ -12,7 +12,7 @@ describe Labels::AvailableLabelsService do
let(:other_group_label) { create(:group_label) }
let(:labels) { [project_label, other_project_label, group_label, other_group_label] }
- context '#find_or_create_by_titles' do
+ describe '#find_or_create_by_titles' do
let(:label_titles) { labels.map(&:title).push('non existing title') }
context 'when parent is a project' do
@@ -64,7 +64,7 @@ describe Labels::AvailableLabelsService do
end
end
- context '#filter_labels_ids_in_param' do
+ describe '#filter_labels_ids_in_param' do
let(:label_ids) { labels.map(&:id).push(99999) }
context 'when parent is a project' do
diff --git a/spec/services/merge_requests/add_context_service_spec.rb b/spec/services/merge_requests/add_context_service_spec.rb
new file mode 100644
index 00000000000..d4e95c2f1ea
--- /dev/null
+++ b/spec/services/merge_requests/add_context_service_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequests::AddContextService do
+ let(:project) { create(:project, :repository) }
+ let(:admin) { create(:admin) }
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project, author: admin) }
+ let(:commits) { ["874797c3a73b60d2187ed6e2fcabd289ff75171e"] }
+ let(:raw_repository) { project.repository.raw }
+
+ subject(:service) { described_class.new(project, admin, merge_request: merge_request, commits: commits) }
+
+ describe "#execute" do
+ it "adds context commit" do
+ service.execute
+
+ expect(merge_request.merge_request_context_commit_diff_files.length).to eq(2)
+ end
+
+ context "when user doesn't have permission to update merge request" do
+ let(:user) { create(:user) }
+ let(:merge_request1) { create(:merge_request, source_project: project, author: user) }
+
+ subject(:service) { described_class.new(project, user, merge_request: merge_request, commits: commits) }
+
+ it "doesn't add context commit" do
+ subject.execute
+
+ expect(merge_request.merge_request_context_commit_diff_files.length).to eq(0)
+ end
+ end
+
+ context "when the commits array is empty" do
+ subject(:service) { described_class.new(project, admin, merge_request: merge_request, commits: []) }
+
+ it "doesn't add context commit" do
+ subject.execute
+
+ expect(merge_request.merge_request_context_commit_diff_files.length).to eq(0)
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb b/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb
index 203048984a1..4d87fa3e832 100644
--- a/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb
+++ b/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
describe MergeRequests::AddTodoWhenBuildFailsService do
let(:user) { create(:user) }
- let(:merge_request) { create(:merge_request) }
let(:project) { create(:project, :repository) }
let(:sha) { '1234567890abcdef1234567890abcdef12345678' }
let(:ref) { merge_request.source_branch }
diff --git a/spec/services/merge_requests/create_from_issue_service_spec.rb b/spec/services/merge_requests/create_from_issue_service_spec.rb
index 3d58ecdd8cd..fb1bb308170 100644
--- a/spec/services/merge_requests/create_from_issue_service_spec.rb
+++ b/spec/services/merge_requests/create_from_issue_service_spec.rb
@@ -11,10 +11,8 @@ describe MergeRequests::CreateFromIssueService do
let(:milestone_id) { create(:milestone, project: project).id }
let(:issue) { create(:issue, project: project, milestone_id: milestone_id) }
let(:custom_source_branch) { 'custom-source-branch' }
-
- subject(:service) { described_class.new(project, user, service_params) }
-
- subject(:service_with_custom_source_branch) { described_class.new(project, user, branch_name: custom_source_branch, **service_params) }
+ let(:service) { described_class.new(project, user, service_params) }
+ let(:service_with_custom_source_branch) { described_class.new(project, user, branch_name: custom_source_branch, **service_params) }
before do
project.add_developer(user)
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index 3db1471bf3c..aebead481ce 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -291,6 +291,46 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
expect { service.execute }.to change { counter.read(:create) }.by(1)
end
+
+ context 'after_save callback to store_mentions' do
+ let(:labels) { create_pair(:label, project: project) }
+ let(:milestone) { create(:milestone, project: project) }
+ let(:req_opts) { { source_branch: 'feature', target_branch: 'master' } }
+
+ context 'when mentionable attributes change' do
+ let(:opts) { { title: 'Title', description: "Description with #{user.to_reference}" }.merge(req_opts) }
+
+ it 'saves mentions' do
+ expect_next_instance_of(MergeRequest) do |instance|
+ expect(instance).to receive(:store_mentions!).and_call_original
+ end
+ expect(merge_request.user_mentions.count).to eq 1
+ end
+ end
+
+ context 'when mentionable attributes do not change' do
+ let(:opts) { { label_ids: labels.map(&:id), milestone_id: milestone.id }.merge(req_opts) }
+
+ it 'does not call store_mentions' do
+ expect_next_instance_of(MergeRequest) do |instance|
+ expect(instance).not_to receive(:store_mentions!).and_call_original
+ end
+ expect(merge_request.valid?).to be false
+ expect(merge_request.user_mentions.count).to eq 0
+ end
+ end
+
+ context 'when save fails' do
+ let(:opts) { { label_ids: labels.map(&:id), milestone_id: milestone.id } }
+
+ it 'does not call store_mentions' do
+ expect_next_instance_of(MergeRequest) do |instance|
+ expect(instance).not_to receive(:store_mentions!).and_call_original
+ end
+ expect(merge_request.valid?).to be false
+ end
+ end
+ end
end
it_behaves_like 'new issuable record that supports quick actions' do
@@ -483,6 +523,14 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
expect(merge_request).to be_persisted
end
+ it 'calls MergeRequests::LinkLfsObjectsService#execute', :sidekiq_might_not_need_inline do
+ expect_next_instance_of(MergeRequests::LinkLfsObjectsService) do |service|
+ expect(service).to receive(:execute).with(instance_of(MergeRequest))
+ end
+
+ described_class.new(project, user, opts).execute
+ end
+
it 'does not create the merge request when the target project is archived' do
target_project.update!(archived: true)
diff --git a/spec/services/merge_requests/get_urls_service_spec.rb b/spec/services/merge_requests/get_urls_service_spec.rb
index bb8a1873dac..8cc627b64d9 100644
--- a/spec/services/merge_requests/get_urls_service_spec.rb
+++ b/spec/services/merge_requests/get_urls_service_spec.rb
@@ -8,8 +8,8 @@ describe MergeRequests::GetUrlsService do
let(:project) { create(:project, :public, :repository) }
let(:service) { described_class.new(project) }
let(:source_branch) { "merge-test" }
- let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/merge_requests/new?merge_request%5Bsource_branch%5D=#{source_branch}" }
- let(:show_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/merge_requests/#{merge_request.iid}" }
+ let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=#{source_branch}" }
+ let(:show_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/#{merge_request.iid}" }
let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::BLANK_SHA} refs/heads/#{source_branch}" }
let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
@@ -134,7 +134,7 @@ describe MergeRequests::GetUrlsService do
let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch" }
let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/markdown" }
let(:changes) { "#{new_branch_changes}\n#{existing_branch_changes}" }
- let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch" }
+ let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch" }
it 'returns 2 urls for both creating new and showing merge request' do
result = service.execute(changes)
diff --git a/spec/services/merge_requests/link_lfs_objects_service_spec.rb b/spec/services/merge_requests/link_lfs_objects_service_spec.rb
new file mode 100644
index 00000000000..f07cf13e4f2
--- /dev/null
+++ b/spec/services/merge_requests/link_lfs_objects_service_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequests::LinkLfsObjectsService, :sidekiq_inline do
+ include ProjectForksHelper
+ include RepoHelpers
+
+ let(:target_project) { create(:project, :public, :repository) }
+
+ let(:merge_request) do
+ create(
+ :merge_request,
+ target_project: target_project,
+ target_branch: 'lfs',
+ source_project: source_project,
+ source_branch: 'link-lfs-objects'
+ )
+ end
+
+ subject { described_class.new(target_project) }
+
+ shared_examples_for 'linking LFS objects' do
+ context 'when source project is the same as target project' do
+ let(:source_project) { target_project }
+
+ it 'does not call Projects::LfsPointers::LfsLinkService#execute' do
+ expect(Projects::LfsPointers::LfsLinkService).not_to receive(:new)
+
+ execute
+ end
+ end
+
+ context 'when source project is different from target project' do
+ let(:user) { create(:user) }
+ let(:source_project) { fork_project(target_project, user, namespace: user.namespace, repository: true) }
+
+ before do
+ create_branch(source_project, 'link-lfs-objects', 'lfs')
+ end
+
+ context 'and there are changes' do
+ before do
+ allow(source_project).to receive(:lfs_enabled?).and_return(true)
+ end
+
+ context 'and there are LFS objects added' do
+ before do
+ create_file_in_repo(source_project, 'link-lfs-objects', 'link-lfs-objects', 'one.lfs', 'One')
+ create_file_in_repo(source_project, 'link-lfs-objects', 'link-lfs-objects', 'two.lfs', 'Two')
+ end
+
+ it 'calls Projects::LfsPointers::LfsLinkService#execute with OIDs of LFS objects in merge request' do
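+ # The OIDs below are presumably the LFS pointer OIDs for the one.lfs and
+ # two.lfs files committed in the `before` block above.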
+ expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
+ expect(service).to receive(:execute).with(%w[
+ 8b12507783d5becacbf2ebe5b01a60024d8728a8f86dcc818bce699e8b3320bc
+ 94a72c074cfe574742c9e99e863322f73feff82981d065ff65a0308f44f19f62
+ ])
+ end
+
+ execute
+ end
+ end
+
+ context 'but there are no LFS objects added' do
+ before do
+ create_file_in_repo(source_project, 'link-lfs-objects', 'link-lfs-objects', 'one.txt', 'One')
+ end
+
+ it 'does not call Projects::LfsPointers::LfsLinkService#execute' do
+ expect(Projects::LfsPointers::LfsLinkService).not_to receive(:new)
+
+ execute
+ end
+ end
+ end
+
+ context 'and there are no changes' do
+ it 'does not call Projects::LfsPointers::LfsLinkService#execute' do
+ expect(Projects::LfsPointers::LfsLinkService).not_to receive(:new)
+
+ execute
+ end
+ end
+ end
+ end
+
+ context 'when no oldrev and newrev passed' do
+ let(:execute) { subject.execute(merge_request) }
+
+ it_behaves_like 'linking LFS objects'
+ end
+
+ context 'when oldrev and newrev are passed' do
+ let(:execute) { subject.execute(merge_request, oldrev: merge_request.diff_base_sha, newrev: merge_request.diff_head_sha) }
+
+ it_behaves_like 'linking LFS objects'
+ end
+
+ def create_branch(project, new_name, branch_name)
+ ::Branches::CreateService.new(project, user).execute(new_name, branch_name)
+ end
+end
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index fa1a8f60256..496b08799f2 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -31,6 +31,11 @@ describe MergeRequests::MergeService do
it { expect(merge_request).to be_valid }
it { expect(merge_request).to be_merged }
+ it 'persists merge_commit_sha and nullifies in_progress_merge_commit_sha' do
+ expect(merge_request.merge_commit_sha).not_to be_nil
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
+ end
+
it 'sends email to user2 about merge of new merge_request' do
email = ActionMailer::Base.deliveries.last
expect(email.to.first).to eq(user2.email)
diff --git a/spec/services/merge_requests/mergeability_check_service_spec.rb b/spec/services/merge_requests/mergeability_check_service_spec.rb
index a864da0a6fb..8f17e8083e3 100644
--- a/spec/services/merge_requests/mergeability_check_service_spec.rb
+++ b/spec/services/merge_requests/mergeability_check_service_spec.rb
@@ -53,9 +53,42 @@ describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shared_sta
end
end
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request, merge_status: :unchecked, source_project: project, target_project: project) }
+
+ describe '#async_execute' do
+ shared_examples_for 'no job is enqueued' do
+ it 'does not enqueue MergeRequestMergeabilityCheckWorker' do
+ expect(MergeRequestMergeabilityCheckWorker).not_to receive(:perform_async)
+
+ described_class.new(merge_request).async_execute
+ end
+ end
+
+ it 'enqueues MergeRequestMergeabilityCheckWorker' do
+ expect(MergeRequestMergeabilityCheckWorker).to receive(:perform_async)
+
+ described_class.new(merge_request).async_execute
+ end
+
+ context 'when read only DB' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+ end
+
+ it_behaves_like 'no job is enqueued'
+ end
+
+ context 'when merge_status is already checking' do
+ before do
+ merge_request.mark_as_checking
+ end
+
+ it_behaves_like 'no job is enqueued'
+ end
+ end
+
describe '#execute' do
- let(:project) { create(:project, :repository) }
- let(:merge_request) { create(:merge_request, merge_status: :unchecked, source_project: project, target_project: project) }
let(:repo) { project.repository }
subject { described_class.new(merge_request).execute }
diff --git a/spec/services/merge_requests/migrate_external_diffs_service_spec.rb b/spec/services/merge_requests/migrate_external_diffs_service_spec.rb
index 40ac747e66f..233b944624f 100644
--- a/spec/services/merge_requests/migrate_external_diffs_service_spec.rb
+++ b/spec/services/merge_requests/migrate_external_diffs_service_spec.rb
@@ -6,7 +6,7 @@ describe MergeRequests::MigrateExternalDiffsService do
let(:merge_request) { create(:merge_request) }
let(:diff) { merge_request.merge_request_diff }
- describe '.enqueue!', :sidekiq do
+ describe '.enqueue!' do
around do |example|
Sidekiq::Testing.fake! { example.run }
end
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index 184f3f37339..22df3b84243 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -71,14 +71,6 @@ describe MergeRequests::RebaseService do
it_behaves_like 'sequence of failure and success'
- context 'with deprecated step rebase feature' do
- before do
- stub_feature_flags(two_step_rebase: false)
- end
-
- it_behaves_like 'sequence of failure and success'
- end
-
context 'when unexpected error occurs' do
before do
allow(repository).to receive(:gitaly_operation_client).and_raise('Something went wrong')
@@ -140,21 +132,7 @@ describe MergeRequests::RebaseService do
end
end
- context 'when the two_step_rebase feature is enabled' do
- before do
- stub_feature_flags(two_step_rebase: true)
- end
-
- it_behaves_like 'a service that can execute a successful rebase'
- end
-
- context 'when the two_step_rebase feature is disabled' do
- before do
- stub_feature_flags(two_step_rebase: false)
- end
-
- it_behaves_like 'a service that can execute a successful rebase'
- end
+ it_behaves_like 'a service that can execute a successful rebase'
context 'when skip_ci flag is set' do
let(:skip_ci) { true }
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 1ba216e8ff1..b67779a912d 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -384,6 +384,14 @@ describe MergeRequests::RefreshService do
end
context 'open fork merge request' do
+ it 'calls MergeRequests::LinkLfsObjectsService#execute' do
+ expect_next_instance_of(MergeRequests::LinkLfsObjectsService) do |svc|
+ expect(svc).to receive(:execute).with(@fork_merge_request, oldrev: @oldrev, newrev: @newrev)
+ end
+
+ refresh
+ end
+
it 'executes hooks with update action' do
refresh
diff --git a/spec/services/merge_requests/reload_diffs_service_spec.rb b/spec/services/merge_requests/reload_diffs_service_spec.rb
index c450fc0a7dc..d2444af1b0f 100644
--- a/spec/services/merge_requests/reload_diffs_service_spec.rb
+++ b/spec/services/merge_requests/reload_diffs_service_spec.rb
@@ -33,34 +33,13 @@ describe MergeRequests::ReloadDiffsService, :use_clean_rails_memory_store_cachin
end
context 'cache clearing' do
- context 'using Gitlab::Diff::DeprecatedHighlightCache' do
- before do
- stub_feature_flags(hset_redis_diff_caching: false)
- end
+ it 'clears the cache for older diffs on the merge request' do
+ old_diff = merge_request.merge_request_diff
+ old_cache_key = old_diff.diffs_collection.cache_key
- it 'clears the cache for older diffs on the merge request' do
- old_diff = merge_request.merge_request_diff
- old_cache_key = old_diff.diffs_collection.cache_key
+ expect_any_instance_of(Redis).to receive(:del).with(old_cache_key).and_call_original
- expect(Rails.cache).to receive(:delete).with(old_cache_key).and_call_original
-
- subject.execute
- end
- end
-
- context 'using Gitlab::Diff::HighlightCache' do
- before do
- stub_feature_flags(hset_redis_diff_caching: true)
- end
-
- it 'clears the cache for older diffs on the merge request' do
- old_diff = merge_request.merge_request_diff
- old_cache_key = old_diff.diffs_collection.cache_key
-
- expect_any_instance_of(Redis).to receive(:del).with(old_cache_key).and_call_original
-
- subject.execute
- end
+ subject.execute
end
it 'avoids N+1 queries', :request_store do
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index baa0ecf27e3..f295f3c4a81 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -162,6 +162,52 @@ describe MergeRequests::UpdateService, :mailer do
end
end
+ context 'after_save callback to store_mentions' do
+ let(:merge_request) { create(:merge_request, title: 'Old title', description: "simple description", source_branch: 'test', source_project: project, author: user) }
+ let(:labels) { create_pair(:label, project: project) }
+ let(:milestone) { create(:milestone, project: project) }
+ let(:req_opts) { { source_branch: 'feature', target_branch: 'master' } }
+
+ subject { MergeRequests::UpdateService.new(project, user, opts).execute(merge_request) }
+
+ context 'when mentionable attributes change' do
+ let(:opts) { { description: "Description with #{user.to_reference}" }.merge(req_opts) }
+
+ it 'saves mentions' do
+ expect(merge_request).to receive(:store_mentions!).and_call_original
+
+ expect { subject }.to change { MergeRequestUserMention.count }.by(1)
+
+ expect(merge_request.referenced_users).to match_array([user])
+ end
+ end
+
+ context 'when mentionable attributes do not change' do
+ let(:opts) { { label_ids: [label.id, label2.id], milestone_id: milestone.id }.merge(req_opts) }
+
+ it 'does not call store_mentions' do
+ expect(merge_request).not_to receive(:store_mentions!).and_call_original
+
+ expect { subject }.not_to change { MergeRequestUserMention.count }
+
+ expect(merge_request.referenced_users).to be_empty
+ end
+ end
+
+ context 'when save fails' do
+ let(:opts) { { title: '', label_ids: labels.map(&:id), milestone_id: milestone.id } }
+
+ it 'does not call store_mentions' do
+ expect(merge_request).not_to receive(:store_mentions!).and_call_original
+
+ expect { subject }.not_to change { MergeRequestUserMention.count }
+
+ expect(merge_request.referenced_users).to be_empty
+ expect(merge_request.valid?).to be false
+ end
+ end
+ end
+
context 'merge' do
let(:opts) do
{
diff --git a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
index 274d594fd68..5f7279ee550 100644
--- a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
@@ -5,9 +5,11 @@ require 'spec_helper'
describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:user) { create(:user) }
- set(:project) { create(:project, :repository) }
- set(:environment) { create(:environment, project: project) }
+ STAGES = ::Gitlab::Metrics::Dashboard::Stages
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:environment) { create(:environment, project: project) }
describe '#execute' do
subject(:service_call) { described_class.new(project, user, params).execute }
@@ -16,6 +18,7 @@ describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memory_stor
let(:branch) { "dashboard_new_branch" }
let(:dashboard) { 'config/prometheus/common_metrics.yml' }
let(:file_name) { 'custom_dashboard.yml' }
+ let(:file_content_hash) { YAML.safe_load(File.read(dashboard)) }
let(:params) do
{
dashboard: dashboard,
@@ -25,17 +28,6 @@ describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memory_stor
}
end
- let(:dashboard_attrs) do
- {
- commit_message: commit_message,
- branch_name: branch,
- start_branch: project.default_branch,
- encoding: 'text',
- file_path: ".gitlab/dashboards/#{file_name}",
- file_content: File.read(dashboard)
- }
- end
-
context 'user does not have push right to repository' do
it_behaves_like 'misconfigured dashboard service response', :forbidden, %q(You can't commit to this project)
end
@@ -72,11 +64,12 @@ describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memory_stor
start_branch: project.default_branch,
encoding: 'text',
file_path: ".gitlab/dashboards/custom_dashboard.yml",
- file_content: File.read(dashboard)
+ file_content: file_content_hash.to_yaml
}
end
it 'strips target file name to safe value', :aggregate_failures do
+ allow(::Gitlab::Metrics::Dashboard::Processor).to receive(:new).and_return(double(process: file_content_hash))
service_instance = instance_double(::Files::CreateService)
expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
expect(service_instance).to receive(:execute).and_return(status: :success)
@@ -86,14 +79,12 @@ describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memory_stor
end
context 'valid parameters' do
- it 'delegates commit creation to Files::CreateService', :aggregate_failures do
- service_instance = instance_double(::Files::CreateService)
- expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
- expect(service_instance).to receive(:execute).and_return(status: :success)
-
- service_call
+ before do
+ allow(::Gitlab::Metrics::Dashboard::Processor).to receive(:new).and_return(double(process: file_content_hash))
end
+ it_behaves_like 'valid dashboard cloning process', ::Metrics::Dashboard::SystemDashboardService::DASHBOARD_PATH, [STAGES::CommonMetricsInserter, STAGES::ProjectMetricsInserter, STAGES::Sorter]
+
context 'selected branch already exists' do
let(:branch) { 'existing_branch' }
diff --git a/spec/services/metrics/dashboard/default_embed_service_spec.rb b/spec/services/metrics/dashboard/default_embed_service_spec.rb
index 741a9644905..1b88276368c 100644
--- a/spec/services/metrics/dashboard/default_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/default_embed_service_spec.rb
@@ -13,6 +13,26 @@ describe Metrics::Dashboard::DefaultEmbedService, :use_clean_rails_memory_store_
project.add_maintainer(user)
end
+ describe '.valid_params?' do
+ let(:params) { { embedded: true } }
+
+ subject { described_class.valid_params?(params) }
+
+ it { is_expected.to be_truthy }
+
+ context 'missing embedded' do
+ let(:params) { {} }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'not embedded' do
+ let(:params) { { embedded: false } }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
describe '#get_dashboard' do
let(:service_params) { [project, user, { environment: environment }] }
let(:service_call) { described_class.new(*service_params).get_dashboard }
diff --git a/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb b/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb
index c3993bf71ea..36ca6f882fa 100644
--- a/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb
@@ -13,7 +13,27 @@ describe Metrics::Dashboard::PodDashboardService, :use_clean_rails_memory_store_
project.add_maintainer(user)
end
- describe 'get_dashboard' do
+ describe '.valid_params?' do
+ let(:params) { { dashboard_path: described_class::DASHBOARD_PATH } }
+
+ subject { described_class.valid_params?(params) }
+
+ it { is_expected.to be_truthy }
+
+ context 'missing dashboard_path' do
+ let(:params) { {} }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'non-matching dashboard_path' do
+ let(:params) { { dashboard_path: 'path/to/bunk.yml' } }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#get_dashboard' do
let(:dashboard_path) { described_class::DASHBOARD_PATH }
let(:service_params) { [project, user, { environment: environment, dashboard_path: dashboard_path }] }
let(:service_call) { described_class.new(*service_params).get_dashboard }
diff --git a/spec/services/metrics/dashboard/project_dashboard_service_spec.rb b/spec/services/metrics/dashboard/project_dashboard_service_spec.rb
index cba8ef2ec98..829e750d438 100644
--- a/spec/services/metrics/dashboard/project_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/project_dashboard_service_spec.rb
@@ -13,7 +13,7 @@ describe Metrics::Dashboard::ProjectDashboardService, :use_clean_rails_memory_st
project.add_maintainer(user)
end
- describe 'get_dashboard' do
+ describe '#get_dashboard' do
let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
let(:service_params) { [project, user, { environment: environment, dashboard_path: dashboard_path }] }
let(:service_call) { described_class.new(*service_params).get_dashboard }
@@ -62,7 +62,7 @@ describe Metrics::Dashboard::ProjectDashboardService, :use_clean_rails_memory_st
end
end
- describe '::all_dashboard_paths' do
+ describe '.all_dashboard_paths' do
let(:all_dashboards) { described_class.all_dashboard_paths(project) }
context 'when there are no project dashboards' do
@@ -87,4 +87,24 @@ describe Metrics::Dashboard::ProjectDashboardService, :use_clean_rails_memory_st
end
end
end
+
+ describe '.valid_params?' do
+ let(:params) { { dashboard_path: '.gitlab/dashboard/test.yml' } }
+
+ subject { described_class.valid_params?(params) }
+
+ it { is_expected.to be_truthy }
+
+ context 'missing dashboard_path' do
+ let(:params) { {} }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'empty dashboard_path' do
+ let(:params) { { dashboard_path: '' } }
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
diff --git a/spec/services/metrics/dashboard/self_monitoring_dashboard_service_spec.rb b/spec/services/metrics/dashboard/self_monitoring_dashboard_service_spec.rb
new file mode 100644
index 00000000000..9ee5b06b410
--- /dev/null
+++ b/spec/services/metrics/dashboard/self_monitoring_dashboard_service_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Metrics::Dashboard::SelfMonitoringDashboardService, :use_clean_rails_memory_store_caching do
+ include MetricsDashboardHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
+
+ before do
+ project.add_maintainer(user)
+ stub_application_setting(self_monitoring_project_id: project.id)
+ end
+
+ describe '#get_dashboard' do
+ let(:service_params) { [project, user, { environment: environment }] }
+ let(:service_call) { described_class.new(*service_params).get_dashboard }
+
+ it_behaves_like 'valid dashboard service response'
+ it_behaves_like 'raises error for users with insufficient permissions'
+ it_behaves_like 'caches the unprocessed dashboard for subsequent calls'
+ end
+
+ describe '.all_dashboard_paths' do
+ it 'returns the dashboard attributes' do
+ all_dashboards = described_class.all_dashboard_paths(project)
+
+ expect(all_dashboards).to eq(
+ [{
+ path: described_class::DASHBOARD_PATH,
+ display_name: described_class::DASHBOARD_NAME,
+ default: true,
+ system_dashboard: false
+ }]
+ )
+ end
+ end
+
+ describe '.valid_params?' do
+ subject { described_class.valid_params?(params) }
+
+ context 'with environment' do
+ let(:params) { { environment: environment } }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with dashboard_path' do
+ let(:params) { { dashboard_path: self_monitoring_dashboard_path } }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with a different dashboard selected' do
+ let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
+ let(:params) { { dashboard_path: dashboard_path, environment: environment } }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'missing environment and dashboard_path' do
+ let(:params) { {} }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+end
diff --git a/spec/services/metrics/dashboard/system_dashboard_service_spec.rb b/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
index cc9f711c611..1956f9b563b 100644
--- a/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
@@ -13,7 +13,7 @@ describe Metrics::Dashboard::SystemDashboardService, :use_clean_rails_memory_sto
project.add_maintainer(user)
end
- describe 'get_dashboard' do
+ describe '#get_dashboard' do
let(:dashboard_path) { described_class::DASHBOARD_PATH }
let(:service_params) { [project, user, { environment: environment, dashboard_path: dashboard_path }] }
let(:service_call) { described_class.new(*service_params).get_dashboard }
@@ -30,7 +30,7 @@ describe Metrics::Dashboard::SystemDashboardService, :use_clean_rails_memory_sto
end
end
- describe '::all_dashboard_paths' do
+ describe '.all_dashboard_paths' do
it 'returns the dashboard attributes' do
all_dashboards = described_class.all_dashboard_paths(project)
@@ -44,4 +44,24 @@ describe Metrics::Dashboard::SystemDashboardService, :use_clean_rails_memory_sto
)
end
end
+
+ describe '.valid_params?' do
+ let(:params) { { dashboard_path: described_class::DASHBOARD_PATH } }
+
+ subject { described_class.valid_params?(params) }
+
+ it { is_expected.to be_truthy }
+
+ context 'missing dashboard_path' do
+ let(:params) { {} }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'non-matching dashboard_path' do
+ let(:params) { { dashboard_path: 'path/to/bunk.yml' } }
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index c5e2fe8de12..7ba069d1e39 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -87,28 +87,10 @@ describe Notes::CreateService do
.to receive(:unfolded_diff?) { true }
end
- context 'using Gitlab::Diff::DeprecatedHighlightCache' do
- before do
- stub_feature_flags(hset_redis_diff_caching: false)
- end
-
- it 'clears noteable diff cache when it was unfolded for the note position' do
- expect_any_instance_of(Gitlab::Diff::DeprecatedHighlightCache).to receive(:clear)
-
- described_class.new(project_with_repo, user, new_opts).execute
- end
- end
+ it 'clears noteable diff cache when it was unfolded for the note position' do
+ expect_any_instance_of(Gitlab::Diff::HighlightCache).to receive(:clear)
- context 'using Gitlab::Diff::HighlightCache' do
- before do
- stub_feature_flags(hset_redis_diff_caching: true)
- end
-
- it 'clears noteable diff cache when it was unfolded for the note position' do
- expect_any_instance_of(Gitlab::Diff::HighlightCache).to receive(:clear)
-
- described_class.new(project_with_repo, user, new_opts).execute
- end
+ described_class.new(project_with_repo, user, new_opts).execute
end
it 'does not clear cache when note is not the first of the discussion' do
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index 7cb0bd41f13..7eea2a7afc6 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -176,7 +176,6 @@ describe Notes::QuickActionsService do
context 'CE restriction for issue assignees' do
describe '/assign' do
let(:project) { create(:project) }
- let(:maintainer) { create(:user).tap { |u| project.add_maintainer(u) } }
let(:assignee) { create(:user) }
let(:maintainer) { create(:user) }
let(:service) { described_class.new(project, maintainer) }
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index 73fcdd787aa..ab28e08ec83 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -30,6 +30,12 @@ describe Notes::UpdateService do
@note.reload
end
+ it 'does not update the note when params is blank' do
+ Timecop.freeze(1.day.from_now) do
+ expect { update_note({}) }.not_to change { note.reload.updated_at }
+ end
+ end
+
context 'suggestions' do
it 'refreshes note suggestions' do
markdown = <<-MARKDOWN.strip_heredoc
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 80b8d36aa07..07a1be6c12b 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -10,10 +10,12 @@ describe NotificationService, :mailer do
let(:notification) { described_class.new }
let(:assignee) { create(:user) }
- around do |example|
- perform_enqueued_jobs do
- example.run
- end
+ around(:example, :deliver_mails_inline) do |example|
+ # This is a temporary `around` hook until all the examples check the
+ # background jobs queue instead of the delivered emails array.
+ # `perform_enqueued_jobs` makes the ActiveJob jobs (e.g. mailer jobs) run inline
+ # compared to `Sidekiq::Testing.inline!` which makes the Sidekiq jobs run inline.
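+ # Examples tagged with `:deliver_mails_inline` keep relying on the
+ # deliveries array; untagged examples are expected to assert on the
+ # enqueued delivery jobs instead (e.g. via `have_enqueued_email`).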
+ perform_enqueued_jobs { example.run }
end
shared_examples 'altered milestone notification on issue' do
@@ -187,26 +189,41 @@ describe NotificationService, :mailer do
describe 'Keys' do
describe '#new_key' do
let(:key_options) { {} }
- let!(:key) { create(:personal_key, key_options) }
+ let!(:key) { build_stubbed(:personal_key, key_options) }
+
+ subject { notification.new_key(key) }
- it { expect(notification.new_key(key)).to be_truthy }
+ it "sends email to key owner" do
+ expect { subject }.to have_enqueued_email(key.id, mail: "new_ssh_key_email")
+ end
- describe 'never emails the ghost user' do
+ describe "never emails the ghost user" do
let(:key_options) { { user: User.ghost } }
- it { should_not_email_anyone }
+ it "does not send email to key owner" do
+ expect { subject }.not_to have_enqueued_email(key.id, mail: "new_ssh_key_email")
+ end
end
end
end
describe 'GpgKeys' do
describe '#new_gpg_key' do
- let!(:key) { create(:gpg_key) }
+ let(:key_options) { {} }
+ let(:key) { create(:gpg_key, key_options) }
+
+ subject { notification.new_gpg_key(key) }
+
+ it "sends email to key owner" do
+ expect { subject }.to have_enqueued_email(key.id, mail: "new_gpg_key_email")
+ end
- it { expect(notification.new_gpg_key(key)).to be_truthy }
+ describe "never emails the ghost user" do
+ let(:key_options) { { user: User.ghost } }
- it 'sends email to key owner' do
- expect { notification.new_gpg_key(key) }.to change { ActionMailer::Base.deliveries.size }.by(1)
+ it "does not send email to key owner" do
+ expect { subject }.not_to have_enqueued_email(key.id, mail: "new_gpg_key_email")
+ end
end
end
end
@@ -215,10 +232,10 @@ describe NotificationService, :mailer do
describe '#access_token_about_to_expire' do
let_it_be(:user) { create(:user) }
- it 'sends email to the token owner' do
- expect(notification.access_token_about_to_expire(user)).to be_truthy
+ subject { notification.access_token_about_to_expire(user) }
- should_email user
+ it 'sends email to the token owner' do
+ expect { subject }.to have_enqueued_email(user, mail: "access_token_about_to_expire_email")
end
end
end
@@ -231,6 +248,8 @@ describe NotificationService, :mailer do
let(:author) { create(:user) }
let(:note) { create(:note_on_issue, author: author, noteable: issue, project_id: issue.project_id, note: '@mention referenced, @unsubscribed_mentioned and @outsider also') }
+ subject { notification.new_note(note) }
+
before do
build_team(project)
project.add_maintainer(issue.author)
@@ -260,32 +279,23 @@ describe NotificationService, :mailer do
reset_delivered_emails!
end
- it do
- expect(SentNotification).to receive(:record).with(issue, any_args).exactly(10).times
-
- notification.new_note(note)
-
- should_email(@u_watcher)
- should_email(note.noteable.author)
- should_email(note.noteable.assignees.first)
- should_email(@u_custom_global)
- should_email(@u_mentioned)
- should_email(@subscriber)
- should_email(@watcher_and_subscriber)
- should_email(@subscribed_participant)
- should_email(@u_custom_off)
- should_email(@unsubscribed_mentioned)
- should_not_email(@u_guest_custom)
- should_not_email(@u_guest_watcher)
- should_not_email(note.author)
- should_not_email(@u_participating)
- should_not_email(@u_disabled)
- should_not_email(@unsubscriber)
- should_not_email(@u_outsider_mentioned)
- should_not_email(@u_lazy_participant)
+ it 'sends emails to recipients' do
+ subject
+
+ expect_delivery_jobs_count(10)
+ expect_enqueud_email(@u_watcher.id, note.id, nil, mail: "note_issue_email")
+ expect_enqueud_email(note.noteable.author.id, note.id, nil, mail: "note_issue_email")
+ expect_enqueud_email(note.noteable.assignees.first.id, note.id, nil, mail: "note_issue_email")
+ expect_enqueud_email(@u_custom_global.id, note.id, nil, mail: "note_issue_email")
+ expect_enqueud_email(@u_mentioned.id, note.id, "mentioned", mail: "note_issue_email")
+ expect_enqueud_email(@subscriber.id, note.id, "subscribed", mail: "note_issue_email")
+ expect_enqueud_email(@watcher_and_subscriber.id, note.id, "subscribed", mail: "note_issue_email")
+ expect_enqueud_email(@subscribed_participant.id, note.id, "subscribed", mail: "note_issue_email")
+ expect_enqueud_email(@u_custom_off.id, note.id, nil, mail: "note_issue_email")
+ expect_enqueud_email(@unsubscribed_mentioned.id, note.id, "mentioned", mail: "note_issue_email")
end
- it "emails the note author if they've opted into notifications about their activity" do
+ it "emails the note author if they've opted into notifications about their activity", :deliver_mails_inline do
note.author.notified_of_own_activity = true
notification.new_note(note)
@@ -294,7 +304,7 @@ describe NotificationService, :mailer do
expect(find_email_for(note.author)).to have_header('X-GitLab-NotificationReason', 'own_activity')
end
- it_behaves_like 'project emails are disabled' do
+ it_behaves_like 'project emails are disabled', check_delivery_jobs_queue: true do
let(:notification_target) { note }
let(:notification_trigger) { notification.new_note(note) }
end
@@ -302,21 +312,21 @@ describe NotificationService, :mailer do
it 'filters out "mentioned in" notes' do
mentioned_note = SystemNoteService.cross_reference(mentioned_issue, issue, issue.author)
+ reset_delivered_emails!
- expect(Notify).not_to receive(:note_issue_email)
notification.new_note(mentioned_note)
+
+ expect_no_delivery_jobs
end
context 'participating' do
context 'by note' do
before do
- reset_delivered_emails!
note.author = @u_lazy_participant
note.save
- notification.new_note(note)
end
- it { should_not_email(@u_lazy_participant) }
+ it { expect { subject }.not_to have_enqueued_email(@u_lazy_participant.id, note.id, mail: "note_issue_email") }
end
end
end
@@ -335,7 +345,7 @@ describe NotificationService, :mailer do
end
shared_examples 'new note notifications' do
- it do
+ it 'sends notifications', :deliver_mails_inline do
notification.new_note(note)
should_email(note.noteable.author)
@@ -359,7 +369,7 @@ describe NotificationService, :mailer do
it_behaves_like 'new note notifications'
- it_behaves_like 'project emails are disabled' do
+ it_behaves_like 'project emails are disabled', check_delivery_jobs_queue: true do
let(:notification_target) { note }
let(:notification_trigger) { notification.new_note(note) }
end
@@ -378,13 +388,13 @@ describe NotificationService, :mailer do
notification.new_note(note)
- should_email(user)
+ expect_enqueud_email(user.id, note.id, nil, mail: "note_issue_email")
end
end
end
end
- context 'confidential issue note' do
+ context 'confidential issue note', :deliver_mails_inline do
let(:project) { create(:project, :public) }
let(:author) { create(:user) }
let(:assignee) { create(:user) }
@@ -441,7 +451,7 @@ describe NotificationService, :mailer do
end
end
- context 'issue note mention' do
+ context 'issue note mention', :deliver_mails_inline do
let(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project, assignees: [assignee]) }
let(:mentioned_issue) { create(:issue, assignees: issue.assignees) }
@@ -507,7 +517,7 @@ describe NotificationService, :mailer do
end
end
- context 'project snippet note' do
+ context 'project snippet note', :deliver_mails_inline do
let!(:project) { create(:project, :public) }
let(:snippet) { create(:project_snippet, project: project, author: create(:user)) }
let(:author) { create(:user) }
@@ -551,7 +561,7 @@ describe NotificationService, :mailer do
end
end
- context 'personal snippet note' do
+ context 'personal snippet note', :deliver_mails_inline do
let(:snippet) { create(:personal_snippet, :public, author: @u_snippet_author) }
let(:note) { create(:note_on_personal_snippet, noteable: snippet, note: '@mentioned note', author: @u_note_author) }
@@ -600,7 +610,7 @@ describe NotificationService, :mailer do
end
end
- context 'commit note' do
+ context 'commit note', :deliver_mails_inline do
let(:project) { create(:project, :public, :repository) }
let(:note) { create(:note_on_commit, project: project) }
@@ -659,7 +669,7 @@ describe NotificationService, :mailer do
end
end
- context "merge request diff note" do
+ context "merge request diff note", :deliver_mails_inline do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request, source_project: project, assignees: [user], author: create(:user)) }
@@ -691,11 +701,11 @@ describe NotificationService, :mailer do
end
end
- describe '#send_new_release_notifications' do
+ describe '#send_new_release_notifications', :deliver_mails_inline, :sidekiq_inline do
context 'when recipients for a new release exist' do
let(:release) { create(:release) }
- it 'calls new_release_email for each relevant recipient', :sidekiq_might_not_need_inline do
+ it 'calls new_release_email for each relevant recipient' do
user_1 = create(:user)
user_2 = create(:user)
user_3 = create(:user)
@@ -712,7 +722,7 @@ describe NotificationService, :mailer do
end
end
- describe 'Participating project notification settings have priority over group and global settings if available' do
+ describe 'Participating project notification settings have priority over group and global settings if available', :deliver_mails_inline do
let!(:group) { create(:group) }
let!(:maintainer) { group.add_owner(create(:user, username: 'maintainer')).user }
let!(:user1) { group.add_developer(create(:user, username: 'user_with_project_and_custom_setting')).user }
@@ -770,7 +780,7 @@ describe NotificationService, :mailer do
end
end
- describe 'Issues' do
+ describe 'Issues', :deliver_mails_inline do
let(:group) { create(:group) }
let(:project) { create(:project, :public, namespace: group) }
let(:another_project) { create(:project, :public, namespace: group) }
@@ -978,7 +988,7 @@ describe NotificationService, :mailer do
expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::ASSIGNED)
end
- it 'emails previous assignee even if he has the "on mention" notif level' do
+ it 'emails previous assignee even if they have the "on mention" notif level' do
issue.assignees = [@u_mentioned]
notification.reassigned_issue(issue, @u_disabled, [@u_watcher])
@@ -995,7 +1005,7 @@ describe NotificationService, :mailer do
should_not_email(@u_lazy_participant)
end
- it 'emails new assignee even if he has the "on mention" notif level' do
+ it 'emails new assignee even if they have the "on mention" notif level' do
issue.assignees = [@u_mentioned]
notification.reassigned_issue(issue, @u_disabled, [@u_mentioned])
@@ -1423,7 +1433,7 @@ describe NotificationService, :mailer do
end
end
- describe 'Merge Requests' do
+ describe 'Merge Requests', :deliver_mails_inline do
let(:group) { create(:group) }
let(:project) { create(:project, :public, :repository, namespace: group) }
let(:another_project) { create(:project, :public, namespace: group) }
@@ -1898,7 +1908,7 @@ describe NotificationService, :mailer do
end
end
- describe 'Projects' do
+ describe 'Projects', :deliver_mails_inline do
let(:project) { create(:project) }
before do
@@ -1989,7 +1999,7 @@ describe NotificationService, :mailer do
end
end
- describe 'GroupMember' do
+ describe 'GroupMember', :deliver_mails_inline do
let(:added_user) { create(:user) }
describe '#new_access_request' do
@@ -2075,7 +2085,7 @@ describe NotificationService, :mailer do
end
end
- describe 'ProjectMember' do
+ describe 'ProjectMember', :deliver_mails_inline do
let(:project) { create(:project) }
let(:added_user) { create(:user) }
@@ -2236,7 +2246,7 @@ describe NotificationService, :mailer do
end
end
- context 'guest user in private project' do
+ context 'guest user in private project', :deliver_mails_inline do
let(:private_project) { create(:project, :private) }
let(:guest) { create(:user) }
let(:developer) { create(:user) }
@@ -2254,7 +2264,7 @@ describe NotificationService, :mailer do
end
it 'filters out guests when new note is created' do
- expect(SentNotification).to receive(:record).with(merge_request, any_args).exactly(1).times
+ expect(SentNotification).to receive(:record).with(merge_request, any_args).once
notification.new_note(note)
@@ -2291,7 +2301,7 @@ describe NotificationService, :mailer do
end
end
- describe 'Pipelines' do
+ describe 'Pipelines', :deliver_mails_inline do
describe '#pipeline_finished' do
let(:project) { create(:project, :public, :repository) }
let(:u_member) { create(:user) }
@@ -2507,7 +2517,7 @@ describe NotificationService, :mailer do
end
end
- describe 'Pages domains' do
+ describe 'Pages domains', :deliver_mails_inline do
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:domain, reload: true) { create(:pages_domain, project: project) }
let_it_be(:u_blocked) { create(:user, :blocked) }
@@ -2560,7 +2570,7 @@ describe NotificationService, :mailer do
end
end
- context 'Auto DevOps notifications' do
+ context 'Auto DevOps notifications', :deliver_mails_inline do
describe '#autodevops_disabled' do
let(:owner) { create(:user) }
let(:namespace) { create(:namespace, owner: owner) }
@@ -2584,7 +2594,7 @@ describe NotificationService, :mailer do
end
end
- describe 'Repository cleanup' do
+ describe 'Repository cleanup', :deliver_mails_inline do
let(:user) { create(:user) }
let(:project) { create(:project) }
@@ -2615,7 +2625,7 @@ describe NotificationService, :mailer do
end
end
- context 'Remote mirror notifications' do
+ context 'Remote mirror notifications', :deliver_mails_inline do
describe '#remote_mirror_update_failed' do
let(:project) { create(:project) }
let(:remote_mirror) { create(:remote_mirror, project: project) }
@@ -2653,7 +2663,7 @@ describe NotificationService, :mailer do
end
end
- context 'with external authorization service' do
+ context 'with external authorization service', :deliver_mails_inline do
let(:issue) { create(:issue) }
let(:project) { issue.project }
let(:note) { create(:note, noteable: issue, project: project) }
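Editor's note: the hunks above replace the blanket :mailer behaviour with an explicit :deliver_mails_inline tag on the groups that still assert synchronous delivery, while the default path asserts enqueued delivery jobs (see expect_enqueud_email). The following is a minimal, hypothetical sketch of how such an RSpec metadata tag could be wired; it is not GitLab's actual spec support code and assumes the ActiveJob test adapter is in use.

require 'active_job'

RSpec.configure do |config|
  config.include ActiveJob::TestHelper

  config.around(:each, :deliver_mails_inline) do |example|
    # Run any jobs enqueued inside the example (mail delivery included)
    # immediately, so matchers can inspect ActionMailer::Base.deliveries.
    # Untagged examples leave the jobs on the queue for enqueue-style checks.
    perform_enqueued_jobs { example.run }
  end
end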
diff --git a/spec/services/post_receive_service_spec.rb b/spec/services/post_receive_service_spec.rb
new file mode 100644
index 00000000000..9b9200fd33e
--- /dev/null
+++ b/spec/services/post_receive_service_spec.rb
@@ -0,0 +1,186 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe PostReceiveService do
+ include Gitlab::Routing
+
+ let_it_be(:project) { create(:project, :repository, :wiki_repo) }
+ let_it_be(:user) { create(:user) }
+
+ let(:identifier) { 'key-123' }
+ let(:gl_repository) { "project-#{project.id}" }
+ let(:branch_name) { 'feature' }
+ let(:secret_token) { Gitlab::Shell.secret_token }
+ let(:reference_counter) { double('ReferenceCounter') }
+ let(:push_options) { ['ci.skip', 'another push option'] }
+
+ let(:changes) do
+ "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{branch_name}"
+ end
+
+ let(:params) do
+ {
+ gl_repository: gl_repository,
+ secret_token: secret_token,
+ identifier: identifier,
+ changes: changes,
+ push_options: push_options
+ }
+ end
+
+ let(:response) { PostReceiveService.new(user, project, params).execute }
+
+ subject { response.messages.as_json }
+
+ it 'enqueues a PostReceive worker job' do
+ expect(PostReceive).to receive(:perform_async)
+ .with(gl_repository, identifier, changes, { ci: { skip: true } })
+
+ subject
+ end
+
+ it 'decreases the reference counter and returns the result' do
+ expect(Gitlab::ReferenceCounter).to receive(:new).with(gl_repository)
+ .and_return(reference_counter)
+ expect(reference_counter).to receive(:decrease).and_return(true)
+
+ expect(response.reference_counter_decreased).to be(true)
+ end
+
+ it 'returns link to create new merge request' do
+ message = <<~MESSAGE.strip
+ To create a merge request for #{branch_name}, visit:
+ http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=#{branch_name}
+ MESSAGE
+
+ expect(subject).to include(build_basic_message(message))
+ end
+
+ it 'returns the link to an existing merge request when it exists' do
+ merge_request = create(:merge_request, source_project: project, source_branch: branch_name, target_branch: 'master')
+ message = <<~MESSAGE.strip
+ View merge request for feature:
+ #{project_merge_request_url(project, merge_request)}
+ MESSAGE
+
+ expect(subject).to include(build_basic_message(message))
+ end
+
+ context 'when printing_merge_request_link_enabled is false' do
+ let(:project) { create(:project, printing_merge_request_link_enabled: false) }
+
+ it 'returns no merge request messages' do
+ expect(subject).to be_blank
+ end
+ end
+
+ it 'does not invoke MergeRequests::PushOptionsHandlerService' do
+ expect(MergeRequests::PushOptionsHandlerService).not_to receive(:new)
+
+ subject
+ end
+
+ context 'when there are merge_request push options' do
+ let(:params) { super().merge(push_options: ['merge_request.create']) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'invokes MergeRequests::PushOptionsHandlerService' do
+ expect(MergeRequests::PushOptionsHandlerService).to receive(:new).and_call_original
+
+ subject
+ end
+
+ it 'creates a new merge request' do
+ expect { Sidekiq::Testing.fake! { subject } }.to change(MergeRequest, :count).by(1)
+ end
+
+ it 'links to the newly created merge request' do
+ message = <<~MESSAGE.strip
+ View merge request for #{branch_name}:
+ http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/1
+ MESSAGE
+
+ expect(subject).to include(build_basic_message(message))
+ end
+
+ it 'adds errors on the service instance to warnings' do
+ expect_any_instance_of(
+ MergeRequests::PushOptionsHandlerService
+ ).to receive(:errors).at_least(:once).and_return(['my error'])
+
+ message = "WARNINGS:\nError encountered with push options 'merge_request.create': my error"
+
+ expect(subject).to include(build_alert_message(message))
+ end
+
+ it 'adds ActiveRecord errors on invalid MergeRequest records to warnings' do
+ invalid_merge_request = MergeRequest.new
+ invalid_merge_request.errors.add(:base, 'my error')
+ message = "WARNINGS:\nError encountered with push options 'merge_request.create': my error"
+
+ expect_any_instance_of(
+ MergeRequests::CreateService
+ ).to receive(:execute).and_return(invalid_merge_request)
+
+ expect(subject).to include(build_alert_message(message))
+ end
+ end
+
+ context 'broadcast message exists' do
+ it 'outputs a broadcast message' do
+ broadcast_message = create(:broadcast_message, starts_at: 1.day.ago, ends_at: 1.day.from_now)
+
+ expect(subject).to include(build_alert_message(broadcast_message.message))
+ end
+ end
+
+ context 'broadcast message does not exist' do
+ it 'does not output a broadcast message' do
+ expect(has_alert_messages?(subject)).to be_falsey
+ end
+ end
+
+ context 'nil broadcast message' do
+ it 'does not output a broadcast message' do
+ allow(BroadcastMessage).to receive(:current).and_return(nil)
+
+ expect(has_alert_messages?(subject)).to be_falsey
+ end
+ end
+
+ context 'with redirected data' do
+ it 'returns redirected message on the response' do
+ project_moved = Gitlab::Checks::ProjectMoved.new(project, user, 'http', 'foo/baz')
+ project_moved.add_message
+
+ expect(subject).to include(build_basic_message(project_moved.message))
+ end
+ end
+
+ context 'with new project data' do
+ it 'returns new project message on the response' do
+ project_created = Gitlab::Checks::ProjectCreated.new(project, user, 'http')
+ project_created.add_message
+
+ expect(subject).to include(build_basic_message(project_created.message))
+ end
+ end
+
+ def build_alert_message(message)
+ { 'type' => 'alert', 'message' => message }
+ end
+
+ def build_basic_message(message)
+ { 'type' => 'basic', 'message' => message }
+ end
+
+ def has_alert_messages?(messages)
+ messages.any? do |message|
+ message['type'] == 'alert'
+ end
+ end
+end
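Editor's note: the first example in the new spec asserts that the raw push option strings (['ci.skip', 'another push option']) reach PostReceive as the nested hash { ci: { skip: true } }. A hypothetical stand-alone sketch of that "namespace.key[=value]" shape (GitLab's production parser, Gitlab::PushOptions, is more involved; this only illustrates the conversion and drops non-namespaced strings):

def parse_push_options(raw_options)
  raw_options.each_with_object({}) do |option, result|
    namespace, rest = option.split('.', 2)
    next if rest.nil? # e.g. "another push option" has no namespace

    key, value = rest.split('=', 2)
    result[namespace.to_sym] ||= {}
    # A bare key (no "=value") is treated as a boolean switch.
    result[namespace.to_sym][key.to_sym] = value.nil? ? true : value
  end
end

parse_push_options(['ci.skip', 'another push option'])
# => { ci: { skip: true } }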
diff --git a/spec/services/projects/after_import_service_spec.rb b/spec/services/projects/after_import_service_spec.rb
index 27e8f3c45ba..82f654cea10 100644
--- a/spec/services/projects/after_import_service_spec.rb
+++ b/spec/services/projects/after_import_service_spec.rb
@@ -20,7 +20,7 @@ describe Projects::AfterImportService do
allow(housekeeping_service)
.to receive(:execute).and_yield
- expect(housekeeping_service).to receive(:increment!)
+ allow(housekeeping_service).to receive(:increment!)
end
it 'performs housekeeping' do
@@ -58,6 +58,52 @@ describe Projects::AfterImportService do
end
end
+ context 'when the after import action throws a non-retriable exception' do
+ let(:exception) { StandardError.new('after import error') }
+
+ before do
+ allow(repository)
+ .to receive(:delete_all_refs_except)
+ .and_raise(exception)
+ end
+
+ it 'throws after import error' do
+ expect { subject.execute }.to raise_exception('after import error')
+ end
+ end
+
+ context 'when the after import action throws a retriable exception once' do
+ let(:exception) { GRPC::DeadlineExceeded.new }
+
+ before do
+ expect(repository)
+ .to receive(:delete_all_refs_except)
+ .and_raise(exception)
+ expect(repository)
+ .to receive(:delete_all_refs_except)
+ .and_call_original
+
+ subject.execute
+ end
+
+ it 'removes refs/pull/**/*' do
+ expect(rugged.references.map(&:name))
+ .not_to include(%r{\Arefs/pull/})
+ end
+
+ it 'records the failures in the database', :aggregate_failures do
+ import_failure = ImportFailure.last
+
+ expect(import_failure.source).to eq('delete_all_refs')
+ expect(import_failure.project_id).to eq(project.id)
+ expect(import_failure.relation_key).to be_nil
+ expect(import_failure.relation_index).to be_nil
+ expect(import_failure.exception_class).to eq('GRPC::DeadlineExceeded')
+ expect(import_failure.exception_message).to be_present
+ expect(import_failure.correlation_id_value).not_to be_empty
+ end
+ end
+
def rugged
rugged_repo(repository)
end
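Editor's note: the new contexts above exercise a retry-once policy, where GRPC::DeadlineExceeded is retried and recorded as an ImportFailure while any other error propagates immediately. A hypothetical sketch of that control flow (method and hook names are illustrative, not the service's API):

def with_one_retry(retriable_error, on_failure:)
  attempts = 0
  begin
    yield
  rescue retriable_error => error
    attempts += 1
    on_failure.call(error)   # e.g. persist an ImportFailure-style record
    retry if attempts <= 1   # exactly one retry, then give up
    raise
  end
end

# Usage sketch:
# with_one_retry(GRPC::DeadlineExceeded, on_failure: ->(e) { record(e) }) do
#   repository.delete_all_refs_except(allowed_refs)
# end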
diff --git a/spec/services/projects/after_rename_service_spec.rb b/spec/services/projects/after_rename_service_spec.rb
index bf637b70aaf..b81dd3d7e3f 100644
--- a/spec/services/projects/after_rename_service_spec.rb
+++ b/spec/services/projects/after_rename_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Projects::AfterRenameService do
let(:rugged_config) { rugged_repo(project.repository).config }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::HashedProject.new(project) }
+ let(:hashed_storage) { Storage::Hashed.new(project) }
let!(:path_before_rename) { project.path }
let!(:full_path_before_rename) { project.full_path }
let!(:path_after_rename) { "#{project.path}-renamed" }
diff --git a/spec/services/projects/alerting/notify_service_spec.rb b/spec/services/projects/alerting/notify_service_spec.rb
new file mode 100644
index 00000000000..925d323584e
--- /dev/null
+++ b/spec/services/projects/alerting/notify_service_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::Alerting::NotifyService do
+ let_it_be(:project, reload: true) { create(:project) }
+
+ before do
+ # `let_it_be(:project)` reuses the record across examples, so clear the memoized licensed-feature check here
+ project.clear_memoization(:licensed_feature_available)
+ end
+
+ shared_examples 'processes incident issues' do |amount|
+ let(:create_incident_service) { spy }
+
+ it 'processes issues' do
+ expect(IncidentManagement::ProcessAlertWorker)
+ .to receive(:perform_async)
+ .with(project.id, kind_of(Hash))
+ .exactly(amount).times
+
+ Sidekiq::Testing.inline! do
+ expect(subject.status).to eq(:success)
+ end
+ end
+ end
+
+ shared_examples 'does not process incident issues' do |http_status:|
+ it 'does not process issues' do
+ expect(IncidentManagement::ProcessAlertWorker)
+ .not_to receive(:perform_async)
+
+ expect(subject.status).to eq(:error)
+ expect(subject.http_status).to eq(http_status)
+ end
+ end
+
+ describe '#execute' do
+ let(:token) { 'invalid-token' }
+ let(:starts_at) { Time.now.change(usec: 0) }
+ let(:service) { described_class.new(project, nil, payload) }
+ let(:payload_raw) do
+ {
+ 'title' => 'alert title',
+ 'start_time' => starts_at.rfc3339
+ }
+ end
+ let(:payload) { ActionController::Parameters.new(payload_raw).permit! }
+
+ subject { service.execute(token) }
+
+ context 'with activated Alerts Service' do
+ let!(:alerts_service) { create(:alerts_service, project: project) }
+
+ context 'with valid token' do
+ let(:token) { alerts_service.token }
+
+ context 'with a valid payload' do
+ it_behaves_like 'processes incident issues', 1
+ end
+
+ context 'with an invalid payload' do
+ before do
+ allow(Gitlab::Alerting::NotificationPayloadParser)
+ .to receive(:call)
+ .and_raise(Gitlab::Alerting::NotificationPayloadParser::BadPayloadError)
+ end
+
+ it_behaves_like 'does not process incident issues', http_status: 400
+ end
+ end
+
+ context 'with invalid token' do
+ it_behaves_like 'does not process incident issues', http_status: 401
+ end
+ end
+
+ context 'with deactivated Alerts Service' do
+ let!(:alerts_service) { create(:alerts_service, :inactive, project: project) }
+
+ it_behaves_like 'does not process incident issues', http_status: 403
+ end
+ end
+end
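Editor's note: the shared examples above take block arguments ('processes incident issues', 1 and http_status: keyword options), which is plain RSpec rather than anything GitLab-specific. A minimal self-contained illustration of the pattern, with made-up names:

RSpec.describe 'parameterised shared examples' do
  shared_examples 'rejects the request' do |http_status:|
    it "responds with #{http_status}" do
      expect(response_status).to eq(http_status)
    end
  end

  context 'with an invalid token' do
    # The shared example runs in a nested group, so it sees this let.
    let(:response_status) { 401 }

    it_behaves_like 'rejects the request', http_status: 401
  end
end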
diff --git a/spec/services/projects/batch_open_issues_count_service_spec.rb b/spec/services/projects/batch_open_issues_count_service_spec.rb
index e978334d68b..8cb0ce03fba 100644
--- a/spec/services/projects/batch_open_issues_count_service_spec.rb
+++ b/spec/services/projects/batch_open_issues_count_service_spec.rb
@@ -8,7 +8,7 @@ describe Projects::BatchOpenIssuesCountService do
let(:subject) { described_class.new([project_1, project_2]) }
- context '#refresh_cache', :use_clean_rails_memory_store_caching do
+ describe '#refresh_cache', :use_clean_rails_memory_store_caching do
before do
create(:issue, project: project_1)
create(:issue, project: project_1, confidential: true)
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index 78b969c8a0e..ef7e9cda9e0 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -41,7 +41,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
let(:params) { {} }
it 'does not remove anything' do
- expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag)
+ expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_digest)
is_expected.to include(status: :success, deleted: [])
end
@@ -130,6 +130,38 @@ describe Projects::ContainerRepository::CleanupTagsService do
is_expected.to include(status: :success, deleted: %w(Bb Ba C))
end
end
+
+ context 'when running a container_expiration_policy' do
+ let(:user) { nil }
+
+ context 'with valid container_expiration_policy param' do
+ let(:params) do
+ { 'name_regex' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day',
+ 'container_expiration_policy' => true }
+ end
+
+ it 'succeeds without a user' do
+ expect_delete('sha256:configB').twice
+ expect_delete('sha256:configC')
+
+ is_expected.to include(status: :success, deleted: %w(Bb Ba C))
+ end
+ end
+
+ context 'without container_expiration_policy param' do
+ let(:params) do
+ { 'name_regex' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day' }
+ end
+
+ it 'fails' do
+ is_expected.to include(status: :error, message: 'access denied')
+ end
+ end
+ end
end
private
@@ -156,7 +188,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
def expect_delete(digest)
expect_any_instance_of(ContainerRegistry::Client)
- .to receive(:delete_repository_tag)
+ .to receive(:delete_repository_tag_by_digest)
.with(repository.path, digest) { true }
end
end
diff --git a/spec/services/projects/container_repository/delete_tags_service_spec.rb b/spec/services/projects/container_repository/delete_tags_service_spec.rb
index decbbb7597f..e17e4b6f7c9 100644
--- a/spec/services/projects/container_repository/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/delete_tags_service_spec.rb
@@ -18,10 +18,6 @@ describe Projects::ContainerRepository::DeleteTagsService do
stub_container_registry_tags(
repository: repository.path,
tags: %w(latest A Ba Bb C D E))
-
- stub_tag_digest('latest', 'sha256:configA')
- stub_tag_digest('A', 'sha256:configA')
- stub_tag_digest('Ba', 'sha256:configB')
end
describe '#execute' do
@@ -38,82 +34,178 @@ describe Projects::ContainerRepository::DeleteTagsService do
project.add_developer(user)
end
- context 'when no params are specified' do
- let(:params) { {} }
+ context 'when the registry supports fast delete' do
+ context 'and the feature is enabled' do
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:repository) { create(:container_repository, :root, project: project) }
- it 'does not remove anything' do
- expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag)
+ before do
+ allow(repository.client).to receive(:supports_tag_delete?).and_return(true)
+ end
- is_expected.to include(status: :error)
- end
- end
+ context 'with tags to delete' do
+ let_it_be(:tags) { %w[A Ba] }
- context 'with empty tags' do
- let(:tags) { [] }
+ it 'deletes the tags by name' do
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/tags/reference/A")
+ .to_return(status: 200, body: "")
- it 'does not remove anything' do
- expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag)
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/tags/reference/Ba")
+ .to_return(status: 200, body: "")
- is_expected.to include(status: :error)
- end
- end
+ expect_delete_tag_by_name('A')
+ expect_delete_tag_by_name('Ba')
+
+ is_expected.to include(status: :success)
+ end
+
+ it 'succeeds when tag delete returns 404' do
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/tags/reference/A")
+ .to_return(status: 200, body: "")
+
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/tags/reference/Ba")
+ .to_return(status: 404, body: "")
+
+ is_expected.to include(status: :success)
+ end
+
+ context 'with failures' do
+ context 'when the delete request fails' do
+ before do
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/tags/reference/A")
+ .to_return(status: 500, body: "")
+
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/tags/reference/Ba")
+ .to_return(status: 500, body: "")
+ end
- context 'with tags to delete' do
- let(:tags) { %w[A Ba] }
+ it { is_expected.to include(status: :error) }
+ end
+ end
+ end
+
+ context 'when no params are specified' do
+ let_it_be(:params) { {} }
+
+ it 'does not remove anything' do
+ expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_name)
+
+ is_expected.to include(status: :error)
+ end
+ end
- it 'deletes the tags using a dummy image' do
- stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
+ context 'with empty tags' do
+ let_it_be(:tags) { [] }
- stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/A")
- .to_return(status: 200, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
+ it 'does not remove anything' do
+ expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_name)
- stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/Ba")
- .to_return(status: 200, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
+ is_expected.to include(status: :error)
+ end
+ end
+ end
+ context 'and the feature is disabled' do
+ before do
+ stub_feature_flags(container_registry_fast_tag_delete: false)
+ end
- expect_delete_tag('sha256:dummy')
+ it 'falls back to slow delete' do
+ expect(service).not_to receive(:fast_delete)
+ expect(service).to receive(:slow_delete).with(repository, tags)
- is_expected.to include(status: :success)
+ subject
+ end
end
+ end
+ context 'when the registry does not support fast delete' do
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:repository) { create(:container_repository, :root, project: project) }
- it 'succedes when tag delete returns 404' do
- stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
+ before do
+ stub_tag_digest('latest', 'sha256:configA')
+ stub_tag_digest('A', 'sha256:configA')
+ stub_tag_digest('Ba', 'sha256:configB')
- stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/A")
- .to_return(status: 200, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
+ allow(repository.client).to receive(:supports_tag_delete?).and_return(false)
+ end
- stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/Ba")
- .to_return(status: 200, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
+ context 'when no params are specified' do
+ let_it_be(:params) { {} }
- stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/manifests/sha256:dummy")
- .to_return(status: 404, body: "", headers: {})
+ it 'does not remove anything' do
+ expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_digest)
- is_expected.to include(status: :success)
+ is_expected.to include(status: :error)
+ end
end
- context 'with failures' do
- context 'when the dummy manifest generation fails' do
- before do
- stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3', success: false)
- end
+ context 'with empty tags' do
+ let_it_be(:tags) { [] }
+
+ it 'does not remove anything' do
+ expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_digest)
+
+ is_expected.to include(status: :error)
+ end
+ end
+
+ context 'with tags to delete' do
+ let_it_be(:tags) { %w[A Ba] }
- it { is_expected.to include(status: :error) }
+ it 'deletes the tags using a dummy image' do
+ stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
+
+ stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/A")
+ .to_return(status: 200, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
+
+ stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/Ba")
+ .to_return(status: 200, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
+
+ expect_delete_tag_by_digest('sha256:dummy')
+
+ is_expected.to include(status: :success)
end
- context 'when updating the tags fails' do
- before do
- stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
+ it 'succeeds when tag delete returns 404' do
+ stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
- stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/A")
- .to_return(status: 500, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
+ stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/A")
+ .to_return(status: 200, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
- stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/Ba")
- .to_return(status: 500, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
+ stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/Ba")
+ .to_return(status: 200, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
+
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/manifests/sha256:dummy")
+ .to_return(status: 404, body: "", headers: {})
+
+ is_expected.to include(status: :success)
+ end
- stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/manifests/sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3")
- .to_return(status: 200, body: "", headers: {})
+ context 'with failures' do
+ context 'when the dummy manifest generation fails' do
+ before do
+ stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3', success: false)
+ end
+
+ it { is_expected.to include(status: :error) }
end
- it { is_expected.to include(status: :error) }
+ context 'when updating the tags fails' do
+ before do
+ stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
+
+ stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/A")
+ .to_return(status: 500, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
+
+ stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/Ba")
+ .to_return(status: 500, body: "", headers: { 'docker-content-digest' => 'sha256:dummy' })
+
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/manifests/sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3")
+ .to_return(status: 200, body: "", headers: {})
+ end
+
+ it { is_expected.to include(status: :error) }
+ end
end
end
end
@@ -141,9 +233,21 @@ describe Projects::ContainerRepository::DeleteTagsService do
.with(repository.path, content, digest) { double(success?: success ) }
end
- def expect_delete_tag(digest)
+ def expect_delete_tag_by_digest(digest)
expect_any_instance_of(ContainerRegistry::Client)
- .to receive(:delete_repository_tag)
+ .to receive(:delete_repository_tag_by_digest)
.with(repository.path, digest) { true }
+
+ expect_any_instance_of(ContainerRegistry::Client)
+ .not_to receive(:delete_repository_tag_by_name)
+ end
+
+ def expect_delete_tag_by_name(name)
+ expect_any_instance_of(ContainerRegistry::Client)
+ .to receive(:delete_repository_tag_by_name)
+ .with(repository.path, name) { true }
+
+ expect_any_instance_of(ContainerRegistry::Client)
+ .not_to receive(:delete_repository_tag_by_digest)
end
end
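Editor's note: the restructured spec above covers two deletion paths plus a feature-flag fallback. A hypothetical sketch of the dispatch being exercised; fast_delete and slow_delete are the private method names the spec stubs, Feature.enabled? is GitLab's flag check, and the body is only an illustration, not the service's implementation:

def delete_tags(repository, tags)
  if repository.client.supports_tag_delete? &&
      Feature.enabled?(:container_registry_fast_tag_delete)
    fast_delete(repository, tags)   # DELETE /v2/<path>/tags/reference/<name>
  else
    slow_delete(repository, tags)   # upload dummy manifest, delete by digest
  end
end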
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index bce3f72a287..a8e7919dc81 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -43,6 +43,12 @@ describe Projects::CreateService, '#execute' do
create_project(user, opts)
end
+
+ it 'creates associated project settings' do
+ project = create_project(user, opts)
+
+ expect(project.project_setting).to be_persisted
+ end
end
context "admin creates project with other user's namespace_id" do
diff --git a/spec/services/projects/destroy_rollback_service_spec.rb b/spec/services/projects/destroy_rollback_service_spec.rb
new file mode 100644
index 00000000000..8facf17dc45
--- /dev/null
+++ b/spec/services/projects/destroy_rollback_service_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::DestroyRollbackService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
+ let(:repository) { project.repository }
+ let(:repository_storage) { project.repository_storage }
+
+ subject { described_class.new(project, user, {}).execute }
+
+ describe '#execute' do
+ let(:path) { repository.disk_path + '.git' }
+ let(:removal_path) { "#{repository.disk_path}+#{project.id}#{Repositories::DestroyService::DELETED_FLAG}.git" }
+
+ before do
+ aggregate_failures do
+ expect(TestEnv.storage_dir_exists?(repository_storage, path)).to be_truthy
+ expect(TestEnv.storage_dir_exists?(repository_storage, removal_path)).to be_falsey
+ end
+
+ # Don't run Sidekiq, so the renamed (+deleted) repository is left on disk for the rollback
+ Sidekiq::Testing.fake! { destroy_project(project, user, {}) }
+
+ aggregate_failures do
+ expect(TestEnv.storage_dir_exists?(repository_storage, path)).to be_falsey
+ expect(TestEnv.storage_dir_exists?(repository_storage, removal_path)).to be_truthy
+ end
+ end
+
+ it 'restores the repositories' do
+ Sidekiq::Testing.fake! { subject }
+
+ aggregate_failures do
+ expect(TestEnv.storage_dir_exists?(repository_storage, path)).to be_truthy
+ expect(TestEnv.storage_dir_exists?(repository_storage, removal_path)).to be_falsey
+ end
+ end
+ end
+
+ def destroy_project(project, user, params = {})
+ Projects::DestroyService.new(project, user, params).execute
+ end
+end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index d8ba042af35..21a65f361a9 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -5,15 +5,11 @@ require 'spec_helper'
describe Projects::DestroyService do
include ProjectForksHelper
- let!(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let!(:project) { create(:project, :repository, namespace: user.namespace) }
- let!(:path) do
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- project.repository.path_to_repo
- end
- end
- let!(:remove_path) { path.sub(/\.git\Z/, "+#{project.id}+deleted.git") }
- let!(:async) { false } # execute or async_execute
+ let(:path) { project.repository.disk_path }
+ let(:remove_path) { removal_path(path) }
+ let(:async) { false } # execute or async_execute
before do
stub_container_registry_config(enabled: true)
@@ -21,7 +17,12 @@ describe Projects::DestroyService do
end
shared_examples 'deleting the project' do
- it 'deletes the project' do
+ before do
+ # Run Sidekiq immediately to check that the renamed repository is removed
+ destroy_project(project, user, {})
+ end
+
+ it 'deletes the project', :sidekiq_inline do
expect(Project.unscoped.all).not_to include(project)
expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
@@ -30,16 +31,10 @@ describe Projects::DestroyService do
end
shared_examples 'deleting the project with pipeline and build' do
- context 'with pipeline and build' do # which has optimistic locking
+ context 'with pipeline and build', :sidekiq_inline do # which has optimistic locking
let!(:pipeline) { create(:ci_pipeline, project: project) }
let!(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
- before do
- perform_enqueued_jobs do
- destroy_project(project, user, {})
- end
- end
-
it_behaves_like 'deleting the project'
end
end
@@ -47,66 +42,63 @@ describe Projects::DestroyService do
shared_examples 'handles errors thrown during async destroy' do |error_message|
it 'does not allow the error to bubble up' do
expect do
- perform_enqueued_jobs { destroy_project(project, user, {}) }
+ destroy_project(project, user, {})
end.not_to raise_error
end
it 'unmarks the project as "pending deletion"' do
- perform_enqueued_jobs { destroy_project(project, user, {}) }
+ destroy_project(project, user, {})
expect(project.reload.pending_delete).to be(false)
end
it 'stores an error message in `projects.delete_error`' do
- perform_enqueued_jobs { destroy_project(project, user, {}) }
+ destroy_project(project, user, {})
expect(project.reload.delete_error).to be_present
expect(project.delete_error).to include(error_message)
end
end
- context 'Sidekiq inline' do
- before do
- # Run sidekiq immediately to check that renamed repository will be removed
- perform_enqueued_jobs { destroy_project(project, user, {}) }
- end
+ it_behaves_like 'deleting the project'
- it_behaves_like 'deleting the project'
+ it 'invalidates personal_project_count cache' do
+ expect(user).to receive(:invalidate_personal_projects_count)
- context 'when has remote mirrors' do
- let!(:project) do
- create(:project, :repository, namespace: user.namespace).tap do |project|
- project.remote_mirrors.create(url: 'http://test.com')
- end
- end
- let!(:async) { true }
+ destroy_project(project, user, {})
+ end
- it 'destroys them', :sidekiq_might_not_need_inline do
- expect(RemoteMirror.count).to eq(0)
+ context 'when project has remote mirrors' do
+ let!(:project) do
+ create(:project, :repository, namespace: user.namespace).tap do |project|
+ project.remote_mirrors.create(url: 'http://test.com')
end
end
- it 'invalidates personal_project_count cache' do
- expect(user).to receive(:invalidate_personal_projects_count)
+ it 'destroys them' do
+ expect(RemoteMirror.count).to eq(1)
- destroy_project(project, user)
+ destroy_project(project, user, {})
+
+ expect(RemoteMirror.count).to eq(0)
end
+ end
- context 'when project has exports' do
- let!(:project_with_export) do
- create(:project, :repository, namespace: user.namespace).tap do |project|
- create(:import_export_upload,
- project: project,
- export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'))
- end
+ context 'when project has exports' do
+ let!(:project_with_export) do
+ create(:project, :repository, namespace: user.namespace).tap do |project|
+ create(:import_export_upload,
+ project: project,
+ export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'))
end
- let!(:async) { true }
+ end
- it 'destroys project and export', :sidekiq_might_not_need_inline do
- expect { destroy_project(project_with_export, user) }.to change(ImportExportUpload, :count).by(-1)
+ it 'destroys project and export' do
+ expect do
+ destroy_project(project_with_export, user, {})
+ end.to change(ImportExportUpload, :count).by(-1)
- expect(Project.all).not_to include(project_with_export)
- end
+ expect(Project.all).not_to include(project_with_export)
end
end
@@ -117,20 +109,24 @@ describe Projects::DestroyService do
end
it { expect(Project.all).not_to include(project) }
- it { expect(Dir.exist?(path)).to be_falsey }
- it { expect(Dir.exist?(remove_path)).to be_truthy }
+
+ it do
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
+ end
+
+ it do
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_truthy
+ end
end
context 'when flushing caches fail due to Git errors' do
before do
allow(project.repository).to receive(:before_delete).and_raise(::Gitlab::Git::CommandError)
allow(Gitlab::GitLogger).to receive(:warn).with(
- class: described_class.name,
+ class: Repositories::DestroyService.name,
project_id: project.id,
disk_path: project.disk_path,
message: 'Gitlab::Git::CommandError').and_call_original
-
- perform_enqueued_jobs { destroy_project(project, user, {}) }
end
it_behaves_like 'deleting the project'
@@ -153,14 +149,12 @@ describe Projects::DestroyService do
end
end
- context 'with async_execute', :sidekiq_might_not_need_inline do
+ context 'with async_execute', :sidekiq_inline do
let(:async) { true }
context 'async delete of project with private issue visibility' do
before do
project.project_feature.update_attribute("issues_access_level", ProjectFeature::PRIVATE)
- # Run sidekiq immediately to check that renamed repository will be removed
- perform_enqueued_jobs { destroy_project(project, user, {}) }
end
it_behaves_like 'deleting the project'
@@ -204,7 +198,7 @@ describe Projects::DestroyService do
it 'allows error to bubble up and rolls back project deletion' do
expect do
- perform_enqueued_jobs { destroy_project(project, user, {}) }
+ destroy_project(project, user, {})
end.to raise_error(Exception, 'Other error message')
expect(project.reload.pending_delete).to be(false)
@@ -312,15 +306,12 @@ describe Projects::DestroyService do
end
context 'repository +deleted path removal' do
- def removal_path(path)
- "#{path}+#{project.id}#{described_class::DELETED_FLAG}"
- end
-
context 'regular phase' do
it 'schedules +deleted removal of existing repos' do
service = described_class.new(project, user, {})
allow(service).to receive(:schedule_stale_repos_removal)
+ expect(Repositories::ShellDestroyService).to receive(:new).and_call_original
expect(GitlabShellWorker).to receive(:perform_in)
.with(5.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
@@ -329,14 +320,16 @@ describe Projects::DestroyService do
end
context 'stale cleanup' do
- let!(:async) { true }
+ let(:async) { true }
it 'schedules +deleted wiki and repo removal' do
allow(ProjectDestroyWorker).to receive(:perform_async)
+ expect(Repositories::ShellDestroyService).to receive(:new).with(project.repository).and_call_original
expect(GitlabShellWorker).to receive(:perform_in)
.with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
+ expect(Repositories::ShellDestroyService).to receive(:new).with(project.wiki.repository).and_call_original
expect(GitlabShellWorker).to receive(:perform_in)
.with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.wiki.disk_path))
@@ -345,33 +338,11 @@ describe Projects::DestroyService do
end
end
- context '#attempt_restore_repositories' do
- let(:path) { project.disk_path + '.git' }
-
- before do
- expect(TestEnv.storage_dir_exists?(project.repository_storage, path)).to be_truthy
- expect(TestEnv.storage_dir_exists?(project.repository_storage, remove_path)).to be_falsey
-
- # Dont run sidekiq to check if renamed repository exists
- Sidekiq::Testing.fake! { destroy_project(project, user, {}) }
-
- expect(TestEnv.storage_dir_exists?(project.repository_storage, path)).to be_falsey
- expect(TestEnv.storage_dir_exists?(project.repository_storage, remove_path)).to be_truthy
- end
-
- it 'restores the repositories' do
- Sidekiq::Testing.fake! { described_class.new(project, user).attempt_repositories_rollback }
-
- expect(TestEnv.storage_dir_exists?(project.repository_storage, path)).to be_truthy
- expect(TestEnv.storage_dir_exists?(project.repository_storage, remove_path)).to be_falsey
- end
+ def destroy_project(project, user, params = {})
+ described_class.new(project, user, params).public_send(async ? :async_execute : :execute)
end
- def destroy_project(project, user, params = {})
- if async
- Projects::DestroyService.new(project, user, params).async_execute
- else
- Projects::DestroyService.new(project, user, params).execute
- end
+ def removal_path(path)
+ "#{path}+#{project.id}#{Repositories::DestroyService::DELETED_FLAG}"
end
end
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index e7b904fcd60..e14f1abf018 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -375,14 +375,6 @@ describe Projects::ForkService do
expect(fork_from_project.forks_count).to eq(1)
end
- it 'leaves no LFS objects dangling' do
- create(:lfs_objects_project, project: fork_to_project)
-
- expect { subject.execute(fork_to_project) }
- .to change { fork_to_project.lfs_objects_projects.count }
- .to(0)
- end
-
context 'if the fork is not allowed' do
let(:fork_from_project) { create(:project, :private) }
diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
index ab9d2bdba8f..7c7e188a12d 100644
--- a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -7,14 +7,14 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
let(:project) { create(:project, :repository, storage_version: 1, skip_disk_validation: true) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::HashedProject.new(project) }
+ let(:hashed_storage) { Storage::Hashed.new(project) }
let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
let(:file_uploader) { build(:file_uploader, project: project) }
let(:old_disk_path) { File.join(base_path(legacy_storage), upload.path) }
let(:new_disk_path) { File.join(base_path(hashed_storage), upload.path) }
- context '#execute' do
+ describe '#execute' do
context 'when succeeds' do
it 'moves attachments to hashed storage layout' do
expect(File.file?(old_disk_path)).to be_truthy
@@ -102,13 +102,13 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
end
end
- context '#old_disk_path' do
+ describe '#old_disk_path' do
it 'returns old disk_path for project' do
expect(service.old_disk_path).to eq(project.full_path)
end
end
- context '#new_disk_path' do
+ describe '#new_disk_path' do
it 'returns new disk_path for project' do
service.execute
@@ -116,7 +116,7 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
end
end
- context '#target_path_discardable?' do
+ describe '#target_path_discardable?' do
it 'returns true when it include only items on the discardable list' do
hashed_attachments_path = File.join(base_path(hashed_storage))
Projects::HashedStorage::MigrateAttachmentsService::DISCARDABLE_PATHS.each do |path_fragment|
diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
index 132b895fc35..71be335c11d 100644
--- a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
@@ -8,7 +8,7 @@ describe Projects::HashedStorage::MigrateRepositoryService do
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project) { create(:project, :legacy_storage, :repository, :wiki_repo) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::HashedProject.new(project) }
+ let(:hashed_storage) { Storage::Hashed.new(project) }
subject(:service) { described_class.new(project: project, old_disk_path: project.disk_path) }
diff --git a/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb b/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
index c2ba9626f41..54695e6e48f 100644
--- a/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
+++ b/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
@@ -7,14 +7,14 @@ describe Projects::HashedStorage::RollbackAttachmentsService do
let(:project) { create(:project, :repository, skip_disk_validation: true) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::HashedProject.new(project) }
+ let(:hashed_storage) { Storage::Hashed.new(project) }
let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
let(:file_uploader) { build(:file_uploader, project: project) }
let(:old_disk_path) { File.join(base_path(hashed_storage), upload.path) }
let(:new_disk_path) { File.join(base_path(legacy_storage), upload.path) }
- context '#execute' do
+ describe '#execute' do
context 'when succeeds' do
it 'moves attachments to legacy storage layout' do
expect(File.file?(old_disk_path)).to be_truthy
@@ -86,13 +86,13 @@ describe Projects::HashedStorage::RollbackAttachmentsService do
end
end
- context '#old_disk_path' do
+ describe '#old_disk_path' do
it 'returns old disk_path for project' do
expect(service.old_disk_path).to eq(project.disk_path)
end
end
- context '#new_disk_path' do
+ describe '#new_disk_path' do
it 'returns new disk_path for project' do
service.execute
diff --git a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
index 97c7c0af946..6dcd2ff4555 100644
--- a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
@@ -8,7 +8,7 @@ describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project) { create(:project, :repository, :wiki_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::HashedProject.new(project) }
+ let(:hashed_storage) { Storage::Hashed.new(project) }
subject(:service) { described_class.new(project: project, old_disk_path: project.disk_path) }
diff --git a/spec/services/projects/housekeeping_service_spec.rb b/spec/services/projects/housekeeping_service_spec.rb
index 60804a8dba6..98a27a71c26 100644
--- a/spec/services/projects/housekeeping_service_spec.rb
+++ b/spec/services/projects/housekeeping_service_spec.rb
@@ -75,7 +75,7 @@ describe Projects::HousekeepingService do
# At push 200
expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :gc, :the_lease_key, :the_uuid)
- .exactly(1).times
+ .once
# At push 50, 100, 150
expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :full_repack, :the_lease_key, :the_uuid)
.exactly(3).times
diff --git a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
index 970e82e7107..21a139cdf3c 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
@@ -48,10 +48,11 @@ describe Projects::LfsPointers::LfsDownloadService do
end
shared_examples 'lfs object is created' do
- it do
+ it 'creates the LFS object and associates it with the project' do
expect(subject).to receive(:download_and_save_file!).and_call_original
expect { subject.execute }.to change { LfsObject.count }.by(1)
+ expect(LfsObject.first.projects).to include(project)
end
it 'returns success result' do
diff --git a/spec/services/projects/lsif_data_service_spec.rb b/spec/services/projects/lsif_data_service_spec.rb
new file mode 100644
index 00000000000..93579869d1d
--- /dev/null
+++ b/spec/services/projects/lsif_data_service_spec.rb
@@ -0,0 +1,129 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::LsifDataService do
+ let(:artifact) { create(:ci_job_artifact, :lsif) }
+ let(:project) { build_stubbed(:project) }
+ let(:path) { 'main.go' }
+ let(:commit_id) { Digest::SHA1.hexdigest(SecureRandom.hex) }
+ let(:params) { { path: path, commit_id: commit_id } }
+
+ let(:service) { described_class.new(artifact.file, project, params) }
+
+ describe '#execute' do
+ def highlighted_value(value)
+ [{ language: 'go', value: Gitlab::Highlight.highlight(nil, value, language: 'go') }]
+ end
+
+ context 'fetched lsif file', :use_clean_rails_memory_store_caching do
+ it 'is cached' do
+ service.execute
+
+ cached_data = Rails.cache.fetch("project:#{project.id}:lsif:#{commit_id}")
+
+ expect(cached_data.keys).to eq(%w[def_refs doc_ranges docs hover_refs ranges])
+ end
+ end
+
+ context 'for main.go' do
+ let(:path_prefix) { "/#{project.full_path}/-/blob/#{commit_id}" }
+
+ it 'returns lsif ranges for the file' do
+ expect(service.execute).to eq([
+ {
+ end_char: 9,
+ end_line: 6,
+ start_char: 5,
+ start_line: 6,
+ definition_url: "#{path_prefix}/main.go#L7",
+ hover: highlighted_value('func main()')
+ },
+ {
+ end_char: 36,
+ end_line: 3,
+ start_char: 1,
+ start_line: 3,
+ definition_url: "#{path_prefix}/main.go#L4",
+ hover: highlighted_value('package "github.com/user/hello/morestrings" ("github.com/user/hello/morestrings")')
+ },
+ {
+ end_char: 12,
+ end_line: 7,
+ start_char: 1,
+ start_line: 7,
+ definition_url: "#{path_prefix}/main.go#L4",
+ hover: highlighted_value('package "github.com/user/hello/morestrings" ("github.com/user/hello/morestrings")')
+ },
+ {
+ end_char: 20,
+ end_line: 7,
+ start_char: 13,
+ start_line: 7,
+ definition_url: "#{path_prefix}/morestrings/reverse.go#L11",
+ hover: highlighted_value('func Reverse(s string) string') + [{ value: "This method reverses a string \n\n" }]
+ },
+ {
+ end_char: 12,
+ end_line: 8,
+ start_char: 1,
+ start_line: 8,
+ definition_url: "#{path_prefix}/main.go#L4",
+ hover: highlighted_value('package "github.com/user/hello/morestrings" ("github.com/user/hello/morestrings")')
+ },
+ {
+ end_char: 18,
+ end_line: 8,
+ start_char: 13,
+ start_line: 8,
+ definition_url: "#{path_prefix}/morestrings/reverse.go#L5",
+ hover: highlighted_value('func Func2(i int) string')
+ }
+ ])
+ end
+ end
+
+ context 'for morestring/reverse.go' do
+ let(:path) { 'morestrings/reverse.go' }
+
+ it 'returns lsif ranges for the file' do
+ expect(service.execute.first).to eq({
+ end_char: 2,
+ end_line: 11,
+ start_char: 1,
+ start_line: 11,
+ definition_url: "/#{project.full_path}/-/blob/#{commit_id}/morestrings/reverse.go#L12",
+ hover: highlighted_value('var a string')
+ })
+ end
+ end
+
+ context 'for an unknown file' do
+ let(:path) { 'unknown.go' }
+
+ it 'returns nil' do
+ expect(service.execute).to eq(nil)
+ end
+ end
+ end
+
+ describe '#doc_id' do
+ context 'when the passed path matches multiple files' do
+ let(:path) { 'check/main.go' }
+ let(:docs) do
+ {
+ 1 => 'cmd/check/main.go',
+ 2 => 'cmd/command.go',
+ 3 => 'check/main.go',
+ 4 => 'cmd/nested/check/main.go'
+ }
+ end
+
+ it 'fetches the document with the shortest absolute path' do
+ service.instance_variable_set(:@docs, docs)
+
+ expect(service.__send__(:doc_id)).to eq(3)
+ end
+ end
+ end
+end
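Editor's note: the final #doc_id example asserts that, when several LSIF documents end with the requested relative path, the one with the shortest absolute path wins (id 3, 'check/main.go'). A small stand-alone sketch of that selection rule (the method name and shape are illustrative, not the service's internals):

def doc_id_for(docs, requested_path)
  docs
    .select { |_id, doc_path| doc_path.end_with?(requested_path) }
    .min_by { |_id, doc_path| doc_path.length }
    &.first
end

doc_id_for(
  { 1 => 'cmd/check/main.go', 2 => 'cmd/command.go',
    3 => 'check/main.go', 4 => 'cmd/nested/check/main.go' },
  'check/main.go'
)
# => 3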
diff --git a/spec/services/projects/open_issues_count_service_spec.rb b/spec/services/projects/open_issues_count_service_spec.rb
index 04f1353c499..c1d49befeb9 100644
--- a/spec/services/projects/open_issues_count_service_spec.rb
+++ b/spec/services/projects/open_issues_count_service_spec.rb
@@ -57,7 +57,7 @@ describe Projects::OpenIssuesCountService, :use_clean_rails_memory_store_caching
end
end
- context '#refresh_cache' do
+ describe '#refresh_cache' do
before do
create(:issue, :opened, project: project)
create(:issue, :opened, project: project)
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index d20ec0b818b..182906a3337 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe Projects::Operations::UpdateService do
let_it_be(:user) { create(:user) }
- let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:project, refind: true) { create(:project) }
let(:result) { subject.execute }
@@ -145,6 +145,48 @@ describe Projects::Operations::UpdateService do
end
end
+ context 'partial_update' do
+ let(:params) do
+ {
+ error_tracking_setting_attributes: {
+ enabled: true
+ }
+ }
+ end
+
+ context 'with setting' do
+ before do
+ create(:project_error_tracking_setting, :disabled, project: project)
+ end
+
+ it 'service succeeds' do
+ expect(result[:status]).to eq(:success)
+ end
+
+ it 'updates attributes' do
+ expect { result }
+ .to change { project.reload.error_tracking_setting.enabled }
+ .from(false)
+ .to(true)
+ end
+
+ it 'only updates enabled attribute' do
+ result
+
+ expect(project.error_tracking_setting.previous_changes.keys)
+ .to contain_exactly('enabled')
+ end
+ end
+
+ context 'without setting' do
+ it 'does not create a setting' do
+ expect(result[:status]).to eq(:error)
+
+ expect(project.reload.error_tracking_setting).to be_nil
+ end
+ end
+ end
+
context 'with masked param token' do
let(:params) do
{
@@ -247,5 +289,86 @@ describe Projects::Operations::UpdateService do
end
end
end
+
+ context 'prometheus integration' do
+ context 'prometheus params were passed into service' do
+ let(:prometheus_service) do
+ build_stubbed(:prometheus_service, project: project, properties: {
+ api_url: "http://example.prometheus.com",
+ manual_configuration: "0"
+ })
+ end
+ let(:prometheus_params) do
+ {
+ "type" => "PrometheusService",
+ "title" => nil,
+ "active" => true,
+ "properties" => { "api_url" => "http://example.prometheus.com", "manual_configuration" => "0" },
+ "push_events" => true,
+ "issues_events" => true,
+ "merge_requests_events" => true,
+ "tag_push_events" => true,
+ "note_events" => true,
+ "category" => "monitoring",
+ "default" => false,
+ "wiki_page_events" => true,
+ "pipeline_events" => true,
+ "confidential_issues_events" => true,
+ "commit_events" => true,
+ "job_events" => true,
+ "confidential_note_events" => true,
+ "deployment_events" => false,
+ "description" => nil,
+ "comment_on_event_enabled" => true,
+ "template" => false
+ }
+ end
+ let(:params) do
+ {
+ prometheus_integration_attributes: {
+ api_url: 'http://new.prometheus.com',
+ manual_configuration: '1'
+ }
+ }
+ end
+
+ it 'uses Project#find_or_initialize_service to include instance defined defaults and pass them to Projects::UpdateService', :aggregate_failures do
+ project_update_service = double(Projects::UpdateService)
+ prometheus_update_params = prometheus_params.merge('properties' => {
+ 'api_url' => 'http://new.prometheus.com',
+ 'manual_configuration' => '1'
+ })
+
+ expect(project)
+ .to receive(:find_or_initialize_service)
+ .with('prometheus')
+ .and_return(prometheus_service)
+ expect(Projects::UpdateService)
+ .to receive(:new)
+ .with(project, user, { prometheus_service_attributes: prometheus_update_params })
+ .and_return(project_update_service)
+ expect(project_update_service).to receive(:execute)
+
+ subject.execute
+ end
+ end
+
+ context 'prometheus params were not passed into service' do
+ let(:params) { { something: :else } }
+
+ it 'does not pass any prometheus params into Projects::UpdateService', :aggregate_failures do
+ project_update_service = double(Projects::UpdateService)
+
+ expect(project).not_to receive(:find_or_initialize_service)
+ expect(Projects::UpdateService)
+ .to receive(:new)
+ .with(project, user, {})
+ .and_return(project_update_service)
+ expect(project_update_service).to receive(:execute)
+
+ subject.execute
+ end
+ end
+ end
end
end
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 298867f483b..fe31dafdd03 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -47,11 +47,12 @@ describe Projects::TransferService do
end
end
- it 'disk path has moved' do
+ it 'moves the disk path', :aggregate_failures do
old_path = project.repository.disk_path
old_full_path = project.repository.full_path
transfer_project(project, user, group)
+ project.reload_repository!
expect(project.repository.disk_path).not_to eq(old_path)
expect(project.repository.full_path).not_to eq(old_full_path)
@@ -298,22 +299,41 @@ describe Projects::TransferService do
end
context 'when hashed storage in use' do
- let(:hashed_project) { create(:project, :repository, namespace: user.namespace) }
+ let!(:hashed_project) { create(:project, :repository, namespace: user.namespace) }
+ let!(:old_disk_path) { hashed_project.repository.disk_path }
before do
group.add_owner(user)
end
- it 'does not move the directory' do
- old_path = hashed_project.repository.disk_path
- old_full_path = hashed_project.repository.full_path
+ it 'does not move the disk path', :aggregate_failures do
+ new_full_path = "#{group.full_path}/#{hashed_project.path}"
transfer_project(hashed_project, user, group)
- project.reload
+ hashed_project.reload_repository!
- expect(hashed_project.repository.disk_path).to eq(old_path)
- expect(hashed_project.repository.full_path).to eq(old_full_path)
- expect(hashed_project.disk_path).to eq(old_path)
+ expect(hashed_project.repository).to have_attributes(
+ disk_path: old_disk_path,
+ full_path: new_full_path
+ )
+ expect(hashed_project.disk_path).to eq(old_disk_path)
+ end
+
+ it 'does not move the disk path when the transfer fails', :aggregate_failures do
+ old_full_path = hashed_project.full_path
+
+ expect_next_instance_of(described_class) do |service|
+ allow(service).to receive(:execute_system_hooks).and_raise('foo')
+ end
+ expect { transfer_project(hashed_project, user, group) }.to raise_error('foo')
+
+ hashed_project.reload_repository!
+
+ expect(hashed_project.repository).to have_attributes(
+ disk_path: old_disk_path,
+ full_path: old_full_path
+ )
+ expect(hashed_project.disk_path).to eq(old_disk_path)
end
end
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 3092fb7116a..90fb6b932ee 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -497,6 +497,63 @@ describe Projects::UpdateService do
update_project(project, user, { name: 'New name' })
end
end
+
+ context 'when updating nested attributes for prometheus service' do
+ context 'prometheus service exists' do
+ let(:prometheus_service_attributes) do
+ attributes_for(:prometheus_service,
+ project: project,
+ properties: { api_url: "http://new.prometheus.com", manual_configuration: "0" }
+ )
+ end
+
+ let!(:prometheus_service) do
+ create(:prometheus_service,
+ project: project,
+ properties: { api_url: "http://old.prometheus.com", manual_configuration: "0" }
+ )
+ end
+
+ it 'updates existing record' do
+ expect { update_project(project, user, prometheus_service_attributes: prometheus_service_attributes) }
+ .to change { prometheus_service.reload.api_url }
+ .from("http://old.prometheus.com")
+ .to("http://new.prometheus.com")
+ end
+ end
+
+ context 'prometheus service does not exist' do
+ context 'valid parameters' do
+ let(:prometheus_service_attributes) do
+ attributes_for(:prometheus_service,
+ project: project,
+ properties: { api_url: "http://example.prometheus.com", manual_configuration: "0" }
+ )
+ end
+
+ it 'creates new record' do
+ expect { update_project(project, user, prometheus_service_attributes: prometheus_service_attributes) }
+ .to change { ::PrometheusService.where(project: project).count }
+ .from(0)
+ .to(1)
+ end
+ end
+
+ context 'invalid parameters' do
+ let(:prometheus_service_attributes) do
+ attributes_for(:prometheus_service,
+ project: project,
+ properties: { api_url: nil, manual_configuration: "1" }
+ )
+ end
+
+ it 'does not create new record' do
+ expect { update_project(project, user, prometheus_service_attributes: prometheus_service_attributes) }
+ .not_to change { ::PrometheusService.where(project: project).count }
+ end
+ end
+ end
+ end
end
describe '#run_auto_devops_pipeline?' do
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index b2576cae575..7db94d4a4ac 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -804,7 +804,7 @@ describe QuickActions::InterpretService do
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'empty command', "Failed to assign a user because no user was found." do
let(:content) { '/assign' }
let(:issuable) { issue }
end
@@ -1322,11 +1322,6 @@ describe QuickActions::InterpretService do
let(:issuable) { issue }
end
- it_behaves_like 'empty command', _('Failed to mark this issue as a duplicate because referenced issue was not found.') do
- let(:content) { '/duplicate #{issue.to_reference}' }
- let(:issuable) { issue }
- end
-
it_behaves_like 'empty command' do
let(:content) { '/lock' }
let(:issuable) { issue }
@@ -1889,7 +1884,7 @@ describe QuickActions::InterpretService do
end
end
- context "#commands_executed_count" do
+ describe "#commands_executed_count" do
it 'counts commands executed' do
content = "/close and \n/assign me and \n/title new title"
diff --git a/spec/services/repositories/destroy_rollback_service_spec.rb b/spec/services/repositories/destroy_rollback_service_spec.rb
new file mode 100644
index 00000000000..c3cdae17de7
--- /dev/null
+++ b/spec/services/repositories/destroy_rollback_service_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Repositories::DestroyRollbackService do
+ let_it_be(:user) { create(:user) }
+ let!(:project) { create(:project, :repository, namespace: user.namespace) }
+ let(:repository) { project.repository }
+ let(:path) { repository.disk_path }
+ let(:remove_path) { "#{path}+#{project.id}#{described_class::DELETED_FLAG}" }
+
+ subject { described_class.new(repository).execute }
+
+ before do
+ # Don't run Sidekiq so we can check that the renamed repository still exists
+ Sidekiq::Testing.fake! { destroy_project(project, user) }
+ end
+
+ it 'moves the repository from the +deleted folder' do
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_truthy
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
+
+ subject
+
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_falsey
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
+ end
+
+ it 'logs the successful action' do
+ expect(Gitlab::AppLogger).to receive(:info)
+
+ subject
+ end
+
+ it 'flushes the repository cache' do
+ expect(repository).to receive(:before_delete)
+
+ subject
+ end
+
+ it 'returns success and does not perform any action if repository path does not exist' do
+ expect(repository).to receive(:disk_path).and_return('foo')
+ expect(repository).not_to receive(:before_delete)
+
+ result = subject
+
+ expect(result[:status]).to eq :success
+ end
+
+ context 'when move operation cannot be performed' do
+ let(:service) { described_class.new(repository) }
+
+ before do
+ allow(service).to receive(:mv_repository).and_return(false)
+ end
+
+ it 'returns error' do
+ result = service.execute
+
+ expect(result[:status]).to eq :error
+ end
+
+ it 'logs the error' do
+ expect(Gitlab::AppLogger).to receive(:error)
+
+ service.execute
+ end
+ end
+
+ def destroy_project(project, user)
+ Projects::DestroyService.new(project, user, {}).execute
+ end
+end
diff --git a/spec/services/repositories/destroy_service_spec.rb b/spec/services/repositories/destroy_service_spec.rb
new file mode 100644
index 00000000000..9c2694483c1
--- /dev/null
+++ b/spec/services/repositories/destroy_service_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Repositories::DestroyService do
+ let_it_be(:user) { create(:user) }
+ let!(:project) { create(:project, :repository, namespace: user.namespace) }
+ let(:repository) { project.repository }
+ let(:path) { repository.disk_path }
+ let(:remove_path) { "#{path}+#{project.id}#{described_class::DELETED_FLAG}" }
+
+ subject { described_class.new(project.repository).execute }
+
+ it 'moves the repository to a +deleted folder' do
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_falsey
+
+ subject
+
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_truthy
+ end
+
+ it 'schedules the repository deletion' do
+ subject
+
+ expect(Repositories::ShellDestroyService).to receive(:new).with(repository).and_call_original
+
+ expect(GitlabShellWorker).to receive(:perform_in)
+ .with(Repositories::ShellDestroyService::REPO_REMOVAL_DELAY, :remove_repository, project.repository_storage, remove_path)
+
+ # Because GitlabShellWorker is inside a run_after_commit callback we need to
+ # trigger the callback
+ project.touch
+ end
+
+ it 'removes the repository', :sidekiq_inline do
+ subject
+
+ project.touch
+
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_falsey
+ end
+
+ it 'flushes the repository cache' do
+ expect(repository).to receive(:before_delete)
+
+ subject
+ end
+
+ it 'returns success and does not perform any action if the repository path does not exist' do
+ expect(repository).to receive(:disk_path).and_return('foo')
+ expect(repository).not_to receive(:before_delete)
+
+ result = subject
+
+ expect(result[:status]).to eq :success
+ end
+
+ context 'when move operation cannot be performed' do
+ let(:service) { described_class.new(repository) }
+
+ before do
+ allow(service).to receive(:mv_repository).and_return(false)
+ end
+
+ it 'returns error' do
+ result = service.execute
+
+ expect(result[:status]).to eq :error
+ end
+
+ it 'logs the error' do
+ expect(Gitlab::AppLogger).to receive(:error)
+
+ service.execute
+ end
+ end
+end
diff --git a/spec/services/repositories/shell_destroy_service_spec.rb b/spec/services/repositories/shell_destroy_service_spec.rb
new file mode 100644
index 00000000000..9419977f6fe
--- /dev/null
+++ b/spec/services/repositories/shell_destroy_service_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Repositories::ShellDestroyService do
+ let_it_be(:user) { create(:user) }
+ let!(:project) { create(:project, :repository, namespace: user.namespace) }
+ let(:path) { project.repository.disk_path }
+ let(:remove_path) { "#{path}+#{project.id}#{described_class::DELETED_FLAG}" }
+
+ it 'returns success if the repository is nil' do
+ expect(GitlabShellWorker).not_to receive(:perform_in)
+
+ result = described_class.new(nil).execute
+
+ expect(result[:status]).to eq :success
+ end
+
+ it 'schedules the repository deletion' do
+ expect(GitlabShellWorker).to receive(:perform_in)
+ .with(described_class::REPO_REMOVAL_DELAY, :remove_repository, project.repository_storage, remove_path)
+
+ described_class.new(project.repository).execute
+ end
+end
diff --git a/spec/services/snippets/count_service_spec.rb b/spec/services/snippets/count_service_spec.rb
new file mode 100644
index 00000000000..4137e65dcca
--- /dev/null
+++ b/spec/services/snippets/count_service_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Snippets::CountService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+
+ describe '#new' do
+ it 'raises an error if no author or project' do
+ expect { described_class.new(user) }.to raise_error(ArgumentError)
+ end
+
+ it 'uses the SnippetsFinder to scope snippets by user' do
+ expect(SnippetsFinder)
+ .to receive(:new)
+ .with(user, author: user, project: nil)
+
+ described_class.new(user, author: user)
+ end
+
+ it 'allows scoping to project' do
+ expect(SnippetsFinder)
+ .to receive(:new)
+ .with(user, author: nil, project: project)
+
+ described_class.new(user, project: project)
+ end
+ end
+
+ describe '#execute' do
+ subject { described_class.new(user, author: user).execute }
+
+ it 'returns a hash of counts' do
+ expect(subject).to match({
+ are_public: 0,
+ are_internal: 0,
+ are_private: 0,
+ are_public_or_internal: 0,
+ total: 0
+ })
+ end
+
+ it 'only counts snippets the user has access to' do
+ create(:personal_snippet, :private, author: user)
+ create(:project_snippet, :private, author: user)
+ create(:project_snippet, :private, author: create(:user))
+
+ expect(subject).to match({
+ are_public: 0,
+ are_internal: 0,
+ are_private: 1,
+ are_public_or_internal: 0,
+ total: 1
+ })
+ end
+
+ it 'returns an empty hash if select returns nil' do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:snippet_counts).and_return(nil)
+ end
+
+ expect(subject).to match({})
+ end
+ end
+end
diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb
index 6f7ce7959ff..a1cbec6748a 100644
--- a/spec/services/snippets/create_service_spec.rb
+++ b/spec/services/snippets/create_service_spec.rb
@@ -86,7 +86,7 @@ describe Snippets::CreateService do
it 'creates a new spam_log' do
expect { snippet }
- .to log_spam(title: snippet.title, noteable_type: snippet.class.name)
+ .to have_spam_log(title: snippet.title, noteable_type: snippet.class.name)
end
it 'assigns a spam_log to an issue' do
@@ -99,7 +99,7 @@ describe Snippets::CreateService do
end
before do
- expect_next_instance_of(AkismetService) do |akismet_service|
+ expect_next_instance_of(Spam::AkismetService) do |akismet_service|
expect(akismet_service).to receive_messages(spam?: true)
end
end
@@ -143,6 +143,36 @@ describe Snippets::CreateService do
end
end
+ shared_examples 'creates repository' do
+ it do
+ subject
+
+ expect(snippet.repository_exists?).to be_truthy
+ end
+
+ context 'when snippet creation fails' do
+ let(:extra_opts) { { content: nil } }
+
+ it 'does not create repository' do
+ subject
+
+ expect(snippet.repository_exists?).to be_falsey
+ end
+ end
+
+ context 'when feature flag :version_snippets is disabled' do
+ it 'does not create snippet repository' do
+ stub_feature_flags(version_snippets: false)
+
+ expect do
+ subject
+ end.to change(Snippet, :count).by(1)
+
+ expect(snippet.repository_exists?).to be_falsey
+ end
+ end
+ end
+
context 'when Project Snippet' do
let_it_be(:project) { create(:project) }
@@ -155,6 +185,7 @@ describe Snippets::CreateService do
it_behaves_like 'spam check is performed'
it_behaves_like 'snippet create data is tracked'
it_behaves_like 'an error service response when save fails'
+ it_behaves_like 'creates repository'
end
context 'when PersonalSnippet' do
@@ -165,6 +196,7 @@ describe Snippets::CreateService do
it_behaves_like 'spam check is performed'
it_behaves_like 'snippet create data is tracked'
it_behaves_like 'an error service response when save fails'
+ it_behaves_like 'creates repository'
end
end
end
diff --git a/spec/services/spam/akismet_service_spec.rb b/spec/services/spam/akismet_service_spec.rb
new file mode 100644
index 00000000000..a496cd1890e
--- /dev/null
+++ b/spec/services/spam/akismet_service_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Spam::AkismetService do
+ let(:fake_akismet_client) { double(:akismet_client) }
+
+ let_it_be(:text) { "Would you like to buy some tinned meat product?" }
+ let_it_be(:spam_owner) { create(:user) }
+
+ subject do
+ options = { ip_address: '1.2.3.4', user_agent: 'some user_agent', referrer: 'some referrer' }
+ described_class.new(spam_owner.name, spam_owner.email, text, options)
+ end
+
+ before do
+ stub_application_setting(akismet_enabled: true)
+ allow(subject).to receive(:akismet_client).and_return(fake_akismet_client)
+ end
+
+ shared_examples 'no activity if Akismet is not enabled' do |method_call|
+ before do
+ stub_application_setting(akismet_enabled: false)
+ end
+
+ it 'is automatically false' do
+ expect(subject.send(method_call)).to be_falsey
+ end
+
+ it 'performs no check' do
+ expect(fake_akismet_client).not_to receive(:public_send)
+
+ subject.send(method_call)
+ end
+ end
+
+ shared_examples 'false if Akismet is not available' do |method_call|
+ context 'if Akismet is not available' do
+ before do
+ allow(fake_akismet_client).to receive(:public_send).and_raise(StandardError.new("oh noes!"))
+ end
+
+ specify do
+ expect(subject.send(method_call)).to be_falsey
+ end
+
+ it 'logs an error' do
+ logger_spy = double(:logger)
+ expect(Rails).to receive(:logger).and_return(logger_spy)
+ expect(logger_spy).to receive(:error).with(/skipping/)
+
+ subject.send(method_call)
+ end
+ end
+ end
+
+ describe '#spam?' do
+ it_behaves_like 'no activity if Akismet is not enabled', :spam?, :check
+
+ context 'if Akismet is enabled' do
+ context 'the text is spam' do
+ before do
+ allow(fake_akismet_client).to receive(:check).and_return([true, false])
+ end
+
+ specify do
+ expect(subject.spam?).to be_truthy
+ end
+ end
+
+ context 'the text is blatant spam' do
+ before do
+ allow(fake_akismet_client).to receive(:check).and_return([false, true])
+ end
+
+ specify do
+ expect(subject.spam?).to be_truthy
+ end
+ end
+
+ context 'the text is not spam' do
+ before do
+ allow(fake_akismet_client).to receive(:check).and_return([false, false])
+ end
+
+ specify do
+ expect(subject.spam?).to be_falsey
+ end
+ end
+
+ context 'if Akismet is not available' do
+ before do
+ allow(fake_akismet_client).to receive(:check).and_raise(StandardError.new("oh noes!"))
+ end
+
+ specify do
+ expect(subject.spam?).to be_falsey
+ end
+
+ it 'logs an error' do
+ logger_spy = double(:logger)
+ expect(Rails).to receive(:logger).and_return(logger_spy)
+ expect(logger_spy).to receive(:error).with(/skipping check/)
+
+ subject.spam?
+ end
+ end
+ end
+ end
+
+ describe '#submit_ham' do
+ it_behaves_like 'no activity if Akismet is not enabled', :submit_ham
+ it_behaves_like 'false if Akismet is not available', :submit_ham
+
+ context 'if Akismet is available' do
+ specify do
+ expect(fake_akismet_client).to receive(:public_send).with(:ham, any_args)
+
+ expect(subject.submit_ham).to be_truthy
+ end
+ end
+ end
+
+ describe '#submit_spam' do
+ it_behaves_like 'no activity if Akismet is not enabled', :submit_spam
+ it_behaves_like 'false if Akismet is not available', :submit_spam
+
+ context 'if Akismet is available' do
+ specify do
+ expect(fake_akismet_client).to receive(:public_send).with(:spam, any_args)
+
+ expect(subject.submit_spam).to be_truthy
+ end
+ end
+ end
+end
diff --git a/spec/services/spam/ham_service_spec.rb b/spec/services/spam/ham_service_spec.rb
new file mode 100644
index 00000000000..9848f48def2
--- /dev/null
+++ b/spec/services/spam/ham_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Spam::HamService do
+ let_it_be(:user) { create(:user) }
+ let!(:spam_log) { create(:spam_log, user: user, submitted_as_ham: false) }
+ let(:fake_akismet_service) { double(:akismet_service) }
+
+ subject { described_class.new(spam_log) }
+
+ before do
+ allow(Spam::AkismetService).to receive(:new).and_return fake_akismet_service
+ end
+
+ describe '#execute' do
+ context 'AkismetService returns false (Akismet cannot be reached, etc)' do
+ before do
+ allow(fake_akismet_service).to receive(:submit_ham).and_return false
+ end
+
+ it 'returns false' do
+ expect(subject.execute).to be_falsey
+ end
+
+ it 'does not update the record' do
+ expect { subject.execute }.not_to change { spam_log.submitted_as_ham }
+ end
+
+ context 'if spam log record has already been marked as spam' do
+ before do
+ spam_log.update_attribute(:submitted_as_ham, true)
+ end
+
+ it 'does not update the record' do
+ expect { subject.execute }.not_to change { spam_log.submitted_as_ham }
+ end
+ end
+ end
+
+ context 'Akismet ham submission is successful' do
+ before do
+ spam_log.update_attribute(:submitted_as_ham, false)
+ allow(fake_akismet_service).to receive(:submit_ham).and_return true
+ end
+
+ it 'returns true' do
+ expect(subject.execute).to be_truthy
+ end
+
+ it 'updates the record' do
+ expect { subject.execute }.to change { spam_log.submitted_as_ham }.from(false).to(true)
+ end
+ end
+ end
+end
diff --git a/spec/services/spam/spam_check_service_spec.rb b/spec/services/spam/spam_check_service_spec.rb
new file mode 100644
index 00000000000..732b64b52a0
--- /dev/null
+++ b/spec/services/spam/spam_check_service_spec.rb
@@ -0,0 +1,153 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Spam::SpamCheckService do
+ let(:fake_ip) { '1.2.3.4' }
+ let(:fake_user_agent) { 'fake-user-agent' }
+ let(:fake_referrer) { 'fake-http-referrer' }
+ let(:env) do
+ { 'action_dispatch.remote_ip' => fake_ip,
+ 'HTTP_USER_AGENT' => fake_user_agent,
+ 'HTTP_REFERRER' => fake_referrer }
+ end
+ let(:request) { double(:request, env: env) }
+
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue, project: project, author: user) }
+
+ before do
+ issue.spam = false
+ end
+
+ describe '#initialize' do
+ subject { described_class.new(spammable: issue, request: request) }
+
+ context 'when the request is nil' do
+ let(:request) { nil }
+
+ it 'assembles the options with information from the spammable' do
+ aggregate_failures do
+ expect(subject.options[:ip_address]).to eq(issue.ip_address)
+ expect(subject.options[:user_agent]).to eq(issue.user_agent)
+ expect(subject.options.key?(:referrer)).to be_falsey
+ end
+ end
+ end
+
+ context 'when the request is present' do
+ let(:request) { double(:request, env: env) }
+
+ it 'assembles the options with information from the spammable' do
+ aggregate_failures do
+ expect(subject.options[:ip_address]).to eq(fake_ip)
+ expect(subject.options[:user_agent]).to eq(fake_user_agent)
+ expect(subject.options[:referrer]).to eq(fake_referrer)
+ end
+ end
+ end
+ end
+
+ describe '#execute' do
+ let(:request) { double(:request, env: env) }
+
+ let_it_be(:existing_spam_log) { create(:spam_log, user: user, recaptcha_verified: false) }
+
+ subject do
+ described_service = described_class.new(spammable: issue, request: request)
+ described_service.execute(user_id: user.id, api: nil, recaptcha_verified: recaptcha_verified, spam_log_id: existing_spam_log.id)
+ end
+
+ context 'when recaptcha was already verified' do
+ let(:recaptcha_verified) { true }
+
+ it "updates spam log and doesn't check Akismet" do
+ aggregate_failures do
+ expect(SpamLog).not_to receive(:create!)
+ expect(an_instance_of(described_class)).not_to receive(:check)
+ end
+
+ subject
+ end
+
+ it 'updates spam log' do
+ subject
+
+ expect(existing_spam_log.reload.recaptcha_verified).to be_truthy
+ end
+ end
+
+ context 'when recaptcha was not verified' do
+ let(:recaptcha_verified) { false }
+
+ context 'when spammable attributes have not changed' do
+ before do
+ issue.closed_at = Time.zone.now
+
+ allow(Spam::AkismetService).to receive(:new).and_return(double(spam?: true))
+ end
+
+ it 'returns false' do
+ expect(subject).to be_falsey
+ end
+
+ it 'does not create a spam log' do
+ expect { subject }
+ .not_to change { SpamLog.count }
+ end
+ end
+
+ context 'when spammable attributes have changed' do
+ before do
+ issue.description = 'SPAM!'
+ end
+
+ context 'when indicated as spam by akismet' do
+ before do
+ allow(Spam::AkismetService).to receive(:new).and_return(double(spam?: true))
+ end
+
+ context 'when allow_possible_spam feature flag is false' do
+ before do
+ stub_feature_flags(allow_possible_spam: false)
+ end
+
+ it_behaves_like 'akismet spam'
+
+ it 'checks as spam' do
+ subject
+
+ expect(issue.reload.spam).to be_truthy
+ end
+ end
+
+ context 'when allow_possible_spam feature flag is true' do
+ it_behaves_like 'akismet spam'
+
+ it 'does not check as spam' do
+ subject
+
+ expect(issue.spam).to be_falsey
+ end
+ end
+ end
+
+ context 'when not indicated as spam by akismet' do
+ before do
+ allow(Spam::AkismetService).to receive(:new).and_return(double(spam?: false))
+ end
+
+ it 'returns false' do
+ expect(subject).to be_falsey
+ end
+
+ it 'does not create a spam log' do
+ expect { subject }
+ .not_to change { SpamLog.count }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/spam_service_spec.rb b/spec/services/spam_service_spec.rb
deleted file mode 100644
index c8ebe87e4d2..00000000000
--- a/spec/services/spam_service_spec.rb
+++ /dev/null
@@ -1,111 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe SpamService do
- describe '#when_recaptcha_verified' do
- def check_spam(issue, request, recaptcha_verified)
- described_class.new(spammable: issue, request: request).when_recaptcha_verified(recaptcha_verified) do
- 'yielded'
- end
- end
-
- it 'yields block when recaptcha was already verified' do
- issue = build_stubbed(:issue)
-
- expect(check_spam(issue, nil, true)).to eql('yielded')
- end
-
- context 'when recaptcha was not verified' do
- let(:project) { create(:project, :public) }
- let(:issue) { create(:issue, project: project) }
- let(:request) { double(:request, env: {}) }
-
- context 'when spammable attributes have not changed' do
- before do
- issue.closed_at = Time.zone.now
-
- allow(AkismetService).to receive(:new).and_return(double(spam?: true))
- end
-
- it 'returns false' do
- expect(check_spam(issue, request, false)).to be_falsey
- end
-
- it 'does not create a spam log' do
- expect { check_spam(issue, request, false) }
- .not_to change { SpamLog.count }
- end
- end
-
- context 'when spammable attributes have changed' do
- before do
- issue.description = 'SPAM!'
- end
-
- context 'when indicated as spam by akismet' do
- shared_examples 'akismet spam' do
- it "doesn't check as spam when request is missing" do
- check_spam(issue, nil, false)
-
- expect(issue).not_to be_spam
- end
-
- it 'creates a spam log' do
- expect { check_spam(issue, request, false) }
- .to log_spam(title: issue.title, description: issue.description, noteable_type: 'Issue')
- end
-
- it 'does not yield to the block' do
- expect(check_spam(issue, request, false))
- .to eql(SpamLog.last)
- end
- end
-
- before do
- allow(AkismetService).to receive(:new).and_return(double(spam?: true))
- end
-
- context 'when allow_possible_spam feature flag is false' do
- before do
- stub_feature_flags(allow_possible_spam: false)
- end
-
- it_behaves_like 'akismet spam'
-
- it 'checks as spam' do
- check_spam(issue, request, false)
-
- expect(issue.spam).to be_truthy
- end
- end
-
- context 'when allow_possible_spam feature flag is true' do
- it_behaves_like 'akismet spam'
-
- it 'does not check as spam' do
- check_spam(issue, request, false)
-
- expect(issue.spam).to be_nil
- end
- end
- end
-
- context 'when not indicated as spam by akismet' do
- before do
- allow(AkismetService).to receive(:new).and_return(double(spam?: false))
- end
-
- it 'returns false' do
- expect(check_spam(issue, request, false)).to be_falsey
- end
-
- it 'does not create a spam log' do
- expect { check_spam(issue, request, false) }
- .not_to change { SpamLog.count }
- end
- end
- end
- end
- end
-end
diff --git a/spec/services/submit_usage_ping_service_spec.rb b/spec/services/submit_usage_ping_service_spec.rb
index 719b374553c..26ce5968ad6 100644
--- a/spec/services/submit_usage_ping_service_spec.rb
+++ b/spec/services/submit_usage_ping_service_spec.rb
@@ -5,6 +5,49 @@ require 'spec_helper'
describe SubmitUsagePingService do
include StubRequests
+ let(:score_params) do
+ {
+ score: {
+ leader_issues: 10.2,
+ instance_issues: 3.2,
+ percentage_issues: 31.37,
+
+ leader_notes: 25.3,
+ instance_notes: 23.2,
+
+ leader_milestones: 16.2,
+ instance_milestones: 5.5,
+
+ leader_boards: 5.2,
+ instance_boards: 3.2,
+
+ leader_merge_requests: 5.2,
+ instance_merge_requests: 3.2,
+
+ leader_ci_pipelines: 25.1,
+ instance_ci_pipelines: 21.3,
+
+ leader_environments: 3.3,
+ instance_environments: 2.2,
+
+ leader_deployments: 41.3,
+ instance_deployments: 15.2,
+
+ leader_projects_prometheus_active: 0.31,
+ instance_projects_prometheus_active: 0.30,
+
+ leader_service_desk_issues: 15.8,
+ instance_service_desk_issues: 15.1,
+
+ non_existing_column: 'value'
+ }
+ }
+ end
+
+ let(:with_dev_ops_score_params) { { dev_ops_score: score_params[:score] } }
+ let(:with_conv_index_params) { { conv_index: score_params[:score] } }
+ let(:without_dev_ops_score_params) { { dev_ops_score: {} } }
+
context 'when usage ping is disabled' do
before do
stub_application_setting(usage_ping_enabled: false)
@@ -19,13 +62,26 @@ describe SubmitUsagePingService do
end
end
+ shared_examples 'saves DevOps score data from the response' do
+ it do
+ expect { subject.execute }
+ .to change { DevOpsScore::Metric.count }
+ .by(1)
+
+ expect(DevOpsScore::Metric.last.leader_issues).to eq 10.2
+ expect(DevOpsScore::Metric.last.instance_issues).to eq 3.2
+ expect(DevOpsScore::Metric.last.percentage_issues).to eq 31.37
+ end
+ end
+
context 'when usage ping is enabled' do
before do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
stub_application_setting(usage_ping_enabled: true)
end
it 'sends a POST request' do
- response = stub_response(without_conv_index_params)
+ response = stub_response(without_dev_ops_score_params)
subject.execute
@@ -33,7 +89,7 @@ describe SubmitUsagePingService do
end
it 'refreshes usage data statistics before submitting' do
- stub_response(without_conv_index_params)
+ stub_response(without_dev_ops_score_params)
expect(Gitlab::UsageData).to receive(:to_json)
.with(force_refresh: true)
@@ -42,62 +98,21 @@ describe SubmitUsagePingService do
subject.execute
end
- it 'saves DevOps Score data from the response' do
- stub_response(with_conv_index_params)
+ context 'when conv_index data is passed' do
+ before do
+ stub_response(with_conv_index_params)
+ end
- expect { subject.execute }
- .to change { DevOpsScore::Metric.count }
- .by(1)
-
- expect(DevOpsScore::Metric.last.leader_issues).to eq 10.2
- expect(DevOpsScore::Metric.last.instance_issues).to eq 3.2
- expect(DevOpsScore::Metric.last.percentage_issues).to eq 31.37
+ it_behaves_like 'saves DevOps score data from the response'
end
- end
-
- def without_conv_index_params
- {
- conv_index: {}
- }
- end
- def with_conv_index_params
- {
- conv_index: {
- leader_issues: 10.2,
- instance_issues: 3.2,
- percentage_issues: 31.37,
-
- leader_notes: 25.3,
- instance_notes: 23.2,
-
- leader_milestones: 16.2,
- instance_milestones: 5.5,
+ context 'when DevOps score data is passed' do
+ before do
+ stub_response(with_dev_ops_score_params)
+ end
- leader_boards: 5.2,
- instance_boards: 3.2,
-
- leader_merge_requests: 5.2,
- instance_merge_requests: 3.2,
-
- leader_ci_pipelines: 25.1,
- instance_ci_pipelines: 21.3,
-
- leader_environments: 3.3,
- instance_environments: 2.2,
-
- leader_deployments: 41.3,
- instance_deployments: 15.2,
-
- leader_projects_prometheus_active: 0.31,
- instance_projects_prometheus_active: 0.30,
-
- leader_service_desk_issues: 15.8,
- instance_service_desk_issues: 15.1,
-
- non_existing_column: 'value'
- }
- }
+ it_behaves_like 'saves DevOps score data from the response'
+ end
end
def stub_response(body)
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 4ba22af85f0..3df620d1fea 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -63,6 +63,16 @@ describe SystemNoteService do
end
end
+ describe '.close_after_error_tracking_resolve' do
+ it 'calls IssuableService' do
+ expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
+ expect(service).to receive(:close_after_error_tracking_resolve)
+ end
+
+ described_class.close_after_error_tracking_resolve(noteable, project, author)
+ end
+ end
+
describe '.change_milestone' do
let(:milestone) { double }
@@ -312,9 +322,9 @@ describe SystemNoteService do
links = []
if link_exists
url = if type == 'commit'
- "#{Settings.gitlab.base_url}/#{project.namespace.path}/#{project.path}/commit/#{commit.id}"
+ "#{Settings.gitlab.base_url}/#{project.namespace.path}/#{project.path}/-/commit/#{commit.id}"
else
- "#{Settings.gitlab.base_url}/#{project.namespace.path}/#{project.path}/merge_requests/#{merge_request.iid}"
+ "#{Settings.gitlab.base_url}/#{project.namespace.path}/#{project.path}/-/merge_requests/#{merge_request.iid}"
end
link = double(object: { 'url' => url })
@@ -452,7 +462,7 @@ describe SystemNoteService do
describe "existing reference" do
before do
allow(JIRA::Resource::Remotelink).to receive(:all).and_return([])
- message = "[#{author.name}|http://localhost/#{author.username}] mentioned this issue in [a commit of #{project.full_path}|http://localhost/#{project.full_path}/commit/#{commit.id}]:\n'#{commit.title.chomp}'"
+ message = "[#{author.name}|http://localhost/#{author.username}] mentioned this issue in [a commit of #{project.full_path}|http://localhost/#{project.full_path}/-/commit/#{commit.id}]:\n'#{commit.title.chomp}'"
allow_next_instance_of(JIRA::Resource::Issue) do |instance|
allow(instance).to receive(:comments).and_return([OpenStruct.new(body: message)])
end
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index 56ef0039b63..228d69fda4e 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -630,4 +630,17 @@ describe ::SystemNotes::IssuablesService do
end
end
end
+
+ describe '#close_after_error_tracking_resolve' do
+ subject { service.close_after_error_tracking_resolve }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'closed' }
+ end
+
+ it 'creates the expected system note' do
+ expect(subject.note)
+ .to eq('resolved the corresponding error and closed the issue.')
+ end
+ end
end
diff --git a/spec/services/system_notes/merge_requests_service_spec.rb b/spec/services/system_notes/merge_requests_service_spec.rb
index 6d2473e8c03..f5c071502f5 100644
--- a/spec/services/system_notes/merge_requests_service_spec.rb
+++ b/spec/services/system_notes/merge_requests_service_spec.rb
@@ -240,4 +240,25 @@ describe ::SystemNotes::MergeRequestsService do
expect(subject.note).to eq("created merge request #{merge_request.to_reference(project)} to address this issue")
end
end
+
+ describe '.picked_into_branch' do
+ let(:branch_name) { 'a-branch' }
+ let(:commit_sha) { project.commit.sha }
+ let(:merge_request) { noteable }
+
+ subject { service.picked_into_branch(branch_name, commit_sha) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'cherry_pick' }
+ end
+
+ it "posts the 'picked merge request' system note" do
+ expect(subject.note).to eq("picked this merge request into branch [`#{branch_name}`](/#{project.full_path}/-/tree/#{branch_name}) with commit #{commit_sha}")
+ end
+
+ it 'links the merge request and the cherry-pick commit' do
+ expect(subject.noteable).to eq(merge_request)
+ expect(subject.commit_id).to eq(commit_sha)
+ end
+ end
end
diff --git a/spec/services/users/block_service_spec.rb b/spec/services/users/block_service_spec.rb
new file mode 100644
index 00000000000..c3a65a08c0d
--- /dev/null
+++ b/spec/services/users/block_service_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Users::BlockService do
+ let(:current_user) { create(:admin) }
+
+ subject(:service) { described_class.new(current_user) }
+
+ describe '#execute' do
+ subject(:operation) { service.execute(user) }
+
+ context 'when successful' do
+ let(:user) { create(:user) }
+
+ it { is_expected.to eq(status: :success) }
+
+ it "change the user's state" do
+ expect { operation }.to change { user.state }.to('blocked')
+ end
+ end
+
+ context 'when failed' do
+ let(:user) { create(:user, :blocked) }
+
+ it 'returns error result' do
+ aggregate_failures 'error result' do
+ expect(operation[:status]).to eq(:error)
+ expect(operation[:message]).to match(/State cannot transition/)
+ end
+ end
+
+ it "does not change the user's state" do
+ expect { operation }.not_to change { user.state }
+ end
+ end
+ end
+end
diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb
index d9335cef5cc..2b658a93b0a 100644
--- a/spec/services/users/destroy_service_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -26,16 +26,6 @@ describe Users::DestroyService do
service.execute(user)
end
- context 'when :destroy_user_associations_in_batches flag is disabled' do
- it 'does not delete user associations in batches' do
- stub_feature_flags(destroy_user_associations_in_batches: false)
-
- expect(user).not_to receive(:destroy_dependent_associations_in_batches)
-
- service.execute(user)
- end
- end
-
it 'will delete the project' do
expect_next_instance_of(Projects::DestroyService) do |destroy_service|
expect(destroy_service).to receive(:execute).once.and_return(true)
@@ -121,10 +111,17 @@ describe Users::DestroyService do
before do
solo_owned.group_members = [member]
- service.execute(user)
+ end
+
+ it 'returns the user with attached errors' do
+ expect(service.execute(user)).to be(user)
+ expect(user.errors.full_messages).to eq([
+ 'You must transfer ownership or delete groups before you can remove user'
+ ])
end
it 'does not delete the user' do
+ service.execute(user)
expect(User.find(user.id)).to eq user
end
end
diff --git a/spec/services/users/update_service_spec.rb b/spec/services/users/update_service_spec.rb
index 50bbb16e368..5cd6283ca96 100644
--- a/spec/services/users/update_service_spec.rb
+++ b/spec/services/users/update_service_spec.rb
@@ -55,6 +55,15 @@ describe Users::UpdateService do
expect(result[:message]).to eq("Emoji is not included in the list")
end
+ it 'ignores read-only attributes' do
+ allow(user).to receive(:read_only_attribute?).with(:name).and_return(true)
+
+ expect do
+ update_user(user, name: 'changed' + user.name)
+ user.reload
+ end.not_to change { user.name }
+ end
+
def update_user(user, opts)
described_class.new(user, opts.merge(user: user)).execute
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 6393e482904..35bf6846ab3 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -121,9 +121,12 @@ RSpec.configure do |config|
config.include ExpectRequestWithStatus, type: :request
config.include RailsHelpers
- if ENV['CI']
+ if ENV['CI'] || ENV['RETRIES']
# This includes the first try, i.e. tests will be run 4 times before failing.
- config.default_retry_count = 4
+ config.default_retry_count = ENV.fetch('RETRIES', 3).to_i + 1
+ end
+
+ if ENV['FLAKY_RSPEC_GENERATE_REPORT']
config.reporter.register_listener(
RspecFlaky::Listener.new,
:example_passed,
@@ -139,6 +142,16 @@ RSpec.configure do |config|
TestEnv.clean_test_path
end
+ # We can't use an `around` hook here because the wrapping transaction
+ # is not yet opened at the time the hook is triggered
+ config.prepend_before do
+ Gitlab::Database.set_open_transactions_baseline
+ end
+
+ config.append_after do
+ Gitlab::Database.reset_open_transactions_baseline
+ end
+
config.before do |example|
# Enable all features by default for testing
allow(Feature).to receive(:enabled?) { true }
@@ -176,12 +189,12 @@ RSpec.configure do |config|
# Stub these calls due to being expensive operations
# It can be reenabled for specific tests via:
#
- # allow(DetectRepositoryLanguagesWorker).to receive(:perform_async).and_call_original
- # allow(Gitlab::Git::KeepAround).to receive(:execute).and_call_original
- allow(DetectRepositoryLanguagesWorker).to receive(:perform_async).and_return(true)
+ # expect(Gitlab::Git::KeepAround).to receive(:execute).and_call_original
allow(Gitlab::Git::KeepAround).to receive(:execute)
+ # Clear thread cache and Sidekiq queues
Gitlab::ThreadMemoryCache.cache_backend.clear
+ Sidekiq::Worker.clear_all
# Temporary patch to force admin mode to be active by default in tests when
# using the feature flag :user_mode_in_session, since this will require
@@ -208,20 +221,23 @@ RSpec.configure do |config|
example.run if config.inclusion_filter[:quarantine]
end
- config.before(:example, :request_store) do
+ config.around(:example, :request_store) do |example|
RequestStore.begin!
- end
- config.after(:example, :request_store) do
+ example.run
+
RequestStore.end!
RequestStore.clear!
end
- config.after do
- Fog.unmock! if Fog.mock?
+ config.around do |example|
+ # Wrap each example in its own context to make sure the contexts don't
+ # leak
+ Labkit::Context.with_context { example.run }
end
config.after do
+ Fog.unmock! if Fog.mock?
Gitlab::CurrentSettings.clear_in_memory_application_settings!
end
@@ -236,90 +252,6 @@ RSpec.configure do |config|
Gitlab::Metrics.reset_registry!
end
- config.around(:each, :use_clean_rails_memory_store_caching) do |example|
- caching_store = Rails.cache
- Rails.cache = ActiveSupport::Cache::MemoryStore.new
-
- example.run
-
- Rails.cache = caching_store
- end
-
- config.around do |example|
- # Wrap each example in it's own context to make sure the contexts don't
- # leak
- Labkit::Context.with_context { example.run }
- end
-
- config.around(:each, :clean_gitlab_redis_cache) do |example|
- redis_cache_cleanup!
-
- example.run
-
- redis_cache_cleanup!
- end
-
- config.around(:each, :clean_gitlab_redis_shared_state) do |example|
- redis_shared_state_cleanup!
-
- example.run
-
- redis_shared_state_cleanup!
- end
-
- config.around(:each, :clean_gitlab_redis_queues) do |example|
- redis_queues_cleanup!
-
- example.run
-
- redis_queues_cleanup!
- end
-
- config.around(:each, :use_clean_rails_memory_store_fragment_caching) do |example|
- caching_store = ActionController::Base.cache_store
- ActionController::Base.cache_store = ActiveSupport::Cache::MemoryStore.new
- ActionController::Base.perform_caching = true
-
- example.run
-
- ActionController::Base.perform_caching = false
- ActionController::Base.cache_store = caching_store
- end
-
- config.around(:each, :use_sql_query_cache) do |example|
- ActiveRecord::Base.cache do
- example.run
- end
- end
-
- # The :each scope runs "inside" the example, so this hook ensures the DB is in the
- # correct state before any examples' before hooks are called. This prevents a
- # problem where `ScheduleIssuesClosedAtTypeChange` (or any migration that depends
- # on background migrations being run inline during test setup) can be broken by
- # altering Sidekiq behavior in an unrelated spec like so:
- #
- # around do |example|
- # Sidekiq::Testing.fake! do
- # example.run
- # end
- # end
- config.before(:context, :migration) do
- schema_migrate_down!
- end
-
- # Each example may call `migrate!`, so we must ensure we are migrated down every time
- config.before(:each, :migration) do
- use_fake_application_settings
-
- schema_migrate_down!
- end
-
- config.after(:context, :migration) do
- schema_migrate_up!
-
- Gitlab::CurrentSettings.clear_in_memory_application_settings!
- end
-
# This makes sure the `ApplicationController#can?` method is stubbed with the
# original implementation for all view specs.
config.before(:each, type: :view) do
@@ -327,60 +259,8 @@ RSpec.configure do |config|
Ability.allowed?(*args)
end
end
-
- config.before(:each, :http_pages_enabled) do |_|
- allow(Gitlab.config.pages).to receive(:external_http).and_return(['1.1.1.1:80'])
- end
-
- config.before(:each, :https_pages_enabled) do |_|
- allow(Gitlab.config.pages).to receive(:external_https).and_return(['1.1.1.1:443'])
- end
-
- config.before(:each, :http_pages_disabled) do |_|
- allow(Gitlab.config.pages).to receive(:external_http).and_return(false)
- end
-
- config.before(:each, :https_pages_disabled) do |_|
- allow(Gitlab.config.pages).to receive(:external_https).and_return(false)
- end
-
- # We can't use an `around` hook here because the wrapping transaction
- # is not yet opened at the time that is triggered
- config.prepend_before do
- Gitlab::Database.set_open_transactions_baseline
- end
-
- config.append_after do
- Gitlab::Database.reset_open_transactions_baseline
- end
end
-# add simpler way to match asset paths containing digest strings
-RSpec::Matchers.define :match_asset_path do |expected|
- match do |actual|
- path = Regexp.escape(expected)
- extname = Regexp.escape(File.extname(expected))
- digest_regex = Regexp.new(path.sub(extname, "(?:-\\h+)?#{extname}") << '$')
- digest_regex =~ actual
- end
-
- failure_message do |actual|
- "expected that #{actual} would include an asset path for #{expected}"
- end
-
- failure_message_when_negated do |actual|
- "expected that #{actual} would not include an asset path for #{expected}"
- end
-end
-
-FactoryBot::SyntaxRunner.class_eval do
- include RSpec::Mocks::ExampleMethods
-end
-
-# Use FactoryBot 4.x behavior:
-# https://github.com/thoughtbot/factory_bot/blob/master/GETTING_STARTED.md#associations
-FactoryBot.use_parent_strategy = false
-
ActiveRecord::Migration.maintain_test_schema!
Shoulda::Matchers.configure do |config|
diff --git a/spec/support/banzai/reference_filter_shared_examples.rb b/spec/support/banzai/reference_filter_shared_examples.rb
index 27765652f28..0046d931e7d 100644
--- a/spec/support/banzai/reference_filter_shared_examples.rb
+++ b/spec/support/banzai/reference_filter_shared_examples.rb
@@ -4,7 +4,7 @@
#
# Requires a reference:
# let(:reference) { '#42' }
-shared_examples 'a reference containing an element node' do
+RSpec.shared_examples 'a reference containing an element node' do
let(:inner_html) { 'element <code>node</code> inside' }
let(:reference_with_element) { %{<a href="#{reference}">#{inner_html}</a>} }
@@ -18,7 +18,7 @@ end
# subject { create(:user) }
# let(:reference) { subject.to_reference }
# let(:subject_name) { 'user' }
-shared_examples 'user reference or project reference' do
+RSpec.shared_examples 'user reference or project reference' do
shared_examples 'it contains a data- attribute' do
it 'includes a data- attribute' do
doc = reference_filter("Hey #{reference}")
diff --git a/spec/support/caching.rb b/spec/support/caching.rb
new file mode 100644
index 00000000000..ecbe65f7e97
--- /dev/null
+++ b/spec/support/caching.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+RSpec.configure do |config|
+ config.around(:each, :use_clean_rails_memory_store_caching) do |example|
+ caching_store = Rails.cache
+ Rails.cache = ActiveSupport::Cache::MemoryStore.new
+
+ example.run
+
+ Rails.cache = caching_store
+ end
+
+ config.around(:each, :use_clean_rails_memory_store_fragment_caching) do |example|
+ caching_store = ActionController::Base.cache_store
+ ActionController::Base.cache_store = ActiveSupport::Cache::MemoryStore.new
+ ActionController::Base.perform_caching = true
+
+ example.run
+
+ ActionController::Base.perform_caching = false
+ ActionController::Base.cache_store = caching_store
+ end
+
+ config.around(:each, :use_sql_query_cache) do |example|
+ ActiveRecord::Base.cache do
+ example.run
+ end
+ end
+end
diff --git a/spec/support/controllers/project_import_rate_limiter_shared_examples.rb b/spec/support/controllers/project_import_rate_limiter_shared_examples.rb
new file mode 100644
index 00000000000..9a543dd00d4
--- /dev/null
+++ b/spec/support/controllers/project_import_rate_limiter_shared_examples.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+shared_examples 'project import rate limiter' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when the limit is exceeded' do
+ before do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
+ end
+
+ it 'notifies and redirects user' do
+ post :create, params: {}
+
+ expect(flash[:alert]).to eq('This endpoint has been requested too many times. Try again later.')
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ end
+end
diff --git a/spec/support/db_cleaner.rb b/spec/support/db_cleaner.rb
index 5da707b11f9..77e1f6bcaa3 100644
--- a/spec/support/db_cleaner.rb
+++ b/spec/support/db_cleaner.rb
@@ -1,7 +1,9 @@
# frozen_string_literal: true
module DbCleaner
- def delete_from_all_tables!(except: nil)
+ def delete_from_all_tables!(except: [])
+ except << 'ar_internal_metadata'
+
DatabaseCleaner.clean_with(:deletion, cache_tables: false, except: except)
end
diff --git a/spec/support/dns.rb b/spec/support/dns.rb
new file mode 100644
index 00000000000..3e5c8e698e1
--- /dev/null
+++ b/spec/support/dns.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require Rails.root.join("spec/support/helpers/dns_helpers")
+
+RSpec.configure do |config|
+ config.include DnsHelpers
+
+ config.before do
+ block_dns!
+ end
+
+ config.before(:each, :permit_dns) do
+ permit_dns!
+ end
+
+ config.before(:each, :stub_invalid_dns_only) do
+ permit_dns!
+ stub_invalid_dns!
+ end
+end
diff --git a/spec/support/factory_bot.rb b/spec/support/factory_bot.rb
new file mode 100644
index 00000000000..a86161bfded
--- /dev/null
+++ b/spec/support/factory_bot.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot::SyntaxRunner.class_eval do
+ include RSpec::Mocks::ExampleMethods
+end
+
+# Use FactoryBot 4.x behavior:
+# https://github.com/thoughtbot/factory_bot/blob/master/GETTING_STARTED.md#associations
+FactoryBot.use_parent_strategy = false
diff --git a/spec/support/helpers/api_helpers.rb b/spec/support/helpers/api_helpers.rb
index aff0f87b6e4..dc263d64bcc 100644
--- a/spec/support/helpers/api_helpers.rb
+++ b/spec/support/helpers/api_helpers.rb
@@ -40,10 +40,32 @@ module ApiHelpers
end
end
+ def expect_empty_array_response
+ expect_successful_response_with_paginated_array
+ expect(json_response.length).to eq(0)
+ end
+
+ def expect_successful_response_with_paginated_array
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ end
+
def expect_paginated_array_response(items)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |item| item['id'] }).to eq(Array(items))
end
+
+ def expect_response_contain_exactly(*items)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(items.size)
+ expect(json_response.map { |item| item['id'] }).to contain_exactly(*items)
+ end
+
+ def stub_last_activity_update
+ allow_any_instance_of(Users::ActivityService).to receive(:execute)
+ end
end
diff --git a/spec/support/helpers/controller_helpers.rb b/spec/support/helpers/controller_helpers.rb
new file mode 100644
index 00000000000..8f5ef8c9696
--- /dev/null
+++ b/spec/support/helpers/controller_helpers.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module ControllerHelpers
+ # It seems Devise::Test::ControllerHelpers#sign_in doesn't clear out the @current_user
+ # variable of the controller, hence it's not overwritten on retries.
+ # This should be fixed in Devise:
+ # - https://github.com/heartcombo/devise/issues/5190
+ # - https://github.com/heartcombo/devise/pull/5191
+ def sign_in(resource, deprecated = nil, scope: nil)
+ super
+
+ scope ||= Devise::Mapping.find_scope!(resource)
+
+ @controller.instance_variable_set(:"@current_#{scope}", nil)
+ end
+end
+
+Devise::Test::ControllerHelpers.prepend(ControllerHelpers)
diff --git a/spec/support/helpers/create_environments_helpers.rb b/spec/support/helpers/create_environments_helpers.rb
new file mode 100644
index 00000000000..be105f5862b
--- /dev/null
+++ b/spec/support/helpers/create_environments_helpers.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module CreateEnvironmentsHelpers
+ def create_review_app(user, project, ref)
+ common = { project: project, ref: ref, user: user }
+ pipeline = create(:ci_pipeline, **common)
+ start_review = create(:ci_build, :start_review_app, :success, **common, pipeline: pipeline)
+ stop_review = create(:ci_build, :stop_review_app, :manual, **common, pipeline: pipeline)
+ environment = create(:environment, :auto_stoppable, project: project, name: ref)
+ create(:deployment, :success, **common, on_stop: stop_review.name,
+ deployable: start_review, environment: environment)
+ end
+end
diff --git a/spec/support/helpers/dns_helpers.rb b/spec/support/helpers/dns_helpers.rb
new file mode 100644
index 00000000000..29be4da6902
--- /dev/null
+++ b/spec/support/helpers/dns_helpers.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module DnsHelpers
+ def block_dns!
+ stub_all_dns!
+ stub_invalid_dns!
+ permit_local_dns!
+ end
+
+ def permit_dns!
+ allow(Addrinfo).to receive(:getaddrinfo).and_call_original
+ end
+
+ def stub_all_dns!
+ allow(Addrinfo).to receive(:getaddrinfo).with(anything, anything, nil, :STREAM).and_return([])
+ allow(Addrinfo).to receive(:getaddrinfo).with(anything, anything, nil, :STREAM, anything, anything).and_return([])
+ end
+
+ def stub_invalid_dns!
+ allow(Addrinfo).to receive(:getaddrinfo).with(/\Afoobar\.\w|(\d{1,3}\.){4,}\d{1,3}\z/i, anything, nil, :STREAM) do
+ raise SocketError.new("getaddrinfo: Name or service not known")
+ end
+ end
+
+ def permit_local_dns!
+ local_addresses = /\A(127|10)\.0\.0\.\d{1,3}|(192\.168|172\.16)\.\d{1,3}\.\d{1,3}|0\.0\.0\.0|localhost\z/i
+ allow(Addrinfo).to receive(:getaddrinfo).with(local_addresses, anything, nil, :STREAM).and_call_original
+ allow(Addrinfo).to receive(:getaddrinfo).with(local_addresses, anything, nil, :STREAM, anything, anything).and_call_original
+ end
+end
diff --git a/spec/support/helpers/email_helpers.rb b/spec/support/helpers/email_helpers.rb
index 024340310a1..6df33e68629 100644
--- a/spec/support/helpers/email_helpers.rb
+++ b/spec/support/helpers/email_helpers.rb
@@ -6,7 +6,12 @@ module EmailHelpers
end
def reset_delivered_emails!
+ # We shouldn't actually send the emails, but we keep the following line for
+ # back-compatibility until we only check the mailer jobs enqueued in Sidekiq
ActionMailer::Base.deliveries.clear
+ # We should only check that the mailer jobs are enqueued in Sidekiq, hence
+ # clearing the background jobs queue
+ ActiveJob::Base.queue_adapter.enqueued_jobs.clear
end
def should_only_email(*users, kind: :to)
diff --git a/spec/support/helpers/fake_blob_helpers.rb b/spec/support/helpers/fake_blob_helpers.rb
index ef4740638ff..a7eafb0fd23 100644
--- a/spec/support/helpers/fake_blob_helpers.rb
+++ b/spec/support/helpers/fake_blob_helpers.rb
@@ -37,6 +37,8 @@ module FakeBlobHelpers
end
def fake_blob(**kwargs)
- Blob.decorate(FakeBlob.new(**kwargs), project)
+ container = kwargs.delete(:container) || project
+
+ Blob.decorate(FakeBlob.new(**kwargs), container)
end
end
diff --git a/spec/support/helpers/filter_spec_helper.rb b/spec/support/helpers/filter_spec_helper.rb
index 45d49696e06..c165128040f 100644
--- a/spec/support/helpers/filter_spec_helper.rb
+++ b/spec/support/helpers/filter_spec_helper.rb
@@ -15,7 +15,7 @@ module FilterSpecHelper
# context - Hash context for the filter. (default: {project: project})
#
# Returns a Nokogiri::XML::DocumentFragment
- def filter(html, context = {})
+ def filter(html, context = {}, result = nil)
if defined?(project)
context.reverse_merge!(project: project)
end
@@ -25,7 +25,7 @@ module FilterSpecHelper
context = context.merge(render_context: render_context)
- described_class.call(html, context)
+ described_class.call(html, context, result)
end
# Get an instance of the Filter class
@@ -33,12 +33,15 @@ module FilterSpecHelper
# Use this for testing instance methods, but remember to test the result of
# the full pipeline by calling #call using the other methods in this helper.
def filter_instance
- render_context = Banzai::RenderContext.new(project, current_user)
context = { project: project, current_user: current_user, render_context: render_context }
described_class.new(input_text, context)
end
+ def render_context
+ Banzai::RenderContext.new(project, current_user)
+ end
+
# Run text through HTML::Pipeline with the current filter and return the
# result Hash
#
@@ -55,12 +58,16 @@ module FilterSpecHelper
def reference_pipeline(context = {})
context.reverse_merge!(project: project) if defined?(project)
+ context.reverse_merge!(current_user: current_user) if defined?(current_user)
filters = [
Banzai::Filter::AutolinkFilter,
described_class
]
+ redact = context.delete(:redact)
+ filters.push(Banzai::Filter::ReferenceRedactorFilter) if redact
+
HTML::Pipeline.new(filters, context)
end
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 6d9c27d0255..35b1b802f35 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -16,6 +16,20 @@ module GraphqlHelpers
resolver_class.new(object: obj, context: ctx).resolve(args)
end
+ # Eagerly run a loader's named resolver
+ # (syncs any lazy values returned by resolve)
+ def eager_resolve(resolver_class, **opts)
+ sync(resolve(resolver_class, **opts))
+ end
+
+ def sync(value)
+ if GitlabSchema.lazy?(value)
+ GitlabSchema.sync_lazy(value)
+ else
+ value
+ end
+ end
+
# Runs a block inside a BatchLoader::Executor wrapper
def batch(max_queries: nil, &blk)
wrapper = proc do
@@ -39,7 +53,7 @@ module GraphqlHelpers
def batch_sync(max_queries: nil, &blk)
wrapper = proc do
lazy_vals = yield
- lazy_vals.is_a?(Array) ? lazy_vals.map(&:sync) : lazy_vals&.sync
+ lazy_vals.is_a?(Array) ? lazy_vals.map { |val| sync(val) } : sync(lazy_vals)
end
batch(max_queries: max_queries, &wrapper)
@@ -136,6 +150,7 @@ module GraphqlHelpers
allow_unlimited_graphql_complexity
allow_unlimited_graphql_depth
allow_high_graphql_recursion
+ allow_high_graphql_transaction_threshold
type = GitlabSchema.types[class_name.to_s]
return "" unless type
@@ -163,16 +178,27 @@ module GraphqlHelpers
def attributes_to_graphql(attributes)
attributes.map do |name, value|
- value_str = if value.is_a?(Array)
- '["' + value.join('","') + '"]'
- else
- "\"#{value}\""
- end
+ value_str = as_graphql_literal(value)
"#{GraphqlHelpers.fieldnamerize(name.to_s)}: #{value_str}"
end.join(", ")
end
+ # Fairly dumb Ruby => GraphQL rendering function. Only suitable for testing.
+ # Use symbols for Enum values
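+ # e.g. as_graphql_literal(['foo', 1, :BAR]) # => '["foo",1,BAR]' (illustrative)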
+ def as_graphql_literal(value)
+ case value
+ when Array then "[#{value.map { |v| as_graphql_literal(v) }.join(',')}]"
+ when Integer, Float then value.to_s
+ when String then "\"#{value.gsub(/"/, '\\"')}\""
+ when Symbol then value
+ when nil then 'null'
+ when true then 'true'
+ when false then 'false'
+ else raise ArgumentError, "Cannot represent #{value} as GraphQL literal"
+ end
+ end
+
def post_multiplex(queries, current_user: nil, headers: {})
post api('/', current_user, version: 'graphql'), params: { _json: queries }, headers: headers
end
@@ -215,6 +241,11 @@ module GraphqlHelpers
json_response['data'] || (raise NoData, graphql_errors)
end
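+
+ # Digs into graphql_data after camel-casing each path segment.
+ # Example (illustrative): graphql_data_at(:merge_request, :pipelines)
+ # is equivalent to graphql_data.dig('mergeRequest', 'pipelines').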
+ def graphql_data_at(*path)
+ keys = path.map { |segment| GraphqlHelpers.fieldnamerize(segment) }
+ graphql_data.dig(*keys)
+ end
+
def graphql_errors
case json_response
when Hash # regular query
@@ -295,6 +326,10 @@ module GraphqlHelpers
allow_any_instance_of(Gitlab::Graphql::QueryAnalyzers::RecursionAnalyzer).to receive(:recursion_threshold).and_return 1000
end
+ def allow_high_graphql_transaction_threshold
+ stub_const("Gitlab::QueryLimiting::Transaction::THRESHOLD", 1000)
+ end
+
def node_array(data, extract_attribute = nil)
data.map do |item|
extract_attribute ? item['node'][extract_attribute] : item['node']
@@ -315,6 +350,39 @@ module GraphqlHelpers
def custom_graphql_error(path, msg)
a_hash_including('path' => path, 'message' => msg)
end
+
+ def type_factory
+ Class.new(Types::BaseObject) do
+ graphql_name 'TestType'
+
+ field :name, GraphQL::STRING_TYPE, null: true
+
+ yield(self) if block_given?
+ end
+ end
+
+ def query_factory
+ Class.new(Types::BaseObject) do
+ graphql_name 'TestQuery'
+
+ yield(self) if block_given?
+ end
+ end
+
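+ # Runs `query_string` against a throwaway schema built around `query_type`.
+ # NOTE: assumes the including spec defines `query_string` and `user`
+ # (for example via `let`), since both are referenced below.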
+ def execute_query(query_type)
+ schema = Class.new(GraphQL::Schema) do
+ use Gitlab::Graphql::Authorize
+ use Gitlab::Graphql::Connections
+
+ query(query_type)
+ end
+
+ schema.execute(
+ query_string,
+ context: { current_user: user },
+ variables: {}
+ )
+ end
end
# This warms our schema, doing this as part of loading the helpers to avoid
diff --git a/spec/support/helpers/javascript_fixtures_helpers.rb b/spec/support/helpers/javascript_fixtures_helpers.rb
index fd5ad9451f7..4f11f8c6b24 100644
--- a/spec/support/helpers/javascript_fixtures_helpers.rb
+++ b/spec/support/helpers/javascript_fixtures_helpers.rb
@@ -62,7 +62,7 @@ module JavaScriptFixturesHelpers
fixture = response.body
fixture.force_encoding("utf-8")
- response_mime_type = Mime::Type.lookup(response.content_type)
+ response_mime_type = Mime::Type.lookup(response.media_type)
if response_mime_type.html?
doc = Nokogiri::HTML::DocumentFragment.parse(fixture)
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index 89360b55de2..427948bda96 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -27,7 +27,10 @@ module KubernetesHelpers
WebMock.stub_request(:get, api_url + '/api/v1').to_return(kube_response(kube_v1_discovery_body))
WebMock
.stub_request(:get, api_url + '/apis/extensions/v1beta1')
- .to_return(kube_response(kube_v1beta1_discovery_body))
+ .to_return(kube_response(kube_extensions_v1beta1_discovery_body))
+ WebMock
+ .stub_request(:get, api_url + '/apis/apps/v1')
+ .to_return(kube_response(kube_apps_v1_discovery_body))
WebMock
.stub_request(:get, api_url + '/apis/rbac.authorization.k8s.io/v1')
.to_return(kube_response(kube_v1_rbac_authorization_discovery_body))
@@ -57,6 +60,12 @@ module KubernetesHelpers
.to_return(status: [404, "Resource Not Found"])
end
+ def stub_kubeclient_discover_knative_found(api_url)
+ WebMock
+ .stub_request(:get, api_url + '/apis/serving.knative.dev/v1alpha1')
+ .to_return(kube_response(kube_knative_discovery_body))
+ end
+
def stub_kubeclient_service_pods(response = nil, options = {})
stub_kubeclient_discover(service.api_url)
@@ -92,7 +101,7 @@ module KubernetesHelpers
end
logs_url = service.api_url + "/api/v1/namespaces/#{namespace}/pods/#{pod_name}" \
- "/log?#{container_query_param}tailLines=#{Clusters::Platforms::Kubernetes::LOGS_LIMIT}&timestamps=true"
+ "/log?#{container_query_param}tailLines=#{::PodLogs::KubernetesService::LOGS_LIMIT}&timestamps=true"
if status
response = { status: status }
@@ -275,15 +284,40 @@ module KubernetesHelpers
}
end
- def kube_v1beta1_discovery_body
+ # From Kubernetes 1.16+, Deployments are no longer served from apis/extensions
+ def kube_1_16_extensions_v1beta1_discovery_body
+ {
+ "kind" => "APIResourceList",
+ "resources" => [
+ { "name" => "ingresses", "namespaced" => true, "kind" => "Deployment" }
+ ]
+ }
+ end
+
+ def kube_knative_discovery_body
+ {
+ "kind" => "APIResourceList",
+ "resources" => []
+ }
+ end
+
+ def kube_extensions_v1beta1_discovery_body
{
"kind" => "APIResourceList",
"resources" => [
- { "name" => "pods", "namespaced" => true, "kind" => "Pod" },
{ "name" => "deployments", "namespaced" => true, "kind" => "Deployment" },
- { "name" => "secrets", "namespaced" => true, "kind" => "Secret" },
- { "name" => "serviceaccounts", "namespaced" => true, "kind" => "ServiceAccount" },
- { "name" => "services", "namespaced" => true, "kind" => "Service" }
+ { "name" => "ingresses", "namespaced" => true, "kind" => "Ingress" }
+ ]
+ }
+ end
+
+ # Yes, Deployments are defined in both apis/extensions/v1beta1 and apis/apps/v1
+ # (for Kubernetes < 1.16). This matches what the Kubernetes API server returns.
+ def kube_apps_v1_discovery_body
+ {
+ "kind" => "APIResourceList",
+ "resources" => [
+ { "name" => "deployments", "namespaced" => true, "kind" => "Deployment" }
]
}
end
@@ -460,7 +494,7 @@ module KubernetesHelpers
"metadata" => {
"name" => name,
"namespace" => namespace,
- "generate_name" => "generated-name-with-suffix",
+ "generateName" => "generated-name-with-suffix",
"creationTimestamp" => "2016-11-25T19:55:19Z",
"annotations" => {
"app.gitlab.com/env" => environment_slug,
@@ -486,7 +520,7 @@ module KubernetesHelpers
"metadata" => {
"name" => name,
"namespace" => namespace,
- "generate_name" => "generated-name-with-suffix",
+ "generateName" => "generated-name-with-suffix",
"creationTimestamp" => "2016-11-25T19:55:19Z",
"labels" => {
"serving.knative.dev/service" => name
@@ -517,16 +551,13 @@ module KubernetesHelpers
},
"spec" => { "replicas" => 3 },
"status" => {
- "observedGeneration" => 4,
- "replicas" => 3,
- "updatedReplicas" => 3,
- "availableReplicas" => 3
+ "observedGeneration" => 4
}
}
end
# noinspection RubyStringKeysInHashInspection
- def knative_06_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production')
+ def knative_06_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production', cluster_id: 9)
{ "apiVersion" => "serving.knative.dev/v1alpha1",
"kind" => "Service",
"metadata" =>
@@ -581,12 +612,12 @@ module KubernetesHelpers
"url" => "http://#{name}.#{namespace}.#{domain}"
},
"environment_scope" => environment,
- "cluster_id" => 9,
+ "cluster_id" => cluster_id,
"podcount" => 0 }
end
# noinspection RubyStringKeysInHashInspection
- def knative_07_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production')
+ def knative_07_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production', cluster_id: 5)
{ "apiVersion" => "serving.knative.dev/v1alpha1",
"kind" => "Service",
"metadata" =>
@@ -633,12 +664,12 @@ module KubernetesHelpers
"traffic" => [{ "latestRevision" => true, "percent" => 100, "revisionName" => "#{name}-92tsj" }],
"url" => "http://#{name}.#{namespace}.#{domain}" },
"environment_scope" => environment,
- "cluster_id" => 5,
+ "cluster_id" => cluster_id,
"podcount" => 0 }
end
# noinspection RubyStringKeysInHashInspection
- def knative_09_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production')
+ def knative_09_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production', cluster_id: 5)
{ "apiVersion" => "serving.knative.dev/v1alpha1",
"kind" => "Service",
"metadata" =>
@@ -685,12 +716,12 @@ module KubernetesHelpers
"traffic" => [{ "latestRevision" => true, "percent" => 100, "revisionName" => "#{name}-92tsj" }],
"url" => "http://#{name}.#{namespace}.#{domain}" },
"environment_scope" => environment,
- "cluster_id" => 5,
+ "cluster_id" => cluster_id,
"podcount" => 0 }
end
# noinspection RubyStringKeysInHashInspection
- def knative_05_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production')
+ def knative_05_service(name: 'kubetest', namespace: 'default', domain: 'example.com', description: 'a knative service', environment: 'production', cluster_id: 8)
{ "apiVersion" => "serving.knative.dev/v1alpha1",
"kind" => "Service",
"metadata" =>
@@ -740,7 +771,7 @@ module KubernetesHelpers
"observedGeneration" => 1,
"traffic" => [{ "percent" => 100, "revisionName" => "#{name}-58qgr" }] },
"environment_scope" => environment,
- "cluster_id" => 8,
+ "cluster_id" => cluster_id,
"podcount" => 0 }
end
diff --git a/spec/support/helpers/metrics_dashboard_helpers.rb b/spec/support/helpers/metrics_dashboard_helpers.rb
index 908a3e1fb09..b8a641d5911 100644
--- a/spec/support/helpers/metrics_dashboard_helpers.rb
+++ b/spec/support/helpers/metrics_dashboard_helpers.rb
@@ -29,4 +29,8 @@ module MetricsDashboardHelpers
def business_metric_title
PrometheusMetricEnums.group_details[:business][:group_title]
end
+
+ def self_monitoring_dashboard_path
+ Metrics::Dashboard::SelfMonitoringDashboardService::DASHBOARD_PATH
+ end
end
diff --git a/spec/support/helpers/migrations_helpers.rb b/spec/support/helpers/migrations_helpers.rb
index 68f71494771..5eb70f534d8 100644
--- a/spec/support/helpers/migrations_helpers.rb
+++ b/spec/support/helpers/migrations_helpers.rb
@@ -21,7 +21,7 @@ module MigrationsHelpers
end
def migration_context
- ActiveRecord::MigrationContext.new(migrations_paths)
+ ActiveRecord::MigrationContext.new(migrations_paths, ActiveRecord::SchemaMigration)
end
def migrations
diff --git a/spec/support/helpers/notification_helpers.rb b/spec/support/helpers/notification_helpers.rb
index 16ecb338f6e..aee76b8be4a 100644
--- a/spec/support/helpers/notification_helpers.rb
+++ b/spec/support/helpers/notification_helpers.rb
@@ -36,4 +36,28 @@ module NotificationHelpers
setting = user.notification_settings_for(resource)
setting.update!(event => value)
end
+
+ def expect_delivery_jobs_count(count)
+ expect(ActionMailer::DeliveryJob).to have_been_enqueued.exactly(count).times
+ end
+
+ def expect_no_delivery_jobs
+ expect(ActionMailer::DeliveryJob).not_to have_been_enqueued
+ end
+
+ def expect_any_delivery_jobs
+ expect(ActionMailer::DeliveryJob).to have_been_enqueued.at_least(:once)
+ end
+
+ def have_enqueued_email(*args, mailer: "Notify", mail: "", delivery: "deliver_now")
+ have_enqueued_job(ActionMailer::DeliveryJob).with(mailer, mail, delivery, *args)
+ end
+
+ def expect_enqueud_email(*args, mailer: "Notify", mail: "", delivery: "deliver_now")
+ expect(ActionMailer::DeliveryJob).to have_been_enqueued.with(mailer, mail, delivery, *args)
+ end
+
+ def expect_not_enqueud_email(*args, mailer: "Notify", mail: "", delivery: "deliver_now")
+ expect(ActionMailer::DeliveryJob).not_to have_been_enqueued.with(mailer, mail, *args, any_args)
+ end
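+
+ # Illustrative usage (hypothetical trigger and mailer arguments):
+ #   expect { trigger_notification }.to have_enqueued_email(user.id, mail: "new_user_email")
+ #   expect_delivery_jobs_count(1)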
end
diff --git a/spec/support/helpers/query_recorder.rb b/spec/support/helpers/query_recorder.rb
index 1d04014c9a6..fd200a1abf3 100644
--- a/spec/support/helpers/query_recorder.rb
+++ b/spec/support/helpers/query_recorder.rb
@@ -2,12 +2,15 @@
module ActiveRecord
class QueryRecorder
- attr_reader :log, :skip_cached, :cached
+ attr_reader :log, :skip_cached, :cached, :data
+ UNKNOWN = %w(unknown unknown).freeze
- def initialize(skip_cached: true, &block)
+ def initialize(skip_cached: true, query_recorder_debug: false, &block)
+ @data = Hash.new { |h, k| h[k] = { count: 0, occurrences: [], backtrace: [] } }
@log = []
@cached = []
@skip_cached = skip_cached
+ @query_recorder_debug = query_recorder_debug
# force replacement of bind parameters to give tests the ability to check for ids
ActiveRecord::Base.connection.unprepared_statement do
ActiveSupport::Notifications.subscribed(method(:callback), 'sql.active_record', &block)
@@ -19,30 +22,62 @@ module ActiveRecord
Gitlab::BacktraceCleaner.clean_backtrace(caller).each { |line| Rails.logger.debug(" --> #{line}") }
end
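+
+ # Extracts the source location from a marginalia-style SQL comment, e.g.
+ # "SELECT 1 /*application:test,line:/app/models/user.rb:125:in `active'*/"
+ # would yield ["/app/models/user.rb:125", "active"] (illustrative values).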
+ def get_sql_source(sql)
+ matches = sql.match(/,line:(?<line>.*):in\s+`(?<method>.*)'\*\//)
+ matches ? [matches[:line], matches[:method]] : UNKNOWN
+ end
+
+ def store_sql_by_source(values: {}, backtrace: nil)
+ full_name = get_sql_source(values[:sql]).join(':')
+ @data[full_name][:count] += 1
+ @data[full_name][:occurrences] << values[:sql]
+ @data[full_name][:backtrace] << backtrace
+ end
+
+ def find_query(query_regexp, limit, first_only: false)
+ out = []
+
+ @data.each_pair do |k, v|
+ if v[:count] > limit && k.match(query_regexp)
+ out << [k, v[:count]]
+ break if first_only
+ end
+ end
+
+ out.flatten! if first_only
+ out
+ end
+
+ def occurrences_by_line_method
+ @occurrences_by_line_method ||= @data.sort_by { |_, v| v[:count] }
+ end
+
def callback(name, start, finish, message_id, values)
- show_backtrace(values) if ENV['QUERY_RECORDER_DEBUG']
+ store_backtrace = ENV['QUERY_RECORDER_DEBUG'] || @query_recorder_debug
+ backtrace = store_backtrace ? show_backtrace(values) : nil
if values[:cached] && skip_cached
@cached << values[:sql]
elsif !values[:name]&.include?("SCHEMA")
@log << values[:sql]
+ store_sql_by_source(values: values, backtrace: backtrace)
end
end
def count
- @log.count
+ @count ||= @log.count
end
def cached_count
- @cached.count
+ @cached_count ||= @cached.count
end
def log_message
- @log.join("\n\n")
+ @log_message ||= @log.join("\n\n")
end
def occurrences
- @log.group_by(&:to_s).transform_values(&:count)
+ @occurrences ||= @log.group_by(&:to_s).transform_values(&:count)
end
end
end
diff --git a/spec/support/helpers/rack_attack_spec_helpers.rb b/spec/support/helpers/rack_attack_spec_helpers.rb
index 234271ba1c0..e0cedb5a57b 100644
--- a/spec/support/helpers/rack_attack_spec_helpers.rb
+++ b/spec/support/helpers/rack_attack_spec_helpers.rb
@@ -28,6 +28,6 @@ module RackAttackSpecHelpers
def expect_rejection(&block)
yield
- expect(response).to have_http_status(429)
+ expect(response).to have_gitlab_http_status(:too_many_requests)
end
end
diff --git a/spec/support/helpers/smime_helper.rb b/spec/support/helpers/smime_helper.rb
index 3ad19cd3da0..96da3d81708 100644
--- a/spec/support/helpers/smime_helper.rb
+++ b/spec/support/helpers/smime_helper.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
module SmimeHelper
- include OpenSSL
-
INFINITE_EXPIRY = 1000.years
SHORT_EXPIRY = 30.minutes
@@ -20,12 +18,12 @@ module SmimeHelper
public_key = key.public_key
subject = if certificate_authority
- X509::Name.parse("/CN=EU")
+ OpenSSL::X509::Name.parse("/CN=EU")
else
- X509::Name.parse("/CN=#{email_address}")
+ OpenSSL::X509::Name.parse("/CN=#{email_address}")
end
- cert = X509::Certificate.new
+ cert = OpenSSL::X509::Certificate.new
cert.subject = subject
cert.issuer = signed_by&.fetch(:cert, nil)&.subject || subject
@@ -36,7 +34,7 @@ module SmimeHelper
cert.serial = 0x0
cert.version = 2
- extension_factory = X509::ExtensionFactory.new
+ extension_factory = OpenSSL::X509::ExtensionFactory.new
if certificate_authority
extension_factory.subject_certificate = cert
extension_factory.issuer_certificate = cert
@@ -50,7 +48,7 @@ module SmimeHelper
cert.add_extension(extension_factory.create_extension('extendedKeyUsage', 'clientAuth,emailProtection', false))
end
- cert.sign(signed_by&.fetch(:key, nil) || key, Digest::SHA256.new)
+ cert.sign(signed_by&.fetch(:key, nil) || key, OpenSSL::Digest::SHA256.new)
{ key: key, cert: cert }
end
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index 0dc6e851190..6a832ca97d1 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -1,6 +1,5 @@
# frozen_string_literal: true
-require 'active_support/core_ext/hash/transform_values'
require 'active_support/hash_with_indifferent_access'
require 'active_support/dependencies'
diff --git a/spec/support/helpers/user_login_helper.rb b/spec/support/helpers/user_login_helper.rb
index 36c002f53af..66606832883 100644
--- a/spec/support/helpers/user_login_helper.rb
+++ b/spec/support/helpers/user_login_helper.rb
@@ -13,7 +13,7 @@ module UserLoginHelper
def ensure_tab_pane_counts
tabs_count = page.all('[role="tab"]').size
- expect(page).to have_selector('[role="tabpanel"]', count: tabs_count)
+ expect(page).to have_selector('[role="tabpanel"]', visible: :all, count: tabs_count)
end
def ensure_one_active_tab
diff --git a/spec/support/helpers/x509_helpers.rb b/spec/support/helpers/x509_helpers.rb
new file mode 100644
index 00000000000..f72b518134c
--- /dev/null
+++ b/spec/support/helpers/x509_helpers.rb
@@ -0,0 +1,208 @@
+# frozen_string_literal: true
+
+module X509Helpers
+ module User1
+ extend self
+
+ def commit
+ 'a4df3c87f040f5fa693d4d55a89b6af74e22cb56'
+ end
+
+ def path
+ 'gitlab-test'
+ end
+
+ def trust_cert
+ <<~TRUSTCERTIFICATE
+ -----BEGIN CERTIFICATE-----
+ MIIGVTCCBD2gAwIBAgIEdikH4zANBgkqhkiG9w0BAQsFADCBmTELMAkGA1UEBhMC
+ REUxDzANBgNVBAgMBkJheWVybjERMA8GA1UEBwwITXVlbmNoZW4xEDAOBgNVBAoM
+ B1NpZW1lbnMxETAPBgNVBAUTCFpaWlpaWkExMR0wGwYDVQQLDBRTaWVtZW5zIFRy
+ dXN0IENlbnRlcjEiMCAGA1UEAwwZU2llbWVucyBSb290IENBIFYzLjAgMjAxNjAe
+ Fw0xNjA2MDYxMzMwNDhaFw0yODA2MDYxMzMwNDhaMIGZMQswCQYDVQQGEwJERTEP
+ MA0GA1UECAwGQmF5ZXJuMREwDwYDVQQHDAhNdWVuY2hlbjEQMA4GA1UECgwHU2ll
+ bWVuczERMA8GA1UEBRMIWlpaWlpaQTExHTAbBgNVBAsMFFNpZW1lbnMgVHJ1c3Qg
+ Q2VudGVyMSIwIAYDVQQDDBlTaWVtZW5zIFJvb3QgQ0EgVjMuMCAyMDE2MIICIjAN
+ BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp2k2PcfRBu1yeXUxG3UoEDDTFtgF
+ zGVNIq4j4g6niE7hxZzoferzgC6bK3y+lOQFfNkctFzjq6N+JvH535KnN4vXvNoO
+ /Rvrn38XtUC8ms2/1MlzvFDMh0Rt1HzemJYsSUXPvj5EMjGVzeQu1/GZhN6XlRrc
+ SgMSeuwAGN4IX/0QIyxaArxlDZks6zSOA+s9t2PBp6vPZcqA9y4RZLc33nQmdwZg
+ onEYK55xS1QFY2/zuZGQtB73e69IsrAxP+ZzrivlpbgKkEb1kt0qd7rLkp/HnM9J
+ IDFc6uo8dAUCA/oR40Yfe2+8hyKoTrFbTvxC2SqxoBolAemZ2rnckuQ1RInbCQNp
+ pBJJr/Hg78yvIp65gP6mZsyhL6ZLLXjL+ICIUTU86OedkJ7j9o4vdrwBn8AugENy
+ 8jAMu06k9CFbe7QoEynlRvm5VoYMSBsMqn7lAmuBcuMHdEdXu/qN/ULRLGkx1QRc
+ gqf7+QszYla8QEaTtxQKWfdAU0Fyg0ROagrBtFjuDjsMeLK6LM17K3FFM3pghISj
+ o4A8+y2fSbKKnMvU1z3Zey6vnGSwZKOxMJy5/aWuERbegQ07iH0jaA7S/gKZhOKO
+ uDHD9qOBYfKou6wC+xdWyPGFPOq8BQRkWrSEeQW9FxhyYhhcCdcRh+hpZ4eHgRLM
+ KkiFrljndwyB4eUCAwEAAaOBojCBnzAfBgNVHSMEGDAWgBRwbaBQ7KnQLGedGRX+
+ /QRzNcPi1DAPBgNVHRMBAf8EBTADAQH/MDwGA1UdIAQ1MDMwMQYEVR0gADApMCcG
+ CCsGAQUFBwIBFhtodHRwOi8vd3d3LnNpZW1lbnMuY29tL3BraS8wDgYDVR0PAQH/
+ BAQDAgEGMB0GA1UdDgQWBBRwbaBQ7KnQLGedGRX+/QRzNcPi1DANBgkqhkiG9w0B
+ AQsFAAOCAgEAHAxI694Yl16uKvWUdGDoglYLXmTxkVHOSci3TxzdEsAJ6WEf7kbj
+ 6zSQxGcAOz7nvto80rOZzlCluoO5K5fD7a4nEKl+tuBPrgtcEE8nkspPJF6DwjHQ
+ Lmh219YxktZ1D7egLaRCGvxbPjkb3Wuh4vLqzZHr8twcauMxMyqRTN5F2+F43MY0
+ AeBIb9QIMYsxxLBxsSeg4aajGwhdj5FmDFUFbGlyIjd0FfnXxvMuRtWpUWOu4Tya
+ kA0AX/q6uM/L9SFIwmzTO7+2AHW/m/HrCmWb6R4VYWAgppp+jhUViW5l1uLB3i4m
+ 5IaJHZilU/DwQ5FnkuP2xqLvZ7AF3uXBlldOAbE1327uGIhYgp40Oi7PIHH+vgwg
+ JOXQJ3SMwEzYmxCNsyLKAJb2Gs1IpwEpz7lpitl7i/DeUlPZSAo+1SLzc7P35muX
+ ukCeh1vR7LJdCeYQpDpKeUYjKaNXr2/rZlMFmOGXLBKQvTNcI2I5WTIbVQ1sxhWN
+ 0FS+INH6jUypiwh0WH2R1Bo0HY3Lq4zJJ3Ct/12ocQ78+JfENXI8glOs3H07jyng
+ afEj0ba23cn4HnV8s4T0jt8KZYlNkSNlSJ5kgTaZjmdLbTbt24OO4f3WNRrINwKC
+ VzrN1ydSBGHNOsb/muR5axK/dHN2TEycRJPO6kSaVclLhMTxEmhRBUE=
+ -----END CERTIFICATE-----
+ TRUSTCERTIFICATE
+ end
+
+ def signed_commit_signature
+ <<~SIGNATURE
+ -----BEGIN SIGNED MESSAGE-----
+ MIISUgYJKoZIhvcNAQcCoIISQzCCEj8CAQExDTALBglghkgBZQMEAgEwCwYJKoZI
+ hvcNAQcBoIIP3TCCB2kwggVRoAMCAQICBGvn1/4wDQYJKoZIhvcNAQELBQAwgZ8x
+ CzAJBgNVBAYTAkRFMQ8wDQYDVQQIDAZCYXllcm4xETAPBgNVBAcMCE11ZW5jaGVu
+ MRAwDgYDVQQKDAdTaWVtZW5zMREwDwYDVQQFEwhaWlpaWlpBMjEdMBsGA1UECwwU
+ U2llbWVucyBUcnVzdCBDZW50ZXIxKDAmBgNVBAMMH1NpZW1lbnMgSXNzdWluZyBD
+ QSBFRSBBdXRoIDIwMTYwHhcNMTcwMjAzMDY1MzUyWhcNMjAwMjAzMDY1MzUyWjBb
+ MREwDwYDVQQFEwhaMDAwTldESDEOMAwGA1UEKgwFUm9nZXIxDjAMBgNVBAQMBU1l
+ aWVyMRAwDgYDVQQKDAdTaWVtZW5zMRQwEgYDVQQDDAtNZWllciBSb2dlcjCCASIw
+ DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAIpqbpRAtn+vetgVb+APuoVOytZx
+ TmfWovp22nsmJQwE8ZgrJihRjIez0wjD3cvSREvWUXsvbiyxrSHmmwycRCV9YGi1
+ Y9vaYRKOrWhT64Xv6wq6oq8VoA5J3z6V5P6Tkj7g9Q3OskRuSbhFQY89VUdsea+N
+ mcv/XrwtQR0SekfSZw9k0LhbauE69SWRV26O03raengjecbbkS+GTlP30/CqPzzQ
+ 4Ac2TmmVF7RlkGRB05mJqHS+nDK7Lmcr7jD0e92YW+v8Lft4Qu3MpFTYVa7zk712
+ 5xWAgedyOaJb6TpJEz8KRX8v3i0PilQnuKAqZFkLjNcydOox0AtYRW1P2iMCAwEA
+ AaOCAu4wggLqMB0GA1UdDgQWBBTsALUoAlzTpaGrwqE0gYSqv5vP+DBDBgNVHREE
+ PDA6oCMGCisGAQQBgjcUAgOgFQwTci5tZWllckBzaWVtZW5zLmNvbYETci5tZWll
+ ckBzaWVtZW5zLmNvbTAOBgNVHQ8BAf8EBAMCB4AwKQYDVR0lBCIwIAYIKwYBBQUH
+ AwIGCCsGAQUFBwMEBgorBgEEAYI3FAICMIHKBgNVHR8EgcIwgb8wgbyggbmggbaG
+ Jmh0dHA6Ly9jaC5zaWVtZW5zLmNvbS9wa2k/WlpaWlpaQTIuY3JshkFsZGFwOi8v
+ Y2wuc2llbWVucy5uZXQvQ049WlpaWlpaQTIsTD1QS0k/Y2VydGlmaWNhdGVSZXZv
+ Y2F0aW9uTGlzdIZJbGRhcDovL2NsLnNpZW1lbnMuY29tL0NOPVpaWlpaWkEyLG89
+ VHJ1c3RjZW50ZXI/Y2VydGlmaWNhdGVSZXZvY2F0aW9uTGlzdDBFBgNVHSAEPjA8
+ MDoGDSsGAQQBoWkHAgIDAQEwKTAnBggrBgEFBQcCARYbaHR0cDovL3d3dy5zaWVt
+ ZW5zLmNvbS9wa2kvMAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUvb0qQyI9SEpX
+ fpgxF6lwne6fqJkwggEEBggrBgEFBQcBAQSB9zCB9DAyBggrBgEFBQcwAoYmaHR0
+ cDovL2FoLnNpZW1lbnMuY29tL3BraT9aWlpaWlpBMi5jcnQwQQYIKwYBBQUHMAKG
+ NWxkYXA6Ly9hbC5zaWVtZW5zLm5ldC9DTj1aWlpaWlpBMixMPVBLST9jQUNlcnRp
+ ZmljYXRlMEkGCCsGAQUFBzAChj1sZGFwOi8vYWwuc2llbWVucy5jb20vQ049Wlpa
+ WlpaQTIsbz1UcnVzdGNlbnRlcj9jQUNlcnRpZmljYXRlMDAGCCsGAQUFBzABhiRo
+ dHRwOi8vb2NzcC5wa2ktc2VydmljZXMuc2llbWVucy5jb20wDQYJKoZIhvcNAQEL
+ BQADggIBAFY2sbX8DKjKlp0OdH+7Ak21ZdRr6p6JIXzQShWpuFr3wYTpM47+WYVe
+ arBekf8eS08feM+TWw6FHt/VNMpn5fLr20jHn7h+j3ClerAxQbx8J6BxhwJ/4DMy
+ 0cCdbe/fpfJyD/8TGdjnxwAgoq9iPuy1ueVnevygnLcuq1+se6EWJm9v1zrwB0LH
+ rE4/NaSCi06+KGg0D9yiigma9yErRZCiaFvqYXUEl7iGpu2OM9o38gZfGzkKaPtQ
+ e9BzRs6ndmvNpQQGLXvOlHn6DIsOuBHJp66A+wumRO2AC8rs1rc4NAIjCFRrz8k1
+ kzb+ibFiTklWG69+At5/nb06BO/0ER4U18sSpmvOsFKNKPXzLkAn8O8ZzB+8afxy
+ egiIJFxYaqoJcQq3CCv8Xp7tp6I+ojr1ui0jK0yqJq6QfgS8FCXIJ+EErNYuoerx
+ ba6amD83e524sdMhCfD5dw6IeEY7LUl465ifUm+v5W3jStfa+0cQXnLZNGsC85nP
+ Lw5cXVIE3LfoSO3kWH45MfcX32fuqmyP2N3k+/+IOfUpSdT1iR1pEu0g/mow7lGj
+ CZngjmMpoto/Qi3l/n1KPWfmB09FZlUhHcGsHbK8+mrkqpv6HW3tKDSorah98aLM
+ Wvu1IXTrU9fOyBqt92i0e5buH+/9NHia0i6k79kwQy5wu6Q21GgUMIIIbDCCBlSg
+ AwIBAgIEL4jNizANBgkqhkiG9w0BAQsFADCBmTELMAkGA1UEBhMCREUxDzANBgNV
+ BAgMBkJheWVybjERMA8GA1UEBwwITXVlbmNoZW4xEDAOBgNVBAoMB1NpZW1lbnMx
+ ETAPBgNVBAUTCFpaWlpaWkExMR0wGwYDVQQLDBRTaWVtZW5zIFRydXN0IENlbnRl
+ cjEiMCAGA1UEAwwZU2llbWVucyBSb290IENBIFYzLjAgMjAxNjAeFw0xNjA3MjAx
+ MzA5MDhaFw0yMjA3MjAxMzA5MDhaMIGfMQswCQYDVQQGEwJERTEPMA0GA1UECAwG
+ QmF5ZXJuMREwDwYDVQQHDAhNdWVuY2hlbjEQMA4GA1UECgwHU2llbWVuczERMA8G
+ A1UEBRMIWlpaWlpaQTIxHTAbBgNVBAsMFFNpZW1lbnMgVHJ1c3QgQ2VudGVyMSgw
+ JgYDVQQDDB9TaWVtZW5zIElzc3VpbmcgQ0EgRUUgQXV0aCAyMDE2MIICIjANBgkq
+ hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAy1aUq88DjZYPge0vZnAr3KJHmMi0o5mp
+ hy54Xr592Vtf8u/B3TCyD+iGCYANPYUq4sG18qXcVxGadz7zeEm6RI7jKKl3URAv
+ zFGiYForZE0JKxwo956T/diLLpH1vHEQDbp8AjNK7aGoltZnm/Jn6IVQy9iBY0SE
+ lRIBhUlppS4/J2PHtKEvQVYJfkAwTtHuGpvPaesoJ8bHA0KhEZ4+/kIYQebaNDf0
+ ltTmXd4Z8zeUhE25d9MzoFnQUg+F01ewMfc0OsEFheKWP6dmo0MSLWARXxjI3K2R
+ THtJU5hxjb/+SA2wlfpqwNIAkTECDBfqYxHReAT8PeezvzEkNZ9RrXl9qj0Cm2iZ
+ AjY1SL+asuxrGvFwEW/ZKJ2ARY/ot1cHh/I79srzh/jFieShVHbT6s6fyKXmkUjB
+ OEnybUKUqcvNuOXnwEiJ/9jKT5UVBWTDxbEQucAarVNFBEf557o9ievbT+VAZKZ8
+ F4tJge6jl2y19eppflresr7Xui9wekK2LYcLOF3X/MOCFq/9VyQDyE7X9KNGtEx7
+ 4V6J2QpbbRJryvavh3b0eQEtqDc65eiEaP8awqOErN8EEYh7Gdx4Um3QFcm1TBhk
+ ZTdQdLlWv4LvIBnXiBEWRczQYEIm5wv5ZkyPwdL39Xwc72esPPBu8FtQFVcQlRdG
+ I2t5Ywefq48CAwEAAaOCArIwggKuMIIBBQYIKwYBBQUHAQEEgfgwgfUwQQYIKwYB
+ BQUHMAKGNWxkYXA6Ly9hbC5zaWVtZW5zLm5ldC9DTj1aWlpaWlpBMSxMPVBLST9j
+ QUNlcnRpZmljYXRlMDIGCCsGAQUFBzAChiZodHRwOi8vYWguc2llbWVucy5jb20v
+ cGtpP1paWlpaWkExLmNydDBKBggrBgEFBQcwAoY+bGRhcDovL2FsLnNpZW1lbnMu
+ Y29tL3VpZD1aWlpaWlpBMSxvPVRydXN0Y2VudGVyP2NBQ2VydGlmaWNhdGUwMAYI
+ KwYBBQUHMAGGJGh0dHA6Ly9vY3NwLnBraS1zZXJ2aWNlcy5zaWVtZW5zLmNvbTAf
+ BgNVHSMEGDAWgBRwbaBQ7KnQLGedGRX+/QRzNcPi1DASBgNVHRMBAf8ECDAGAQH/
+ AgEAMEAGA1UdIAQ5MDcwNQYIKwYBBAGhaQcwKTAnBggrBgEFBQcCARYbaHR0cDov
+ L3d3dy5zaWVtZW5zLmNvbS9wa2kvMIHHBgNVHR8Egb8wgbwwgbmggbaggbOGP2xk
+ YXA6Ly9jbC5zaWVtZW5zLm5ldC9DTj1aWlpaWlpBMSxMPVBLST9hdXRob3JpdHlS
+ ZXZvY2F0aW9uTGlzdIYmaHR0cDovL2NoLnNpZW1lbnMuY29tL3BraT9aWlpaWlpB
+ MS5jcmyGSGxkYXA6Ly9jbC5zaWVtZW5zLmNvbS91aWQ9WlpaWlpaQTEsbz1UcnVz
+ dGNlbnRlcj9hdXRob3JpdHlSZXZvY2F0aW9uTGlzdDAzBgNVHSUELDAqBggrBgEF
+ BQcDAgYIKwYBBQUHAwQGCisGAQQBgjcUAgIGCCsGAQUFBwMJMA4GA1UdDwEB/wQE
+ AwIBBjAdBgNVHQ4EFgQUvb0qQyI9SEpXfpgxF6lwne6fqJkwDQYJKoZIhvcNAQEL
+ BQADggIBAEQB0qDUmU8rX9KVJA/0zxJUmIeE9zeldih8TKrf4UNzS1+2Cqn4agO7
+ MxRG1d52/pL4uKenffwwYy2dP912PwLjCDOL7jvojjQKx/qpVUXF7XWsg8hAQec3
+ 7Ras/jGPcPQ3OehbkcKcmXI4MrF0Haqo3q1n29gjlJ0fGn2fF1/CBnybPuODAjWG
+ o9mZodXfz0woGSxkftC6nTmAV2GCvIU+j5hNKpzEzo8c1KwLVeXtB4PAqioRW1BX
+ Ngjc7HQbvX/C39RnpOM3RdITw2KKXFxeKBMXdiDuFz/2CzO8HxKH9EVWEcSRbTnd
+ E5iEB4CZzcvfzl9X5AwrKkiIziOiEoiv21ooWeFWfR9V2dgYIE7G1TFwsQ4p0/w5
+ xBHSzqP8TCJp1MQTw42/t8uUXoFEGqk5FKQWoIaFf7N//FLAn8r+7vxNhF5s+tMl
+ VsdKnXn3q8THB3JSnbb/AWGL9rjPK3vh2d3c0I5cWuKXexPLp74ynl2XUbiOXKE7
+ XPUZ9qgK0G9JrrFMm4x1aID9Y9jqYeEz6krYjdFHo5BOVGso6SqWVJE48TxJ5KVv
+ FUb4OxhOAw118Tco0XA7H1G3c2/AKJvIku3cRuj8eLe/cpKqUqQl8uikIZs7POaO
+ +9eJsOnNPmUiwumJgwAo3Ka4ALteKZLbGmKvuo/2ueKCQ29F5rnOMYICOzCCAjcC
+ AQEwgagwgZ8xCzAJBgNVBAYTAkRFMQ8wDQYDVQQIDAZCYXllcm4xETAPBgNVBAcM
+ CE11ZW5jaGVuMRAwDgYDVQQKDAdTaWVtZW5zMREwDwYDVQQFEwhaWlpaWlpBMjEd
+ MBsGA1UECwwUU2llbWVucyBUcnVzdCBDZW50ZXIxKDAmBgNVBAMMH1NpZW1lbnMg
+ SXNzdWluZyBDQSBFRSBBdXRoIDIwMTYCBGvn1/4wCwYJYIZIAWUDBAIBoGkwHAYJ
+ KoZIhvcNAQkFMQ8XDTE5MDYyMDEwNDIwNlowLwYJKoZIhvcNAQkEMSIEIHPHp00z
+ IZ93dAl/uwOnixzuAtf1fUTyxFFaq/5yzc+0MBgGCSqGSIb3DQEJAzELBgkqhkiG
+ 9w0BBwEwCwYJKoZIhvcNAQEBBIIBAD8Or5F/A/vpeNPv1YOrGzTrMU5pbn6o8t2+
+ Hqn+hAdjbD26HqjYQN/nyXNBpgXiV4P5vEVNVpmViAAXGsWKM3BJx7GdH/uUwDnj
+ upvoViXYtzQ92UC2Xzqo7uOg2ryMbDIFNfLosvy4a7NfDLYoMsVYrgOKpDrfOLsS
+ 1VNUjlyftm7vKigkJLrPIEmXrZSVEqsdKvFhcSxS55lm0lVd/fTCAi7TXR2FZWbc
+ TrsTrZx2YdIJDwN04szzBjnQ7yJ4jBLYz1GMBe22xDD10UA4XdBYK07rkcabrv/t
+ kUMI7uN/KeiKPeSvWCn3AUqH6TIFa9WU+tI4U2A2BsUMn6Bq9TY=
+ -----END SIGNED MESSAGE-----
+ SIGNATURE
+ end
+
+ def signed_commit_base_data
+ <<~SIGNEDDATA
+ tree 84c167013d2ee86e8a88ac6011df0b178d261a23
+ parent e63f41fe459e62e1228fcef60d7189127aeba95a
+ author Roger Meier <r.meier@siemens.com> 1561027326 +0200
+ committer Roger Meier <r.meier@siemens.com> 1561027326 +0200
+
+ feat: add a smime signed commit
+ SIGNEDDATA
+ end
+
+ def certificate_crl
+ 'http://ch.siemens.com/pki?ZZZZZZA2.crl'
+ end
+
+ def certificate_serial
+ 1810356222
+ end
+
+ def certificate_subject_key_identifier
+ 'EC:00:B5:28:02:5C:D3:A5:A1:AB:C2:A1:34:81:84:AA:BF:9B:CF:F8'
+ end
+
+ def issuer_subject_key_identifier
+ 'BD:BD:2A:43:22:3D:48:4A:57:7E:98:31:17:A9:70:9D:EE:9F:A8:99'
+ end
+
+ def certificate_email
+ 'r.meier@siemens.com'
+ end
+
+ def certificate_issuer
+ 'CN=Siemens Issuing CA EE Auth 2016,OU=Siemens Trust Center,serialNumber=ZZZZZZA2,O=Siemens,L=Muenchen,ST=Bayern,C=DE'
+ end
+
+ def certificate_subject
+ 'CN=Meier Roger,O=Siemens,SN=Meier,GN=Roger,serialNumber=Z000NWDH'
+ end
+
+ def names
+ ['Roger Meier']
+ end
+
+ def emails
+ ['r.meier@siemens.com']
+ end
+ end
+end
diff --git a/spec/support/import_export/common_util.rb b/spec/support/import_export/common_util.rb
index 72baec7bfcb..912a8e0a2ab 100644
--- a/spec/support/import_export/common_util.rb
+++ b/spec/support/import_export/common_util.rb
@@ -17,5 +17,38 @@ module ImportExport
allow_any_instance_of(Gitlab::ImportExport).to receive(:export_path) { export_path }
end
+
+ def fixtures_path
+ "spec/fixtures/lib/gitlab/import_export"
+ end
+
+ def test_tmp_path
+ "tmp/tests/gitlab-test/import_export"
+ end
+
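+ # Round-trips a project through import and then export in one call.
+ # Illustrative usage (hypothetical paths):
+ #   restore_then_save_project(project,
+ #     import_path: "#{fixtures_path}/light",
+ #     export_path: "#{test_tmp_path}/project_tree_saver")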
+ def restore_then_save_project(project, import_path:, export_path:)
+ project_restorer = get_project_restorer(project, import_path)
+ project_saver = get_project_saver(project, export_path)
+
+ project_restorer.restore && project_saver.save
+ end
+
+ def get_project_restorer(project, import_path)
+ Gitlab::ImportExport::ProjectTreeRestorer.new(
+ user: project.creator, shared: get_shared_env(path: import_path), project: project
+ )
+ end
+
+ def get_project_saver(project, export_path)
+ Gitlab::ImportExport::ProjectTreeSaver.new(
+ project: project, current_user: project.creator, shared: get_shared_env(path: export_path)
+ )
+ end
+
+ def get_shared_env(path:)
+ instance_double(Gitlab::ImportExport::Shared).tap do |shared|
+ allow(shared).to receive(:export_path).and_return(path)
+ end
+ end
end
end
diff --git a/spec/support/import_export/project_tree_expectations.rb b/spec/support/import_export/project_tree_expectations.rb
new file mode 100644
index 00000000000..966c977e8e9
--- /dev/null
+++ b/spec/support/import_export/project_tree_expectations.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+module ImportExport
+ module ProjectTreeExpectations
+ def assert_relations_match(imported_hash, exported_hash)
+ normalized_imported_hash = normalize_elements(imported_hash)
+ normalized_exported_hash = normalize_elements(exported_hash)
+
+ # this is for sanity checking, to make sure we didn't accidentally pass the test
+ # because we essentially ignored everything
+ stats = {
+ hashes: 0,
+ arrays: {
+ direct: 0,
+ pairwise: 0,
+ fuzzy: 0
+ },
+ values: 0
+ }
+
+ failures = match_recursively(normalized_imported_hash, normalized_exported_hash, stats)
+
+ puts "Elements checked:\n#{stats.pretty_inspect}"
+
+ expect(failures).to be_empty, failures.join("\n\n")
+ end
+
+ private
+
+ def match_recursively(left_node, right_node, stats, location_stack = [], failures = [])
+ if Hash === left_node && Hash === right_node
+ match_hashes(left_node, right_node, stats, location_stack, failures)
+ elsif Array === left_node && Array === right_node
+ match_arrays(left_node, right_node, stats, location_stack, failures)
+ else
+ stats[:values] += 1
+ if left_node != right_node
+ failures << failure_message("Value mismatch", location_stack, left_node, right_node)
+ end
+ end
+
+ failures
+ end
+
+ def match_hashes(left_node, right_node, stats, location_stack, failures)
+ stats[:hashes] += 1
+ left_keys = left_node.keys.to_set
+ right_keys = right_node.keys.to_set
+
+ if left_keys != right_keys
+ failures << failure_message("Hash keys mismatch", location_stack, left_keys, right_keys)
+ end
+
+ left_node.keys.each do |key|
+ location_stack << key
+ match_recursively(left_node[key], right_node[key], stats, location_stack, failures)
+ location_stack.pop
+ end
+ end
+
+ def match_arrays(left_node, right_node, stats, location_stack, failures)
+ has_simple_elements = left_node.none? { |el| Enumerable === el }
+ # for simple types, we can do a direct order-less set comparison
+ if has_simple_elements && left_node.to_set != right_node.to_set
+ stats[:arrays][:direct] += 1
+ failures << failure_message("Elements mismatch", location_stack, left_node, right_node)
+ # if both arrays have the same number of complex elements, we can compare pair-wise in-order
+ elsif left_node.size == right_node.size
+ stats[:arrays][:pairwise] += 1
+ left_node.zip(right_node).each do |left_entry, right_entry|
+ match_recursively(left_entry, right_entry, stats, location_stack, failures)
+ end
+ # otherwise we have to fall back to a best-effort match by probing into the right array;
+ # this means we will not account for elements that exist on the right, but not on the left
+ else
+ stats[:arrays][:fuzzy] += 1
+ left_node.each do |left_entry|
+ right_entry = right_node.find { |el| el == left_entry }
+ match_recursively(left_entry, right_entry, stats, location_stack, failures)
+ end
+ end
+ end
+
+ def failure_message(what, location_stack, left_value, right_value)
+ where =
+ if location_stack.empty?
+ "root"
+ else
+ location_stack.map { |loc| loc.to_sym.inspect }.join(' -> ')
+ end
+
+ ">> [#{where}] #{what}\n\n#{left_value.pretty_inspect}\nNOT EQUAL TO\n\n#{right_value.pretty_inspect}"
+ end
+
+ # Helper that traverses a project tree and normalizes data that we know
+ # to vary in the process of importing (such as list order or row IDs)
+ def normalize_elements(elem)
+ case elem
+ when Hash
+ elem.map do |key, value|
+ if ignore_key?(key, value)
+ [key, :ignored]
+ else
+ [key, normalize_elements(value)]
+ end
+ end.to_h
+ when Array
+ elem.map { |a| normalize_elements(a) }
+ else
+ elem
+ end
+ end
+
+ # We currently need to ignore certain entries when checking for equivalence because
+ # we know them to change between imports/exports either by design or because of bugs;
+ # this helper filters out these problematic nodes.
+ def ignore_key?(key, value)
+ id?(key) || # IDs are known to be replaced during imports
+ key == 'updated_at' || # these get changed frequently during imports
+ key == 'next_run_at' || # these values change based on wall clock
+ key == 'notes' # the importer attaches an extra "by user XYZ" at the end of a note
+ end
+
+ def id?(key)
+ key == 'id' || key.ends_with?('_id')
+ end
+ end
+end
diff --git a/spec/support/matchers/background_migrations_matchers.rb b/spec/support/matchers/background_migrations_matchers.rb
index c38aa7ad6a6..8735dac8b2a 100644
--- a/spec/support/matchers/background_migrations_matchers.rb
+++ b/spec/support/matchers/background_migrations_matchers.rb
@@ -26,3 +26,26 @@ RSpec::Matchers.define :be_scheduled_migration do |*expected|
"Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
end
end
+
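+# Checks that a background migration is scheduled with the given arguments,
+# comparing array arguments without regard to order. Example (illustrative):
+#   expect('MyMigration').to be_scheduled_migration_with_multiple_args('foo', [3, 1, 2])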
+RSpec::Matchers.define :be_scheduled_migration_with_multiple_args do |*expected|
+ match do |migration|
+ BackgroundMigrationWorker.jobs.any? do |job|
+ args = job['args'].size == 1 ? [job['args'][0], []] : job['args']
+ args[0] == migration && compare_args(args, expected)
+ end
+ end
+
+ failure_message do |migration|
+ "Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
+ end
+
+ def compare_args(args, expected)
+ args[1].map.with_index do |arg, i|
+ arg.is_a?(Array) ? same_arrays?(arg, expected[i]) : arg == expected[i]
+ end.all?
+ end
+
+ def same_arrays?(arg, expected)
+ arg.sort == expected.sort
+ end
+end
diff --git a/spec/support/matchers/graphql_matchers.rb b/spec/support/matchers/graphql_matchers.rb
index e151a934591..31b0290bb15 100644
--- a/spec/support/matchers/graphql_matchers.rb
+++ b/spec/support/matchers/graphql_matchers.rb
@@ -108,6 +108,12 @@ RSpec::Matchers.define :have_graphql_resolver do |expected|
end
end
+RSpec::Matchers.define :have_graphql_extension do |expected|
+ match do |field|
+ expect(field.metadata[:type_class].extensions).to include(expected)
+ end
+end
+
RSpec::Matchers.define :expose_permissions_using do |expected|
match do |type|
permission_field = type.fields['userPermissions']
diff --git a/spec/support/matchers/log_spam.rb b/spec/support/matchers/log_spam.rb
index 541cacf558c..260d2930816 100644
--- a/spec/support/matchers/log_spam.rb
+++ b/spec/support/matchers/log_spam.rb
@@ -1,29 +1,31 @@
# frozen_string_literal: true
-# This matcher checkes if one spam log with provided attributes was created
+# This matcher checks if one spam log with provided attributes was created
+# during the block execution.
#
# Example:
#
-# expect { create_issue }.to log_spam
-RSpec::Matchers.define :log_spam do |expected|
- def spam_logs
- SpamLog.all
- end
+# expect { create_issue }.to log_spam(key1: value1, key2: value2)
+RSpec::Matchers.define :log_spam do |expected|
match do |block|
+ @existing_logs_count = SpamLog.count
+
block.call
- expect(spam_logs).to contain_exactly(
- have_attributes(expected)
- )
+ @new_logs_count = SpamLog.count
+ @last_spam_log = SpamLog.last
+
+ expect(@new_logs_count - @existing_logs_count).to eq 1
+ expect(@last_spam_log).to have_attributes(expected)
end
description do
- count = spam_logs.count
+ count = @new_logs_count - @existing_logs_count
if count == 1
keys = expected.keys.map(&:to_s)
- actual = spam_logs.first.attributes.slice(*keys)
+ actual = @last_spam_log.attributes.slice(*keys)
"create a spam log with #{expected} attributes. #{actual} created instead."
else
"create exactly 1 spam log with #{expected} attributes. #{count} spam logs created instead."
@@ -32,3 +34,34 @@ RSpec::Matchers.define :log_spam do |expected|
supports_block_expectations
end
+
+# This matcher checks that the last spam log
+# has the attributes provided.
+# The spam log does not have to be created during the block execution.
+# There just has to be at least one spam log in total.
+#
+# Example:
+#
+# expect { create_issue }.to have_spam_log(key1: value1, key2: value2)
+
+RSpec::Matchers.define :have_spam_log do |expected|
+ match do |block|
+ block.call
+
+ @total_logs_count = SpamLog.count
+ @latest_spam_log = SpamLog.last
+ expect(SpamLog.last).to have_attributes(expected)
+ end
+
+ description do
+ if @total_logs_count > 0
+ keys = expected.keys.map(&:to_s)
+ actual = @latest_spam_log.attributes.slice(*keys)
+ "the last spam log to have #{expected} attributes. Last spam log has #{actual} attributes instead."
+ else
+ "there to be a spam log, but there are no spam logs."
+ end
+ end
+
+ supports_block_expectations
+end
diff --git a/spec/support/matchers/match_asset_path.rb b/spec/support/matchers/match_asset_path.rb
new file mode 100644
index 00000000000..130b1ab02a3
--- /dev/null
+++ b/spec/support/matchers/match_asset_path.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+# Adds a simpler way to match asset paths that contain digest strings
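+# Illustrative usage (hypothetical digest):
+#   expect('/assets/application-5f4dcc3b.css').to match_asset_path('application.css') # passes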
+RSpec::Matchers.define :match_asset_path do |expected|
+ match do |actual|
+ path = Regexp.escape(expected)
+ extname = Regexp.escape(File.extname(expected))
+ digest_regex = Regexp.new(path.sub(extname, "(?:-\\h+)?#{extname}") << '$')
+ digest_regex =~ actual
+ end
+
+ failure_message do |actual|
+ "expected that #{actual} would include an asset path for #{expected}"
+ end
+
+ failure_message_when_negated do |actual|
+ "expected that #{actual} would not include an asset path for #{expected}"
+ end
+end
diff --git a/spec/support/api/schema_matcher.rb b/spec/support/matchers/schema_matcher.rb
index ebbd57c8115..ebbd57c8115 100644
--- a/spec/support/api/schema_matcher.rb
+++ b/spec/support/matchers/schema_matcher.rb
diff --git a/spec/support/migration.rb b/spec/support/migration.rb
new file mode 100644
index 00000000000..3c359af886d
--- /dev/null
+++ b/spec/support/migration.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+RSpec.configure do |config|
+ # The :each scope runs "inside" the example, so this hook ensures the DB is in the
+ # correct state before any examples' before hooks are called. This prevents a
+ # problem where `ScheduleIssuesClosedAtTypeChange` (or any migration that depends
+ # on background migrations being run inline during test setup) can be broken by
+ # altering Sidekiq behavior in an unrelated spec like so:
+ #
+ # around do |example|
+ # Sidekiq::Testing.fake! do
+ # example.run
+ # end
+ # end
+ config.before(:context, :migration) do
+ schema_migrate_down!
+ end
+
+ # Each example may call `migrate!`, so we must ensure we are migrated down every time
+ config.before(:each, :migration) do
+ use_fake_application_settings
+
+ schema_migrate_down!
+ end
+
+ config.after(:context, :migration) do
+ schema_migrate_up!
+
+ Gitlab::CurrentSettings.clear_in_memory_application_settings!
+ end
+end
diff --git a/spec/support/migrations_helpers/namespaces_helper.rb b/spec/support/migrations_helpers/namespaces_helper.rb
new file mode 100644
index 00000000000..4ca01c87568
--- /dev/null
+++ b/spec/support/migrations_helpers/namespaces_helper.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module MigrationHelpers
+ module NamespacesHelpers
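+ # Illustrative usage in a migration spec (hypothetical values):
+ #   group = create_namespace('gitlab-org', Gitlab::VisibilityLevel::PUBLIC)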
+ def create_namespace(name, visibility, options = {})
+ table(:namespaces).create({
+ name: name,
+ path: name,
+ type: 'Group',
+ visibility_level: visibility
+ }.merge(options))
+ end
+ end
+end
diff --git a/spec/support/migrations_helpers/prometheus_service_helpers.rb b/spec/support/migrations_helpers/prometheus_service_helpers.rb
deleted file mode 100644
index 88f2f71ee1e..00000000000
--- a/spec/support/migrations_helpers/prometheus_service_helpers.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-module MigrationHelpers
- module PrometheusServiceHelpers
- def service_params_for(project_id, params = {})
- {
- project_id: project_id,
- active: false,
- properties: '{}',
- type: 'PrometheusService',
- template: false,
- push_events: true,
- issues_events: true,
- merge_requests_events: true,
- tag_push_events: true,
- note_events: true,
- category: 'monitoring',
- default: false,
- wiki_page_events: true,
- pipeline_events: true,
- confidential_issues_events: true,
- commit_events: true,
- job_events: true,
- confidential_note_events: true,
- deployment_events: false
- }.merge(params)
- end
-
- def row_attributes(entity)
- entity.attributes.with_indifferent_access.tap do |hash|
- hash.merge!(hash.slice(:created_at, :updated_at).transform_values { |v| v.to_s(:db) })
- end
- end
- end
-end
diff --git a/spec/support/pages.rb b/spec/support/pages.rb
new file mode 100644
index 00000000000..ad73d5b9ef5
--- /dev/null
+++ b/spec/support/pages.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
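+# These tags stub the GitLab Pages external HTTP/HTTPS settings for a spec.
+# Illustrative usage (hypothetical feature spec): tag an example group with
+# :https_pages_enabled to have Gitlab.config.pages.external_https return
+# ['1.1.1.1:443'] for the duration of that group.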
+RSpec.configure do |config|
+ config.before(:each, :http_pages_enabled) do |_|
+ allow(Gitlab.config.pages).to receive(:external_http).and_return(['1.1.1.1:80'])
+ end
+
+ config.before(:each, :https_pages_enabled) do |_|
+ allow(Gitlab.config.pages).to receive(:external_https).and_return(['1.1.1.1:443'])
+ end
+
+ config.before(:each, :http_pages_disabled) do |_|
+ allow(Gitlab.config.pages).to receive(:external_http).and_return(false)
+ end
+
+ config.before(:each, :https_pages_disabled) do |_|
+ allow(Gitlab.config.pages).to receive(:external_https).and_return(false)
+ end
+end
diff --git a/spec/support/redis.rb b/spec/support/redis.rb
new file mode 100644
index 00000000000..8539f202602
--- /dev/null
+++ b/spec/support/redis.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+RSpec.configure do |config|
+ config.after(:each, :redis) do
+ Sidekiq.redis do |connection|
+ connection.redis.flushdb
+ end
+ end
+
+ config.around(:each, :clean_gitlab_redis_cache) do |example|
+ redis_cache_cleanup!
+
+ example.run
+
+ redis_cache_cleanup!
+ end
+
+ config.around(:each, :clean_gitlab_redis_shared_state) do |example|
+ redis_shared_state_cleanup!
+
+ example.run
+
+ redis_shared_state_cleanup!
+ end
+
+ config.around(:each, :clean_gitlab_redis_queues) do |example|
+ redis_queues_cleanup!
+
+ example.run
+
+ redis_queues_cleanup!
+ end
+end
diff --git a/spec/support/services/clusters/create_service_shared.rb b/spec/support/services/clusters/create_service_shared.rb
index 468f25bfffe..31aee08baec 100644
--- a/spec/support/services/clusters/create_service_shared.rb
+++ b/spec/support/services/clusters/create_service_shared.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_context 'valid cluster create params' do
+RSpec.shared_context 'valid cluster create params' do
let(:params) do
{
name: 'test-cluster',
@@ -16,7 +16,7 @@ shared_context 'valid cluster create params' do
end
end
-shared_context 'invalid cluster create params' do
+RSpec.shared_context 'invalid cluster create params' do
let(:params) do
{
name: 'test-cluster',
@@ -31,7 +31,7 @@ shared_context 'invalid cluster create params' do
end
end
-shared_examples 'create cluster service success' do
+RSpec.shared_examples 'create cluster service success' do
it 'creates a cluster object and performs a worker' do
expect(ClusterProvisionWorker).to receive(:perform_async)
@@ -53,7 +53,7 @@ shared_examples 'create cluster service success' do
end
end
-shared_examples 'create cluster service error' do
+RSpec.shared_examples 'create cluster service error' do
it 'returns an error' do
expect(ClusterProvisionWorker).not_to receive(:perform_async)
expect { subject }.to change { Clusters::Cluster.count }.by(0)
diff --git a/spec/support/services/issuable_create_service_slash_commands_shared_examples.rb b/spec/support/services/issuable_create_service_slash_commands_shared_examples.rb
index 4c3644e6724..3d45fe06134 100644
--- a/spec/support/services/issuable_create_service_slash_commands_shared_examples.rb
+++ b/spec/support/services/issuable_create_service_slash_commands_shared_examples.rb
@@ -3,7 +3,7 @@
# Specifications for behavior common to all objects with executable attributes.
# It can take a `default_params`.
-shared_examples 'new issuable record that supports quick actions' do
+RSpec.shared_examples 'new issuable record that supports quick actions' do
let!(:project) { create(:project, :repository) }
let(:user) { create(:user).tap { |u| project.add_maintainer(u) } }
let(:assignee) { create(:user) }
diff --git a/spec/support/services/issuable_update_service_shared_examples.rb b/spec/support/services/issuable_update_service_shared_examples.rb
index 5e5acd0e40a..8867a1e3a90 100644
--- a/spec/support/services/issuable_update_service_shared_examples.rb
+++ b/spec/support/services/issuable_update_service_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'issuable update service' do
+RSpec.shared_examples 'issuable update service' do
def update_issuable(opts)
described_class.new(project, user, opts).execute(open_issuable)
end
diff --git a/spec/support/services/migrate_to_ghost_user_service_shared_examples.rb b/spec/support/services/migrate_to_ghost_user_service_shared_examples.rb
index 65236f13e27..8a88f0335a9 100644
--- a/spec/support/services/migrate_to_ghost_user_service_shared_examples.rb
+++ b/spec/support/services/migrate_to_ghost_user_service_shared_examples.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
-require "spec_helper"
-
-shared_examples "migrating a deleted user's associated records to the ghost user" do |record_class, fields|
+RSpec.shared_examples "migrating a deleted user's associated records to the ghost user" do |record_class, fields|
record_class_name = record_class.to_s.titleize.downcase
let(:project) do
diff --git a/spec/support/shared_contexts/change_access_checks_shared_context.rb b/spec/support/shared_contexts/change_access_checks_shared_context.rb
index aca18b0c73b..e1ab81b4e3d 100644
--- a/spec/support/shared_contexts/change_access_checks_shared_context.rb
+++ b/spec/support/shared_contexts/change_access_checks_shared_context.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_context 'change access checks context' do
+RSpec.shared_context 'change access checks context' do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:user_access) { Gitlab::UserAccess.new(user, project: project) }
diff --git a/spec/support/controllers/githubish_import_controller_shared_context.rb b/spec/support/shared_contexts/controllers/githubish_import_controller_shared_context.rb
index 3706178ee34..d4320e928a9 100644
--- a/spec/support/controllers/githubish_import_controller_shared_context.rb
+++ b/spec/support/shared_contexts/controllers/githubish_import_controller_shared_context.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_context 'a GitHub-ish import controller' do
+RSpec.shared_context 'a GitHub-ish import controller' do
let(:user) { create(:user) }
let(:token) { "asdasd12345" }
let(:access_params) { { github_access_token: token } }
diff --git a/spec/support/controllers/ldap_omniauth_callbacks_controller_shared_context.rb b/spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb
index 8a8a2f714bc..4426d3af908 100644
--- a/spec/support/controllers/ldap_omniauth_callbacks_controller_shared_context.rb
+++ b/spec/support/shared_contexts/controllers/ldap_omniauth_callbacks_controller_shared_context.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-shared_context 'Ldap::OmniauthCallbacksController' do
+RSpec.shared_context 'Ldap::OmniauthCallbacksController' do
include LoginHelpers
include LdapHelpers
diff --git a/spec/support/shared_contexts/email_shared_context.rb b/spec/support/shared_contexts/email_shared_context.rb
index b4d061a8215..b4d7722f03d 100644
--- a/spec/support/shared_contexts/email_shared_context.rb
+++ b/spec/support/shared_contexts/email_shared_context.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_context :email_shared_context do
+RSpec.shared_context :email_shared_context do
let(:mail_key) { "59d8df8370b7e95c5a49fbf86aeb2c93" }
let(:receiver) { Gitlab::Email::Receiver.new(email_raw) }
let(:markdown) { "![image](uploads/image.png)" }
@@ -18,7 +18,7 @@ shared_context :email_shared_context do
end
end
-shared_examples :reply_processing_shared_examples do
+RSpec.shared_examples :reply_processing_shared_examples do
context "when the user could not be found" do
before do
user.destroy
diff --git a/spec/support/shared_contexts/features/error_tracking_shared_context.rb b/spec/support/shared_contexts/features/error_tracking_shared_context.rb
new file mode 100644
index 00000000000..48356373c26
--- /dev/null
+++ b/spec/support/shared_contexts/features/error_tracking_shared_context.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+shared_context 'sentry error tracking context feature' do
+ include ReactiveCachingHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project_error_tracking_settings) { create(:project_error_tracking_setting, project: project) }
+ let_it_be(:issue_response_body) { fixture_file('sentry/issue_sample_response.json') }
+ let_it_be(:issue_response) { JSON.parse(issue_response_body) }
+ let_it_be(:event_response_body) { fixture_file('sentry/issue_latest_event_sample_response.json') }
+ let_it_be(:event_response) { JSON.parse(event_response_body) }
+ let(:sentry_api_urls) { Sentry::ApiUrls.new(project_error_tracking_settings.api_url) }
+ let(:issue_id) { issue_response['id'] }
+ let(:issue_seen) { 1.year.ago.utc }
+ let(:formatted_issue_seen) { issue_seen.strftime("%Y-%m-%d %-l:%M:%S%p %Z") }
+ let(:date_received) { 1.month.ago.utc }
+
+ before do
+ request_headers = { 'Authorization' => 'Bearer access_token_123', 'Content-Type' => 'application/json' }
+ response_headers = { 'Content-Type' => 'application/json' }
+
+ issue_response['firstSeen'] = issue_seen.iso8601(6)
+ issue_response['lastSeen'] = issue_seen.iso8601(6)
+ event_response['dateReceived'] = date_received.iso8601(6)
+
+ issue_url = sentry_api_urls.issue_url(issue_id).to_s
+ stub_request(:get, issue_url)
+ .with(headers: request_headers)
+ .to_return(status: 200, body: issue_response.to_json, headers: response_headers)
+ event_url = sentry_api_urls.issue_latest_event_url(issue_id).to_s
+ stub_request(:get, event_url)
+ .with(headers: request_headers)
+ .to_return(status: 200, body: event_response.to_json, headers: response_headers)
+ end
+end
diff --git a/spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb
index e7fee7239fc..58ee48a98f1 100644
--- a/spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb
@@ -1,7 +1,5 @@
# frozen_string_literal: true
-require 'spec_helper'
-
RSpec.shared_context 'GroupProjectsFinder context' do
let(:group) { create(:group) }
let(:subgroup) { create(:group, parent: group) }
diff --git a/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
index 6c96b18d834..6b950a354cf 100644
--- a/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
@@ -1,7 +1,5 @@
# frozen_string_literal: true
-require 'spec_helper'
-
RSpec.shared_context 'IssuesFinder context' do
set(:user) { create(:user) }
set(:user2) { create(:user) }
diff --git a/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
index ef1e65d2577..82190fb7793 100644
--- a/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
@@ -1,7 +1,5 @@
# frozen_string_literal: true
-require 'spec_helper'
-
RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests context' do
include ProjectForksHelper
diff --git a/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb
index d6404b2ee4b..a2fa3d7beac 100644
--- a/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb
@@ -1,7 +1,5 @@
# frozen_string_literal: true
-require 'spec_helper'
-
RSpec.shared_context 'UsersFinder#execute filter by project context' do
set(:normal_user) { create(:user, username: 'johndoe') }
set(:blocked_user) { create(:user, :blocked, username: 'notsorandom') }
diff --git a/spec/support/shared_contexts/json_response_shared_context.rb b/spec/support/shared_contexts/json_response_shared_context.rb
index bd37c97ed35..6a0734decd5 100644
--- a/spec/support/shared_contexts/json_response_shared_context.rb
+++ b/spec/support/shared_contexts/json_response_shared_context.rb
@@ -1,5 +1,5 @@
# frozen_string_literal: true
-shared_context 'JSON response' do
+RSpec.shared_context 'JSON response' do
let(:json_response) { JSON.parse(response.body) }
end
diff --git a/spec/support/shared_contexts/mailers/notify_shared_context.rb b/spec/support/shared_contexts/mailers/notify_shared_context.rb
new file mode 100644
index 00000000000..d5b44f8df2c
--- /dev/null
+++ b/spec/support/shared_contexts/mailers/notify_shared_context.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'gitlab email notification' do
+ set(:group) { create(:group) }
+ set(:subgroup) { create(:group, parent: group) }
+ set(:project) { create(:project, :repository, name: 'a-known-name', group: group) }
+ set(:recipient) { create(:user, email: 'recipient@example.com') }
+
+ let(:gitlab_sender_display_name) { Gitlab.config.gitlab.email_display_name }
+ let(:gitlab_sender) { Gitlab.config.gitlab.email_from }
+ let(:gitlab_sender_reply_to) { Gitlab.config.gitlab.email_reply_to }
+ let(:new_user_address) { 'newguy@example.com' }
+
+ before do
+ email = recipient.emails.create(email: "notifications@example.com")
+ recipient.update_attribute(:notification_email, email.email)
+ stub_incoming_email_setting(enabled: true, address: "reply+%{key}@#{Gitlab.config.gitlab.host}")
+ end
+end
+
+RSpec.shared_context 'reply-by-email is enabled with incoming address without %{key}' do
+ before do
+ stub_incoming_email_setting(enabled: true, address: "reply@#{Gitlab.config.gitlab.host}")
+ end
+end
diff --git a/spec/support/shared_contexts/merge_request_create.rb b/spec/support/shared_contexts/merge_request_create_shared_context.rb
index 529f481c2b6..f2defa4eab9 100644
--- a/spec/support/shared_contexts/merge_request_create.rb
+++ b/spec/support/shared_contexts/merge_request_create_shared_context.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_context 'merge request create context' do
+RSpec.shared_context 'merge request create context' do
let(:user) { create(:user) }
let(:user2) { create(:user) }
let(:target_project) { create(:project, :public, :repository) }
diff --git a/spec/support/shared_contexts/merge_request_edit.rb b/spec/support/shared_contexts/merge_request_edit_shared_context.rb
index c84510ff47d..d490d26adfb 100644
--- a/spec/support/shared_contexts/merge_request_edit.rb
+++ b/spec/support/shared_contexts/merge_request_edit_shared_context.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
-shared_context 'merge request edit context' do
+RSpec.shared_context 'merge request edit context' do
let(:user) { create(:user) }
let(:user2) { create(:user) }
let!(:milestone) { create(:milestone, project: target_project) }
diff --git a/spec/support/shared_contexts/merge_requests_allowing_collaboration.rb b/spec/support/shared_contexts/merge_requests_allowing_collaboration_shared_context.rb
index 276ebf973c8..5412a991b22 100644
--- a/spec/support/shared_contexts/merge_requests_allowing_collaboration.rb
+++ b/spec/support/shared_contexts/merge_requests_allowing_collaboration_shared_context.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_context 'merge request allowing collaboration' do
+RSpec.shared_context 'merge request allowing collaboration' do
include ProjectForksHelper
let(:canonical) { create(:project, :public, :repository) }
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index c503197a773..63ebbcb93f9 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -16,7 +16,7 @@ RSpec.shared_context 'GroupPolicy context' do
read_group_merge_requests
]
end
- let(:read_group_permissions) { %i[read_label read_list read_milestone] }
+ let(:read_group_permissions) { %i[read_label read_list read_milestone read_board] }
let(:reporter_permissions) { %i[admin_label read_container_image] }
let(:developer_permissions) { [:admin_milestone] }
let(:maintainer_permissions) do
diff --git a/spec/support/shared_contexts/policies/project_policy_shared_context.rb b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
index 480c5a0fda0..29a64e9b559 100644
--- a/spec/support/shared_contexts/policies/project_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
@@ -13,7 +13,7 @@ RSpec.shared_context 'ProjectPolicy context' do
%i[
read_project read_board read_list read_wiki read_issue
read_project_for_iids read_issue_iid read_label
- read_milestone read_project_snippet read_project_member read_note
+ read_milestone read_snippet read_project_member read_note
create_project create_issue create_note upload_file create_merge_request_in
award_emoji
]
@@ -21,7 +21,7 @@ RSpec.shared_context 'ProjectPolicy context' do
let(:base_reporter_permissions) do
%i[
- download_code fork_project create_project_snippet update_issue
+ download_code fork_project create_snippet update_issue
admin_issue admin_label admin_list read_commit_status read_build
read_container_image read_pipeline read_environment read_deployment
read_merge_request download_wiki_code read_sentry_issue read_prometheus
@@ -45,8 +45,8 @@ RSpec.shared_context 'ProjectPolicy context' do
let(:base_maintainer_permissions) do
%i[
- push_to_delete_protected_branch update_project_snippet
- admin_project_snippet admin_project_member admin_note admin_wiki admin_project
+ push_to_delete_protected_branch update_snippet
+ admin_snippet admin_project_member admin_note admin_wiki admin_project
admin_commit_status admin_build admin_container_image
admin_pipeline admin_environment admin_deployment destroy_release add_cluster
daily_statistics
diff --git a/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb b/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb
index 0a918ccde81..efd82ecb15a 100644
--- a/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb
+++ b/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb
@@ -318,5 +318,169 @@ RSpec.shared_context 'ProjectPolicyTable context' do
:private | :non_member | 0
:private | :anonymous | 0
end
+
+ # :snippet_level, :project_level, :feature_access_level, :membership, :expected_count
+ def permission_table_for_project_snippet_access
+ :public | :public | :enabled | :admin | 1
+ :public | :public | :enabled | :reporter | 1
+ :public | :public | :enabled | :guest | 1
+ :public | :public | :enabled | :non_member | 1
+ :public | :public | :enabled | :anonymous | 1
+
+ :public | :public | :private | :admin | 1
+ :public | :public | :private | :reporter | 1
+ :public | :public | :private | :guest | 1
+ :public | :public | :private | :non_member | 0
+ :public | :public | :private | :anonymous | 0
+
+ :public | :public | :disabled | :admin | 1
+ :public | :public | :disabled | :reporter | 0
+ :public | :public | :disabled | :guest | 0
+ :public | :public | :disabled | :non_member | 0
+ :public | :public | :disabled | :anonymous | 0
+
+ :public | :internal | :enabled | :admin | 1
+ :public | :internal | :enabled | :reporter | 1
+ :public | :internal | :enabled | :guest | 1
+ :public | :internal | :enabled | :non_member | 1
+ :public | :internal | :enabled | :anonymous | 0
+
+ :public | :internal | :private | :admin | 1
+ :public | :internal | :private | :reporter | 1
+ :public | :internal | :private | :guest | 1
+ :public | :internal | :private | :non_member | 0
+ :public | :internal | :private | :anonymous | 0
+
+ :public | :internal | :disabled | :admin | 1
+ :public | :internal | :disabled | :reporter | 0
+ :public | :internal | :disabled | :guest | 0
+ :public | :internal | :disabled | :non_member | 0
+ :public | :internal | :disabled | :anonymous | 0
+
+ :public | :private | :private | :admin | 1
+ :public | :private | :private | :reporter | 1
+ :public | :private | :private | :guest | 1
+ :public | :private | :private | :non_member | 0
+ :public | :private | :private | :anonymous | 0
+
+ :public | :private | :disabled | :reporter | 0
+ :public | :private | :disabled | :guest | 0
+ :public | :private | :disabled | :non_member | 0
+ :public | :private | :disabled | :anonymous | 0
+
+ :internal | :public | :enabled | :admin | 1
+ :internal | :public | :enabled | :reporter | 1
+ :internal | :public | :enabled | :guest | 1
+ :internal | :public | :enabled | :non_member | 1
+ :internal | :public | :enabled | :anonymous | 0
+
+ :internal | :public | :private | :admin | 1
+ :internal | :public | :private | :reporter | 1
+ :internal | :public | :private | :guest | 1
+ :internal | :public | :private | :non_member | 0
+ :internal | :public | :private | :anonymous | 0
+
+ :internal | :public | :disabled | :admin | 1
+ :internal | :public | :disabled | :reporter | 0
+ :internal | :public | :disabled | :guest | 0
+ :internal | :public | :disabled | :non_member | 0
+ :internal | :public | :disabled | :anonymous | 0
+
+ :internal | :internal | :enabled | :admin | 1
+ :internal | :internal | :enabled | :reporter | 1
+ :internal | :internal | :enabled | :guest | 1
+ :internal | :internal | :enabled | :non_member | 1
+ :internal | :internal | :enabled | :anonymous | 0
+
+ :internal | :internal | :private | :admin | 1
+ :internal | :internal | :private | :reporter | 1
+ :internal | :internal | :private | :guest | 1
+ :internal | :internal | :private | :non_member | 0
+ :internal | :internal | :private | :anonymous | 0
+
+ :internal | :internal | :disabled | :admin | 1
+ :internal | :internal | :disabled | :reporter | 0
+ :internal | :internal | :disabled | :guest | 0
+ :internal | :internal | :disabled | :non_member | 0
+ :internal | :internal | :disabled | :anonymous | 0
+
+ :internal | :private | :private | :admin | 1
+ :internal | :private | :private | :reporter | 1
+ :internal | :private | :private | :guest | 1
+ :internal | :private | :private | :non_member | 0
+ :internal | :private | :private | :anonymous | 0
+
+ :internal | :private | :disabled | :admin | 1
+ :internal | :private | :disabled | :reporter | 0
+ :internal | :private | :disabled | :guest | 0
+ :internal | :private | :disabled | :non_member | 0
+ :internal | :private | :disabled | :anonymous | 0
+
+ :private | :public | :enabled | :admin | 1
+ :private | :public | :enabled | :reporter | 1
+ :private | :public | :enabled | :guest | 1
+ :private | :public | :enabled | :non_member | 0
+ :private | :public | :enabled | :anonymous | 0
+
+ :private | :public | :private | :admin | 1
+ :private | :public | :private | :reporter | 1
+ :private | :public | :private | :guest | 1
+ :private | :public | :private | :non_member | 0
+ :private | :public | :private | :anonymous | 0
+
+ :private | :public | :disabled | :admin | 1
+ :private | :public | :disabled | :reporter | 0
+ :private | :public | :disabled | :guest | 0
+ :private | :public | :disabled | :non_member | 0
+ :private | :public | :disabled | :anonymous | 0
+
+ :private | :internal | :enabled | :admin | 1
+ :private | :internal | :enabled | :reporter | 1
+ :private | :internal | :enabled | :guest | 1
+ :private | :internal | :enabled | :non_member | 0
+ :private | :internal | :enabled | :anonymous | 0
+
+ :private | :internal | :private | :admin | 1
+ :private | :internal | :private | :reporter | 1
+ :private | :internal | :private | :guest | 1
+ :private | :internal | :private | :non_member | 0
+ :private | :internal | :private | :anonymous | 0
+
+ :private | :internal | :disabled | :admin | 1
+ :private | :internal | :disabled | :reporter | 0
+ :private | :internal | :disabled | :guest | 0
+ :private | :internal | :disabled | :non_member | 0
+ :private | :internal | :disabled | :anonymous | 0
+
+ :private | :private | :private | :admin | 1
+ :private | :private | :private | :reporter | 1
+ :private | :private | :private | :guest | 1
+ :private | :private | :private | :non_member | 0
+ :private | :private | :private | :anonymous | 0
+
+ :private | :private | :disabled | :admin | 1
+ :private | :private | :disabled | :reporter | 0
+ :private | :private | :disabled | :guest | 0
+ :private | :private | :disabled | :non_member | 0
+ :private | :private | :disabled | :anonymous | 0
+ end
+
+ # :snippet_level, :membership, :expected_count
+ def permission_table_for_personal_snippet_access
+ :public | :admin | 1
+ :public | :author | 1
+ :public | :non_member | 1
+ :public | :anonymous | 1
+
+ :internal | :admin | 1
+ :internal | :author | 1
+ :internal | :non_member | 1
+ :internal | :anonymous | 0
+
+ :private | :admin | 1
+ :private | :author | 1
+ :private | :non_member | 0
+ :private | :anonymous | 0
+ end
# rubocop:enable Metrics/AbcSize
end

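Editor's note, a hedged sketch of how the permission table methods above are typically consumed: the GitLab suite pairs such tables with rspec-parameterized's where/with_them elsewhere, so the example group name and the expectation below are illustrative assumptions rather than code from this commit.

require 'spec_helper'

RSpec.describe 'personal snippet visibility table' do
  include_context 'ProjectPolicyTable context'

  # Each row of the table becomes one parameterized example.
  where(:snippet_level, :membership, :expected_count) do
    permission_table_for_personal_snippet_access
  end

  with_them do
    it 'yields an expected count of zero or one per combination' do
      expect(expected_count).to be_between(0, 1).inclusive
    end
  end
end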
diff --git a/spec/support/shared_contexts/rack_attack_shared_context.rb b/spec/support/shared_contexts/rack_attack_shared_context.rb
index c925f565226..e7b2ee76c3c 100644
--- a/spec/support/shared_contexts/rack_attack_shared_context.rb
+++ b/spec/support/shared_contexts/rack_attack_shared_context.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_context 'rack attack cache store' do
+RSpec.shared_context 'rack attack cache store' do
around do |example|
# Instead of test environment's :null_store so the throttles can increment
Rack::Attack.cache.store = ActiveSupport::Cache::MemoryStore.new
diff --git a/spec/support/shared_contexts/sentry_error_tracking_shared_context.rb b/spec/support/shared_contexts/sentry_error_tracking_shared_context.rb
new file mode 100644
index 00000000000..f06de53f0c1
--- /dev/null
+++ b/spec/support/shared_contexts/sentry_error_tracking_shared_context.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+shared_context 'sentry error tracking context' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
+ let(:token) { 'test-token' }
+ let(:params) { {} }
+ let(:result) { subject.execute }
+
+ let(:error_tracking_setting) do
+ create(:project_error_tracking_setting, api_url: sentry_url, token: token, project: project)
+ end
+
+ before do
+ expect(project).to receive(:error_tracking_setting).at_least(:once).and_return(error_tracking_setting)
+
+ project.add_reporter(user)
+ end
+end
diff --git a/spec/support/shared_contexts/services_shared_context.rb b/spec/support/shared_contexts/services_shared_context.rb
index 113bcc2af9c..21bc0651c44 100644
--- a/spec/support/shared_contexts/services_shared_context.rb
+++ b/spec/support/shared_contexts/services_shared_context.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
Service.available_services_names.each do |service|
- shared_context service do
+ RSpec.shared_context service do
let(:dashed_service) { service.dasherize }
let(:service_method) { "#{service}_service".to_sym }
let(:service_klass) { "#{service}_service".classify.constantize }
@@ -32,8 +32,7 @@ Service.available_services_names.each do |service|
{
'github' => :github_project_service_integration,
'jenkins' => :jenkins_integration,
- 'jenkins_deprecated' => :jenkins_integration,
- 'alerts' => :incident_management
+ 'jenkins_deprecated' => :jenkins_integration
}
end
diff --git a/spec/support/shared_contexts/session_shared_context.rb b/spec/support/shared_contexts/session_shared_context.rb
index 86c145a8360..4cc87896f7e 100644
--- a/spec/support/shared_contexts/session_shared_context.rb
+++ b/spec/support/shared_contexts/session_shared_context.rb
@@ -4,7 +4,7 @@
# let(:session) variable
# we do not use a parameter such as |session| because it does not play nice
# with let variables
-shared_context 'custom session' do
+RSpec.shared_context 'custom session' do
let!(:session) { {} }
around do |example|
diff --git a/spec/support/shared_contexts/unique_ip_check_shared_context.rb b/spec/support/shared_contexts/unique_ip_check_shared_context.rb
new file mode 100644
index 00000000000..f6bedb6cada
--- /dev/null
+++ b/spec/support/shared_contexts/unique_ip_check_shared_context.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'unique ips sign in limit' do
+ include StubENV
+ let(:request_context) { Gitlab::RequestContext.instance }
+
+ before do
+ Gitlab::Redis::Cache.with(&:flushall)
+ Gitlab::Redis::Queues.with(&:flushall)
+ Gitlab::Redis::SharedState.with(&:flushall)
+ end
+
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+
+ Gitlab::CurrentSettings.update!(
+ unique_ips_limit_enabled: true,
+ unique_ips_limit_time_window: 10000
+ )
+
+ # Make sure we're working with the same request context everywhere
+ allow(Gitlab::RequestContext).to receive(:instance).and_return(request_context)
+ end
+
+ def change_ip(ip)
+ allow(request_context).to receive(:client_ip).and_return(ip)
+ end
+
+ def request_from_ip(ip)
+ change_ip(ip)
+ request
+ response
+ end
+
+ def operation_from_ip(ip)
+ change_ip(ip)
+ operation
+ end
+end
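Editor's note, for illustration only: a minimal spec that exercises the helpers defined by the new 'unique ips sign in limit' context. It relies solely on what the context itself stubs; the describe label is an assumption.

require 'spec_helper'

RSpec.describe 'unique ips sign in limit helpers' do
  include_context 'unique ips sign in limit'

  it 'stubs the client IP seen through the shared request context' do
    change_ip('10.0.0.1')

    # The context pins Gitlab::RequestContext.instance to a single object,
    # and change_ip stubs that object's client_ip.
    expect(Gitlab::RequestContext.instance.client_ip).to eq('10.0.0.1')
  end
end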
diff --git a/spec/support/shared_contexts/upload_type_check_shared_context.rb b/spec/support/shared_contexts/upload_type_check_shared_context.rb
index 04c97500dd6..d29c498fd15 100644
--- a/spec/support/shared_contexts/upload_type_check_shared_context.rb
+++ b/spec/support/shared_contexts/upload_type_check_shared_context.rb
@@ -2,7 +2,7 @@
# Construct an `uploader` variable that is configured to `check_upload_type`
# with `mime_types` and `extensions`.
-shared_context 'uploader with type check' do
+RSpec.shared_context 'uploader with type check' do
let(:uploader_class) do
Class.new(GitlabUploader) do
include UploadTypeCheck::Concern
@@ -20,7 +20,7 @@ shared_context 'uploader with type check' do
end
end
-shared_context 'stubbed MimeMagic mime type detection' do
+RSpec.shared_context 'stubbed MimeMagic mime type detection' do
let(:mime_type) { '' }
let(:magic_mime) { mime_type }
let(:ext_mime) { mime_type }
diff --git a/spec/support/shared_contexts/url_shared_context.rb b/spec/support/shared_contexts/url_shared_context.rb
index 560cd500ecd..f3d227b6e2b 100644
--- a/spec/support/shared_contexts/url_shared_context.rb
+++ b/spec/support/shared_contexts/url_shared_context.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_context 'invalid urls' do
+RSpec.shared_context 'invalid urls' do
let(:urls_with_CRLF) do
["http://127.0.0.1:333/pa\rth",
"http://127.0.0.1:333/pa\nth",
diff --git a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
index f2f31e1b7f2..113252a6ab5 100644
--- a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
+++ b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'multiple issue boards' do
+RSpec.shared_examples 'multiple issue boards' do
context 'authorized user' do
before do
parent.add_maintainer(user)
diff --git a/spec/support/shared_examples/ci/auto_merge_merge_requests_examples.rb b/spec/support/shared_examples/ci/auto_merge_merge_requests_shared_examples.rb
index c11448ffe0f..9024845c325 100644
--- a/spec/support/shared_examples/ci/auto_merge_merge_requests_examples.rb
+++ b/spec/support/shared_examples/ci/auto_merge_merge_requests_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'aborted merge requests for MWPS' do
+RSpec.shared_examples 'aborted merge requests for MWPS' do
let(:aborted_message) do
/aborted the automatic merge because target branch was updated/
end
@@ -23,7 +23,7 @@ shared_examples 'aborted merge requests for MWPS' do
end
end
-shared_examples 'maintained merge requests for MWPS' do
+RSpec.shared_examples 'maintained merge requests for MWPS' do
it 'does not cancel auto merge' do
expect(merge_request.auto_merge_enabled?).to be_truthy
expect(merge_request.notes).to be_empty
diff --git a/spec/support/shared_examples/ci/pipeline_email_examples.rb b/spec/support/shared_examples/ci/pipeline_email_shared_examples.rb
index f72d8af3c65..01e453d8fd9 100644
--- a/spec/support/shared_examples/ci/pipeline_email_examples.rb
+++ b/spec/support/shared_examples/ci/pipeline_email_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'correct pipeline information for pipelines for merge requests' do
+RSpec.shared_examples 'correct pipeline information for pipelines for merge requests' do
context 'when pipeline for merge request' do
let(:pipeline) { merge_request.all_pipelines.first }
diff --git a/spec/support/shared_examples/ci/stage_shared_examples.rb b/spec/support/shared_examples/ci/stage_shared_examples.rb
index 925974ed11e..a2849e00d27 100644
--- a/spec/support/shared_examples/ci/stage_shared_examples.rb
+++ b/spec/support/shared_examples/ci/stage_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'manual playable stage' do |stage_type|
+RSpec.shared_examples 'manual playable stage' do |stage_type|
let(:stage) { build(stage_type, status: status) }
describe '#manual_playable?' do
diff --git a/spec/support/shared_examples/controllers/application_settings_shared_examples.rb b/spec/support/shared_examples/controllers/application_settings_shared_examples.rb
index 9619451cd14..3ee5d35d008 100644
--- a/spec/support/shared_examples/controllers/application_settings_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/application_settings_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'renders correct panels' do
+RSpec.shared_examples 'renders correct panels' do
it 'renders correct action on error' do
expect_next_instance_of(ApplicationSettings::UpdateService) do |service|
allow(service).to receive(:execute).and_return(false)
diff --git a/spec/support/shared_examples/discussions_provider_shared_examples.rb b/spec/support/shared_examples/controllers/discussions_provider_shared_examples.rb
index 77cf1ac3f51..43b5fa2d204 100644
--- a/spec/support/shared_examples/discussions_provider_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/discussions_provider_shared_examples.rb
@@ -1,12 +1,10 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-shared_examples 'discussions provider' do
+RSpec.shared_examples 'discussions provider' do
it 'returns the expected discussions' do
get :discussions, params: { namespace_id: project.namespace, project_id: project, id: requested_iid }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('entities/discussions')
expect(json_response.size).to eq(expected_discussion_count)
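Editor's note, a hedged sketch (not from this commit) of how the relocated 'discussions provider' examples are consumed: the including controller spec supplies project, requested_iid and expected_discussion_count. The controller and the let values below are assumptions for illustration.

require 'spec_helper'

RSpec.describe Projects::MergeRequestsController do
  let(:user) { create(:user) }
  let(:project) { create(:project, :repository) }
  let(:merge_request) { create(:merge_request, source_project: project) }

  before do
    project.add_developer(user)
    sign_in(user)
  end

  it_behaves_like 'discussions provider' do
    let(:requested_iid) { merge_request.iid }
    let(:expected_discussion_count) { merge_request.discussions.size }
  end
end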
diff --git a/spec/support/shared_examples/controllers/environments_controller_shared_examples.rb b/spec/support/shared_examples/controllers/environments_controller_shared_examples.rb
index 3540f60bf1b..c6e880635aa 100644
--- a/spec/support/shared_examples/controllers/environments_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/environments_controller_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'successful response for #cancel_auto_stop' do
+RSpec.shared_examples 'successful response for #cancel_auto_stop' do
include GitlabRoutingHelper
context 'when request is html' do
@@ -42,7 +42,7 @@ shared_examples_for 'successful response for #cancel_auto_stop' do
end
end
-shared_examples_for 'failed response for #cancel_auto_stop' do
+RSpec.shared_examples 'failed response for #cancel_auto_stop' do
context 'when request is html' do
let(:params) { environment_params(format: :html) }
diff --git a/spec/support/shared_examples/controllers/error_tracking_shared_examples.rb b/spec/support/shared_examples/controllers/error_tracking_shared_examples.rb
index 71251f6ab51..08e5efcf63c 100644
--- a/spec/support/shared_examples/controllers/error_tracking_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/error_tracking_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'sets the polling header' do
+RSpec.shared_examples 'sets the polling header' do
subject { response.headers[Gitlab::PollingInterval::HEADER_NAME] }
it { is_expected.to eq '1000'}
diff --git a/spec/support/shared_examples/controllers/external_authorization_service_shared_examples.rb b/spec/support/shared_examples/controllers/external_authorization_service_shared_examples.rb
index d8a1ae83f61..d521106fa26 100644
--- a/spec/support/shared_examples/controllers/external_authorization_service_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/external_authorization_service_shared_examples.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-shared_examples 'disabled when using an external authorization service' do
+RSpec.shared_examples 'disabled when using an external authorization service' do
include ExternalAuthorizationServiceHelpers
it 'works when the feature is not enabled' do
@@ -16,11 +14,11 @@ shared_examples 'disabled when using an external authorization service' do
subject
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
-shared_examples 'unauthorized when external service denies access' do
+RSpec.shared_examples 'unauthorized when external service denies access' do
include ExternalAuthorizationServiceHelpers
it 'allows access when the authorization service allows it' do
@@ -37,6 +35,6 @@ shared_examples 'unauthorized when external service denies access' do
subject
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
diff --git a/spec/support/controllers/githubish_import_controller_shared_examples.rb b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
index f23812e7149..a01fa49d701 100644
--- a/spec/support/controllers/githubish_import_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
@@ -10,7 +10,7 @@ def assign_session_token(provider)
session[:"#{provider}_access_token"] = 'asdasd12345'
end
-shared_examples 'a GitHub-ish import controller: POST personal_access_token' do
+RSpec.shared_examples 'a GitHub-ish import controller: POST personal_access_token' do
let(:status_import_url) { public_send("status_import_#{provider}_url") }
it "updates access token" do
@@ -38,7 +38,7 @@ shared_examples 'a GitHub-ish import controller: POST personal_access_token' do
end
end
-shared_examples 'a GitHub-ish import controller: GET new' do
+RSpec.shared_examples 'a GitHub-ish import controller: GET new' do
let(:status_import_url) { public_send("status_import_#{provider}_url") }
it "redirects to status if we already have a token" do
@@ -57,7 +57,7 @@ shared_examples 'a GitHub-ish import controller: GET new' do
end
end
-shared_examples 'a GitHub-ish import controller: GET status' do
+RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
let(:new_import_url) { public_send("new_import_#{provider}_url") }
let(:user) { create(:user) }
let(:repo) { OpenStruct.new(login: 'vim', full_name: 'asd/vim', name: 'vim', owner: { login: 'owner' }) }
@@ -76,7 +76,7 @@ shared_examples 'a GitHub-ish import controller: GET status' do
get :status, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo.id)
expect(json_response.dig("provider_repos", 1, "id")).to eq(org_repo.id)
@@ -107,7 +107,7 @@ shared_examples 'a GitHub-ish import controller: GET status' do
get :status
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it "handles an invalid access token" do
@@ -153,7 +153,7 @@ shared_examples 'a GitHub-ish import controller: GET status' do
it 'filters list of repositories by name' do
get :status, params: { filter: 'emacs' }, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.dig("imported_projects").count).to eq(0)
expect(json_response.dig("provider_repos").count).to eq(1)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo_2.id)
@@ -173,7 +173,7 @@ shared_examples 'a GitHub-ish import controller: GET status' do
end
end
-shared_examples 'a GitHub-ish import controller: POST create' do
+RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
let(:user) { create(:user) }
let(:provider_username) { user.username }
let(:provider_user) { OpenStruct.new(login: provider_username) }
@@ -198,7 +198,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
post :create, format: :json
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns 422 response with the base error when the project could not be imported' do
@@ -212,7 +212,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
post :create, format: :json
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['errors']).to eq('Name is invalid, Path is old')
end
@@ -484,13 +484,13 @@ shared_examples 'a GitHub-ish import controller: POST create' do
post :create, params: { target_namespace: other_namespace.name }, format: :json
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
end
-shared_examples 'a GitHub-ish import controller: GET realtime_changes' do
+RSpec.shared_examples 'a GitHub-ish import controller: GET realtime_changes' do
let(:user) { create(:user) }
before do
diff --git a/spec/support/shared_examples/instance_statistics_controllers_shared_examples.rb b/spec/support/shared_examples/controllers/instance_statistics_controllers_shared_examples.rb
index 8ea307c7c61..e4d59463d93 100644
--- a/spec/support/shared_examples/instance_statistics_controllers_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/instance_statistics_controllers_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'instance statistics availability' do
+RSpec.shared_examples 'instance statistics availability' do
let(:user) { create(:user) }
before do
diff --git a/spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb b/spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb
index 26ed86bfe26..5ecc5c08bbd 100644
--- a/spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'issuable notes filter' do
+RSpec.shared_examples 'issuable notes filter' do
let(:params) do
if issuable_parent.is_a?(Project)
{ namespace_id: issuable_parent.namespace, project_id: issuable_parent, id: issuable.iid }
diff --git a/spec/support/shared_examples/issuables_list_metadata_shared_examples.rb b/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb
index 52d90b5f183..2dbaea57c44 100644
--- a/spec/support/shared_examples/issuables_list_metadata_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'issuables list meta-data' do |issuable_type, action = nil|
+RSpec.shared_examples 'issuables list meta-data' do |issuable_type, action = nil|
include ProjectForksHelper
def get_action(action, project, extra_params = {})
diff --git a/spec/support/shared_examples/controllers/issuables_requiring_filter_shared_examples.rb b/spec/support/shared_examples/controllers/issuables_requiring_filter_shared_examples.rb
index ee25df00dfb..e7514e7bb72 100644
--- a/spec/support/shared_examples/controllers/issuables_requiring_filter_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/issuables_requiring_filter_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'issuables requiring filter' do |action|
+RSpec.shared_examples 'issuables requiring filter' do |action|
it "doesn't load any issuables if no filter is set" do
expect_any_instance_of(described_class).not_to receive(:issuables_collection)
diff --git a/spec/support/shared_examples/milestone_tabs_examples.rb b/spec/support/shared_examples/controllers/milestone_tabs_shared_examples.rb
index bda4b978737..d9656824452 100644
--- a/spec/support/shared_examples/milestone_tabs_examples.rb
+++ b/spec/support/shared_examples/controllers/milestone_tabs_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'milestone tabs' do
+RSpec.shared_examples 'milestone tabs' do
def go(path, extra_params = {})
params =
case milestone
diff --git a/spec/support/shared_examples/controllers/paginated_collection_shared_examples.rb b/spec/support/shared_examples/controllers/paginated_collection_shared_examples.rb
index bd84bd1093f..620a6eaf879 100644
--- a/spec/support/shared_examples/controllers/paginated_collection_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/paginated_collection_shared_examples.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-shared_examples 'paginated collection' do
+RSpec.shared_examples 'paginated collection' do
let(:collection) { nil }
let(:last_page) { collection.page.total_pages }
let(:action) { :index }
@@ -11,7 +9,7 @@ shared_examples 'paginated collection' do
it 'renders a page number that is not out of range' do
get action, params: params.merge(page: last_page)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'redirects to last_page if page number is larger than number of pages' do
diff --git a/spec/support/shared_examples/controllers/repository_lfs_file_load_examples.rb b/spec/support/shared_examples/controllers/repository_lfs_file_load_shared_examples.rb
index 5dea17069f9..fadf428125a 100644
--- a/spec/support/shared_examples/controllers/repository_lfs_file_load_examples.rb
+++ b/spec/support/shared_examples/controllers/repository_lfs_file_load_shared_examples.rb
@@ -17,7 +17,7 @@
# it_behaves_like 'a controller that can serve LFS files', skip_lfs_disabled_tests: true do
# ...
# end
-shared_examples 'a controller that can serve LFS files' do |options = {}|
+RSpec.shared_examples 'a controller that can serve LFS files' do |options = {}|
let(:lfs_oid) { '91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897' }
let(:lfs_size) { '1575078' }
let!(:lfs_object) { create(:lfs_object, oid: lfs_oid, size: lfs_size) }
@@ -41,17 +41,15 @@ shared_examples 'a controller that can serve LFS files' do |options = {}|
it 'serves the file' do
lfs_uploader = LfsObjectUploader.new(lfs_object)
- # Notice the filename= is omitted from the disposition; this is because
- # Rails 5 will append this header in send_file
expect(controller).to receive(:send_file)
.with(
File.join(lfs_uploader.root, lfs_uploader.store_dir, lfs_uploader.filename),
filename: filename,
- disposition: %Q(attachment; filename*=UTF-8''#{filename}))
+ disposition: 'attachment')
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
context 'and lfs uses object storage' do
@@ -65,7 +63,7 @@ shared_examples 'a controller that can serve LFS files' do |options = {}|
it 'responds with redirect to file' do
subject
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
expect(response.location).to include(lfs_object.reload.file.path)
end
@@ -84,7 +82,7 @@ shared_examples 'a controller that can serve LFS files' do |options = {}|
it 'does not serve the file' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -97,7 +95,7 @@ shared_examples 'a controller that can serve LFS files' do |options = {}|
it 'does not serve the file if no members are linked to the LfsObject' do
subject
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'serves the file when the fork network root is linked to the LfsObject' do
@@ -105,7 +103,7 @@ shared_examples 'a controller that can serve LFS files' do |options = {}|
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'serves the file when the fork network member is linked to the LfsObject' do
@@ -113,7 +111,7 @@ shared_examples 'a controller that can serve LFS files' do |options = {}|
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -154,7 +152,7 @@ shared_examples 'a controller that can serve LFS files' do |options = {}|
subject
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
expect(response.header['Content-Disposition'])
.to eq('inline')
diff --git a/spec/support/controllers/sessionless_auth_controller_shared_examples.rb b/spec/support/shared_examples/controllers/sessionless_auth_controller_shared_examples.rb
index bc95fcd6b88..e21a3b2f588 100644
--- a/spec/support/controllers/sessionless_auth_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/sessionless_auth_controller_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'authenticates sessionless user' do |path, format, params|
+RSpec.shared_examples 'authenticates sessionless user' do |path, format, params|
params ||= {}
before do
@@ -20,14 +20,14 @@ shared_examples 'authenticates sessionless user' do |path, format, params|
get path, params: default_params.merge(private_token: personal_access_token.token)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(controller.current_user).to eq(user)
end
it 'does not log the user in if page is public', if: params[:public] do
get path, params: default_params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(controller.current_user).to be_nil
end
end
@@ -48,7 +48,7 @@ shared_examples 'authenticates sessionless user' do |path, format, params|
get path, params: default_params.merge(private_token: personal_access_token.token)
- expect(response).not_to have_gitlab_http_status(200)
+ expect(response).not_to have_gitlab_http_status(:ok)
end
end
@@ -62,7 +62,7 @@ shared_examples 'authenticates sessionless user' do |path, format, params|
@request.headers['PRIVATE-TOKEN'] = personal_access_token.token
get path, params: default_params
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -75,7 +75,7 @@ shared_examples 'authenticates sessionless user' do |path, format, params|
get path, params: default_params.merge(feed_token: user.feed_token)
- expect(response).to have_gitlab_http_status 200
+ expect(response).to have_gitlab_http_status(:ok)
end
end
diff --git a/spec/support/shared_examples/controllers/set_sort_order_from_user_preference_shared_examples.rb b/spec/support/shared_examples/controllers/set_sort_order_from_user_preference_shared_examples.rb
index d89eded6e69..9b5f957d489 100644
--- a/spec/support/shared_examples/controllers/set_sort_order_from_user_preference_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/set_sort_order_from_user_preference_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'set sort order from user preference' do
+RSpec.shared_examples 'set sort order from user preference' do
describe '#set_sort_order_from_user_preference' do
# There is no sorting_field defined in any CE controllers yet,
# however any other field present in user_preferences table can be used for testing.
diff --git a/spec/support/shared_examples/controllers/todos_shared_examples.rb b/spec/support/shared_examples/controllers/todos_shared_examples.rb
index 914bf506320..98fc9d9d926 100644
--- a/spec/support/shared_examples/controllers/todos_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/todos_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'todos actions' do
+RSpec.shared_examples 'todos actions' do
context 'when authorized' do
before do
sign_in(user)
@@ -12,13 +12,13 @@ shared_examples 'todos actions' do
post_create
end.to change { user.todos.count }.by(1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'returns todo path and pending count' do
post_create
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['count']).to eq 1
expect(json_response['delete_path']).to match(%r{/dashboard/todos/\d{1}})
end
@@ -31,7 +31,7 @@ shared_examples 'todos actions' do
post_create
end.to change { user.todos.count }.by(0)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'does not create todo when user is not logged in' do
@@ -39,7 +39,7 @@ shared_examples 'todos actions' do
post_create
end.to change { user.todos.count }.by(0)
- expect(response).to have_gitlab_http_status(302)
+ expect(response).to have_gitlab_http_status(:found)
end
end
end
diff --git a/spec/support/shared_examples/trackable_shared_examples.rb b/spec/support/shared_examples/controllers/trackable_shared_examples.rb
index 6ad75a14d6b..e82c27c43f5 100644
--- a/spec/support/shared_examples/trackable_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/trackable_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'a Trackable Controller' do
+RSpec.shared_examples 'a Trackable Controller' do
describe '#track_event' do
before do
sign_in user
diff --git a/spec/support/shared_examples/update_invalid_issuable.rb b/spec/support/shared_examples/controllers/update_invalid_issuable_shared_examples.rb
index b7ac08372f9..224cf45ebb3 100644
--- a/spec/support/shared_examples/update_invalid_issuable.rb
+++ b/spec/support/shared_examples/controllers/update_invalid_issuable_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'update invalid issuable' do |klass|
+RSpec.shared_examples 'update invalid issuable' do |klass|
let(:params) do
{
namespace_id: project.namespace.path,
diff --git a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
index 8962d98218a..73087befad2 100644
--- a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'handle uploads' do
+RSpec.shared_examples 'handle uploads' do
let(:user) { create(:user) }
let(:jpg) { fixture_file_upload('spec/fixtures/rails_sample.jpg', 'image/jpg') }
let(:txt) { fixture_file_upload('spec/fixtures/doc_sample.txt', 'text/plain') }
@@ -27,7 +27,7 @@ shared_examples 'handle uploads' do
it "returns an error" do
post :create, params: params, format: :json
- expect(response).to have_gitlab_http_status(422)
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
@@ -84,7 +84,7 @@ shared_examples 'handle uploads' do
show_upload
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -110,7 +110,7 @@ shared_examples 'handle uploads' do
it "responds with status 200" do
show_upload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -123,7 +123,7 @@ shared_examples 'handle uploads' do
it "responds with status 404" do
show_upload
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -135,7 +135,7 @@ shared_examples 'handle uploads' do
it "responds with status 404" do
show_upload
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -149,7 +149,7 @@ shared_examples 'handle uploads' do
it "responds with status 200" do
show_upload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -161,7 +161,7 @@ shared_examples 'handle uploads' do
it "responds with status 404" do
show_upload
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -182,7 +182,7 @@ shared_examples 'handle uploads' do
it "responds with status 200" do
show_upload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -226,7 +226,7 @@ shared_examples 'handle uploads' do
it "responds with status 200" do
show_upload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -238,7 +238,7 @@ shared_examples 'handle uploads' do
it "responds with status 404" do
show_upload
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -253,7 +253,7 @@ shared_examples 'handle uploads' do
it "responds with status 200" do
show_upload
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -265,7 +265,7 @@ shared_examples 'handle uploads' do
it "responds with status 404" do
show_upload
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -278,7 +278,7 @@ shared_examples 'handle uploads' do
it "responds with status 404" do
show_upload
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -287,7 +287,7 @@ shared_examples 'handle uploads' do
end
end
-shared_examples 'handle uploads authorize' do
+RSpec.shared_examples 'handle uploads authorize' do
describe "POST #authorize" do
context 'when a user is not authorized to upload a file' do
it 'returns 404 status' do
@@ -321,7 +321,7 @@ shared_examples 'handle uploads authorize' do
end
it 'responds with status 200' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'uses the gitlab-workhorse content type' do
diff --git a/spec/support/shared_examples/controllers/variables_shared_examples.rb b/spec/support/shared_examples/controllers/variables_shared_examples.rb
index 78666e677ef..752bdc47851 100644
--- a/spec/support/shared_examples/controllers/variables_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/variables_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'GET #show lists all variables' do
+RSpec.shared_examples 'GET #show lists all variables' do
it 'renders the variables as json' do
subject
@@ -14,7 +14,7 @@ shared_examples 'GET #show lists all variables' do
end
end
-shared_examples 'PATCH #update updates variables' do
+RSpec.shared_examples 'PATCH #update updates variables' do
let(:variable_attributes) do
{ id: variable.id,
key: variable.key,
diff --git a/spec/support/shared_examples/email_shared_examples.rb b/spec/support/shared_examples/email_shared_examples.rb
deleted file mode 100644
index 634a2504766..00000000000
--- a/spec/support/shared_examples/email_shared_examples.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-shared_examples_for 'correctly finds the mail key' do
- specify do
- expect(Gitlab::Email::Handler).to receive(:for).with(an_instance_of(Mail::Message), 'gitlabhq/gitlabhq+auth_token').and_return(handler)
-
- receiver.execute
- end
-end
diff --git a/spec/support/shared_examples/error_tracking_shared_examples.rb b/spec/support/shared_examples/error_tracking_shared_examples.rb
new file mode 100644
index 00000000000..8e7a63b69c7
--- /dev/null
+++ b/spec/support/shared_examples/error_tracking_shared_examples.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'setting sentry error data' do
+ it 'sets the sentry error data correctly' do
+ aggregate_failures 'testing the sentry error is correct' do
+ expect(error['id']).to eq sentry_error.to_global_id.to_s
+ expect(error['sentryId']).to eq sentry_error.id.to_s
+ expect(error['status']).to eq sentry_error.status.upcase
+ expect(error['firstSeen']).to eq sentry_error.first_seen
+ expect(error['lastSeen']).to eq sentry_error.last_seen
+ end
+ end
+end
+
+RSpec.shared_examples 'setting stack trace error' do
+ it 'sets the stack trace data correctly' do
+ aggregate_failures 'testing the stack trace is correct' do
+ expect(stack_trace_data['dateReceived']).to eq(sentry_stack_trace.date_received)
+ expect(stack_trace_data['issueId']).to eq(sentry_stack_trace.issue_id)
+ expect(stack_trace_data['stackTraceEntries']).to be_an_instance_of(Array)
+ expect(stack_trace_data['stackTraceEntries'].size).to eq(sentry_stack_trace.stack_trace_entries.size)
+ end
+ end
+
+ it 'sets the stack trace entry data correctly' do
+ aggregate_failures 'testing the stack trace entry is correct' do
+ stack_trace_entry = stack_trace_data['stackTraceEntries'].first
+ model_entry = sentry_stack_trace.stack_trace_entries.first
+
+ expect(stack_trace_entry['function']).to eq model_entry['function']
+ expect(stack_trace_entry['col']).to eq model_entry['colNo']
+ expect(stack_trace_entry['line']).to eq model_entry['lineNo'].to_s
+ expect(stack_trace_entry['fileName']).to eq model_entry['filename']
+ end
+ end
+end
diff --git a/spec/support/shared_examples/evidence_updated_exposed_fields.rb b/spec/support/shared_examples/evidence_updated_exposed_fields.rb
deleted file mode 100644
index 2a02fdd7666..00000000000
--- a/spec/support/shared_examples/evidence_updated_exposed_fields.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-shared_examples 'updated exposed field' do
- it 'creates another Evidence object' do
- model.send("#{updated_field}=", updated_value)
-
- expect(model.evidence_summary_keys).to include(updated_field)
- expect { model.save! }.to change(Evidence, :count).by(1)
- expect(updated_json_field).to eq(updated_value)
- end
-end
-
-shared_examples 'updated non-exposed field' do
- it 'does not create any Evidence object' do
- model.send("#{updated_field}=", updated_value)
-
- expect(model.evidence_summary_keys).not_to include(updated_field)
- expect { model.save! }.not_to change(Evidence, :count)
- end
-end
-
-shared_examples 'updated field on non-linked entity' do
- it 'does not create any Evidence object' do
- model.send("#{updated_field}=", updated_value)
-
- expect(model.evidence_summary_keys).to be_empty
- expect { model.save! }.not_to change(Evidence, :count)
- end
-end
diff --git a/spec/support/shared_examples/fast_destroy_all.rb b/spec/support/shared_examples/fast_destroy_all.rb
deleted file mode 100644
index a64259c03f2..00000000000
--- a/spec/support/shared_examples/fast_destroy_all.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-shared_examples_for 'fast destroyable' do
- describe 'Forbid #destroy and #destroy_all' do
- it 'does not delete database rows and associted external data' do
- expect(external_data_counter).to be > 0
- expect(subjects.count).to be > 0
-
- expect { subjects.first.destroy }.to raise_error('`destroy` and `destroy_all` are forbidden. Please use `fast_destroy_all`')
- expect { subjects.destroy_all }.to raise_error('`destroy` and `destroy_all` are forbidden. Please use `fast_destroy_all`') # rubocop: disable DestroyAll
-
- expect(subjects.count).to be > 0
- expect(external_data_counter).to be > 0
- end
- end
-
- describe '.fast_destroy_all' do
- it 'deletes database rows and associted external data' do
- expect(external_data_counter).to be > 0
- expect(subjects.count).to be > 0
-
- expect { subjects.fast_destroy_all }.not_to raise_error
-
- expect(subjects.count).to eq(0)
- expect(external_data_counter).to eq(0)
- end
- end
-
- describe '.use_fast_destroy' do
- it 'performs cascading delete with fast_destroy_all' do
- expect(external_data_counter).to be > 0
- expect(subjects.count).to be > 0
-
- expect { parent.destroy }.not_to raise_error
-
- expect(subjects.count).to eq(0)
- expect(external_data_counter).to eq(0)
- end
- end
-end
diff --git a/spec/support/shared_examples/features/archive_download_buttons_shared_examples.rb b/spec/support/shared_examples/features/archive_download_buttons_shared_examples.rb
index 21c32c9c04a..73acc7a39eb 100644
--- a/spec/support/shared_examples/features/archive_download_buttons_shared_examples.rb
+++ b/spec/support/shared_examples/features/archive_download_buttons_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'archive download buttons' do
+RSpec.shared_examples 'archive download buttons' do
let(:path_to_visit) { project_path(project) }
let(:ref) { project.default_branch }
diff --git a/spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb b/spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb
index f24e47f4638..fb3b17d05ee 100644
--- a/spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb
+++ b/spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'comment on merge request file' do
+RSpec.shared_examples 'comment on merge request file' do
it 'adds a comment' do
click_diff_line(find("[id='#{sample_commit.line_code}']"))
diff --git a/spec/support/shared_examples/dirty_submit_form_shared_examples.rb b/spec/support/shared_examples/features/dirty_submit_form_shared_examples.rb
index 60c8899d349..cb81eeba236 100644
--- a/spec/support/shared_examples/dirty_submit_form_shared_examples.rb
+++ b/spec/support/shared_examples/features/dirty_submit_form_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'dirty submit form' do |selector_args|
+RSpec.shared_examples 'dirty submit form' do |selector_args|
selectors = selector_args.is_a?(Array) ? selector_args : [selector_args]
def expect_disabled_state(form, submit_selector, is_disabled = true)
diff --git a/spec/support/features/discussion_comments_shared_example.rb b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
index ea13e91860a..81433d124c9 100644
--- a/spec/support/features/discussion_comments_shared_example.rb
+++ b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'thread comments' do |resource_name|
+RSpec.shared_examples 'thread comments' do |resource_name|
let(:form_selector) { '.js-main-target-form' }
let(:dropdown_selector) { "#{form_selector} .comment-type-dropdown" }
let(:toggle_selector) { "#{dropdown_selector} .dropdown-toggle" }
@@ -8,44 +8,40 @@ shared_examples 'thread comments' do |resource_name|
let(:submit_selector) { "#{form_selector} .js-comment-submit-button" }
let(:close_selector) { "#{form_selector} .btn-comment-and-close" }
let(:comments_selector) { '.timeline > .note.timeline-entry' }
+ let(:comment) { 'My comment' }
- it 'clicking "Comment" will post a comment', :quarantine do
+ it 'clicking "Comment" will post a comment' do
expect(page).to have_selector toggle_selector
- find("#{form_selector} .note-textarea").send_keys('a')
+ find("#{form_selector} .note-textarea").send_keys(comment)
- find(submit_selector).click
+ click_button 'Comment'
- wait_for_requests
+ expect(page).to have_content(comment)
- find(comments_selector, match: :first)
new_comment = all(comments_selector).last
- expect(new_comment).to have_content 'a'
expect(new_comment).not_to have_selector '.discussion'
end
if resource_name == 'issue'
it "clicking 'Comment & close #{resource_name}' will post a comment and close the #{resource_name}" do
- find("#{form_selector} .note-textarea").send_keys('a')
+ find("#{form_selector} .note-textarea").send_keys(comment)
- find(close_selector).click
- wait_for_requests
+ click_button 'Comment & close issue'
- find(comments_selector, match: :first)
- find("#{comments_selector}.system-note")
- entries = all(comments_selector)
- close_note = entries.last
- new_comment = entries[-2]
+ expect(page).to have_content(comment)
+ expect(page).to have_content "@#{user.username} closed"
+
+ new_comment = all(comments_selector).last
- expect(close_note).to have_content 'closed'
expect(new_comment).not_to have_selector '.discussion'
end
end
describe 'when the toggle is clicked' do
before do
- find("#{form_selector} .note-textarea").send_keys('a')
+ find("#{form_selector} .note-textarea").send_keys(comment)
find(toggle_selector).click
end
@@ -153,10 +149,11 @@ shared_examples 'thread comments' do |resource_name|
end
it 'clicking "Start thread" will post a thread' do
+ expect(page).to have_content(comment)
+
new_comment = all(comments_selector).last
- expect(new_comment).to have_content 'a'
- expect(new_comment).to have_selector '.discussion'
+ expect(new_comment).to have_selector('.discussion')
end
if resource_name =~ /(issue|merge request)/
@@ -208,15 +205,13 @@ shared_examples 'thread comments' do |resource_name|
if resource_name == 'issue'
it "clicking 'Start thread & close #{resource_name}' will post a thread and close the #{resource_name}" do
- find(close_selector).click
+ click_button 'Start thread & close issue'
- find(comments_selector, match: :first)
- find("#{comments_selector}.system-note")
- entries = all(comments_selector)
- close_note = entries.last
- new_discussion = entries[-2]
+ expect(page).to have_content(comment)
+ expect(page).to have_content "@#{user.username} closed"
+
+ new_discussion = all(comments_selector)[-2]
- expect(close_note).to have_content 'closed'
expect(new_discussion).to have_selector '.discussion'
end
end
@@ -269,19 +264,21 @@ shared_examples 'thread comments' do |resource_name|
end
end
- it 'has "Comment" selected when opening the menu' do
+ it 'has "Comment" selected when opening the menu', quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/196825' do
find(toggle_selector).click
find("#{menu_selector} li", match: :first)
items = all("#{menu_selector} li")
- expect(items.first).to have_content 'Comment'
- expect(items.first).to have_selector '.fa-check'
- expect(items.first['class']).to match 'droplab-item-selected'
+ aggregate_failures do
+ expect(items.first).to have_content 'Comment'
+ expect(items.first).to have_selector '.fa-check'
+ expect(items.first['class']).to match 'droplab-item-selected'
- expect(items.last).to have_content 'Start thread'
- expect(items.last).not_to have_selector '.fa-check'
- expect(items.last['class']).not_to match 'droplab-item-selected'
+ expect(items.last).to have_content 'Start thread'
+ expect(items.last).not_to have_selector '.fa-check'
+ expect(items.last['class']).not_to match 'droplab-item-selected'
+ end
end
end
end
diff --git a/spec/support/shared_examples/features/error_tracking_shared_example.rb b/spec/support/shared_examples/features/error_tracking_shared_example.rb
new file mode 100644
index 00000000000..edc1f42f646
--- /dev/null
+++ b/spec/support/shared_examples/features/error_tracking_shared_example.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+shared_examples 'error tracking index page' do
+ it 'renders the error index page' do
+ within('div.js-title-container') do
+ expect(page).to have_content(project.namespace.name)
+ expect(page).to have_content(project.name)
+ end
+
+ within('div.error-list') do
+ expect(page).to have_content('Open errors')
+ expect(page).to have_content('Events')
+ expect(page).to have_content('Users')
+ expect(page).to have_content('Last Seen')
+ end
+ end
+
+ it 'loads the error show page on click' do
+ click_on issues_response[0]['title']
+
+ wait_for_requests
+
+ expect(page).to have_content('Error Details')
+ end
+
+ it 'renders the error index data' do
+ within('div.error-list') do
+ expect(page).to have_content(issues_response[0]['title'])
+ expect(page).to have_content(issues_response[0]['count'].to_s)
+ expect(page).to have_content(issues_response[0]['last_seen'])
+ expect(page).to have_content('1 year ago')
+ end
+ end
+end
+
+shared_examples 'expanded stack trace context' do |selected_line: nil, expected_line: 1|
+ it 'expands the stack trace context' do
+ within('div.stacktrace') do
+ find("div.file-holder:nth-child(#{selected_line}) svg.ic-chevron-right").click if selected_line
+
+ expanded_line = find("div.file-holder:nth-child(#{expected_line})")
+ expect(expanded_line).to have_css('svg.ic-chevron-down')
+
+ event_response['entries'][0]['data']['values'][0]['stacktrace']['frames'][-expected_line]['context'].each do |context|
+ expect(page).to have_content(context[0])
+ end
+ end
+ end
+end
+
+shared_examples 'error tracking show page' do
+ it 'renders the error details' do
+ release_short_version = issue_response['firstRelease']['shortVersion']
+
+ expect(page).to have_content('1 month ago by raven.scripts.runner in main')
+ expect(page).to have_content(issue_response['metadata']['title'])
+ expect(page).to have_content('level: error')
+ expect(page).to have_content('Error Details')
+ expect(page).to have_content('GitLab Issue: https://gitlab.com/gitlab-org/gitlab/issues/1')
+ expect(page).to have_content("Sentry event: https://sentrytest.gitlab.com/sentry-org/sentry-project/issues/#{issue_id}")
+ expect(page).to have_content("First seen: 1 year ago (#{formatted_issue_seen}) Release: #{release_short_version}")
+ expect(page).to have_content('Events: 1')
+ expect(page).to have_content('Users: 0')
+ end
+
+ it 'renders the stack trace heading' do
+ expect(page).to have_content('Stack trace')
+ end
+
+ it 'renders the stack trace' do
+ event_response['entries'][0]['data']['values'][0]['stacktrace']['frames'].each do |frame|
+ expect(frame['filename']).not_to be_nil
+ expect(page).to have_content(frame['filename'])
+ end
+ end
+
+ # The first line is expanded by default if no line is selected
+ it_behaves_like 'expanded stack trace context', selected_line: nil, expected_line: 1
+ it_behaves_like 'expanded stack trace context', selected_line: 8, expected_line: 8
+end
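For orientation, a minimal sketch of a feature spec that could consume these examples; the Sentry stubbing, helper names and the issues_response shape here are assumptions based on what the examples read, not taken from this patch:

# Sketch only: a real spec stubs the Sentry API and visits the error tracking
# page before including the shared examples.
describe 'error tracking index', :js do
  let(:project) { create(:project) }
  let(:issues_response) do
    # illustrative shape; the index examples only read 'title', 'count' and 'last_seen'
    [{ 'title' => 'NoMethodError', 'count' => 3, 'last_seen' => 1.year.ago.iso8601 }]
  end

  before do
    sign_in(project.owner)
    # stub the Sentry client with issues_response and visit the project's
    # error tracking page here
  end

  it_behaves_like 'error tracking index page'
end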
diff --git a/spec/support/shared_examples/features/issuable_sidebar_shared_examples.rb b/spec/support/shared_examples/features/issuable_sidebar_shared_examples.rb
index 09a48533ee3..a112ee568f9 100644
--- a/spec/support/shared_examples/features/issuable_sidebar_shared_examples.rb
+++ b/spec/support/shared_examples/features/issuable_sidebar_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'issue sidebar stays collapsed on mobile' do
+RSpec.shared_examples 'issue sidebar stays collapsed on mobile' do
before do
resize_screen_xs
end
diff --git a/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb b/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb
index 3da80541072..e0d9b828992 100644
--- a/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb
+++ b/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'issuable user dropdown behaviors' do
+RSpec.shared_examples 'issuable user dropdown behaviors' do
include FilteredSearchHelpers
before do
diff --git a/spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb b/spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb
index bab7963f06f..19a5750cf6d 100644
--- a/spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb
+++ b/spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'multiple assignees merge request' do |action, save_button_title|
+RSpec.shared_examples 'multiple assignees merge request' do |action, save_button_title|
it "#{action} a MR with multiple assignees", :js do
find('.js-assignee-search').click
page.within '.dropdown-menu-user' do
diff --git a/spec/support/shared_examples/features/navbar_shared_examples.rb b/spec/support/shared_examples/features/navbar_shared_examples.rb
new file mode 100644
index 00000000000..91a4048fa7c
--- /dev/null
+++ b/spec/support/shared_examples/features/navbar_shared_examples.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'verified navigation bar' do
+ let(:expected_structure) do
+ structure.compact!
+ structure.each { |s| s[:nav_sub_items].compact! }
+ structure
+ end
+
+ it 'renders correctly' do
+ current_structure = page.all('.sidebar-top-level-items > li', class: ['!hidden']).map do |item|
+ nav_item = item.find_all('a').first.text.gsub(/\s+\d+$/, '') # remove counts at the end
+
+ nav_sub_items = item.all('.sidebar-sub-level-items > li', class: ['!fly-out-top-item']).map do |list_item|
+ list_item.all('a').first.text
+ end
+
+ { nav_item: nav_item, nav_sub_items: nav_sub_items }
+ end
+
+ expect(current_structure).to eq(expected_structure)
+ end
+end
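A consuming spec defines `structure` and visits a page with the sidebar before including the example; a minimal sketch with illustrative item names:

# Sketch: nil entries stand for conditionally hidden items, which is why the
# shared example compacts the expected structure before comparing.
describe 'project navigation bar' do
  let(:structure) do
    [
      { nav_item: 'Project overview', nav_sub_items: %w[Details Activity Releases] },
      { nav_item: 'Repository',       nav_sub_items: ['Files', 'Commits', nil] }
    ]
  end

  before do
    # sign in and visit project_path(project) here
  end

  it_behaves_like 'verified navigation bar'
end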
diff --git a/spec/support/shared_examples/features/project_features_apply_to_issuables_shared_examples.rb b/spec/support/shared_examples/features/project_features_apply_to_issuables_shared_examples.rb
index 51559c0b110..d410653ca43 100644
--- a/spec/support/shared_examples/features/project_features_apply_to_issuables_shared_examples.rb
+++ b/spec/support/shared_examples/features/project_features_apply_to_issuables_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'project features apply to issuables' do |klass|
+RSpec.shared_examples 'project features apply to issuables' do |klass|
let(:described_class) { klass }
let(:group) { create(:group) }
diff --git a/spec/support/shared_examples/project_list_shared_examples.rb b/spec/support/shared_examples/features/project_list_shared_examples.rb
index 675d489fcab..a15ba27b4ca 100644
--- a/spec/support/shared_examples/project_list_shared_examples.rb
+++ b/spec/support/shared_examples/features/project_list_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'shows public projects' do
+RSpec.shared_examples 'shows public projects' do
it 'shows projects' do
expect(page).to have_content(public_project.title)
expect(page).not_to have_content(internal_project.title)
@@ -9,7 +9,7 @@ shared_examples 'shows public projects' do
end
end
-shared_examples 'shows public and internal projects' do
+RSpec.shared_examples 'shows public and internal projects' do
it 'shows projects' do
expect(page).to have_content(public_project.title)
expect(page).to have_content(internal_project.title)
diff --git a/spec/support/shared_examples/features/protected_branches_access_control_ce.rb b/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
index db83d6f0793..65db082505a 100644
--- a/spec/support/shared_examples/features/protected_branches_access_control_ce.rb
+++ b/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples "protected branches > access control > CE" do
+RSpec.shared_examples "protected branches > access control > CE" do
ProtectedRefAccess::HUMAN_ACCESS_LEVELS.each do |(access_type_id, access_type_name)|
it "allows creating protected branches that #{access_type_name} can push to" do
visit project_protected_branches_path(project)
diff --git a/spec/support/features/reportable_note_shared_examples.rb b/spec/support/shared_examples/features/reportable_note_shared_examples.rb
index 2f9208e6ed5..bdaa375721f 100644
--- a/spec/support/features/reportable_note_shared_examples.rb
+++ b/spec/support/shared_examples/features/reportable_note_shared_examples.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-shared_examples 'reportable note' do |type|
+RSpec.shared_examples 'reportable note' do |type|
include MobileHelpers
include NotesHelper
diff --git a/spec/support/features/resolving_discussions_in_issues_shared_examples.rb b/spec/support/shared_examples/features/resolving_discussions_in_issues_shared_examples.rb
index d4f8a87d0d8..06127f2ed8c 100644
--- a/spec/support/features/resolving_discussions_in_issues_shared_examples.rb
+++ b/spec/support/shared_examples/features/resolving_discussions_in_issues_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'creating an issue for a thread' do
+RSpec.shared_examples 'creating an issue for a thread' do
it 'shows an issue with the title filled in' do
title_field = page.find_field('issue[title]')
diff --git a/spec/support/features/rss_shared_examples.rb b/spec/support/shared_examples/features/rss_shared_examples.rb
index bbe793a81bc..42df88ec08e 100644
--- a/spec/support/features/rss_shared_examples.rb
+++ b/spec/support/shared_examples/features/rss_shared_examples.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-shared_examples "an autodiscoverable RSS feed with current_user's feed token" do
+RSpec.shared_examples "an autodiscoverable RSS feed with current_user's feed token" do
it "has an RSS autodiscovery link tag with current_user's feed token" do
expect(page).to have_css("link[type*='atom+xml'][href*='feed_token=#{user.feed_token}']", visible: false)
end
end
-shared_examples "it has an RSS button with current_user's feed token" do
+RSpec.shared_examples "it has an RSS button with current_user's feed token" do
it "shows the RSS button with current_user's feed token" do
expect(page)
.to have_css("a:has(.fa-rss)[href*='feed_token=#{user.feed_token}']")
@@ -14,13 +14,13 @@ shared_examples "it has an RSS button with current_user's feed token" do
end
end
-shared_examples "an autodiscoverable RSS feed without a feed token" do
+RSpec.shared_examples "an autodiscoverable RSS feed without a feed token" do
it "has an RSS autodiscovery link tag without a feed token" do
expect(page).to have_css("link[type*='atom+xml']:not([href*='feed_token'])", visible: false)
end
end
-shared_examples "it has an RSS button without a feed token" do
+RSpec.shared_examples "it has an RSS button without a feed token" do
it "shows the RSS button without a feed token" do
expect(page)
.to have_css("a:has(.fa-rss):not([href*='feed_token'])")
diff --git a/spec/support/shared_examples/features/search_shared_examples.rb b/spec/support/shared_examples/features/search_shared_examples.rb
index e27d6700cbf..c043b011e66 100644
--- a/spec/support/shared_examples/features/search_shared_examples.rb
+++ b/spec/support/shared_examples/features/search_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'top right search form' do
+RSpec.shared_examples 'top right search form' do
it 'does not show top right search form' do
expect(page).not_to have_selector('.search')
end
diff --git a/spec/support/shared_examples/showing_user_status_shared_examples.rb b/spec/support/shared_examples/features/showing_user_status_shared_examples.rb
index eef769de2fc..7906fc1f399 100644
--- a/spec/support/shared_examples/showing_user_status_shared_examples.rb
+++ b/spec/support/shared_examples/features/showing_user_status_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'showing user status' do
+RSpec.shared_examples 'showing user status' do
let!(:status) { create(:user_status, user: user_with_status, emoji: 'smirk', message: 'Authoring this object') }
it 'shows the status' do
diff --git a/spec/support/shared_examples/features/snippets_shared_examples.rb b/spec/support/shared_examples/features/snippets_shared_examples.rb
new file mode 100644
index 00000000000..1c8a9714bdf
--- /dev/null
+++ b/spec/support/shared_examples/features/snippets_shared_examples.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+# These shared examples expect a `snippets` array of snippets
+RSpec.shared_examples 'paginated snippets' do |remote: false|
+ it "is limited to #{Snippet.default_per_page} items per page" do
+ expect(page.all('.snippets-list-holder .snippet-row').count).to eq(Snippet.default_per_page)
+ end
+
+ context 'clicking on the link to the second page' do
+ before do
+ click_link('2')
+ wait_for_requests if remote
+ end
+
+ it 'shows the remaining snippets' do
+ remaining_snippets_count = [snippets.size - Snippet.default_per_page, Snippet.default_per_page].min
+ expect(page).to have_selector('.snippets-list-holder .snippet-row', count: remaining_snippets_count)
+ end
+ end
+end
+
+RSpec.shared_examples 'tabs with counts' do
+ let(:tabs) { page.all('.snippet-scope-menu li') }
+
+ it 'shows a tab for All snippets and count' do
+ tab = tabs[0]
+
+ expect(tab.text).to include('All')
+ expect(tab.find('.badge').text).to eq(counts[:all])
+ end
+
+ it 'shows a tab for Private snippets and count' do
+ tab = tabs[1]
+
+ expect(tab.text).to include('Private')
+ expect(tab.find('.badge').text).to eq(counts[:private])
+ end
+
+ it 'shows a tab for Internal snippets and count' do
+ tab = tabs[2]
+
+ expect(tab.text).to include('Internal')
+ expect(tab.find('.badge').text).to eq(counts[:internal])
+ end
+
+ it 'shows a tab for Public snippets and count' do
+ tab = tabs[3]
+
+ expect(tab.text).to include('Public')
+ expect(tab.find('.badge').text).to eq(counts[:public])
+ end
+end
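As the top-of-file comment says, the consuming spec supplies `snippets`; a sketch of a dashboard snippets spec using the pagination example (factory and path names assumed from GitLab conventions):

# Sketch only: one full page plus one extra snippet so the pagination link exists.
describe 'dashboard snippets', :js do
  let(:user)      { create(:user) }
  let!(:snippets) { create_list(:personal_snippet, Snippet.default_per_page + 1, :public, author: user) }

  before do
    sign_in(user)
    visit dashboard_snippets_path
  end

  it_behaves_like 'paginated snippets'
end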
diff --git a/spec/support/features/variable_list_shared_examples.rb b/spec/support/shared_examples/features/variable_list_shared_examples.rb
index 0f8ad2c6536..4fd4d42003f 100644
--- a/spec/support/features/variable_list_shared_examples.rb
+++ b/spec/support/shared_examples/features/variable_list_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'variable list' do
+RSpec.shared_examples 'variable list' do
it 'shows list of variables' do
page.within('.js-ci-variable-list-section') do
expect(first('.js-ci-variable-input-key').value).to eq(variable.key)
diff --git a/spec/support/shared_examples/wiki_file_attachments_examples.rb b/spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb
index 22fbfb48928..36d91d323b5 100644
--- a/spec/support/shared_examples/wiki_file_attachments_examples.rb
+++ b/spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb
@@ -3,7 +3,7 @@
# Requires a context containing:
# project
-shared_examples 'wiki file attachments' do
+RSpec.shared_examples 'wiki file attachments' do
include DropzoneHelper
context 'uploading attachments', :js do
diff --git a/spec/support/shared_examples/finders/assignees_filter_shared_examples.rb b/spec/support/shared_examples/finders/assignees_filter_shared_examples.rb
index f1df1052ef2..96b05db4cd9 100644
--- a/spec/support/shared_examples/finders/assignees_filter_shared_examples.rb
+++ b/spec/support/shared_examples/finders/assignees_filter_shared_examples.rb
@@ -1,30 +1,30 @@
# frozen_string_literal: true
-shared_examples 'assignee ID filter' do
+RSpec.shared_examples 'assignee ID filter' do
it 'returns issuables assigned to that user' do
expect(issuables).to contain_exactly(*expected_issuables)
end
end
-shared_examples 'assignee NOT ID filter' do
+RSpec.shared_examples 'assignee NOT ID filter' do
it 'returns issuables not assigned to that user' do
expect(issuables).to contain_exactly(*expected_issuables)
end
end
-shared_examples 'assignee username filter' do
+RSpec.shared_examples 'assignee username filter' do
it 'returns issuables assigned to those users' do
expect(issuables).to contain_exactly(*expected_issuables)
end
end
-shared_examples 'assignee NOT username filter' do
+RSpec.shared_examples 'assignee NOT username filter' do
it 'returns issuables not assigned to those users' do
expect(issuables).to contain_exactly(*expected_issuables)
end
end
-shared_examples 'no assignee filter' do
+RSpec.shared_examples 'no assignee filter' do
let(:params) { { assignee_id: 'None' } }
it 'returns issuables not assigned to any assignee' do
@@ -38,7 +38,7 @@ shared_examples 'no assignee filter' do
end
end
-shared_examples 'any assignee filter' do
+RSpec.shared_examples 'any assignee filter' do
context '' do
let(:params) { { assignee_id: 'Any' } }
diff --git a/spec/support/shared_examples/finders/finder_with_external_authorization_enabled.rb b/spec/support/shared_examples/finders/finder_with_external_authorization_enabled_shared_examples.rb
index b8b0079e36d..2671462ea4a 100644
--- a/spec/support/shared_examples/finders/finder_with_external_authorization_enabled.rb
+++ b/spec/support/shared_examples/finders/finder_with_external_authorization_enabled_shared_examples.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-shared_examples 'a finder with external authorization service' do
+RSpec.shared_examples 'a finder with external authorization service' do
include ExternalAuthorizationServiceHelpers
let(:user) { create(:user) }
diff --git a/spec/support/shared_examples/snippet_visibility_shared_examples.rb b/spec/support/shared_examples/finders/snippet_visibility_shared_examples.rb
index e2089ee623a..98ab141ab26 100644
--- a/spec/support/shared_examples/snippet_visibility_shared_examples.rb
+++ b/spec/support/shared_examples/finders/snippet_visibility_shared_examples.rb
@@ -234,8 +234,8 @@ RSpec.shared_examples 'snippet visibility' do
end
context "For #{params[:project_type]} project and #{params[:user_type]} users" do
- it 'agrees with the read_project_snippet policy' do
- expect(can?(user, :read_project_snippet, snippet)).to eq(outcome)
+ it 'agrees with the read_snippet policy' do
+ expect(can?(user, :read_snippet, snippet)).to eq(outcome)
end
it 'returns proper outcome' do
@@ -297,8 +297,8 @@ RSpec.shared_examples 'snippet visibility' do
let!(:snippet) { create(:personal_snippet, visibility_level: snippet_visibility, author: author) }
context "For personal and #{params[:snippet_visibility]} snippets with #{params[:user_type]} user" do
- it 'agrees with read_personal_snippet policy' do
- expect(can?(user, :read_personal_snippet, snippet)).to eq(outcome)
+ it 'agrees with read_snippet policy' do
+ expect(can?(user, :read_snippet, snippet)).to eq(outcome)
end
it 'returns proper outcome' do
diff --git a/spec/support/shared_examples/graphql/connection_paged_nodes.rb b/spec/support/shared_examples/graphql/connection_paged_nodes_shared_examples.rb
index 93de7f619f7..93de7f619f7 100644
--- a/spec/support/shared_examples/graphql/connection_paged_nodes.rb
+++ b/spec/support/shared_examples/graphql/connection_paged_nodes_shared_examples.rb
diff --git a/spec/support/shared_examples/graphql/failure_to_find_anything.rb b/spec/support/shared_examples/graphql/failure_to_find_anything_shared_examples.rb
index b2533c992c1..d27fdb1aa30 100644
--- a/spec/support/shared_examples/graphql/failure_to_find_anything.rb
+++ b/spec/support/shared_examples/graphql/failure_to_find_anything_shared_examples.rb
@@ -1,12 +1,10 @@
# frozen_string_literal: true
-require 'spec_helper'
-
# Shared example for legal queries that are expected to return nil.
# Requires the following let bindings to be defined:
# - post_query: action to send the query
# - path: array of keys from query root to the result
-shared_examples 'a failure to find anything' do
+RSpec.shared_examples 'a failure to find anything' do
it 'finds nothing' do
post_query
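A consuming GraphQL request spec wires the two required bindings roughly like this (a sketch; graphql_query_for and post_graphql are the existing GraphqlHelpers, the query itself is illustrative):

# Sketch: querying a project the user cannot see should surface as nil data.
it_behaves_like 'a failure to find anything' do
  let(:current_user) { create(:user) }
  let(:query)        { graphql_query_for('project', 'fullPath' => 'made/up') }
  let(:post_query)   { post_graphql(query, current_user: current_user) }
  let(:path)         { %w[project] }
end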
diff --git a/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb b/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
index 9a60825855f..e1dd98814f1 100644
--- a/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
-shared_context 'exposing regular notes on a noteable in GraphQL' do
+RSpec.shared_context 'exposing regular notes on a noteable in GraphQL' do
include GraphqlHelpers
let(:note) do
diff --git a/spec/support/shared_examples/uses_gitlab_url_blocker_examples.rb b/spec/support/shared_examples/initializers/uses_gitlab_url_blocker_shared_examples.rb
index 59c119e6d96..afa495fc9a4 100644
--- a/spec/support/shared_examples/uses_gitlab_url_blocker_examples.rb
+++ b/spec/support/shared_examples/initializers/uses_gitlab_url_blocker_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'a request using Gitlab::UrlBlocker' do
+RSpec.shared_examples 'a request using Gitlab::UrlBlocker' do
# Written to test internal patches against 3rd party libraries
#
# Expects the following to be available in the example contexts:
diff --git a/spec/support/shared_examples/legacy_path_redirect_shared_examples.rb b/spec/support/shared_examples/legacy_path_redirect_shared_examples.rb
deleted file mode 100644
index 22e5698825d..00000000000
--- a/spec/support/shared_examples/legacy_path_redirect_shared_examples.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-shared_examples 'redirecting a legacy path' do |source, target|
- include RSpec::Rails::RequestExampleGroup
-
- it "redirects #{source} to #{target} when the resource does not exist" do
- expect(get(source)).to redirect_to(target)
- end
-
- it "does not redirect #{source} to #{target} when the resource exists" do
- resource
-
- expect(get(source)).not_to redirect_to(target)
- end
-end
-
-shared_examples 'redirecting a legacy project path' do |source, target|
- include RSpec::Rails::RequestExampleGroup
-
- it "redirects #{source} to #{target}" do
- expect(get(source)).to redirect_to(target)
- end
-end
diff --git a/spec/support/shared_examples/reference_parser_shared_examples.rb b/spec/support/shared_examples/lib/banzai/reference_parser_shared_examples.rb
index d903c0f10e0..d903c0f10e0 100644
--- a/spec/support/shared_examples/reference_parser_shared_examples.rb
+++ b/spec/support/shared_examples/lib/banzai/reference_parser_shared_examples.rb
diff --git a/spec/support/shared_examples/lib/gitlab/background_migration/backfill_project_repositories_examples.rb b/spec/support/shared_examples/lib/gitlab/background_migration/backfill_project_repositories_shared_examples.rb
index 2cbc0c2bdf2..459d4f5cd3e 100644
--- a/spec/support/shared_examples/lib/gitlab/background_migration/backfill_project_repositories_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/background_migration/backfill_project_repositories_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'backfill migration for project repositories' do |storage|
+RSpec.shared_examples 'backfill migration for project repositories' do |storage|
describe '#perform' do
let(:storage_versions) { storage == :legacy ? [nil, 0] : [1, 2] }
let(:storage_version) { storage_versions.first }
diff --git a/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
new file mode 100644
index 00000000000..f90e1a1ebab
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+shared_examples 'resource mentions migration' do |migration_class, resource_class|
+ it 'migrates resource mentions' do
+ join = migration_class::JOIN
+ conditions = migration_class::QUERY_CONDITIONS
+
+ expect do
+ subject.perform(resource_class.name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
+ end.to change { user_mentions.count }.by(1)
+
+ user_mention = user_mentions.last
+ expect(user_mention.mentioned_users_ids.sort).to eq(mentioned_users.pluck(:id).sort)
+ expect(user_mention.mentioned_groups_ids.sort).to eq([group.id])
+ expect(user_mention.mentioned_groups_ids.sort).not_to include(inaccessible_group.id)
+
+ # check that performing the same job twice does not fail and does not change counts
+ expect do
+ subject.perform(resource_class.name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
+ end.to change { user_mentions.count }.by(0)
+ end
+end
+
+shared_examples 'resource notes mentions migration' do |migration_class, resource_class|
+ before do
+ note1.becomes(Note).save!
+ note2.becomes(Note).save!
+ note3.becomes(Note).save!
+ # note4.becomes(Note).save(validate: false)
+ end
+
+ it 'migrates mentions from note' do
+ join = migration_class::JOIN
+ conditions = migration_class::QUERY_CONDITIONS
+
+ # there are 4 notes for each noteable_type, but one does not have mentions and
+ # another one's noteable_id points to a nonexistent resource
+ expect(notes.where(noteable_type: resource_class.to_s).count).to eq 4
+
+ expect do
+ subject.perform(resource_class.name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
+ end.to change { user_mentions.count }.by(2)
+
+ # check that the user_mention for regular note is created
+ user_mention = user_mentions.first
+ expect(Note.find(user_mention.note_id).system).to be false
+ expect(user_mention.mentioned_users_ids.sort).to eq(users.pluck(:id).sort)
+ expect(user_mention.mentioned_groups_ids.sort).to eq([group.id])
+ expect(user_mention.mentioned_groups_ids.sort).not_to include(inaccessible_group.id)
+
+ # check that the user_mention for system note is created
+ user_mention = user_mentions.second
+ expect(Note.find(user_mention.note_id).system).to be true
+ expect(user_mention.mentioned_users_ids.sort).to eq(users.pluck(:id).sort)
+ expect(user_mention.mentioned_groups_ids.sort).to eq([group.id])
+ expect(user_mention.mentioned_groups_ids.sort).not_to include(inaccessible_group.id)
+
+ # check that performing the same job twice does not fail and does not change counts
+ expect do
+ subject.perform(resource_class.name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
+ end.to change { user_mentions.count }.by(0)
+ end
+end
+
+shared_examples 'schedules resource mentions migration' do |resource_class, is_for_notes|
+ it 'schedules background migrations' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ migration = described_class::MIGRATION
+ join = described_class::JOIN
+ conditions = described_class::QUERY_CONDITIONS
+
+ expect(migration).to be_scheduled_delayed_migration(2.minutes, resource_class.name, join, conditions, is_for_notes, resource1.id, resource1.id)
+ expect(migration).to be_scheduled_delayed_migration(4.minutes, resource_class.name, join, conditions, is_for_notes, resource2.id, resource2.id)
+ expect(migration).to be_scheduled_delayed_migration(6.minutes, resource_class.name, join, conditions, is_for_notes, resource3.id, resource3.id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq 3
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/ci_trace_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
index 441d3f4ccb9..db5e9461f3f 100644
--- a/spec/support/shared_examples/ci_trace_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'common trace features' do
+RSpec.shared_examples 'common trace features' do
describe '#html' do
before do
trace.set("12\n34")
@@ -284,7 +284,7 @@ shared_examples_for 'common trace features' do
end
end
-shared_examples_for 'trace with disabled live trace feature' do
+RSpec.shared_examples 'trace with disabled live trace feature' do
it_behaves_like 'common trace features'
describe '#read' do
@@ -618,7 +618,7 @@ shared_examples_for 'trace with disabled live trace feature' do
end
end
-shared_examples_for 'trace with enabled live trace feature' do
+RSpec.shared_examples 'trace with enabled live trace feature' do
it_behaves_like 'common trace features'
describe '#read' do
diff --git a/spec/support/shared_examples/cycle_analytics_event_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/cycle_analytics_event_shared_examples.rb
index 028b8da94a6..a00359ce979 100644
--- a/spec/support/shared_examples/cycle_analytics_event_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/cycle_analytics_event_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'cycle analytics event' do
+RSpec.shared_examples_for 'cycle analytics event' do
let(:params) { {} }
let(:instance) { described_class.new(params) }
diff --git a/spec/support/shared_examples/diff_file_collections.rb b/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb
index c8bd137bf84..a1cdd054f32 100644
--- a/spec/support/shared_examples/diff_file_collections.rb
+++ b/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'diff statistics' do |test_include_stats_flag: true|
+RSpec.shared_examples 'diff statistics' do |test_include_stats_flag: true|
subject { described_class.new(diffable, collection_default_args) }
def stub_stats_find_by_path(path, stats_mock)
@@ -42,7 +42,7 @@ shared_examples 'diff statistics' do |test_include_stats_flag: true|
end
end
-shared_examples 'unfoldable diff' do
+RSpec.shared_examples 'unfoldable diff' do
let(:subject) { described_class.new(diffable, diff_options: nil) }
it 'calls Gitlab::Diff::File#unfold_diff_lines with correct position' do
@@ -58,7 +58,7 @@ shared_examples 'unfoldable diff' do
end
end
-shared_examples 'cacheable diff collection' do
+RSpec.shared_examples 'cacheable diff collection' do
let(:cache) { instance_double(Gitlab::Diff::HighlightCache) }
before do
diff --git a/spec/support/shared_examples/file_finder.rb b/spec/support/shared_examples/lib/gitlab/file_finder_shared_examples.rb
index f4b28b94090..dce927c875e 100644
--- a/spec/support/shared_examples/file_finder.rb
+++ b/spec/support/shared_examples/lib/gitlab/file_finder_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'file finder' do
+RSpec.shared_examples 'file finder' do
let(:query) { 'files' }
let(:search_results) { subject.find(query) }
diff --git a/spec/support/shared_examples/gitlab_verify.rb b/spec/support/shared_examples/lib/gitlab/gitlab_verify_shared_examples.rb
index 721ea3b4c88..721ea3b4c88 100644
--- a/spec/support/shared_examples/gitlab_verify.rb
+++ b/spec/support/shared_examples/lib/gitlab/gitlab_verify_shared_examples.rb
diff --git a/spec/support/shared_examples/helm_generated_script.rb b/spec/support/shared_examples/lib/gitlab/helm_generated_script_shared_examples.rb
index 17f495ebe46..bbf8a946f8b 100644
--- a/spec/support/shared_examples/helm_generated_script.rb
+++ b/spec/support/shared_examples/lib/gitlab/helm_generated_script_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'helm commands' do
+RSpec.shared_examples 'helm commands' do
describe '#generate_script' do
let(:helm_setup) do
<<~EOS
diff --git a/spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb
index 691564120cc..801be5ae946 100644
--- a/spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb
@@ -1,8 +1,9 @@
# frozen_string_literal: true
-shared_examples 'log import failure' do |importable_column|
+RSpec.shared_examples 'log import failure' do |importable_column|
it 'tracks error' do
extra = {
+ source: action,
relation_key: relation_key,
relation_index: relation_index,
retry_count: retry_count
@@ -11,7 +12,12 @@ shared_examples 'log import failure' do |importable_column|
expect(Gitlab::ErrorTracking).to receive(:track_exception).with(exception, extra)
- subject.log_import_failure(relation_key, relation_index, exception, retry_count)
+ subject.log_import_failure(
+ source: action,
+ relation_key: relation_key,
+ relation_index: relation_index,
+ exception: exception,
+ retry_count: retry_count)
end
it 'saves data to ImportFailure' do
@@ -21,6 +27,7 @@ shared_examples 'log import failure' do |importable_column|
aggregate_failures do
expect(import_failure[importable_column]).to eq(importable.id)
+ expect(import_failure.source).to eq(action)
expect(import_failure.relation_key).to eq(relation_key)
expect(import_failure.relation_index).to eq(relation_index)
expect(import_failure.exception_class).to eq('StandardError')
diff --git a/spec/support/shared_examples/ldap_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ldap_shared_examples.rb
index 0a70ce7ea0c..cacefc63139 100644
--- a/spec/support/shared_examples/ldap_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/ldap_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'normalizes a DN' do
+RSpec.shared_examples 'normalizes a DN' do
using RSpec::Parameterized::TableSyntax
where(:test_description, :given, :expected) do
@@ -40,7 +40,7 @@ shared_examples_for 'normalizes a DN' do
end
end
-shared_examples_for 'normalizes a DN attribute value' do
+RSpec.shared_examples 'normalizes a DN attribute value' do
using RSpec::Parameterized::TableSyntax
where(:test_description, :given, :expected) do
diff --git a/spec/support/shared_examples/malicious_regexp_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/malicious_regexp_shared_examples.rb
index 96c02260d53..b124c91c0da 100644
--- a/spec/support/shared_examples/malicious_regexp_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/malicious_regexp_shared_examples.rb
@@ -2,7 +2,7 @@
require 'timeout'
-shared_examples 'malicious regexp' do
+RSpec.shared_examples 'malicious regexp' do
let(:malicious_text) { 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!' }
let(:malicious_regexp_re2) { '(?i)^(([a-z])+.)+[A-Z]([a-z])+$' }
let(:malicious_regexp_ruby) { '/^(([a-z])+.)+[A-Z]([a-z])+$/i' }
diff --git a/spec/support/shared_examples/migration_helpers_examples.rb b/spec/support/shared_examples/lib/gitlab/migration_helpers_shared_examples.rb
index 3587297a2d7..8893ed5504b 100644
--- a/spec/support/shared_examples/migration_helpers_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/migration_helpers_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'skips validation' do |validation_option|
+RSpec.shared_examples 'skips validation' do |validation_option|
it 'skips validation' do
expect(model).not_to receive(:disable_statement_timeout)
expect(model).to receive(:execute).with(/ADD CONSTRAINT/)
@@ -10,7 +10,7 @@ shared_examples 'skips validation' do |validation_option|
end
end
-shared_examples 'performs validation' do |validation_option|
+RSpec.shared_examples 'performs validation' do |validation_option|
it 'performs validation' do
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
diff --git a/spec/support/shared_examples/position_formatters.rb b/spec/support/shared_examples/lib/gitlab/position_formatters_shared_examples.rb
index 30b6b8d24f0..c9300aff3e6 100644
--- a/spec/support/shared_examples/position_formatters.rb
+++ b/spec/support/shared_examples/lib/gitlab/position_formatters_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for "position formatter" do
+RSpec.shared_examples "position formatter" do
let(:formatter) { described_class.new(attrs) }
describe '#key' do
diff --git a/spec/support/shared_examples/lib/gitlab/repo_type_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/repo_type_shared_examples.rb
new file mode 100644
index 00000000000..69ae9339f10
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/repo_type_shared_examples.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a repo type' do
+ describe '#identifier_for_container' do
+ subject { described_class.identifier_for_container(expected_container) }
+
+ it { is_expected.to eq(expected_identifier) }
+ end
+
+ describe '#fetch_id' do
+ it 'finds an id match in the identifier' do
+ expect(described_class.fetch_id(expected_identifier)).to eq(expected_id)
+ end
+
+ it 'does not break on other identifiers' do
+ expect(described_class.fetch_id('wiki-noid')).to eq(nil)
+ end
+ end
+
+ describe '#fetch_container!' do
+ it 'returns the container' do
+ expect(described_class.fetch_container!(expected_identifier)).to eq expected_container
+ end
+
+ it 'raises an exception if the identifier is invalid' do
+ expect { described_class.fetch_container!('project-noid') }.to raise_error ArgumentError
+ end
+ end
+
+ describe '#path_suffix' do
+ subject { described_class.path_suffix }
+
+ it { is_expected.to eq(expected_suffix) }
+ end
+
+ describe '#repository_for' do
+ it 'finds the repository for the repo type' do
+ expect(described_class.repository_for(expected_container)).to eq(expected_repository)
+ end
+ end
+end
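A consuming spec provides the expected_* bindings; a sketch using the project repo type as the illustration:

# Sketch: described_class resolves to the repo type object being described.
describe Gitlab::GlRepository::PROJECT do
  let(:project) { create(:project) }

  it_behaves_like 'a repo type' do
    let(:expected_id)         { project.id }
    let(:expected_identifier) { "project-#{expected_id}" }
    let(:expected_suffix)     { '' }
    let(:expected_container)  { project }
    let(:expected_repository) { project.repository }
  end
end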
diff --git a/spec/support/shared_examples/unique_ip_check_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/unique_ip_check_shared_examples.rb
index 9bdfa762fc8..e42a927b5ba 100644
--- a/spec/support/shared_examples/unique_ip_check_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/unique_ip_check_shared_examples.rb
@@ -1,44 +1,6 @@
# frozen_string_literal: true
-shared_context 'unique ips sign in limit' do
- include StubENV
- let(:request_context) { Gitlab::RequestContext.instance }
-
- before do
- Gitlab::Redis::Cache.with(&:flushall)
- Gitlab::Redis::Queues.with(&:flushall)
- Gitlab::Redis::SharedState.with(&:flushall)
- end
-
- before do
- stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
-
- Gitlab::CurrentSettings.update!(
- unique_ips_limit_enabled: true,
- unique_ips_limit_time_window: 10000
- )
-
- # Make sure we're working with the same request context everywhere
- allow(Gitlab::RequestContext).to receive(:instance).and_return(request_context)
- end
-
- def change_ip(ip)
- allow(request_context).to receive(:client_ip).and_return(ip)
- end
-
- def request_from_ip(ip)
- change_ip(ip)
- request
- response
- end
-
- def operation_from_ip(ip)
- change_ip(ip)
- operation
- end
-end
-
-shared_examples 'user login operation with unique ip limit' do
+RSpec.shared_examples 'user login operation with unique ip limit' do
include_context 'unique ips sign in limit' do
before do
Gitlab::CurrentSettings.update!(unique_ips_limit_per_user: 1)
@@ -56,7 +18,7 @@ shared_examples 'user login operation with unique ip limit' do
end
end
-shared_examples 'user login request with unique ip limit' do |success_status = 200|
+RSpec.shared_examples 'user login request with unique ip limit' do |success_status = 200|
include_context 'unique ips sign in limit' do
before do
Gitlab::CurrentSettings.update!(unique_ips_limit_per_user: 1)
@@ -69,7 +31,7 @@ shared_examples 'user login request with unique ip limit' do |success_status = 2
it 'blocks user authenticating from two distinct ips' do
expect(request_from_ip('ip')).to have_gitlab_http_status(success_status)
- expect(request_from_ip('ip2')).to have_gitlab_http_status(403)
+ expect(request_from_ip('ip2')).to have_gitlab_http_status(:forbidden)
end
end
end
diff --git a/spec/support/shared_examples/lib/gitlab/usage_data_counters/a_redis_counter.rb b/spec/support/shared_examples/lib/gitlab/usage_data_counters/a_redis_counter_shared_examples.rb
index 91bf804978d..921afbc3e5e 100644
--- a/spec/support/shared_examples/lib/gitlab/usage_data_counters/a_redis_counter.rb
+++ b/spec/support/shared_examples/lib/gitlab/usage_data_counters/a_redis_counter_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'a redis usage counter' do |thing, event|
+RSpec.shared_examples 'a redis usage counter' do |thing, event|
describe ".count(#{event})", :clean_gitlab_redis_shared_state do
it "increments the #{thing} #{event} counter by 1" do
expect do
@@ -22,7 +22,7 @@ shared_examples 'a redis usage counter' do |thing, event|
end
end
-shared_examples 'a redis usage counter with totals' do |prefix, events|
+RSpec.shared_examples 'a redis usage counter with totals' do |prefix, events|
describe 'totals', :clean_gitlab_redis_shared_state do
before do
events.each do |k, n|
diff --git a/spec/support/shared_examples/mail_room_shared_examples.rb b/spec/support/shared_examples/mail_room_shared_examples.rb
deleted file mode 100644
index 4cca29250e2..00000000000
--- a/spec/support/shared_examples/mail_room_shared_examples.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-shared_examples_for 'only truthy if both enabled and address are truthy' do |target_proc|
- context 'with both enabled and address as truthy values' do
- it 'is truthy' do
- stub_config(enabled: true, address: 'localhost')
-
- expect(target_proc.call).to be_truthy
- end
- end
-
- context 'with address only as truthy' do
- it 'is falsey' do
- stub_config(enabled: false, address: 'localhost')
-
- expect(target_proc.call).to be_falsey
- end
- end
-
- context 'with enabled only as truthy' do
- it 'is falsey' do
- stub_config(enabled: true, address: nil)
-
- expect(target_proc.call).to be_falsey
- end
- end
-
- context 'with neither address nor enabled as truthy' do
- it 'is falsey' do
- stub_config(enabled: false, address: nil)
-
- expect(target_proc.call).to be_falsey
- end
- end
-end
diff --git a/spec/support/shared_examples/notify_shared_examples.rb b/spec/support/shared_examples/mailers/notify_shared_examples.rb
index ca031df000e..45987059123 100644
--- a/spec/support/shared_examples/notify_shared_examples.rb
+++ b/spec/support/shared_examples/mailers/notify_shared_examples.rb
@@ -1,36 +1,12 @@
# frozen_string_literal: true
-shared_context 'gitlab email notification' do
- set(:group) { create(:group) }
- set(:subgroup) { create(:group, parent: group) }
- set(:project) { create(:project, :repository, name: 'a-known-name', group: group) }
- set(:recipient) { create(:user, email: 'recipient@example.com') }
-
- let(:gitlab_sender_display_name) { Gitlab.config.gitlab.email_display_name }
- let(:gitlab_sender) { Gitlab.config.gitlab.email_from }
- let(:gitlab_sender_reply_to) { Gitlab.config.gitlab.email_reply_to }
- let(:new_user_address) { 'newguy@example.com' }
-
- before do
- email = recipient.emails.create(email: "notifications@example.com")
- recipient.update_attribute(:notification_email, email.email)
- stub_incoming_email_setting(enabled: true, address: "reply+%{key}@#{Gitlab.config.gitlab.host}")
- end
-end
-
-shared_context 'reply-by-email is enabled with incoming address without %{key}' do
- before do
- stub_incoming_email_setting(enabled: true, address: "reply@#{Gitlab.config.gitlab.host}")
- end
-end
-
-shared_examples 'a multiple recipients email' do
+RSpec.shared_examples 'a multiple recipients email' do
it 'is sent to the given recipient' do
is_expected.to deliver_to recipient.notification_email
end
end
-shared_examples 'an email sent from GitLab' do
+RSpec.shared_examples 'an email sent from GitLab' do
it 'has the characteristics of an email sent from GitLab' do
sender = subject.header[:from].addrs[0]
reply_to = subject.header[:reply_to].addresses
@@ -43,7 +19,7 @@ shared_examples 'an email sent from GitLab' do
end
end
-shared_examples 'an email sent to a user' do
+RSpec.shared_examples 'an email sent to a user' do
it 'is sent to user\'s global notification email address' do
expect(subject).to deliver_to(recipient.notification_email)
end
@@ -59,13 +35,13 @@ shared_examples 'an email sent to a user' do
end
end
-shared_examples 'an email that contains a header with author username' do
+RSpec.shared_examples 'an email that contains a header with author username' do
it 'has X-GitLab-Author header containing author\'s username' do
is_expected.to have_header 'X-GitLab-Author', user.username
end
end
-shared_examples 'an email with X-GitLab headers containing IDs' do
+RSpec.shared_examples 'an email with X-GitLab headers containing IDs' do
it 'has X-GitLab-*-ID header' do
is_expected.to have_header "X-GitLab-#{model.class.name}-ID", "#{model.id}"
end
@@ -79,7 +55,7 @@ shared_examples 'an email with X-GitLab headers containing IDs' do
end
end
-shared_examples 'an email with X-GitLab headers containing project details' do
+RSpec.shared_examples 'an email with X-GitLab headers containing project details' do
it 'has X-GitLab-Project headers' do
aggregate_failures do
full_path_as_domain = "#{project.name}.#{project.namespace.path}"
@@ -91,7 +67,7 @@ shared_examples 'an email with X-GitLab headers containing project details' do
end
end
-shared_examples 'a new thread email with reply-by-email enabled' do
+RSpec.shared_examples 'a new thread email with reply-by-email enabled' do
it 'has the characteristics of a threaded email' do
host = Gitlab.config.gitlab.host
route_key = "#{model.class.model_name.singular_route_key}_#{model.id}"
@@ -103,7 +79,7 @@ shared_examples 'a new thread email with reply-by-email enabled' do
end
end
-shared_examples 'a thread answer email with reply-by-email enabled' do
+RSpec.shared_examples 'a thread answer email with reply-by-email enabled' do
include_examples 'an email with X-GitLab headers containing project details'
include_examples 'an email with X-GitLab headers containing IDs'
@@ -120,7 +96,7 @@ shared_examples 'a thread answer email with reply-by-email enabled' do
end
end
-shared_examples 'an email starting a new thread with reply-by-email enabled' do
+RSpec.shared_examples 'an email starting a new thread with reply-by-email enabled' do
include_examples 'an email with X-GitLab headers containing project details'
include_examples 'an email with X-GitLab headers containing IDs'
include_examples 'a new thread email with reply-by-email enabled'
@@ -145,7 +121,7 @@ shared_examples 'an email starting a new thread with reply-by-email enabled' do
end
end
-shared_examples 'an answer to an existing thread with reply-by-email enabled' do
+RSpec.shared_examples 'an answer to an existing thread with reply-by-email enabled' do
include_examples 'an email with X-GitLab headers containing project details'
include_examples 'an email with X-GitLab headers containing IDs'
include_examples 'a thread answer email with reply-by-email enabled'
@@ -166,7 +142,7 @@ shared_examples 'an answer to an existing thread with reply-by-email enabled' do
end
end
-shared_examples 'it should have Gmail Actions links' do
+RSpec.shared_examples 'it should have Gmail Actions links' do
it do
aggregate_failures do
is_expected.to have_body_text('<script type="application/ld+json">')
@@ -175,7 +151,7 @@ shared_examples 'it should have Gmail Actions links' do
end
end
-shared_examples 'it should not have Gmail Actions links' do
+RSpec.shared_examples 'it should not have Gmail Actions links' do
it do
aggregate_failures do
is_expected.not_to have_body_text('<script type="application/ld+json">')
@@ -184,25 +160,25 @@ shared_examples 'it should not have Gmail Actions links' do
end
end
-shared_examples 'it should show Gmail Actions View Issue link' do
+RSpec.shared_examples 'it should show Gmail Actions View Issue link' do
it_behaves_like 'it should have Gmail Actions links'
it { is_expected.to have_body_text('View Issue') }
end
-shared_examples 'it should show Gmail Actions View Merge request link' do
+RSpec.shared_examples 'it should show Gmail Actions View Merge request link' do
it_behaves_like 'it should have Gmail Actions links'
it { is_expected.to have_body_text('View Merge request') }
end
-shared_examples 'it should show Gmail Actions View Commit link' do
+RSpec.shared_examples 'it should show Gmail Actions View Commit link' do
it_behaves_like 'it should have Gmail Actions links'
it { is_expected.to have_body_text('View Commit') }
end
-shared_examples 'an unsubscribeable thread' do
+RSpec.shared_examples 'an unsubscribeable thread' do
it_behaves_like 'an unsubscribeable thread with incoming address without %{key}'
it 'has a List-Unsubscribe header in the correct format, and a body link' do
@@ -215,7 +191,7 @@ shared_examples 'an unsubscribeable thread' do
end
end
-shared_examples 'an unsubscribeable thread with incoming address without %{key}' do
+RSpec.shared_examples 'an unsubscribeable thread with incoming address without %{key}' do
include_context 'reply-by-email is enabled with incoming address without %{key}'
it 'has a List-Unsubscribe header in the correct format, and a body link' do
@@ -228,7 +204,7 @@ shared_examples 'an unsubscribeable thread with incoming address without %{key}'
end
end
-shared_examples 'a user cannot unsubscribe through footer link' do
+RSpec.shared_examples 'a user cannot unsubscribe through footer link' do
it 'does not have a List-Unsubscribe header or a body link' do
aggregate_failures do
is_expected.not_to have_header('List-Unsubscribe', /unsubscribe/)
@@ -237,11 +213,11 @@ shared_examples 'a user cannot unsubscribe through footer link' do
end
end
-shared_examples 'an email with a labels subscriptions link in its footer' do
+RSpec.shared_examples 'an email with a labels subscriptions link in its footer' do
it { is_expected.to have_body_text('label subscriptions') }
end
-shared_examples 'a note email' do
+RSpec.shared_examples 'a note email' do
it_behaves_like 'it should have Gmail Actions links'
it 'is sent to the given recipient as the author' do
@@ -263,7 +239,7 @@ shared_examples 'a note email' do
end
end
-shared_examples 'appearance header and footer enabled' do
+RSpec.shared_examples 'appearance header and footer enabled' do
it "contains header and footer" do
create :appearance, header_message: "Foo", footer_message: "Bar", email_header_and_footer_enabled: true
@@ -277,7 +253,7 @@ shared_examples 'appearance header and footer enabled' do
end
end
-shared_examples 'appearance header and footer not enabled' do
+RSpec.shared_examples 'appearance header and footer not enabled' do
it "does not contain header and footer" do
create :appearance, header_message: "Foo", footer_message: "Bar", email_header_and_footer_enabled: false
diff --git a/spec/support/shared_examples/metrics/url_shared_examples.rb b/spec/support/shared_examples/metrics/url_shared_examples.rb
new file mode 100644
index 00000000000..67742aecb87
--- /dev/null
+++ b/spec/support/shared_examples/metrics/url_shared_examples.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'regex which matches url when expected' do
+ it { is_expected.to be_a Regexp }
+
+ it 'matches a metrics dashboard link with named params' do
+ expect(subject).to match url
+
+ subject.match(url) do |m|
+ expect(m.named_captures).to eq expected_params
+ end
+ end
+
+ it 'does not match other gitlab urls that contain the term metrics' do
+ url = Gitlab::Routing.url_helpers.active_common_namespace_project_prometheus_metrics_url('foo', 'bar', :json)
+
+ expect(subject).not_to match url
+ end
+
+ it 'does not match other gitlab urls' do
+ url = Gitlab.config.gitlab.url
+
+ expect(subject).not_to match url
+ end
+
+ it 'does not match non-gitlab urls' do
+ url = 'https://www.super_awesome_site.com/'
+
+ expect(subject).not_to match url
+ end
+end
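Any Regexp with named captures satisfies this contract; a purely illustrative consuming spec (the regex and URL below are made up for the sketch):

# Sketch: subject is the regex under test, url a string it should match,
# expected_params the named captures that match should produce.
describe 'a metrics url regex' do
  subject { %r{\Ahttps://gitlab\.example\.com/(?<namespace>[^/]+)/(?<project>[^/]+)/-/metrics\z} }

  let(:url)             { 'https://gitlab.example.com/foo/bar/-/metrics' }
  let(:expected_params) { { 'namespace' => 'foo', 'project' => 'bar' } }

  it_behaves_like 'regex which matches url when expected'
end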
diff --git a/spec/support/shared_examples/models/active_record_enum_shared_examples.rb b/spec/support/shared_examples/models/active_record_enum_shared_examples.rb
index fb1189c7f17..3d765b6ca93 100644
--- a/spec/support/shared_examples/models/active_record_enum_shared_examples.rb
+++ b/spec/support/shared_examples/models/active_record_enum_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'having unique enum values' do
+RSpec.shared_examples 'having unique enum values' do
described_class.defined_enums.each do |name, enum|
it "has unique values in #{name.inspect}" do
duplicated = enum.group_by(&:last).select { |key, value| value.size > 1 }
diff --git a/spec/support/shared_examples/application_setting_examples.rb b/spec/support/shared_examples/models/application_setting_shared_examples.rb
index a43d2a75082..a43d2a75082 100644
--- a/spec/support/shared_examples/application_setting_examples.rb
+++ b/spec/support/shared_examples/models/application_setting_shared_examples.rb
diff --git a/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb b/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
index b837ca87256..62d56f2e86e 100644
--- a/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
+++ b/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-shared_examples_for 'AtomicInternalId' do |validate_presence: true|
+RSpec.shared_examples 'AtomicInternalId' do |validate_presence: true|
describe '.has_internal_id' do
describe 'Module inclusion' do
subject { described_class }
diff --git a/spec/support/shared_examples/models/chat_service_shared_examples.rb b/spec/support/shared_examples/models/chat_service_shared_examples.rb
index 7936a8eb974..1cc1a1c8176 100644
--- a/spec/support/shared_examples/models/chat_service_shared_examples.rb
+++ b/spec/support/shared_examples/models/chat_service_shared_examples.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
-require "spec_helper"
-
-shared_examples_for "chat service" do |service_name|
+RSpec.shared_examples "chat service" do |service_name|
describe "Associations" do
it { is_expected.to belong_to :project }
it { is_expected.to have_one :service_hook }
diff --git a/spec/support/shared_examples/chat_slash_commands_shared_examples.rb b/spec/support/shared_examples/models/chat_slash_commands_shared_examples.rb
index 370f2072705..6611a168c04 100644
--- a/spec/support/shared_examples/chat_slash_commands_shared_examples.rb
+++ b/spec/support/shared_examples/models/chat_slash_commands_shared_examples.rb
@@ -30,7 +30,7 @@ RSpec.shared_examples 'chat slash commands service' do
subject { described_class.new }
context 'no token is passed' do
- let(:params) { Hash.new }
+ let(:params) { {} }
it 'returns nil' do
expect(subject.trigger(params)).to be_nil
diff --git a/spec/support/shared_examples/models/ci_variable_shared_examples.rb b/spec/support/shared_examples/models/ci_variable_shared_examples.rb
index f93de8b6ff1..6cc922b4101 100644
--- a/spec/support/shared_examples/models/ci_variable_shared_examples.rb
+++ b/spec/support/shared_examples/models/ci_variable_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'CI variable' do
+RSpec.shared_examples 'CI variable' do
it { is_expected.to include_module(HasVariable) }
describe "variable type" do
diff --git a/spec/support/shared_examples/models/cluster_application_core_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_core_shared_examples.rb
index affe88be475..85a7c90ee42 100644
--- a/spec/support/shared_examples/models/cluster_application_core_shared_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_core_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'cluster application core specs' do |application_name|
+RSpec.shared_examples 'cluster application core specs' do |application_name|
it { is_expected.to belong_to(:cluster) }
it { is_expected.to validate_presence_of(:cluster) }
diff --git a/spec/support/shared_examples/models/cluster_application_helm_cert_examples.rb b/spec/support/shared_examples/models/cluster_application_helm_cert_shared_examples.rb
index 1c8c19acc74..d5c425dea51 100644
--- a/spec/support/shared_examples/models/cluster_application_helm_cert_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_helm_cert_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'cluster application helm specs' do |application_name|
+RSpec.shared_examples 'cluster application helm specs' do |application_name|
let(:application) { create(application_name) }
describe '#uninstall_command' do
diff --git a/spec/support/shared_examples/models/cluster_application_initial_status.rb b/spec/support/shared_examples/models/cluster_application_initial_status_shared_examples.rb
index 030974c9aa0..0b21e9a3aa7 100644
--- a/spec/support/shared_examples/models/cluster_application_initial_status.rb
+++ b/spec/support/shared_examples/models/cluster_application_initial_status_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'cluster application initial status specs' do
+RSpec.shared_examples 'cluster application initial status specs' do
describe '#status' do
let(:cluster) { create(:cluster, :provided_by_gcp) }
diff --git a/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
index 4bca37a4cd0..e4e49b94e42 100644
--- a/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'cluster application status specs' do |application_name|
+RSpec.shared_examples 'cluster application status specs' do |application_name|
describe '#status_states' do
let(:cluster) { create(:cluster, :provided_by_gcp) }
diff --git a/spec/support/shared_examples/models/cluster_application_version_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_version_shared_examples.rb
index ba02da41b53..e293467774e 100644
--- a/spec/support/shared_examples/models/cluster_application_version_shared_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_version_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'cluster application version specs' do |application_name|
+RSpec.shared_examples 'cluster application version specs' do |application_name|
describe 'update_available?' do
let(:version) { '0.0.0' }
diff --git a/spec/support/shared_examples/models/cluster_cleanup_worker_base_shared_examples.rb b/spec/support/shared_examples/models/cluster_cleanup_worker_base_shared_examples.rb
index 66bbd908ea8..2302a605be5 100644
--- a/spec/support/shared_examples/models/cluster_cleanup_worker_base_shared_examples.rb
+++ b/spec/support/shared_examples/models/cluster_cleanup_worker_base_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'cluster cleanup worker base specs' do
+RSpec.shared_examples 'cluster cleanup worker base specs' do
it 'transitions to errored if sidekiq retries exhausted' do
job = { 'args' => [cluster.id, 0], 'jid' => '123' }
diff --git a/spec/support/shared_examples/models/clusters/providers/provider_status.rb b/spec/support/shared_examples/models/clusters/providers/provider_status_shared_examples.rb
index 63cb9a56f5b..3ef1911ba99 100644
--- a/spec/support/shared_examples/models/clusters/providers/provider_status.rb
+++ b/spec/support/shared_examples/models/clusters/providers/provider_status_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'provider status' do |factory|
+RSpec.shared_examples 'provider status' do |factory|
describe 'state_machine' do
context 'when any => [:created]' do
let(:provider) { build(factory, :creating) }
diff --git a/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb b/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb
new file mode 100644
index 00000000000..78d0945ea63
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a BulkInsertSafe model' do |klass|
+ # Call `.dup` on the class passed in, as a test in this set of examples
+ # calls `belongs_to` on the class, thereby adding a new belongs_to
+ # relationship to the model that can break remaining specs in the test suite.
+ let(:target_class) { klass.dup }
+
+ # We consider all callbacks unsafe for bulk insertions unless we have explicitly
+ # whitelisted them (esp. anything related to :save, :create, :commit etc.)
+ let(:callback_method_blacklist) do
+ ActiveRecord::Callbacks::CALLBACKS.reject do |callback|
+ cb_name = callback.to_s.gsub(/(before_|after_|around_)/, '').to_sym
+ BulkInsertSafe::CALLBACK_NAME_WHITELIST.include?(cb_name)
+ end.to_set
+ end
+
+ context 'when calling class methods directly' do
+ it 'raises an error when method is not bulk-insert safe' do
+ callback_method_blacklist.each do |m|
+ expect { target_class.send(m, nil) }.to(
+ raise_error(BulkInsertSafe::MethodNotAllowedError),
+ "Expected call to #{m} to raise an error, but it didn't"
+ )
+ end
+ end
+
+ it 'does not raise an error when method is bulk-insert safe' do
+ BulkInsertSafe::CALLBACK_NAME_WHITELIST.each do |name|
+ expect { target_class.set_callback(name) {} }.not_to raise_error
+ end
+ end
+
+ it 'does not raise an error when the call is triggered by belongs_to' do
+ expect { target_class.belongs_to(:other_record) }.not_to raise_error
+ end
+ end
+end
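For context, a spec including this new shared example would look roughly like the sketch below; the model name is a stand-in for any class that includes BulkInsertSafe and is not part of this commit.

# Hypothetical caller; MergeRequestDiffCommit is only illustrative.
RSpec.describe MergeRequestDiffCommit do
  it_behaves_like 'a BulkInsertSafe model', MergeRequestDiffCommit
end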
diff --git a/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb b/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb
new file mode 100644
index 00000000000..d5606e65981
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb
@@ -0,0 +1,171 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'model with repository' do
+ describe '#commits_by' do
+ let(:commits) { container.repository.commits('HEAD', limit: 3).commits }
+ let(:commit_shas) { commits.map(&:id) }
+
+ it 'retrieves several commits from the repository by oid' do
+ expect(container.commits_by(oids: commit_shas)).to eq commits
+ end
+ end
+
+ describe "#web_url" do
+ context 'when given the only_path option' do
+ subject { container.web_url(only_path: only_path) }
+
+ context 'when only_path is false' do
+ let(:only_path) { false }
+
+ it 'returns the full web URL for this repo' do
+ expect(subject).to eq("#{Gitlab.config.gitlab.url}/#{expected_web_url_path}")
+ end
+ end
+
+ context 'when only_path is true' do
+ let(:only_path) { true }
+
+ it 'returns the relative web URL for this repo' do
+ expect(subject).to eq("/#{expected_web_url_path}")
+ end
+ end
+
+ context 'when only_path is nil' do
+ let(:only_path) { nil }
+
+ it 'returns the full web URL for this repo' do
+ expect(subject).to eq("#{Gitlab.config.gitlab.url}/#{expected_web_url_path}")
+ end
+ end
+ end
+
+ context 'when not given the only_path option' do
+ it 'returns the full web URL for this repo' do
+ expect(container.web_url).to eq("#{Gitlab.config.gitlab.url}/#{expected_web_url_path}")
+ end
+ end
+ end
+
+ describe '#ssh_url_to_repo' do
+ it 'returns container ssh address' do
+ expect(container.ssh_url_to_repo).to eq container.url_to_repo
+ end
+ end
+
+ describe '#http_url_to_repo' do
+ subject { container.http_url_to_repo }
+
+ context 'when a custom HTTP clone URL root is not set' do
+ it 'returns the url to the repo without a username' do
+ expect(subject).to eq("#{container.web_url}.git")
+ expect(subject).not_to include('@')
+ end
+ end
+
+ context 'when a custom HTTP clone URL root is set' do
+ before do
+ stub_application_setting(custom_http_clone_url_root: custom_http_clone_url_root)
+ end
+
+ context 'when custom HTTP clone URL root has a relative URL root' do
+ context 'when custom HTTP clone URL root ends with a slash' do
+ let(:custom_http_clone_url_root) { 'https://git.example.com:51234/mygitlab/' }
+
+ it 'returns the url to the repo, with the root replaced with the custom one' do
+ expect(subject).to eq("#{custom_http_clone_url_root}#{expected_web_url_path}.git")
+ end
+ end
+
+ context 'when custom HTTP clone URL root does not end with a slash' do
+ let(:custom_http_clone_url_root) { 'https://git.example.com:51234/mygitlab' }
+
+ it 'returns the url to the repo, with the root replaced with the custom one' do
+ expect(subject).to eq("#{custom_http_clone_url_root}/#{expected_web_url_path}.git")
+ end
+ end
+ end
+
+ context 'when custom HTTP clone URL root does not have a relative URL root' do
+ context 'when custom HTTP clone URL root ends with a slash' do
+ let(:custom_http_clone_url_root) { 'https://git.example.com:51234/' }
+
+ it 'returns the url to the repo, with the root replaced with the custom one' do
+ expect(subject).to eq("#{custom_http_clone_url_root}#{expected_web_url_path}.git")
+ end
+ end
+
+ context 'when custom HTTP clone URL root does not end with a slash' do
+ let(:custom_http_clone_url_root) { 'https://git.example.com:51234' }
+
+ it 'returns the url to the repo, with the root replaced with the custom one' do
+ expect(subject).to eq("#{custom_http_clone_url_root}/#{expected_web_url_path}.git")
+ end
+ end
+ end
+ end
+ end
+
+ describe '#repository' do
+ it 'returns valid repo' do
+ expect(container.repository).to be_kind_of(expected_repository_klass)
+ end
+ end
+
+ describe '#storage' do
+ it 'returns valid storage' do
+ expect(container.storage).to be_kind_of(expected_storage_klass)
+ end
+ end
+
+ describe '#full_path' do
+ it 'returns valid full_path' do
+ expect(container.full_path).to eq(expected_full_path)
+ end
+ end
+
+ describe '#empty_repo?' do
+ context 'when the repo does not exist' do
+ it 'returns true' do
+ expect(stubbed_container.empty_repo?).to be(true)
+ end
+ end
+
+ context 'when the repo exists' do
+ it { expect(container.empty_repo?).to be(false) }
+
+ it 'returns true when repository is empty' do
+ allow(container.repository).to receive(:empty?).and_return(true)
+
+ expect(container.empty_repo?).to be(true)
+ end
+ end
+ end
+
+ describe '#valid_repo?' do
+ it { expect(stubbed_container.valid_repo?).to be(false)}
+ it { expect(container.valid_repo?).to be(true) }
+ end
+
+ describe '#repository_exists?' do
+ it { expect(stubbed_container.repository_exists?).to be(false)}
+ it { expect(container.repository_exists?).to be(true) }
+ end
+
+ describe '#repo_exists?' do
+ it { expect(stubbed_container.repo_exists?).to be(false)}
+ it { expect(container.repo_exists?).to be(true) }
+ end
+
+ describe '#root_ref' do
+ let(:root_ref) { container.repository.root_ref }
+
+ it { expect(container.root_ref?(root_ref)).to be(true) }
+ it { expect(container.root_ref?('HEAD')).to be(false) }
+ it { expect(container.root_ref?('foo')).to be(false) }
+ end
+
+ describe 'Respond to' do
+ it { is_expected.to respond_to(:base_dir) }
+ it { is_expected.to respond_to(:disk_path) }
+ end
+end
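A caller of this 'model with repository' example has to wire up the lets it references (container, stubbed_container, expected_full_path, expected_web_url_path, expected_repository_klass, expected_storage_klass). The sketch below shows one plausible wiring; the concrete values, including Storage::Hashed, are assumptions rather than part of the diff.

RSpec.describe Project do
  it_behaves_like 'model with repository' do
    let_it_be(:container) { create(:project, :repository) }
    let(:stubbed_container) { build_stubbed(:project) }
    let(:expected_full_path) { container.full_path }
    let(:expected_web_url_path) { container.full_path }
    # Assumed repository/storage classes; replace with the real ones for the model under test.
    let(:expected_repository_klass) { Repository }
    let(:expected_storage_klass) { Storage::Hashed }
  end
end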
diff --git a/spec/support/shared_examples/models/concerns/issuable_shared_examples.rb b/spec/support/shared_examples/models/concerns/issuable_shared_examples.rb
index 4978a403324..3a407088997 100644
--- a/spec/support/shared_examples/models/concerns/issuable_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/issuable_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'matches_cross_reference_regex? fails fast' do
+RSpec.shared_examples 'matches_cross_reference_regex? fails fast' do
it 'fails fast for long strings' do
# took well under 1 second in CI https://dev.gitlab.org/gitlab/gitlabhq/merge_requests/3267#note_172823
expect do
@@ -9,7 +9,7 @@ shared_examples_for 'matches_cross_reference_regex? fails fast' do
end
end
-shared_examples_for 'validates description length with custom validation' do
+RSpec.shared_examples 'validates description length with custom validation' do
let(:issuable) { build(:issue, description: 'x' * (::Issuable::DESCRIPTION_LENGTH_MAX + 1)) }
let(:context) { :update }
@@ -48,7 +48,7 @@ shared_examples_for 'validates description length with custom validation' do
end
end
-shared_examples_for 'truncates the description to its allowed maximum length on import' do
+RSpec.shared_examples 'truncates the description to its allowed maximum length on import' do
before do
allow(issuable).to receive(:importing?).and_return(true)
end
diff --git a/spec/support/shared_examples/models/concerns/redactable_shared_examples.rb b/spec/support/shared_examples/models/concerns/redactable_shared_examples.rb
index c5c14901268..e196cf8f8da 100644
--- a/spec/support/shared_examples/models/concerns/redactable_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/redactable_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'model with redactable field' do
+RSpec.shared_examples 'model with redactable field' do
it 'redacts unsubscribe token' do
model[field] = 'some text /sent_notifications/00000000000000000000000000000000/unsubscribe more text'
diff --git a/spec/support/shared_examples/cycle_analytics_stage_shared_examples.rb b/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
index c781f72ff11..8092f87383d 100644
--- a/spec/support/shared_examples/cycle_analytics_stage_shared_examples.rb
+++ b/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'cycle analytics stage' do
+RSpec.shared_examples 'cycle analytics stage' do
let(:valid_params) do
{
name: 'My Stage',
@@ -111,7 +111,7 @@ shared_examples_for 'cycle analytics stage' do
end
end
-shared_examples_for 'cycle analytics label based stage' do
+RSpec.shared_examples 'cycle analytics label based stage' do
context 'when creating label based event' do
context 'when the label id is not passed' do
it 'returns validation error when `start_event_label_id` is missing' do
@@ -123,7 +123,7 @@ shared_examples_for 'cycle analytics label based stage' do
})
expect(stage).to be_invalid
- expect(stage.errors[:start_event_label]).to include("can't be blank")
+ expect(stage.errors[:start_event_label_id]).to include("can't be blank")
end
it 'returns validation error when `end_event_label_id` is missing' do
@@ -135,7 +135,7 @@ shared_examples_for 'cycle analytics label based stage' do
})
expect(stage).to be_invalid
- expect(stage.errors[:end_event_label]).to include("can't be blank")
+ expect(stage.errors[:end_event_label_id]).to include("can't be blank")
end
end
@@ -145,7 +145,7 @@ shared_examples_for 'cycle analytics label based stage' do
name: 'My Stage',
parent: parent,
start_event_identifier: :issue_label_added,
- start_event_label: group_label,
+ start_event_label_id: group_label.id,
end_event_identifier: :issue_closed
})
@@ -159,7 +159,7 @@ shared_examples_for 'cycle analytics label based stage' do
name: 'My Stage',
parent: parent_in_subgroup,
start_event_identifier: :issue_label_added,
- start_event_label: group_label,
+ start_event_label_id: group_label.id,
end_event_identifier: :issue_closed
})
@@ -170,30 +170,30 @@ shared_examples_for 'cycle analytics label based stage' do
context 'when label is defined for a different group' do
let(:error_message) { s_('CycleAnalyticsStage|is not available for the selected group') }
- it 'returns validation for `start_event_label`' do
+ it 'returns validation for `start_event_label_id`' do
stage = described_class.new({
name: 'My Stage',
parent: parent_outside_of_group_label_scope,
start_event_identifier: :issue_label_added,
- start_event_label: group_label,
+ start_event_label_id: group_label.id,
end_event_identifier: :issue_closed
})
expect(stage).to be_invalid
- expect(stage.errors[:start_event_label]).to include(error_message)
+ expect(stage.errors[:start_event_label_id]).to include(error_message)
end
- it 'returns validation for `end_event_label`' do
+ it 'returns validation for `end_event_label_id`' do
stage = described_class.new({
name: 'My Stage',
parent: parent_outside_of_group_label_scope,
start_event_identifier: :issue_closed,
end_event_identifier: :issue_label_added,
- end_event_label: group_label
+ end_event_label_id: group_label.id
})
expect(stage).to be_invalid
- expect(stage.errors[:end_event_label]).to include(error_message)
+ expect(stage.errors[:end_event_label_id]).to include(error_message)
end
end
diff --git a/spec/support/shared_examples/models/diff_note_after_commit_shared_examples.rb b/spec/support/shared_examples/models/diff_note_after_commit_shared_examples.rb
index 835d2dfe757..8c3e073193c 100644
--- a/spec/support/shared_examples/models/diff_note_after_commit_shared_examples.rb
+++ b/spec/support/shared_examples/models/diff_note_after_commit_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'a valid diff note with after commit callback' do
+RSpec.shared_examples 'a valid diff note with after commit callback' do
context 'when diff file is fetched from repository' do
before do
allow_any_instance_of(::Gitlab::Diff::Position).to receive(:diff_file).with(project.repository).and_return(diff_file_from_repository)
diff --git a/spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb b/spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb
index 8b298c5c974..38a9f1fe098 100644
--- a/spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb
+++ b/spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'a valid diff positionable note' do |factory_on_commit|
+RSpec.shared_examples 'a valid diff positionable note' do |factory_on_commit|
context 'for commit' do
let(:project) { create(:project, :repository) }
let(:commit) { project.commit(sample_commit.id) }
diff --git a/spec/support/shared_examples/email_format_shared_examples.rb b/spec/support/shared_examples/models/email_format_shared_examples.rb
index 22d6c2b38e3..6797836e383 100644
--- a/spec/support/shared_examples/email_format_shared_examples.rb
+++ b/spec/support/shared_examples/models/email_format_shared_examples.rb
@@ -6,7 +6,7 @@
# Note: You have access to `email_value` which is the email address value
# being currently tested).
-shared_examples 'an object with email-formated attributes' do |*attributes|
+RSpec.shared_examples 'an object with email-formated attributes' do |*attributes|
attributes.each do |attribute|
describe "specifically its :#{attribute} attribute" do
%w[
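Per the note above, callers pass the attribute names and provide a subject; a minimal sketch (the attribute list and factory are illustrative, and the example name keeps the spelling used in the file):

RSpec.describe User do
  it_behaves_like 'an object with email-formated attributes', :email, :public_email do
    subject { build(:user) }
  end
end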
diff --git a/spec/support/shared_examples/group_members_shared_example.rb b/spec/support/shared_examples/models/group_members_shared_example.rb
index 4f7d496741d..4f7d496741d 100644
--- a/spec/support/shared_examples/group_members_shared_example.rb
+++ b/spec/support/shared_examples/models/group_members_shared_example.rb
diff --git a/spec/support/shared_examples/models/issuable_hook_data_shared_examples.rb b/spec/support/shared_examples/models/issuable_hook_data_shared_examples.rb
index 7ea2bb265cc..ecf1640ef5d 100644
--- a/spec/support/shared_examples/models/issuable_hook_data_shared_examples.rb
+++ b/spec/support/shared_examples/models/issuable_hook_data_shared_examples.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
# This shared example requires a `builder` and `user` variable
-shared_examples 'issuable hook data' do |kind|
+RSpec.shared_examples 'issuable hook data' do |kind|
let(:data) { builder.build(user: user) }
include_examples 'project hook data' do
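As the comment says, the including spec must define `builder` and `user`; a sketch of such a caller, where the builder class and the 'issue' kind argument are assumptions:

RSpec.describe Gitlab::HookData::IssueBuilder do
  let_it_be(:user) { create(:user) }
  let(:issue) { create(:issue) }
  let(:builder) { described_class.new(issue) }

  it_behaves_like 'issuable hook data', 'issue'
end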
diff --git a/spec/support/shared_examples/issue_tracker_service_shared_example.rb b/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb
index 0a483fd30ba..0a483fd30ba 100644
--- a/spec/support/shared_examples/issue_tracker_service_shared_example.rb
+++ b/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb
diff --git a/spec/support/shared_examples/models/label_note_shared_examples.rb b/spec/support/shared_examples/models/label_note_shared_examples.rb
index 406385c13bd..73066fb631a 100644
--- a/spec/support/shared_examples/models/label_note_shared_examples.rb
+++ b/spec/support/shared_examples/models/label_note_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'label note created from events' do
+RSpec.shared_examples 'label note created from events' do
def create_event(params = {})
event_params = { action: :add, label: label, user: user }
resource_key = resource.class.name.underscore.to_s
diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb
index e5375bc8280..9bf157212d3 100644
--- a/spec/support/shared_examples/models/member_shared_examples.rb
+++ b/spec/support/shared_examples/models/member_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'inherited access level as a member of entity' do
+RSpec.shared_examples 'inherited access level as a member of entity' do
let(:parent_entity) { create(:group) }
let(:user) { create(:user) }
let(:member) { entity.is_a?(Group) ? entity.group_member(user) : entity.project_member(user) }
@@ -57,7 +57,7 @@ shared_examples_for 'inherited access level as a member of entity' do
end
end
-shared_examples_for '#valid_level_roles' do |entity_name|
+RSpec.shared_examples '#valid_level_roles' do |entity_name|
let(:member_user) { create(:user) }
let(:group) { create(:group) }
let(:entity) { create(entity_name) }
diff --git a/spec/support/shared_examples/mentionable_shared_examples.rb b/spec/support/shared_examples/models/mentionable_shared_examples.rb
index 6efc471ce75..b6b131338cc 100644
--- a/spec/support/shared_examples/mentionable_shared_examples.rb
+++ b/spec/support/shared_examples/models/mentionable_shared_examples.rb
@@ -6,7 +6,7 @@
# - let(:backref_text) { "the way that +subject+ should refer to itself in backreferences " }
# - let(:set_mentionable_text) { lambda { |txt| "block that assigns txt to the subject's mentionable_text" } }
-shared_context 'mentionable context' do
+RSpec.shared_context 'mentionable context' do
let(:project) { subject.project }
let(:author) { subject.author }
@@ -59,7 +59,7 @@ shared_context 'mentionable context' do
end
end
-shared_examples 'a mentionable' do
+RSpec.shared_examples 'a mentionable' do
include_context 'mentionable context'
it 'generates a descriptive back-reference' do
@@ -86,7 +86,7 @@ shared_examples 'a mentionable' do
end
it 'sends in cached markdown fields when appropriate' do
- if subject.is_a?(CacheMarkdownField)
+ if subject.is_a?(CacheMarkdownField) && subject.extractors[author].blank?
expect_next_instance_of(Gitlab::ReferenceExtractor) do |ext|
attrs = subject.class.mentionable_attrs.collect(&:first) & subject.cached_markdown_fields.markdown_fields
attrs.each do |field|
@@ -115,7 +115,7 @@ shared_examples 'a mentionable' do
end
end
-shared_examples 'an editable mentionable' do
+RSpec.shared_examples 'an editable mentionable' do
include_context 'mentionable context'
it_behaves_like 'a mentionable'
@@ -136,7 +136,7 @@ shared_examples 'an editable mentionable' do
set_mentionable_text.call('This is a text')
- if subject.is_a?(CacheMarkdownField)
+ if subject.is_a?(CacheMarkdownField) && subject.extractors[author].blank?
expect_next_instance_of(Gitlab::ReferenceExtractor) do |ext|
subject.cached_markdown_fields.markdown_fields.each do |field|
expect(ext).to receive(:analyze).with(subject.send(field), hash_including(rendered: anything))
@@ -196,7 +196,7 @@ shared_examples 'an editable mentionable' do
end
end
-shared_examples_for 'mentions in description' do |mentionable_type|
+RSpec.shared_examples 'mentions in description' do |mentionable_type|
describe 'when store_mentioned_users_to_db feature disabled' do
before do
stub_feature_flags(store_mentioned_users_to_db: false)
@@ -229,16 +229,17 @@ shared_examples_for 'mentions in description' do |mentionable_type|
context 'when mentionable description contains mentions' do
let(:user) { create(:user) }
+ let(:user2) { create(:user) }
let(:group) { create(:group) }
- let(:mentionable_desc) { "#{user.to_reference} some description #{group.to_reference(full: true)} and @all" }
+ let(:mentionable_desc) { "#{user.to_reference} #{user2.to_reference} #{user.to_reference} some description #{group.to_reference(full: true)} and #{user2.to_reference} @all" }
let(:mentionable) { create(mentionable_type, description: mentionable_desc) }
it 'stores mentions' do
add_member(user)
expect(mentionable.user_mentions.count).to eq 1
- expect(mentionable.referenced_users).to match_array([user])
+ expect(mentionable.referenced_users).to match_array([user, user2])
expect(mentionable.referenced_projects(user)).to match_array([mentionable.project].compact) # epic.project is nil, and we want empty []
expect(mentionable.referenced_groups(user)).to match_array([group])
end
@@ -246,11 +247,12 @@ shared_examples_for 'mentions in description' do |mentionable_type|
end
end
-shared_examples_for 'mentions in notes' do |mentionable_type|
+RSpec.shared_examples 'mentions in notes' do |mentionable_type|
context 'when mentionable notes contain mentions' do
let(:user) { create(:user) }
+ let(:user2) { create(:user) }
let(:group) { create(:group) }
- let(:note_desc) { "#{user.to_reference} and #{group.to_reference(full: true)} and @all" }
+ let(:note_desc) { "#{user.to_reference} #{user2.to_reference} #{user.to_reference} and #{group.to_reference(full: true)} and #{user2.to_reference} @all" }
let!(:mentionable) { note.noteable }
before do
@@ -261,14 +263,14 @@ shared_examples_for 'mentions in notes' do |mentionable_type|
it 'returns all mentionable mentions' do
expect(mentionable.user_mentions.count).to eq 1
- expect(mentionable.referenced_users).to eq [user]
+ expect(mentionable.referenced_users).to match_array([user, user2])
expect(mentionable.referenced_projects(user)).to eq [mentionable.project].compact # epic.project is nil, and we want empty []
expect(mentionable.referenced_groups(user)).to eq [group]
end
end
end
-shared_examples_for 'load mentions from DB' do |mentionable_type|
+RSpec.shared_examples 'load mentions from DB' do |mentionable_type|
context 'load stored mentions' do
let_it_be(:user) { create(:user) }
let_it_be(:mentioned_user) { create(:user) }
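The comment block at the top of this file lists the lets a caller supplies (for example backref_text and set_mentionable_text); a hedged sketch with placeholder values:

RSpec.describe Issue do
  it_behaves_like 'an editable mentionable' do
    subject { create(:issue, project: create(:project, :repository)) }

    let(:backref_text) { "issue #{subject.to_reference}" }
    let(:set_mentionable_text) { ->(txt) { subject.description = txt } }
  end
end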
diff --git a/spec/support/shared_examples/models/project_hook_data_shared_examples.rb b/spec/support/shared_examples/models/project_hook_data_shared_examples.rb
index 03d10c10e3c..73a6fd7aeb8 100644
--- a/spec/support/shared_examples/models/project_hook_data_shared_examples.rb
+++ b/spec/support/shared_examples/models/project_hook_data_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'project hook data with deprecateds' do |project_key: :project|
+RSpec.shared_examples 'project hook data with deprecateds' do |project_key: :project|
it 'contains project data' do
expect(data[project_key][:name]).to eq(project.name)
expect(data[project_key][:description]).to eq(project.description)
@@ -19,7 +19,7 @@ shared_examples 'project hook data with deprecateds' do |project_key: :project|
end
end
-shared_examples 'project hook data' do |project_key: :project|
+RSpec.shared_examples 'project hook data' do |project_key: :project|
it 'contains project data' do
expect(data[project_key][:name]).to eq(project.name)
expect(data[project_key][:description]).to eq(project.description)
@@ -34,7 +34,7 @@ shared_examples 'project hook data' do |project_key: :project|
end
end
-shared_examples 'deprecated repository hook data' do
+RSpec.shared_examples 'deprecated repository hook data' do
it 'contains deprecated repository data' do
expect(data[:repository][:name]).to eq(project.name)
expect(data[:repository][:description]).to eq(project.description)
diff --git a/spec/support/shared_examples/project_latest_successful_build_for_examples.rb b/spec/support/shared_examples/models/project_latest_successful_build_for_shared_examples.rb
index a9bd23e9fc9..7bbc0c5a364 100644
--- a/spec/support/shared_examples/project_latest_successful_build_for_examples.rb
+++ b/spec/support/shared_examples/models/project_latest_successful_build_for_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'latest successful build for sha or ref' do
+RSpec.shared_examples 'latest successful build for sha or ref' do
context 'with many builds' do
let(:other_pipeline) { create_pipeline(project) }
let(:other_build) { create_build(other_pipeline, 'test') }
diff --git a/spec/support/shared_examples/relative_positioning_shared_examples.rb b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
index 99e62ebf422..99e62ebf422 100644
--- a/spec/support/shared_examples/relative_positioning_shared_examples.rb
+++ b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
diff --git a/spec/support/shared_examples/models/services_fields_shared_examples.rb b/spec/support/shared_examples/models/services_fields_shared_examples.rb
index 6fbd0da9383..cb36f74460d 100644
--- a/spec/support/shared_examples/models/services_fields_shared_examples.rb
+++ b/spec/support/shared_examples/models/services_fields_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'issue tracker fields' do
+RSpec.shared_examples 'issue tracker fields' do
let(:title) { 'custom title' }
let(:description) { 'custom description' }
let(:url) { 'http://issue_tracker.example.com' }
diff --git a/spec/support/shared_examples/slack_mattermost_notifications_shared_examples.rb b/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb
index 2b68e7bfa82..2b68e7bfa82 100644
--- a/spec/support/shared_examples/slack_mattermost_notifications_shared_examples.rb
+++ b/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb
diff --git a/spec/support/shared_examples/taskable_shared_examples.rb b/spec/support/shared_examples/models/taskable_shared_examples.rb
index f04f509f3d2..34b1d735bcd 100644
--- a/spec/support/shared_examples/taskable_shared_examples.rb
+++ b/spec/support/shared_examples/models/taskable_shared_examples.rb
@@ -4,7 +4,7 @@
#
# Requires a context containing:
# subject { Issue or MergeRequest }
-shared_examples 'a Taskable' do
+RSpec.shared_examples 'a Taskable' do
describe 'with multiple tasks' do
before do
subject.description = <<-EOT.strip_heredoc
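The header comment spells out the required context (a subject that is an Issue or MergeRequest), so a caller reduces to something like:

RSpec.describe MergeRequest do
  it_behaves_like 'a Taskable' do
    subject { build(:merge_request) }
  end
end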
diff --git a/spec/support/shared_examples/throttled_touch.rb b/spec/support/shared_examples/models/throttled_touch_shared_examples.rb
index aaaa590862d..fc4f6053bb9 100644
--- a/spec/support/shared_examples/throttled_touch.rb
+++ b/spec/support/shared_examples/models/throttled_touch_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'throttled touch' do
+RSpec.shared_examples 'throttled touch' do
describe '#touch' do
it 'updates the updated_at timestamp' do
Timecop.freeze do
diff --git a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
index e03435cafe8..7d70df82ec7 100644
--- a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
+++ b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-shared_examples_for 'UpdateProjectStatistics' do
+RSpec.shared_examples 'UpdateProjectStatistics' do
let(:project) { subject.project }
let(:project_statistics_name) { described_class.project_statistics_name }
let(:statistic_attribute) { described_class.statistic_attribute }
diff --git a/spec/support/shared_examples/models/user_mentions_shared_examples.rb b/spec/support/shared_examples/models/user_mentions_shared_examples.rb
index b94994ea712..66c629cb4b8 100644
--- a/spec/support/shared_examples/models/user_mentions_shared_examples.rb
+++ b/spec/support/shared_examples/models/user_mentions_shared_examples.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-shared_examples_for 'has user mentions' do
+RSpec.shared_examples 'has user mentions' do
describe '#has_mentions?' do
context 'when no mentions' do
it 'returns false' do
diff --git a/spec/support/shared_examples/versioned_description_shared_examples.rb b/spec/support/shared_examples/models/versioned_description_shared_examples.rb
index 59124af19ec..59124af19ec 100644
--- a/spec/support/shared_examples/versioned_description_shared_examples.rb
+++ b/spec/support/shared_examples/models/versioned_description_shared_examples.rb
diff --git a/spec/support/shared_examples/models/with_uploads_shared_examples.rb b/spec/support/shared_examples/models/with_uploads_shared_examples.rb
index 3d622ba8195..f2a4d9919b7 100644
--- a/spec/support/shared_examples/models/with_uploads_shared_examples.rb
+++ b/spec/support/shared_examples/models/with_uploads_shared_examples.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-shared_examples_for 'model with uploads' do |supports_fileuploads|
+RSpec.shared_examples 'model with uploads' do |supports_fileuploads|
describe '.destroy' do
before do
stub_uploads_object_storage(uploader_class)
diff --git a/spec/support/shared_examples/nav_sidebar_shared_examples.rb b/spec/support/shared_examples/nav_sidebar_shared_examples.rb
new file mode 100644
index 00000000000..e084a957785
--- /dev/null
+++ b/spec/support/shared_examples/nav_sidebar_shared_examples.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'has nav sidebar' do
+ it 'has collapsed nav sidebar on mobile' do
+ render
+
+ expect(rendered).to have_selector('.nav-sidebar')
+ expect(rendered).not_to have_selector('.sidebar-collapsed-desktop')
+ expect(rendered).not_to have_selector('.sidebar-expanded-mobile')
+ end
+end
+
+RSpec.shared_examples 'page has active tab' do |title|
+ it "activates #{title} tab" do
+ expect(page).to have_selector('.sidebar-top-level-items > li.active', count: 1)
+ expect(find('.sidebar-top-level-items > li.active')).to have_content(title)
+ end
+end
+
+RSpec.shared_examples 'page has active sub tab' do |title|
+ it "activates #{title} sub tab" do
+ expect(page).to have_selector('.sidebar-sub-level-items > li.active:not(.fly-out-top-item)', count: 1)
+ expect(find('.sidebar-sub-level-items > li.active:not(.fly-out-top-item)'))
+ .to have_content(title)
+ end
+end
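The first example renders the current view, so it is meant for view specs; a rough sketch of one (the template path and assigns are assumptions):

RSpec.describe 'layouts/nav/sidebar/_project' do
  let(:project) { create(:project, :repository) }

  before do
    assign(:project, project)
  end

  it_behaves_like 'has nav sidebar'
end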
diff --git a/spec/support/shared_examples/policies/clusterable_shared_examples.rb b/spec/support/shared_examples/policies/clusterable_shared_examples.rb
index 0b427c23256..b96aa71acbe 100644
--- a/spec/support/shared_examples/policies/clusterable_shared_examples.rb
+++ b/spec/support/shared_examples/policies/clusterable_shared_examples.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-shared_examples 'clusterable policies' do
+RSpec.shared_examples 'clusterable policies' do
describe '#add_cluster?' do
let(:current_user) { create(:user) }
diff --git a/spec/support/shared_examples/policies/within_timeframe_shared_examples.rb b/spec/support/shared_examples/policies/within_timeframe_shared_examples.rb
new file mode 100644
index 00000000000..918db6886d3
--- /dev/null
+++ b/spec/support/shared_examples/policies/within_timeframe_shared_examples.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'within_timeframe scope' do
+ describe '.within_timeframe' do
+ it 'returns resources with start_date and/or end_date between timeframe' do
+ resources = described_class.within_timeframe(now + 2.days, now + 3.days)
+
+ expect(resources).to match_array([resource_2, resource_4])
+ end
+
+ it 'returns resources which starts before the timeframe' do
+ resources = described_class.within_timeframe(now, now + 1.day)
+
+ expect(resources).to match_array([resource_1, resource_3, resource_4])
+ end
+
+ it 'returns resources which ends after the timeframe' do
+ resources = described_class.within_timeframe(now + 3.days, now + 5.days)
+
+ expect(resources).to match_array([resource_2, resource_4])
+ end
+ end
+end
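The scope example expects `now` plus four resources whose dates line up with the three assertions above. One set of dates that satisfies them, assuming inclusive overlap semantics and using a milestone factory purely as a placeholder:

RSpec.describe Milestone do
  it_behaves_like 'within_timeframe scope' do
    let_it_be(:now) { Time.current }
    # Overlaps only the first timeframe checked above.
    let_it_be(:resource_1) { create(:milestone, start_date: now - 1.day, due_date: now + 1.day) }
    let_it_be(:resource_2) { create(:milestone, start_date: now + 2.days, due_date: now + 3.days) }
    let_it_be(:resource_3) { create(:milestone, due_date: now) }
    let_it_be(:resource_4) { create(:milestone, start_date: now, due_date: now + 3.days) }
  end
end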
diff --git a/spec/support/shared_examples/quick_actions/commit/tag_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/commit/tag_quick_action_shared_examples.rb
index f5a86e4dc2c..14b384b149d 100644
--- a/spec/support/shared_examples/quick_actions/commit/tag_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/commit/tag_quick_action_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'tag quick action' do
+RSpec.shared_examples 'tag quick action' do
context "post note to existing commit" do
it 'tags this commit' do
add_note("/tag #{tag_name} #{tag_message}")
diff --git a/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
index 6e7eb78261a..e2582f20ece 100644
--- a/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'close quick action' do |issuable_type|
+RSpec.shared_examples 'close quick action' do |issuable_type|
include Spec::Support::Helpers::Features::NotesHelpers
before do
diff --git a/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
index 439c068471b..4db52795cd4 100644
--- a/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'issuable quick actions' do
+RSpec.shared_examples 'issuable quick actions' do
QuickAction = Struct.new(:action_text, :expectation, :before_action, keyword_init: true) do
# Pass a block as :before_action if
# issuable state needs to be changed before
diff --git a/spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb
index ed904c8d539..37a504cd56a 100644
--- a/spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issuable/time_tracking_quick_action_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'issuable time tracker' do |issuable_type|
+RSpec.shared_examples 'issuable time tracker' do |issuable_type|
before do
project.add_maintainer(maintainer)
gitlab_sign_in(maintainer)
diff --git a/spec/support/shared_examples/quick_actions/issue/board_move_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issue/board_move_quick_action_shared_examples.rb
index 6edd20bb024..321e7d386c6 100644
--- a/spec/support/shared_examples/quick_actions/issue/board_move_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issue/board_move_quick_action_shared_examples.rb
@@ -1,4 +1,4 @@
# frozen_string_literal: true
-shared_examples 'board_move quick action' do
+RSpec.shared_examples 'board_move quick action' do
end
diff --git a/spec/support/shared_examples/quick_actions/issue/create_merge_request_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issue/create_merge_request_quick_action_shared_examples.rb
index 3e9ee9a633f..159660e7d1d 100644
--- a/spec/support/shared_examples/quick_actions/issue/create_merge_request_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issue/create_merge_request_quick_action_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'create_merge_request quick action' do
+RSpec.shared_examples 'create_merge_request quick action' do
context 'create a merge request starting from an issue' do
def expect_mr_quickaction(success, branch_name = nil)
command_message = if branch_name
diff --git a/spec/support/shared_examples/quick_actions/issue/move_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issue/move_quick_action_shared_examples.rb
index bebc8509d53..897a962fc56 100644
--- a/spec/support/shared_examples/quick_actions/issue/move_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issue/move_quick_action_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'move quick action' do
+RSpec.shared_examples 'move quick action' do
context 'move the issue to another project' do
let(:target_project) { create(:project, :public) }
diff --git a/spec/support/shared_examples/quick_actions/issue/zoom_quick_actions_shared_examples.rb b/spec/support/shared_examples/quick_actions/issue/zoom_quick_actions_shared_examples.rb
index 92bbc4abe77..1ea249d5f9d 100644
--- a/spec/support/shared_examples/quick_actions/issue/zoom_quick_actions_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issue/zoom_quick_actions_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'zoom quick actions' do
+RSpec.shared_examples 'zoom quick actions' do
let(:zoom_link) { 'https://zoom.us/j/123456789' }
let(:existing_zoom_link) { 'https://zoom.us/j/123456780' }
let(:invalid_zoom_link) { 'https://invalid-zoom' }
diff --git a/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
index a77d729aa2c..fa163b54405 100644
--- a/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'merge quick action' do
+RSpec.shared_examples 'merge quick action' do
context 'when the current user can merge the MR' do
before do
sign_in(user)
diff --git a/spec/support/shared_examples/repo_type_shared_examples.rb b/spec/support/shared_examples/repo_type_shared_examples.rb
deleted file mode 100644
index dc9e3a73346..00000000000
--- a/spec/support/shared_examples/repo_type_shared_examples.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-shared_examples 'a repo type' do
- describe "#identifier_for_subject" do
- subject { described_class.identifier_for_subject(project) }
-
- it { is_expected.to eq(expected_identifier) }
- end
-
- describe "#fetch_id" do
- it "finds an id match in the identifier" do
- expect(described_class.fetch_id(expected_identifier)).to eq(expected_id)
- end
-
- it 'does not break on other identifiers' do
- expect(described_class.fetch_id("wiki-noid")).to eq(nil)
- end
- end
-
- describe "#path_suffix" do
- subject { described_class.path_suffix }
-
- it { is_expected.to eq(expected_suffix) }
- end
-
- describe "#repository_for" do
- it "finds the repository for the repo type" do
- expect(described_class.repository_for(project)).to eq(expected_repository)
- end
- end
-end
diff --git a/spec/support/shared_examples/award_emoji_todo_shared_examples.rb b/spec/support/shared_examples/requests/api/award_emoji_todo_shared_examples.rb
index 88ad37d232f..88ad37d232f 100644
--- a/spec/support/shared_examples/award_emoji_todo_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/award_emoji_todo_shared_examples.rb
diff --git a/spec/support/api/boards_shared_examples.rb b/spec/support/shared_examples/requests/api/boards_shared_examples.rb
index d41490f33e4..2bc79a2ef4d 100644
--- a/spec/support/api/boards_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/boards_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'group and project boards' do |route_definition, ee = false|
+RSpec.shared_examples 'group and project boards' do |route_definition, ee = false|
let(:root_url) { route_definition.gsub(":id", board_parent.id.to_s) }
before do
@@ -31,7 +31,7 @@ shared_examples_for 'group and project boards' do |route_definition, ee = false|
it "returns authentication error" do
get api(root_url)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -39,7 +39,7 @@ shared_examples_for 'group and project boards' do |route_definition, ee = false|
it "returns the issue boards" do
get api(root_url, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect_schema_match_for(response, 'public_api/v4/boards', ee)
@@ -63,7 +63,7 @@ shared_examples_for 'group and project boards' do |route_definition, ee = false|
it 'returns issue board lists' do
get api(url, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(2)
@@ -73,7 +73,7 @@ shared_examples_for 'group and project boards' do |route_definition, ee = false|
it 'returns 404 if board not found' do
get api("#{root_url}/22343/lists", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -83,7 +83,7 @@ shared_examples_for 'group and project boards' do |route_definition, ee = false|
it 'returns a list' do
get api("#{url}/#{dev_list.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(dev_list.id)
expect(json_response['label']['name']).to eq(dev_label.title)
expect(json_response['position']).to eq(1)
@@ -92,7 +92,7 @@ shared_examples_for 'group and project boards' do |route_definition, ee = false|
it 'returns 404 if list not found' do
get api("#{url}/5324", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -102,7 +102,7 @@ shared_examples_for 'group and project boards' do |route_definition, ee = false|
it 'creates a new issue board list for labels' do
post api(url, user), params: { label_id: ux_label.id }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['label']['name']).to eq(ux_label.title)
expect(json_response['position']).to eq(3)
end
@@ -110,13 +110,13 @@ shared_examples_for 'group and project boards' do |route_definition, ee = false|
it 'returns 400 when creating a new list if label_id is invalid' do
post api(url, user), params: { label_id: 23423 }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 403 for members with guest role' do
put api("#{url}/#{test_list.id}", guest), params: { position: 1 }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -126,20 +126,20 @@ shared_examples_for 'group and project boards' do |route_definition, ee = false|
it "updates a list" do
put api("#{url}/#{test_list.id}", user), params: { position: 1 }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['position']).to eq(1)
end
it "returns 404 error if list id not found" do
put api("#{url}/44444", user), params: { position: 1 }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns 403 for members with guest role" do
put api("#{url}/#{test_list.id}", guest), params: { position: 1 }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -149,19 +149,19 @@ shared_examples_for 'group and project boards' do |route_definition, ee = false|
it "rejects a non member from deleting a list" do
delete api("#{url}/#{dev_list.id}", non_member)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it "rejects a user with guest role from deleting a list" do
delete api("#{url}/#{dev_list.id}", guest)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it "returns 404 error if list id not found" do
delete api("#{url}/44444", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
context "when the user is parent owner" do
@@ -178,7 +178,7 @@ shared_examples_for 'group and project boards' do |route_definition, ee = false|
it "deletes the list if an admin requests it" do
delete api("#{url}/#{dev_list.id}", owner)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
it_behaves_like '412 response' do
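Since the example builds its URLs by substituting :id in the route it is given, a caller only passes the route; the surrounding spec is still expected to define board_parent, the boards, lists, labels and the user/guest/non_member/owner accounts. A sketch:

RSpec.describe API::Boards do
  it_behaves_like 'group and project boards', '/projects/:id/boards'
end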
diff --git a/spec/support/shared_examples/container_repositories_shared_examples.rb b/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb
index b4f45ba9a00..0f277c11913 100644
--- a/spec/support/shared_examples/container_repositories_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'rejected container repository access' do |user_type, status|
+RSpec.shared_examples 'rejected container repository access' do |user_type, status|
context "for #{user_type}" do
let(:api_user) { users[user_type] }
@@ -12,7 +12,7 @@ shared_examples 'rejected container repository access' do |user_type, status|
end
end
-shared_examples 'returns repositories for allowed users' do |user_type, scope|
+RSpec.shared_examples 'returns repositories for allowed users' do |user_type, scope|
context "for #{user_type}" do
it 'returns a list of repositories' do
subject
@@ -57,7 +57,7 @@ shared_examples 'returns repositories for allowed users' do |user_type, scope|
end
end
-shared_examples 'a gitlab tracking event' do |category, action|
+RSpec.shared_examples 'a gitlab tracking event' do |category, action|
it "creates a gitlab tracking event #{action}" do
expect(Gitlab::Tracking).to receive(:event).with(category, action, {})
diff --git a/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb b/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb
index 776a0bdd29e..8cbf11b6de1 100644
--- a/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'custom attributes endpoints' do |attributable_name|
+RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
let!(:custom_attribute1) { attributable.custom_attributes.create key: 'foo', value: 'foo' }
let!(:custom_attribute2) { attributable.custom_attributes.create key: 'bar', value: 'bar' }
@@ -13,7 +13,7 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
it 'does not filter by custom attributes' do
get api("/#{attributable_name}", user), params: { custom_attributes: { foo: 'foo', bar: 'bar' } }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to be 2
expect(json_response.map { |r| r['id'] }).to contain_exactly attributable.id, other_attributable.id
end
@@ -23,7 +23,7 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
it 'filters by custom attributes' do
get api("/#{attributable_name}", admin), params: { custom_attributes: { foo: 'foo', bar: 'bar' } }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to be 1
expect(json_response.first['id']).to eq attributable.id
end
@@ -39,7 +39,7 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
it 'does not include custom attributes' do
get api("/#{attributable_name}", user), params: { with_custom_attributes: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to be 2
expect(json_response.first).not_to include 'custom_attributes'
end
@@ -49,7 +49,7 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
it 'does not include custom attributes by default' do
get api("/#{attributable_name}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to be 2
expect(json_response.first).not_to include 'custom_attributes'
expect(json_response.second).not_to include 'custom_attributes'
@@ -58,7 +58,7 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
it 'includes custom attributes if requested' do
get api("/#{attributable_name}", admin), params: { with_custom_attributes: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to be 2
attributable_response = json_response.find { |r| r['id'] == attributable.id }
@@ -79,7 +79,7 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
it 'does not include custom attributes' do
get api("/#{attributable_name}/#{attributable.id}", user), params: { with_custom_attributes: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to include 'custom_attributes'
end
end
@@ -88,14 +88,14 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
it 'does not include custom attributes by default' do
get api("/#{attributable_name}/#{attributable.id}", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to include 'custom_attributes'
end
it 'includes custom attributes if requested' do
get api("/#{attributable_name}/#{attributable.id}", admin), params: { with_custom_attributes: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['custom_attributes']).to contain_exactly(
{ 'key' => 'foo', 'value' => 'foo' },
{ 'key' => 'bar', 'value' => 'bar' }
@@ -115,7 +115,7 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
it 'returns all custom attributes' do
get api("/#{attributable_name}/#{attributable.id}/custom_attributes", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to contain_exactly(
{ 'key' => 'foo', 'value' => 'foo' },
{ 'key' => 'bar', 'value' => 'bar' }
@@ -135,7 +135,7 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
it'returns a single custom attribute' do
get api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq({ 'key' => 'foo', 'value' => 'foo' })
end
end
@@ -154,7 +154,7 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
put api("/#{attributable_name}/#{attributable.id}/custom_attributes/new", admin), params: { value: 'new' }
end.to change { attributable.custom_attributes.count }.by(1)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq({ 'key' => 'new', 'value' => 'new' })
expect(attributable.custom_attributes.find_by(key: 'new').value).to eq 'new'
end
@@ -164,7 +164,7 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
put api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin), params: { value: 'new' }
end.not_to change { attributable.custom_attributes.count }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq({ 'key' => 'foo', 'value' => 'new' })
expect(custom_attribute1.reload.value).to eq 'new'
end
@@ -184,7 +184,7 @@ shared_examples 'custom attributes endpoints' do |attributable_name|
delete api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
end.to change { attributable.custom_attributes.count }.by(-1)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
expect(attributable.custom_attributes.find_by(key: 'foo')).to be_nil
end
end
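A sketch of how these endpoints examples might be included; `attributable` and `other_attributable` are the lets the examples read, and the user/admin setup here is illustrative:

RSpec.describe API::Users do
  let(:user) { create(:user) }
  let(:admin) { create(:admin) }

  it_behaves_like 'custom attributes endpoints', 'users' do
    let(:attributable) { user }
    let(:other_attributable) { admin }
  end
end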
diff --git a/spec/support/shared_examples/requests/api/diff_discussions.rb b/spec/support/shared_examples/requests/api/diff_discussions_shared_examples.rb
index a7774d17d3c..583475678f1 100644
--- a/spec/support/shared_examples/requests/api/diff_discussions.rb
+++ b/spec/support/shared_examples/requests/api/diff_discussions_shared_examples.rb
@@ -1,13 +1,13 @@
# frozen_string_literal: true
-shared_examples 'diff discussions API' do |parent_type, noteable_type, id_name|
+RSpec.shared_examples 'diff discussions API' do |parent_type, noteable_type, id_name|
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions" do
it "includes diff discussions" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user)
discussion = json_response.find { |record| record['id'] == diff_note.discussion_id }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(discussion).not_to be_nil
expect(discussion['individual_note']).to eq(false)
expect(discussion['notes'].first['body']).to eq(diff_note.note)
@@ -18,7 +18,7 @@ shared_examples 'diff discussions API' do |parent_type, noteable_type, id_name|
it "returns a discussion by id" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions/#{diff_note.discussion_id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(diff_note.discussion_id)
expect(json_response['notes'].first['body']).to eq(diff_note.note)
expect(json_response['notes'].first['position']).to eq(diff_note.position.to_h.stringify_keys)
@@ -32,7 +32,7 @@ shared_examples 'diff discussions API' do |parent_type, noteable_type, id_name|
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user),
params: { body: 'hi!', position: position }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['notes'].first['body']).to eq('hi!')
expect(json_response['notes'].first['type']).to eq('DiffNote')
expect(json_response['notes'].first['position']).to eq(position.stringify_keys)
@@ -45,7 +45,7 @@ shared_examples 'diff discussions API' do |parent_type, noteable_type, id_name|
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user),
params: { body: 'hi!', position: position }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a 400 bad request error when the position is not valid for this discussion" do
@@ -54,7 +54,7 @@ shared_examples 'diff discussions API' do |parent_type, noteable_type, id_name|
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user),
params: { body: 'hi!', position: position }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -64,7 +64,7 @@ shared_examples 'diff discussions API' do |parent_type, noteable_type, id_name|
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{diff_note.discussion_id}/notes", user), params: { body: 'hi!' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['body']).to eq('hi!')
expect(json_response['type']).to eq('DiffNote')
end
diff --git a/spec/support/shared_examples/requests/api/discussions.rb b/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
index 2a5a48f3054..939ea405724 100644
--- a/spec/support/shared_examples/requests/api/discussions.rb
+++ b/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'with cross-reference system notes' do
+RSpec.shared_examples 'with cross-reference system notes' do
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.project }
let(:new_merge_request) { create(:merge_request) }
@@ -23,7 +23,7 @@ shared_examples 'with cross-reference system notes' do
it 'returns only the note that the user should see' do
get api(url, user, personal_access_token: pat)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq(1)
expect(notes_in_response.count).to eq(1)
@@ -40,7 +40,7 @@ shared_examples 'with cross-reference system notes' do
get api(url, user, personal_access_token: pat)
end
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
RequestStore.clear!
@@ -50,16 +50,16 @@ shared_examples 'with cross-reference system notes' do
RequestStore.clear!
expect { get api(url, user, personal_access_token: pat) }.not_to exceed_query_limit(control)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
-shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_reply_to_individual_notes: false|
+RSpec.shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_reply_to_individual_notes: false|
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions" do
it "returns an array of discussions" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['id']).to eq(note.discussion_id)
@@ -68,7 +68,7 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
it "returns a 404 error when noteable id not found" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/12345/discussions", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns 404 when not authorized" do
@@ -76,7 +76,7 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", private_user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -84,7 +84,7 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
it "returns a discussion by id" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions/#{note.discussion_id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(note.discussion_id)
expect(json_response['notes'].first['body']).to eq(note.note)
end
@@ -92,7 +92,7 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
it "returns a 404 error if discussion not found" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions/12345", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -100,7 +100,7 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
it "creates a new note" do
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user), params: { body: 'hi!' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['notes'].first['body']).to eq('hi!')
expect(json_response['notes'].first['author']['username']).to eq(user.username)
end
@@ -108,13 +108,13 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
it "returns a 400 bad request error if body not given" do
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a 401 unauthorized error if user not authenticated" do
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions"), params: { body: 'hi!' }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'tracks a Notes::CreateService event' do
@@ -146,7 +146,7 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user),
params: { body: 'hi!', created_at: creation_time }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['notes'].first['body']).to eq('hi!')
expect(json_response['notes'].first['author']['username']).to eq(user.username)
expect(Time.parse(json_response['notes'].first['created_at'])).to be_like_time(creation_time)
@@ -162,7 +162,7 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", private_user),
params: { body: 'Foo' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -181,7 +181,7 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
end
it 'raises 404 error' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -191,7 +191,7 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
end
it 'raises 404 error' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -203,7 +203,7 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}/notes", user), params: { body: 'Hello!' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['body']).to eq('Hello!')
expect(json_response['type']).to eq('DiscussionNote')
end
@@ -212,7 +212,7 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}/notes", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
context 'when the discussion is an individual note' do
@@ -225,13 +225,13 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
if can_reply_to_individual_notes
it 'creates a new discussion' do
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['body']).to eq('hi!')
expect(json_response['type']).to eq('DiscussionNote')
end
else
it 'returns 400 bad request' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
@@ -242,7 +242,7 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}/notes/#{note.id}", user), params: { body: 'Hello!' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['body']).to eq('Hello!')
end
@@ -251,14 +251,14 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
"discussions/#{note.discussion_id}/notes/12345", user),
params: { body: 'Hello!' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 400 bad request error if body not given' do
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}/notes/#{note.id}", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -267,18 +267,18 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_r
delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}/notes/#{note.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
# Check if note is really deleted
delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}/notes/#{note.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 error when note id not found' do
delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}/notes/12345", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it_behaves_like '412 response' do
diff --git a/spec/support/shared_examples/requests/api/issuable_participants_examples.rb b/spec/support/shared_examples/requests/api/issuable_participants_examples.rb
index 9fe6288d53f..e442b988349 100644
--- a/spec/support/shared_examples/requests/api/issuable_participants_examples.rb
+++ b/spec/support/shared_examples/requests/api/issuable_participants_examples.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-shared_examples 'issuable participants endpoint' do
+RSpec.shared_examples 'issuable participants endpoint' do
let(:area) { entity.class.name.underscore.pluralize }
it 'returns participants' do
get api("/projects/#{project.id}/#{area}/#{entity.iid}/participants", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(entity.participants.size)
@@ -20,12 +20,12 @@ shared_examples 'issuable participants endpoint' do
it 'returns a 404 when iid does not exist' do
get api("/projects/#{project.id}/#{area}/999/participants", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 when id is used instead of iid' do
get api("/projects/#{project.id}/#{area}/#{entity.id}/participants", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
diff --git a/spec/support/shared_examples/requests/api/issues/merge_requests_count_shared_examples.rb b/spec/support/shared_examples/requests/api/issues/merge_requests_count_shared_examples.rb
index 90c1ed8d09b..971b21b5b32 100644
--- a/spec/support/shared_examples/requests/api/issues/merge_requests_count_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/issues/merge_requests_count_shared_examples.rb
@@ -4,7 +4,7 @@ def get_issue
json_response.is_a?(Array) ? json_response.detect {|issue| issue['id'] == target_issue.id} : json_response
end
-shared_examples 'accessible merge requests count' do
+RSpec.shared_examples 'accessible merge requests count' do
it 'returns anonymous accessible merge requests count' do
get api(api_url), params: { scope: 'all' }
diff --git a/spec/support/api/issues_resolving_discussions_shared_examples.rb b/spec/support/shared_examples/requests/api/issues_resolving_discussions_shared_examples.rb
index 4c44f1bd103..b748d5f5eea 100644
--- a/spec/support/api/issues_resolving_discussions_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/issues_resolving_discussions_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'creating an issue resolving discussions through the API' do
+RSpec.shared_examples 'creating an issue resolving discussions through the API' do
it 'creates a new project issue' do
expect(response).to have_gitlab_http_status(:created)
end
diff --git a/spec/support/shared_examples/requests/api/issues_shared_examples.rb b/spec/support/shared_examples/requests/api/issues_shared_examples.rb
index d22210edf99..991dbced02d 100644
--- a/spec/support/shared_examples/requests/api/issues_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/issues_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'labeled issues with labels and label_name params' do
+RSpec.shared_examples 'labeled issues with labels and label_name params' do
shared_examples 'returns label names' do
it 'returns label names' do
expect_paginated_array_response(issue.id)
diff --git a/spec/support/shared_examples/logging_application_context_shared_examples.rb b/spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb
index 038ede884c8..038ede884c8 100644
--- a/spec/support/shared_examples/logging_application_context_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb
diff --git a/spec/support/api/members_shared_examples.rb b/spec/support/shared_examples/requests/api/members_shared_examples.rb
index 603efd4fc75..fce75c29971 100644
--- a/spec/support/api/members_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/members_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'a 404 response when source is private' do
+RSpec.shared_examples 'a 404 response when source is private' do
before do
source.update_column(:visibility_level, Gitlab::VisibilityLevel::PRIVATE)
end
@@ -8,6 +8,6 @@ shared_examples 'a 404 response when source is private' do
it 'returns 404' do
route
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
diff --git a/spec/support/api/milestones_shared_examples.rb b/spec/support/shared_examples/requests/api/milestones_shared_examples.rb
index ce8c2140e99..b7cc5f2ca6b 100644
--- a/spec/support/api/milestones_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/milestones_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'group and project milestones' do |route_definition|
+RSpec.shared_examples 'group and project milestones' do |route_definition|
let(:resource_route) { "#{route}/#{milestone.id}" }
let(:label_1) { create(:label, title: 'label_1', project: project, priority: 1) }
let(:label_2) { create(:label, title: 'label_2', project: project, priority: 2) }
@@ -12,7 +12,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'returns milestones list' do
get api(route, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['title']).to eq(milestone.title)
@@ -21,13 +21,13 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'returns a 401 error if user not authenticated' do
get api(route)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns an array of active milestones' do
get api("#{route}/?state=active", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
@@ -37,7 +37,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'returns an array of closed milestones' do
get api("#{route}/?state=closed", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
@@ -49,7 +49,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
get api(route, user), params: { iids: [closed_milestone.iid, other_milestone.iid] }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.length).to eq(2)
expect(json_response.map { |m| m['id'] }).to match_array([closed_milestone.id, other_milestone.id])
@@ -58,7 +58,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'does not return any milestone if none found' do
get api(route, user), params: { iids: [Milestone.maximum(:iid).succ] }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.length).to eq(0)
end
@@ -77,7 +77,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'returns a milestone by title' do
get api(route, user), params: { title: 'version2' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(1)
expect(json_response.first['title']).to eq milestone.title
expect(json_response.first['id']).to eq milestone.id
@@ -86,7 +86,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'returns a milestone by searching for title' do
get api(route, user), params: { search: 'version2' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.size).to eq(1)
expect(json_response.first['title']).to eq milestone.title
@@ -96,7 +96,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'returns a milestones by searching for description' do
get api(route, user), params: { search: 'open' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.size).to eq(1)
expect(json_response.first['title']).to eq milestone.title
@@ -108,7 +108,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'returns a milestone by id' do
get api(resource_route, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(milestone.title)
expect(json_response['iid']).to eq(milestone.iid)
end
@@ -116,13 +116,13 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'returns 401 error if user not authenticated' do
get api(resource_route)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns a 404 error if milestone id not found' do
get api("#{route}/1234", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -130,7 +130,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'creates a new milestone' do
post api(route, user), params: { title: 'new milestone' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('new milestone')
expect(json_response['description']).to be_nil
end
@@ -138,7 +138,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'creates a new milestone with description and dates' do
post api(route, user), params: { title: 'new milestone', description: 'release', due_date: '2013-03-02', start_date: '2013-02-02' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['description']).to eq('release')
expect(json_response['due_date']).to eq('2013-03-02')
expect(json_response['start_date']).to eq('2013-02-02')
@@ -147,19 +147,19 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'returns a 400 error if title is missing' do
post api(route, user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns a 400 error if params are invalid (duplicate title)' do
post api(route, user), params: { title: milestone.title, description: 'release', due_date: '2013-03-02' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it 'creates a new milestone with reserved html characters' do
post api(route, user), params: { title: 'foo & bar 1.1 -> 2.2' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq('foo & bar 1.1 -> 2.2')
expect(json_response['description']).to be_nil
end
@@ -169,7 +169,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'updates a milestone' do
put api(resource_route, user), params: { title: 'updated title' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq('updated title')
end
@@ -178,19 +178,19 @@ shared_examples_for 'group and project milestones' do |route_definition|
put api(resource_route, user), params: { due_date: nil }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['due_date']).to be_nil
end
it 'returns a 404 error if milestone id not found' do
put api("#{route}/1234", user), params: { title: 'updated title' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'closes milestone' do
put api(resource_route, user), params: { state_event: 'close' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['state']).to eq('closed')
end
@@ -198,7 +198,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'updates milestone with only start date' do
put api(resource_route, user), params: { start_date: Date.tomorrow }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -209,14 +209,14 @@ shared_examples_for 'group and project milestones' do |route_definition|
delete api(resource_route, reporter)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it 'deletes the milestone when the user has developer access to the project' do
delete api(resource_route, user)
expect(project.milestones.find_by_id(milestone.id)).to be_nil
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
end
end
@@ -229,7 +229,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'returns issues for a particular milestone' do
get api(issues_route, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['milestone']['title']).to eq(milestone.title)
@@ -250,14 +250,14 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'matches V4 response schema for a list of issues' do
get api(issues_route, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/issues')
end
it 'returns a 401 error if user not authenticated' do
get api(issues_route)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
describe 'confidential issues' do
@@ -287,7 +287,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'returns confidential issues to team members' do
get api(issues_route, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
# 2 for projects, 3 for group (which has another project with an issue)
@@ -301,7 +301,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
get api(issues_route, member)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -311,7 +311,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
it 'does not return confidential issues to regular users' do
get api(issues_route, create(:user))
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
@@ -324,7 +324,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
get api(issues_route, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
# 2 for projects, 3 for group (which has another project with an issue)
@@ -347,7 +347,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
another_merge_request
get api(merge_requests_route, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
expect(json_response.first['title']).to eq(merge_request.title)
@@ -371,20 +371,20 @@ shared_examples_for 'group and project milestones' do |route_definition|
get api(not_found_route, user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 if the user has no access to the milestone' do
new_user = create :user
get api(merge_requests_route, new_user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 401 error if user not authenticated' do
get api(merge_requests_route)
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'returns merge_requests ordered by position asc' do
@@ -394,7 +394,7 @@ shared_examples_for 'group and project milestones' do |route_definition|
get api(merge_requests_route, user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(2)
diff --git a/spec/support/shared_examples/requests/api/notes.rb b/spec/support/shared_examples/requests/api/notes_shared_examples.rb
index 4ce78d885bc..0c52af43465 100644
--- a/spec/support/shared_examples/requests/api/notes.rb
+++ b/spec/support/shared_examples/requests/api/notes_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
+RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes" do
context 'sorting' do
before do
@@ -20,6 +20,14 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
expect(response_dates).to eq(response_dates.sort.reverse)
end
+ it 'fetches notes using parent path as id parameter' do
+ parent_id = CGI.escape(parent.full_path)
+
+ get api("/#{parent_type}/#{parent_id}/#{noteable_type}/#{noteable[id_name]}/notes", user)
+
+ expect(response.status).to eq(200)
+ end
+
context '2 notes with equal created_at' do
before do
@first_note = Note.first
@@ -82,7 +90,7 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
it "returns an array of notes" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['body']).to eq(note.note)
@@ -91,7 +99,7 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
it "returns a 404 error when noteable id not found" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/12345/notes", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns 404 when not authorized" do
@@ -99,7 +107,7 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", private_user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -107,14 +115,14 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
it "returns a note by id" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['body']).to eq(note.note)
end
it "returns a 404 error if note not found" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/12345", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -122,7 +130,7 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
it "creates a new note" do
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: { body: 'hi!' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['body']).to eq('hi!')
expect(json_response['author']['username']).to eq(user.username)
end
@@ -130,13 +138,13 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
it "returns a 400 bad request error if body not given" do
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a 401 unauthorized error if user not authenticated" do
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes"), params: { body: 'hi!' }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "creates an activity event when a note is created", :sidekiq_might_not_need_inline do
@@ -154,7 +162,7 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
admin = create(:admin)
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", admin), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['body']).to eq('hi!')
expect(json_response['author']['username']).to eq(admin.username)
expect(Time.parse(json_response['created_at'])).to be_like_time(creation_time)
@@ -167,7 +175,7 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
it 'sets the creation time on the new note' do
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['body']).to eq('hi!')
expect(json_response['author']['username']).to eq(user.username)
expect(Time.parse(json_response['created_at'])).to be_like_time(creation_time)
@@ -185,7 +193,7 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user2), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['body']).to eq('hi!')
expect(json_response['author']['username']).to eq(user2.username)
expect(Time.parse(json_response['created_at'])).to be_like_time(creation_time)
@@ -197,7 +205,7 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
it 'sets the creation time on the new note' do
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['body']).to eq('hi!')
expect(json_response['author']['username']).to eq(user.username)
expect(Time.parse(json_response['created_at'])).to be_like_time(creation_time)
@@ -212,7 +220,7 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
parent.add_developer(user2)
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user2), params: params
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['body']).to eq('hi!')
expect(json_response['author']['username']).to eq(user2.username)
expect(Time.parse(json_response['created_at'])).not_to be_like_time(creation_time)
@@ -226,16 +234,16 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
parent.add_developer(private_user)
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", private_user), params: { body: ':+1:' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['body']).to eq(':+1:')
end
end
- context 'when the user is posting an award emoji on his/her own noteable' do
+ context 'when the user is posting an award emoji on their own noteable' do
it 'creates a new note' do
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: { body: ':+1:' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['body']).to eq(':+1:')
end
end
@@ -249,7 +257,7 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", private_user),
params: { body: 'Foo' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -259,7 +267,7 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"notes/#{note.id}", user), params: { body: 'Hello!' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['body']).to eq('Hello!')
end
@@ -267,14 +275,14 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/12345", user),
params: { body: 'Hello!' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 400 bad request error if body not given' do
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"notes/#{note.id}", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
end
@@ -283,17 +291,17 @@ shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"notes/#{note.id}", user)
- expect(response).to have_gitlab_http_status(204)
+ expect(response).to have_gitlab_http_status(:no_content)
# Check if note is really deleted
delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"notes/#{note.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 404 error when note id not found' do
delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/12345", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it_behaves_like '412 response' do
diff --git a/spec/support/shared_examples/requests/api/pipelines/visibility_table_examples.rb b/spec/support/shared_examples/requests/api/pipelines/visibility_table_shared_examples.rb
index dfd07176b1c..8dd2ef6ccc6 100644
--- a/spec/support/shared_examples/requests/api/pipelines/visibility_table_examples.rb
+++ b/spec/support/shared_examples/requests/api/pipelines/visibility_table_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'pipelines visibility table' do
+RSpec.shared_examples 'pipelines visibility table' do
using RSpec::Parameterized::TableSyntax
let(:ci_user) { create(:user) }
diff --git a/spec/support/api/scopes/read_user_shared_examples.rb b/spec/support/shared_examples/requests/api/read_user_shared_examples.rb
index 3786a8012f9..59cd0ab67b4 100644
--- a/spec/support/api/scopes/read_user_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/read_user_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'allows the "read_user" scope' do |api_version|
+RSpec.shared_examples 'allows the "read_user" scope' do |api_version|
let(:version) { api_version || 'v4' }
context 'for personal access tokens' do
@@ -10,7 +10,7 @@ shared_examples_for 'allows the "read_user" scope' do |api_version|
it 'returns a "200" response' do
get api_call.call(path, user, personal_access_token: token, version: version)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -20,7 +20,7 @@ shared_examples_for 'allows the "read_user" scope' do |api_version|
it 'returns a "200" response' do
get api_call.call(path, user, personal_access_token: token, version: version)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -34,7 +34,7 @@ shared_examples_for 'allows the "read_user" scope' do |api_version|
it 'returns a "403" response' do
get api_call.call(path, user, personal_access_token: token, version: version)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
@@ -48,7 +48,7 @@ shared_examples_for 'allows the "read_user" scope' do |api_version|
it 'returns a "200" response' do
get api_call.call(path, user, oauth_access_token: token)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -58,7 +58,7 @@ shared_examples_for 'allows the "read_user" scope' do |api_version|
it 'returns a "200" response' do
get api_call.call(path, user, oauth_access_token: token)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -68,20 +68,20 @@ shared_examples_for 'allows the "read_user" scope' do |api_version|
it 'returns a "403" response' do
get api_call.call(path, user, oauth_access_token: token)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
end
-shared_examples_for 'does not allow the "read_user" scope' do
+RSpec.shared_examples 'does not allow the "read_user" scope' do
context 'when the requesting token has the "read_user" scope' do
let(:token) { create(:personal_access_token, scopes: ['read_user'], user: user) }
it 'returns a "403" response' do
post api_call.call(path, user, personal_access_token: token), params: attributes_for(:user, projects_limit: 3)
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
diff --git a/spec/support/api/repositories_shared_context.rb b/spec/support/shared_examples/requests/api/repositories_shared_context.rb
index 346015106e3..cc3a495bec1 100644
--- a/spec/support/api/repositories_shared_context.rb
+++ b/spec/support/shared_examples/requests/api/repositories_shared_context.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_context 'disabled repository' do
+RSpec.shared_context 'disabled repository' do
before do
project.project_feature.update!(
repository_access_level: ProjectFeature::DISABLED,
diff --git a/spec/support/shared_examples/requests/api/resolvable_discussions.rb b/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb
index 42054a273f3..8d2a3f13d8e 100644
--- a/spec/support/shared_examples/requests/api/resolvable_discussions.rb
+++ b/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-shared_examples 'resolvable discussions API' do |parent_type, noteable_type, id_name|
+RSpec.shared_examples 'resolvable discussions API' do |parent_type, noteable_type, id_name|
describe "PUT /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions/:discussion_id" do
it "resolves discussion if resolved is true" do
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}", user), params: { resolved: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['notes'].size).to eq(1)
expect(json_response['notes'][0]['resolved']).to eq(true)
end
@@ -15,7 +15,7 @@ shared_examples 'resolvable discussions API' do |parent_type, noteable_type, id_
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}", user), params: { resolved: false }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['notes'].size).to eq(1)
expect(json_response['notes'][0]['resolved']).to eq(false)
end
@@ -24,21 +24,21 @@ shared_examples 'resolvable discussions API' do |parent_type, noteable_type, id_
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a 401 unauthorized error if user is not authenticated" do
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}"), params: { resolved: true }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
it "returns a 403 error if user resolves discussion of someone else" do
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}", private_user), params: { resolved: true }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
context 'when user does not have access to read the discussion' do
@@ -50,7 +50,7 @@ shared_examples 'resolvable discussions API' do |parent_type, noteable_type, id_
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}", private_user), params: { resolved: true }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -60,7 +60,7 @@ shared_examples 'resolvable discussions API' do |parent_type, noteable_type, id_
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}/notes/#{note.id}", user), params: { resolved: true }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['resolved']).to eq(true)
end
@@ -69,21 +69,21 @@ shared_examples 'resolvable discussions API' do |parent_type, noteable_type, id_
"discussions/#{note.discussion_id}/notes/12345", user),
params: { body: 'Hello!' }
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns a 400 bad request error if neither body nor resolved parameter is given' do
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}/notes/#{note.id}", user)
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it "returns a 403 error if user resolves note of someone else" do
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
"discussions/#{note.discussion_id}/notes/#{note.id}", private_user), params: { resolved: true }
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
diff --git a/spec/support/shared_examples/resource_label_events_api.rb b/spec/support/shared_examples/requests/api/resource_label_events_api_shared_examples.rb
index 6622df78ee2..520c3ea8e47 100644
--- a/spec/support/shared_examples/resource_label_events_api.rb
+++ b/spec/support/shared_examples/requests/api/resource_label_events_api_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'resource_label_events API' do |parent_type, eventable_type, id_name|
+RSpec.shared_examples 'resource_label_events API' do |parent_type, eventable_type, id_name|
describe "GET /#{parent_type}/:id/#{eventable_type}/:noteable_id/resource_label_events" do
context "with local label reference" do
let!(:event) { create_event(label) }
@@ -8,7 +8,7 @@ shared_examples 'resource_label_events API' do |parent_type, eventable_type, id_
it "returns an array of resource label events" do
get api("/#{parent_type}/#{parent.id}/#{eventable_type}/#{eventable[id_name]}/resource_label_events", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['id']).to eq(event.id)
@@ -17,7 +17,7 @@ shared_examples 'resource_label_events API' do |parent_type, eventable_type, id_
it "returns a 404 error when eventable id not found" do
get api("/#{parent_type}/#{parent.id}/#{eventable_type}/12345/resource_label_events", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns 404 when not authorized" do
@@ -26,7 +26,7 @@ shared_examples 'resource_label_events API' do |parent_type, eventable_type, id_
get api("/#{parent_type}/#{parent.id}/#{eventable_type}/#{eventable[id_name]}/resource_label_events", private_user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -60,7 +60,7 @@ shared_examples 'resource_label_events API' do |parent_type, eventable_type, id_
it "returns a resource label event by id" do
get api("/#{parent_type}/#{parent.id}/#{eventable_type}/#{eventable[id_name]}/resource_label_events/#{event.id}", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(event.id)
end
@@ -70,13 +70,13 @@ shared_examples 'resource_label_events API' do |parent_type, eventable_type, id_
get api("/#{parent_type}/#{parent.id}/#{eventable_type}/#{eventable[id_name]}/resource_label_events/#{event.id}", private_user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it "returns a 404 error if resource label event not found" do
get api("/#{parent_type}/#{parent.id}/#{eventable_type}/#{eventable[id_name]}/resource_label_events/12345", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -88,7 +88,7 @@ shared_examples 'resource_label_events API' do |parent_type, eventable_type, id_
it "returns a 404 error if cross-reference project is not accessible" do
get api("/#{parent_type}/#{parent.id}/#{eventable_type}/#{eventable[id_name]}/resource_label_events/#{event.id}", user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/support/shared_examples/requests/api/status_shared_examples.rb b/spec/support/shared_examples/requests/api/status_shared_examples.rb
index ed9964fa108..8207190b1dc 100644
--- a/spec/support/shared_examples/requests/api/status_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/status_shared_examples.rb
@@ -4,7 +4,7 @@
#
# Requires an API request:
# let(:request) { get api("/projects/#{project.id}/repository/branches", user) }
-shared_examples_for '400 response' do
+RSpec.shared_examples '400 response' do
let(:message) { nil }
before do
@@ -13,7 +13,7 @@ shared_examples_for '400 response' do
end
it 'returns 400' do
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
if message.present?
expect(json_response['message']).to eq(message)
@@ -21,18 +21,18 @@ shared_examples_for '400 response' do
end
end
-shared_examples_for '403 response' do
+RSpec.shared_examples '403 response' do
before do
# Fires the request
request
end
it 'returns 403' do
- expect(response).to have_gitlab_http_status(403)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
-shared_examples_for '404 response' do
+RSpec.shared_examples '404 response' do
let(:message) { nil }
before do
@@ -41,7 +41,7 @@ shared_examples_for '404 response' do
end
it 'returns 404' do
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
expect(json_response).to be_an Object
if message.present?
@@ -50,7 +50,7 @@ shared_examples_for '404 response' do
end
end
-shared_examples_for '412 response' do
+RSpec.shared_examples '412 response' do
let(:params) { nil }
let(:success_status) { 204 }
@@ -60,7 +60,7 @@ shared_examples_for '412 response' do
end
it 'returns 412 with a JSON error' do
- expect(response).to have_gitlab_http_status(412)
+ expect(response).to have_gitlab_http_status(:precondition_failed)
expect(json_response).to eq('message' => '412 Precondition Failed')
end
end
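
Alongside the status-symbol change, these renamed shared-example files switch from the bare shared_examples / shared_examples_for DSL to RSpec.shared_examples, which registers the group explicitly on the RSpec namespace instead of relying on the globally exposed DSL. A small, self-contained sketch of that pattern with hypothetical example names, assuming the rspec and rack gems are available:

# Hypothetical sketch of the RSpec.shared_examples pattern used throughout
# the renamed files: the group is registered on the RSpec namespace and then
# included by name with it_behaves_like.
require 'rack/utils'

RSpec.shared_examples 'a status symbol' do |symbol, code|
  it "maps #{symbol.inspect} to #{code}" do
    expect(Rack::Utils.status_code(symbol)).to eq(code)
  end
end

RSpec.describe 'Rack status symbols' do
  it_behaves_like 'a status symbol', :ok, 200
  it_behaves_like 'a status symbol', :no_content, 204
end
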
diff --git a/spec/support/api/time_tracking_shared_examples.rb b/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
index 3bd1b145433..30ba8d9b436 100644
--- a/spec/support/api/time_tracking_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
-shared_examples 'an unauthorized API user' do
+RSpec.shared_examples 'an unauthorized API user' do
it { is_expected.to eq(403) }
end
-shared_examples 'time tracking endpoints' do |issuable_name|
+RSpec.shared_examples 'time tracking endpoints' do |issuable_name|
let(:non_member) { create(:user) }
issuable_collection_name = issuable_name.pluralize
@@ -19,7 +19,7 @@ shared_examples 'time tracking endpoints' do |issuable_name|
it "sets the time estimate for #{issuable_name}" do
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/time_estimate", user), params: { duration: '1w' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['human_time_estimate']).to eq('1w')
end
@@ -32,7 +32,7 @@ shared_examples 'time tracking endpoints' do |issuable_name|
it 'does not modify the original estimate' do
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/time_estimate", user), params: { duration: 'foo' }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(issuable.reload.human_time_estimate).to eq('1w')
end
end
@@ -41,7 +41,7 @@ shared_examples 'time tracking endpoints' do |issuable_name|
it 'updates the estimate' do
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/time_estimate", user), params: { duration: '3w1h' }
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(issuable.reload.human_time_estimate).to eq('3w 1h')
end
end
@@ -58,7 +58,7 @@ shared_examples 'time tracking endpoints' do |issuable_name|
it "resets the time estimate for #{issuable_name}" do
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/reset_time_estimate", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['time_estimate']).to eq(0)
end
end
@@ -79,7 +79,7 @@ shared_examples 'time tracking endpoints' do |issuable_name|
end.to change { issuable.reload.updated_at }
end
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['human_total_time_spent']).to eq('2h')
end
@@ -93,7 +93,7 @@ shared_examples 'time tracking endpoints' do |issuable_name|
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user), params: { duration: '-1h' }
- expect(response).to have_gitlab_http_status(201)
+ expect(response).to have_gitlab_http_status(:created)
expect(json_response['total_time_spent']).to eq(3600)
end
end
@@ -108,7 +108,7 @@ shared_examples 'time tracking endpoints' do |issuable_name|
end.not_to change { issuable.reload.updated_at }
end
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['time_spent'].first).to match(/exceeds the total time spent/)
end
end
@@ -128,7 +128,7 @@ shared_examples 'time tracking endpoints' do |issuable_name|
end.to change { issuable.reload.updated_at }
end
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['total_time_spent']).to eq(0)
end
end
@@ -140,7 +140,7 @@ shared_examples 'time tracking endpoints' do |issuable_name|
get api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/time_stats", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response['total_time_spent']).to eq(1800)
expect(json_response['time_estimate']).to eq(3600)
end
diff --git a/spec/support/shared_examples/requests/graphql_shared_examples.rb b/spec/support/shared_examples/requests/graphql_shared_examples.rb
index 2a38d56141a..0045fe14501 100644
--- a/spec/support/shared_examples/requests/graphql_shared_examples.rb
+++ b/spec/support/shared_examples/requests/graphql_shared_examples.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-shared_examples 'a working graphql query' do
+RSpec.shared_examples 'a working graphql query' do
include GraphqlHelpers
it 'returns a successful response', :aggregate_failures do
diff --git a/spec/support/shared_examples/lfs_http_shared_examples.rb b/spec/support/shared_examples/requests/lfs_http_shared_examples.rb
index bcd30fe9654..48c5a5933e6 100644
--- a/spec/support/shared_examples/lfs_http_shared_examples.rb
+++ b/spec/support/shared_examples/requests/lfs_http_shared_examples.rb
@@ -1,38 +1,38 @@
# frozen_string_literal: true
-shared_examples 'LFS http 200 response' do
+RSpec.shared_examples 'LFS http 200 response' do
it_behaves_like 'LFS http expected response code and message' do
let(:response_code) { 200 }
end
end
-shared_examples 'LFS http 401 response' do
+RSpec.shared_examples 'LFS http 401 response' do
it_behaves_like 'LFS http expected response code and message' do
let(:response_code) { 401 }
end
end
-shared_examples 'LFS http 403 response' do
+RSpec.shared_examples 'LFS http 403 response' do
it_behaves_like 'LFS http expected response code and message' do
let(:response_code) { 403 }
let(:message) { 'Access forbidden. Check your access level.' }
end
end
-shared_examples 'LFS http 501 response' do
+RSpec.shared_examples 'LFS http 501 response' do
it_behaves_like 'LFS http expected response code and message' do
let(:response_code) { 501 }
let(:message) { 'Git LFS is not enabled on this GitLab server, contact your admin.' }
end
end
-shared_examples 'LFS http 404 response' do
+RSpec.shared_examples 'LFS http 404 response' do
it_behaves_like 'LFS http expected response code and message' do
let(:response_code) { 404 }
end
end
-shared_examples 'LFS http expected response code and message' do
+RSpec.shared_examples 'LFS http expected response code and message' do
let(:response_code) { }
let(:message) { }
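These LFS examples only assert on the response of a request the including spec has already performed. A rough sketch of that shape is below; the endpoint path and the expectation that an unauthenticated batch call is rejected with 401 are assumptions, since the real LFS request specs drive this through dedicated helpers.

require 'spec_helper'

describe 'Git LFS API and storage' do
  let(:project) { create(:project) }

  context 'when the request carries no credentials' do
    before do
      # Perform the request first; the shared example only inspects `response`.
      post "/#{project.full_path}.git/info/lfs/objects/batch"
    end

    it_behaves_like 'LFS http 401 response'
  end
end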
diff --git a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
index c078e982e87..08ccbd4a9c1 100644
--- a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
+++ b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
@@ -8,7 +8,7 @@
# * requests_per_period
# * period_in_seconds
# * period
-shared_examples_for 'rate-limited token-authenticated requests' do
+RSpec.shared_examples 'rate-limited token-authenticated requests' do
let(:throttle_types) do
{
"throttle_protected_paths" => "throttle_authenticated_protected_paths_api",
@@ -33,7 +33,7 @@ shared_examples_for 'rate-limited token-authenticated requests' do
# At first, allow requests under the rate limit.
requests_per_period.times do
make_request(request_args)
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
# the last straw
@@ -43,7 +43,7 @@ shared_examples_for 'rate-limited token-authenticated requests' do
it 'allows requests after throttling and then waiting for the next period' do
requests_per_period.times do
make_request(request_args)
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
expect_rejection { make_request(request_args) }
@@ -51,7 +51,7 @@ shared_examples_for 'rate-limited token-authenticated requests' do
Timecop.travel(period.from_now) do
requests_per_period.times do
make_request(request_args)
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
expect_rejection { make_request(request_args) }
@@ -61,18 +61,18 @@ shared_examples_for 'rate-limited token-authenticated requests' do
it 'counts requests from different users separately, even from the same IP' do
requests_per_period.times do
make_request(request_args)
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
# would be over the limit if this wasn't a different user
make_request(other_user_request_args)
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
it 'counts all requests from the same user, even via different IPs' do
requests_per_period.times do
make_request(request_args)
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
expect_any_instance_of(Rack::Attack::Request).to receive(:ip).at_least(:once).and_return('1.2.3.4')
@@ -83,7 +83,7 @@ shared_examples_for 'rate-limited token-authenticated requests' do
it 'logs RackAttack info into structured logs' do
requests_per_period.times do
make_request(request_args)
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
arguments = {
@@ -112,7 +112,7 @@ shared_examples_for 'rate-limited token-authenticated requests' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
make_request(request_args)
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
end
end
@@ -134,7 +134,7 @@ end
# * requests_per_period
# * period_in_seconds
# * period
-shared_examples_for 'rate-limited web authenticated requests' do
+RSpec.shared_examples 'rate-limited web authenticated requests' do
let(:throttle_types) do
{
"throttle_protected_paths" => "throttle_authenticated_protected_paths_web",
@@ -160,7 +160,7 @@ shared_examples_for 'rate-limited web authenticated requests' do
# At first, allow requests under the rate limit.
requests_per_period.times do
request_authenticated_web_url
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
# the last straw
@@ -170,7 +170,7 @@ shared_examples_for 'rate-limited web authenticated requests' do
it 'allows requests after throttling and then waiting for the next period' do
requests_per_period.times do
request_authenticated_web_url
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
expect_rejection { request_authenticated_web_url }
@@ -178,7 +178,7 @@ shared_examples_for 'rate-limited web authenticated requests' do
Timecop.travel(period.from_now) do
requests_per_period.times do
request_authenticated_web_url
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
expect_rejection { request_authenticated_web_url }
@@ -188,20 +188,20 @@ shared_examples_for 'rate-limited web authenticated requests' do
it 'counts requests from different users separately, even from the same IP' do
requests_per_period.times do
request_authenticated_web_url
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
# would be over the limit if this wasn't a different user
login_as(create(:user))
request_authenticated_web_url
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
it 'counts all requests from the same user, even via different IPs' do
requests_per_period.times do
request_authenticated_web_url
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
expect_any_instance_of(Rack::Attack::Request).to receive(:ip).at_least(:once).and_return('1.2.3.4')
@@ -212,7 +212,7 @@ shared_examples_for 'rate-limited web authenticated requests' do
it 'logs RackAttack info into structured logs' do
requests_per_period.times do
request_authenticated_web_url
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
arguments = {
@@ -241,7 +241,7 @@ shared_examples_for 'rate-limited web authenticated requests' do
it 'allows requests over the rate limit' do
(1 + requests_per_period).times do
request_authenticated_web_url
- expect(response).not_to have_http_status 429
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
end
end
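As the header comment says, the including spec supplies the throttle configuration and request helpers. The sketch below gestures at that shape for the web variant; the helper bodies, the caching tag, and the login call are assumptions, and the real including spec also wires up the application settings that enable the throttle.

require 'spec_helper'

describe 'Rack Attack web throttling', :use_clean_rails_memory_store_caching do
  # The three lets named in the header comment, plus the request helper and
  # rejection matcher the examples call. Everything here is illustrative.
  let(:requests_per_period) { 2 }
  let(:period_in_seconds)   { 60 }
  let(:period)              { period_in_seconds.seconds }

  before do
    login_as(create(:user))
  end

  def request_authenticated_web_url
    get '/dashboard/snippets'
  end

  def expect_rejection
    yield
    expect(response).to have_gitlab_http_status(:too_many_requests)
  end

  it_behaves_like 'rate-limited web authenticated requests'
end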
diff --git a/spec/support/shared_examples/requests/self_monitoring_shared_examples.rb b/spec/support/shared_examples/requests/self_monitoring_shared_examples.rb
index 949aa079435..db11b1fe07d 100644
--- a/spec/support/shared_examples/requests/self_monitoring_shared_examples.rb
+++ b/spec/support/shared_examples/requests/self_monitoring_shared_examples.rb
@@ -1,23 +1,5 @@
# frozen_string_literal: true
-RSpec.shared_examples 'not accessible if feature flag is disabled' do
- before do
- stub_feature_flags(self_monitoring_project: false)
- end
-
- it 'returns not_implemented' do
- subject
-
- aggregate_failures do
- expect(response).to have_gitlab_http_status(:not_implemented)
- expect(json_response).to eq(
- 'message' => _('Self-monitoring is not enabled on this GitLab server, contact your administrator.'),
- 'documentation_url' => help_page_path('administration/monitoring/gitlab_instance_administration_project/index')
- )
- end
- end
-end
-
RSpec.shared_examples 'not accessible to non-admin users' do
context 'with unauthenticated user' do
it 'redirects to signin page' do
diff --git a/spec/support/shared_examples/routing/legacy_path_redirect_shared_examples.rb b/spec/support/shared_examples/routing/legacy_path_redirect_shared_examples.rb
new file mode 100644
index 00000000000..808336db7b1
--- /dev/null
+++ b/spec/support/shared_examples/routing/legacy_path_redirect_shared_examples.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'redirecting a legacy project path' do |source, target|
+ include RSpec::Rails::RequestExampleGroup
+
+ it "redirects #{source} to #{target}" do
+ expect(get(source)).to redirect_to(target)
+ end
+end
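Usage is a one-liner in a routing spec; the source and target paths below are made-up illustrations of the legacy-to-`/-/` migration this example covers.

describe 'legacy project routes' do
  it_behaves_like 'redirecting a legacy project path',
                  '/gitlab-org/gitlab-test/pipelines',
                  '/gitlab-org/gitlab-test/-/pipelines'
end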
diff --git a/spec/support/shared_examples/serializers/diff_file_entity_examples.rb b/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb
index d2c269c597c..db5c4b45b70 100644
--- a/spec/support/shared_examples/serializers/diff_file_entity_examples.rb
+++ b/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'diff file base entity' do
+RSpec.shared_examples 'diff file base entity' do
it 'exposes essential attributes' do
expect(subject).to include(:content_sha, :submodule, :submodule_link,
:submodule_tree_url, :old_path_html,
@@ -26,7 +26,7 @@ shared_examples 'diff file base entity' do
end
end
-shared_examples 'diff file entity' do
+RSpec.shared_examples 'diff file entity' do
it_behaves_like 'diff file base entity'
it 'exposes correct attributes' do
@@ -70,6 +70,6 @@ shared_examples 'diff file entity' do
end
end
-shared_examples 'diff file discussion entity' do
+RSpec.shared_examples 'diff file discussion entity' do
it_behaves_like 'diff file base entity'
end
diff --git a/spec/support/shared_examples/serializers/note_entity_examples.rb b/spec/support/shared_examples/serializers/note_entity_shared_examples.rb
index bfcaa2f1bd5..7b2ec02c7b6 100644
--- a/spec/support/shared_examples/serializers/note_entity_examples.rb
+++ b/spec/support/shared_examples/serializers/note_entity_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'note entity' do
+RSpec.shared_examples 'note entity' do
subject { entity.as_json }
context 'basic note' do
diff --git a/spec/support/shared_examples/services/base_helm_service_shared_examples.rb b/spec/support/shared_examples/services/base_helm_service_shared_examples.rb
index 19f5334b4b2..c2252c83140 100644
--- a/spec/support/shared_examples/services/base_helm_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/base_helm_service_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'logs kubernetes errors' do
+RSpec.shared_examples 'logs kubernetes errors' do
let(:error_hash) do
{
service: service.class.name,
diff --git a/spec/support/shared_examples/services/boards/boards_create_service.rb b/spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb
index 7fd69354c2d..fced2e59ace 100644
--- a/spec/support/shared_examples/services/boards/boards_create_service.rb
+++ b/spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'boards create service' do
+RSpec.shared_examples 'boards create service' do
context 'when parent does not have a board' do
it 'creates a new board' do
expect { service.execute }.to change(Board, :count).by(1)
diff --git a/spec/support/shared_examples/services/boards/boards_list_service.rb b/spec/support/shared_examples/services/boards/boards_list_service_shared_examples.rb
index 18d45ee324a..8f7c08ed625 100644
--- a/spec/support/shared_examples/services/boards/boards_list_service.rb
+++ b/spec/support/shared_examples/services/boards/boards_list_service_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'boards list service' do
+RSpec.shared_examples 'boards list service' do
context 'when parent does not have a board' do
it 'creates a new parent board' do
expect { service.execute }.to change(parent.boards, :count).by(1)
@@ -11,6 +11,12 @@ shared_examples 'boards list service' do
service.execute
end
+
+ context 'when create_default_board is false' do
+ it 'does not create a new parent board' do
+ expect { service.execute(create_default_board: false) }.not_to change(parent.boards, :count)
+ end
+ end
end
context 'when parent has a board' do
@@ -30,7 +36,7 @@ shared_examples 'boards list service' do
end
end
-shared_examples 'multiple boards list service' do
+RSpec.shared_examples 'multiple boards list service' do
let(:service) { described_class.new(parent, double) }
let!(:board_B) { create(:board, resource_parent: parent, name: 'B-board') }
let!(:board_c) { create(:board, resource_parent: parent, name: 'c-board') }
@@ -44,5 +50,20 @@ shared_examples 'multiple boards list service' do
it 'returns boards ordered by name' do
expect(service.execute).to eq [board_a, board_B, board_c]
end
+
+ context 'when wanting a specific board' do
+ it 'returns board specified by id' do
+ service = described_class.new(parent, double, board_id: board_c.id)
+
+ expect(service.execute).to eq [board_c]
+ end
+
+ it 'raises exception when board is not found' do
+ outside_board = create(:board, resource_parent: create(:project), name: 'outside board')
+ service = described_class.new(parent, double, board_id: outside_board.id)
+
+ expect { service.execute }.to raise_exception(ActiveRecord::RecordNotFound)
+ end
+ end
end
end
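For reference, specs including these examples only need to provide `parent` and `service`. A minimal sketch against the project-level service, with the factory and constructor shape assumed from the example bodies above:

require 'spec_helper'

describe Boards::ListService do
  let(:parent)  { create(:project) }
  let(:service) { described_class.new(parent, double) }

  it_behaves_like 'boards list service'
end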
diff --git a/spec/support/shared_examples/services/boards/issues_list_service.rb b/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb
index 75733c774ef..ec1c58e5b67 100644
--- a/spec/support/shared_examples/services/boards/issues_list_service.rb
+++ b/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'issues list service' do
+RSpec.shared_examples 'issues list service' do
it 'delegates search to IssuesFinder' do
params = { board_id: board.id, id: list1.id }
@@ -9,7 +9,7 @@ shared_examples 'issues list service' do
described_class.new(parent, user, params).execute
end
- context '#metadata' do
+ describe '#metadata' do
it 'returns issues count for list' do
params = { board_id: board.id, id: list1.id }
diff --git a/spec/support/shared_examples/services/boards/issues_move_service.rb b/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
index d3fa8084185..f352b430cc7 100644
--- a/spec/support/shared_examples/services/boards/issues_move_service.rb
+++ b/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'issues move service' do |group|
+RSpec.shared_examples 'issues move service' do |group|
shared_examples 'updating timestamps' do
it 'updates updated_at' do
expect {described_class.new(parent, user, params).execute(issue)}
diff --git a/spec/support/shared_examples/services/boards/lists_destroy_service.rb b/spec/support/shared_examples/services/boards/lists_destroy_service_shared_examples.rb
index 95725078f9d..6a4f284ec54 100644
--- a/spec/support/shared_examples/services/boards/lists_destroy_service.rb
+++ b/spec/support/shared_examples/services/boards/lists_destroy_service_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'lists destroy service' do
+RSpec.shared_examples 'lists destroy service' do
context 'when list type is label' do
it 'removes list from board' do
list = create(:list, board: board)
diff --git a/spec/support/shared_examples/services/boards/lists_list_service.rb b/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb
index 29784f6da08..1b7fe626aea 100644
--- a/spec/support/shared_examples/services/boards/lists_list_service.rb
+++ b/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'lists list service' do
+RSpec.shared_examples 'lists list service' do
context 'when the board has a backlog list' do
let!(:backlog_list) { create(:backlog_list, board: board) }
diff --git a/spec/support/shared_examples/services/boards/lists_move_service.rb b/spec/support/shared_examples/services/boards/lists_move_service_shared_examples.rb
index 0b3bfd8e2a8..bf84b912610 100644
--- a/spec/support/shared_examples/services/boards/lists_move_service.rb
+++ b/spec/support/shared_examples/services/boards/lists_move_service_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'lists move service' do
+RSpec.shared_examples 'lists move service' do
let!(:planning) { create(:list, board: board, position: 0) }
let!(:development) { create(:list, board: board, position: 1) }
let!(:review) { create(:list, board: board, position: 2) }
diff --git a/spec/support/shared_examples/services/check_ingress_ip_address_service_shared_examples.rb b/spec/support/shared_examples/services/check_ingress_ip_address_service_shared_examples.rb
index 1e0ac8b7615..e1efe61cce3 100644
--- a/spec/support/shared_examples/services/check_ingress_ip_address_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/check_ingress_ip_address_service_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'check ingress ip executions' do |app_name|
+RSpec.shared_examples 'check ingress ip executions' do |app_name|
describe '#execute' do
let(:application) { create(app_name, :installed) }
let(:service) { described_class.new(application) }
diff --git a/spec/support/shared_examples/common_system_notes_examples.rb b/spec/support/shared_examples/services/common_system_notes_shared_examples.rb
index ca79603a022..4ce3e32d774 100644
--- a/spec/support/shared_examples/common_system_notes_examples.rb
+++ b/spec/support/shared_examples/services/common_system_notes_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'system note creation' do |update_params, note_text|
+RSpec.shared_examples 'system note creation' do |update_params, note_text|
subject { described_class.new(project, user).execute(issuable, old_labels: []) }
before do
@@ -17,7 +17,7 @@ shared_examples 'system note creation' do |update_params, note_text|
end
end
-shared_examples 'WIP notes creation' do |wip_action|
+RSpec.shared_examples 'WIP notes creation' do |wip_action|
subject { described_class.new(project, user).execute(issuable, old_labels: []) }
it 'creates WIP toggle and title change notes' do
@@ -28,7 +28,7 @@ shared_examples 'WIP notes creation' do |wip_action|
end
end
-shared_examples_for 'a note with overridable created_at' do
+RSpec.shared_examples 'a note with overridable created_at' do
let(:noteable) { create(:issue, project: project, system_note_timestamp: Time.at(42)) }
it 'the note has the correct time' do
@@ -36,7 +36,7 @@ shared_examples_for 'a note with overridable created_at' do
end
end
-shared_examples_for 'a system note' do |params|
+RSpec.shared_examples 'a system note' do |params|
let(:expected_noteable) { noteable }
let(:commit_count) { nil }
diff --git a/spec/support/shared_examples/services/count_service_shared_examples.rb b/spec/support/shared_examples/services/count_service_shared_examples.rb
index 9bea180a778..54c6ff79976 100644
--- a/spec/support/shared_examples/services/count_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/count_service_shared_examples.rb
@@ -6,7 +6,7 @@
# describe MyCountService, :use_clean_rails_memory_store_caching do
# it_behaves_like 'a counter caching service'
# end
-shared_examples 'a counter caching service' do
+RSpec.shared_examples 'a counter caching service' do
describe '#count' do
it 'caches the count', :request_store do
subject.delete_cache
diff --git a/spec/support/shared_examples/services/error_tracking_service_shared_examples.rb b/spec/support/shared_examples/services/error_tracking_service_shared_examples.rb
index 83c6d89e560..c825a970b57 100644
--- a/spec/support/shared_examples/services/error_tracking_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/error_tracking_service_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'error tracking service data not ready' do |service_call|
+RSpec.shared_examples 'error tracking service data not ready' do |service_call|
context "when #{service_call} returns nil" do
before do
expect(error_tracking_setting)
@@ -14,7 +14,7 @@ shared_examples 'error tracking service data not ready' do |service_call|
end
end
-shared_examples 'error tracking service sentry error handling' do |service_call|
+RSpec.shared_examples 'error tracking service sentry error handling' do |service_call|
context "when #{service_call} returns error" do
before do
allow(error_tracking_setting)
@@ -35,7 +35,7 @@ shared_examples 'error tracking service sentry error handling' do |service_call|
end
end
-shared_examples 'error tracking service http status handling' do |service_call|
+RSpec.shared_examples 'error tracking service http status handling' do |service_call|
context "when #{service_call} returns error with http_status" do
before do
allow(error_tracking_setting)
@@ -56,7 +56,7 @@ shared_examples 'error tracking service http status handling' do |service_call|
end
end
-shared_examples 'error tracking service unauthorized user' do
+RSpec.shared_examples 'error tracking service unauthorized user' do
context 'with unauthorized user' do
let(:unauthorized_user) { create(:user) }
@@ -74,7 +74,7 @@ shared_examples 'error tracking service unauthorized user' do
end
end
-shared_examples 'error tracking service disabled' do
+RSpec.shared_examples 'error tracking service disabled' do
context 'with error tracking disabled' do
before do
error_tracking_setting.enabled = false
diff --git a/spec/support/shared_examples/services/gitlab_projects_import_service_shared_examples.rb b/spec/support/shared_examples/services/gitlab_projects_import_service_shared_examples.rb
index 1c3fa5644d3..2aac7e328f0 100644
--- a/spec/support/shared_examples/services/gitlab_projects_import_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/gitlab_projects_import_service_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'gitlab projects import validations' do
+RSpec.shared_examples 'gitlab projects import validations' do
context 'with an invalid path' do
let(:path) { '/invalid-path/' }
diff --git a/spec/support/shared_examples/issuable_shared_examples.rb b/spec/support/shared_examples/services/issuable_shared_examples.rb
index 3460a8ba297..9eb66e33513 100644
--- a/spec/support/shared_examples/issuable_shared_examples.rb
+++ b/spec/support/shared_examples/services/issuable_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'cache counters invalidator' do
+RSpec.shared_examples 'cache counters invalidator' do
it 'invalidates counter cache for assignees' do
expect_any_instance_of(User).to receive(:invalidate_merge_request_cache_counts)
@@ -8,7 +8,7 @@ shared_examples 'cache counters invalidator' do
end
end
-shared_examples 'system notes for milestones' do
+RSpec.shared_examples 'system notes for milestones' do
def update_issuable(opts)
issuable = try(:issue) || try(:merge_request)
described_class.new(project, user, opts).execute(issuable)
@@ -39,7 +39,7 @@ shared_examples 'system notes for milestones' do
end
end
-shared_examples 'updating a single task' do
+RSpec.shared_examples 'updating a single task' do
def update_issuable(opts)
issuable = try(:issue) || try(:merge_request)
described_class.new(project, user, opts).execute(issuable)
diff --git a/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb b/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
index 30d91346df3..1f229d6b783 100644
--- a/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
+++ b/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples_for 'misconfigured dashboard service response' do |status_code, message = nil|
+RSpec.shared_examples 'misconfigured dashboard service response' do |status_code, message = nil|
it 'returns an appropriate message and status code', :aggregate_failures do
result = service_call
@@ -11,7 +11,7 @@ shared_examples_for 'misconfigured dashboard service response' do |status_code,
end
end
-shared_examples_for 'valid dashboard service response for schema' do
+RSpec.shared_examples 'valid dashboard service response for schema' do
it 'returns a json representation of the dashboard' do
result = service_call
@@ -22,13 +22,13 @@ shared_examples_for 'valid dashboard service response for schema' do
end
end
-shared_examples_for 'valid dashboard service response' do
+RSpec.shared_examples 'valid dashboard service response' do
let(:dashboard_schema) { JSON.parse(fixture_file('lib/gitlab/metrics/dashboard/schemas/dashboard.json')) }
it_behaves_like 'valid dashboard service response for schema'
end
-shared_examples_for 'caches the unprocessed dashboard for subsequent calls' do
+RSpec.shared_examples 'caches the unprocessed dashboard for subsequent calls' do
it do
expect(YAML).to receive(:safe_load).once.and_call_original
@@ -37,16 +37,51 @@ shared_examples_for 'caches the unprocessed dashboard for subsequent calls' do
end
end
-shared_examples_for 'valid embedded dashboard service response' do
+RSpec.shared_examples 'valid embedded dashboard service response' do
let(:dashboard_schema) { JSON.parse(fixture_file('lib/gitlab/metrics/dashboard/schemas/embedded_dashboard.json')) }
it_behaves_like 'valid dashboard service response for schema'
end
-shared_examples_for 'raises error for users with insufficient permissions' do
+RSpec.shared_examples 'raises error for users with insufficient permissions' do
context 'when the user does not have sufficient access' do
let(:user) { build(:user) }
it_behaves_like 'misconfigured dashboard service response', :unauthorized
end
end
+
+RSpec.shared_examples 'valid dashboard cloning process' do |dashboard_template, sequence|
+ context "dashboard template: #{dashboard_template}" do
+ let(:dashboard) { dashboard_template }
+ let(:dashboard_attrs) do
+ {
+ commit_message: commit_message,
+ branch_name: branch,
+ start_branch: project.default_branch,
+ encoding: 'text',
+ file_path: ".gitlab/dashboards/#{file_name}",
+ file_content: file_content_hash.to_yaml
+ }
+ end
+
+ it 'delegates commit creation to Files::CreateService', :aggregate_failures do
+ service_instance = instance_double(::Files::CreateService)
+ expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
+ expect(service_instance).to receive(:execute).and_return(status: :success)
+
+ service_call
+ end
+
+ context 'user has defined custom metrics' do
+      it 'uses an external service to include them in the new file content', :aggregate_failures do
+ service_instance = double(::Gitlab::Metrics::Dashboard::Processor)
+ expect(::Gitlab::Metrics::Dashboard::Processor).to receive(:new).with(project, file_content_hash, sequence, {}).and_return(service_instance)
+ expect(service_instance).to receive(:process).and_return(file_content_hash)
+ expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(double(execute: { status: :success }))
+
+ service_call
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/notification_service_shared_examples.rb b/spec/support/shared_examples/services/notification_service_shared_examples.rb
index ad580b581d6..43fe6789145 100644
--- a/spec/support/shared_examples/services/notification_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/notification_service_shared_examples.rb
@@ -3,7 +3,7 @@
# Note that we actually update the attribute on the target_project/group, rather than
# using `allow`. This is because there are some specs where, based on how the notification
# is done, using an `allow` doesn't change the correct object.
-shared_examples 'project emails are disabled' do
+RSpec.shared_examples 'project emails are disabled' do |check_delivery_jobs_queue: false|
let(:target_project) { notification_target.is_a?(Project) ? notification_target : notification_target.project }
before do
@@ -16,7 +16,13 @@ shared_examples 'project emails are disabled' do
notification_trigger
- should_not_email_anyone
+ if check_delivery_jobs_queue
+      # Only check enqueued jobs, not delivered emails
+ expect_no_delivery_jobs
+ else
+ # Deprecated: Check actual delivered emails
+ should_not_email_anyone
+ end
end
it 'sends emails to someone' do
@@ -24,11 +30,17 @@ shared_examples 'project emails are disabled' do
notification_trigger
- should_email_anyone
+ if check_delivery_jobs_queue
+      # Only check enqueued jobs, not delivered emails
+ expect_any_delivery_jobs
+ else
+ # Deprecated: Check actual delivered emails
+ should_email_anyone
+ end
end
end
-shared_examples 'group emails are disabled' do
+RSpec.shared_examples 'group emails are disabled' do
let(:target_group) { notification_target.is_a?(Group) ? notification_target : notification_target.project.group }
before do
@@ -53,7 +65,7 @@ shared_examples 'group emails are disabled' do
end
end
-shared_examples 'sends notification only to a maximum of ten, most recently active group owners' do
+RSpec.shared_examples 'sends notification only to a maximum of ten, most recently active group owners' do
let(:owners) { create_list(:user, 12, :with_sign_ins) }
before do
@@ -75,7 +87,7 @@ shared_examples 'sends notification only to a maximum of ten, most recently acti
end
end
-shared_examples 'sends notification only to a maximum of ten, most recently active project maintainers' do
+RSpec.shared_examples 'sends notification only to a maximum of ten, most recently active project maintainers' do
let(:maintainers) { create_list(:user, 12, :with_sign_ins) }
before do
diff --git a/spec/support/shared_examples/pages_size_limit_shared_examples.rb b/spec/support/shared_examples/services/pages_size_limit_shared_examples.rb
index c1e27194738..15bf0d3698a 100644
--- a/spec/support/shared_examples/pages_size_limit_shared_examples.rb
+++ b/spec/support/shared_examples/services/pages_size_limit_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples 'pages size limit is' do |size_limit|
+RSpec.shared_examples 'pages size limit is' do |size_limit|
context "when size is below the limit" do
before do
allow(metadata).to receive(:total_size).and_return(size_limit - 1.megabyte)
diff --git a/spec/support/shared_examples/updating_mentions_shared_examples.rb b/spec/support/shared_examples/services/updating_mentions_shared_examples.rb
index 84f6c4d136a..84f6c4d136a 100644
--- a/spec/support/shared_examples/updating_mentions_shared_examples.rb
+++ b/spec/support/shared_examples/services/updating_mentions_shared_examples.rb
diff --git a/spec/support/shared_examples/snippets_shared_examples.rb b/spec/support/shared_examples/snippets_shared_examples.rb
deleted file mode 100644
index 5c35617bd36..00000000000
--- a/spec/support/shared_examples/snippets_shared_examples.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-# These shared examples expect a `snippets` array of snippets
-RSpec.shared_examples 'paginated snippets' do |remote: false|
- it "is limited to #{Snippet.default_per_page} items per page" do
- expect(page.all('.snippets-list-holder .snippet-row').count).to eq(Snippet.default_per_page)
- end
-
- context 'clicking on the link to the second page' do
- before do
- click_link('2')
- wait_for_requests if remote
- end
-
- it 'shows the remaining snippets' do
- remaining_snippets_count = [snippets.size - Snippet.default_per_page, Snippet.default_per_page].min
- expect(page).to have_selector('.snippets-list-holder .snippet-row', count: remaining_snippets_count)
- end
- end
-end
diff --git a/spec/support/shared_examples/spam_check_shared_examples.rb b/spec/support/shared_examples/spam_check_shared_examples.rb
new file mode 100644
index 00000000000..3ecae16db39
--- /dev/null
+++ b/spec/support/shared_examples/spam_check_shared_examples.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+shared_examples 'akismet spam' do
+ context 'when request is missing' do
+ subject { described_class.new(spammable: issue, request: nil) }
+
+ it "doesn't check as spam" do
+ subject
+
+ expect(issue).not_to be_spam
+ end
+ end
+
+ context 'when request exists' do
+ it 'creates a spam log' do
+ expect { subject }
+ .to log_spam(title: issue.title, description: issue.description, noteable_type: 'Issue')
+ end
+ end
+end
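A sketch of the kind of spec expected to include this; the service class name, its constructor, and the `execute` call are assumptions, since the examples themselves only require an `issue`, a `request`, and the `log_spam` matcher.

require 'spec_helper'

describe Spam::SpamCheckService do # placeholder class name for illustration
  let(:issue)   { create(:issue) }
  let(:request) { double(:request, env: {}) }

  subject { described_class.new(spammable: issue, request: request).execute }

  it_behaves_like 'akismet spam'
end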
diff --git a/spec/support/shared_examples/tasks/gitlab/import_export/import_measurement_shared_examples.rb b/spec/support/shared_examples/tasks/gitlab/import_export/import_measurement_shared_examples.rb
new file mode 100644
index 00000000000..e232f237df9
--- /dev/null
+++ b/spec/support/shared_examples/tasks/gitlab/import_export/import_measurement_shared_examples.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'import measurement' do
+ context 'when measurement is enabled' do
+ let(:measurement_enabled) { true }
+
+ it 'prints measurement results' do
+      expect { subject }.to output(including('Measuring enabled...', 'Number of sql calls:', 'Total GC count:')).to_stdout
+ end
+ end
+
+ context 'when measurement is not enabled' do
+ let(:measurement_enabled) { false }
+
+ it 'does not output measurement results' do
+ expect { subject }.not_to output(/Measuring enabled.../).to_stdout
+ end
+ end
+
+ context 'when measurement is not provided' do
+ let(:task_params) { [username, namespace_path, project_name, archive_path] }
+
+ it 'does not output measurement results' do
+ expect { subject }.not_to output(/Measuring enabled.../).to_stdout
+ end
+
+ it 'does not raise any exception' do
+ expect { subject }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb b/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb
index 9263aaff89a..12bcbb8b812 100644
--- a/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb
+++ b/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-shared_examples "matches the method pattern" do |method|
+RSpec.shared_examples "matches the method pattern" do |method|
let(:target) { subject }
let(:args) { nil }
let(:pattern) { patterns[method] }
@@ -12,7 +12,7 @@ shared_examples "matches the method pattern" do |method|
end
end
-shared_examples "builds correct paths" do |**patterns|
+RSpec.shared_examples "builds correct paths" do |**patterns|
let(:patterns) { patterns }
before do
diff --git a/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
index 5d605dd811b..f8b00d1e4c0 100644
--- a/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
+++ b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-shared_context 'with storage' do |store, **stub_params|
+RSpec.shared_context 'with storage' do |store, **stub_params|
before do
subject.object_store = store
end
end
-shared_examples "migrates" do |to_store:, from_store: nil|
+RSpec.shared_examples "migrates" do |to_store:, from_store: nil|
let(:to) { to_store }
let(:from) { from_store || subject.object_store }
diff --git a/spec/support/shared_examples/uploaders/upload_type_shared_examples.rb b/spec/support/shared_examples/uploaders/upload_type_shared_examples.rb
index 91d2526cde2..6e5075e135d 100644
--- a/spec/support/shared_examples/uploaders/upload_type_shared_examples.rb
+++ b/spec/support/shared_examples/uploaders/upload_type_shared_examples.rb
@@ -5,13 +5,13 @@ def check_content_matches_extension!(file = double(read: nil, path: ''))
uploader.check_content_matches_extension!(magic_file)
end
-shared_examples 'upload passes content type check' do
+RSpec.shared_examples 'upload passes content type check' do
it 'does not raise error' do
expect { check_content_matches_extension! }.not_to raise_error
end
end
-shared_examples 'upload fails content type check' do
+RSpec.shared_examples 'upload fails content type check' do
it 'raises error' do
expect { check_content_matches_extension! }.to raise_error(CarrierWave::IntegrityError)
end
@@ -42,7 +42,7 @@ def upload_type_checked_fixtures(upload_fixtures)
end
end
-shared_examples 'type checked uploads' do |upload_fixtures = nil, filenames: nil|
+RSpec.shared_examples 'type checked uploads' do |upload_fixtures = nil, filenames: nil|
it 'check type' do
upload_fixtures = Array(upload_fixtures)
filenames = Array(filenames)
@@ -55,7 +55,7 @@ shared_examples 'type checked uploads' do |upload_fixtures = nil, filenames: nil
end
end
-shared_examples 'skipped type checked uploads' do |upload_fixtures = nil, filenames: nil|
+RSpec.shared_examples 'skipped type checked uploads' do |upload_fixtures = nil, filenames: nil|
it 'skip type check' do
expect(uploader).not_to receive(:check_content_matches_extension!)
diff --git a/spec/support/shared_examples/url_validator_examples.rb b/spec/support/shared_examples/validators/url_validator_shared_examples.rb
index c5a775fefb6..c5a775fefb6 100644
--- a/spec/support/shared_examples/url_validator_examples.rb
+++ b/spec/support/shared_examples/validators/url_validator_shared_examples.rb
diff --git a/spec/support/shared_examples/views/nav_sidebar.rb b/spec/support/shared_examples/views/nav_sidebar.rb
deleted file mode 100644
index 6ac5abe275d..00000000000
--- a/spec/support/shared_examples/views/nav_sidebar.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-shared_examples 'has nav sidebar' do
- it 'has collapsed nav sidebar on mobile' do
- render
-
- expect(rendered).to have_selector('.nav-sidebar')
- expect(rendered).not_to have_selector('.sidebar-collapsed-desktop')
- expect(rendered).not_to have_selector('.sidebar-expanded-mobile')
- end
-end
diff --git a/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb b/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb
index 7dffbb04fdc..50879969e90 100644
--- a/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb
+++ b/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
# Expects `worker_class` to be defined
-shared_examples_for 'reenqueuer' do
+RSpec.shared_examples 'reenqueuer' do
subject(:job) { worker_class.new }
before do
@@ -28,7 +28,7 @@ end
# let(:rate_limited_method) { subject.perform }
# end
#
-shared_examples_for 'it is rate limited to 1 call per' do |minimum_duration|
+RSpec.shared_examples 'it is rate limited to 1 call per' do |minimum_duration|
before do
# Allow Timecop freeze and travel without the block form
Timecop.safe_mode = false
diff --git a/spec/support/shared_examples/workers/pages_domain_cron_worker_shared_examples.rb b/spec/support/shared_examples/workers/pages_domain_cron_worker_shared_examples.rb
new file mode 100644
index 00000000000..9e8102aea53
--- /dev/null
+++ b/spec/support/shared_examples/workers/pages_domain_cron_worker_shared_examples.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a pages cronjob scheduling jobs with context' do |scheduled_worker_class|
+ let(:worker) { described_class.new }
+
+ it 'does not cause extra queries for multiple domains' do
+ control = ActiveRecord::QueryRecorder.new { worker.perform }
+
+ extra_domain
+
+ expect { worker.perform }.not_to exceed_query_limit(control)
+ end
+
+ it 'schedules the renewal with a context' do
+ extra_domain
+
+ worker.perform
+
+ expect(scheduled_worker_class.jobs.last).to include("meta.project" => extra_domain.project.full_path)
+ end
+end
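The including worker spec only has to define `extra_domain` and pass the worker class it schedules. A sketch along those lines; the worker class names and factory defaults are assumptions based on the SSL renewal cron this kind of example typically covers.

require 'spec_helper'

describe PagesDomainSslRenewalCronWorker do
  # One domain exists up front so the control query count is realistic;
  # `extra_domain` is created lazily by the shared examples themselves.
  let!(:domain)      { create(:pages_domain) }
  let(:extra_domain) { create(:pages_domain) }

  it_behaves_like 'a pages cronjob scheduling jobs with context', PagesDomainSslRenewalWorker
end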
diff --git a/spec/support/sidekiq.rb b/spec/support/sidekiq.rb
index a6d6d5fc6e1..9fa8df39019 100644
--- a/spec/support/sidekiq.rb
+++ b/spec/support/sidekiq.rb
@@ -1,30 +1,14 @@
# frozen_string_literal: true
RSpec.configure do |config|
- config.around(:each, :sidekiq) do |example|
- Sidekiq::Worker.clear_all
- example.run
- Sidekiq::Worker.clear_all
- end
-
- config.after(:each, :sidekiq, :redis) do
- Sidekiq.redis do |connection|
- connection.redis.flushdb
- end
- end
-
# As we'll review the examples with this tag, we should either:
# - fix the example to not require Sidekiq inline mode (and remove this tag)
# - explicitly keep the inline mode and change the tag to `:sidekiq_inline` instead
config.around(:example, :sidekiq_might_not_need_inline) do |example|
- Sidekiq::Worker.clear_all
Sidekiq::Testing.inline! { example.run }
- Sidekiq::Worker.clear_all
end
config.around(:example, :sidekiq_inline) do |example|
- Sidekiq::Worker.clear_all
Sidekiq::Testing.inline! { example.run }
- Sidekiq::Worker.clear_all
end
end
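With the global clearing hooks removed, inline execution is now purely opt-in via the tags handled above. A tiny sketch of an example opting in; the worker and its arguments are illustrative.

require 'spec_helper'

describe 'refreshing project statistics', :sidekiq_inline do
  let(:project) { create(:project, :repository) }

  it 'runs the cache worker synchronously' do
    # Under Sidekiq::Testing.inline!, perform_async executes the job immediately.
    expect { ProjectCacheWorker.perform_async(project.id, [], [:repository_size]) }
      .not_to raise_error
  end
end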
diff --git a/spec/support_specs/helpers/active_record/query_recorder_spec.rb b/spec/support_specs/helpers/active_record/query_recorder_spec.rb
new file mode 100644
index 00000000000..48069c6a766
--- /dev/null
+++ b/spec/support_specs/helpers/active_record/query_recorder_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ActiveRecord::QueryRecorder do
+ class TestQueries < ActiveRecord::Base
+ self.table_name = 'schema_migrations'
+ end
+
+ describe 'detecting the right number of calls and their origin' do
+ it 'detects two separate queries' do
+ control = ActiveRecord::QueryRecorder.new query_recorder_debug: true do
+ 2.times { TestQueries.count }
+ TestQueries.first
+ end
+
+ # Test first_only flag works as expected
+ expect(control.find_query(/.*query_recorder_spec.rb.*/, 0, first_only: true))
+ .to eq(control.find_query(/.*query_recorder_spec.rb.*/, 0).first)
+ # Check #find_query
+ expect(control.find_query(/.*/, 0).size)
+ .to eq(control.data.keys.size)
+ # Ensure exactly 2 COUNT queries were detected
+ expect(control.occurrences_by_line_method.last[1][:occurrences]
+ .find_all {|i| i.match(/SELECT COUNT/) }.count).to eq(2)
+ # Ensure exactly 1 LIMIT 1 (#first)
+ expect(control.occurrences_by_line_method.first[1][:occurrences]
+ .find_all { |i| i.match(/ORDER BY.*#{TestQueries.table_name}.*LIMIT 1/) }.count).to eq(1)
+
+ # Ensure 3 DB calls overall were executed
+ expect(control.log.size).to eq(3)
+      # Ensure the memoized value matches the raw value above
+ expect(control.count).to eq(control.log.size)
+ # Ensure we have only two sources of queries
+ expect(control.data.keys.size).to eq(2)
+ # Ensure we detect only queries from this file
+ expect(control.data.keys.find_all { |i| i.match(/query_recorder_spec.rb/) }.count).to eq(2)
+ end
+ end
+end
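Beyond this self-test, the recorder is usually paired with the `exceed_query_limit` matcher to guard against N+1 queries. A minimal sketch of that pattern, using illustrative factories:

require 'spec_helper'

describe 'issue title loading' do
  let(:project) { create(:project) }

  it 'does not add a query per extra issue' do
    create(:issue, project: project)

    # Record the baseline query count for one issue...
    control = ActiveRecord::QueryRecorder.new do
      Issue.where(project: project).map(&:title)
    end

    create_list(:issue, 3, project: project)

    # ...and assert that more issues do not mean more queries.
    expect { Issue.where(project: project).map(&:title) }
      .not_to exceed_query_limit(control)
  end
end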
diff --git a/spec/tasks/gitlab/cleanup_rake_spec.rb b/spec/tasks/gitlab/cleanup_rake_spec.rb
index 3c3e5eea838..92ccc195a9a 100644
--- a/spec/tasks/gitlab/cleanup_rake_spec.rb
+++ b/spec/tasks/gitlab/cleanup_rake_spec.rb
@@ -16,10 +16,10 @@ describe 'gitlab:cleanup rake tasks' do
let!(:logger) { double(:logger) }
before do
- expect(main_object).to receive(:logger).and_return(logger).at_least(1).times
+ expect(main_object).to receive(:logger).and_return(logger).at_least(:once)
- allow(logger).to receive(:info).at_least(1).times
- allow(logger).to receive(:debug).at_least(1).times
+ allow(logger).to receive(:info).at_least(:once)
+ allow(logger).to receive(:debug).at_least(:once)
end
context 'with a fixable orphaned project upload file' do
diff --git a/spec/tasks/gitlab/import_export/import_rake_spec.rb b/spec/tasks/gitlab/import_export/import_rake_spec.rb
index b824ede03b2..3a819d23299 100644
--- a/spec/tasks/gitlab/import_export/import_rake_spec.rb
+++ b/spec/tasks/gitlab/import_export/import_rake_spec.rb
@@ -2,11 +2,12 @@
require 'rake_helper'
-describe 'gitlab:import_export:import rake task', :sidekiq do
+describe 'gitlab:import_export:import rake task' do
let(:username) { 'root' }
let(:namespace_path) { username }
let!(:user) { create(:user, username: username) }
- let(:task_params) { [username, namespace_path, project_name, archive_path] }
+ let(:measurement_enabled) { false }
+ let(:task_params) { [username, namespace_path, project_name, archive_path, measurement_enabled] }
let(:project) { Project.find_by_full_path("#{namespace_path}/#{project_name}") }
before do
@@ -68,6 +69,8 @@ describe 'gitlab:import_export:import rake task', :sidekiq do
subject
end
+
+ it_behaves_like 'import measurement'
end
context 'when project import is invalid' do
diff --git a/spec/tasks/gitlab/seed/group_seed_rake_spec.rb b/spec/tasks/gitlab/seed/group_seed_rake_spec.rb
new file mode 100644
index 00000000000..ecf4e9575ab
--- /dev/null
+++ b/spec/tasks/gitlab/seed/group_seed_rake_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+describe 'gitlab:seed:group_seed rake task' do
+ let(:username) { 'group_seed' }
+ let!(:user) { create(:user, username: username) }
+ let(:task_params) { [2, username] }
+
+ before do
+ Rake.application.rake_require('tasks/gitlab/seed/group_seed')
+ end
+
+ subject { run_rake_task('gitlab:seed:group_seed', task_params) }
+
+ it 'performs group seed successfully' do
+ expect { subject }.not_to raise_error
+
+ group = user.groups.first
+
+ expect(user.groups.count).to be 3
+ expect(group.projects.count).to be 2
+ expect(group.members.count).to be 3
+ expect(group.milestones.count).to be 2
+ end
+end
diff --git a/spec/tasks/gitlab/storage_rake_spec.rb b/spec/tasks/gitlab/storage_rake_spec.rb
index ae11e091000..54a84c73a57 100644
--- a/spec/tasks/gitlab/storage_rake_spec.rb
+++ b/spec/tasks/gitlab/storage_rake_spec.rb
@@ -2,7 +2,7 @@
require 'rake_helper'
-describe 'rake gitlab:storage:*', :sidekiq do
+describe 'rake gitlab:storage:*' do
before do
Rake.application.rake_require 'tasks/gitlab/storage'
diff --git a/spec/uploaders/gitlab_uploader_spec.rb b/spec/uploaders/gitlab_uploader_spec.rb
index 4329171f0be..80efdb88585 100644
--- a/spec/uploaders/gitlab_uploader_spec.rb
+++ b/spec/uploaders/gitlab_uploader_spec.rb
@@ -59,7 +59,7 @@ describe GitlabUploader do
describe '#cache!' do
it 'moves the file from the working directory to the cache directory' do
# One to get the work dir, the other to remove it
- expect(subject).to receive(:workfile_path).exactly(2).times.and_call_original
+ expect(subject).to receive(:workfile_path).twice.and_call_original
# Test https://github.com/carrierwaveuploader/carrierwave/blob/v1.0.0/lib/carrierwave/sanitized_file.rb#L200
expect(FileUtils).to receive(:mv).with(anything, /^#{subject.work_dir}/).and_call_original
expect(FileUtils).to receive(:mv).with(/^#{subject.work_dir}/, /#{subject.cache_dir}/).and_call_original
diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb
index aa98b3e2828..bc8d6a33e85 100644
--- a/spec/uploaders/namespace_file_uploader_spec.rb
+++ b/spec/uploaders/namespace_file_uploader_spec.rb
@@ -37,7 +37,7 @@ describe NamespaceFileUploader do
end
end
- context '.base_dir' do
+ describe '.base_dir' do
it 'returns local storage base_dir without store param' do
expect(described_class.base_dir(group)).to eq("uploads/-/system/namespace/#{group.id}")
end
diff --git a/spec/uploaders/workers/object_storage/migrate_uploads_worker_spec.rb b/spec/uploaders/workers/object_storage/migrate_uploads_worker_spec.rb
index c7a4680e18d..89a1fa80943 100644
--- a/spec/uploaders/workers/object_storage/migrate_uploads_worker_spec.rb
+++ b/spec/uploaders/workers/object_storage/migrate_uploads_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe ObjectStorage::MigrateUploadsWorker, :sidekiq do
+describe ObjectStorage::MigrateUploadsWorker do
shared_context 'sanity_check! fails' do
before do
expect(described_class).to receive(:sanity_check!).and_raise(described_class::SanityCheckError)
diff --git a/spec/views/help/index.html.haml_spec.rb b/spec/views/help/index.html.haml_spec.rb
index 474a294ce0d..98040da9d2c 100644
--- a/spec/views/help/index.html.haml_spec.rb
+++ b/spec/views/help/index.html.haml_spec.rb
@@ -40,7 +40,7 @@ describe 'help/index' do
render
expect(rendered).to match '8.0.2'
- expect(rendered).to have_link('abcdefg', href: %r{https://gitlab.com/gitlab-org/(gitlab|gitlab-foss)/commits/abcdefg})
+ expect(rendered).to have_link('abcdefg', href: %r{https://gitlab.com/gitlab-org/(gitlab|gitlab-foss)/-/commits/abcdefg})
end
end
end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index 7decfa58153..6ca8fa2bc5c 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -160,4 +160,31 @@ describe 'layouts/nav/sidebar/_project' do
end
end
end
+
+ describe 'value stream analytics entry' do
+ let(:read_cycle_analytics) { true }
+
+ before do
+ allow(view).to receive(:can?).with(nil, :read_cycle_analytics, project).and_return(read_cycle_analytics)
+ stub_feature_flags(analytics_pages_under_project_analytics_sidebar: { enabled: false, thing: project })
+ end
+
+ describe 'when value stream analytics is enabled' do
+ it 'shows the value stream analytics entry' do
+ render
+
+ expect(rendered).to have_link('Value Stream Analytics', href: project_cycle_analytics_path(project))
+ end
+ end
+
+ describe 'when value stream analytics is disabled' do
+ let(:read_cycle_analytics) { false }
+
+ it 'does not show the value stream analytics entry' do
+ render
+
+ expect(rendered).not_to have_link('Value Stream Analytics', href: project_cycle_analytics_path(project))
+ end
+ end
+ end
end
diff --git a/spec/views/notify/pipeline_failed_email.html.haml_spec.rb b/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
index b821b015c97..a540a53c91d 100644
--- a/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
+++ b/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
@@ -15,7 +15,7 @@ describe 'notify/pipeline_failed_email.html.haml' do
user: user,
ref: project.default_branch,
sha: project.commit.sha,
- status: :success)
+ status: :failed)
end
before do
@@ -24,35 +24,51 @@ describe 'notify/pipeline_failed_email.html.haml' do
assign(:merge_request, merge_request)
end
- context 'pipeline with user' do
- it 'renders the email correctly' do
- render
+ shared_examples_for 'renders the pipeline failed email correctly' do
+ context 'pipeline with user' do
+ it 'renders the email correctly' do
+ render
- expect(rendered).to have_content "Your pipeline has failed"
- expect(rendered).to have_content pipeline.project.name
- expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
- expect(rendered).to have_content pipeline.commit.author_name
- expect(rendered).to have_content "##{pipeline.id}"
- expect(rendered).to have_content pipeline.user.name
+ expect(rendered).to have_content "Your pipeline has failed"
+ expect(rendered).to have_content pipeline.project.name
+ expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
+ expect(rendered).to have_content pipeline.commit.author_name
+ expect(rendered).to have_content "##{pipeline.id}"
+ expect(rendered).to have_content pipeline.user.name
+ expect(rendered).to have_content build.name
+ end
+
+ it_behaves_like 'correct pipeline information for pipelines for merge requests'
end
- it_behaves_like 'correct pipeline information for pipelines for merge requests'
- end
+ context 'pipeline without user' do
+ before do
+ pipeline.update_attribute(:user, nil)
+ end
- context 'pipeline without user' do
- before do
- pipeline.update_attribute(:user, nil)
+ it 'renders the email correctly' do
+ render
+
+ expect(rendered).to have_content "Your pipeline has failed"
+ expect(rendered).to have_content pipeline.project.name
+ expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
+ expect(rendered).to have_content pipeline.commit.author_name
+ expect(rendered).to have_content "##{pipeline.id}"
+ expect(rendered).to have_content "by API"
+ expect(rendered).to have_content build.name
+ end
end
+ end
- it 'renders the email correctly' do
- render
+ context 'when the pipeline contains a failed job' do
+ let!(:build) { create(:ci_build, :failed, pipeline: pipeline, project: pipeline.project) }
- expect(rendered).to have_content "Your pipeline has failed"
- expect(rendered).to have_content pipeline.project.name
- expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
- expect(rendered).to have_content pipeline.commit.author_name
- expect(rendered).to have_content "##{pipeline.id}"
- expect(rendered).to have_content "by API"
- end
+ it_behaves_like 'renders the pipeline failed email correctly'
+ end
+
+ context 'when the latest failed job is a bridge job' do
+ let!(:build) { create(:ci_bridge, status: :failed, pipeline: pipeline, project: pipeline.project) }
+
+ it_behaves_like 'renders the pipeline failed email correctly'
end
end
diff --git a/spec/views/notify/pipeline_failed_email.text.erb_spec.rb b/spec/views/notify/pipeline_failed_email.text.erb_spec.rb
index d15969acf83..9a4cea408a6 100644
--- a/spec/views/notify/pipeline_failed_email.text.erb_spec.rb
+++ b/spec/views/notify/pipeline_failed_email.text.erb_spec.rb
@@ -23,19 +23,31 @@ describe 'notify/pipeline_failed_email.text.erb' do
assign(:merge_request, merge_request)
end
- it 'renders the email correctly' do
- job = create(:ci_build, :failed, pipeline: pipeline, project: pipeline.project)
-
- render
-
- expect(rendered).to have_content('Your pipeline has failed')
- expect(rendered).to have_content(pipeline.project.name)
- expect(rendered).to have_content(pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' '))
- expect(rendered).to have_content(pipeline.commit.author_name)
- expect(rendered).to have_content("##{pipeline.id}")
- expect(rendered).to have_content(pipeline.user.name)
- expect(rendered).to have_content("/-/jobs/#{job.id}/raw")
+ shared_examples_for 'renders the pipeline failed email correctly' do
+ it 'renders the email correctly' do
+ render
+
+ expect(rendered).to have_content('Your pipeline has failed')
+ expect(rendered).to have_content(pipeline.project.name)
+ expect(rendered).to have_content(pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' '))
+ expect(rendered).to have_content(pipeline.commit.author_name)
+ expect(rendered).to have_content("##{pipeline.id}")
+ expect(rendered).to have_content(pipeline.user.name)
+ expect(rendered).to have_content(build.id)
+ end
+
+ it_behaves_like 'correct pipeline information for pipelines for merge requests'
end
- it_behaves_like 'correct pipeline information for pipelines for merge requests'
+ context 'when the pipeline contains a failed job' do
+ let!(:build) { create(:ci_build, :failed, pipeline: pipeline, project: pipeline.project) }
+
+ it_behaves_like 'renders the pipeline failed email correctly'
+ end
+
+ context 'when the latest failed job is a bridge job' do
+ let!(:build) { create(:ci_bridge, status: :failed, pipeline: pipeline, project: pipeline.project) }
+
+ it_behaves_like 'renders the pipeline failed email correctly'
+ end
end
diff --git a/spec/views/projects/tree/_tree_header.html.haml_spec.rb b/spec/views/projects/tree/_tree_header.html.haml_spec.rb
index caf8c4d1969..69ad331f880 100644
--- a/spec/views/projects/tree/_tree_header.html.haml_spec.rb
+++ b/spec/views/projects/tree/_tree_header.html.haml_spec.rb
@@ -19,12 +19,12 @@ describe 'projects/tree/_tree_header' do
allow(view).to receive(:can_collaborate_with_project?) { true }
end
- it 'does not render the WebIDE button when user cannot create fork or cannot open MR' do
+ it 'renders the WebIDE button when user can collaborate but not create fork or MR' do
allow(view).to receive(:can?) { false }
render
- expect(rendered).not_to have_link('Web IDE')
+ expect(rendered).to have_link('Web IDE')
end
it 'renders the WebIDE button when user can create fork and can open MR in project' do
@@ -43,4 +43,13 @@ describe 'projects/tree/_tree_header' do
expect(rendered).to have_link('Web IDE', href: '#modal-confirm-fork')
end
+
+ it 'does not render the WebIDE button when user cannot collaborate or create MR' do
+ allow(view).to receive(:can?) { false }
+ allow(view).to receive(:can_collaborate_with_project?) { false }
+
+ render
+
+ expect(rendered).not_to have_link('Web IDE')
+ end
end
diff --git a/spec/views/shared/projects/_list.html.haml_spec.rb b/spec/views/shared/projects/_list.html.haml_spec.rb
new file mode 100644
index 00000000000..d6043921fc8
--- /dev/null
+++ b/spec/views/shared/projects/_list.html.haml_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'shared/projects/_list' do
+ let(:group) { create(:group) }
+
+ before do
+ allow(view).to receive(:projects).and_return(projects)
+ allow(view).to receive(:project_list_cache_key).and_return('fake_cache_key')
+ end
+
+ context 'with projects' do
+ let(:projects) { build_stubbed_list(:project, 1) }
+
+ it 'renders the list of projects' do
+ render
+
+ projects.each do |project|
+ expect(rendered).to have_content(project.name)
+ end
+ end
+ end
+
+ context 'without projects' do
+ let(:projects) { [] }
+
+ context 'when @contributed_projects is set' do
+ context 'and is empty' do
+ before do
+ @contributed_projects = []
+ end
+
+ it 'renders a no-content message' do
+ render
+
+ expect(rendered).to have_content(s_('UserProfile|This user hasn\'t contributed to any projects'))
+ end
+ end
+ end
+
+ context 'when @starred_projects is set' do
+ context 'and is empty' do
+ before do
+ @starred_projects = []
+ end
+
+ it 'renders a no-content message' do
+ render
+
+ expect(rendered).to have_content(s_('UserProfile|This user hasn\'t starred any projects'))
+ end
+ end
+ end
+
+ context 'and without a special instance variable' do
+ context 'for an explore_page' do
+ before do
+ allow(view).to receive(:explore_page).and_return(true)
+ end
+
+ it 'renders a no-content message' do
+ render
+
+ expect(rendered).to have_content(s_('UserProfile|Explore public groups to find projects to contribute to.'))
+ end
+ end
+
+ context 'for a non-explore page' do
+ it 'renders a no-content message' do
+ render
+
+ expect(rendered).to have_content(s_('UserProfile|This user doesn\'t have any personal projects'))
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/background_migration_worker_spec.rb b/spec/workers/background_migration_worker_spec.rb
index e5be8ce0423..aae6fa02a0c 100644
--- a/spec/workers/background_migration_worker_spec.rb
+++ b/spec/workers/background_migration_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe BackgroundMigrationWorker, :sidekiq, :clean_gitlab_redis_shared_state do
+describe BackgroundMigrationWorker, :clean_gitlab_redis_shared_state do
let(:worker) { described_class.new }
describe '.minimum_interval' do
diff --git a/spec/workers/ci/create_cross_project_pipeline_worker_spec.rb b/spec/workers/ci/create_cross_project_pipeline_worker_spec.rb
new file mode 100644
index 00000000000..22eab1d20f7
--- /dev/null
+++ b/spec/workers/ci/create_cross_project_pipeline_worker_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::CreateCrossProjectPipelineWorker do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:bridge) { create(:ci_bridge, user: user, pipeline: pipeline) }
+
+ let(:service) { double('pipeline creation service') }
+
+ describe '#perform' do
+ context 'when bridge exists' do
+ it 'calls cross project pipeline creation service' do
+ expect(Ci::CreateCrossProjectPipelineService)
+ .to receive(:new)
+ .with(project, user)
+ .and_return(service)
+
+ expect(service).to receive(:execute).with(bridge)
+
+ described_class.new.perform(bridge.id)
+ end
+ end
+
+ context 'when bridge does not exist' do
+ it 'does nothing' do
+ expect(Ci::CreateCrossProjectPipelineService)
+ .not_to receive(:new)
+
+ described_class.new.perform(1234)
+ end
+ end
+ end
+end
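For orientation, the worker under test pairs a bridge lookup with the cross-project pipeline creation service. A minimal sketch of that shape, under the assumption that a missing bridge is simply ignored (as the second context asserts); this is illustrative, not the shipped implementation:

module Ci
  class CreateCrossProjectPipelineWorker
    include ApplicationWorker

    # Sketch only: find the bridge, no-op when it no longer exists,
    # otherwise delegate to the cross-project pipeline creation service.
    def perform(bridge_id)
      bridge = ::Ci::Bridge.find_by(id: bridge_id)
      return unless bridge

      ::Ci::CreateCrossProjectPipelineService
        .new(bridge.project, bridge.user)
        .execute(bridge)
    end
  end
end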
diff --git a/spec/workers/ci/pipeline_bridge_status_worker_spec.rb b/spec/workers/ci/pipeline_bridge_status_worker_spec.rb
new file mode 100644
index 00000000000..d5f95a035fd
--- /dev/null
+++ b/spec/workers/ci/pipeline_bridge_status_worker_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::PipelineBridgeStatusWorker do
+ describe '#perform' do
+ subject { described_class.new.perform(pipeline_id) }
+
+ context 'when pipeline exists' do
+ let(:pipeline) { create(:ci_pipeline) }
+ let(:pipeline_id) { pipeline.id }
+
+ it 'calls the service' do
+ service = double('bridge status service')
+
+ expect(Ci::PipelineBridgeStatusService)
+ .to receive(:new)
+ .with(pipeline.project, pipeline.user)
+ .and_return(service)
+
+ expect(service).to receive(:execute).with(pipeline)
+
+ subject
+ end
+ end
+
+ context 'when pipeline does not exist' do
+ let(:pipeline_id) { 1234 }
+
+ it 'does not call the service' do
+ expect(Ci::PipelineBridgeStatusService)
+ .not_to receive(:new)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/cleanup_container_repository_worker_spec.rb b/spec/workers/cleanup_container_repository_worker_spec.rb
index 9be8f882785..1228c2c2d9c 100644
--- a/spec/workers/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/cleanup_container_repository_worker_spec.rb
@@ -6,34 +6,49 @@ describe CleanupContainerRepositoryWorker, :clean_gitlab_redis_shared_state do
let(:repository) { create(:container_repository) }
let(:project) { repository.project }
let(:user) { project.owner }
- let(:params) { { key: 'value' } }
subject { described_class.new }
describe '#perform' do
let(:service) { instance_double(Projects::ContainerRepository::CleanupTagsService) }
- before do
- allow(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
- .with(project, user, params).and_return(service)
+ context 'bulk delete api' do
+ let(:params) { { key: 'value', 'container_expiration_policy' => false } }
+
+ it 'executes the destroy service' do
+ expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
+ .with(project, user, params.merge('container_expiration_policy' => false))
+ .and_return(service)
+ expect(service).to receive(:execute)
+
+ subject.perform(user.id, repository.id, params)
+ end
+
+ it 'does not raise error when user could not be found' do
+ expect do
+ subject.perform(-1, repository.id, params)
+ end.not_to raise_error
+ end
+
+ it 'does not raise error when repository could not be found' do
+ expect do
+ subject.perform(user.id, -1, params)
+ end.not_to raise_error
+ end
end
- it 'executes the destroy service' do
- expect(service).to receive(:execute)
+ context 'container expiration policy' do
+ let(:params) { { key: 'value', 'container_expiration_policy' => true } }
- subject.perform(user.id, repository.id, params)
- end
+ it 'executes the destroy service' do
+ expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
+ .with(project, nil, params.merge('container_expiration_policy' => true))
+ .and_return(service)
- it 'does not raise error when user could not be found' do
- expect do
- subject.perform(-1, repository.id, params)
- end.not_to raise_error
- end
+ expect(service).to receive(:execute)
- it 'does not raise error when repository could not be found' do
- expect do
- subject.perform(user.id, -1, params)
- end.not_to raise_error
+ subject.perform(nil, repository.id, params)
+ end
end
end
end
diff --git a/spec/workers/cluster_configure_istio_worker_spec.rb b/spec/workers/cluster_configure_istio_worker_spec.rb
new file mode 100644
index 00000000000..0f02d428ced
--- /dev/null
+++ b/spec/workers/cluster_configure_istio_worker_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ClusterConfigureIstioWorker do
+ describe '#perform' do
+ shared_examples 'configure istio service' do
+ it 'configures istio' do
+ expect_any_instance_of(Clusters::Kubernetes::ConfigureIstioIngressService).to receive(:execute)
+
+ described_class.new.perform(cluster.id)
+ end
+ end
+
+ context 'when provider type is gcp' do
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+
+ it_behaves_like 'configure istio service'
+ end
+
+ context 'when provider type is aws' do
+ let(:cluster) { create(:cluster, :project, :provided_by_aws) }
+
+ it_behaves_like 'configure istio service'
+ end
+
+ context 'when provider type is user' do
+ let(:cluster) { create(:cluster, :project, :provided_by_user) }
+
+ it_behaves_like 'configure istio service'
+ end
+
+ context 'when cluster does not exist' do
+ it 'does not provision a cluster' do
+ expect_any_instance_of(Clusters::Kubernetes::ConfigureIstioIngressService).not_to receive(:execute)
+
+ described_class.new.perform(123)
+ end
+ end
+ end
+end
diff --git a/spec/workers/concerns/cronjob_queue_spec.rb b/spec/workers/concerns/cronjob_queue_spec.rb
index cf4d47b7500..ea3b7bad2e1 100644
--- a/spec/workers/concerns/cronjob_queue_spec.rb
+++ b/spec/workers/concerns/cronjob_queue_spec.rb
@@ -10,7 +10,7 @@ describe CronjobQueue do
end
include ApplicationWorker
- include CronjobQueue
+ include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
end
end
@@ -21,4 +21,12 @@ describe CronjobQueue do
it 'disables retrying of failed jobs' do
expect(worker.sidekiq_options['retry']).to eq(false)
end
+
+ it 'automatically clears project, user and namespace from the context', :aggregate_failures do
+ worker_context = worker.get_worker_context.to_lazy_hash.transform_values(&:call)
+
+ expect(worker_context[:user]).to be_nil
+ expect(worker_context[:root_namespace]).to be_nil
+ expect(worker_context[:project]).to be_nil
+ end
end
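The new example reflects that cron-triggered jobs run on behalf of the whole instance rather than any one user or project, so scheduling context is dropped. A hedged sketch of a worker using this concern (the worker name is illustrative, not part of this changeset):

class NightlyCleanupWorker # hypothetical example worker
  include ApplicationWorker
  include CronjobQueue # rubocop:disable Scalability/CronWorkerContext

  # Instance-wide housekeeping: no single project/user/namespace applies,
  # which is why CronjobQueue clears those keys from the worker context.
  def perform
    # ...
  end
end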
diff --git a/spec/workers/concerns/worker_context_spec.rb b/spec/workers/concerns/worker_context_spec.rb
new file mode 100644
index 00000000000..4e8c81c57dc
--- /dev/null
+++ b/spec/workers/concerns/worker_context_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe WorkerContext do
+ let(:worker) do
+ Class.new do
+ def self.name
+ "TestWorker"
+ end
+
+ include ApplicationWorker
+ end
+ end
+
+ describe '.worker_context' do
+ it 'allows modifying the context for the entire worker' do
+ worker.worker_context(user: build_stubbed(:user))
+
+ expect(worker.get_worker_context).to be_a(Gitlab::ApplicationContext)
+ end
+
+ it 'fetches the context from a superclass if none was defined' do
+ worker.worker_context(user: build_stubbed(:user))
+ subclass = Class.new(worker)
+
+ expect(subclass.get_worker_context).to eq(worker.get_worker_context)
+ end
+ end
+
+ shared_examples 'tracking bulk scheduling contexts' do
+ describe "context contents" do
+ before do
+ # stub clearing the contexts, so we can check what's inside
+ allow(worker).to receive(:batch_context=).and_call_original
+ allow(worker).to receive(:batch_context=).with(nil)
+ end
+
+ it 'keeps track of the context per key to schedule' do
+ subject
+
+ expect(worker.context_for_arguments(["hello"])).to be_a(Gitlab::ApplicationContext)
+ end
+
+ it 'does not share contexts across threads' do
+ t1_context = nil
+ t2_context = nil
+
+ Thread.new do
+ subject
+
+ t1_context = worker.context_for_arguments(["hello"])
+ end.join
+ Thread.new do
+ t2_context = worker.context_for_arguments(["hello"])
+ end.join
+
+ expect(t1_context).to be_a(Gitlab::ApplicationContext)
+ expect(t2_context).to be_nil
+ end
+ end
+
+ it 'clears the contexts' do
+ subject
+
+ expect(worker.__send__(:batch_context)).to be_nil
+ end
+ end
+
+ describe '.bulk_perform_async_with_contexts' do
+ subject do
+ worker.bulk_perform_async_with_contexts(%w(hello world),
+ context_proc: -> (_) { { user: build_stubbed(:user) } },
+ arguments_proc: -> (word) { word })
+ end
+
+ it 'calls bulk_perform_async with the arguments' do
+ expect(worker).to receive(:bulk_perform_async).with([["hello"], ["world"]])
+
+ subject
+ end
+
+ it_behaves_like 'tracking bulk scheduling contexts'
+ end
+
+ describe '.bulk_perform_in_with_contexts' do
+ subject do
+ worker.bulk_perform_in_with_contexts(10.minutes,
+ %w(hello world),
+ context_proc: -> (_) { { user: build_stubbed(:user) } },
+ arguments_proc: -> (word) { word })
+ end
+
+ it 'calls bulk_perform_in with the arguments and delay' do
+ expect(worker).to receive(:bulk_perform_in).with(10.minutes, [["hello"], ["world"]])
+
+ subject
+ end
+
+ it_behaves_like 'tracking bulk scheduling contexts'
+ end
+
+ describe '#with_context' do
+ it 'allows modifying context when the job is running' do
+ worker.new.with_context(user: build_stubbed(:user, username: 'jane-doe')) do
+ expect(Labkit::Context.current.to_h).to include('meta.user' => 'jane-doe')
+ end
+ end
+
+ it 'yields the arguments to the block' do
+ a_user = build_stubbed(:user)
+ a_project = build_stubbed(:project)
+
+ worker.new.with_context(user: a_user, project: a_project) do |user:, project:|
+ expect(user).to eq(a_user)
+ expect(project).to eq(a_project)
+ end
+ end
+ end
+end
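For reference, the instance-level #with_context API exercised above is typically called inside perform to attribute a slice of the job to a specific project and user in logs. A sketch under that assumption, with hypothetical names:

class BulkProjectWorker # hypothetical worker for illustration
  include ApplicationWorker

  def perform(project_ids)
    Project.where(id: project_ids).find_each do |project|
      with_context(project: project, user: project.owner) do
        # Anything logged here carries the meta.project / meta.user metadata.
        process(project)
      end
    end
  end

  private

  def process(project)
    # hypothetical per-project work
  end
end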
diff --git a/spec/workers/create_gpg_signature_worker_spec.rb b/spec/workers/create_commit_signature_worker_spec.rb
index ae09b4b77f1..d7235fcd907 100644
--- a/spec/workers/create_gpg_signature_worker_spec.rb
+++ b/spec/workers/create_commit_signature_worker_spec.rb
@@ -2,13 +2,14 @@
require 'spec_helper'
-describe CreateGpgSignatureWorker do
+describe CreateCommitSignatureWorker do
let(:project) { create(:project, :repository) }
let(:commits) { project.repository.commits('HEAD', limit: 3).commits }
let(:commit_shas) { commits.map(&:id) }
let(:gpg_commit) { instance_double(Gitlab::Gpg::Commit) }
+ let(:x509_commit) { instance_double(Gitlab::X509::Commit) }
- context 'when GpgKey is found' do
+ context 'when a signature is found' do
before do
allow(Project).to receive(:find_by).with(id: project.id).and_return(project)
allow(project).to receive(:commits_by).with(oids: commit_shas).and_return(commits)
@@ -18,6 +19,7 @@ describe CreateGpgSignatureWorker do
it 'calls Gitlab::Gpg::Commit#signature' do
commits.each do |commit|
+ allow(commit).to receive(:signature_type).and_return(:PGP)
expect(Gitlab::Gpg::Commit).to receive(:new).with(commit).and_return(gpg_commit).once
end
@@ -31,13 +33,46 @@ describe CreateGpgSignatureWorker do
allow(Gitlab::Gpg::Commit).to receive(:new).and_return(gpg_commit)
allow(Gitlab::Gpg::Commit).to receive(:new).with(commits.first).and_raise(StandardError)
- expect(gpg_commit).to receive(:signature).exactly(2).times
+ allow(commits[1]).to receive(:signature_type).and_return(:PGP)
+ allow(commits[2]).to receive(:signature_type).and_return(:PGP)
+
+ expect(gpg_commit).to receive(:signature).twice
+
+ subject
+ end
+
+ it 'calls Gitlab::X509::Commit#signature' do
+ commits.each do |commit|
+ allow(commit).to receive(:signature_type).and_return(:X509)
+ expect(Gitlab::X509::Commit).to receive(:new).with(commit).and_return(x509_commit).once
+ end
+
+ expect(x509_commit).to receive(:signature).exactly(commits.size).times
+
+ subject
+ end
+
+ it 'can recover from exception and continue the X509 signature process' do
+ allow(x509_commit).to receive(:signature)
+ allow(Gitlab::X509::Commit).to receive(:new).and_return(x509_commit)
+ allow(Gitlab::X509::Commit).to receive(:new).with(commits.first).and_raise(StandardError)
+
+ allow(commits[1]).to receive(:signature_type).and_return(:X509)
+ allow(commits[2]).to receive(:signature_type).and_return(:X509)
+
+ expect(x509_commit).to receive(:signature).twice
subject
end
end
context 'handles when a string is passed in for the commit SHA' do
+ before do
+ allow(Project).to receive(:find_by).with(id: project.id).and_return(project)
+ allow(project).to receive(:commits_by).with(oids: Array(commit_shas.first)).and_return(commits)
+ allow(commits.first).to receive(:signature_type).and_return(:PGP)
+ end
+
it 'creates a signature once' do
allow(Gitlab::Gpg::Commit).to receive(:new).with(commits.first).and_return(gpg_commit)
@@ -67,5 +102,11 @@ describe CreateGpgSignatureWorker do
described_class.new.perform(commit_shas, nonexisting_project_id)
end
+
+ it 'does not call Gitlab::X509::Commit#signature' do
+ expect_any_instance_of(Gitlab::X509::Commit).not_to receive(:signature)
+
+ described_class.new.perform(commit_shas, nonexisting_project_id)
+ end
end
end
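The renamed worker now branches on each commit's signature type before handing off to the GPG or X.509 backend, and the new examples assert that a failure on one commit does not abort the rest. A rough sketch of that per-commit dispatch (method name hypothetical):

# Sketch only: errors are reported per commit so the remaining commits
# still get their signatures created, as the recovery examples describe.
def create_signature_for(commit)
  case commit.signature_type
  when :PGP
    Gitlab::Gpg::Commit.new(commit).signature
  when :X509
    Gitlab::X509::Commit.new(commit).signature
  end
rescue StandardError => e
  Gitlab::ErrorTracking.track_exception(e)
end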
diff --git a/spec/workers/environments/auto_stop_cron_worker_spec.rb b/spec/workers/environments/auto_stop_cron_worker_spec.rb
new file mode 100644
index 00000000000..6773637d4a7
--- /dev/null
+++ b/spec/workers/environments/auto_stop_cron_worker_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Environments::AutoStopCronWorker do
+ subject { worker.perform }
+
+ let(:worker) { described_class.new }
+
+ it 'executes Environments::AutoStopService' do
+ expect_next_instance_of(Environments::AutoStopService) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ subject
+ end
+end
diff --git a/spec/workers/error_tracking_issue_link_worker_spec.rb b/spec/workers/error_tracking_issue_link_worker_spec.rb
new file mode 100644
index 00000000000..b9206e7e12c
--- /dev/null
+++ b/spec/workers/error_tracking_issue_link_worker_spec.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ErrorTrackingIssueLinkWorker do
+ let_it_be(:error_tracking) { create(:project_error_tracking_setting) }
+ let_it_be(:project) { error_tracking.project }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:sentry_issue) { create(:sentry_issue, issue: issue) }
+
+ let(:repo) do
+ Gitlab::ErrorTracking::Repo.new(
+ status: 'active',
+ integration_id: 66666,
+ project_id: project.id
+ )
+ end
+
+ subject { described_class.new.perform(issue.id) }
+
+ describe '#perform' do
+ it 'creates a link between an issue and a Sentry issue in Sentry' do
+ expect_next_instance_of(Sentry::Client) do |client|
+ expect(client).to receive(:repos).with('sentry-org').and_return([repo])
+ expect(client)
+ .to receive(:create_issue_link)
+ .with(66666, sentry_issue.sentry_issue_identifier, issue)
+ .and_return(true)
+ end
+
+ expect(subject).to be true
+ end
+
+ shared_examples_for 'makes no external API requests' do
+ it 'takes no action' do
+ expect_any_instance_of(Sentry::Client).not_to receive(:repos)
+ expect_any_instance_of(Sentry::Client).not_to receive(:create_issue_link)
+
+ expect(subject).to be nil
+ end
+ end
+
+ shared_examples_for 'attempts to create a link via plugin' do
+ it 'creates the link with a nil integration id' do
+ expect_next_instance_of(Sentry::Client) do |client|
+ expect(client).to receive(:repos).with('sentry-org').and_return([repo])
+ expect(client)
+ .to receive(:create_issue_link)
+ .with(nil, sentry_issue.sentry_issue_identifier, issue)
+ .and_return(true)
+ end
+
+ expect(subject).to be true
+ end
+ end
+
+ context 'when issue is unavailable' do
+ let(:issue) { double('issue', id: -3) }
+
+ it_behaves_like 'makes no external API requests'
+ end
+
+ context 'when project does not have error tracking configured' do
+ let(:issue) { build(:project) }
+
+ it_behaves_like 'makes no external API requests'
+ end
+
+ context 'when the issue is not linked to a Sentry issue in GitLab' do
+ let(:issue) { build(:issue, project: project) }
+
+ it_behaves_like 'makes no external API requests'
+ end
+
+ context 'when Sentry disabled the GitLab integration' do
+ let(:repo) do
+ Gitlab::ErrorTracking::Repo.new(
+ status: 'inactive',
+ integration_id: 66666,
+ project_id: project.id
+ )
+ end
+
+ it_behaves_like 'attempts to create a link via plugin'
+ end
+
+ context 'when the Sentry GitLab integration is for another project' do
+ let(:repo) do
+ Gitlab::ErrorTracking::Repo.new(
+ status: 'active',
+ integration_id: 66666,
+ project_id: -3
+ )
+ end
+
+ it_behaves_like 'attempts to create a link via plugin'
+ end
+ end
+end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 5ceb54eb2d5..f3ee1dc8435 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -3,8 +3,12 @@
require 'spec_helper'
describe 'Every Sidekiq worker' do
+ let(:workers_without_defaults) do
+ Gitlab::SidekiqConfig.workers - Gitlab::SidekiqConfig::DEFAULT_WORKERS
+ end
+
it 'does not use the default queue' do
- expect(Gitlab::SidekiqConfig.workers.map(&:queue)).not_to include('default')
+ expect(workers_without_defaults.map(&:queue)).not_to include('default')
end
it 'uses the cronjob queue when the worker runs as a cronjob' do
@@ -45,7 +49,7 @@ describe 'Every Sidekiq worker' do
# or explicitly be excluded with the `feature_category_not_owned!` annotation.
# Please see doc/development/sidekiq_style_guide.md#Feature-Categorization for more details.
it 'has a feature_category or feature_category_not_owned! attribute', :aggregate_failures do
- Gitlab::SidekiqConfig.workers.each do |worker|
+ workers_without_defaults.each do |worker|
expect(worker.get_feature_category).to be_a(Symbol), "expected #{worker.inspect} to declare a feature_category or feature_category_not_owned!"
end
end
@@ -54,7 +58,7 @@ describe 'Every Sidekiq worker' do
# The category should match a value in `config/feature_categories.yml`.
# Please see doc/development/sidekiq_style_guide.md#Feature-Categorization for more details.
it 'has a feature_category that maps to a value in feature_categories.yml', :aggregate_failures do
- workers_with_feature_categories = Gitlab::SidekiqConfig.workers
+ workers_with_feature_categories = workers_without_defaults
.select(&:get_feature_category)
.reject(&:feature_category_not_owned?)
@@ -69,7 +73,7 @@ describe 'Every Sidekiq worker' do
# rather than scaling the hardware to meet the SLO. For this reason, memory-bound,
# latency-sensitive jobs are explicitly discouraged and disabled.
it 'is (exclusively) memory-bound or latency-sensitive, not both', :aggregate_failures do
- latency_sensitive_workers = Gitlab::SidekiqConfig.workers
+ latency_sensitive_workers = workers_without_defaults
.select(&:latency_sensitive_worker?)
latency_sensitive_workers.each do |worker|
@@ -86,7 +90,7 @@ describe 'Every Sidekiq worker' do
# Please see doc/development/sidekiq_style_guide.md#Jobs-with-External-Dependencies for more
# details.
it 'has (exclusively) external dependencies or is latency-sensitive, not both', :aggregate_failures do
- latency_sensitive_workers = Gitlab::SidekiqConfig.workers
+ latency_sensitive_workers = workers_without_defaults
.select(&:latency_sensitive_worker?)
latency_sensitive_workers.each do |worker|
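The categorization rule referenced in the comments above is declared on the worker class itself: every worker either names a feature category or opts out explicitly. A minimal sketch (worker names and category are illustrative):

class ExampleExportWorker # hypothetical
  include ApplicationWorker

  feature_category :importers # must match an entry in config/feature_categories.yml
end

class ExampleHousekeepingWorker # hypothetical
  include ApplicationWorker

  feature_category_not_owned! # explicitly excluded from feature categorization
end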
diff --git a/spec/workers/expire_pipeline_cache_worker_spec.rb b/spec/workers/expire_pipeline_cache_worker_spec.rb
index e162a227a66..8d898ffc13e 100644
--- a/spec/workers/expire_pipeline_cache_worker_spec.rb
+++ b/spec/workers/expire_pipeline_cache_worker_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe ExpirePipelineCacheWorker do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
subject { described_class.new }
@@ -22,5 +22,14 @@ describe ExpirePipelineCacheWorker do
subject.perform(617748)
end
+
+ it "doesn't do anything if the pipeline cannot be cached" do
+ allow_any_instance_of(Ci::Pipeline).to receive(:cacheable?).and_return(false)
+
+ expect_any_instance_of(Ci::ExpirePipelineCacheService).not_to receive(:execute)
+ expect_any_instance_of(Gitlab::EtagCaching::Store).not_to receive(:touch)
+
+ subject.perform(pipeline.id)
+ end
end
end
diff --git a/spec/lib/gitlab/phabricator_import/base_worker_spec.rb b/spec/workers/gitlab/phabricator_import/base_worker_spec.rb
index d46d908a3e3..d46d908a3e3 100644
--- a/spec/lib/gitlab/phabricator_import/base_worker_spec.rb
+++ b/spec/workers/gitlab/phabricator_import/base_worker_spec.rb
diff --git a/spec/lib/gitlab/phabricator_import/import_tasks_worker_spec.rb b/spec/workers/gitlab/phabricator_import/import_tasks_worker_spec.rb
index 1e38ef8aaa5..1e38ef8aaa5 100644
--- a/spec/lib/gitlab/phabricator_import/import_tasks_worker_spec.rb
+++ b/spec/workers/gitlab/phabricator_import/import_tasks_worker_spec.rb
diff --git a/spec/workers/group_import_worker_spec.rb b/spec/workers/group_import_worker_spec.rb
new file mode 100644
index 00000000000..0783ac4df4e
--- /dev/null
+++ b/spec/workers/group_import_worker_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GroupImportWorker do
+ let!(:user) { create(:user) }
+ let!(:group) { create(:group) }
+
+ subject { described_class.new }
+
+ describe '#perform' do
+ context 'when it succeeds' do
+ it 'calls the ImportService' do
+ expect_any_instance_of(::Groups::ImportExport::ImportService).to receive(:execute)
+
+ subject.perform(user.id, group.id)
+ end
+ end
+
+ context 'when it fails' do
+ it 'raises an exception when params are invalid' do
+ expect_any_instance_of(::Groups::ImportExport::ImportService).not_to receive(:execute)
+
+ expect { subject.perform(1234, group.id) }.to raise_exception(ActiveRecord::RecordNotFound)
+ expect { subject.perform(user.id, 1234) }.to raise_exception(ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+end
diff --git a/spec/workers/incident_management/process_alert_worker_spec.rb b/spec/workers/incident_management/process_alert_worker_spec.rb
new file mode 100644
index 00000000000..9f40833dfd7
--- /dev/null
+++ b/spec/workers/incident_management/process_alert_worker_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe IncidentManagement::ProcessAlertWorker do
+ let_it_be(:project) { create(:project) }
+
+ describe '#perform' do
+ let(:alert) { :alert }
+ let(:create_issue_service) { spy(:create_issue_service) }
+
+ subject { described_class.new.perform(project.id, alert) }
+
+ it 'calls create issue service' do
+ expect(Project).to receive(:find_by_id).and_call_original
+
+ expect(IncidentManagement::CreateIssueService)
+ .to receive(:new).with(project, :alert)
+ .and_return(create_issue_service)
+
+ expect(create_issue_service).to receive(:execute)
+
+ subject
+ end
+
+ context 'with invalid project' do
+ let(:invalid_project_id) { 0 }
+
+ subject { described_class.new.perform(invalid_project_id, alert) }
+
+ it 'does not create issues' do
+ expect(Project).to receive(:find_by_id).and_call_original
+ expect(IncidentManagement::CreateIssueService).not_to receive(:new)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/mail_scheduler/notification_service_worker_spec.rb b/spec/workers/mail_scheduler/notification_service_worker_spec.rb
index 14a9f0ed8b7..3c023e713ed 100644
--- a/spec/workers/mail_scheduler/notification_service_worker_spec.rb
+++ b/spec/workers/mail_scheduler/notification_service_worker_spec.rb
@@ -49,7 +49,7 @@ describe MailScheduler::NotificationServiceWorker do
end
end
- describe '.perform_async', :sidekiq do
+ describe '.perform_async' do
around do |example|
Sidekiq::Testing.fake! { example.run }
end
diff --git a/spec/workers/merge_request_mergeability_check_worker_spec.rb b/spec/workers/merge_request_mergeability_check_worker_spec.rb
new file mode 100644
index 00000000000..2331664215f
--- /dev/null
+++ b/spec/workers/merge_request_mergeability_check_worker_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequestMergeabilityCheckWorker do
+ subject { described_class.new }
+
+ describe '#perform' do
+ context 'when merge request does not exist' do
+ it 'does not execute MergeabilityCheckService' do
+ expect(MergeRequests::MergeabilityCheckService).not_to receive(:new)
+
+ subject.perform(1)
+ end
+ end
+
+ context 'when merge request exists' do
+ let(:merge_request) { create(:merge_request) }
+
+ it 'executes MergeabilityCheckService' do
+ expect_next_instance_of(MergeRequests::MergeabilityCheckService, merge_request) do |service|
+ expect(service).to receive(:execute).and_return(double(error?: false))
+ end
+
+ subject.perform(merge_request.id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb b/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
index 10c23cbb6d4..736acc40371 100644
--- a/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
+++ b/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
@@ -12,7 +12,7 @@ describe PagesDomainSslRenewalCronWorker do
end
describe '#perform' do
- let(:project) { create :project }
+ let_it_be(:project) { create :project }
let!(:domain) { create(:pages_domain, project: project, auto_ssl_enabled: false) }
let!(:domain_with_enabled_auto_ssl) { create(:pages_domain, project: project, auto_ssl_enabled: true) }
let!(:domain_with_obtained_letsencrypt) do
@@ -35,12 +35,16 @@ describe PagesDomainSslRenewalCronWorker do
[domain,
domain_with_obtained_letsencrypt].each do |domain|
- expect(PagesDomainVerificationWorker).not_to receive(:perform_async).with(domain.id)
+ expect(PagesDomainSslRenewalWorker).not_to receive(:perform_async).with(domain.id)
end
worker.perform
end
+ it_behaves_like 'a pages cronjob scheduling jobs with context', PagesDomainSslRenewalWorker do
+ let(:extra_domain) { create(:pages_domain, :with_project, auto_ssl_enabled: true) }
+ end
+
shared_examples 'does nothing' do
it 'does nothing' do
expect(PagesDomainSslRenewalWorker).not_to receive(:perform_async)
diff --git a/spec/workers/pages_domain_verification_cron_worker_spec.rb b/spec/workers/pages_domain_verification_cron_worker_spec.rb
index 3fb86adee11..6dd6c33f5fe 100644
--- a/spec/workers/pages_domain_verification_cron_worker_spec.rb
+++ b/spec/workers/pages_domain_verification_cron_worker_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe PagesDomainVerificationCronWorker do
subject(:worker) { described_class.new }
- describe '#perform' do
+ describe '#perform', :sidekiq do
let!(:verified) { create(:pages_domain) }
- let!(:reverify) { create(:pages_domain, :reverify) }
+ let!(:reverify) { create(:pages_domain, :reverify, :with_project) }
let!(:disabled) { create(:pages_domain, :disabled) }
it 'does nothing if the database is read-only' do
@@ -26,5 +26,9 @@ describe PagesDomainVerificationCronWorker do
worker.perform
end
+
+ it_behaves_like 'a pages cronjob scheduling jobs with context', PagesDomainVerificationWorker do
+ let(:extra_domain) { create(:pages_domain, :reverify, :with_project) }
+ end
end
end
diff --git a/spec/workers/reactive_caching_worker_spec.rb b/spec/workers/reactive_caching_worker_spec.rb
index ca0e76fc19a..6c74c4ea072 100644
--- a/spec/workers/reactive_caching_worker_spec.rb
+++ b/spec/workers/reactive_caching_worker_spec.rb
@@ -14,6 +14,18 @@ describe ReactiveCachingWorker do
described_class.new.perform("Environment", environment.id)
end
+
+ context 'when ReactiveCaching::ExceededReactiveCacheLimit is raised' do
+ it 'avoids failing the job and tracks via Gitlab::ErrorTracking' do
+ allow_any_instance_of(Environment).to receive(:exclusively_update_reactive_cache!)
+ .and_raise(ReactiveCaching::ExceededReactiveCacheLimit)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(kind_of(ReactiveCaching::ExceededReactiveCacheLimit))
+
+ described_class.new.perform("Environment", environment.id)
+ end
+ end
end
end
end
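The new context pins down that ReactiveCaching::ExceededReactiveCacheLimit is reported rather than re-raised, so Sidekiq does not retry a job that can never fit under the cache limit. A simplified sketch of that rescue shape (record lookup abbreviated; not the actual finder logic):

# Simplified: the real worker resolves the model and record differently,
# but the rescue/report behaviour is what the new example asserts.
def perform(class_name, id, *args)
  klass = class_name.safe_constantize
  return unless klass

  klass.find_by(id: id)&.exclusively_update_reactive_cache!(*args)
rescue ReactiveCaching::ExceededReactiveCacheLimit => e
  Gitlab::ErrorTracking.track_exception(e)
end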
diff --git a/spec/workers/repository_fork_worker_spec.rb b/spec/workers/repository_fork_worker_spec.rb
index 26fd67adfaa..01104049404 100644
--- a/spec/workers/repository_fork_worker_spec.rb
+++ b/spec/workers/repository_fork_worker_spec.rb
@@ -72,13 +72,33 @@ describe RepositoryForkWorker do
perform!
end
- it "handles bad fork" do
- error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}"
+ it 'handles bad fork' do
+ error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}: Failed to create fork repository"
expect_fork_repository.and_return(false)
expect { perform! }.to raise_error(StandardError, error_message)
end
+
+ it 'calls Projects::LfsPointers::LfsLinkService#execute with OIDs of source project LFS objects' do
+ expect_fork_repository.and_return(true)
+ expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
+ expect(service).to receive(:execute).with(project.lfs_objects_oids)
+ end
+
+ perform!
+ end
+
+ it "handles LFS objects link failure" do
+ error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}: Source project has too many LFS objects"
+
+ expect_fork_repository.and_return(true)
+ expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
+ expect(service).to receive(:execute).and_raise(Projects::LfsPointers::LfsLinkService::TooManyOidsError)
+ end
+
+ expect { perform! }.to raise_error(StandardError, error_message)
+ end
end
context 'only project ID passed' do