gitlab.com/gitlab-org/gitlab-foss.git
author     Kamil Trzciński <ayufan@ayufan.eu>  2018-02-28 22:35:22 +0300
committer  Kamil Trzciński <ayufan@ayufan.eu>  2018-02-28 22:35:22 +0300
commit     e0401df1214397626e65e58166988fe62715d372 (patch)
tree       087d8ca4a1611aa50a8ac98e66f7d1657ff1f90f /spec
parent     2b7b60728426c10ef1188a1073d3630805773a35 (diff)
parent     11c67e7c2f992299ff5918ce67995b73d1e0be6d (diff)

Merge commit '11c67e7c2f992299ff5918ce67995b73d1e0be6d' into object-storage-ee-to-ce-backport
Diffstat (limited to 'spec')
-rw-r--r--spec/controllers/admin/users_controller_spec.rb2
-rw-r--r--spec/controllers/boards/issues_controller_spec.rb11
-rw-r--r--spec/controllers/dashboard/todos_controller_spec.rb26
-rw-r--r--spec/controllers/google_api/authorizations_controller_spec.rb49
-rw-r--r--spec/controllers/health_controller_spec.rb1
-rw-r--r--spec/controllers/profiles/emails_controller_spec.rb35
-rw-r--r--spec/controllers/profiles_controller_spec.rb14
-rw-r--r--spec/controllers/projects/artifacts_controller_spec.rb70
-rw-r--r--spec/controllers/projects/blob_controller_spec.rb7
-rw-r--r--spec/controllers/projects/branches_controller_spec.rb15
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb308
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb351
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb2
-rw-r--r--spec/controllers/projects/merge_requests/diffs_controller_spec.rb10
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb32
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb104
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb2
-rw-r--r--spec/controllers/projects/pipelines_settings_controller_spec.rb43
-rw-r--r--spec/controllers/projects/registry/repositories_controller_spec.rb44
-rw-r--r--spec/controllers/projects/registry/tags_controller_spec.rb90
-rw-r--r--spec/controllers/projects_controller_spec.rb43
-rw-r--r--spec/controllers/registrations_controller_spec.rb64
-rw-r--r--spec/factories/ci/build_trace_section_names.rb6
-rw-r--r--spec/factories/ci/pipelines.rb1
-rw-r--r--spec/factories/deployments.rb2
-rw-r--r--spec/factories/emails.rb2
-rw-r--r--spec/factories/fork_networks.rb5
-rw-r--r--spec/factories/gcp/cluster.rb38
-rw-r--r--spec/factories/gitaly/commit.rb17
-rw-r--r--spec/factories/gitaly/commit_author.rb9
-rw-r--r--spec/factories/gpg_key_subkeys.rb10
-rw-r--r--spec/factories/gpg_keys.rb4
-rw-r--r--spec/factories/gpg_signature.rb2
-rw-r--r--spec/factories/merge_requests.rb5
-rw-r--r--spec/factories/projects.rb10
-rw-r--r--spec/factories/user_custom_attributes.rb7
-rw-r--r--spec/features/admin/admin_abuse_reports_spec.rb2
-rw-r--r--spec/features/admin/admin_broadcast_messages_spec.rb2
-rw-r--r--spec/features/admin/admin_disables_two_factor_spec.rb2
-rw-r--r--spec/features/admin/admin_groups_spec.rb10
-rw-r--r--spec/features/admin/admin_health_check_spec.rb2
-rw-r--r--spec/features/admin/admin_hooks_spec.rb2
-rw-r--r--spec/features/admin/admin_labels_spec.rb2
-rw-r--r--spec/features/admin/admin_projects_spec.rb8
-rw-r--r--spec/features/admin/admin_users_impersonation_tokens_spec.rb2
-rw-r--r--spec/features/admin/admin_users_spec.rb4
-rw-r--r--spec/features/admin/admin_uses_repository_checks_spec.rb2
-rw-r--r--spec/features/auto_deploy_spec.rb4
-rw-r--r--spec/features/boards/boards_spec.rb4
-rw-r--r--spec/features/boards/keyboard_shortcut_spec.rb2
-rw-r--r--spec/features/boards/new_issue_spec.rb2
-rw-r--r--spec/features/boards/sidebar_spec.rb2
-rw-r--r--spec/features/ci_lint_spec.rb2
-rw-r--r--spec/features/container_registry_spec.rb9
-rw-r--r--spec/features/copy_as_gfm_spec.rb132
-rw-r--r--spec/features/cycle_analytics_spec.rb2
-rw-r--r--spec/features/dashboard/active_tab_spec.rb2
-rw-r--r--spec/features/dashboard/datetime_on_tooltips_spec.rb2
-rw-r--r--spec/features/dashboard/group_spec.rb8
-rw-r--r--spec/features/dashboard/issues_filter_spec.rb8
-rw-r--r--spec/features/dashboard/issues_spec.rb8
-rw-r--r--spec/features/dashboard/label_filter_spec.rb2
-rw-r--r--spec/features/dashboard/merge_requests_spec.rb17
-rw-r--r--spec/features/dashboard/project_member_activity_index_spec.rb2
-rw-r--r--spec/features/dashboard/projects_spec.rb25
-rw-r--r--spec/features/dashboard/todos/todos_filtering_spec.rb2
-rw-r--r--spec/features/dashboard/todos/todos_spec.rb8
-rw-r--r--spec/features/expand_collapse_diffs_spec.rb2
-rw-r--r--spec/features/explore/new_menu_spec.rb6
-rw-r--r--spec/features/gitlab_flavored_markdown_spec.rb2
-rw-r--r--spec/features/group_variables_spec.rb2
-rw-r--r--spec/features/groups/labels/subscription_spec.rb2
-rw-r--r--spec/features/groups/merge_requests_spec.rb2
-rw-r--r--spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb22
-rw-r--r--spec/features/groups_spec.rb6
-rw-r--r--spec/features/issuables/default_sort_order_spec.rb56
-rw-r--r--spec/features/issuables/discussion_lock_spec.rb106
-rw-r--r--spec/features/issuables/user_sees_sidebar_spec.rb4
-rw-r--r--spec/features/issues/award_emoji_spec.rb18
-rw-r--r--spec/features/issues/award_spec.rb2
-rw-r--r--spec/features/issues/bulk_assignment_labels_spec.rb4
-rw-r--r--spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb2
-rw-r--r--spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/dropdown_author_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/dropdown_emoji_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/dropdown_label_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/filter_issues_spec.rb18
-rw-r--r--spec/features/issues/filtered_search/recent_searches_spec.rb4
-rw-r--r--spec/features/issues/filtered_search/search_bar_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/visual_tokens_spec.rb4
-rw-r--r--spec/features/issues/form_spec.rb49
-rw-r--r--spec/features/issues/gfm_autocomplete_spec.rb2
-rw-r--r--spec/features/issues/issue_detail_spec.rb3
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb8
-rw-r--r--spec/features/issues/markdown_toolbar_spec.rb2
-rw-r--r--spec/features/issues/move_spec.rb6
-rw-r--r--spec/features/issues/spam_issues_spec.rb2
-rw-r--r--spec/features/issues/todo_spec.rb2
-rw-r--r--spec/features/issues/update_issues_spec.rb2
-rw-r--r--spec/features/issues/user_uses_slash_commands_spec.rb10
-rw-r--r--spec/features/issues_spec.rb202
-rw-r--r--spec/features/login_spec.rb8
-rw-r--r--spec/features/merge_requests/assign_issues_spec.rb2
-rw-r--r--spec/features/merge_requests/award_spec.rb2
-rw-r--r--spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb2
-rw-r--r--spec/features/merge_requests/cherry_pick_spec.rb2
-rw-r--r--spec/features/merge_requests/closes_issues_spec.rb2
-rw-r--r--spec/features/merge_requests/conflicts_spec.rb2
-rw-r--r--spec/features/merge_requests/create_new_mr_from_fork_spec.rb89
-rw-r--r--spec/features/merge_requests/create_new_mr_spec.rb2
-rw-r--r--spec/features/merge_requests/created_from_fork_spec.rb23
-rw-r--r--spec/features/merge_requests/deleted_source_branch_spec.rb2
-rw-r--r--spec/features/merge_requests/diff_notes_avatars_spec.rb26
-rw-r--r--spec/features/merge_requests/diff_notes_resolve_spec.rb2
-rw-r--r--spec/features/merge_requests/diffs_spec.rb14
-rw-r--r--spec/features/merge_requests/discussion_lock_spec.rb49
-rw-r--r--spec/features/merge_requests/edit_mr_spec.rb4
-rw-r--r--spec/features/merge_requests/filter_by_milestone_spec.rb8
-rw-r--r--spec/features/merge_requests/filter_merge_requests_spec.rb20
-rw-r--r--spec/features/merge_requests/form_spec.rb12
-rw-r--r--spec/features/merge_requests/image_diff_notes.rb196
-rw-r--r--spec/features/merge_requests/merge_commit_message_toggle_spec.rb2
-rw-r--r--spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb6
-rw-r--r--spec/features/merge_requests/pipelines_spec.rb2
-rw-r--r--spec/features/merge_requests/resolve_outdated_diff_discussions.rb2
-rw-r--r--spec/features/merge_requests/target_branch_spec.rb2
-rw-r--r--spec/features/merge_requests/toggle_whitespace_changes_spec.rb2
-rw-r--r--spec/features/merge_requests/toggler_behavior_spec.rb2
-rw-r--r--spec/features/merge_requests/update_merge_requests_spec.rb8
-rw-r--r--spec/features/merge_requests/user_lists_merge_requests_spec.rb40
-rw-r--r--spec/features/merge_requests/user_posts_diff_notes_spec.rb35
-rw-r--r--spec/features/merge_requests/user_uses_slash_commands_spec.rb2
-rw-r--r--spec/features/merge_requests/versions_spec.rb2
-rw-r--r--spec/features/merge_requests/widget_deployments_spec.rb2
-rw-r--r--spec/features/merge_requests/widget_spec.rb61
-rw-r--r--spec/features/milestones/show_spec.rb4
-rw-r--r--spec/features/profile_spec.rb44
-rw-r--r--spec/features/profiles/emails_spec.rb71
-rw-r--r--spec/features/profiles/gpg_keys_spec.rb14
-rw-r--r--spec/features/profiles/keys_spec.rb2
-rw-r--r--spec/features/profiles/oauth_applications_spec.rb2
-rw-r--r--spec/features/profiles/personal_access_tokens_spec.rb2
-rw-r--r--spec/features/profiles/user_changes_notified_of_own_activity_spec.rb2
-rw-r--r--spec/features/profiles/user_manages_emails_spec.rb78
-rw-r--r--spec/features/profiles/user_visits_notifications_tab_spec.rb2
-rw-r--r--spec/features/profiles/user_visits_profile_account_page_spec.rb15
-rw-r--r--spec/features/profiles/user_visits_profile_authentication_log_spec.rb15
-rw-r--r--spec/features/profiles/user_visits_profile_preferences_page_spec.rb (renamed from spec/features/profiles/preferences_spec.rb)13
-rw-r--r--spec/features/profiles/user_visits_profile_spec.rb15
-rw-r--r--spec/features/profiles/user_visits_profile_ssh_keys_page_spec.rb15
-rw-r--r--spec/features/projects/artifacts/browse_spec.rb50
-rw-r--r--spec/features/projects/awards/user_interacts_with_awards_in_issue_spec.rb104
-rw-r--r--spec/features/projects/badges/list_spec.rb2
-rw-r--r--spec/features/projects/blobs/blob_line_permalink_updater_spec.rb2
-rw-r--r--spec/features/projects/blobs/edit_spec.rb2
-rw-r--r--spec/features/projects/blobs/shortcuts_blob_spec.rb2
-rw-r--r--spec/features/projects/branches/download_buttons_spec.rb2
-rw-r--r--spec/features/projects/branches_spec.rb100
-rw-r--r--spec/features/projects/clusters_spec.rb111
-rw-r--r--spec/features/projects/commit/builds_spec.rb2
-rw-r--r--spec/features/projects/commit/cherry_pick_spec.rb2
-rw-r--r--spec/features/projects/commit/diff_notes_spec.rb36
-rw-r--r--spec/features/projects/commit/user_reverts_commit_spec.rb56
-rw-r--r--spec/features/projects/compare_spec.rb2
-rw-r--r--spec/features/projects/developer_views_empty_project_instructions_spec.rb4
-rw-r--r--spec/features/projects/diffs/diff_show_spec.rb53
-rw-r--r--spec/features/projects/edit_spec.rb2
-rw-r--r--spec/features/projects/environments/environments_spec.rb206
-rw-r--r--spec/features/projects/features_visibility_spec.rb4
-rw-r--r--spec/features/projects/files/browse_files_spec.rb2
-rw-r--r--spec/features/projects/files/dockerfile_dropdown_spec.rb2
-rw-r--r--spec/features/projects/files/edit_file_soft_wrap_spec.rb2
-rw-r--r--spec/features/projects/files/find_file_keyboard_spec.rb2
-rw-r--r--spec/features/projects/files/gitignore_dropdown_spec.rb2
-rw-r--r--spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb2
-rw-r--r--spec/features/projects/files/project_owner_creates_license_file_spec.rb2
-rw-r--r--spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb2
-rw-r--r--spec/features/projects/files/template_type_dropdown_spec.rb2
-rw-r--r--spec/features/projects/files/undo_template_spec.rb2
-rw-r--r--spec/features/projects/fork_spec.rb57
-rw-r--r--spec/features/projects/gfm_autocomplete_load_spec.rb2
-rw-r--r--spec/features/projects/import_export/export_file_spec.rb2
-rw-r--r--spec/features/projects/import_export/import_file_spec.rb2
-rw-r--r--spec/features/projects/import_export/namespace_export_file_spec.rb2
-rw-r--r--spec/features/projects/issuable_templates_spec.rb56
-rw-r--r--spec/features/projects/jobs/user_browses_job_spec.rb37
-rw-r--r--spec/features/projects/jobs/user_browses_jobs_spec.rb32
-rw-r--r--spec/features/projects/jobs_spec.rb8
-rw-r--r--spec/features/projects/labels/subscription_spec.rb2
-rw-r--r--spec/features/projects/labels/update_prioritization_spec.rb10
-rw-r--r--spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb2
-rw-r--r--spec/features/projects/members/groups_with_access_list_spec.rb2
-rw-r--r--spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb2
-rw-r--r--spec/features/projects/merge_requests/user_accepts_merge_request_spec.rb84
-rw-r--r--spec/features/projects/merge_requests/user_closes_merge_request_spec.rb21
-rw-r--r--spec/features/projects/merge_requests/user_comments_on_commit_spec.rb19
-rw-r--r--spec/features/projects/merge_requests/user_comments_on_diff_spec.rb172
-rw-r--r--spec/features/projects/merge_requests/user_comments_on_merge_request_spec.rb50
-rw-r--r--spec/features/projects/merge_requests/user_creates_merge_request_spec.rb32
-rw-r--r--spec/features/projects/merge_requests/user_edits_merge_request_spec.rb25
-rw-r--r--spec/features/projects/merge_requests/user_manages_subscription_spec.rb32
-rw-r--r--spec/features/projects/merge_requests/user_reopens_merge_request_spec.rb22
-rw-r--r--spec/features/projects/merge_requests/user_reverts_merge_request_spec.rb59
-rw-r--r--spec/features/projects/merge_requests/user_sorts_merge_requests_spec.rb63
-rw-r--r--spec/features/projects/merge_requests/user_views_all_merge_requests_spec.rb15
-rw-r--r--spec/features/projects/merge_requests/user_views_closed_merge_requests_spec.rb15
-rw-r--r--spec/features/projects/merge_requests/user_views_diffs_spec.rb46
-rw-r--r--spec/features/projects/merge_requests/user_views_merged_merge_requests_spec.rb15
-rw-r--r--spec/features/projects/merge_requests/user_views_open_merge_request_spec.rb92
-rw-r--r--spec/features/projects/merge_requests/user_views_open_merge_requests_spec.rb72
-rw-r--r--spec/features/projects/milestones/user_interacts_with_labels_spec.rb40
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb14
-rw-r--r--spec/features/projects/project_settings_spec.rb30
-rw-r--r--spec/features/projects/ref_switcher_spec.rb2
-rw-r--r--spec/features/projects/services/slack_service_spec.rb26
-rw-r--r--spec/features/projects/services/user_activates_asana_spec.rb24
-rw-r--r--spec/features/projects/services/user_activates_assembla_spec.rb23
-rw-r--r--spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb31
-rw-r--r--spec/features/projects/services/user_activates_emails_on_push_spec.rb23
-rw-r--r--spec/features/projects/services/user_activates_flowdock_spec.rb23
-rw-r--r--spec/features/projects/services/user_activates_hipchat_spec.rb38
-rw-r--r--spec/features/projects/services/user_activates_irker_spec.rb24
-rw-r--r--spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb26
-rw-r--r--spec/features/projects/services/user_activates_jira_spec.rb (renamed from spec/features/projects/services/jira_service_spec.rb)2
-rw-r--r--spec/features/projects/services/user_activates_mattermost_slash_command_spec.rb (renamed from spec/features/projects/services/mattermost_slash_command_spec.rb)0
-rw-r--r--spec/features/projects/services/user_activates_pivotaltracker_spec.rb23
-rw-r--r--spec/features/projects/services/user_activates_pushover_spec.rb27
-rw-r--r--spec/features/projects/services/user_activates_slack_notifications_spec.rb54
-rw-r--r--spec/features/projects/services/user_activates_slack_slash_command_spec.rb (renamed from spec/features/projects/services/slack_slash_command_spec.rb)0
-rw-r--r--spec/features/projects/services/user_views_services_spec.rb25
-rw-r--r--spec/features/projects/settings/integration_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/pipelines_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/repository_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/user_manages_group_links_spec.rb41
-rw-r--r--spec/features/projects/settings/user_manages_project_members_spec.rb68
-rw-r--r--spec/features/projects/settings/visibility_settings_spec.rb2
-rw-r--r--spec/features/projects/shortcuts_spec.rb21
-rw-r--r--spec/features/projects/show_project_spec.rb2
-rw-r--r--spec/features/projects/snippets/user_comments_on_snippet_spec.rb25
-rw-r--r--spec/features/projects/snippets/user_deletes_snippet_spec.rb20
-rw-r--r--spec/features/projects/snippets/user_updates_snippet_spec.rb25
-rw-r--r--spec/features/projects/snippets/user_views_snippets_spec.rb20
-rw-r--r--spec/features/projects/user_archives_project_spec.rb43
-rw-r--r--spec/features/projects/user_browses_a_tree_with_a_folder_containing_only_a_folder.rb20
-rw-r--r--spec/features/projects/user_browses_files_spec.rb8
-rw-r--r--spec/features/projects/user_creates_directory_spec.rb4
-rw-r--r--spec/features/projects/user_creates_files_spec.rb12
-rw-r--r--spec/features/projects/user_creates_project_spec.rb2
-rw-r--r--spec/features/projects/user_deletes_files_spec.rb6
-rw-r--r--spec/features/projects/user_edits_files_spec.rb60
-rw-r--r--spec/features/projects/user_interacts_with_stars_spec.rb2
-rw-r--r--spec/features/projects/user_replaces_files_spec.rb6
-rw-r--r--spec/features/projects/user_uploads_files_spec.rb13
-rw-r--r--spec/features/projects/user_uses_shortcuts_spec.rb108
-rw-r--r--spec/features/projects/view_on_env_spec.rb2
-rw-r--r--spec/features/projects/wiki/markdown_preview_spec.rb2
-rw-r--r--spec/features/projects/wiki/user_creates_wiki_page_spec.rb227
-rw-r--r--spec/features/projects/wiki/user_deletes_wiki_page_spec.rb19
-rw-r--r--spec/features/projects/wiki/user_updates_wiki_page_spec.rb149
-rw-r--r--spec/features/projects/wiki/user_views_project_wiki_page_spec.rb39
-rw-r--r--spec/features/projects/wiki/user_views_wiki_page_spec.rb140
-rw-r--r--spec/features/projects_spec.rb60
-rw-r--r--spec/features/protected_branches_spec.rb209
-rw-r--r--spec/features/protected_tags_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_code_spec.rb67
-rw-r--r--spec/features/search/user_searches_for_comments_spec.rb47
-rw-r--r--spec/features/search/user_searches_for_commits_spec.rb49
-rw-r--r--spec/features/search/user_searches_for_issues_spec.rb76
-rw-r--r--spec/features/search/user_searches_for_merge_requests_spec.rb51
-rw-r--r--spec/features/search/user_searches_for_milestones_spec.rb51
-rw-r--r--spec/features/search/user_searches_for_projects_spec.rb36
-rw-r--r--spec/features/search/user_searches_for_wiki_pages_spec.rb35
-rw-r--r--spec/features/search/user_uses_header_search_field_spec.rb90
-rw-r--r--spec/features/search/user_uses_search_filters_spec.rb52
-rw-r--r--spec/features/search_spec.rb310
-rw-r--r--spec/features/security/project/internal_access_spec.rb15
-rw-r--r--spec/features/security/project/private_access_spec.rb15
-rw-r--r--spec/features/security/project/public_access_spec.rb15
-rw-r--r--spec/features/signup_spec.rb18
-rw-r--r--spec/features/snippets/internal_snippet_spec.rb2
-rw-r--r--spec/features/tags/master_creates_tag_spec.rb2
-rw-r--r--spec/features/tags/master_deletes_tag_spec.rb31
-rw-r--r--spec/features/task_lists_spec.rb8
-rw-r--r--spec/features/triggers_spec.rb2
-rw-r--r--spec/features/uploads/user_uploads_file_to_note_spec.rb16
-rw-r--r--spec/features/user_callout_spec.rb55
-rw-r--r--spec/features/users/snippets_spec.rb2
-rw-r--r--spec/features/users_spec.rb2
-rw-r--r--spec/features/variables_spec.rb2
-rw-r--r--spec/finders/autocomplete_users_finder_spec.rb97
-rw-r--r--spec/finders/fork_projects_finder_spec.rb43
-rw-r--r--spec/finders/merge_request_target_project_finder_spec.rb54
-rw-r--r--spec/finders/merge_requests_finder_spec.rb12
-rw-r--r--spec/finders/users_finder_spec.rb22
-rw-r--r--spec/fixtures/api/schemas/cluster_status.json11
-rw-r--r--spec/fixtures/api/schemas/entities/merge_request.json7
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/commit/detail.json11
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/issues.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/merge_requests.json1
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/pipeline/basic.json16
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/user/admins.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/user/basics.json4
-rw-r--r--spec/fixtures/api/schemas/registry/repositories.json6
-rw-r--r--spec/fixtures/api/schemas/registry/repository.json27
-rw-r--r--spec/fixtures/api/schemas/registry/tag.json28
-rw-r--r--spec/fixtures/api/schemas/registry/tags.json6
-rw-r--r--spec/fixtures/config/kubeconfig.yml2
-rw-r--r--spec/fixtures/trace/trace_with_sections15
-rw-r--r--spec/helpers/auto_devops_helper_spec.rb26
-rw-r--r--spec/helpers/avatars_helper_spec.rb102
-rw-r--r--spec/helpers/commits_helper_spec.rb22
-rw-r--r--spec/helpers/diff_helper_spec.rb4
-rw-r--r--spec/helpers/groups_helper_spec.rb2
-rw-r--r--spec/helpers/icons_helper_spec.rb19
-rw-r--r--spec/helpers/merge_requests_helper_spec.rb7
-rw-r--r--spec/helpers/page_layout_helper_spec.rb6
-rw-r--r--spec/helpers/projects_helper_spec.rb44
-rw-r--r--spec/helpers/submodule_helper_spec.rb6
-rw-r--r--spec/initializers/doorkeeper_spec.rb4
-rw-r--r--spec/initializers/secret_token_spec.rb18
-rw-r--r--spec/javascripts/abuse_reports_spec.js80
-rw-r--r--spec/javascripts/ajax_loading_spinner_spec.js4
-rw-r--r--spec/javascripts/blob/notebook/index_spec.js4
-rw-r--r--spec/javascripts/blob/pdf/index_spec.js2
-rw-r--r--spec/javascripts/build_spec.js14
-rw-r--r--spec/javascripts/clusters_spec.js79
-rw-r--r--spec/javascripts/commit/pipelines/pipelines_spec.js8
-rw-r--r--spec/javascripts/commits_spec.js108
-rw-r--r--spec/javascripts/cycle_analytics/banner_spec.js41
-rw-r--r--spec/javascripts/cycle_analytics/limit_warning_component_spec.js2
-rw-r--r--spec/javascripts/cycle_analytics/total_time_component_spec.js58
-rw-r--r--spec/javascripts/environments/folder/environments_folder_view_spec.js4
-rw-r--r--spec/javascripts/feature_highlight/feature_highlight_helper_spec.js219
-rw-r--r--spec/javascripts/feature_highlight/feature_highlight_options_spec.js45
-rw-r--r--spec/javascripts/feature_highlight/feature_highlight_spec.js122
-rw-r--r--spec/javascripts/filtered_search/dropdown_user_spec.js2
-rw-r--r--spec/javascripts/filtered_search/filtered_search_manager_spec.js22
-rw-r--r--spec/javascripts/fixtures/clusters.rb34
-rw-r--r--spec/javascripts/fixtures/dashboard.rb35
-rw-r--r--spec/javascripts/fixtures/merge_requests.rb6
-rw-r--r--spec/javascripts/fixtures/pipelines.html.haml2
-rw-r--r--spec/javascripts/fly_out_nav_spec.js15
-rw-r--r--spec/javascripts/helpers/vuex_action_helper.js (renamed from spec/javascripts/notes/stores/helpers.js)0
-rw-r--r--spec/javascripts/image_diff/helpers/badge_helper_spec.js132
-rw-r--r--spec/javascripts/image_diff/helpers/comment_indicator_helper_spec.js139
-rw-r--r--spec/javascripts/image_diff/helpers/dom_helper_spec.js118
-rw-r--r--spec/javascripts/image_diff/helpers/utils_helper_spec.js207
-rw-r--r--spec/javascripts/image_diff/image_badge_spec.js84
-rw-r--r--spec/javascripts/image_diff/image_diff_spec.js361
-rw-r--r--spec/javascripts/image_diff/init_discussion_tab_spec.js37
-rw-r--r--spec/javascripts/image_diff/mock_data.js28
-rw-r--r--spec/javascripts/image_diff/replaced_image_diff_spec.js312
-rw-r--r--spec/javascripts/image_diff/view_types_spec.js24
-rw-r--r--spec/javascripts/issuable_context_spec.js34
-rw-r--r--spec/javascripts/issue_show/components/app_spec.js25
-rw-r--r--spec/javascripts/lib/utils/common_utils_spec.js693
-rw-r--r--spec/javascripts/lib/utils/csrf_token_spec.js49
-rw-r--r--spec/javascripts/lib/utils/image_utility_spec.js32
-rw-r--r--spec/javascripts/lib/utils/sticky_spec.js75
-rw-r--r--spec/javascripts/line_highlighter_spec.js18
-rw-r--r--spec/javascripts/locale/sprintf_spec.js74
-rw-r--r--spec/javascripts/merge_request_spec.js39
-rw-r--r--spec/javascripts/merge_request_tabs_spec.js30
-rw-r--r--spec/javascripts/monitoring/dashboard_state_spec.js24
-rw-r--r--spec/javascripts/monitoring/graph/deployment_spec.js25
-rw-r--r--spec/javascripts/monitoring/graph/flag_spec.js49
-rw-r--r--spec/javascripts/monitoring/graph/legend_spec.js9
-rw-r--r--spec/javascripts/monitoring/graph_path_spec.js (renamed from spec/javascripts/monitoring/monitoring_paths_spec.js)23
-rw-r--r--spec/javascripts/monitoring/graph_spec.js24
-rw-r--r--spec/javascripts/monitoring/mock_data.js8
-rw-r--r--spec/javascripts/monitoring/utils/multiple_time_series_spec.js15
-rw-r--r--spec/javascripts/notes/components/issue_comment_form_spec.js54
-rw-r--r--spec/javascripts/notes/stores/actions_spec.js3
-rw-r--r--spec/javascripts/notes/stores/mutation_spec.js30
-rw-r--r--spec/javascripts/notes_spec.js14
-rw-r--r--spec/javascripts/pipelines/empty_state_spec.js1
-rw-r--r--spec/javascripts/pipelines/error_state_spec.js6
-rw-r--r--spec/javascripts/pipelines/pipeline_url_spec.js19
-rw-r--r--spec/javascripts/pipelines/pipelines_artifacts_spec.js2
-rw-r--r--spec/javascripts/pretty_time_spec.js282
-rw-r--r--spec/javascripts/profile/account/components/delete_account_modal_spec.js129
-rw-r--r--spec/javascripts/projects_dropdown/service/projects_service_spec.js2
-rw-r--r--spec/javascripts/registry/components/app_spec.js122
-rw-r--r--spec/javascripts/registry/components/collapsible_container_spec.js58
-rw-r--r--spec/javascripts/registry/components/table_registry_spec.js49
-rw-r--r--spec/javascripts/registry/getters_spec.js43
-rw-r--r--spec/javascripts/registry/mock_data.js122
-rw-r--r--spec/javascripts/registry/stores/actions_spec.js85
-rw-r--r--spec/javascripts/registry/stores/mutations_spec.js81
-rw-r--r--spec/javascripts/repo/components/repo_edit_button_spec.js2
-rw-r--r--spec/javascripts/repo/components/repo_file_spec.js28
-rw-r--r--spec/javascripts/repo/components/repo_sidebar_spec.js76
-rw-r--r--spec/javascripts/right_sidebar_spec.js116
-rw-r--r--spec/javascripts/sidebar/lock/edit_form_buttons_spec.js36
-rw-r--r--spec/javascripts/sidebar/lock/edit_form_spec.js41
-rw-r--r--spec/javascripts/sidebar/lock/lock_issue_sidebar_spec.js71
-rw-r--r--spec/javascripts/todos_spec.js29
-rw-r--r--spec/javascripts/user_callout_spec.js49
-rw-r--r--spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js20
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js36
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js113
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js15
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js201
-rw-r--r--spec/javascripts/vue_mr_widget/mr_widget_options_spec.js42
-rw-r--r--spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js1
-rw-r--r--spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js34
-rw-r--r--spec/javascripts/vue_shared/components/issue/confidential_issue_warning_spec.js20
-rw-r--r--spec/javascripts/vue_shared/components/issue/issue_warning_spec.js46
-rw-r--r--spec/lib/banzai/filter/milestone_reference_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/sanitization_filter_spec.rb36
-rw-r--r--spec/lib/banzai/pipeline/email_pipeline_spec.rb14
-rw-r--r--spec/lib/banzai/renderer_spec.rb9
-rw-r--r--spec/lib/ci/gitlab_ci_yaml_processor_spec.rb1697
-rw-r--r--spec/lib/github/client_spec.rb34
-rw-r--r--spec/lib/github/import/legacy_diff_note_spec.rb9
-rw-r--r--spec/lib/github/import/note_spec.rb9
-rw-r--r--spec/lib/gitlab/auth_spec.rb42
-rw-r--r--spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb117
-rw-r--r--spec/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys_spec.rb32
-rw-r--r--spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb40
-rw-r--r--spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb122
-rw-r--r--spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb36
-rw-r--r--spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb93
-rw-r--r--spec/lib/gitlab/backup/manager_spec.rb52
-rw-r--r--spec/lib/gitlab/checks/force_push_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/ansi2html_spec.rb (renamed from spec/lib/ci/ansi2html_spec.rb)28
-rw-r--r--spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb30
-rw-r--r--spec/lib/gitlab/ci/build/policy/refs_spec.rb87
-rw-r--r--spec/lib/gitlab/ci/build/policy_spec.rb37
-rw-r--r--spec/lib/gitlab/ci/charts_spec.rb (renamed from spec/lib/ci/charts_spec.rb)4
-rw-r--r--spec/lib/gitlab/ci/mask_secret_spec.rb (renamed from spec/lib/ci/mask_secret_spec.rb)2
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/create_spec.rb66
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb55
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb85
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb142
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb130
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb60
-rw-r--r--spec/lib/gitlab/ci/pipeline/duration_spec.rb (renamed from spec/lib/gitlab/ci/pipeline_duration_spec.rb)6
-rw-r--r--spec/lib/gitlab/ci/stage/seed_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/trace/section_parser_spec.rb87
-rw-r--r--spec/lib/gitlab/ci/trace_spec.rb87
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb1716
-rw-r--r--spec/lib/gitlab/closing_issue_extractor_spec.rb44
-rw-r--r--spec/lib/gitlab/data_builder/push_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb122
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb4
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb2
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb2
-rw-r--r--spec/lib/gitlab/diff/diff_refs_spec.rb55
-rw-r--r--spec/lib/gitlab/diff/formatters/image_formatter_spec.rb20
-rw-r--r--spec/lib/gitlab/diff/formatters/text_formatter_spec.rb42
-rw-r--r--spec/lib/gitlab/diff/position_spec.rb117
-rw-r--r--spec/lib/gitlab/diff/position_tracer_spec.rb26
-rw-r--r--spec/lib/gitlab/gfm/reference_rewriter_spec.rb61
-rw-r--r--spec/lib/gitlab/git/blame_spec.rb2
-rw-r--r--spec/lib/gitlab/git/blob_spec.rb7
-rw-r--r--spec/lib/gitlab/git/commit_spec.rb54
-rw-r--r--spec/lib/gitlab/git/committer_spec.rb22
-rw-r--r--spec/lib/gitlab/git/diff_collection_spec.rb3
-rw-r--r--spec/lib/gitlab/git/diff_spec.rb38
-rw-r--r--spec/lib/gitlab/git/hook_spec.rb11
-rw-r--r--spec/lib/gitlab/git/hooks_service_spec.rb8
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb179
-rw-r--r--spec/lib/gitlab/git/rev_list_spec.rb4
-rw-r--r--spec/lib/gitlab/git/storage/circuit_breaker_spec.rb13
-rw-r--r--spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb77
-rw-r--r--spec/lib/gitlab/git/tag_spec.rb2
-rw-r--r--spec/lib/gitlab/git/user_spec.rb38
-rw-r--r--spec/lib/gitlab/git_access_spec.rb27
-rw-r--r--spec/lib/gitlab/git_access_wiki_spec.rb29
-rw-r--r--spec/lib/gitlab/git_ref_validator_spec.rb5
-rw-r--r--spec/lib/gitlab/git_spec.rb9
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb29
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb92
-rw-r--r--spec/lib/gitlab/gitaly_client/ref_service_spec.rb4
-rw-r--r--spec/lib/gitlab/gitaly_client/util_spec.rb43
-rw-r--r--spec/lib/gitlab/gitaly_client_spec.rb138
-rw-r--r--spec/lib/gitlab/gpg/commit_spec.rb39
-rw-r--r--spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb49
-rw-r--r--spec/lib/gitlab/gpg_spec.rb17
-rw-r--r--spec/lib/gitlab/group_hierarchy_spec.rb15
-rw-r--r--spec/lib/gitlab/health_checks/fs_shards_check_spec.rb20
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml9
-rw-r--r--spec/lib/gitlab/import_export/fork_spec.rb6
-rw-r--r--spec/lib/gitlab/import_export/merge_request_parser_spec.rb7
-rw-r--r--spec/lib/gitlab/import_export/project_tree_saver_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml31
-rw-r--r--spec/lib/gitlab/ldap/auth_hash_spec.rb24
-rw-r--r--spec/lib/gitlab/ldap/dn_spec.rb224
-rw-r--r--spec/lib/gitlab/ldap/person_spec.rb33
-rw-r--r--spec/lib/gitlab/ldap/user_spec.rb16
-rw-r--r--spec/lib/gitlab/middleware/go_spec.rb18
-rw-r--r--spec/lib/gitlab/middleware/read_only_spec.rb142
-rw-r--r--spec/lib/gitlab/o_auth/auth_hash_spec.rb7
-rw-r--r--spec/lib/gitlab/o_auth/user_spec.rb17
-rw-r--r--spec/lib/gitlab/path_regex_spec.rb7
-rw-r--r--spec/lib/gitlab/popen_spec.rb2
-rw-r--r--spec/lib/gitlab/saml/user_spec.rb97
-rw-r--r--spec/lib/gitlab/search_results_spec.rb4
-rw-r--r--spec/lib/gitlab/shell_spec.rb139
-rw-r--r--spec/lib/gitlab/sql/union_spec.rb7
-rw-r--r--spec/lib/gitlab/url_sanitizer_spec.rb34
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb41
-rw-r--r--spec/lib/gitlab/workhorse_spec.rb65
-rw-r--r--spec/lib/google_api/auth_spec.rb41
-rw-r--r--spec/lib/google_api/cloud_platform/client_spec.rb128
-rw-r--r--spec/lib/rspec_flaky/config_spec.rb102
-rw-r--r--spec/lib/rspec_flaky/flaky_example_spec.rb129
-rw-r--r--spec/lib/rspec_flaky/flaky_examples_collection_spec.rb79
-rw-r--r--spec/lib/rspec_flaky/listener_spec.rb173
-rw-r--r--spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb15
-rw-r--r--spec/lib/system_check/base_check_spec.rb17
-rw-r--r--spec/lib/system_check/orphans/namespace_check_spec.rb61
-rw-r--r--spec/lib/system_check/orphans/repository_check_spec.rb68
-rw-r--r--spec/mailers/emails/profile_spec.rb25
-rw-r--r--spec/mailers/notify_spec.rb491
-rw-r--r--spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb5
-rw-r--r--spec/migrations/clean_stages_statuses_migration_spec.rb51
-rw-r--r--spec/migrations/delete_conflicting_redirect_routes_spec.rb58
-rw-r--r--spec/migrations/normalize_ldap_extern_uids_spec.rb56
-rw-r--r--spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb43
-rw-r--r--spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb59
-rw-r--r--spec/migrations/update_legacy_diff_notes_type_for_import_spec.rb22
-rw-r--r--spec/migrations/update_notes_type_for_import_spec.rb22
-rw-r--r--spec/migrations/update_upload_paths_to_system_spec.rb4
-rw-r--r--spec/models/abuse_report_spec.rb7
-rw-r--r--spec/models/appearance_spec.rb2
-rw-r--r--spec/models/application_setting_spec.rb22
-rw-r--r--spec/models/chat_name_spec.rb3
-rw-r--r--spec/models/chat_team_spec.rb3
-rw-r--r--spec/models/ci/artifact_blob_spec.rb50
-rw-r--r--spec/models/ci/build_spec.rb60
-rw-r--r--spec/models/ci/build_trace_section_name_spec.rb12
-rw-r--r--spec/models/ci/build_trace_section_spec.rb11
-rw-r--r--spec/models/ci/pipeline_spec.rb31
-rw-r--r--spec/models/ci/pipeline_variable_spec.rb2
-rw-r--r--spec/models/ci/runner_spec.rb69
-rw-r--r--spec/models/commit_spec.rb7
-rw-r--r--spec/models/concerns/cache_markdown_field_spec.rb72
-rw-r--r--spec/models/concerns/has_status_spec.rb12
-rw-r--r--spec/models/concerns/routable_spec.rb10
-rw-r--r--spec/models/diff_note_spec.rb40
-rw-r--r--spec/models/email_spec.rb29
-rw-r--r--spec/models/environment_spec.rb22
-rw-r--r--spec/models/fork_network_member_spec.rb8
-rw-r--r--spec/models/fork_network_spec.rb54
-rw-r--r--spec/models/forked_project_link_spec.rb12
-rw-r--r--spec/models/gcp/cluster_spec.rb240
-rw-r--r--spec/models/gpg_key_spec.rb56
-rw-r--r--spec/models/gpg_key_subkey_spec.rb15
-rw-r--r--spec/models/gpg_signature_spec.rb52
-rw-r--r--spec/models/key_spec.rb45
-rw-r--r--spec/models/lfs_objects_project_spec.rb15
-rw-r--r--spec/models/member_spec.rb4
-rw-r--r--spec/models/merge_request_spec.rb97
-rw-r--r--spec/models/milestone_spec.rb15
-rw-r--r--spec/models/namespace_spec.rb51
-rw-r--r--spec/models/note_spec.rb50
-rw-r--r--spec/models/personal_access_token_spec.rb35
-rw-r--r--spec/models/project_auto_devops_spec.rb16
-rw-r--r--spec/models/project_group_link_spec.rb2
-rw-r--r--spec/models/project_services/chat_message/issue_message_spec.rb12
-rw-r--r--spec/models/project_services/chat_message/merge_message_spec.rb12
-rw-r--r--spec/models/project_services/chat_message/note_message_spec.rb22
-rw-r--r--spec/models/project_services/chat_message/pipeline_message_spec.rb15
-rw-r--r--spec/models/project_services/chat_message/wiki_page_message_spec.rb12
-rw-r--r--spec/models/project_services/kubernetes_service_spec.rb2
-rw-r--r--spec/models/project_services/pipelines_email_service_spec.rb37
-rw-r--r--spec/models/project_spec.rb266
-rw-r--r--spec/models/project_wiki_spec.rb64
-rw-r--r--spec/models/push_event_spec.rb88
-rw-r--r--spec/models/repository_spec.rb439
-rw-r--r--spec/models/sent_notification_spec.rb122
-rw-r--r--spec/models/user_custom_attribute_spec.rb16
-rw-r--r--spec/models/user_spec.rb289
-rw-r--r--spec/models/wiki_page_spec.rb14
-rw-r--r--spec/policies/gcp/cluster_policy_spec.rb28
-rw-r--r--spec/policies/global_policy_spec.rb37
-rw-r--r--spec/policies/group_policy_spec.rb18
-rw-r--r--spec/policies/issuable_policy_spec.rb28
-rw-r--r--spec/policies/namespace_policy_spec.rb20
-rw-r--r--spec/policies/note_policy_spec.rb71
-rw-r--r--spec/policies/project_policy_spec.rb137
-rw-r--r--spec/presenters/ci/pipeline_presenter_spec.rb17
-rw-r--r--spec/presenters/gcp/cluster_presenter_spec.rb35
-rw-r--r--spec/presenters/merge_request_presenter_spec.rb4
-rw-r--r--spec/requests/api/access_requests_spec.rb12
-rw-r--r--spec/requests/api/award_emoji_spec.rb14
-rw-r--r--spec/requests/api/boards_spec.rb33
-rw-r--r--spec/requests/api/branches_spec.rb12
-rw-r--r--spec/requests/api/broadcast_messages_spec.rb11
-rw-r--r--spec/requests/api/commits_spec.rb5
-rw-r--r--spec/requests/api/environments_spec.rb1
-rw-r--r--spec/requests/api/groups_spec.rb3
-rw-r--r--spec/requests/api/helpers_spec.rb127
-rw-r--r--spec/requests/api/issues_spec.rb11
-rw-r--r--spec/requests/api/merge_requests_spec.rb77
-rw-r--r--spec/requests/api/notes_spec.rb34
-rw-r--r--spec/requests/api/projects_spec.rb63
-rw-r--r--spec/requests/api/runner_spec.rb3
-rw-r--r--spec/requests/api/services_spec.rb13
-rw-r--r--spec/requests/api/users_spec.rb64
-rw-r--r--spec/requests/api/v3/award_emoji_spec.rb14
-rw-r--r--spec/requests/api/v3/boards_spec.rb25
-rw-r--r--spec/requests/api/v3/branches_spec.rb10
-rw-r--r--spec/requests/api/v3/broadcast_messages_spec.rb6
-rw-r--r--spec/requests/api/v3/builds_spec.rb14
-rw-r--r--spec/requests/api/v3/issues_spec.rb19
-rw-r--r--spec/requests/api/v3/merge_requests_spec.rb40
-rw-r--r--spec/requests/api/v3/projects_spec.rb1
-rw-r--r--spec/requests/jwt_controller_spec.rb4
-rw-r--r--spec/requests/lfs_http_spec.rb34
-rw-r--r--spec/rubocop/cop/rspec/verbose_include_metadata_spec.rb53
-rw-r--r--spec/serializers/build_details_entity_spec.rb10
-rw-r--r--spec/serializers/build_serializer_spec.rb2
-rw-r--r--spec/serializers/cluster_entity_spec.rb22
-rw-r--r--spec/serializers/cluster_serializer_spec.rb19
-rw-r--r--spec/serializers/container_repository_entity_spec.rb41
-rw-r--r--spec/serializers/container_tag_entity_spec.rb43
-rw-r--r--spec/serializers/environment_entity_spec.rb4
-rw-r--r--spec/serializers/merge_request_entity_spec.rb13
-rw-r--r--spec/serializers/pipeline_entity_spec.rb13
-rw-r--r--spec/serializers/pipeline_serializer_spec.rb12
-rw-r--r--spec/serializers/status_entity_spec.rb4
-rw-r--r--spec/services/ci/create_cluster_service_spec.rb47
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb128
-rw-r--r--spec/services/ci/extract_sections_from_build_trace_service_spec.rb55
-rw-r--r--spec/services/ci/fetch_gcp_operation_service_spec.rb36
-rw-r--r--spec/services/ci/fetch_kubernetes_token_service_spec.rb64
-rw-r--r--spec/services/ci/finalize_cluster_creation_service_spec.rb61
-rw-r--r--spec/services/ci/integrate_cluster_service_spec.rb42
-rw-r--r--spec/services/ci/pipeline_trigger_service_spec.rb2
-rw-r--r--spec/services/ci/provision_cluster_service_spec.rb85
-rw-r--r--spec/services/ci/retry_build_service_spec.rb11
-rw-r--r--spec/services/ci/update_cluster_service_spec.rb37
-rw-r--r--spec/services/delete_merged_branches_service_spec.rb6
-rw-r--r--spec/services/deploy_keys/create_service_spec.rb12
-rw-r--r--spec/services/discussions/update_diff_position_service_spec.rb6
-rw-r--r--spec/services/emails/confirm_service_spec.rb15
-rw-r--r--spec/services/emails/create_service_spec.rb7
-rw-r--r--spec/services/emails/destroy_service_spec.rb4
-rw-r--r--spec/services/event_create_service_spec.rb8
-rw-r--r--spec/services/gpg_keys/create_service_spec.rb31
-rw-r--r--spec/services/issues/close_service_spec.rb2
-rw-r--r--spec/services/issues/create_service_spec.rb2
-rw-r--r--spec/services/issues/update_service_spec.rb43
-rw-r--r--spec/services/keys/create_service_spec.rb21
-rw-r--r--spec/services/keys/last_used_service_spec.rb63
-rw-r--r--spec/services/merge_requests/close_service_spec.rb2
-rw-r--r--spec/services/merge_requests/conflicts/resolve_service_spec.rb20
-rw-r--r--spec/services/merge_requests/create_service_spec.rb2
-rw-r--r--spec/services/merge_requests/ff_merge_service_spec.rb84
-rw-r--r--spec/services/merge_requests/get_urls_service_spec.rb4
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb23
-rw-r--r--spec/services/merge_requests/post_merge_service_spec.rb11
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb12
-rw-r--r--spec/services/merge_requests/update_service_spec.rb11
-rw-r--r--spec/services/notification_service_spec.rb13
-rw-r--r--spec/services/projects/count_service_spec.rb4
-rw-r--r--spec/services/projects/create_service_spec.rb86
-rw-r--r--spec/services/projects/destroy_service_spec.rb13
-rw-r--r--spec/services/projects/fork_service_spec.rb44
-rw-r--r--spec/services/projects/hashed_storage_migration_service_spec.rb74
-rw-r--r--spec/services/projects/housekeeping_service_spec.rb2
-rw-r--r--spec/services/projects/transfer_service_spec.rb7
-rw-r--r--spec/services/projects/unlink_fork_service_spec.rb30
-rw-r--r--spec/services/projects/update_service_spec.rb76
-rw-r--r--spec/services/system_note_service_spec.rb4
-rw-r--r--spec/services/tags/create_service_spec.rb2
-rw-r--r--spec/services/users/activity_service_spec.rb12
-rw-r--r--spec/services/users/last_push_event_service_spec.rb112
-rw-r--r--spec/services/users/update_service_spec.rb8
-rw-r--r--spec/spec_helper.rb37
-rw-r--r--spec/support/api/scopes/read_user_shared_examples.rb4
-rw-r--r--spec/support/features/discussion_comments_shared_example.rb2
-rw-r--r--spec/support/features/issuable_slash_commands_shared_examples.rb6
-rw-r--r--spec/support/gpg_helpers.rb313
-rw-r--r--spec/support/helpers/merge_request_diff_helpers.rb28
-rw-r--r--spec/support/ldap_shared_examples.rb69
-rw-r--r--spec/support/matchers/navigation_matcher.rb12
-rw-r--r--spec/support/migrations_helpers.rb4
-rw-r--r--spec/support/project_forks_helper.rb58
-rw-r--r--spec/support/query_recorder.rb36
-rw-r--r--spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb28
-rw-r--r--spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb21
-rw-r--r--spec/support/shared_examples/features/project_features_apply_to_issuables_shared_examples.rb (renamed from spec/support/project_features_apply_to_issuables_shared_examples.rb)0
-rw-r--r--spec/support/shared_examples/features/search_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/position_formatters.rb43
-rw-r--r--spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb103
-rw-r--r--spec/support/stub_configuration.rb2
-rw-r--r--spec/support/stub_gitlab_calls.rb4
-rw-r--r--spec/support/test_env.rb93
-rw-r--r--spec/support/update_invalid_issuable.rb27
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb17
-rw-r--r--spec/tasks/gitlab/storage_rake_spec.rb52
-rw-r--r--spec/tasks/gitlab/task_helpers_spec.rb20
-rw-r--r--spec/views/ci/lints/show.html.haml_spec.rb4
-rw-r--r--spec/views/dashboard/projects/_nav.html.haml.rb17
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb24
-rw-r--r--spec/views/projects/merge_requests/_commits.html.haml_spec.rb5
-rw-r--r--spec/views/projects/merge_requests/edit.html.haml_spec.rb9
-rw-r--r--spec/views/projects/merge_requests/show.html.haml_spec.rb11
-rw-r--r--spec/views/projects/pipelines_settings/_show.html.haml_spec.rb62
-rw-r--r--spec/views/projects/registry/repositories/index.html.haml_spec.rb36
-rw-r--r--spec/views/shared/issuable/_participants.html.haml.rb26
-rw-r--r--spec/views/shared/milestones/_issuable.html.haml.rb19
-rw-r--r--spec/workers/build_finished_worker_spec.rb2
-rw-r--r--spec/workers/build_trace_sections_worker_spec.rb23
-rw-r--r--spec/workers/cluster_provision_worker_spec.rb23
-rw-r--r--spec/workers/concerns/cluster_queue_spec.rb15
-rw-r--r--spec/workers/git_garbage_collect_worker_spec.rb8
-rw-r--r--spec/workers/namespaceless_project_destroy_worker_spec.rb10
-rw-r--r--spec/workers/post_receive_spec.rb13
-rw-r--r--spec/workers/project_migrate_hashed_storage_worker_spec.rb29
-rw-r--r--spec/workers/repository_check/single_repository_worker_spec.rb7
-rw-r--r--spec/workers/storage_migrator_worker_spec.rb30
-rw-r--r--spec/workers/use_key_worker_spec.rb23
-rw-r--r--spec/workers/wait_for_cluster_creation_worker_spec.rb67
719 files changed, 23020 insertions(+), 7015 deletions(-)
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index aadd3317875..25fe547ff37 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe Admin::UsersController do
let(:user) { create(:user) }
- let(:admin) { create(:admin) }
+ set(:admin) { create(:admin) }
before do
sign_in(admin)
diff --git a/spec/controllers/boards/issues_controller_spec.rb b/spec/controllers/boards/issues_controller_spec.rb
index dfa06c78d46..5163099cd98 100644
--- a/spec/controllers/boards/issues_controller_spec.rb
+++ b/spec/controllers/boards/issues_controller_spec.rb
@@ -45,6 +45,17 @@ describe Boards::IssuesController do
expect(parsed_response.length).to eq 2
expect(development.issues.map(&:relative_position)).not_to include(nil)
end
+
+ it 'avoids N+1 database queries' do
+ create(:labeled_issue, project: project, labels: [development])
+ control_count = ActiveRecord::QueryRecorder.new { list_issues(user: user, board: board, list: list2) }.count
+
+ # 25 issues is bigger than the page size
+ # the relative position will ignore the `#make_sure_position_set` queries
+ create_list(:labeled_issue, 25, project: project, labels: [development], assignees: [johndoe], relative_position: 1)
+
+ expect { list_issues(user: user, board: board, list: list2) }.not_to exceed_query_limit(control_count)
+ end
end
context 'with invalid list id' do
diff --git a/spec/controllers/dashboard/todos_controller_spec.rb b/spec/controllers/dashboard/todos_controller_spec.rb
index c8c6b9f41bf..9df4ebf2fa0 100644
--- a/spec/controllers/dashboard/todos_controller_spec.rb
+++ b/spec/controllers/dashboard/todos_controller_spec.rb
@@ -57,7 +57,7 @@ describe Dashboard::TodosController do
expect(response).to redirect_to(dashboard_todos_path(page: last_page))
end
- it 'redirects to correspondent page' do
+ it 'goes to the correct page' do
get :index, page: last_page
expect(assigns(:todos).current_page).to eq(last_page)
@@ -70,6 +70,30 @@ describe Dashboard::TodosController do
expect(response).to redirect_to(dashboard_todos_path(page: last_page))
end
+
+ context 'when providing no filters' do
+ it 'does not perform a query to get the page count, but gets that from the user' do
+ allow(controller).to receive(:current_user).and_return(user)
+
+ expect(user).to receive(:todos_pending_count).and_call_original
+
+ get :index, page: (last_page + 1).to_param, sort: :created_asc
+
+ expect(response).to redirect_to(dashboard_todos_path(page: last_page, sort: :created_asc))
+ end
+ end
+
+ context 'when providing filters' do
+ it 'performs a query to get the correct page count' do
+ allow(controller).to receive(:current_user).and_return(user)
+
+ expect(user).not_to receive(:todos_pending_count)
+
+ get :index, page: (last_page + 1).to_param, project_id: project.id
+
+ expect(response).to redirect_to(dashboard_todos_path(page: last_page, project_id: project.id))
+ end
+ end
end
end
diff --git a/spec/controllers/google_api/authorizations_controller_spec.rb b/spec/controllers/google_api/authorizations_controller_spec.rb
new file mode 100644
index 00000000000..80d553f0f34
--- /dev/null
+++ b/spec/controllers/google_api/authorizations_controller_spec.rb
@@ -0,0 +1,49 @@
+require 'spec_helper'
+
+describe GoogleApi::AuthorizationsController do
+ describe 'GET|POST #callback' do
+ let(:user) { create(:user) }
+ let(:token) { 'token' }
+ let(:expires_at) { 1.hour.since.strftime('%s') }
+
+ subject { get :callback, code: 'xxx', state: @state }
+
+ before do
+ sign_in(user)
+
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:get_token).and_return([token, expires_at])
+ end
+
+ it 'sets token and expires_at in session' do
+ subject
+
+ expect(session[GoogleApi::CloudPlatform::Client.session_key_for_token])
+ .to eq(token)
+ expect(session[GoogleApi::CloudPlatform::Client.session_key_for_expires_at])
+ .to eq(expires_at)
+ end
+
+ context 'when redirect uri key is stored in state' do
+ set(:project) { create(:project) }
+ let(:redirect_uri) { project_clusters_url(project).to_s }
+
+ before do
+ @state = GoogleApi::CloudPlatform::Client
+ .new_session_key_for_redirect_uri do |key|
+ session[key] = redirect_uri
+ end
+ end
+
+ it 'redirects to the URL stored in state param' do
+ expect(subject).to redirect_to(redirect_uri)
+ end
+ end
+
+ context 'when redirection url is not stored in state' do
+ it 'redirects to root_path' do
+ expect(subject).to redirect_to(root_path)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/health_controller_spec.rb b/spec/controllers/health_controller_spec.rb
index cc389e554ad..9e9cf4f2c1f 100644
--- a/spec/controllers/health_controller_spec.rb
+++ b/spec/controllers/health_controller_spec.rb
@@ -10,6 +10,7 @@ describe HealthController do
before do
allow(Settings.monitoring).to receive(:ip_whitelist).and_return([whitelisted_ip])
+ stub_storage_settings({}) # Hide the broken storage
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end
diff --git a/spec/controllers/profiles/emails_controller_spec.rb b/spec/controllers/profiles/emails_controller_spec.rb
new file mode 100644
index 00000000000..ecf14aad54f
--- /dev/null
+++ b/spec/controllers/profiles/emails_controller_spec.rb
@@ -0,0 +1,35 @@
+require 'spec_helper'
+
+describe Profiles::EmailsController do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe '#create' do
+ let(:email_params) { { email: "add_email@example.com" } }
+
+ it 'sends an email confirmation' do
+ expect { post(:create, { email: email_params }) }.to change { ActionMailer::Base.deliveries.size }
+ expect(ActionMailer::Base.deliveries.last.to).to eq [email_params[:email]]
+ expect(ActionMailer::Base.deliveries.last.subject).to match "Confirmation instructions"
+ end
+ end
+
+ describe '#resend_confirmation_instructions' do
+ let(:email_params) { { email: "add_email@example.com" } }
+
+ it 'resends an email confirmation' do
+ email = user.emails.create(email: 'add_email@example.com')
+
+ expect { put(:resend_confirmation_instructions, { id: email }) }.to change { ActionMailer::Base.deliveries.size }
+ expect(ActionMailer::Base.deliveries.last.to).to eq [email_params[:email]]
+ expect(ActionMailer::Base.deliveries.last.subject).to match "Confirmation instructions"
+ end
+
+ it 'unable to resend an email confirmation' do
+ expect { put(:resend_confirmation_instructions, { id: 1 }) }.not_to change { ActionMailer::Base.deliveries.size }
+ end
+ end
+end
diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb
index b52b63e05a4..ce5040ff02b 100644
--- a/spec/controllers/profiles_controller_spec.rb
+++ b/spec/controllers/profiles_controller_spec.rb
@@ -15,6 +15,20 @@ describe ProfilesController do
expect(user.unconfirmed_email).to eq('john@gmail.com')
end
+ it "allows an email update without confirmation if existing verified email" do
+ user = create(:user)
+ create(:email, :confirmed, user: user, email: 'john@gmail.com')
+ sign_in(user)
+
+ put :update,
+ user: { email: "john@gmail.com", name: "John" }
+
+ user.reload
+
+ expect(response.status).to eq(302)
+ expect(user.unconfirmed_email).to eq nil
+ end
+
it "ignores an email update from a user with an external email address" do
stub_omniauth_setting(sync_profile_from_provider: ['ldap'])
stub_omniauth_setting(sync_profile_attributes: true)
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 2bd8f8e2bfc..6defb9e15e6 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -1,8 +1,8 @@
require 'spec_helper'
describe Projects::ArtifactsController do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
+ set(:user) { create(:user) }
+ set(:project) { create(:project, :repository, :public) }
let(:pipeline) do
create(:ci_pipeline,
@@ -15,7 +15,7 @@ describe Projects::ArtifactsController do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
before do
- project.team << [user, :developer]
+ project.add_developer(user)
sign_in(user)
end
@@ -47,19 +47,67 @@ describe Projects::ArtifactsController do
end
describe 'GET file' do
- context 'when the file exists' do
- it 'renders the file view' do
- get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'ci_artifacts.txt'
+ before do
+ allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+ end
- expect(response).to render_template('projects/artifacts/file')
+ context 'when the file is served by GitLab Pages' do
+ before do
+ allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
+ end
+
+ context 'when the file exists' do
+ it 'renders the file view' do
+ get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'ci_artifacts.txt'
+
+ expect(response).to have_http_status(302)
+ end
+ end
+
+ context 'when the file does not exist' do
+ it 'responds Not Found' do
+ get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'unknown'
+
+ expect(response).to be_not_found
+ end
end
end
- context 'when the file does not exist' do
- it 'responds Not Found' do
- get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'unknown'
+ context 'when the file is served through Rails' do
+ context 'when the file exists' do
+ it 'renders the file view' do
+ get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'ci_artifacts.txt'
- expect(response).to be_not_found
+ expect(response).to have_http_status(:ok)
+ expect(response).to render_template('projects/artifacts/file')
+ end
+ end
+
+ context 'when the file does not exist' do
+ it 'responds Not Found' do
+ get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'unknown'
+
+ expect(response).to be_not_found
+ end
+ end
+ end
+
+ context 'when the project is private' do
+ let(:private_project) { create(:project, :repository, :private) }
+ let(:pipeline) { create(:ci_pipeline, project: private_project) }
+ let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
+
+ before do
+ private_project.add_developer(user)
+
+ allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
+ end
+
+ it 'does not redirect the request' do
+ get :file, namespace_id: private_project.namespace, project_id: private_project, job_id: job, path: 'ci_artifacts.txt'
+
+ expect(response).to have_http_status(:ok)
+ expect(response).to render_template('projects/artifacts/file')
end
end
end
diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb
index 64b9af7b845..fb76b7fdf38 100644
--- a/spec/controllers/projects/blob_controller_spec.rb
+++ b/spec/controllers/projects/blob_controller_spec.rb
@@ -1,6 +1,8 @@
require 'rails_helper'
describe Projects::BlobController do
+ include ProjectForksHelper
+
let(:project) { create(:project, :public, :repository) }
describe "GET show" do
@@ -226,9 +228,8 @@ describe Projects::BlobController do
end
context 'when user has forked project' do
- let(:forked_project_link) { create(:forked_project_link, forked_from_project: project) }
- let!(:forked_project) { forked_project_link.forked_to_project }
- let(:guest) { forked_project.owner }
+ let!(:forked_project) { fork_project(project, guest, namespace: guest.namespace, repository: true) }
+ let(:guest) { create(:user) }
before do
sign_in(guest)
diff --git a/spec/controllers/projects/branches_controller_spec.rb b/spec/controllers/projects/branches_controller_spec.rb
index 745d051a5c1..5e0b57e9b2e 100644
--- a/spec/controllers/projects/branches_controller_spec.rb
+++ b/spec/controllers/projects/branches_controller_spec.rb
@@ -367,5 +367,20 @@ describe Projects::BranchesController do
expect(parsed_response.first).to eq 'master'
end
end
+
+ context 'when branch contains an invalid UTF-8 sequence' do
+ before do
+ project.repository.create_branch("wrong-\xE5-utf8-sequence")
+ end
+
+      it 'returns status 200' do
+ get :index,
+ namespace_id: project.namespace,
+ project_id: project,
+ format: :html
+
+ expect(response).to have_http_status(200)
+ end
+ end
end
end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
new file mode 100644
index 00000000000..7985028d73b
--- /dev/null
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -0,0 +1,308 @@
+require 'spec_helper'
+
+describe Projects::ClustersController do
+ set(:user) { create(:user) }
+ set(:project) { create(:project) }
+ let(:role) { :master }
+
+ before do
+ project.team << [user, role]
+
+ sign_in(user)
+ end
+
+ describe 'GET index' do
+ subject do
+ get :index, namespace_id: project.namespace,
+ project_id: project
+ end
+
+ context 'when cluster is already created' do
+ let!(:cluster) { create(:gcp_cluster, :created_on_gke, project: project) }
+
+ it 'redirects to show a cluster' do
+ subject
+
+ expect(response).to redirect_to(project_cluster_path(project, cluster))
+ end
+ end
+
+    context 'when we do not have a cluster' do
+ it 'redirects to create a cluster' do
+ subject
+
+ expect(response).to redirect_to(new_project_cluster_path(project))
+ end
+ end
+ end
+
+ describe 'GET login' do
+ render_views
+
+ subject do
+ get :login, namespace_id: project.namespace,
+ project_id: project
+ end
+
+    context 'when omniauth is configured' do
+ it 'shows login button' do
+ subject
+
+ expect(response.body).to include('auth_buttons/signin_with_google')
+ end
+ end
+
+    context 'when omniauth is not configured' do
+ before do
+ stub_omniauth_setting(providers: [])
+ end
+
+ it 'shows notice message' do
+ subject
+
+ expect(response.body).to include('Ask your GitLab administrator if you want to use this service.')
+ end
+ end
+ end
+
+ shared_examples 'requires to login' do
+    it 'redirects to the login page' do
+ subject
+
+ expect(response).to redirect_to(login_project_clusters_path(project))
+ end
+ end
+
+ describe 'GET new' do
+ render_views
+
+ subject do
+ get :new, namespace_id: project.namespace,
+ project_id: project
+ end
+
+    context 'when logged in' do
+ before do
+ make_logged_in
+ end
+
+ it 'shows a creation form' do
+ subject
+
+ expect(response.body).to include('Create cluster')
+ end
+ end
+
+    context 'when not logged in' do
+ it_behaves_like 'requires to login'
+ end
+ end
+
+ describe 'POST create' do
+ subject do
+ post :create, params.merge(namespace_id: project.namespace,
+ project_id: project)
+ end
+
+    context 'when not logged in' do
+ let(:params) { {} }
+
+ it_behaves_like 'requires to login'
+ end
+
+ context 'when logged in' do
+ before do
+ make_logged_in
+ end
+
+ context 'when all required parameters are set' do
+ let(:params) do
+ {
+ cluster: {
+ gcp_cluster_name: 'new-cluster',
+ gcp_project_id: '111'
+ }
+ }
+ end
+
+ before do
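+          # stub the provisioning worker so the example does not enqueue real cluster provisioning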
+ expect(ClusterProvisionWorker).to receive(:perform_async) { }
+ end
+
+ it 'creates a new cluster' do
+ expect { subject }.to change { Gcp::Cluster.count }
+
+ expect(response).to redirect_to(project_cluster_path(project, project.cluster))
+ end
+ end
+
+ context 'when not all required parameters are set' do
+ render_views
+
+ let(:params) do
+ {
+ cluster: {
+ project_namespace: 'some namespace'
+ }
+ }
+ end
+
+ it 'shows an error message' do
+ expect { subject }.not_to change { Gcp::Cluster.count }
+
+ expect(response).to render_template(:new)
+ end
+ end
+ end
+ end
+
+ describe 'GET status' do
+ let(:cluster) { create(:gcp_cluster, :created_on_gke, project: project) }
+
+ subject do
+ get :status, namespace_id: project.namespace,
+ project_id: project,
+ id: cluster,
+ format: :json
+ end
+
+ it "responds with matching schema" do
+ subject
+
+ expect(response).to have_http_status(:ok)
+ expect(response).to match_response_schema('cluster_status')
+ end
+ end
+
+ describe 'GET show' do
+ render_views
+
+ let(:cluster) { create(:gcp_cluster, :created_on_gke, project: project) }
+
+ subject do
+ get :show, namespace_id: project.namespace,
+ project_id: project,
+ id: cluster
+ end
+
+    context 'when logged in as master' do
+      it "allows updating the cluster" do
+ subject
+
+ expect(response).to have_http_status(:ok)
+ expect(response.body).to include("Save")
+ end
+
+      it "allows removing the integration" do
+ subject
+
+ expect(response).to have_http_status(:ok)
+ expect(response.body).to include("Remove integration")
+ end
+ end
+
+    context 'when logged in as developer' do
+ let(:role) { :developer }
+
+      it "does not allow access to the page" do
+ subject
+
+ expect(response).to have_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'PUT update' do
+ render_views
+
+ let(:service) { project.build_kubernetes_service }
+ let(:cluster) { create(:gcp_cluster, :created_on_gke, project: project, service: service) }
+ let(:params) { {} }
+
+ subject do
+ put :update, params.merge(namespace_id: project.namespace,
+ project_id: project,
+ id: cluster)
+ end
+
+    context 'when logged in as master' do
+ context 'when valid params are used' do
+ let(:params) do
+ {
+ cluster: { enabled: false }
+ }
+ end
+
+ it "redirects back to show page" do
+ subject
+
+ expect(response).to redirect_to(project_cluster_path(project, project.cluster))
+ expect(flash[:notice]).to eq('Cluster was successfully updated.')
+ end
+ end
+
+ context 'when invalid params are used' do
+ let(:params) do
+ {
+ cluster: { project_namespace: 'my Namespace 321321321 #' }
+ }
+ end
+
+ it "rejects changes" do
+ subject
+
+ expect(response).to have_http_status(:ok)
+ expect(response).to render_template(:show)
+ end
+ end
+ end
+
+    context 'when logged in as developer' do
+ let(:role) { :developer }
+
+      it "does not allow updating the cluster" do
+ subject
+
+ expect(response).to have_http_status(:not_found)
+ end
+ end
+ end
+
+  describe 'DELETE destroy' do
+ let(:cluster) { create(:gcp_cluster, :created_on_gke, project: project) }
+
+ subject do
+ delete :destroy, namespace_id: project.namespace,
+ project_id: project,
+ id: cluster
+ end
+
+    context 'when logged in as master' do
+ it "redirects back to clusters list" do
+ subject
+
+ expect(response).to redirect_to(project_clusters_path(project))
+ expect(flash[:notice]).to eq('Cluster integration was successfully removed.')
+ end
+ end
+
+    context 'when logged in as developer' do
+ let(:role) { :developer }
+
+      it "does not allow destroying the cluster" do
+ subject
+
+ expect(response).to have_http_status(:not_found)
+ end
+ end
+ end
+
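+  # seeds the GCP OAuth token and expiry in the session, which the controller treats as being signed in to Google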
+ def make_logged_in
+ session[GoogleApi::CloudPlatform::Client.session_key_for_token] = '1234'
+ session[GoogleApi::CloudPlatform::Client.session_key_for_expires_at] = in_hour.to_i.to_s
+ end
+
+ def in_hour
+ Time.now + 1.hour
+ end
+end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 5d9403c23ac..053bd73fee3 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -207,162 +207,6 @@ describe Projects::IssuesController do
end
end
- describe 'PUT #update' do
- before do
- sign_in(user)
- project.team << [user, :developer]
- end
-
- it_behaves_like 'update invalid issuable', Issue
-
- context 'changing the assignee' do
- it 'limits the attributes exposed on the assignee' do
- assignee = create(:user)
- project.add_developer(assignee)
-
- put :update,
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: issue.iid,
- issue: { assignee_ids: [assignee.id] },
- format: :json
- body = JSON.parse(response.body)
-
- expect(body['assignees'].first.keys)
- .to match_array(%w(id name username avatar_url state web_url))
- end
- end
-
- context 'Akismet is enabled' do
- let(:project) { create(:project_empty_repo, :public) }
-
- before do
- stub_application_setting(recaptcha_enabled: true)
- allow_any_instance_of(SpamService).to receive(:check_for_spam?).and_return(true)
- end
-
- context 'when an issue is not identified as spam' do
- before do
- allow_any_instance_of(described_class).to receive(:verify_recaptcha).and_return(false)
- allow_any_instance_of(AkismetService).to receive(:spam?).and_return(false)
- end
-
- it 'normally updates the issue' do
- expect { update_issue(title: 'Foo') }.to change { issue.reload.title }.to('Foo')
- end
- end
-
- context 'when an issue is identified as spam' do
- before do
- allow_any_instance_of(AkismetService).to receive(:spam?).and_return(true)
- end
-
- context 'when captcha is not verified' do
- def update_spam_issue
- update_issue(title: 'Spam Title', description: 'Spam lives here')
- end
-
- before do
- allow_any_instance_of(described_class).to receive(:verify_recaptcha).and_return(false)
- end
-
- it 'rejects an issue recognized as a spam' do
- expect(Gitlab::Recaptcha).to receive(:load_configurations!).and_return(true)
- expect { update_spam_issue }.not_to change { issue.reload.title }
- end
-
- it 'rejects an issue recognized as a spam when recaptcha disabled' do
- stub_application_setting(recaptcha_enabled: false)
-
- expect { update_spam_issue }.not_to change { issue.reload.title }
- end
-
- it 'creates a spam log' do
- update_spam_issue
-
- spam_logs = SpamLog.all
-
- expect(spam_logs.count).to eq(1)
- expect(spam_logs.first.title).to eq('Spam Title')
- expect(spam_logs.first.recaptcha_verified).to be_falsey
- end
-
- context 'as HTML' do
- it 'renders verify template' do
- update_spam_issue
-
- expect(response).to render_template(:verify)
- end
- end
-
- context 'as JSON' do
- before do
- update_issue({ title: 'Spam Title', description: 'Spam lives here' }, format: :json)
- end
-
- it 'renders json errors' do
- expect(json_response)
- .to eql("errors" => ["Your issue has been recognized as spam. Please, change the content or solve the reCAPTCHA to proceed."])
- end
-
- it 'returns 422 status' do
- expect(response).to have_http_status(422)
- end
- end
- end
-
- context 'when captcha is verified' do
- let(:spammy_title) { 'Whatever' }
- let!(:spam_logs) { create_list(:spam_log, 2, user: user, title: spammy_title) }
-
- def update_verified_issue
- update_issue({ title: spammy_title },
- { spam_log_id: spam_logs.last.id,
- recaptcha_verification: true })
- end
-
- before do
- allow_any_instance_of(described_class).to receive(:verify_recaptcha)
- .and_return(true)
- end
-
- it 'redirect to issue page' do
- update_verified_issue
-
- expect(response)
- .to redirect_to(project_issue_path(project, issue))
- end
-
- it 'accepts an issue after recaptcha is verified' do
- expect { update_verified_issue }.to change { issue.reload.title }.to(spammy_title)
- end
-
- it 'marks spam log as recaptcha_verified' do
- expect { update_verified_issue }.to change { SpamLog.last.recaptcha_verified }.from(false).to(true)
- end
-
- it 'does not mark spam log as recaptcha_verified when it does not belong to current_user' do
- spam_log = create(:spam_log)
-
- expect { update_issue(spam_log_id: spam_log.id, recaptcha_verification: true) }
- .not_to change { SpamLog.last.recaptcha_verified }
- end
- end
- end
-
- def update_issue(issue_params = {}, additional_params = {})
- params = {
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: issue.iid,
- issue: issue_params
- }.merge(additional_params)
-
- put :update, params
- end
- end
- end
-
describe 'POST #move' do
before do
sign_in(user)
@@ -533,6 +377,146 @@ describe Projects::IssuesController do
end
end
+ describe 'PUT #update' do
+ def update_issue(issue_params: {}, additional_params: {}, id: nil)
+ id ||= issue.iid
+ params = {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: id,
+ issue: { title: 'New title' }.merge(issue_params),
+ format: :json
+ }.merge(additional_params)
+
+ put :update, params
+ end
+
+ def go(id:)
+ update_issue(id: id)
+ end
+
+ before do
+ sign_in(user)
+ project.team << [user, :developer]
+ end
+
+ it_behaves_like 'restricted action', success: 200
+ it_behaves_like 'update invalid issuable', Issue
+
+ context 'changing the assignee' do
+ it 'limits the attributes exposed on the assignee' do
+ assignee = create(:user)
+ project.add_developer(assignee)
+
+ update_issue(issue_params: { assignee_ids: [assignee.id] })
+
+ body = JSON.parse(response.body)
+
+ expect(body['assignees'].first.keys)
+ .to match_array(%w(id name username avatar_url state web_url))
+ end
+ end
+
+ context 'Akismet is enabled' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ stub_application_setting(recaptcha_enabled: true)
+ allow_any_instance_of(SpamService).to receive(:check_for_spam?).and_return(true)
+ end
+
+ context 'when an issue is not identified as spam' do
+ before do
+ allow_any_instance_of(described_class).to receive(:verify_recaptcha).and_return(false)
+ allow_any_instance_of(AkismetService).to receive(:spam?).and_return(false)
+ end
+
+ it 'normally updates the issue' do
+ expect { update_issue(issue_params: { title: 'Foo' }) }.to change { issue.reload.title }.to('Foo')
+ end
+ end
+
+ context 'when an issue is identified as spam' do
+ before do
+ allow_any_instance_of(AkismetService).to receive(:spam?).and_return(true)
+ end
+
+ context 'when captcha is not verified' do
+ before do
+ allow_any_instance_of(described_class).to receive(:verify_recaptcha).and_return(false)
+ end
+
+          it 'rejects an issue recognized as spam' do
+ expect { update_issue }.not_to change { issue.reload.title }
+ end
+
+          it 'rejects an issue recognized as spam when recaptcha is disabled' do
+ stub_application_setting(recaptcha_enabled: false)
+
+ expect { update_issue }.not_to change { issue.reload.title }
+ end
+
+ it 'creates a spam log' do
+ update_issue(issue_params: { title: 'Spam title' })
+
+ spam_logs = SpamLog.all
+
+ expect(spam_logs.count).to eq(1)
+ expect(spam_logs.first.title).to eq('Spam title')
+ expect(spam_logs.first.recaptcha_verified).to be_falsey
+ end
+
+ it 'renders json errors' do
+ update_issue
+
+ expect(json_response)
+ .to eql("errors" => ["Your issue has been recognized as spam. Please, change the content or solve the reCAPTCHA to proceed."])
+ end
+
+ it 'returns 422 status' do
+ update_issue
+
+ expect(response).to have_http_status(422)
+ end
+ end
+
+ context 'when captcha is verified' do
+ let(:spammy_title) { 'Whatever' }
+ let!(:spam_logs) { create_list(:spam_log, 2, user: user, title: spammy_title) }
+
+ def update_verified_issue
+ update_issue(
+ issue_params: { title: spammy_title },
+ additional_params: { spam_log_id: spam_logs.last.id, recaptcha_verification: true })
+ end
+
+ before do
+ allow_any_instance_of(described_class).to receive(:verify_recaptcha)
+ .and_return(true)
+ end
+
+          it 'returns 200 status' do
+            update_verified_issue
+
+            expect(response).to have_http_status(200)
+          end
+
+ it 'accepts an issue after recaptcha is verified' do
+ expect { update_verified_issue }.to change { issue.reload.title }.to(spammy_title)
+ end
+
+ it 'marks spam log as recaptcha_verified' do
+ expect { update_verified_issue }.to change { SpamLog.last.recaptcha_verified }.from(false).to(true)
+ end
+
+ it 'does not mark spam log as recaptcha_verified when it does not belong to current_user' do
+ spam_log = create(:spam_log)
+
+ expect { update_issue(issue_params: { spam_log_id: spam_log.id, recaptcha_verification: true }) }
+ .not_to change { SpamLog.last.recaptcha_verified }
+ end
+ end
+ end
+ end
+ end
+
describe 'GET #show' do
it_behaves_like 'restricted action', success: 200
@@ -573,29 +557,6 @@ describe Projects::IssuesController do
end
end
end
-
- describe 'GET #edit' do
- it_behaves_like 'restricted action', success: 200
-
- def go(id:)
- get :edit,
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: id
- end
- end
-
- describe 'PUT #update' do
- it_behaves_like 'restricted action', success: 302
-
- def go(id:)
- put :update,
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: id,
- issue: { title: 'New title' }
- end
- end
end
describe 'POST #create' do
@@ -900,5 +861,37 @@ describe Projects::IssuesController do
expect(JSON.parse(response.body).first.keys).to match_array(%w[id reply_id expanded notes individual_note])
end
+
+ context 'with cross-reference system note', :request_store do
+ let(:new_issue) { create(:issue) }
+ let(:cross_reference) { "mentioned in #{new_issue.to_reference(issue.project)}" }
+
+ before do
+ create(:discussion_note_on_issue, :system, noteable: issue, project: issue.project, note: cross_reference)
+ end
+
+ it 'filters notes that the user should not see' do
+ get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid
+
+ expect(JSON.parse(response.body).count).to eq(1)
+ end
+
+ it 'does not result in N+1 queries' do
+ # Instantiate the controller variables to ensure QueryRecorder has an accurate base count
+ get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid
+
+ RequestStore.clear!
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid
+ end.count
+
+ RequestStore.clear!
+
+ create_list(:discussion_note_on_issue, 2, :system, noteable: issue, project: issue.project, note: cross_reference)
+
+ expect { get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid }.not_to exceed_query_limit(control_count)
+ end
+ end
end
end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index fdd7e6f173f..d01339a0b88 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -216,7 +216,7 @@ describe Projects::JobsController do
expect(json_response['text']).to eq status.text
expect(json_response['label']).to eq status.label
expect(json_response['icon']).to eq status.icon
- expect(json_response['favicon']).to eq "/assets/ci_favicons/#{status.favicon}.ico"
+ expect(json_response['favicon']).to match_asset_path "/assets/ci_favicons/#{status.favicon}.ico"
end
end
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index fad2c8f3ab7..7260350d5fb 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Projects::MergeRequests::DiffsController do
+ include ProjectForksHelper
+
let(:project) { create(:project, :repository) }
let(:user) { project.owner }
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
@@ -37,12 +39,12 @@ describe Projects::MergeRequests::DiffsController do
render_views
let(:project) { create(:project, :repository) }
- let(:fork_project) { create(:forked_project_with_submodules) }
- let(:merge_request) { create(:merge_request_with_diffs, source_project: fork_project, source_branch: 'add-submodule-version-bump', target_branch: 'master', target_project: project) }
+ let(:forked_project) { fork_project_with_submodules(project) }
+ let(:merge_request) { create(:merge_request_with_diffs, source_project: forked_project, source_branch: 'add-submodule-version-bump', target_branch: 'master', target_project: project) }
before do
- fork_project.build_forked_project_link(forked_to_project_id: fork_project.id, forked_from_project_id: project.id)
- fork_project.save
+ project.add_developer(user)
+
merge_request.reload
go
end
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 6775012bab5..707e7c32283 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Projects::MergeRequestsController do
+ include ProjectForksHelper
+
let(:project) { create(:project, :repository) }
let(:user) { project.owner }
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
@@ -96,18 +98,6 @@ describe Projects::MergeRequestsController do
expect(response).to match_response_schema('entities/merge_request')
end
end
-
- context 'number of queries', :request_store do
- it 'verifies number of queries' do
- # pre-create objects
- merge_request
-
- recorded = ActiveRecord::QueryRecorder.new { go(format: :json) }
-
- expect(recorded.count).to be_within(5).of(30)
- expect(recorded.cached_count).to eq(0)
- end
- end
end
describe "as diff" do
@@ -216,14 +206,11 @@ describe Projects::MergeRequestsController do
context 'there is no source project' do
let(:project) { create(:project, :repository) }
- let(:fork_project) { create(:forked_project_with_submodules) }
- let(:merge_request) { create(:merge_request, source_project: fork_project, source_branch: 'add-submodule-version-bump', target_branch: 'master', target_project: project) }
+ let(:forked_project) { fork_project_with_submodules(project) }
+ let!(:merge_request) { create(:merge_request, source_project: forked_project, source_branch: 'add-submodule-version-bump', target_branch: 'master', target_project: project) }
before do
- fork_project.build_forked_project_link(forked_to_project_id: fork_project.id, forked_from_project_id: project.id)
- fork_project.save
- merge_request.reload
- fork_project.destroy
+ forked_project.destroy
end
it 'closes MR without errors' do
@@ -611,21 +598,16 @@ describe Projects::MergeRequestsController do
describe 'GET ci_environments_status' do
context 'the environment is from a forked project' do
- let!(:forked) { create(:project, :repository) }
+ let!(:forked) { fork_project(project, user, repository: true) }
let!(:environment) { create(:environment, project: forked) }
let!(:deployment) { create(:deployment, environment: environment, sha: forked.commit.id, ref: 'master') }
let(:admin) { create(:admin) }
let(:merge_request) do
- create(:forked_project_link, forked_to_project: forked,
- forked_from_project: project)
-
create(:merge_request, source_project: forked, target_project: project)
end
before do
- forked.team << [user, :master]
-
get :ci_environments_status,
namespace_id: merge_request.project.namespace.to_param,
project_id: merge_request.project,
@@ -658,7 +640,7 @@ describe Projects::MergeRequestsController do
expect(json_response['text']).to eq status.text
expect(json_response['label']).to eq status.label
expect(json_response['icon']).to eq status.icon
- expect(json_response['favicon']).to eq "/assets/ci_favicons/#{status.favicon}.ico"
+ expect(json_response['favicon']).to match_asset_path "/assets/ci_favicons/#{status.favicon}.ico"
end
end
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 6ffe41b8608..135fd6449ff 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Projects::NotesController do
+ include ProjectForksHelper
+
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:issue) { create(:issue, project: project) }
@@ -120,6 +122,40 @@ describe Projects::NotesController do
expect(note_json[:diff_discussion_html]).to be_nil
end
end
+
+ context 'with cross-reference system note', :request_store do
+ let(:new_issue) { create(:issue) }
+ let(:cross_reference) { "mentioned in #{new_issue.to_reference(issue.project)}" }
+
+ before do
+ note
+ create(:discussion_note_on_issue, :system, noteable: issue, project: issue.project, note: cross_reference)
+ end
+
+ it 'filters notes that the user should not see' do
+ get :index, request_params
+
+ expect(parsed_response[:notes].count).to eq(1)
+ expect(note_json[:id]).to eq(note.id)
+ end
+
+ it 'does not result in N+1 queries' do
+ # Instantiate the controller variables to ensure QueryRecorder has an accurate base count
+ get :index, request_params
+
+ RequestStore.clear!
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ get :index, request_params
+ end.count
+
+ RequestStore.clear!
+
+ create_list(:discussion_note_on_issue, 2, :system, noteable: issue, project: issue.project, note: cross_reference)
+
+ expect { get :index, request_params }.not_to exceed_query_limit(control_count)
+ end
+ end
end
describe 'POST create' do
@@ -176,18 +212,16 @@ describe Projects::NotesController do
context 'when creating a commit comment from an MR fork' do
let(:project) { create(:project, :repository) }
- let(:fork_project) do
- create(:project, :repository).tap do |fork|
- create(:forked_project_link, forked_to_project: fork, forked_from_project: project)
- end
+ let(:forked_project) do
+ fork_project(project, nil, repository: true)
end
let(:merge_request) do
- create(:merge_request, source_project: fork_project, target_project: project, source_branch: 'feature', target_branch: 'master')
+ create(:merge_request, source_project: forked_project, target_project: project, source_branch: 'feature', target_branch: 'master')
end
let(:existing_comment) do
- create(:note_on_commit, note: 'a note', project: fork_project, commit_id: merge_request.commit_shas.first)
+ create(:note_on_commit, note: 'a note', project: forked_project, commit_id: merge_request.commit_shas.first)
end
def post_create(extra_params = {})
@@ -197,7 +231,7 @@ describe Projects::NotesController do
project_id: project,
target_type: 'merge_request',
target_id: merge_request.id,
- note_project_id: fork_project.id,
+ note_project_id: forked_project.id,
in_reply_to_discussion_id: existing_comment.discussion_id
}.merge(extra_params)
end
@@ -219,16 +253,66 @@ describe Projects::NotesController do
end
context 'when the user has access to the fork' do
- let(:discussion) { fork_project.notes.find_discussion(existing_comment.discussion_id) }
+ let(:discussion) { forked_project.notes.find_discussion(existing_comment.discussion_id) }
before do
- fork_project.add_developer(user)
+ forked_project.add_developer(user)
existing_comment
end
it 'creates the note' do
- expect { post_create }.to change { fork_project.notes.count }.by(1)
+ expect { post_create }.to change { forked_project.notes.count }.by(1)
+ end
+ end
+ end
+
+ context 'when the merge request discussion is locked' do
+ before do
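+        # make the merge request publicly visible but lock its discussion so only team members can comment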
+ project.update_attribute(:visibility_level, Gitlab::VisibilityLevel::PUBLIC)
+ merge_request.update_attribute(:discussion_locked, true)
+ end
+
+ context 'when a noteable is not found' do
+ it 'returns 404 status' do
+ request_params[:note][:noteable_id] = 9999
+ post :create, request_params.merge(format: :json)
+
+ expect(response).to have_http_status(404)
+ end
+ end
+
+ context 'when a user is a team member' do
+ it 'returns 302 status for html' do
+ post :create, request_params
+
+ expect(response).to have_http_status(302)
+ end
+
+ it 'returns 200 status for json' do
+ post :create, request_params.merge(format: :json)
+
+ expect(response).to have_http_status(200)
+ end
+
+ it 'creates a new note' do
+ expect { post :create, request_params }.to change { Note.count }.by(1)
+ end
+ end
+
+ context 'when a user is not a team member' do
+ before do
+ project.project_member(user).destroy
+ end
+
+ it 'returns 404 status' do
+ post :create, request_params
+
+ expect(response).to have_http_status(404)
+ end
+
+ it 'does not create a new note' do
+ expect { post :create, request_params }.not_to change { Note.count }
end
end
end
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index f9d77c7ad03..167e80ed9cd 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -142,7 +142,7 @@ describe Projects::PipelinesController do
expect(json_response['text']).to eq status.text
expect(json_response['label']).to eq status.label
expect(json_response['icon']).to eq status.icon
- expect(json_response['favicon']).to eq "/assets/ci_favicons/#{status.favicon}.ico"
+ expect(json_response['favicon']).to match_asset_path("/assets/ci_favicons/#{status.favicon}.ico")
end
end
diff --git a/spec/controllers/projects/pipelines_settings_controller_spec.rb b/spec/controllers/projects/pipelines_settings_controller_spec.rb
new file mode 100644
index 00000000000..ee46ad00947
--- /dev/null
+++ b/spec/controllers/projects/pipelines_settings_controller_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+
+describe Projects::PipelinesSettingsController do
+ set(:user) { create(:user) }
+ set(:project_auto_devops) { create(:project_auto_devops) }
+ let(:project) { project_auto_devops.project }
+
+ before do
+ project.add_master(user)
+
+ sign_in(user)
+ end
+
+ describe 'PATCH update' do
+ before do
+ patch :update,
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ project: {
+ auto_devops_attributes: params
+ }
+ end
+
+ context 'when updating the auto_devops settings' do
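+      # an empty string for `enabled` is expected to be stored as nil, i.e. follow the instance default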
+ let(:params) { { enabled: '', domain: 'mepmep.md' } }
+
+ it 'redirects to the settings page' do
+ expect(response).to have_http_status(302)
+ expect(flash[:notice]).to eq("Pipelines settings for '#{project.name}' were successfully updated.")
+ end
+
+ context 'following the instance default' do
+ let(:params) { { enabled: '' } }
+
+ it 'allows enabled to be set to nil' do
+ project_auto_devops.reload
+
+ expect(project_auto_devops.enabled).to be_nil
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/registry/repositories_controller_spec.rb b/spec/controllers/projects/registry/repositories_controller_spec.rb
index 2805968dcd9..5d9d5351687 100644
--- a/spec/controllers/projects/registry/repositories_controller_spec.rb
+++ b/spec/controllers/projects/registry/repositories_controller_spec.rb
@@ -42,6 +42,13 @@ describe Projects::Registry::RepositoriesController do
expect { go_to_index }.to change { ContainerRepository.all.count }.by(1)
expect(ContainerRepository.first).to be_root_repository
end
+
+      it 'returns json with a list of repositories' do
+ go_to_index(format: :json)
+
+ expect(response).to have_http_status(:ok)
+ expect(response).to match_response_schema('registry/repositories')
+ end
end
context 'when there are no tags for this repository' do
@@ -58,6 +65,31 @@ describe Projects::Registry::RepositoriesController do
it 'does not ensure root container repository' do
expect { go_to_index }.not_to change { ContainerRepository.all.count }
end
+
+ it 'responds with json if asked' do
+ go_to_index(format: :json)
+
+ expect(response).to have_http_status(:ok)
+ expect(json_response).to be_kind_of(Array)
+ end
+ end
+ end
+ end
+
+ describe 'DELETE destroy' do
+ context 'when root container repository exists' do
+ let!(:repository) do
+ create(:container_repository, :root, project: project)
+ end
+
+ before do
+ stub_container_registry_tags(repository: :any, tags: [])
+ end
+
+ it 'deletes a repository' do
+ expect { delete_repository(repository) }.to change { ContainerRepository.all.count }.by(-1)
+
+ expect(response).to have_http_status(:no_content)
end
end
end
@@ -77,8 +109,16 @@ describe Projects::Registry::RepositoriesController do
end
end
- def go_to_index
+ def go_to_index(format: :html)
get :index, namespace_id: project.namespace,
- project_id: project
+ project_id: project,
+ format: format
+ end
+
+ def delete_repository(repository)
+ delete :destroy, namespace_id: project.namespace,
+ project_id: project,
+ id: repository,
+ format: :json
end
end
diff --git a/spec/controllers/projects/registry/tags_controller_spec.rb b/spec/controllers/projects/registry/tags_controller_spec.rb
index f4af3587d23..bb702ebeb23 100644
--- a/spec/controllers/projects/registry/tags_controller_spec.rb
+++ b/spec/controllers/projects/registry/tags_controller_spec.rb
@@ -4,24 +4,83 @@ describe Projects::Registry::TagsController do
let(:user) { create(:user) }
let(:project) { create(:project, :private) }
+ let(:repository) do
+ create(:container_repository, name: 'image', project: project)
+ end
+
before do
sign_in(user)
stub_container_registry_config(enabled: true)
end
- context 'when user has access to registry' do
+ describe 'GET index' do
+ let(:tags) do
+ Array.new(40) { |i| "tag#{i}" }
+ end
+
before do
- project.add_developer(user)
+ stub_container_registry_tags(repository: /image/, tags: tags)
end
- describe 'POST destroy' do
+ context 'when user can control the registry' do
+ before do
+ project.add_developer(user)
+ end
+
+      it 'receives a list of tags' do
+ get_tags
+
+ expect(response).to have_http_status(:ok)
+ expect(response).to match_response_schema('registry/tags')
+ expect(response).to include_pagination_headers
+ end
+ end
+
+ context 'when user can read the registry' do
+ before do
+ project.add_reporter(user)
+ end
+
+      it 'receives a list of tags' do
+ get_tags
+
+ expect(response).to have_http_status(:ok)
+ expect(response).to match_response_schema('registry/tags')
+ expect(response).to include_pagination_headers
+ end
+ end
+
+ context 'when user does not have access to registry' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'does not receive a list of tags' do
+ get_tags
+
+ expect(response).to have_http_status(:not_found)
+ end
+ end
+
+ private
+
+ def get_tags
+ get :index, namespace_id: project.namespace,
+ project_id: project,
+ repository_id: repository,
+ format: :json
+ end
+ end
+
+ describe 'POST destroy' do
+ context 'when user has access to registry' do
+ before do
+ project.add_developer(user)
+ end
+
context 'when there is matching tag present' do
before do
- stub_container_registry_tags(repository: /image/, tags: %w[rc1 test.])
- end
-
- let(:repository) do
- create(:container_repository, name: 'image', project: project)
+ stub_container_registry_tags(repository: repository.path, tags: %w[rc1 test.])
end
it 'makes it possible to delete regular tag' do
@@ -37,12 +96,15 @@ describe Projects::Registry::TagsController do
end
end
end
- end
- def destroy_tag(name)
- post :destroy, namespace_id: project.namespace,
- project_id: project,
- repository_id: repository,
- id: name
+ private
+
+ def destroy_tag(name)
+ post :destroy, namespace_id: project.namespace,
+ project_id: project,
+ repository_id: repository,
+ id: name,
+ format: :json
+ end
end
end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 4459e227fb3..0544afe31ed 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -1,6 +1,8 @@
require('spec_helper')
describe ProjectsController do
+ include ProjectForksHelper
+
let(:project) { create(:project) }
let(:public_project) { create(:project, :public) }
let(:user) { create(:user) }
@@ -139,8 +141,9 @@ describe ProjectsController do
end
end
- context 'when the storage is not available', broken_storage: true do
- let(:project) { create(:project, :broken_storage) }
+ context 'when the storage is not available', :broken_storage do
+ set(:project) { create(:project, :broken_storage) }
+
before do
project.add_developer(user)
sign_in(user)
@@ -289,6 +292,24 @@ describe ProjectsController do
end
end
+ it 'updates Fast Forward Merge attributes' do
+ controller.instance_variable_set(:@project, project)
+
+ params = {
+ merge_method: :ff
+ }
+
+ put :update,
+ namespace_id: project.namespace,
+ id: project.id,
+ project: params
+
+ expect(response).to have_http_status(302)
+ params.each do |param, value|
+ expect(project.public_send(param)).to eq(value)
+ end
+ end
+
def update_project(**parameters)
put :update,
namespace_id: project.namespace.path,
@@ -358,10 +379,10 @@ describe ProjectsController do
context "when the project is forked" do
let(:project) { create(:project, :repository) }
- let(:fork_project) { create(:project, :repository, forked_from_project: project) }
+ let(:forked_project) { fork_project(project, nil, repository: true) }
let(:merge_request) do
create(:merge_request,
- source_project: fork_project,
+ source_project: forked_project,
target_project: project)
end
@@ -369,7 +390,7 @@ describe ProjectsController do
project.merge_requests << merge_request
sign_in(admin)
- delete :destroy, namespace_id: fork_project.namespace, id: fork_project
+ delete :destroy, namespace_id: forked_project.namespace, id: forked_project
expect(merge_request.reload.state).to eq('closed')
end
@@ -436,18 +457,14 @@ describe ProjectsController do
end
context 'with forked project' do
- let(:project_fork) { create(:project, :repository, namespace: user.namespace) }
-
- before do
- create(:forked_project_link, forked_to_project: project_fork)
- end
+ let(:forked_project) { fork_project(create(:project, :public), user) }
it 'removes fork from project' do
delete(:remove_fork,
- namespace_id: project_fork.namespace.to_param,
- id: project_fork.to_param, format: :js)
+ namespace_id: forked_project.namespace.to_param,
+ id: forked_project.to_param, format: :js)
- expect(project_fork.forked?).to be_falsey
+ expect(forked_project.reload.forked?).to be_falsey
expect(flash[:notice]).to eq('The fork relationship has been removed.')
expect(response).to render_template(:remove_fork)
end
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 5a4ab39ab86..1d3ddfbd220 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -76,12 +76,68 @@ describe RegistrationsController do
sign_in(user)
end
- it 'schedules the user for destruction' do
- expect(DeleteUserWorker).to receive(:perform_async).with(user.id, user.id, {})
+ def expect_failure(message)
+ expect(flash[:alert]).to eq(message)
+ expect(response.status).to eq(303)
+ expect(response).to redirect_to profile_account_path
+ end
+
+ def expect_password_failure
+ expect_failure('Invalid password')
+ end
+
+ def expect_username_failure
+ expect_failure('Invalid username')
+ end
+
+ def expect_success
+ expect(flash[:notice]).to eq 'Account scheduled for removal.'
+ expect(response.status).to eq(303)
+ expect(response).to redirect_to new_user_session_path
+ end
- post(:destroy)
+ context 'user requires password confirmation' do
+ it 'fails if password confirmation is not provided' do
+ post :destroy
- expect(response.status).to eq(302)
+ expect_password_failure
+ end
+
+ it 'fails if password confirmation is wrong' do
+ post :destroy, password: 'wrong password'
+
+ expect_password_failure
+ end
+
+ it 'succeeds if password is confirmed' do
+ post :destroy, password: '12345678'
+
+ expect_success
+ end
+ end
+
+ context 'user does not require password confirmation' do
+ before do
+ stub_application_setting(password_authentication_enabled: false)
+ end
+
+ it 'fails if username confirmation is not provided' do
+ post :destroy
+
+ expect_username_failure
+ end
+
+ it 'fails if username confirmation is wrong' do
+ post :destroy, username: 'wrong username'
+
+ expect_username_failure
+ end
+
+ it 'succeeds if username is confirmed' do
+ post :destroy, username: user.username
+
+ expect_success
+ end
end
end
end
diff --git a/spec/factories/ci/build_trace_section_names.rb b/spec/factories/ci/build_trace_section_names.rb
new file mode 100644
index 00000000000..1c16225f0e5
--- /dev/null
+++ b/spec/factories/ci/build_trace_section_names.rb
@@ -0,0 +1,6 @@
+FactoryGirl.define do
+ factory :ci_build_trace_section_name, class: Ci::BuildTraceSectionName do
+ sequence(:name) { |n| "section_#{n}" }
+ project factory: :project
+ end
+end
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index e5ea6b41ea3..f994c2df821 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -47,6 +47,7 @@ FactoryGirl.define do
trait :invalid do
config(rspec: nil)
+ failure_reason :config_error
end
trait :blocked do
diff --git a/spec/factories/deployments.rb b/spec/factories/deployments.rb
index e5abfd67d60..0dd1238d6e2 100644
--- a/spec/factories/deployments.rb
+++ b/spec/factories/deployments.rb
@@ -12,7 +12,7 @@ FactoryGirl.define do
deployment.project ||= deployment.environment.project
unless deployment.project.repository_exists?
- allow(deployment.project.repository).to receive(:fetch_ref)
+ allow(deployment.project.repository).to receive(:create_ref)
end
end
end
diff --git a/spec/factories/emails.rb b/spec/factories/emails.rb
index 8303861bcfe..c9ab87a15aa 100644
--- a/spec/factories/emails.rb
+++ b/spec/factories/emails.rb
@@ -2,5 +2,7 @@ FactoryGirl.define do
factory :email do
user
email { generate(:email_alias) }
+
+ trait(:confirmed) { confirmed_at Time.now }
end
end
diff --git a/spec/factories/fork_networks.rb b/spec/factories/fork_networks.rb
new file mode 100644
index 00000000000..f42d36f3d19
--- /dev/null
+++ b/spec/factories/fork_networks.rb
@@ -0,0 +1,5 @@
+FactoryGirl.define do
+ factory :fork_network do
+ association :root_project, factory: :project
+ end
+end
diff --git a/spec/factories/gcp/cluster.rb b/spec/factories/gcp/cluster.rb
new file mode 100644
index 00000000000..630e40da888
--- /dev/null
+++ b/spec/factories/gcp/cluster.rb
@@ -0,0 +1,38 @@
+FactoryGirl.define do
+ factory :gcp_cluster, class: Gcp::Cluster do
+ project
+ user
+ enabled true
+ gcp_project_id 'gcp-project-12345'
+ gcp_cluster_name 'test-cluster'
+ gcp_cluster_zone 'us-central1-a'
+ gcp_cluster_size 1
+ gcp_machine_type 'n1-standard-4'
+
+ trait :with_kubernetes_service do
+ after(:create) do |cluster, evaluator|
+ create(:kubernetes_service, project: cluster.project).tap do |service|
+ cluster.update(service: service)
+ end
+ end
+ end
+
+ trait :custom_project_namespace do
+ project_namespace 'sample-app'
+ end
+
+ trait :created_on_gke do
+ status_event :make_created
+ endpoint '111.111.111.111'
+ ca_cert 'xxxxxx'
+ kubernetes_token 'xxxxxx'
+ username 'xxxxxx'
+ password 'xxxxxx'
+ end
+
+ trait :errored do
+ status_event :make_errored
+ status_reason 'general error'
+ end
+ end
+end
diff --git a/spec/factories/gitaly/commit.rb b/spec/factories/gitaly/commit.rb
new file mode 100644
index 00000000000..e7966cee77b
--- /dev/null
+++ b/spec/factories/gitaly/commit.rb
@@ -0,0 +1,17 @@
+FactoryGirl.define do
+ sequence(:gitaly_commit_id) { Digest::SHA1.hexdigest(Time.now.to_f.to_s) }
+
+ factory :gitaly_commit, class: Gitaly::GitCommit do
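+    # Gitaly::GitCommit is a Protobuf message, not an ActiveRecord model, so nothing is persisted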
+ skip_create
+
+ id { generate(:gitaly_commit_id) }
+ parent_ids do
+ ids = [generate(:gitaly_commit_id), generate(:gitaly_commit_id)]
+ Google::Protobuf::RepeatedField.new(:string, ids)
+ end
+ subject { "My commit" }
+ body { subject + "\nMy body" }
+ author { build(:gitaly_commit_author) }
+ committer { build(:gitaly_commit_author) }
+ end
+end
diff --git a/spec/factories/gitaly/commit_author.rb b/spec/factories/gitaly/commit_author.rb
new file mode 100644
index 00000000000..341873a2002
--- /dev/null
+++ b/spec/factories/gitaly/commit_author.rb
@@ -0,0 +1,9 @@
+FactoryGirl.define do
+ factory :gitaly_commit_author, class: Gitaly::CommitAuthor do
+ skip_create
+
+ name { generate(:name) }
+ email { generate(:email) }
+ date { Google::Protobuf::Timestamp.new(seconds: Time.now.to_i) }
+ end
+end
diff --git a/spec/factories/gpg_key_subkeys.rb b/spec/factories/gpg_key_subkeys.rb
new file mode 100644
index 00000000000..66ecb44d84b
--- /dev/null
+++ b/spec/factories/gpg_key_subkeys.rb
@@ -0,0 +1,10 @@
+require_relative '../support/gpg_helpers'
+
+FactoryGirl.define do
+ factory :gpg_key_subkey do
+ gpg_key
+
+ sequence(:keyid) { |n| "keyid-#{n}" }
+ sequence(:fingerprint) { |n| "fingerprint-#{n}" }
+ end
+end
diff --git a/spec/factories/gpg_keys.rb b/spec/factories/gpg_keys.rb
index 1258dce8940..93218e5763e 100644
--- a/spec/factories/gpg_keys.rb
+++ b/spec/factories/gpg_keys.rb
@@ -4,5 +4,9 @@ FactoryGirl.define do
factory :gpg_key do
key GpgHelpers::User1.public_key
user
+
+ factory :gpg_key_with_subkeys do
+ key GpgHelpers::User1.public_key_with_extra_signing_key
+ end
end
end
diff --git a/spec/factories/gpg_signature.rb b/spec/factories/gpg_signature.rb
index c0beecf0bea..e9798ff6a41 100644
--- a/spec/factories/gpg_signature.rb
+++ b/spec/factories/gpg_signature.rb
@@ -5,7 +5,7 @@ FactoryGirl.define do
commit_sha { Digest::SHA1.hexdigest(SecureRandom.hex) }
project
gpg_key
- gpg_key_primary_keyid { gpg_key.primary_keyid }
+ gpg_key_primary_keyid { gpg_key.keyid }
verification_status :verified
end
end
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index cbec716d6ea..2c732aaf4ed 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -22,6 +22,11 @@ FactoryGirl.define do
trait :with_diffs do
end
+ trait :with_image_diffs do
+ source_branch "add_images_and_changes"
+ target_branch "master"
+ end
+
trait :without_diffs do
source_branch "improve/awesome"
target_branch "master"
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 7493b0a8b35..4034e7905ad 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -143,6 +143,16 @@ FactoryGirl.define do
end
end
+ trait :wiki_repo do
+ after(:create) do |project|
+ raise 'Failed to create wiki repository!' unless project.create_wiki
+ end
+ end
+
+ trait :read_only do
+ repository_read_only true
+ end
+
trait :broken_repo do
after(:create) do |project|
raise "Failed to create repository!" unless project.create_repository
diff --git a/spec/factories/user_custom_attributes.rb b/spec/factories/user_custom_attributes.rb
new file mode 100644
index 00000000000..278cf290d4f
--- /dev/null
+++ b/spec/factories/user_custom_attributes.rb
@@ -0,0 +1,7 @@
+FactoryGirl.define do
+ factory :user_custom_attribute do
+ user
+ sequence(:key) { |n| "key#{n}" }
+ sequence(:value) { |n| "value#{n}" }
+ end
+end
diff --git a/spec/features/admin/admin_abuse_reports_spec.rb b/spec/features/admin/admin_abuse_reports_spec.rb
index 2144f6ba635..766cd4b0090 100644
--- a/spec/features/admin/admin_abuse_reports_spec.rb
+++ b/spec/features/admin/admin_abuse_reports_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe "Admin::AbuseReports", js: true do
+describe "Admin::AbuseReports", :js do
let(:user) { create(:user) }
context 'as an admin' do
diff --git a/spec/features/admin/admin_broadcast_messages_spec.rb b/spec/features/admin/admin_broadcast_messages_spec.rb
index cbccf370514..9cb351282a0 100644
--- a/spec/features/admin/admin_broadcast_messages_spec.rb
+++ b/spec/features/admin/admin_broadcast_messages_spec.rb
@@ -40,7 +40,7 @@ feature 'Admin Broadcast Messages' do
expect(page).not_to have_content 'Migration to new server'
end
- scenario 'Live preview a customized broadcast message', js: true do
+ scenario 'Live preview a customized broadcast message', :js do
fill_in 'broadcast_message_message', with: "Live **Markdown** previews. :tada:"
page.within('.broadcast-message-preview') do
diff --git a/spec/features/admin/admin_disables_two_factor_spec.rb b/spec/features/admin/admin_disables_two_factor_spec.rb
index e214ae6b78d..6a97378391b 100644
--- a/spec/features/admin/admin_disables_two_factor_spec.rb
+++ b/spec/features/admin/admin_disables_two_factor_spec.rb
@@ -1,7 +1,7 @@
require 'rails_helper'
feature 'Admin disables 2FA for a user' do
- scenario 'successfully', js: true do
+ scenario 'successfully', :js do
sign_in(create(:admin))
user = create(:user, :two_factor)
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index 3768727d8ae..771fb5253da 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -52,7 +52,7 @@ feature 'Admin Groups' do
expect_selected_visibility(internal)
end
- scenario 'when entered in group path, it auto filled the group name', js: true do
+ scenario 'when entered in group path, it auto filled the group name', :js do
visit admin_groups_path
click_link "New group"
group_path = 'gitlab'
@@ -81,7 +81,7 @@ feature 'Admin Groups' do
expect_selected_visibility(group.visibility_level)
end
- scenario 'edit group path does not change group name', js: true do
+ scenario 'edit group path does not change group name', :js do
group = create(:group, :private)
visit admin_group_edit_path(group)
@@ -93,7 +93,7 @@ feature 'Admin Groups' do
end
end
- describe 'add user into a group', js: true do
+ describe 'add user into a group', :js do
shared_context 'adds user into a group' do
it do
visit admin_group_path(group)
@@ -124,7 +124,7 @@ feature 'Admin Groups' do
group.add_user(:user, Gitlab::Access::OWNER)
end
- it 'adds admin a to a group as developer', js: true do
+    it 'adds an admin to a group as a developer', :js do
visit group_group_members_path(group)
page.within '.users-group-form' do
@@ -141,7 +141,7 @@ feature 'Admin Groups' do
end
end
- describe 'admin remove himself from a group', js: true do
+  describe 'admin removes himself from a group', :js do
it 'removes admin from the group' do
group.add_user(current_user, Gitlab::Access::DEVELOPER)
diff --git a/spec/features/admin/admin_health_check_spec.rb b/spec/features/admin/admin_health_check_spec.rb
index 37fd3e171eb..09e6965849a 100644
--- a/spec/features/admin/admin_health_check_spec.rb
+++ b/spec/features/admin/admin_health_check_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature "Admin Health Check", feature: true, broken_storage: true do
+feature "Admin Health Check", :feature, :broken_storage do
include StubENV
before do
diff --git a/spec/features/admin/admin_hooks_spec.rb b/spec/features/admin/admin_hooks_spec.rb
index 91f08dbad5d..2e65fcc5231 100644
--- a/spec/features/admin/admin_hooks_spec.rb
+++ b/spec/features/admin/admin_hooks_spec.rb
@@ -74,7 +74,7 @@ describe 'Admin::Hooks', :js do
end
end
- describe 'Test', js: true do
+ describe 'Test', :js do
before do
WebMock.stub_request(:post, @system_hook.url)
visit admin_hooks_path
diff --git a/spec/features/admin/admin_labels_spec.rb b/spec/features/admin/admin_labels_spec.rb
index ae9b47299e6..a5834056a1d 100644
--- a/spec/features/admin/admin_labels_spec.rb
+++ b/spec/features/admin/admin_labels_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'admin issues labels' do
end
end
- it 'deletes all labels', js: true do
+ it 'deletes all labels', :js do
page.within '.labels' do
page.all('.btn-remove').each do |remove|
remove.click
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index f4f2505d436..94b12105088 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -28,7 +28,7 @@ describe "Admin::Projects" do
expect(page).not_to have_content(archived_project.name)
end
- it 'renders all projects', js: true do
+ it 'renders all projects', :js do
find(:css, '#sort-projects-dropdown').click
click_link 'Show archived projects'
@@ -37,7 +37,7 @@ describe "Admin::Projects" do
expect(page).to have_xpath("//span[@class='label label-warning']", text: 'archived')
end
- it 'renders only archived projects', js: true do
+ it 'renders only archived projects', :js do
find(:css, '#sort-projects-dropdown').click
click_link 'Show archived projects only'
@@ -74,7 +74,7 @@ describe "Admin::Projects" do
.to receive(:move_uploads_to_new_namespace).and_return(true)
end
- it 'transfers project to group web', js: true do
+ it 'transfers project to group web', :js do
visit admin_project_path(project)
click_button 'Search for Namespace'
@@ -91,7 +91,7 @@ describe "Admin::Projects" do
project.team << [user, :master]
end
- it 'adds admin a to a project as developer', js: true do
+    it 'adds an admin to a project as a developer', :js do
visit project_project_members_path(project)
page.within '.users-project-form' do
diff --git a/spec/features/admin/admin_users_impersonation_tokens_spec.rb b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
index 034682dae27..388d30828a7 100644
--- a/spec/features/admin/admin_users_impersonation_tokens_spec.rb
+++ b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Admin > Users > Impersonation Tokens', js: true do
+describe 'Admin > Users > Impersonation Tokens', :js do
let(:admin) { create(:admin) }
let!(:user) { create(:user) }
diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb
index e2e2b13cf8a..f9f4bd6f5b9 100644
--- a/spec/features/admin/admin_users_spec.rb
+++ b/spec/features/admin/admin_users_spec.rb
@@ -288,7 +288,7 @@ describe "Admin::Users" do
end
end
- it 'allows group membership to be revoked', js: true do
+ it 'allows group membership to be revoked', :js do
page.within(first('.group_member')) do
find('.btn-remove').click
end
@@ -309,7 +309,7 @@ describe "Admin::Users" do
end
end
- describe 'remove users secondary email', js: true do
+ describe 'remove users secondary email', :js do
let!(:secondary_email) do
create :email, email: 'secondary@example.com', user: user
end
diff --git a/spec/features/admin/admin_uses_repository_checks_spec.rb b/spec/features/admin/admin_uses_repository_checks_spec.rb
index c2b7543a690..42f5b5eb8dc 100644
--- a/spec/features/admin/admin_uses_repository_checks_spec.rb
+++ b/spec/features/admin/admin_uses_repository_checks_spec.rb
@@ -32,7 +32,7 @@ feature 'Admin uses repository checks' do
end
end
- scenario 'to clear all repository checks', js: true do
+ scenario 'to clear all repository checks', :js do
visit admin_application_settings_path
expect(RepositoryCheck::ClearWorker).to receive(:perform_async)
diff --git a/spec/features/auto_deploy_spec.rb b/spec/features/auto_deploy_spec.rb
index dff6f96b663..4a7c3e4f1ab 100644
--- a/spec/features/auto_deploy_spec.rb
+++ b/spec/features/auto_deploy_spec.rb
@@ -31,7 +31,7 @@ describe 'Auto deploy' do
expect(page).to have_link('Set up auto deploy')
end
- it 'includes OpenShift as an available template', js: true do
+ it 'includes OpenShift as an available template', :js do
click_link 'Set up auto deploy'
click_button 'Apply a GitLab CI Yaml template'
@@ -40,7 +40,7 @@ describe 'Auto deploy' do
end
end
- it 'creates a merge request using "auto-deploy" branch', js: true do
+ it 'creates a merge request using "auto-deploy" branch', :js do
click_link 'Set up auto deploy'
click_button 'Apply a GitLab CI Yaml template'
within '.gitlab-ci-yml-selector' do
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index e010b5f3444..91c4e5037de 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-describe 'Issue Boards', js: true do
+describe 'Issue Boards', :js do
include DragTo
let(:group) { create(:group, :nested) }
@@ -13,7 +13,7 @@ describe 'Issue Boards', js: true do
project.team << [user, :master]
project.team << [user2, :master]
- allow_any_instance_of(ApplicationHelper).to receive(:collapsed_sidebar?).and_return(true)
+ page.driver.set_cookie('sidebar_collapsed', 'true')
sign_in(user)
end
diff --git a/spec/features/boards/keyboard_shortcut_spec.rb b/spec/features/boards/keyboard_shortcut_spec.rb
index 61b53aa5d1e..435de3861cf 100644
--- a/spec/features/boards/keyboard_shortcut_spec.rb
+++ b/spec/features/boards/keyboard_shortcut_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-describe 'Issue Boards shortcut', js: true do
+describe 'Issue Boards shortcut', :js do
let(:project) { create(:project) }
before do
diff --git a/spec/features/boards/new_issue_spec.rb b/spec/features/boards/new_issue_spec.rb
index f67372337ec..5ac4d87e90b 100644
--- a/spec/features/boards/new_issue_spec.rb
+++ b/spec/features/boards/new_issue_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-describe 'Issue Boards new issue', js: true do
+describe 'Issue Boards new issue', :js do
let(:project) { create(:project, :public) }
let(:board) { create(:board, project: project) }
let!(:list) { create(:list, board: board, position: 0) }
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index c3bf50ef9d1..c4817eb947b 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-describe 'Issue Boards', js: true do
+describe 'Issue Boards', :js do
let(:user) { create(:user) }
let(:user2) { create(:user) }
let(:project) { create(:project, :public) }
diff --git a/spec/features/ci_lint_spec.rb b/spec/features/ci_lint_spec.rb
index af4cc00162a..9accd7bb07c 100644
--- a/spec/features/ci_lint_spec.rb
+++ b/spec/features/ci_lint_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'CI Lint', js: true do
+describe 'CI Lint', :js do
before do
sign_in(create(:user))
end
diff --git a/spec/features/container_registry_spec.rb b/spec/features/container_registry_spec.rb
index ae39ba4da6b..d5e9de20e59 100644
--- a/spec/features/container_registry_spec.rb
+++ b/spec/features/container_registry_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe "Container Registry" do
+describe "Container Registry", :js do
let(:user) { create(:user) }
let(:project) { create(:project) }
@@ -41,16 +41,19 @@ describe "Container Registry" do
expect_any_instance_of(ContainerRepository)
.to receive(:delete_tags!).and_return(true)
- click_on 'Remove repository'
+ click_on(class: 'js-remove-repo')
end
scenario 'user removes a specific tag from container repository' do
visit_container_registry
+ find('.js-toggle-repo').trigger('click')
+ wait_for_requests
+
expect_any_instance_of(ContainerRegistry::Tag)
.to receive(:delete).and_return(true)
- click_on 'Remove tag'
+ click_on(class: 'js-delete-registry')
end
end
diff --git a/spec/features/copy_as_gfm_spec.rb b/spec/features/copy_as_gfm_spec.rb
index 3e6a27eafd8..c6ba1211b9e 100644
--- a/spec/features/copy_as_gfm_spec.rb
+++ b/spec/features/copy_as_gfm_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Copy as GFM', js: true do
+describe 'Copy as GFM', :js do
include MarkupHelper
include RepoHelpers
include ActionView::Helpers::JavaScriptHelper
@@ -288,8 +288,6 @@ describe 'Copy as GFM', js: true do
'SanitizationFilter',
<<-GFM.strip_heredoc
- <a name="named-anchor"></a>
-
<sub>sub</sub>
<dl>
@@ -448,7 +446,7 @@ describe 'Copy as GFM', js: true do
def verify(label, *gfms)
aggregate_failures(label) do
gfms.each do |gfm|
- html = gfm_to_html(gfm)
+ html = gfm_to_html(gfm).gsub(/\A&#x000A;|&#x000A;\z/, '')
output_gfm = html_to_gfm(html)
expect(output_gfm.strip).to eq(gfm.strip)
end
@@ -465,42 +463,98 @@ describe 'Copy as GFM', js: true do
let(:project) { create(:project, :repository) }
context 'from a diff' do
- before do
- visit project_commit_path(project, sample_commit.id)
- end
+ shared_examples 'copying code from a diff' do
+ context 'selecting one word of text' do
+ it 'copies as inline code' do
+ verify(
+ '[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"] .line .no',
- context 'selecting one word of text' do
- it 'copies as inline code' do
- verify(
- '[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"] .line .no',
+ '`RuntimeError`',
- '`RuntimeError`'
- )
+ target: '[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"]'
+ )
+ end
end
- end
- context 'selecting one line of text' do
- it 'copies as inline code' do
- verify(
- '[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"] .line',
+ context 'selecting one line of text' do
+ it 'copies as inline code' do
+ verify(
+ '[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"]',
- '`raise RuntimeError, "System commands must be given as an array of strings"`'
- )
+ '`raise RuntimeError, "System commands must be given as an array of strings"`',
+
+ target: '[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"]'
+ )
+ end
+ end
+
+ context 'selecting multiple lines of text' do
+ it 'copies as a code block' do
+ verify(
+ '[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"], [id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_10"]',
+
+ <<-GFM.strip_heredoc,
+ ```ruby
+ raise RuntimeError, "System commands must be given as an array of strings"
+ end
+ ```
+ GFM
+
+ target: '[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"]'
+ )
+ end
end
end
- context 'selecting multiple lines of text' do
- it 'copies as a code block' do
- verify(
- '[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"], [id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_10"]',
+ context 'inline diff' do
+ before do
+ visit project_commit_path(project, sample_commit.id, view: 'inline')
+ end
- <<-GFM.strip_heredoc,
- ```ruby
- raise RuntimeError, "System commands must be given as an array of strings"
- end
- ```
- GFM
- )
+ it_behaves_like 'copying code from a diff'
+ end
+
+ context 'parallel diff' do
+ before do
+ visit project_commit_path(project, sample_commit.id, view: 'parallel')
+ end
+
+ it_behaves_like 'copying code from a diff'
+
+ context 'selecting code on the left' do
+ it 'copies as a code block' do
+ verify(
+ '[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_8_8"], [id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_9_9"], [id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"], [id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_10"]',
+
+ <<-GFM.strip_heredoc,
+ ```ruby
+ unless cmd.is_a?(Array)
+ raise "System commands must be given as an array of strings"
+ end
+ ```
+ GFM
+
+ target: '[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_8_8"].left-side'
+ )
+ end
+ end
+
+ context 'selecting code on the right' do
+ it 'copies as a code block' do
+ verify(
+ '[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_8_8"], [id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_9_9"], [id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"], [id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_10"]',
+
+ <<-GFM.strip_heredoc,
+ ```ruby
+ unless cmd.is_a?(Array)
+ raise RuntimeError, "System commands must be given as an array of strings"
+ end
+ ```
+ GFM
+
+ target: '[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_8_8"].right-side'
+ )
+ end
end
end
end
@@ -589,9 +643,9 @@ describe 'Copy as GFM', js: true do
end
end
- def verify(selector, gfm)
+ def verify(selector, gfm, target: nil)
html = html_for_selector(selector)
- output_gfm = html_to_gfm(html, 'transformCodeSelection')
+ output_gfm = html_to_gfm(html, 'transformCodeSelection', target: target)
expect(output_gfm.strip).to eq(gfm.strip)
end
end
@@ -607,15 +661,21 @@ describe 'Copy as GFM', js: true do
page.evaluate_script(js)
end
- def html_to_gfm(html, transformer = 'transformGFMSelection')
+ def html_to_gfm(html, transformer = 'transformGFMSelection', target: nil)
js = <<-JS.strip_heredoc
(function(html) {
var transformer = window.gl.CopyAsGFM[#{transformer.inspect}];
var node = document.createElement('div');
- node.innerHTML = html;
+ $(html).each(function() { node.appendChild(this) });
+
+ var targetSelector = #{target.to_json};
+ var target;
+ if (targetSelector) {
+ target = document.querySelector(targetSelector);
+ }
- node = transformer(node);
+ node = transformer(node, target);
if (!node) return null;
return window.gl.CopyAsGFM.nodeToGFM(node);
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index bfe9dac3bd4..177cd50dd72 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Cycle Analytics', js: true do
+feature 'Cycle Analytics', :js do
let(:user) { create(:user) }
let(:guest) { create(:user) }
let(:project) { create(:project, :repository) }
diff --git a/spec/features/dashboard/active_tab_spec.rb b/spec/features/dashboard/active_tab_spec.rb
index 08d8cc7922b..8bab501134b 100644
--- a/spec/features/dashboard/active_tab_spec.rb
+++ b/spec/features/dashboard/active_tab_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-RSpec.describe 'Dashboard Active Tab', js: true do
+RSpec.describe 'Dashboard Active Tab', :js do
before do
sign_in(create(:user))
end
diff --git a/spec/features/dashboard/datetime_on_tooltips_spec.rb b/spec/features/dashboard/datetime_on_tooltips_spec.rb
index b6dce1b8ec4..349f9a47112 100644
--- a/spec/features/dashboard/datetime_on_tooltips_spec.rb
+++ b/spec/features/dashboard/datetime_on_tooltips_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Tooltips on .timeago dates', js: true do
+feature 'Tooltips on .timeago dates', :js do
let(:user) { create(:user) }
let(:project) { create(:project, name: 'test', namespace: user.namespace) }
let(:created_date) { Date.yesterday.to_time }
diff --git a/spec/features/dashboard/group_spec.rb b/spec/features/dashboard/group_spec.rb
index 60a16830cdc..1213f8c32eb 100644
--- a/spec/features/dashboard/group_spec.rb
+++ b/spec/features/dashboard/group_spec.rb
@@ -5,7 +5,13 @@ RSpec.describe 'Dashboard Group' do
sign_in(create(:user))
end
- it 'creates new group', js: true do
+ it 'defaults sort dropdown to last created' do
+ visit dashboard_groups_path
+
+ expect(page).to have_button('Last created')
+ end
+
+ it 'creates new group', :js do
visit dashboard_groups_path
find('.btn-new').trigger('click')
new_path = 'Samurai'
diff --git a/spec/features/dashboard/issues_filter_spec.rb b/spec/features/dashboard/issues_filter_spec.rb
index facb67ae787..8759950e013 100644
--- a/spec/features/dashboard/issues_filter_spec.rb
+++ b/spec/features/dashboard/issues_filter_spec.rb
@@ -90,17 +90,17 @@ feature 'Dashboard Issues filtering', :js do
context 'sorting' do
it 'shows sorted issues' do
- sorting_by('Oldest updated')
+ sorting_by('Created date')
visit_issues
- expect(find('.issues-filters')).to have_content('Oldest updated')
+ expect(find('.issues-filters')).to have_content('Created date')
end
it 'keeps sorting issues after visiting Projects Issues page' do
- sorting_by('Oldest updated')
+ sorting_by('Created date')
visit project_issues_path(project)
- expect(find('.issues-filters')).to have_content('Oldest updated')
+ expect(find('.issues-filters')).to have_content('Created date')
end
end
diff --git a/spec/features/dashboard/issues_spec.rb b/spec/features/dashboard/issues_spec.rb
index 795335aa106..5610894fd9a 100644
--- a/spec/features/dashboard/issues_spec.rb
+++ b/spec/features/dashboard/issues_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'Dashboard Issues' do
expect(page).not_to have_content(other_issue.title)
end
- it 'shows checkmark when unassigned is selected for assignee', js: true do
+ it 'shows checkmark when unassigned is selected for assignee', :js do
find('.js-assignee-search').click
find('li', text: 'Unassigned').click
find('.js-assignee-search').click
@@ -32,7 +32,7 @@ RSpec.describe 'Dashboard Issues' do
expect(find('li[data-user-id="0"] a.is-active')).to be_visible
end
- it 'shows issues when current user is author', js: true do
+ it 'shows issues when current user is author', :js do
find('#assignee_id', visible: false).set('')
find('.js-author-search', match: :first).click
@@ -70,7 +70,7 @@ RSpec.describe 'Dashboard Issues' do
end
describe 'new issue dropdown' do
- it 'shows projects only with issues feature enabled', js: true do
+ it 'shows projects only with issues feature enabled', :js do
find('.new-project-item-select-button').trigger('click')
page.within('.select2-results') do
@@ -79,7 +79,7 @@ RSpec.describe 'Dashboard Issues' do
end
end
- it 'shows the new issue page', js: true do
+ it 'shows the new issue page', :js do
find('.new-project-item-select-button').trigger('click')
wait_for_requests
diff --git a/spec/features/dashboard/label_filter_spec.rb b/spec/features/dashboard/label_filter_spec.rb
index b1a207682c3..6802974c2ee 100644
--- a/spec/features/dashboard/label_filter_spec.rb
+++ b/spec/features/dashboard/label_filter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Dashboard > label filter', js: true do
+describe 'Dashboard > label filter', :js do
let(:user) { create(:user) }
let(:project) { create(:project, name: 'test', namespace: user.namespace) }
let(:project2) { create(:project, name: 'test2', path: 'test2', namespace: user.namespace) }
diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb
index b4992dd54a1..f01ba442e58 100644
--- a/spec/features/dashboard/merge_requests_spec.rb
+++ b/spec/features/dashboard/merge_requests_spec.rb
@@ -3,12 +3,13 @@ require 'spec_helper'
feature 'Dashboard Merge Requests' do
include FilterItemSelectHelper
include SortingHelper
+ include ProjectForksHelper
let(:current_user) { create :user }
let(:project) { create(:project) }
let(:public_project) { create(:project, :public, :repository) }
- let(:forked_project) { Projects::ForkService.new(public_project, current_user).execute }
+ let(:forked_project) { fork_project(public_project, current_user, repository: true) }
before do
project.add_master(current_user)
@@ -23,7 +24,7 @@ feature 'Dashboard Merge Requests' do
visit merge_requests_dashboard_path
end
- it 'shows projects only with merge requests feature enabled', js: true do
+ it 'shows projects only with merge requests feature enabled', :js do
find('.new-project-item-select-button').trigger('click')
page.within('.select2-results') do
@@ -88,7 +89,7 @@ feature 'Dashboard Merge Requests' do
expect(page).not_to have_content(other_merge_request.title)
end
- it 'shows authored merge requests', js: true do
+ it 'shows authored merge requests', :js do
filter_item_select('Any Assignee', '.js-assignee-search')
filter_item_select(current_user.to_reference, '.js-author-search')
@@ -100,7 +101,7 @@ feature 'Dashboard Merge Requests' do
expect(page).not_to have_content(other_merge_request.title)
end
- it 'shows all merge requests', js: true do
+ it 'shows all merge requests', :js do
filter_item_select('Any Assignee', '.js-assignee-search')
filter_item_select('Any Author', '.js-author-search')
@@ -112,19 +113,19 @@ feature 'Dashboard Merge Requests' do
end
it 'shows sorted merge requests' do
- sorting_by('Oldest updated')
+ sorting_by('Created date')
visit merge_requests_dashboard_path(assignee_id: current_user.id)
- expect(find('.issues-filters')).to have_content('Oldest updated')
+ expect(find('.issues-filters')).to have_content('Created date')
end
it 'keeps sorting merge requests after visiting Projects MR page' do
- sorting_by('Oldest updated')
+ sorting_by('Created date')
visit project_merge_requests_path(project)
- expect(find('.issues-filters')).to have_content('Oldest updated')
+ expect(find('.issues-filters')).to have_content('Created date')
end
end
end
diff --git a/spec/features/dashboard/project_member_activity_index_spec.rb b/spec/features/dashboard/project_member_activity_index_spec.rb
index 4a004107408..8f96899fb4f 100644
--- a/spec/features/dashboard/project_member_activity_index_spec.rb
+++ b/spec/features/dashboard/project_member_activity_index_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Project member activity', js: true do
+feature 'Project member activity', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public, name: 'x', namespace: user.namespace) }
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 0613c158c54..fbf8b5c0db6 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -50,6 +50,25 @@ feature 'Dashboard Projects' do
end
end
+ context 'when on Your projects tab' do
+ it 'shows all projects by default' do
+ visit dashboard_projects_path
+
+ expect(page).to have_content(project.name)
+ end
+
+ it 'shows personal projects on personal projects tab', :js do
+ project3 = create(:project, namespace: user.namespace)
+
+ visit dashboard_projects_path
+
+ click_link 'Personal'
+
+ expect(page).not_to have_content(project.name)
+ expect(page).to have_content(project3.name)
+ end
+ end
+
context 'when on Starred projects tab' do
it 'shows only starred projects' do
user.toggle_star(project2)
@@ -61,7 +80,7 @@ feature 'Dashboard Projects' do
end
end
- describe 'with a pipeline', clean_gitlab_redis_shared_state: true do
+ describe 'with a pipeline', :clean_gitlab_redis_shared_state do
let(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit.sha) }
before do
@@ -83,12 +102,14 @@ feature 'Dashboard Projects' do
end
end
- context 'last push widget' do
+ context 'last push widget', :use_clean_rails_memory_store_caching do
before do
event = create(:push_event, project: project, author: user)
create(:push_event_payload, event: event, ref: 'feature', action: :created)
+ Users::LastPushEventService.new(user).cache_last_push_event(event)
+
visit dashboard_projects_path
end
diff --git a/spec/features/dashboard/todos/todos_filtering_spec.rb b/spec/features/dashboard/todos/todos_filtering_spec.rb
index 54d477f7274..ad0f132da8c 100644
--- a/spec/features/dashboard/todos/todos_filtering_spec.rb
+++ b/spec/features/dashboard/todos/todos_filtering_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Dashboard > User filters todos', js: true do
+feature 'Dashboard > User filters todos', :js do
let(:user_1) { create(:user, username: 'user_1', name: 'user_1') }
let(:user_2) { create(:user, username: 'user_2', name: 'user_2') }
diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb
index 30bab7eeaa7..01aca443f4a 100644
--- a/spec/features/dashboard/todos/todos_spec.rb
+++ b/spec/features/dashboard/todos/todos_spec.rb
@@ -17,7 +17,7 @@ feature 'Dashboard Todos' do
end
end
- context 'User has a todo', js: true do
+ context 'User has a todo', :js do
before do
create(:todo, :mentioned, user: user, project: project, target: issue, author: author)
sign_in(user)
@@ -177,7 +177,7 @@ feature 'Dashboard Todos' do
end
end
- context 'User has done todos', js: true do
+ context 'User has done todos', :js do
before do
create(:todo, :mentioned, :done, user: user, project: project, target: issue, author: author)
sign_in(user)
@@ -249,7 +249,7 @@ feature 'Dashboard Todos' do
expect(page).to have_selector('.gl-pagination .page', count: 2)
end
- describe 'mark all as done', js: true do
+ describe 'mark all as done', :js do
before do
visit dashboard_todos_path
find('.js-todos-mark-all').trigger('click')
@@ -267,7 +267,7 @@ feature 'Dashboard Todos' do
end
end
- describe 'undo mark all as done', js: true do
+ describe 'undo mark all as done', :js do
before do
visit dashboard_todos_path
end
diff --git a/spec/features/expand_collapse_diffs_spec.rb b/spec/features/expand_collapse_diffs_spec.rb
index 357d86497d9..1dd7547a7fc 100644
--- a/spec/features/expand_collapse_diffs_spec.rb
+++ b/spec/features/expand_collapse_diffs_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Expand and collapse diffs', js: true do
+feature 'Expand and collapse diffs', :js do
let(:branch) { 'expand-collapse-diffs' }
let(:project) { create(:project, :repository) }
diff --git a/spec/features/explore/new_menu_spec.rb b/spec/features/explore/new_menu_spec.rb
index e1c74a24890..c5ec495a418 100644
--- a/spec/features/explore/new_menu_spec.rb
+++ b/spec/features/explore/new_menu_spec.rb
@@ -128,12 +128,6 @@ feature 'Top Plus Menu', :js do
expect(find('.header-new.dropdown')).not_to have_selector('.header-new-project-snippet')
end
- scenario 'public project has no New Issue Button' do
- visit project_path(public_project)
-
- hasnot_topmenuitem("New issue")
- end
-
scenario 'public project has no New merge request menu item' do
visit project_path(public_project)
diff --git a/spec/features/gitlab_flavored_markdown_spec.rb b/spec/features/gitlab_flavored_markdown_spec.rb
index 53b3bb3b65f..3c2186b3598 100644
--- a/spec/features/gitlab_flavored_markdown_spec.rb
+++ b/spec/features/gitlab_flavored_markdown_spec.rb
@@ -49,7 +49,7 @@ describe "GitLab Flavored Markdown" do
end
end
- describe "for issues", js: true do
+ describe "for issues", :js do
before do
@other_issue = create(:issue,
author: user,
diff --git a/spec/features/group_variables_spec.rb b/spec/features/group_variables_spec.rb
index 37814ba6238..d2d0be35f1c 100644
--- a/spec/features/group_variables_spec.rb
+++ b/spec/features/group_variables_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Group variables', js: true do
+feature 'Group variables', :js do
let(:user) { create(:user) }
let(:group) { create(:group) }
diff --git a/spec/features/groups/labels/subscription_spec.rb b/spec/features/groups/labels/subscription_spec.rb
index 1dd09d4f203..2e06caf98f6 100644
--- a/spec/features/groups/labels/subscription_spec.rb
+++ b/spec/features/groups/labels/subscription_spec.rb
@@ -11,7 +11,7 @@ feature 'Labels subscription' do
gitlab_sign_in user
end
- scenario 'users can subscribe/unsubscribe to group labels', js: true do
+ scenario 'users can subscribe/unsubscribe to group labels', :js do
visit group_labels_path(group)
expect(page).to have_content('feature')
diff --git a/spec/features/groups/merge_requests_spec.rb b/spec/features/groups/merge_requests_spec.rb
index 2577d98df6f..7ce6a61d50c 100644
--- a/spec/features/groups/merge_requests_spec.rb
+++ b/spec/features/groups/merge_requests_spec.rb
@@ -25,7 +25,7 @@ feature 'Group merge requests page' do
end
it 'ignores archived merge request count badges in navbar' do
- expect( page.find('[aria-label="Merge Requests"] span.badge.count').text).to eq("1")
+ expect(first(:link, text: 'Merge Requests').find('.badge').text).to eq("1")
end
it 'ignores archived merge request count badges in state-filters' do
diff --git a/spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb b/spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb
new file mode 100644
index 00000000000..5ed4f3ad2bc
--- /dev/null
+++ b/spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+feature 'Groups > User sees users dropdowns in issuables list' do
+ let(:entity) { create(:group) }
+ let(:user_in_dropdown) { create(:user) }
+ let!(:user_not_in_dropdown) { create(:user) }
+ let!(:project) { create(:project, group: entity) }
+
+ before do
+ entity.add_developer(user_in_dropdown)
+ end
+
+ it_behaves_like 'issuable user dropdown behaviors' do
+ let(:issuable) { create(:issue, project: project) }
+ let(:issuables_path) { issues_group_path(entity) }
+ end
+
+ it_behaves_like 'issuable user dropdown behaviors' do
+ let(:issuable) { create(:merge_request, source_project: project) }
+ let(:issuables_path) { merge_requests_group_path(entity) }
+ end
+end
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index 4ec2e7e6012..862823d862e 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -85,7 +85,7 @@ feature 'Group' do
end
end
- describe 'create a nested group', :nested_groups, js: true do
+ describe 'create a nested group', :nested_groups, :js do
let(:group) { create(:group, path: 'foo') }
context 'as admin' do
@@ -142,7 +142,7 @@ feature 'Group' do
expect(page).not_to have_content('secret-group')
end
- describe 'group edit', js: true do
+ describe 'group edit', :js do
let(:group) { create(:group) }
let(:path) { edit_group_path(group) }
let(:new_name) { 'new-name' }
@@ -207,7 +207,7 @@ feature 'Group' do
end
end
- describe 'group page with nested groups', :nested_groups, js: true do
+ describe 'group page with nested groups', :nested_groups, :js do
let!(:group) { create(:group) }
let!(:nested_group) { create(:group, parent: group) }
let!(:path) { group_path(group) }
diff --git a/spec/features/issuables/default_sort_order_spec.rb b/spec/features/issuables/default_sort_order_spec.rb
index b72b690110f..caee7a67aec 100644
--- a/spec/features/issuables/default_sort_order_spec.rb
+++ b/spec/features/issuables/default_sort_order_spec.rb
@@ -26,7 +26,7 @@ describe 'Projects > Issuables > Default sort order' do
MergeRequest.all
end
- context 'in the "merge requests" tab', js: true do
+ context 'in the "merge requests" tab', :js do
let(:issuable_type) { :merge_request }
it 'is "last created"' do
@@ -37,19 +37,19 @@ describe 'Projects > Issuables > Default sort order' do
end
end
- context 'in the "merge requests / open" tab', js: true do
+ context 'in the "merge requests / open" tab', :js do
let(:issuable_type) { :merge_request }
- it 'is "last created"' do
+ it 'is "created date"' do
visit_merge_requests_with_state(project, 'open')
- expect(selected_sort_order).to eq('last created')
+ expect(selected_sort_order).to eq('created date')
expect(first_merge_request).to include(last_created_issuable.title)
expect(last_merge_request).to include(first_created_issuable.title)
end
end
- context 'in the "merge requests / merged" tab', js: true do
+ context 'in the "merge requests / merged" tab', :js do
let(:issuable_type) { :merged_merge_request }
it 'is "last updated"' do
@@ -61,7 +61,7 @@ describe 'Projects > Issuables > Default sort order' do
end
end
- context 'in the "merge requests / closed" tab', js: true do
+ context 'in the "merge requests / closed" tab', :js do
let(:issuable_type) { :closed_merge_request }
it 'is "last updated"' do
@@ -73,13 +73,13 @@ describe 'Projects > Issuables > Default sort order' do
end
end
- context 'in the "merge requests / all" tab', js: true do
+ context 'in the "merge requests / all" tab', :js do
let(:issuable_type) { :merge_request }
- it 'is "last created"' do
+ it 'is "created date"' do
visit_merge_requests_with_state(project, 'all')
- expect(find('.issues-other-filters')).to have_content('Last created')
+ expect(find('.issues-other-filters')).to have_content('Created date')
expect(first_merge_request).to include(last_created_issuable.title)
expect(last_merge_request).to include(first_created_issuable.title)
end
@@ -102,31 +102,31 @@ describe 'Projects > Issuables > Default sort order' do
Issue.all
end
- context 'in the "issues" tab', js: true do
+ context 'in the "issues" tab', :js do
let(:issuable_type) { :issue }
- it 'is "last created"' do
+ it 'is "created date"' do
visit_issues project
- expect(find('.issues-other-filters')).to have_content('Last created')
+ expect(find('.issues-other-filters')).to have_content('Created date')
expect(first_issue).to include(last_created_issuable.title)
expect(last_issue).to include(first_created_issuable.title)
end
end
- context 'in the "issues / open" tab', js: true do
+ context 'in the "issues / open" tab', :js do
let(:issuable_type) { :issue }
- it 'is "last created"' do
+ it 'is "created date"' do
visit_issues_with_state(project, 'open')
- expect(find('.issues-other-filters')).to have_content('Last created')
+ expect(find('.issues-other-filters')).to have_content('Created date')
expect(first_issue).to include(last_created_issuable.title)
expect(last_issue).to include(first_created_issuable.title)
end
end
- context 'in the "issues / closed" tab', js: true do
+ context 'in the "issues / closed" tab', :js do
let(:issuable_type) { :closed_issue }
it 'is "last updated"' do
@@ -138,13 +138,13 @@ describe 'Projects > Issuables > Default sort order' do
end
end
- context 'in the "issues / all" tab', js: true do
+ context 'in the "issues / all" tab', :js do
let(:issuable_type) { :issue }
- it 'is "last created"' do
+ it 'is "created date"' do
visit_issues_with_state(project, 'all')
- expect(find('.issues-other-filters')).to have_content('Last created')
+ expect(find('.issues-other-filters')).to have_content('Created date')
expect(first_issue).to include(last_created_issuable.title)
expect(last_issue).to include(first_created_issuable.title)
end
@@ -157,26 +157,12 @@ describe 'Projects > Issuables > Default sort order' do
visit_issues(project, sort: 'id_desc')
end
- it 'shows the sort order as last created' do
- expect(find('.issues-other-filters')).to have_content('Last created')
+ it 'shows the sort order as created date' do
+ expect(find('.issues-other-filters')).to have_content('Created date')
expect(first_issue).to include(last_created_issuable.title)
expect(last_issue).to include(first_created_issuable.title)
end
end
-
- context 'when the sort in the URL is id_asc' do
- let(:issuable_type) { :issue }
-
- before do
- visit_issues(project, sort: 'id_asc')
- end
-
- it 'shows the sort order as oldest created' do
- expect(find('.issues-other-filters')).to have_content('Oldest created')
- expect(first_issue).to include(first_created_issuable.title)
- expect(last_issue).to include(last_created_issuable.title)
- end
- end
end
def selected_sort_order
diff --git a/spec/features/issuables/discussion_lock_spec.rb b/spec/features/issuables/discussion_lock_spec.rb
new file mode 100644
index 00000000000..7ea29ff252b
--- /dev/null
+++ b/spec/features/issuables/discussion_lock_spec.rb
@@ -0,0 +1,106 @@
+require 'spec_helper'
+
+describe 'Discussion Lock', :js do
+ let(:user) { create(:user) }
+ let(:issue) { create(:issue, project: project, author: user) }
+ let(:project) { create(:project, :public) }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when a user is a team member' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when the discussion is unlocked' do
+ it 'the user can lock the issue' do
+ visit project_issue_path(project, issue)
+
+ expect(find('.issuable-sidebar')).to have_content('Unlocked')
+
+ page.within('.issuable-sidebar') do
+ find('.lock-edit').click
+ click_button('Lock')
+ end
+
+ expect(find('#notes')).to have_content('locked this issue')
+ end
+ end
+
+ context 'when the discussion is locked' do
+ before do
+ issue.update_attribute(:discussion_locked, true)
+ visit project_issue_path(project, issue)
+ end
+
+ it 'the user can unlock the issue' do
+ expect(find('.issuable-sidebar')).to have_content('Locked')
+
+ page.within('.issuable-sidebar') do
+ find('.lock-edit').click
+ click_button('Unlock')
+ end
+
+ expect(find('#notes')).to have_content('unlocked this issue')
+ expect(find('.issuable-sidebar')).to have_content('Unlocked')
+ end
+
+ it 'the user can create a comment' do
+ page.within('#notes .js-main-target-form') do
+ fill_in 'note[note]', with: 'Some new comment'
+ click_button 'Comment'
+ end
+
+ wait_for_requests
+
+ expect(find('div#notes')).to have_content('Some new comment')
+ end
+ end
+ end
+
+ context 'when a user is not a team member' do
+ context 'when the discussion is unlocked' do
+ before do
+ visit project_issue_path(project, issue)
+ end
+
+ it 'the user can not lock the issue' do
+ expect(find('.issuable-sidebar')).to have_content('Unlocked')
+ expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
+ end
+
+ it 'the user can create a comment' do
+ page.within('#notes .js-main-target-form') do
+ fill_in 'note[note]', with: 'Some new comment'
+ click_button 'Comment'
+ end
+
+ wait_for_requests
+
+ expect(find('div#notes')).to have_content('Some new comment')
+ end
+ end
+
+ context 'when the discussion is locked' do
+ before do
+ issue.update_attribute(:discussion_locked, true)
+ visit project_issue_path(project, issue)
+ end
+
+ it 'the user can not unlock the issue' do
+ expect(find('.issuable-sidebar')).to have_content('Locked')
+ expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
+ end
+
+ it 'the user can not create a comment' do
+ page.within('#notes') do
+ expect(page).not_to have_selector('.js-main-target-form')
+ expect(page.find('.disabled-comment'))
+ .to have_content('This issue is locked. Only project members can comment.')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/issuables/user_sees_sidebar_spec.rb b/spec/features/issuables/user_sees_sidebar_spec.rb
index 2bd1c8aab86..c6c2e58ecea 100644
--- a/spec/features/issuables/user_sees_sidebar_spec.rb
+++ b/spec/features/issuables/user_sees_sidebar_spec.rb
@@ -12,7 +12,7 @@ describe 'Issue Sidebar on Mobile' do
sign_in(user)
end
- context 'mobile sidebar on merge requests', js: true do
+ context 'mobile sidebar on merge requests', :js do
before do
visit project_merge_request_path(merge_request.project, merge_request)
end
@@ -20,7 +20,7 @@ describe 'Issue Sidebar on Mobile' do
it_behaves_like "issue sidebar stays collapsed on mobile"
end
- context 'mobile sidebar on issues', js: true do
+ context 'mobile sidebar on issues', :js do
before do
visit project_issue_path(project, issue)
end
diff --git a/spec/features/issues/award_emoji_spec.rb b/spec/features/issues/award_emoji_spec.rb
index a29acb30163..850b35c4467 100644
--- a/spec/features/issues/award_emoji_spec.rb
+++ b/spec/features/issues/award_emoji_spec.rb
@@ -24,7 +24,7 @@ describe 'Awards Emoji' do
end
# Regression test: https://gitlab.com/gitlab-org/gitlab-ce/issues/29529
- it 'does not show a 500 page', js: true do
+ it 'does not show a 500 page', :js do
expect(page).to have_text(issue.title)
end
end
@@ -37,37 +37,37 @@ describe 'Awards Emoji' do
wait_for_requests
end
- it 'increments the thumbsdown emoji', js: true do
+ it 'increments the thumbsdown emoji', :js do
find('[data-name="thumbsdown"]').click
wait_for_requests
expect(thumbsdown_emoji).to have_text("1")
end
context 'click the thumbsup emoji' do
- it 'increments the thumbsup emoji', js: true do
+ it 'increments the thumbsup emoji', :js do
find('[data-name="thumbsup"]').click
wait_for_requests
expect(thumbsup_emoji).to have_text("1")
end
- it 'decrements the thumbsdown emoji', js: true do
+ it 'decrements the thumbsdown emoji', :js do
expect(thumbsdown_emoji).to have_text("0")
end
end
context 'click the thumbsdown emoji' do
- it 'increments the thumbsdown emoji', js: true do
+ it 'increments the thumbsdown emoji', :js do
find('[data-name="thumbsdown"]').click
wait_for_requests
expect(thumbsdown_emoji).to have_text("1")
end
- it 'decrements the thumbsup emoji', js: true do
+ it 'decrements the thumbsup emoji', :js do
expect(thumbsup_emoji).to have_text("0")
end
end
- it 'toggles the smiley emoji on a note', js: true do
+ it 'toggles the smiley emoji on a note', :js do
toggle_smiley_emoji(true)
within('.note-body') do
@@ -82,7 +82,7 @@ describe 'Awards Emoji' do
end
context 'execute /award quick action' do
- it 'toggles the emoji award on noteable', js: true do
+ it 'toggles the emoji award on noteable', :js do
execute_quick_action('/award :100:')
expect(find(noteable_award_counter)).to have_text("1")
@@ -95,7 +95,7 @@ describe 'Awards Emoji' do
end
end
- context 'unauthorized user', js: true do
+ context 'unauthorized user', :js do
before do
visit project_issue_path(project, issue)
end
diff --git a/spec/features/issues/award_spec.rb b/spec/features/issues/award_spec.rb
index e95eb19f7d1..ddb69d414da 100644
--- a/spec/features/issues/award_spec.rb
+++ b/spec/features/issues/award_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-feature 'Issue awards', js: true do
+feature 'Issue awards', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project) }
diff --git a/spec/features/issues/bulk_assignment_labels_spec.rb b/spec/features/issues/bulk_assignment_labels_spec.rb
index b2229b44f99..3223eb20b55 100644
--- a/spec/features/issues/bulk_assignment_labels_spec.rb
+++ b/spec/features/issues/bulk_assignment_labels_spec.rb
@@ -9,7 +9,7 @@ feature 'Issues > Labels bulk assignment' do
let!(:feature) { create(:label, project: project, title: 'feature') }
let!(:wontfix) { create(:label, project: project, title: 'wontfix') }
- context 'as an allowed user', js: true do
+ context 'as an allowed user', :js do
before do
project.team << [user, :master]
@@ -405,7 +405,7 @@ feature 'Issues > Labels bulk assignment' do
end
def update_issues
- click_button 'Update all'
+ find('.update-selected-issues').trigger('click')
wait_for_requests
end
diff --git a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
index 80cc8d22999..822ba48e005 100644
--- a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-feature 'Resolving all open discussions in a merge request from an issue', js: true do
+feature 'Resolving all open discussions in a merge request from an issue', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
index ad5fd0fd97b..f0bed85595c 100644
--- a/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
@@ -24,7 +24,7 @@ feature 'Resolve an open discussion in a merge request by creating an issue' do
end
end
- context 'resolving the discussion', js: true do
+ context 'resolving the discussion', :js do
before do
click_button 'Resolve discussion'
end
diff --git a/spec/features/issues/filtered_search/dropdown_author_spec.rb b/spec/features/issues/filtered_search/dropdown_author_spec.rb
index 3cec59050ab..5e20fb48768 100644
--- a/spec/features/issues/filtered_search/dropdown_author_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_author_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-describe 'Dropdown author', js: true do
+describe 'Dropdown author', :js do
include FilteredSearchHelpers
let!(:project) { create(:project) }
diff --git a/spec/features/issues/filtered_search/dropdown_emoji_spec.rb b/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
index 44741bcc92d..3012c77f2b9 100644
--- a/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-describe 'Dropdown emoji', js: true do
+describe 'Dropdown emoji', :js do
include FilteredSearchHelpers
let!(:project) { create(:project, :public) }
diff --git a/spec/features/issues/filtered_search/dropdown_label_spec.rb b/spec/features/issues/filtered_search/dropdown_label_spec.rb
index c46803112a9..cbc4f8d4c50 100644
--- a/spec/features/issues/filtered_search/dropdown_label_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_label_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Dropdown label', js: true do
+describe 'Dropdown label', :js do
include FilteredSearchHelpers
let(:project) { create(:project) }
diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb
index 3ea6e1c8863..2974016c6a7 100644
--- a/spec/features/issues/filtered_search/filter_issues_spec.rb
+++ b/spec/features/issues/filtered_search/filter_issues_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Filter issues', js: true do
+describe 'Filter issues', :js do
include FilteredSearchHelpers
let(:project) { create(:project) }
@@ -405,20 +405,18 @@ describe 'Filter issues', js: true do
end
context 'sorting' do
- it 'sorts by oldest updated' do
- create(:issue,
+ it 'sorts by created date' do
+ new_issue = create(:issue,
title: '3 days ago',
project: project,
author: user,
- created_at: 3.days.ago,
- updated_at: 3.days.ago)
+ created_at: 3.days.ago)
- old_issue = create(:issue,
+ create(:issue,
title: '5 days ago',
project: project,
author: user,
- created_at: 5.days.ago,
- updated_at: 5.days.ago)
+ created_at: 5.days.ago)
input_filtered_search('days ago')
@@ -427,10 +425,10 @@ describe 'Filter issues', js: true do
sort_toggle = find('.filtered-search-wrapper .dropdown-toggle')
sort_toggle.click
- find('.filtered-search-wrapper .dropdown-menu li a', text: 'Oldest updated').click
+ find('.filtered-search-wrapper .dropdown-menu li a', text: 'Created date').click
wait_for_requests
- expect(find('.issues-list .issue:first-of-type .issue-title-text a')).to have_content(old_issue.title)
+ expect(find('.issues-list .issue:first-of-type .issue-title-text a')).to have_content(new_issue.title)
end
end
end
diff --git a/spec/features/issues/filtered_search/recent_searches_spec.rb b/spec/features/issues/filtered_search/recent_searches_spec.rb
index 5eeecaeda47..eef7988e2bd 100644
--- a/spec/features/issues/filtered_search/recent_searches_spec.rb
+++ b/spec/features/issues/filtered_search/recent_searches_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Recent searches', js: true do
+describe 'Recent searches', :js do
include FilteredSearchHelpers
let(:project_1) { create(:project, :public) }
@@ -104,6 +104,6 @@ describe 'Recent searches', js: true do
set_recent_searches(project_1_local_storage_key, 'fail')
visit project_issues_path(project_1)
- expect(find('.flash-alert')).to have_text('An error occured while parsing recent searches')
+ expect(find('.flash-alert')).to have_text('An error occurred while parsing recent searches')
end
end
diff --git a/spec/features/issues/filtered_search/search_bar_spec.rb b/spec/features/issues/filtered_search/search_bar_spec.rb
index d4dd570fb37..88688422dc7 100644
--- a/spec/features/issues/filtered_search/search_bar_spec.rb
+++ b/spec/features/issues/filtered_search/search_bar_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-describe 'Search bar', js: true do
+describe 'Search bar', :js do
include FilteredSearchHelpers
let!(:project) { create(:project) }
diff --git a/spec/features/issues/filtered_search/visual_tokens_spec.rb b/spec/features/issues/filtered_search/visual_tokens_spec.rb
index 4ae54fd6f4e..920f5546eef 100644
--- a/spec/features/issues/filtered_search/visual_tokens_spec.rb
+++ b/spec/features/issues/filtered_search/visual_tokens_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-describe 'Visual tokens', js: true do
+describe 'Visual tokens', :js do
include FilteredSearchHelpers
include WaitForRequests
@@ -28,7 +28,7 @@ describe 'Visual tokens', js: true do
sign_in(user)
create(:issue, project: project)
- allow_any_instance_of(ApplicationHelper).to receive(:collapsed_sidebar?).and_return(true)
+ page.driver.set_cookie('sidebar_collapsed', 'true')
visit project_issues_path(project)
end
diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb
index 2db6f9a2982..8ce470fc288 100644
--- a/spec/features/issues/form_spec.rb
+++ b/spec/features/issues/form_spec.rb
@@ -218,54 +218,15 @@ describe 'New/edit issue', :js do
context 'edit issue' do
before do
- visit edit_project_issue_path(project, issue)
- end
-
- it 'allows user to update issue' do
- expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to match(user.id.to_s)
- expect(find('input[name="issue[milestone_id]"]', visible: false).value).to match(milestone.id.to_s)
- expect(find('a', text: 'Assign to me', visible: false)).not_to be_visible
-
- page.within '.js-user-search' do
- expect(page).to have_content user.name
- end
-
- page.within '.js-milestone-select' do
- expect(page).to have_content milestone.title
- end
-
- click_button 'Labels'
- page.within '.dropdown-menu-labels' do
- click_link label.title
- click_link label2.title
- end
- page.within '.js-label-select' do
- expect(page).to have_content label.title
- end
- expect(page.all('input[name="issue[label_ids][]"]', visible: false)[1].value).to match(label.id.to_s)
- expect(page.all('input[name="issue[label_ids][]"]', visible: false)[2].value).to match(label2.id.to_s)
-
- click_button 'Save changes'
-
- page.within '.issuable-sidebar' do
- page.within '.assignee' do
- expect(page).to have_content user.name
- end
-
- page.within '.milestone' do
- expect(page).to have_content milestone.title
- end
-
- page.within '.labels' do
- expect(page).to have_content label.title
- expect(page).to have_content label2.title
- end
+ visit project_issue_path(project, issue)
+ page.within('.content .issuable-actions') do
+ click_on 'Edit'
end
end
it 'description has autocomplete' do
- find('#issue_description').native.send_keys('')
- fill_in 'issue_description', with: '@'
+ find_field('issue-description').native.send_keys('')
+ fill_in 'issue-description', with: '@'
expect(page).to have_selector('.atwho-view')
end
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index c6cf6265645..15041ff04ea 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-feature 'GFM autocomplete', js: true do
+feature 'GFM autocomplete', :js do
let(:user) { create(:user, name: '💃speciąl someone💃', username: 'someone.special') }
let(:project) { create(:project) }
let(:label) { create(:label, project: project, title: 'special+') }
diff --git a/spec/features/issues/issue_detail_spec.rb b/spec/features/issues/issue_detail_spec.rb
index 28b636f9359..c0c396af93f 100644
--- a/spec/features/issues/issue_detail_spec.rb
+++ b/spec/features/issues/issue_detail_spec.rb
@@ -28,8 +28,7 @@ feature 'Issue Detail', :js do
fill_in 'issue-title', with: 'issue title'
click_button 'Save'
- visit profile_account_path
- click_link 'Delete account'
+ Users::DestroyService.new(user).execute(user)
visit project_issue_path(project, issue)
end
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index af11b474842..bc9c3d825c1 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -13,7 +13,7 @@ feature 'Issue Sidebar' do
sign_in(user)
end
- context 'assignee', js: true do
+ context 'assignee', :js do
let(:user2) { create(:user) }
let(:issue2) { create(:issue, project: project, author: user2) }
@@ -82,7 +82,7 @@ feature 'Issue Sidebar' do
visit_issue(project, issue)
end
- context 'sidebar', js: true do
+ context 'sidebar', :js do
it 'changes size when the screen size is smaller' do
sidebar_selector = 'aside.right-sidebar.right-sidebar-collapsed'
# Resize the window
@@ -101,7 +101,7 @@ feature 'Issue Sidebar' do
end
end
- context 'editing issue labels', js: true do
+ context 'editing issue labels', :js do
before do
page.within('.block.labels') do
find('.edit-link').click
@@ -114,7 +114,7 @@ feature 'Issue Sidebar' do
end
end
- context 'creating a new label', js: true do
+ context 'creating a new label', :js do
before do
page.within('.block.labels') do
click_link 'Create new'
diff --git a/spec/features/issues/markdown_toolbar_spec.rb b/spec/features/issues/markdown_toolbar_spec.rb
index 634ea111dc1..6869c2c869d 100644
--- a/spec/features/issues/markdown_toolbar_spec.rb
+++ b/spec/features/issues/markdown_toolbar_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-feature 'Issue markdown toolbar', js: true do
+feature 'Issue markdown toolbar', :js do
let(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project) }
let(:user) { create(:user) }
diff --git a/spec/features/issues/move_spec.rb b/spec/features/issues/move_spec.rb
index b2724945da4..6d7b1b1cd8f 100644
--- a/spec/features/issues/move_spec.rb
+++ b/spec/features/issues/move_spec.rb
@@ -37,7 +37,7 @@ feature 'issue move to another project' do
visit issue_path(issue)
end
- scenario 'moving issue to another project', js: true do
+ scenario 'moving issue to another project', :js do
find('.js-move-issue').trigger('click')
wait_for_requests
all('.js-move-issue-dropdown-item')[0].click
@@ -49,7 +49,7 @@ feature 'issue move to another project' do
expect(page.current_path).to include project_path(new_project)
end
- scenario 'searching project dropdown', js: true do
+ scenario 'searching project dropdown', :js do
new_project_search.team << [user, :reporter]
find('.js-move-issue').trigger('click')
@@ -63,7 +63,7 @@ feature 'issue move to another project' do
end
end
- context 'user does not have permission to move the issue to a project', js: true do
+ context 'user does not have permission to move the issue to a project', :js do
let!(:private_project) { create(:project, :private) }
let(:another_project) { create(:project) }
background { another_project.team << [user, :guest] }
diff --git a/spec/features/issues/spam_issues_spec.rb b/spec/features/issues/spam_issues_spec.rb
index 332ce78b138..d25231d624c 100644
--- a/spec/features/issues/spam_issues_spec.rb
+++ b/spec/features/issues/spam_issues_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-describe 'New issue', js: true do
+describe 'New issue', :js do
include StubENV
let(:project) { create(:project, :public) }
diff --git a/spec/features/issues/todo_spec.rb b/spec/features/issues/todo_spec.rb
index 8405f1cd48d..29a2d38ae18 100644
--- a/spec/features/issues/todo_spec.rb
+++ b/spec/features/issues/todo_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-feature 'Manually create a todo item from issue', js: true do
+feature 'Manually create a todo item from issue', :js do
let!(:project) { create(:project) }
let!(:issue) { create(:issue, project: project) }
let!(:user) { create(:user)}
diff --git a/spec/features/issues/update_issues_spec.rb b/spec/features/issues/update_issues_spec.rb
index bcc6e9bab0f..1f57c110c11 100644
--- a/spec/features/issues/update_issues_spec.rb
+++ b/spec/features/issues/update_issues_spec.rb
@@ -118,7 +118,7 @@ feature 'Multiple issue updating from issues#index', :js do
end
def click_update_issues_button
- find('.update-selected-issues').click
+ find('.update-selected-issues').trigger('click')
wait_for_requests
end
end
diff --git a/spec/features/issues/user_uses_slash_commands_spec.rb b/spec/features/issues/user_uses_slash_commands_spec.rb
index 9261acda9dc..9f5e25ff2cb 100644
--- a/spec/features/issues/user_uses_slash_commands_spec.rb
+++ b/spec/features/issues/user_uses_slash_commands_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-feature 'Issues > User uses quick actions', js: true do
+feature 'Issues > User uses quick actions', :js do
include QuickActionsHelpers
it_behaves_like 'issuable record that supports quick actions in its description and notes', :issue do
@@ -159,7 +159,7 @@ feature 'Issues > User uses quick actions', js: true do
describe 'move the issue to another project' do
let(:issue) { create(:issue, project: project) }
- context 'when the project is valid', js: true do
+ context 'when the project is valid' do
let(:target_project) { create(:project, :public) }
before do
@@ -180,7 +180,7 @@ feature 'Issues > User uses quick actions', js: true do
end
end
- context 'when the project is valid but the user not authorized', js: true do
+ context 'when the project is valid but the user not authorized' do
let(:project_unauthorized) {create(:project, :public)}
before do
@@ -196,7 +196,7 @@ feature 'Issues > User uses quick actions', js: true do
end
end
- context 'when the project is invalid', js: true do
+ context 'when the project is invalid' do
before do
sign_in(user)
visit project_issue_path(project, issue)
@@ -210,7 +210,7 @@ feature 'Issues > User uses quick actions', js: true do
end
end
- context 'when the user issues multiple commands', js: true do
+ context 'when the user issues multiple commands' do
let(:target_project) { create(:project, :public) }
let(:milestone) { create(:milestone, title: '1.0', project: project) }
let(:target_milestone) { create(:milestone, title: '1.0', project: target_project) }
diff --git a/spec/features/issues_spec.rb b/spec/features/issues_spec.rb
index 5c284a1fe5f..25e99774575 100644
--- a/spec/features/issues_spec.rb
+++ b/spec/features/issues_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Issues' do
+describe 'Issues', :js do
include DropzoneHelper
include IssueHelpers
include SortingHelper
@@ -24,113 +24,27 @@ describe 'Issues' do
end
before do
- visit edit_project_issue_path(project, issue)
- find('.js-zen-enter').click
+ visit project_issue_path(project, issue)
+ page.within('.content .issuable-actions') do
+ find('.issuable-edit').click
+ end
+ find('.issue-details .content-block .js-zen-enter').click
end
it 'opens new issue popup' do
- expect(page).to have_content("Issue ##{issue.iid}")
- end
- end
-
- describe 'Editing issue assignee' do
- let!(:issue) do
- create(:issue,
- author: user,
- assignees: [user],
- project: project)
- end
-
- it 'allows user to select unassigned', js: true do
- visit edit_project_issue_path(project, issue)
-
- expect(page).to have_content "Assignee #{user.name}"
-
- first('.js-user-search').click
- click_link 'Unassigned'
-
- click_button 'Save changes'
-
- page.within('.assignee') do
- expect(page).to have_content 'No assignee - assign yourself'
- end
-
- expect(issue.reload.assignees).to be_empty
+ expect(page).to have_content(issue.description)
end
end
- describe 'due date', js: true do
- context 'on new form' do
- before do
- visit new_project_issue_path(project)
- end
-
- it 'saves with due date' do
- date = Date.today.at_beginning_of_month
-
- fill_in 'issue_title', with: 'bug 345'
- fill_in 'issue_description', with: 'bug description'
- find('#issuable-due-date').click
-
- page.within '.pika-single' do
- click_button date.day
- end
-
- expect(find('#issuable-due-date').value).to eq date.to_s
-
- click_button 'Submit issue'
-
- page.within '.issuable-sidebar' do
- expect(page).to have_content date.to_s(:medium)
- end
- end
- end
-
- context 'on edit form' do
- let(:issue) { create(:issue, author: user, project: project, due_date: Date.today.at_beginning_of_month.to_s) }
-
- before do
- visit edit_project_issue_path(project, issue)
- end
-
- it 'saves with due date' do
- date = Date.today.at_beginning_of_month
-
- expect(find('#issuable-due-date').value).to eq date.to_s
-
- date = date.tomorrow
-
- fill_in 'issue_title', with: 'bug 345'
- fill_in 'issue_description', with: 'bug description'
- find('#issuable-due-date').click
-
- page.within '.pika-single' do
- click_button date.day
- end
-
- expect(find('#issuable-due-date').value).to eq date.to_s
-
- click_button 'Save changes'
-
- page.within '.issuable-sidebar' do
- expect(page).to have_content date.to_s(:medium)
- end
- end
-
- it 'warns about version conflict' do
- issue.update(title: "New title")
-
- fill_in 'issue_title', with: 'bug 345'
- fill_in 'issue_description', with: 'bug description'
+ describe 'Issue info' do
+ it 'links to current issue in breadcrumbs' do
+ issue = create(:issue, project: project)
- click_button 'Save changes'
+ visit project_issue_path(project, issue)
- expect(page).to have_content 'Someone edited the issue the same time you did'
- end
+ expect(find('.breadcrumbs-sub-title a')[:href]).to end_with(issue_path(issue))
end
- end
- describe 'Issue info' do
it 'excludes award_emoji from comment count' do
issue = create(:issue, author: user, assignees: [user], project: project, title: 'foobar')
create(:award_emoji, awardable: issue)
@@ -190,19 +104,12 @@ describe 'Issues' do
let(:later_due_milestone) { create(:milestone, due_date: '2013-12-12') }
it 'sorts by newest' do
- visit project_issues_path(project, sort: sort_value_recently_created)
+ visit project_issues_path(project, sort: sort_value_created_date)
expect(first_issue).to include('foo')
expect(last_issue).to include('baz')
end
- it 'sorts by oldest' do
- visit project_issues_path(project, sort: sort_value_oldest_created)
-
- expect(first_issue).to include('baz')
- expect(last_issue).to include('foo')
- end
-
it 'sorts by most recently updated' do
baz.updated_at = Time.now + 100
baz.save
@@ -211,36 +118,22 @@ describe 'Issues' do
expect(first_issue).to include('baz')
end
- it 'sorts by least recently updated' do
- baz.updated_at = Time.now - 100
- baz.save
- visit project_issues_path(project, sort: sort_value_oldest_updated)
-
- expect(first_issue).to include('baz')
- end
-
describe 'sorting by due date' do
before do
foo.update(due_date: 1.day.from_now)
bar.update(due_date: 6.days.from_now)
end
- it 'sorts by recently due date' do
- visit project_issues_path(project, sort: sort_value_due_date_soon)
+ it 'sorts by due date' do
+ visit project_issues_path(project, sort: sort_value_due_date)
expect(first_issue).to include('foo')
end
- it 'sorts by least recently due date' do
- visit project_issues_path(project, sort: sort_value_due_date_later)
-
- expect(first_issue).to include('bar')
- end
-
- it 'sorts by least recently due date by excluding nil due dates' do
+ it 'sorts by due date by excluding nil due dates' do
bar.update(due_date: nil)
- visit project_issues_path(project, sort: sort_value_due_date_later)
+ visit project_issues_path(project, sort: sort_value_due_date)
expect(first_issue).to include('foo')
end
@@ -339,19 +232,12 @@ describe 'Issues' do
bar.save
end
- it 'sorts by recently due milestone' do
- visit project_issues_path(project, sort: sort_value_milestone_soon)
+ it 'sorts by milestone' do
+ visit project_issues_path(project, sort: sort_value_milestone)
expect(first_issue).to include('foo')
expect(last_issue).to include('baz')
end
-
- it 'sorts by least recently due milestone' do
- visit project_issues_path(project, sort: sort_value_milestone_later)
-
- expect(first_issue).to include('bar')
- expect(last_issue).to include('baz')
- end
end
describe 'combine filter and sort' do
@@ -365,13 +251,11 @@ describe 'Issues' do
end
it 'sorts with a filter applied' do
- visit project_issues_path(project,
- sort: sort_value_oldest_created,
- assignee_id: user2.id)
+ visit project_issues_path(project, sort: sort_value_created_date, assignee_id: user2.id)
- expect(first_issue).to include('bar')
- expect(last_issue).to include('foo')
- expect(page).not_to have_content 'baz'
+ expect(first_issue).to include('foo')
+ expect(last_issue).to include('bar')
+ expect(page).not_to have_content('baz')
end
end
end
@@ -386,7 +270,7 @@ describe 'Issues' do
visit namespace_project_issues_path(user.namespace, project1)
end
- it 'changes incoming email address token', js: true do
+ it 'changes incoming email address token', :js do
find('.issue-email-modal-btn').click
previous_token = find('input#issue_email').value
find('.incoming-email-token-reset').trigger('click')
@@ -402,7 +286,7 @@ describe 'Issues' do
end
end
- describe 'update labels from issue#show', js: true do
+ describe 'update labels from issue#show', :js do
let(:issue) { create(:issue, project: project, author: user, assignees: [user]) }
let!(:label) { create(:label, project: project) }
@@ -425,7 +309,7 @@ describe 'Issues' do
let(:issue) { create(:issue, project: project, author: user, assignees: [user]) }
context 'by authorized user' do
- it 'allows user to select unassigned', js: true do
+ it 'allows user to select unassigned', :js do
visit project_issue_path(project, issue)
page.within('.assignee') do
@@ -443,7 +327,7 @@ describe 'Issues' do
expect(issue.reload.assignees).to be_empty
end
- it 'allows user to select an assignee', js: true do
+ it 'allows user to select an assignee', :js do
issue2 = create(:issue, project: project, author: user)
visit project_issue_path(project, issue2)
@@ -464,7 +348,7 @@ describe 'Issues' do
end
end
- it 'allows user to unselect themselves', js: true do
+ it 'allows user to unselect themselves', :js do
issue2 = create(:issue, project: project, author: user)
visit project_issue_path(project, issue2)
@@ -493,7 +377,7 @@ describe 'Issues' do
project.team << [[guest], :guest]
end
- it 'shows assignee text', js: true do
+ it 'shows assignee text', :js do
sign_out(:user)
sign_in(guest)
@@ -508,7 +392,7 @@ describe 'Issues' do
let!(:milestone) { create(:milestone, project: project) }
context 'by authorized user' do
- it 'allows user to select unassigned', js: true do
+ it 'allows user to select unassigned', :js do
visit project_issue_path(project, issue)
page.within('.milestone') do
@@ -526,7 +410,7 @@ describe 'Issues' do
expect(issue.reload.milestone).to be_nil
end
- it 'allows user to de-select milestone', js: true do
+ it 'allows user to de-select milestone', :js do
visit project_issue_path(project, issue)
page.within('.milestone') do
@@ -556,7 +440,7 @@ describe 'Issues' do
issue.save
end
- it 'shows milestone text', js: true do
+ it 'shows milestone text', :js do
sign_out(:user)
sign_in(guest)
@@ -589,7 +473,7 @@ describe 'Issues' do
end
end
- context 'dropzone upload file', js: true do
+ context 'dropzone upload file', :js do
before do
visit new_project_issue_path(project)
end
@@ -660,7 +544,7 @@ describe 'Issues' do
end
describe 'due date' do
- context 'update due on issue#show', js: true do
+ context 'update due on issue#show', :js do
let(:issue) { create(:issue, project: project, author: user, assignees: [user]) }
before do
@@ -704,8 +588,8 @@ describe 'Issues' do
end
end
- describe 'title issue#show', js: true do
- it 'updates the title', js: true do
+ describe 'title issue#show', :js do
+ it 'updates the title', :js do
issue = create(:issue, author: user, assignees: [user], project: project, title: 'new title')
visit project_issue_path(project, issue)
@@ -719,20 +603,20 @@ describe 'Issues' do
end
end
- describe 'confidential issue#show', js: true do
+ describe 'confidential issue#show', :js do
it 'shows confidential sidebar information as confidential and can be turned off' do
issue = create(:issue, :confidential, project: project)
visit project_issue_path(project, issue)
- expect(page).to have_css('.confidential-issue-warning')
- expect(page).to have_css('.is-confidential')
- expect(page).not_to have_css('.is-not-confidential')
+ expect(page).to have_css('.issuable-note-warning')
+ expect(find('.issuable-sidebar-item.confidentiality')).to have_css('.is-active')
+ expect(find('.issuable-sidebar-item.confidentiality')).not_to have_css('.not-active')
find('.confidential-edit').click
- expect(page).to have_css('.confidential-warning-message')
+ expect(page).to have_css('.sidebar-item-warning-message')
- within('.confidential-warning-message') do
+ within('.sidebar-item-warning-message') do
find('.btn-close').click
end
@@ -740,7 +624,7 @@ describe 'Issues' do
visit project_issue_path(project, issue)
- expect(page).not_to have_css('.is-confidential')
+ expect(page).not_to have_css('.is-active')
end
end
end
diff --git a/spec/features/login_spec.rb b/spec/features/login_spec.rb
index c9983f0941f..6dfabcc7225 100644
--- a/spec/features/login_spec.rb
+++ b/spec/features/login_spec.rb
@@ -197,7 +197,7 @@ feature 'Login' do
expect(page).to have_content('The global settings require you to enable Two-Factor Authentication for your account. You need to do this before ')
end
- it 'allows skipping two-factor configuration', js: true do
+ it 'allows skipping two-factor configuration', :js do
expect(current_path).to eq profile_two_factor_auth_path
click_link 'Configure it later'
@@ -215,7 +215,7 @@ feature 'Login' do
)
end
- it 'disallows skipping two-factor configuration', js: true do
+ it 'disallows skipping two-factor configuration', :js do
expect(current_path).to eq profile_two_factor_auth_path
expect(page).not_to have_link('Configure it later')
end
@@ -260,7 +260,7 @@ feature 'Login' do
'before ')
end
- it 'allows skipping two-factor configuration', js: true do
+ it 'allows skipping two-factor configuration', :js do
expect(current_path).to eq profile_two_factor_auth_path
click_link 'Configure it later'
@@ -279,7 +279,7 @@ feature 'Login' do
)
end
- it 'disallows skipping two-factor configuration', js: true do
+ it 'disallows skipping two-factor configuration', :js do
expect(current_path).to eq profile_two_factor_auth_path
expect(page).not_to have_link('Configure it later')
end
diff --git a/spec/features/merge_requests/assign_issues_spec.rb b/spec/features/merge_requests/assign_issues_spec.rb
index 63fa72650ac..d49d145f254 100644
--- a/spec/features/merge_requests/assign_issues_spec.rb
+++ b/spec/features/merge_requests/assign_issues_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-feature 'Merge request issue assignment', js: true do
+feature 'Merge request issue assignment', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let(:issue1) { create(:issue, project: project) }
diff --git a/spec/features/merge_requests/award_spec.rb b/spec/features/merge_requests/award_spec.rb
index e886309133d..a24464f2556 100644
--- a/spec/features/merge_requests/award_spec.rb
+++ b/spec/features/merge_requests/award_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-feature 'Merge request awards', js: true do
+feature 'Merge request awards', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb b/spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb
index 1f5e7b55fb0..fbbfe7942be 100644
--- a/spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb
+++ b/spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Check if mergeable with unresolved discussions', js: true do
+feature 'Check if mergeable with unresolved discussions', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let!(:merge_request) { create(:merge_request_with_diff_notes, source_project: project, author: user) }
diff --git a/spec/features/merge_requests/cherry_pick_spec.rb b/spec/features/merge_requests/cherry_pick_spec.rb
index 4b1e1b9a8d4..48f370c3ad4 100644
--- a/spec/features/merge_requests/cherry_pick_spec.rb
+++ b/spec/features/merge_requests/cherry_pick_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Cherry-pick Merge Requests', js: true do
+describe 'Cherry-pick Merge Requests', :js do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project) { create(:project, :repository, namespace: group) }
diff --git a/spec/features/merge_requests/closes_issues_spec.rb b/spec/features/merge_requests/closes_issues_spec.rb
index 299b4f5708a..4dd4e40f52c 100644
--- a/spec/features/merge_requests/closes_issues_spec.rb
+++ b/spec/features/merge_requests/closes_issues_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Merge Request closing issues message', js: true do
+feature 'Merge Request closing issues message', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let(:issue_1) { create(:issue, project: project)}
diff --git a/spec/features/merge_requests/conflicts_spec.rb b/spec/features/merge_requests/conflicts_spec.rb
index 2d2c674f8fb..b0432ed8fc6 100644
--- a/spec/features/merge_requests/conflicts_spec.rb
+++ b/spec/features/merge_requests/conflicts_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Merge request conflict resolution', js: true do
+feature 'Merge request conflict resolution', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
diff --git a/spec/features/merge_requests/create_new_mr_from_fork_spec.rb b/spec/features/merge_requests/create_new_mr_from_fork_spec.rb
new file mode 100644
index 00000000000..93c40ff6443
--- /dev/null
+++ b/spec/features/merge_requests/create_new_mr_from_fork_spec.rb
@@ -0,0 +1,89 @@
+require 'spec_helper'
+
+feature 'Creating a merge request from a fork', :js do
+ include ProjectForksHelper
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+ let!(:source_project) do
+ fork_project(project, user,
+ repository: true,
+ namespace: user.namespace)
+ end
+
+ before do
+ source_project.add_master(user)
+
+ sign_in(user)
+ end
+
+ shared_examples 'create merge request to other project' do
+ it 'has all possible target projects' do
+ visit project_new_merge_request_path(source_project)
+
+ first('.js-target-project').click
+
+ within('.dropdown-target-project .dropdown-content') do
+ expect(page).to have_content(project.full_path)
+ expect(page).to have_content(target_project.full_path)
+ expect(page).to have_content(source_project.full_path)
+ end
+ end
+
+ it 'allows creating the merge request to another target project' do
+ visit project_merge_requests_path(source_project)
+
+ page.within '.content' do
+ click_link 'New merge request'
+ end
+
+ find('.js-source-branch', match: :first).click
+ find('.dropdown-source-branch .dropdown-content a', match: :first).click
+
+ first('.js-target-project').click
+ find('.dropdown-target-project .dropdown-content a', text: target_project.full_path).click
+
+ click_button 'Compare branches and continue'
+
+ wait_for_requests
+
+ expect { click_button 'Submit merge request' }
+ .to change { target_project.merge_requests.reload.size }.by(1)
+ end
+
+ it 'updates the branches when selecting a new target project' do
+ target_project_member = target_project.owner
+ CreateBranchService.new(target_project, target_project_member)
+ .execute('a-brand-new-branch-to-test', 'master')
+ visit project_new_merge_request_path(source_project)
+
+ first('.js-target-project').click
+ find('.dropdown-target-project .dropdown-content a', text: target_project.full_path).click
+
+ wait_for_requests
+
+ first('.js-target-branch').click
+
+ within('.dropdown-target-branch .dropdown-content') do
+ expect(page).to have_content('a-brand-new-branch-to-test')
+ end
+ end
+ end
+
+ context 'creating to the source of a fork' do
+ let!(:target_project) { project }
+
+ it_behaves_like('create merge request to other project')
+ end
+
+ context 'creating to a sibling of a fork' do
+ let!(:target_project) do
+ other_user = create(:user)
+ fork_project(project, other_user,
+ repository: true,
+ namespace: other_user.namespace)
+ end
+
+ it_behaves_like('create merge request to other project')
+ end
+end
diff --git a/spec/features/merge_requests/create_new_mr_spec.rb b/spec/features/merge_requests/create_new_mr_spec.rb
index 96e8027a54d..5402d61da54 100644
--- a/spec/features/merge_requests/create_new_mr_spec.rb
+++ b/spec/features/merge_requests/create_new_mr_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Create New Merge Request', js: true do
+feature 'Create New Merge Request', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_requests/created_from_fork_spec.rb b/spec/features/merge_requests/created_from_fork_spec.rb
index 09541873f71..d03ddfece74 100644
--- a/spec/features/merge_requests/created_from_fork_spec.rb
+++ b/spec/features/merge_requests/created_from_fork_spec.rb
@@ -1,21 +1,20 @@
require 'spec_helper'
feature 'Merge request created from fork' do
+ include ProjectForksHelper
+
given(:user) { create(:user) }
given(:project) { create(:project, :public, :repository) }
- given(:fork_project) { create(:project, :public, :repository) }
+ given(:forked_project) { fork_project(project, user, repository: true) }
given!(:merge_request) do
- create(:forked_project_link, forked_to_project: fork_project,
- forked_from_project: project)
-
- create(:merge_request_with_diffs, source_project: fork_project,
+ create(:merge_request_with_diffs, source_project: forked_project,
target_project: project,
description: 'Test merge request')
end
background do
- fork_project.team << [user, :master]
+ forked_project.team << [user, :master]
sign_in user
end
@@ -31,11 +30,11 @@ feature 'Merge request created from fork' do
background do
create(:note_on_commit, note: comment,
- project: fork_project,
+ project: forked_project,
commit_id: merge_request.commit_shas.first)
end
- scenario 'user can reply to the comment', js: true do
+ scenario 'user can reply to the comment', :js do
visit_merge_request(merge_request)
expect(page).to have_content(comment)
@@ -55,10 +54,10 @@ feature 'Merge request created from fork' do
context 'source project is deleted' do
background do
MergeRequests::MergeService.new(project, user).execute(merge_request)
- fork_project.destroy!
+ forked_project.destroy!
end
- scenario 'user can access merge request', js: true do
+ scenario 'user can access merge request', :js do
visit_merge_request(merge_request)
expect(page).to have_content 'Test merge request'
@@ -69,7 +68,7 @@ feature 'Merge request created from fork' do
context 'pipeline present in source project' do
given(:pipeline) do
create(:ci_pipeline,
- project: fork_project,
+ project: forked_project,
sha: merge_request.diff_head_sha,
ref: merge_request.source_branch)
end
@@ -79,7 +78,7 @@ feature 'Merge request created from fork' do
create(:ci_build, pipeline: pipeline, name: 'spinach')
end
- scenario 'user visits a pipelines page', js: true do
+ scenario 'user visits a pipelines page', :js do
visit_merge_request(merge_request)
page.within('.merge-request-tabs') { click_link 'Pipelines' }
diff --git a/spec/features/merge_requests/deleted_source_branch_spec.rb b/spec/features/merge_requests/deleted_source_branch_spec.rb
index 874c6e2ff69..7f69e82af4c 100644
--- a/spec/features/merge_requests/deleted_source_branch_spec.rb
+++ b/spec/features/merge_requests/deleted_source_branch_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
# This test serves as a regression test for a bug that caused an error
# message to be shown by JavaScript when the source branch was deleted.
# Please do not remove ":js".
-describe 'Deleted source branch', js: true do
+describe 'Deleted source branch', :js do
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request) }
diff --git a/spec/features/merge_requests/diff_notes_avatars_spec.rb b/spec/features/merge_requests/diff_notes_avatars_spec.rb
index ca536f2800c..9aa0672feae 100644
--- a/spec/features/merge_requests/diff_notes_avatars_spec.rb
+++ b/spec/features/merge_requests/diff_notes_avatars_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Diff note avatars', js: true do
+feature 'Diff note avatars', :js do
include NoteInteractionHelpers
let(:user) { create(:user) }
@@ -22,7 +22,7 @@ feature 'Diff note avatars', js: true do
project.team << [user, :master]
sign_in user
- allow_any_instance_of(ApplicationHelper).to receive(:collapsed_sidebar?).and_return(true)
+ page.driver.set_cookie('sidebar_collapsed', 'true')
end
context 'discussion tab' do
@@ -84,7 +84,7 @@ feature 'Diff note avatars', js: true do
end
it 'shows note avatar' do
- page.within find("[id='#{position.line_code(project.repository)}']") do
+ page.within find_line(position.line_code(project.repository)) do
find('.diff-notes-collapse').click
expect(page).to have_selector('img.js-diff-comment-avatar', count: 1)
@@ -92,7 +92,7 @@ feature 'Diff note avatars', js: true do
end
it 'shows comment on note avatar' do
- page.within find("[id='#{position.line_code(project.repository)}']") do
+ page.within find_line(position.line_code(project.repository)) do
find('.diff-notes-collapse').click
expect(first('img.js-diff-comment-avatar')["data-original-title"]).to eq("#{note.author.name}: #{note.note.truncate(17)}")
@@ -100,13 +100,13 @@ feature 'Diff note avatars', js: true do
end
it 'toggles comments when clicking avatar' do
- page.within find("[id='#{position.line_code(project.repository)}']") do
+ page.within find_line(position.line_code(project.repository)) do
find('.diff-notes-collapse').click
end
expect(page).to have_selector('.notes_holder', visible: false)
- page.within find("[id='#{position.line_code(project.repository)}']") do
+ page.within find_line(position.line_code(project.repository)) do
first('img.js-diff-comment-avatar').click
end
@@ -122,7 +122,7 @@ feature 'Diff note avatars', js: true do
wait_for_requests
- page.within find("[id='#{position.line_code(project.repository)}']") do
+ page.within find_line(position.line_code(project.repository)) do
expect(page).not_to have_selector('img.js-diff-comment-avatar')
end
end
@@ -138,7 +138,7 @@ feature 'Diff note avatars', js: true do
wait_for_requests
end
- page.within find("[id='#{position.line_code(project.repository)}']") do
+ page.within find_line(position.line_code(project.repository)) do
find('.diff-notes-collapse').trigger('click')
expect(page).to have_selector('img.js-diff-comment-avatar', count: 2)
@@ -158,7 +158,7 @@ feature 'Diff note avatars', js: true do
end
end
- page.within find("[id='#{position.line_code(project.repository)}']") do
+ page.within find_line(position.line_code(project.repository)) do
find('.diff-notes-collapse').trigger('click')
expect(page).to have_selector('img.js-diff-comment-avatar', count: 3)
@@ -176,7 +176,7 @@ feature 'Diff note avatars', js: true do
end
it 'shows extra comment count' do
- page.within find("[id='#{position.line_code(project.repository)}']") do
+ page.within find_line(position.line_code(project.repository)) do
find('.diff-notes-collapse').click
expect(find('.diff-comments-more-count')).to have_content '+1'
@@ -185,4 +185,10 @@ feature 'Diff note avatars', js: true do
end
end
end
+
+ def find_line(line_code)
+ line = find("[id='#{line_code}']")
+ line = line.find(:xpath, 'preceding-sibling::*[1][self::td]') if line.tag_name == 'td'
+ line
+ end
end
diff --git a/spec/features/merge_requests/diff_notes_resolve_spec.rb b/spec/features/merge_requests/diff_notes_resolve_spec.rb
index fd110e68e84..637e6036384 100644
--- a/spec/features/merge_requests/diff_notes_resolve_spec.rb
+++ b/spec/features/merge_requests/diff_notes_resolve_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Diff notes resolve', js: true do
+feature 'Diff notes resolve', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request_with_diffs, source_project: project, author: user, title: "Bug NS-04") }
diff --git a/spec/features/merge_requests/diffs_spec.rb b/spec/features/merge_requests/diffs_spec.rb
index e9068f722d5..80fb7335989 100644
--- a/spec/features/merge_requests/diffs_spec.rb
+++ b/spec/features/merge_requests/diffs_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
-feature 'Diffs URL', js: true do
+feature 'Diffs URL', :js do
+ include ProjectForksHelper
+
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
@@ -42,8 +44,12 @@ feature 'Diffs URL', js: true do
visit "#{diffs_project_merge_request_path(project, merge_request)}#{fragment}"
end
- it 'shows expanded note' do
- expect(page).to have_selector(fragment, visible: true)
+ it 'shows collapsed note' do
+ wait_for_requests
+
+ expect(page).to have_selector('.discussion-notes.collapsed') do |note_container|
+ expect(note_container).to have_selector(fragment, visible: false)
+ end
end
end
end
@@ -64,7 +70,7 @@ feature 'Diffs URL', js: true do
context 'when editing file' do
let(:author_user) { create(:user) }
let(:user) { create(:user) }
- let(:forked_project) { Projects::ForkService.new(project, author_user).execute }
+ let(:forked_project) { fork_project(project, author_user, repository: true) }
let(:merge_request) { create(:merge_request_with_diffs, source_project: forked_project, target_project: project, author: author_user) }
let(:changelog_id) { Digest::SHA1.hexdigest("CHANGELOG") }
diff --git a/spec/features/merge_requests/discussion_lock_spec.rb b/spec/features/merge_requests/discussion_lock_spec.rb
new file mode 100644
index 00000000000..7bbd3b1e69e
--- /dev/null
+++ b/spec/features/merge_requests/discussion_lock_spec.rb
@@ -0,0 +1,49 @@
+require 'spec_helper'
+
+describe 'Discussion Lock', :js do
+ let(:user) { create(:user) }
+ let(:merge_request) { create(:merge_request, source_project: project, author: user) }
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when the discussion is locked' do
+ before do
+ merge_request.update_attribute(:discussion_locked, true)
+ end
+
+ context 'when a user is a team member' do
+ before do
+ project.add_developer(user)
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'the user can create a comment' do
+ page.within('.issuable-discussion #notes .js-main-target-form') do
+ fill_in 'note[note]', with: 'Some new comment'
+ click_button 'Comment'
+ end
+
+ wait_for_requests
+
+ expect(find('.issuable-discussion #notes')).to have_content('Some new comment')
+ end
+ end
+
+ context 'when a user is not a team member' do
+ before do
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'the user can not create a comment' do
+ page.within('.issuable-discussion #notes') do
+ expect(page).not_to have_selector('.js-main-target-form')
+ expect(page.find('.disabled-comment'))
+ .to have_content('This merge request is locked. Only project members can comment.')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/merge_requests/edit_mr_spec.rb b/spec/features/merge_requests/edit_mr_spec.rb
index 7386e78fb13..4538555c168 100644
--- a/spec/features/merge_requests/edit_mr_spec.rb
+++ b/spec/features/merge_requests/edit_mr_spec.rb
@@ -29,7 +29,7 @@ feature 'Edit Merge Request' do
expect(page).to have_content 'Someone edited the merge request the same time you did'
end
- it 'allows to unselect "Remove source branch"', js: true do
+ it 'allows to unselect "Remove source branch"', :js do
merge_request.update(merge_params: { 'force_remove_source_branch' => '1' })
expect(merge_request.merge_params['force_remove_source_branch']).to be_truthy
@@ -42,7 +42,7 @@ feature 'Edit Merge Request' do
expect(page).to have_content 'Remove source branch'
end
- it 'should preserve description textarea height', js: true do
+ it 'should preserve description textarea height', :js do
long_description = %q(
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam ac ornare ligula, ut tempus arcu. Etiam ultricies accumsan dolor vitae faucibus. Donec at elit lacus. Mauris orci ante, aliquam quis lorem eget, convallis faucibus arcu. Aenean at pulvinar lacus. Ut viverra quam massa, molestie ornare tortor dignissim a. Suspendisse tristique pellentesque tellus, id lacinia metus elementum id. Nam tristique, arcu rhoncus faucibus viverra, lacus ipsum sagittis ligula, vitae convallis odio lacus a nibh. Ut tincidunt est purus, ac vestibulum augue maximus in. Suspendisse vel erat et mi ultricies semper. Pellentesque volutpat pellentesque consequat.
diff --git a/spec/features/merge_requests/filter_by_milestone_spec.rb b/spec/features/merge_requests/filter_by_milestone_spec.rb
index 166c02a7a7f..8b9ff9be943 100644
--- a/spec/features/merge_requests/filter_by_milestone_spec.rb
+++ b/spec/features/merge_requests/filter_by_milestone_spec.rb
@@ -18,7 +18,7 @@ feature 'Merge Request filtering by Milestone' do
sign_in(user)
end
- scenario 'filters by no Milestone', js: true do
+ scenario 'filters by no Milestone', :js do
create(:merge_request, :with_diffs, source_project: project)
create(:merge_request, :simple, source_project: project, milestone: milestone)
@@ -32,7 +32,7 @@ feature 'Merge Request filtering by Milestone' do
expect(page).to have_css('.merge-request', count: 1)
end
- context 'filters by upcoming milestone', js: true do
+ context 'filters by upcoming milestone', :js do
it 'does not show merge requests with no expiry' do
create(:merge_request, :with_diffs, source_project: project)
create(:merge_request, :simple, source_project: project, milestone: milestone)
@@ -67,7 +67,7 @@ feature 'Merge Request filtering by Milestone' do
end
end
- scenario 'filters by a specific Milestone', js: true do
+ scenario 'filters by a specific Milestone', :js do
create(:merge_request, :with_diffs, source_project: project, milestone: milestone)
create(:merge_request, :simple, source_project: project)
@@ -83,7 +83,7 @@ feature 'Merge Request filtering by Milestone' do
milestone.update(name: "rock 'n' roll")
end
- scenario 'filters by a specific Milestone', js: true do
+ scenario 'filters by a specific Milestone', :js do
create(:merge_request, :with_diffs, source_project: project, milestone: milestone)
create(:merge_request, :simple, source_project: project)
diff --git a/spec/features/merge_requests/filter_merge_requests_spec.rb b/spec/features/merge_requests/filter_merge_requests_spec.rb
index b51ae0890e4..aac295ab940 100644
--- a/spec/features/merge_requests/filter_merge_requests_spec.rb
+++ b/spec/features/merge_requests/filter_merge_requests_spec.rb
@@ -36,7 +36,7 @@ describe 'Filter merge requests' do
expect_mr_list_count(0)
end
- context 'assignee', js: true do
+ context 'assignee', :js do
it 'updates to current user' do
expect_assignee_visual_tokens()
end
@@ -69,7 +69,7 @@ describe 'Filter merge requests' do
expect_mr_list_count(0)
end
- context 'milestone', js: true do
+ context 'milestone', :js do
it 'updates to current milestone' do
expect_milestone_visual_tokens()
end
@@ -88,7 +88,7 @@ describe 'Filter merge requests' do
end
end
- describe 'for label from mr#index', js: true do
+ describe 'for label from mr#index', :js do
it 'filters by no label' do
input_filtered_search('label:none')
@@ -137,7 +137,7 @@ describe 'Filter merge requests' do
expect_mr_list_count(0)
end
- context 'assignee and label', js: true do
+ context 'assignee and label', :js do
def expect_assignee_label_visual_tokens
wait_for_requests
@@ -183,7 +183,7 @@ describe 'Filter merge requests' do
visit project_merge_requests_path(project)
end
- context 'only text', js: true do
+ context 'only text', :js do
it 'filters merge requests by searched text' do
input_filtered_search('bug')
@@ -199,7 +199,7 @@ describe 'Filter merge requests' do
end
end
- context 'filters and searches', js: true do
+ context 'filters and searches', :js do
it 'filters by text and label' do
input_filtered_search('Bug')
@@ -277,9 +277,9 @@ describe 'Filter merge requests' do
expect_mr_list_count(2)
- click_button 'Last created'
+ click_button 'Created date'
page.within '.dropdown-menu-sort' do
- click_link 'Oldest created'
+ click_link 'Priority'
end
wait_for_requests
@@ -289,7 +289,7 @@ describe 'Filter merge requests' do
end
end
- describe 'filter by assignee id', js: true do
+ describe 'filter by assignee id', :js do
it 'filter by current user' do
visit project_merge_requests_path(project, assignee_id: user.id)
@@ -312,7 +312,7 @@ describe 'Filter merge requests' do
end
end
- describe 'filter by author id', js: true do
+ describe 'filter by author id', :js do
it 'filter by current user' do
visit project_merge_requests_path(project, author_id: user.id)
diff --git a/spec/features/merge_requests/form_spec.rb b/spec/features/merge_requests/form_spec.rb
index de98b147d04..758fc9b139d 100644
--- a/spec/features/merge_requests/form_spec.rb
+++ b/spec/features/merge_requests/form_spec.rb
@@ -1,8 +1,10 @@
require 'rails_helper'
describe 'New/edit merge request', :js do
+ include ProjectForksHelper
+
let!(:project) { create(:project, :public, :repository) }
- let(:fork_project) { create(:project, :repository, forked_from_project: project) }
+ let(:forked_project) { fork_project(project, nil, repository: true) }
let!(:user) { create(:user) }
let!(:user2) { create(:user) }
let!(:milestone) { create(:milestone, project: project) }
@@ -170,16 +172,16 @@ describe 'New/edit merge request', :js do
context 'forked project' do
before do
- fork_project.team << [user, :master]
+ forked_project.team << [user, :master]
sign_in(user)
end
context 'new merge request' do
before do
visit project_new_merge_request_path(
- fork_project,
+ forked_project,
merge_request: {
- source_project_id: fork_project.id,
+ source_project_id: forked_project.id,
target_project_id: project.id,
source_branch: 'fix',
target_branch: 'master'
@@ -238,7 +240,7 @@ describe 'New/edit merge request', :js do
context 'edit merge request' do
before do
merge_request = create(:merge_request,
- source_project: fork_project,
+ source_project: forked_project,
target_project: project,
source_branch: 'fix',
target_branch: 'master'
diff --git a/spec/features/merge_requests/image_diff_notes.rb b/spec/features/merge_requests/image_diff_notes.rb
new file mode 100644
index 00000000000..3c53b51e330
--- /dev/null
+++ b/spec/features/merge_requests/image_diff_notes.rb
@@ -0,0 +1,196 @@
+require 'spec_helper'
+
+feature 'image diff notes', :js do
+ include NoteInteractionHelpers
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ project.team << [user, :master]
+ sign_in user
+
+ page.driver.set_cookie('sidebar_collapsed', 'true')
+
+ # Stub helper to return any blob file as image from public app folder.
+ # This is necessary to run these specs since we don't display repo images in capybara.
+ allow_any_instance_of(DiffHelper).to receive(:diff_file_blob_raw_path).and_return('/apple-touch-icon.png')
+ end
+
+ context 'create commit diff notes' do
+ commit_id = '2f63565e7aac07bcdadb654e253078b727143ec4'
+
+ describe 'create a new diff note' do
+ before do
+ visit project_commit_path(project, commit_id)
+ create_image_diff_note
+ end
+
+ it 'shows indicator badge on image diff' do
+ indicator = find('.js-image-badge')
+
+ expect(indicator).to have_content('1')
+ end
+
+ it 'shows the avatar badge on the new note' do
+ badge = find('.image-diff-avatar-link .badge')
+
+ expect(badge).to have_content('1')
+ end
+
+ it 'allows collapsing/expanding the discussion notes' do
+ find('.js-diff-notes-toggle', :first).click
+
+ expect(page).not_to have_content('image diff test comment')
+
+ find('.js-diff-notes-toggle').click
+
+ expect(page).to have_content('image diff test comment')
+ end
+ end
+
+ describe 'render commit diff notes' do
+ let(:path) { "files/images/6049019_460s.jpg" }
+ let(:commit) { project.commit('2f63565e7aac07bcdadb654e253078b727143ec4') }
+
+ let(:note1_position) do
+ Gitlab::Diff::Position.new(
+ old_path: path,
+ new_path: path,
+ width: 100,
+ height: 100,
+ x: 10,
+ y: 10,
+ position_type: "image",
+ diff_refs: commit.diff_refs
+ )
+ end
+
+ let(:note2_position) do
+ Gitlab::Diff::Position.new(
+ old_path: path,
+ new_path: path,
+ width: 100,
+ height: 100,
+ x: 20,
+ y: 20,
+ position_type: "image",
+ diff_refs: commit.diff_refs
+ )
+ end
+
+ let!(:note1) { create(:diff_note_on_commit, commit_id: commit.id, project: project, position: note1_position, note: 'my note 1') }
+ let!(:note2) { create(:diff_note_on_commit, commit_id: commit.id, project: project, position: note2_position, note: 'my note 2') }
+
+ before do
+ visit project_commit_path(project, commit.id)
+ wait_for_requests
+ end
+
+ it 'renders diff indicators within the image diff frame' do
+ expect(page).to have_css('.js-image-badge', count: 2)
+ end
+
+ it 'shows the diff notes' do
+ expect(page).to have_css('.diff-content .note', count: 2)
+ end
+
+ it 'shows the diff notes with correct avatar badge numbers' do
+ expect(page).to have_css('.image-diff-avatar-link', text: 1)
+ expect(page).to have_css('.image-diff-avatar-link', text: 2)
+ end
+ end
+ end
+
+ %w(inline parallel).each do |view|
+ context "#{view} view" do
+ let(:merge_request) { create(:merge_request_with_diffs, :with_image_diffs, source_project: project, author: user) }
+ let(:path) { "files/images/ee_repo_logo.png" }
+
+ let(:position) do
+ Gitlab::Diff::Position.new(
+ old_path: path,
+ new_path: path,
+ width: 100,
+ height: 100,
+ x: 1,
+ y: 1,
+ position_type: "image",
+ diff_refs: merge_request.diff_refs
+ )
+ end
+
+ let!(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: position) }
+
+ describe 'creating a new diff note' do
+ before do
+ visit diffs_project_merge_request_path(project, merge_request, view: view)
+ create_image_diff_note
+ end
+
+ it 'shows indicator badge on image diff' do
+ indicator = find('.js-image-badge', match: :first)
+
+ expect(indicator).to have_content('1')
+ end
+
+ it 'shows the avatar badge on the new note' do
+ badge = find('.image-diff-avatar-link .badge', match: :first)
+
+ expect(badge).to have_content('1')
+ end
+
+ it 'allows expanding/collapsing the discussion notes' do
+ page.all('.js-diff-notes-toggle')[0].trigger('click')
+ page.all('.js-diff-notes-toggle')[1].trigger('click')
+
+ expect(page).not_to have_content('image diff test comment')
+
+ page.all('.js-diff-notes-toggle')[0].trigger('click')
+ page.all('.js-diff-notes-toggle')[1].trigger('click')
+
+ expect(page).to have_content('image diff test comment')
+ end
+ end
+ end
+ end
+
+ describe 'discussion tab polling', :js do
+ let(:merge_request) { create(:merge_request_with_diffs, :with_image_diffs, source_project: project, author: user) }
+ let(:path) { "files/images/ee_repo_logo.png" }
+
+ let(:position) do
+ Gitlab::Diff::Position.new(
+ old_path: path,
+ new_path: path,
+ width: 100,
+ height: 100,
+ x: 50,
+ y: 50,
+ position_type: "image",
+ diff_refs: merge_request.diff_refs
+ )
+ end
+
+ before do
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'renders diff indicators within the image frame' do
+ diff_note = create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: position)
+
+ wait_for_requests
+
+ expect(page).to have_selector('.image-comment-badge')
+ expect(page).to have_content(diff_note.note)
+ end
+ end
+end
+
+def create_image_diff_note
+ find('.js-add-image-diff-note-button', match: :first).click
+ page.all('.js-add-image-diff-note-button')[0].trigger('click')
+ find('.diff-content .note-textarea').native.send_keys('image diff test comment')
+ click_button 'Comment'
+ wait_for_requests
+end
diff --git a/spec/features/merge_requests/merge_commit_message_toggle_spec.rb b/spec/features/merge_requests/merge_commit_message_toggle_spec.rb
index 08a3bb84aac..82b2b56ef80 100644
--- a/spec/features/merge_requests/merge_commit_message_toggle_spec.rb
+++ b/spec/features/merge_requests/merge_commit_message_toggle_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Clicking toggle commit message link', js: true do
+feature 'Clicking toggle commit message link', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let(:issue_1) { create(:issue, project: project)}
diff --git a/spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb b/spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb
index 59e67420333..91f207bd339 100644
--- a/spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb
+++ b/spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Only allow merge requests to be merged if the pipeline succeeds', js: true do
+feature 'Only allow merge requests to be merged if the pipeline succeeds', :js do
let(:merge_request) { create(:merge_request_with_diffs) }
let(:project) { merge_request.target_project }
@@ -10,7 +10,7 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', js: t
project.team << [merge_request.author, :master]
end
- context 'project does not have CI enabled', js: true do
+ context 'project does not have CI enabled', :js do
it 'allows MR to be merged' do
visit_merge_request(merge_request)
@@ -20,7 +20,7 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', js: t
end
end
- context 'when project has CI enabled', js: true do
+ context 'when project has CI enabled', :js do
given!(:pipeline) do
create(:ci_empty_pipeline,
project: project,
diff --git a/spec/features/merge_requests/pipelines_spec.rb b/spec/features/merge_requests/pipelines_spec.rb
index 347ce788b36..a3fcc27cab0 100644
--- a/spec/features/merge_requests/pipelines_spec.rb
+++ b/spec/features/merge_requests/pipelines_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Pipelines for Merge Requests', js: true do
+feature 'Pipelines for Merge Requests', :js do
describe 'pipeline tab' do
given(:user) { create(:user) }
given(:merge_request) { create(:merge_request) }
diff --git a/spec/features/merge_requests/resolve_outdated_diff_discussions.rb b/spec/features/merge_requests/resolve_outdated_diff_discussions.rb
index 55a82bdf2b9..25abbb469ab 100644
--- a/spec/features/merge_requests/resolve_outdated_diff_discussions.rb
+++ b/spec/features/merge_requests/resolve_outdated_diff_discussions.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Resolve outdated diff discussions', js: true do
+feature 'Resolve outdated diff discussions', :js do
let(:project) { create(:project, :repository, :public) }
let(:merge_request) do
diff --git a/spec/features/merge_requests/target_branch_spec.rb b/spec/features/merge_requests/target_branch_spec.rb
index 9bbf2610bcb..bce36e05e57 100644
--- a/spec/features/merge_requests/target_branch_spec.rb
+++ b/spec/features/merge_requests/target_branch_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Target branch', js: true do
+describe 'Target branch', :js do
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.project }
diff --git a/spec/features/merge_requests/toggle_whitespace_changes_spec.rb b/spec/features/merge_requests/toggle_whitespace_changes_spec.rb
index dd989fd49b2..fa3d988b27a 100644
--- a/spec/features/merge_requests/toggle_whitespace_changes_spec.rb
+++ b/spec/features/merge_requests/toggle_whitespace_changes_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Toggle Whitespace Changes', js: true do
+feature 'Toggle Whitespace Changes', :js do
before do
sign_in(create(:admin))
merge_request = create(:merge_request)
diff --git a/spec/features/merge_requests/toggler_behavior_spec.rb b/spec/features/merge_requests/toggler_behavior_spec.rb
index 4e5ec9fbd2d..cd92ad22267 100644
--- a/spec/features/merge_requests/toggler_behavior_spec.rb
+++ b/spec/features/merge_requests/toggler_behavior_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'toggler_behavior', js: true do
+feature 'toggler_behavior', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project, author: user) }
diff --git a/spec/features/merge_requests/update_merge_requests_spec.rb b/spec/features/merge_requests/update_merge_requests_spec.rb
index e6dc284cba7..1a41fd36a4f 100644
--- a/spec/features/merge_requests/update_merge_requests_spec.rb
+++ b/spec/features/merge_requests/update_merge_requests_spec.rb
@@ -10,7 +10,7 @@ feature 'Multiple merge requests updating from merge_requests#index' do
sign_in(user)
end
- context 'status', js: true do
+ context 'status', :js do
describe 'close merge request' do
before do
visit project_merge_requests_path(project)
@@ -37,7 +37,7 @@ feature 'Multiple merge requests updating from merge_requests#index' do
end
end
- context 'assignee', js: true do
+ context 'assignee', :js do
describe 'set assignee' do
before do
visit project_merge_requests_path(project)
@@ -67,7 +67,7 @@ feature 'Multiple merge requests updating from merge_requests#index' do
end
end
- context 'milestone', js: true do
+ context 'milestone', :js do
let(:milestone) { create(:milestone, project: project) }
describe 'set milestone' do
@@ -127,7 +127,7 @@ feature 'Multiple merge requests updating from merge_requests#index' do
end
def click_update_merge_requests_button
- find('.update-selected-issues').click
+ find('.update-selected-issues').trigger('click')
wait_for_requests
end
end
diff --git a/spec/features/merge_requests/user_lists_merge_requests_spec.rb b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
index 20008b4e7f9..416a0f78a45 100644
--- a/spec/features/merge_requests/user_lists_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
@@ -52,21 +52,13 @@ describe 'Projects > Merge requests > User lists merge requests' do
end
it 'sorts by newest' do
- visit_merge_requests(project, sort: sort_value_recently_created)
+ visit_merge_requests(project, sort: sort_value_created_date)
expect(first_merge_request).to include('fix')
expect(last_merge_request).to include('merge-test')
expect(count_merge_requests).to eq(3)
end
- it 'sorts by oldest' do
- visit_merge_requests(project, sort: sort_value_oldest_created)
-
- expect(first_merge_request).to include('merge-test')
- expect(last_merge_request).to include('fix')
- expect(count_merge_requests).to eq(3)
- end
-
it 'sorts by last updated' do
visit_merge_requests(project, sort: sort_value_recently_updated)
@@ -74,33 +66,19 @@ describe 'Projects > Merge requests > User lists merge requests' do
expect(count_merge_requests).to eq(3)
end
- it 'sorts by oldest updated' do
- visit_merge_requests(project, sort: sort_value_oldest_updated)
-
- expect(first_merge_request).to include('markdown')
- expect(count_merge_requests).to eq(3)
- end
-
- it 'sorts by milestone due soon' do
- visit_merge_requests(project, sort: sort_value_milestone_soon)
+ it 'sorts by milestone' do
+ visit_merge_requests(project, sort: sort_value_milestone)
expect(first_merge_request).to include('fix')
expect(count_merge_requests).to eq(3)
end
- it 'sorts by milestone due later' do
- visit_merge_requests(project, sort: sort_value_milestone_later)
-
- expect(first_merge_request).to include('markdown')
- expect(count_merge_requests).to eq(3)
- end
-
- it 'filters on one label and sorts by due soon' do
+ it 'filters on one label and sorts by due date' do
label = create(:label, project: project)
create(:label_link, label: label, target: @fix)
visit_merge_requests(project, label_name: [label.name],
- sort: sort_value_due_date_soon)
+ sort: sort_value_due_date)
expect(first_merge_request).to include('fix')
expect(count_merge_requests).to eq(1)
@@ -115,9 +93,9 @@ describe 'Projects > Merge requests > User lists merge requests' do
create(:label_link, label: label2, target: @fix)
end
- it 'sorts by due soon' do
+ it 'sorts by due date' do
visit_merge_requests(project, label_name: [label.name, label2.name],
- sort: sort_value_due_date_soon)
+ sort: sort_value_due_date)
expect(first_merge_request).to include('fix')
expect(count_merge_requests).to eq(1)
@@ -127,7 +105,7 @@ describe 'Projects > Merge requests > User lists merge requests' do
it 'sorts by due soon' do
visit_merge_requests(project, label_name: [label.name, label2.name],
assignee_id: user.id,
- sort: sort_value_due_date_soon)
+ sort: sort_value_due_date)
expect(first_merge_request).to include('fix')
expect(count_merge_requests).to eq(1)
@@ -137,7 +115,7 @@ describe 'Projects > Merge requests > User lists merge requests' do
visit project_merge_requests_path(project,
label_name: [label.name, label2.name],
assignee_id: user.id,
- sort: sort_value_milestone_soon)
+ sort: sort_value_milestone)
expect(first_merge_request).to include('fix')
end
diff --git a/spec/features/merge_requests/user_posts_diff_notes_spec.rb b/spec/features/merge_requests/user_posts_diff_notes_spec.rb
index 442ce14eb7e..7a773fb2baa 100644
--- a/spec/features/merge_requests/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_requests/user_posts_diff_notes_spec.rb
@@ -1,12 +1,14 @@
require 'spec_helper'
feature 'Merge requests > User posts diff notes', :js do
+ include MergeRequestDiffHelpers
+
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.source_project }
before do
- allow_any_instance_of(ApplicationHelper).to receive(:collapsed_sidebar?).and_return(true)
+ page.driver.set_cookie('sidebar_collapsed', 'true')
project.add_developer(user)
sign_in(user)
@@ -225,6 +227,7 @@ feature 'Merge requests > User posts diff notes', :js do
write_comment_on_line(line_holder, diff_side)
click_button 'Comment'
+
wait_for_requests
assert_comment_persistence(line_holder, asset_form_reset: asset_form_reset)
@@ -244,36 +247,6 @@ feature 'Merge requests > User posts diff notes', :js do
expect(line[:num]).not_to have_css comment_button_class
end
- def get_line_components(line_holder, diff_side = nil)
- if diff_side.nil?
- get_inline_line_components(line_holder)
- else
- get_parallel_line_components(line_holder, diff_side)
- end
- end
-
- def get_inline_line_components(line_holder)
- { content: line_holder.find('.line_content', match: :first), num: line_holder.find('.diff-line-num', match: :first) }
- end
-
- def get_parallel_line_components(line_holder, diff_side = nil)
- side_index = diff_side == 'left' ? 0 : 1
- # Wait for `.line_content`
- line_holder.find('.line_content', match: :first)
- # Wait for `.diff-line-num`
- line_holder.find('.diff-line-num', match: :first)
- { content: line_holder.all('.line_content')[side_index], num: line_holder.all('.diff-line-num')[side_index] }
- end
-
- def click_diff_line(line_holder, diff_side = nil)
- line = get_line_components(line_holder, diff_side)
- line[:content].hover
-
- expect(line[:num]).to have_css comment_button_class
-
- line[:num].find(comment_button_class).trigger 'click'
- end
-
def write_comment_on_line(line_holder, diff_side)
click_diff_line(line_holder, diff_side)
diff --git a/spec/features/merge_requests/user_uses_slash_commands_spec.rb b/spec/features/merge_requests/user_uses_slash_commands_spec.rb
index 95c50df1896..ee0766f1192 100644
--- a/spec/features/merge_requests/user_uses_slash_commands_spec.rb
+++ b/spec/features/merge_requests/user_uses_slash_commands_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-feature 'Merge Requests > User uses quick actions', js: true do
+feature 'Merge Requests > User uses quick actions', :js do
include QuickActionsHelpers
it_behaves_like 'issuable record that supports quick actions in its description and notes', :merge_request do
diff --git a/spec/features/merge_requests/versions_spec.rb b/spec/features/merge_requests/versions_spec.rb
index 8e231fbc281..50f7d721ff3 100644
--- a/spec/features/merge_requests/versions_spec.rb
+++ b/spec/features/merge_requests/versions_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Merge Request versions', js: true do
+feature 'Merge Request versions', :js do
let(:merge_request) { create(:merge_request, importing: true) }
let(:project) { merge_request.source_project }
let!(:merge_request_diff1) { merge_request.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
diff --git a/spec/features/merge_requests/widget_deployments_spec.rb b/spec/features/merge_requests/widget_deployments_spec.rb
index c0221525c9f..5658c2c5122 100644
--- a/spec/features/merge_requests/widget_deployments_spec.rb
+++ b/spec/features/merge_requests/widget_deployments_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Widget Deployments Header', js: true do
+feature 'Widget Deployments Header', :js do
describe 'when deployed to an environment' do
given(:user) { create(:user) }
given(:project) { merge_request.target_project }
diff --git a/spec/features/merge_requests/widget_spec.rb b/spec/features/merge_requests/widget_spec.rb
index fd991293ee9..2bad3b02250 100644
--- a/spec/features/merge_requests/widget_spec.rb
+++ b/spec/features/merge_requests/widget_spec.rb
@@ -3,10 +3,13 @@ require 'rails_helper'
describe 'Merge request', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
+ let(:project_only_mwps) { create(:project, :repository, only_allow_merge_if_pipeline_succeeds: true) }
let(:merge_request) { create(:merge_request, source_project: project) }
+ let(:merge_request_in_only_mwps_project) { create(:merge_request, source_project: project_only_mwps) }
before do
- project.team << [user, :master]
+ project.add_master(user)
+ project_only_mwps.add_master(user)
sign_in(user)
end
@@ -142,6 +145,38 @@ describe 'Merge request', :js do
end
end
+ context 'view merge request where project has CI setup but no CI status' do
+ before do
+ pipeline = create(:ci_pipeline, project: project,
+ sha: merge_request.diff_head_sha,
+ ref: merge_request.source_branch)
+ create(:ci_build, pipeline: pipeline)
+
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'has pipeline error text' do
+ # Wait for the `ci_status` and `merge_check` requests
+ wait_for_requests
+
+ expect(page).to have_text('Could not connect to the CI server. Please check your settings and try again')
+ end
+ end
+
+ context 'view merge request in project with only-mwps setting enabled but no CI is setup' do
+ before do
+ visit project_merge_request_path(project_only_mwps, merge_request_in_only_mwps_project)
+ end
+
+ it 'should be allowed to merge' do
+ # Wait for the `ci_status` and `merge_check` requests
+ wait_for_requests
+
+ expect(page).to have_selector('.accept-merge-request')
+ expect(find('.accept-merge-request')['disabled']).not_to be(true)
+ end
+ end
+
context 'view merge request with MWPS enabled but automatically merge fails' do
before do
merge_request.update(
@@ -184,6 +219,28 @@ describe 'Merge request', :js do
end
end
+ context 'view merge request where fast-forward merge is not possible' do
+ before do
+ project.update(merge_requests_ff_only_enabled: true)
+
+ merge_request.update(
+ merge_user: merge_request.author,
+ merge_status: :cannot_be_merged
+ )
+
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'shows information about the merge error' do
+ # Wait for the `ci_status` and `merge_check` requests
+ wait_for_requests
+
+ page.within('.mr-widget-body') do
+ expect(page).to have_content('Fast-forward merge is not possible')
+ end
+ end
+ end
+
context 'merge error' do
before do
allow_any_instance_of(Repository).to receive(:merge).and_return(false)
@@ -199,7 +256,7 @@ describe 'Merge request', :js do
end
end
- context 'user can merge into source project but cannot push to fork', js: true do
+ context 'user can merge into source project but cannot push to fork', :js do
let(:fork_project) { create(:project, :public, :repository) }
let(:user2) { create(:user) }
diff --git a/spec/features/milestones/show_spec.rb b/spec/features/milestones/show_spec.rb
index 624f13922ed..50c5e0bb65f 100644
--- a/spec/features/milestones/show_spec.rb
+++ b/spec/features/milestones/show_spec.rb
@@ -18,9 +18,9 @@ describe 'Milestone show' do
it 'avoids N+1 database queries' do
create(:labeled_issue, issue_params)
- control_count = ActiveRecord::QueryRecorder.new { visit_milestone }.count
+ control = ActiveRecord::QueryRecorder.new { visit_milestone }
create_list(:labeled_issue, 10, issue_params)
- expect { visit_milestone }.not_to exceed_query_limit(control_count)
+ expect { visit_milestone }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/features/profile_spec.rb b/spec/features/profile_spec.rb
index f183dd8cb75..1cddd35fd8a 100644
--- a/spec/features/profile_spec.rb
+++ b/spec/features/profile_spec.rb
@@ -12,11 +12,47 @@ describe 'Profile account page' do
visit profile_account_path
end
- it { expect(page).to have_content('Remove account') }
+ it { expect(page).to have_content('Delete account') }
- it 'deletes the account' do
- expect { click_link 'Delete account' }.to change { User.where(id: user.id).count }.by(-1)
- expect(current_path).to eq(new_user_session_path)
+ it 'does not immediately delete the account' do
+ click_button 'Delete account'
+
+ expect(User.exists?(user.id)).to be_truthy
+ end
+
+ it 'deletes user', :js do
+ click_button 'Delete account'
+
+ fill_in 'password', with: '12345678'
+
+ page.within '.popup-dialog' do
+ click_button 'Delete account'
+ end
+
+ expect(page).to have_content('Account scheduled for removal')
+ expect(User.exists?(user.id)).to be_falsy
+ end
+
+ it 'shows invalid password flash message', :js do
+ click_button 'Delete account'
+
+ fill_in 'password', with: 'testing123'
+
+ page.within '.popup-dialog' do
+ click_button 'Delete account'
+ end
+
+ expect(page).to have_content('Invalid password')
+ end
+
+ it 'does not show delete button when user owns a group' do
+ group = create(:group)
+ group.add_owner(user)
+
+ visit profile_account_path
+
+ expect(page).not_to have_button('Delete account')
+ expect(page).to have_content("Your account is currently an owner in these groups: #{group.name}")
end
end
diff --git a/spec/features/profiles/emails_spec.rb b/spec/features/profiles/emails_spec.rb
new file mode 100644
index 00000000000..11cc8aae6f3
--- /dev/null
+++ b/spec/features/profiles/emails_spec.rb
@@ -0,0 +1,71 @@
+require 'rails_helper'
+
+feature 'Profile > Emails' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'User adds an email' do
+ before do
+ visit profile_emails_path
+ end
+
+ scenario 'saves the new email' do
+ fill_in('Email', with: 'my@email.com')
+ click_button('Add email address')
+
+ expect(page).to have_content('my@email.com Unverified')
+ expect(page).to have_content("#{user.email} Verified")
+ expect(page).to have_content('Resend confirmation email')
+ end
+
+ scenario 'does not add a duplicate email' do
+ fill_in('Email', with: user.email)
+ click_button('Add email address')
+
+ email = user.emails.find_by(email: user.email)
+ expect(email).to be_nil
+ expect(page).to have_content('Email has already been taken')
+ end
+ end
+
+ scenario 'User removes email' do
+ user.emails.create(email: 'my@email.com')
+ visit profile_emails_path
+ expect(page).to have_content("my@email.com")
+
+ click_link('Remove')
+ expect(page).not_to have_content("my@email.com")
+ end
+
+ scenario 'User confirms email' do
+ email = user.emails.create(email: 'my@email.com')
+ visit profile_emails_path
+ expect(page).to have_content("#{email.email} Unverified")
+
+ email.confirm
+ expect(email.confirmed?).to be_truthy
+
+ visit profile_emails_path
+ expect(page).to have_content("#{email.email} Verified")
+ end
+
+ scenario 'User re-sends confirmation email' do
+ email = user.emails.create(email: 'my@email.com')
+ visit profile_emails_path
+
+ expect { click_link("Resend confirmation email") }.to change { ActionMailer::Base.deliveries.size }
+ expect(page).to have_content("Confirmation email sent to #{email.email}")
+ end
+
+ scenario 'old unconfirmed emails show Send Confirmation button' do
+ email = user.emails.create(email: 'my@email.com')
+ email.update_attribute(:confirmation_sent_at, nil)
+ visit profile_emails_path
+
+ expect(page).not_to have_content('Resend confirmation email')
+ expect(page).to have_content('Send confirmation email')
+ end
+end
diff --git a/spec/features/profiles/gpg_keys_spec.rb b/spec/features/profiles/gpg_keys_spec.rb
index 623e4f341c5..59233e92f93 100644
--- a/spec/features/profiles/gpg_keys_spec.rb
+++ b/spec/features/profiles/gpg_keys_spec.rb
@@ -4,7 +4,7 @@ feature 'Profile > GPG Keys' do
let(:user) { create(:user, email: GpgHelpers::User2.emails.first) }
before do
- login_as(user)
+ sign_in(user)
end
describe 'User adds a key' do
@@ -20,6 +20,18 @@ feature 'Profile > GPG Keys' do
expect(page).to have_content('bette.cartwright@example.net Unverified')
expect(page).to have_content(GpgHelpers::User2.fingerprint)
end
+
+ scenario 'with multiple subkeys' do
+ fill_in('Key', with: GpgHelpers::User3.public_key)
+ click_button('Add key')
+
+ expect(page).to have_content('john.doe@example.com Unverified')
+ expect(page).to have_content(GpgHelpers::User3.fingerprint)
+
+ GpgHelpers::User3.subkey_fingerprints.each do |fingerprint|
+ expect(page).to have_content(fingerprint)
+ end
+ end
end
scenario 'User sees their key' do
diff --git a/spec/features/profiles/keys_spec.rb b/spec/features/profiles/keys_spec.rb
index aa71c4dbba4..7d5ba3a7328 100644
--- a/spec/features/profiles/keys_spec.rb
+++ b/spec/features/profiles/keys_spec.rb
@@ -12,7 +12,7 @@ feature 'Profile > SSH Keys' do
visit profile_keys_path
end
- scenario 'auto-populates the title', js: true do
+ scenario 'auto-populates the title', :js do
fill_in('Key', with: attributes_for(:key).fetch(:key))
expect(page).to have_field("Title", with: "dummy@gitlab.com")
diff --git a/spec/features/profiles/oauth_applications_spec.rb b/spec/features/profiles/oauth_applications_spec.rb
index 45f78444362..8cb240077eb 100644
--- a/spec/features/profiles/oauth_applications_spec.rb
+++ b/spec/features/profiles/oauth_applications_spec.rb
@@ -7,7 +7,7 @@ describe 'Profile > Applications' do
sign_in(user)
end
- describe 'User manages applications', js: true do
+ describe 'User manages applications', :js do
it 'deletes an application' do
create(:oauth_application, owner: user)
visit oauth_applications_path
diff --git a/spec/features/profiles/personal_access_tokens_spec.rb b/spec/features/profiles/personal_access_tokens_spec.rb
index f3124bbf29e..a572160dae9 100644
--- a/spec/features/profiles/personal_access_tokens_spec.rb
+++ b/spec/features/profiles/personal_access_tokens_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Profile > Personal Access Tokens', js: true do
+describe 'Profile > Personal Access Tokens', :js do
let(:user) { create(:user) }
def active_personal_access_tokens
diff --git a/spec/features/profiles/user_changes_notified_of_own_activity_spec.rb b/spec/features/profiles/user_changes_notified_of_own_activity_spec.rb
index 6a4173d43e1..d5fe5bdffc5 100644
--- a/spec/features/profiles/user_changes_notified_of_own_activity_spec.rb
+++ b/spec/features/profiles/user_changes_notified_of_own_activity_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Profile > Notifications > User changes notified_of_own_activity setting', js: true do
+feature 'Profile > Notifications > User changes notified_of_own_activity setting', :js do
let(:user) { create(:user) }
before do
diff --git a/spec/features/profiles/user_manages_emails_spec.rb b/spec/features/profiles/user_manages_emails_spec.rb
new file mode 100644
index 00000000000..7283c76eb54
--- /dev/null
+++ b/spec/features/profiles/user_manages_emails_spec.rb
@@ -0,0 +1,78 @@
+require 'spec_helper'
+
+describe 'User manages emails' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+
+ visit(profile_emails_path)
+ end
+
+ it "shows user's emails" do
+ expect(page).to have_content(user.email)
+
+ user.emails.each do |email|
+ expect(page).to have_content(email.email)
+ end
+ end
+
+ it 'adds an email' do
+ fill_in('email_email', with: 'my@email.com')
+ click_button('Add')
+
+ email = user.emails.find_by(email: 'my@email.com')
+
+ expect(email).not_to be_nil
+ expect(page).to have_content('my@email.com')
+ expect(page).to have_content(user.email)
+
+ user.emails.each do |email|
+ expect(page).to have_content(email.email)
+ end
+ end
+
+ it 'does not add a duplicate email' do
+ fill_in('email_email', with: user.email)
+ click_button('Add')
+
+ email = user.emails.find_by(email: user.email)
+
+ expect(email).to be_nil
+ expect(page).to have_content(user.email)
+
+ user.emails.each do |email|
+ expect(page).to have_content(email.email)
+ end
+ end
+
+ it 'removes an email' do
+ fill_in('email_email', with: 'my@email.com')
+ click_button('Add')
+
+ email = user.emails.find_by(email: 'my@email.com')
+
+ expect(email).not_to be_nil
+ expect(page).to have_content('my@email.com')
+ expect(page).to have_content(user.email)
+
+ user.emails.each do |email|
+ expect(page).to have_content(email.email)
+ end
+
+ # There should be only one remove button at this time
+ click_link('Remove')
+
+ # Force these to reload as they have been cached
+ user.emails.reload
+ email = user.emails.find_by(email: 'my@email.com')
+
+ expect(email).to be_nil
+ expect(page).not_to have_content('my@email.com')
+ expect(page).to have_content(user.email)
+
+ user.emails.each do |email|
+ expect(page).to have_content(email.email)
+ end
+ end
+end
diff --git a/spec/features/profiles/user_visits_notifications_tab_spec.rb b/spec/features/profiles/user_visits_notifications_tab_spec.rb
index 48c1787c8b7..923ca8b1c80 100644
--- a/spec/features/profiles/user_visits_notifications_tab_spec.rb
+++ b/spec/features/profiles/user_visits_notifications_tab_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'User visits the notifications tab', js: true do
+feature 'User visits the notifications tab', :js do
let(:project) { create(:project) }
let(:user) { create(:user) }
diff --git a/spec/features/profiles/user_visits_profile_account_page_spec.rb b/spec/features/profiles/user_visits_profile_account_page_spec.rb
new file mode 100644
index 00000000000..a8c08a680d7
--- /dev/null
+++ b/spec/features/profiles/user_visits_profile_account_page_spec.rb
@@ -0,0 +1,15 @@
+require 'spec_helper'
+
+describe 'User visits the profile account page' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+
+ visit(profile_account_path)
+ end
+
+ it 'shows correct menu item' do
+ expect(page).to have_active_navigation('Account')
+ end
+end
diff --git a/spec/features/profiles/user_visits_profile_authentication_log_spec.rb b/spec/features/profiles/user_visits_profile_authentication_log_spec.rb
new file mode 100644
index 00000000000..a50ebb29e01
--- /dev/null
+++ b/spec/features/profiles/user_visits_profile_authentication_log_spec.rb
@@ -0,0 +1,15 @@
+require 'spec_helper'
+
+describe 'User visits the authentication log' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+
+ visit(audit_log_profile_path)
+ end
+
+ it 'shows correct menu item' do
+ expect(page).to have_active_navigation('Authentication log')
+ end
+end
diff --git a/spec/features/profiles/preferences_spec.rb b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
index c935cdfd5c4..924ee0e4174 100644
--- a/spec/features/profiles/preferences_spec.rb
+++ b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
@@ -1,14 +1,19 @@
require 'spec_helper'
-describe 'Profile > Preferences', :js do
+describe 'User visits the profile preferences page' do
let(:user) { create(:user) }
before do
sign_in(user)
- visit profile_preferences_path
+
+ visit(profile_preferences_path)
+ end
+
+ it 'shows correct menu item' do
+ expect(page).to have_active_navigation('Preferences')
end
- describe 'User changes their syntax highlighting theme' do
+ describe 'User changes their syntax highlighting theme', :js do
it 'creates a flash message' do
choose 'user_color_scheme_id_5'
@@ -27,7 +32,7 @@ describe 'Profile > Preferences', :js do
end
end
- describe 'User changes their default dashboard' do
+ describe 'User changes their default dashboard', :js do
it 'creates a flash message' do
select 'Starred Projects', from: 'user_dashboard'
click_button 'Save'
diff --git a/spec/features/profiles/user_visits_profile_spec.rb b/spec/features/profiles/user_visits_profile_spec.rb
new file mode 100644
index 00000000000..6601d3039ed
--- /dev/null
+++ b/spec/features/profiles/user_visits_profile_spec.rb
@@ -0,0 +1,15 @@
+require 'spec_helper'
+
+describe 'User visits their profile' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+
+ visit(profile_path)
+ end
+
+ it 'shows correct menu item' do
+ expect(page).to have_active_navigation('Profile')
+ end
+end
diff --git a/spec/features/profiles/user_visits_profile_ssh_keys_page_spec.rb b/spec/features/profiles/user_visits_profile_ssh_keys_page_spec.rb
new file mode 100644
index 00000000000..685bf44619d
--- /dev/null
+++ b/spec/features/profiles/user_visits_profile_ssh_keys_page_spec.rb
@@ -0,0 +1,15 @@
+require 'spec_helper'
+
+describe 'User visits the profile SSH keys page' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+
+ visit(profile_keys_path)
+ end
+
+ it 'shows correct menu item' do
+ expect(page).to have_active_navigation('SSH Keys')
+ end
+end
diff --git a/spec/features/projects/artifacts/browse_spec.rb b/spec/features/projects/artifacts/browse_spec.rb
index 42b47cb3301..cb69aff8d5f 100644
--- a/spec/features/projects/artifacts/browse_spec.rb
+++ b/spec/features/projects/artifacts/browse_spec.rb
@@ -4,16 +4,15 @@ feature 'Browse artifact', :js do
let(:project) { create(:project, :public) }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ let(:browse_url) do
+ browse_path('other_artifacts_0.1.2')
+ end
def browse_path(path)
browse_project_job_artifacts_path(project, job, path)
end
context 'when visiting old URL' do
- let(:browse_url) do
- browse_path('other_artifacts_0.1.2')
- end
-
before do
visit browse_url.sub('/-/jobs', '/builds')
end
@@ -22,4 +21,47 @@ feature 'Browse artifact', :js do
expect(page.current_path).to eq(browse_url)
end
end
+
+ context 'when browsing a directory with a text file' do
+ let(:txt_entry) { job.artifacts_metadata_entry('other_artifacts_0.1.2/doc_sample.txt') }
+
+ before do
+ allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+ allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
+ end
+
+ context 'when the project is public' do
+ it "shows external link icon and styles" do
+ visit browse_url
+
+ link = first('.tree-item-file-external-link')
+
+ expect(page).to have_link('doc_sample.txt', href: file_project_job_artifacts_path(project, job, path: txt_entry.blob.path))
+ expect(link[:target]).to eq('_blank')
+ expect(link[:rel]).to include('noopener')
+ expect(link[:rel]).to include('noreferrer')
+ expect(page).to have_selector('.js-artifact-tree-external-icon')
+ end
+ end
+
+ context 'when the project is private' do
+ let!(:private_project) { create(:project, :private) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: private_project) }
+ let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ let(:user) { create(:user) }
+
+ before do
+ private_project.add_developer(user)
+
+ sign_in(user)
+ end
+
+ it 'shows internal link styles' do
+ visit browse_project_job_artifacts_path(private_project, job, 'other_artifacts_0.1.2')
+
+ expect(page).to have_link('doc_sample.txt')
+ expect(page).not_to have_selector('.js-artifact-tree-external-icon')
+ end
+ end
+ end
end
diff --git a/spec/features/projects/awards/user_interacts_with_awards_in_issue_spec.rb b/spec/features/projects/awards/user_interacts_with_awards_in_issue_spec.rb
new file mode 100644
index 00000000000..adff0a10f0e
--- /dev/null
+++ b/spec/features/projects/awards/user_interacts_with_awards_in_issue_spec.rb
@@ -0,0 +1,104 @@
+require 'spec_helper'
+
+describe 'User interacts with awards in an issue', :js do
+ let(:issue) { create(:issue, project: project)}
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_issue_path(project, issue))
+ end
+
+ it 'toggles the thumbsup award emoji' do
+ page.within('.awards') do
+ thumbsup = page.first('.award-control')
+ thumbsup.click
+ thumbsup.hover
+
+ expect(page).to have_selector('.js-emoji-btn')
+ expect(page).to have_css(".js-emoji-btn.active[data-original-title='You']")
+ expect(page.find('.js-emoji-btn.active .js-counter')).to have_content('1')
+
+ thumbsup = page.first('.award-control')
+ thumbsup.click
+ thumbsup.hover
+
+ expect(page).to have_selector('.award-control.js-emoji-btn')
+ expect(page.all('.award-control.js-emoji-btn').size).to eq(2)
+
+ page.all('.award-control.js-emoji-btn').each do |element|
+ expect(element['title']).to eq('')
+ end
+
+ page.all('.award-control .js-counter').each do |element|
+ expect(element).to have_content('0')
+ end
+
+ thumbsup = page.first('.award-control')
+ thumbsup.click
+ thumbsup.hover
+
+ expect(page).to have_selector('.js-emoji-btn')
+ expect(page).to have_css(".js-emoji-btn.active[data-original-title='You']")
+ expect(page.find('.js-emoji-btn.active .js-counter')).to have_content('1')
+ end
+ end
+
+ it 'toggles a custom award emoji' do
+ page.within('.awards') do
+ page.find('.js-add-award').click
+ end
+
+ page.find('.emoji-menu.is-visible')
+
+ expect(page).to have_selector('.js-emoji-menu-search')
+ expect(page.evaluate_script("document.activeElement.classList.contains('js-emoji-menu-search')")).to eq(true)
+
+ page.within('.emoji-menu-content') do
+ emoji_button = page.first('.js-emoji-btn')
+ emoji_button.hover
+ emoji_button.click
+ end
+
+ page.within('.awards') do
+ expect(page).to have_selector('.js-emoji-btn')
+ expect(page.find('.js-emoji-btn.active .js-counter')).to have_content('1')
+ expect(page).to have_css(".js-emoji-btn.active[data-original-title='You']")
+
+ expect do
+ page.find('.js-emoji-btn.active').click
+ wait_for_requests
+ end.to change { page.all('.award-control.js-emoji-btn').size }.from(3).to(2)
+ end
+ end
+
+ it 'shows the list of award emoji categories' do
+ page.within('.awards') do
+ page.find('.js-add-award').click
+ end
+
+ page.find('.emoji-menu.is-visible')
+
+ expect(page).to have_selector('.js-emoji-menu-search')
+ expect(page.evaluate_script("document.activeElement.classList.contains('js-emoji-menu-search')")).to eq(true)
+
+ fill_in('emoji-menu-search', with: 'hand')
+
+ page.within('.emoji-menu-content') do
+ expect(page).to have_selector('[data-name="raised_hand"]')
+ end
+ end
+
+ it 'adds an award emoji by a comment' do
+ page.within('.js-main-target-form') do
+ fill_in('note[note]', with: ':smile:')
+
+ click_button('Comment')
+ end
+
+ expect(page).to have_selector('gl-emoji[data-name="smile"]')
+ end
+end
diff --git a/spec/features/projects/badges/list_spec.rb b/spec/features/projects/badges/list_spec.rb
index 89ae891037e..68c4a647958 100644
--- a/spec/features/projects/badges/list_spec.rb
+++ b/spec/features/projects/badges/list_spec.rb
@@ -39,7 +39,7 @@ feature 'list of badges' do
end
end
- scenario 'user changes current ref of build status badge', js: true do
+ scenario 'user changes current ref of build status badge', :js do
page.within('.pipeline-status') do
first('.js-project-refs-dropdown').click
diff --git a/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb b/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
index 1160f674974..c12e56d2c3f 100644
--- a/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
+++ b/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Blob button line permalinks (BlobLinePermalinkUpdater)', js: true do
+feature 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js do
include TreeHelper
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index 62ac9fd0e95..6c625ed17aa 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Editing file blob', js: true do
+feature 'Editing file blob', :js do
include TreeHelper
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/projects/blobs/shortcuts_blob_spec.rb b/spec/features/projects/blobs/shortcuts_blob_spec.rb
index 1e3080fa319..9f1fef80ab5 100644
--- a/spec/features/projects/blobs/shortcuts_blob_spec.rb
+++ b/spec/features/projects/blobs/shortcuts_blob_spec.rb
@@ -6,7 +6,7 @@ feature 'Blob shortcuts' do
let(:path) { project.repository.ls_files(project.repository.root_ref)[0] }
let(:sha) { project.repository.commit.sha }
- describe 'On a file(blob)', js: true do
+ describe 'On a file(blob)', :js do
def get_absolute_url(path = "")
"http://#{page.server.host}:#{page.server.port}#{path}"
end
diff --git a/spec/features/projects/branches/download_buttons_spec.rb b/spec/features/projects/branches/download_buttons_spec.rb
index ad06cee4e81..2f407b13c2f 100644
--- a/spec/features/projects/branches/download_buttons_spec.rb
+++ b/spec/features/projects/branches/download_buttons_spec.rb
@@ -29,7 +29,7 @@ feature 'Download buttons in branches page' do
describe 'when checking branches' do
context 'with artifacts' do
before do
- visit project_branches_path(project)
+ visit project_branches_path(project, search: 'binary-encoding')
end
scenario 'shows download artifacts button' do
diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb
index ad4527a0b74..941d34dd660 100644
--- a/spec/features/projects/branches_spec.rb
+++ b/spec/features/projects/branches_spec.rb
@@ -5,12 +5,6 @@ describe 'Branches' do
let(:project) { create(:project, :public, :repository) }
let(:repository) { project.repository }
- def set_protected_branch_name(branch_name)
- find(".js-protected-branch-select").click
- find(".dropdown-input-field").set(branch_name)
- click_on("Create wildcard #{branch_name}")
- end
-
context 'logged in as developer' do
before do
sign_in(user)
@@ -18,12 +12,10 @@ describe 'Branches' do
end
describe 'Initial branches page' do
- it 'shows all the branches' do
+ it 'shows all the branches sorted by last updated by default' do
visit project_branches_path(project)
- repository.branches_sorted_by(:name).first(20).each do |branch|
- expect(page).to have_content("#{branch.name}")
- end
+ expect(page).to have_content(sorted_branches(repository, count: 20, sort_by: :updated_desc))
end
it 'sorts the branches by name' do
@@ -32,22 +24,7 @@ describe 'Branches' do
click_button "Last updated" # Open sorting dropdown
click_link "Name"
- sorted = repository.branches_sorted_by(:name).first(20).map do |branch|
- Regexp.escape(branch.name)
- end
- expect(page).to have_content(/#{sorted.join(".*")}/)
- end
-
- it 'sorts the branches by last updated' do
- visit project_branches_path(project)
-
- click_button "Last updated" # Open sorting dropdown
- click_link "Last updated"
-
- sorted = repository.branches_sorted_by(:updated_desc).first(20).map do |branch|
- Regexp.escape(branch.name)
- end
- expect(page).to have_content(/#{sorted.join(".*")}/)
+ expect(page).to have_content(sorted_branches(repository, count: 20, sort_by: :name))
end
it 'sorts the branches by oldest updated' do
@@ -56,10 +33,7 @@ describe 'Branches' do
click_button "Last updated" # Open sorting dropdown
click_link "Oldest updated"
- sorted = repository.branches_sorted_by(:updated_asc).first(20).map do |branch|
- Regexp.escape(branch.name)
- end
- expect(page).to have_content(/#{sorted.join(".*")}/)
+ expect(page).to have_content(sorted_branches(repository, count: 20, sort_by: :updated_asc))
end
it 'avoids a N+1 query in branches index' do
@@ -72,7 +46,7 @@ describe 'Branches' do
end
describe 'Find branches' do
- it 'shows filtered branches', js: true do
+ it 'shows filtered branches', :js do
visit project_branches_path(project)
fill_in 'branch-search', with: 'fix'
@@ -84,7 +58,7 @@ describe 'Branches' do
end
describe 'Delete unprotected branch' do
- it 'removes branch after confirmation', js: true do
+ it 'removes branch after confirmation', :js do
visit project_branches_path(project)
fill_in 'branch-search', with: 'fix'
@@ -99,28 +73,6 @@ describe 'Branches' do
expect(find('.all-branches')).to have_selector('li', count: 0)
end
end
-
- describe 'Delete protected branch' do
- before do
- project.add_user(user, :master)
- visit project_protected_branches_path(project)
- set_protected_branch_name('fix')
- click_on "Protect"
-
- within(".protected-branches-list") { expect(page).to have_content('fix') }
- expect(ProtectedBranch.count).to eq(1)
- project.add_user(user, :developer)
- end
-
- it 'does not allow devleoper to removes protected branch', js: true do
- visit project_branches_path(project)
-
- fill_in 'branch-search', with: 'fix'
- find('#branch-search').native.send_keys(:enter)
-
- expect(page).to have_css('.btn-remove.disabled')
- end
- end
end
context 'logged in as master' do
@@ -136,37 +88,6 @@ describe 'Branches' do
expect(page).to have_content("Protected branches can be managed in project settings")
end
end
-
- describe 'Delete protected branch' do
- before do
- visit project_protected_branches_path(project)
- set_protected_branch_name('fix')
- click_on "Protect"
-
- within(".protected-branches-list") { expect(page).to have_content('fix') }
- expect(ProtectedBranch.count).to eq(1)
- end
-
- it 'removes branch after modal confirmation', js: true do
- visit project_branches_path(project)
-
- fill_in 'branch-search', with: 'fix'
- find('#branch-search').native.send_keys(:enter)
-
- expect(page).to have_content('fix')
- expect(find('.all-branches')).to have_selector('li', count: 1)
- page.find('[data-target="#modal-delete-branch"]').trigger(:click)
-
- expect(page).to have_css('.js-delete-branch[disabled]')
- fill_in 'delete_branch_input', with: 'fix'
- click_link 'Delete protected branch'
-
- fill_in 'branch-search', with: 'fix'
- find('#branch-search').native.send_keys(:enter)
-
- expect(page).to have_content('No branches to show')
- end
- end
end
context 'logged out' do
@@ -180,4 +101,13 @@ describe 'Branches' do
end
end
end
+
+ def sorted_branches(repository, count:, sort_by:)
+ sorted_branches =
+ repository.branches_sorted_by(sort_by).first(count).map do |branch|
+ Regexp.escape(branch.name)
+ end
+
+ Regexp.new(sorted_branches.join('.*'))
+ end
end
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
new file mode 100644
index 00000000000..810f2c39b43
--- /dev/null
+++ b/spec/features/projects/clusters_spec.rb
@@ -0,0 +1,111 @@
+require 'spec_helper'
+
+feature 'Clusters', :js do
+ let!(:project) { create(:project, :repository) }
+ let!(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ gitlab_sign_in(user)
+ end
+
+ context 'when user has signed in Google' do
+ before do
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:validate_token).and_return(true)
+ end
+
+ context 'when user does not have a cluster and visits cluster index page' do
+ before do
+ visit project_clusters_path(project)
+ end
+
+ it 'user sees a new page' do
+ expect(page).to have_button('Create cluster')
+ end
+
+ context 'when user filled form with valid parameters' do
+ before do
+ double.tap do |dbl|
+ allow(dbl).to receive(:status).and_return('RUNNING')
+ allow(dbl).to receive(:self_link)
+ .and_return('projects/gcp-project-12345/zones/us-central1-a/operations/ope-123')
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:projects_zones_clusters_create).and_return(dbl)
+ end
+
+ allow(WaitForClusterCreationWorker).to receive(:perform_in).and_return(nil)
+
+ fill_in 'cluster_gcp_project_id', with: 'gcp-project-123'
+ fill_in 'cluster_gcp_cluster_name', with: 'dev-cluster'
+ click_button 'Create cluster'
+ end
+
+ it 'user sees a cluster details page and creation status' do
+ expect(page).to have_content('Cluster is being created on Google Container Engine...')
+
+ Gcp::Cluster.last.make_created!
+
+ expect(page).to have_content('Cluster was successfully created on Google Container Engine')
+ end
+ end
+
+ context 'when user filled form with invalid parameters' do
+ before do
+ click_button 'Create cluster'
+ end
+
+ it 'user sees a validation error' do
+ expect(page).to have_css('#error_explanation')
+ end
+ end
+ end
+
+ context 'when user has a cluster and visits cluster index page' do
+ let!(:cluster) { create(:gcp_cluster, :created_on_gke, :with_kubernetes_service, project: project) }
+
+ before do
+ visit project_clusters_path(project)
+ end
+
+ it 'user sees a cluster details page' do
+ expect(page).to have_button('Save')
+ expect(page.find(:css, '.cluster-name').value).to eq(cluster.gcp_cluster_name)
+ end
+
+ context 'when user disables the cluster' do
+ before do
+ page.find(:css, '.js-toggle-cluster').click
+ click_button 'Save'
+ end
+
+ it 'user sees the successful message' do
+ expect(page).to have_content('Cluster was successfully updated.')
+ end
+ end
+
+ context 'when user destroys the cluster' do
+ before do
+ page.accept_confirm do
+ click_link 'Remove integration'
+ end
+ end
+
+ it 'user sees creation form with the successful message' do
+ expect(page).to have_content('Cluster integration was successfully removed.')
+ expect(page).to have_button('Create cluster')
+ end
+ end
+ end
+ end
+
+ context 'when user has not signed in Google' do
+ before do
+ visit project_clusters_path(project)
+ end
+
+ it 'user sees a login page' do
+ expect(page).to have_css('.signin-with-google')
+ end
+ end
+end
diff --git a/spec/features/projects/commit/builds_spec.rb b/spec/features/projects/commit/builds_spec.rb
index 740331fe42a..9c57626ea1d 100644
--- a/spec/features/projects/commit/builds_spec.rb
+++ b/spec/features/projects/commit/builds_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'project commit pipelines', js: true do
+feature 'project commit pipelines', :js do
given(:project) { create(:project, :repository) }
background do
diff --git a/spec/features/projects/commit/cherry_pick_spec.rb b/spec/features/projects/commit/cherry_pick_spec.rb
index 7086f56bb1b..c11a95732b2 100644
--- a/spec/features/projects/commit/cherry_pick_spec.rb
+++ b/spec/features/projects/commit/cherry_pick_spec.rb
@@ -64,7 +64,7 @@ describe 'Cherry-pick Commits' do
end
end
- context "I cherry-pick a commit from a different branch", js: true do
+ context "I cherry-pick a commit from a different branch", :js do
it do
find('.header-action-buttons a.dropdown-toggle').click
find(:css, "a[href='#modal-cherry-pick-commit']").click
diff --git a/spec/features/projects/commit/diff_notes_spec.rb b/spec/features/projects/commit/diff_notes_spec.rb
new file mode 100644
index 00000000000..f0fe4e00acc
--- /dev/null
+++ b/spec/features/projects/commit/diff_notes_spec.rb
@@ -0,0 +1,36 @@
+require 'spec_helper'
+
+feature 'Commit diff', :js do
+ include RepoHelpers
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ project.add_master(user)
+ sign_in user
+ end
+
+ %w(inline parallel).each do |view|
+ context "#{view} view" do
+ before do
+ visit project_commit_path(project, sample_commit.id, view: view)
+ end
+
+ it "adds comment to diff" do
+ diff_line_num = first('.diff-line-num.new')
+
+ diff_line_num.trigger('mouseover')
+ diff_line_num.find('.js-add-diff-note-button').trigger('click')
+
+ page.within(first('.diff-viewer')) do
+ find('.js-note-text').set 'test comment'
+
+ click_button 'Comment'
+
+ expect(page).to have_content('test comment')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/commit/user_reverts_commit_spec.rb b/spec/features/projects/commit/user_reverts_commit_spec.rb
new file mode 100644
index 00000000000..221f1d7757e
--- /dev/null
+++ b/spec/features/projects/commit/user_reverts_commit_spec.rb
@@ -0,0 +1,56 @@
+require 'spec_helper'
+
+describe 'User reverts a commit', :js do
+ include RepoHelpers
+
+ let(:project) { create(:project, :repository, namespace: user.namespace) }
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+
+ visit(project_commit_path(project, sample_commit.id))
+
+ find('.header-action-buttons .dropdown').click
+ find('a[href="#modal-revert-commit"]').click
+ end
+
+ context 'without creating a new merge request' do
+ before do
+ page.within('#modal-revert-commit') do
+ uncheck('create_merge_request')
+ click_button('Revert')
+ end
+ end
+
+ it 'reverts a commit' do
+ expect(page).to have_content('The commit has been successfully reverted.')
+ end
+
+ it 'does not revert a previously reverted commit' do
+ # Visit the commit again once it was reverted.
+ visit project_commit_path(project, sample_commit.id)
+
+ find('.header-action-buttons .dropdown').click
+ find('a[href="#modal-revert-commit"]').click
+
+ page.within('#modal-revert-commit') do
+ uncheck('create_merge_request')
+ click_button('Revert')
+ end
+
+ expect(page).to have_content('Sorry, we cannot revert this commit automatically.')
+ end
+ end
+
+ context 'with creating a new merge request' do
+ it 'reverts a commit' do
+ page.within('#modal-revert-commit') do
+ click_button('Revert')
+ end
+
+ expect(page).to have_content('The commit has been successfully reverted. You can now submit a merge request to get this change into the original branch.')
+ expect(page).to have_content("From revert-#{Commit.truncate_sha(sample_commit.id)} into master")
+ end
+ end
+end
diff --git a/spec/features/projects/compare_spec.rb b/spec/features/projects/compare_spec.rb
index 82d73fe8531..87ffc2a0b90 100644
--- a/spec/features/projects/compare_spec.rb
+++ b/spec/features/projects/compare_spec.rb
@@ -1,6 +1,6 @@
require "spec_helper"
-describe "Compare", js: true do
+describe "Compare", :js do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
diff --git a/spec/features/projects/developer_views_empty_project_instructions_spec.rb b/spec/features/projects/developer_views_empty_project_instructions_spec.rb
index fe8567ce348..36809240f76 100644
--- a/spec/features/projects/developer_views_empty_project_instructions_spec.rb
+++ b/spec/features/projects/developer_views_empty_project_instructions_spec.rb
@@ -17,7 +17,7 @@ feature 'Developer views empty project instructions' do
expect_instructions_for('http')
end
- scenario 'switches to SSH', js: true do
+ scenario 'switches to SSH', :js do
visit_project
select_protocol('SSH')
@@ -37,7 +37,7 @@ feature 'Developer views empty project instructions' do
expect_instructions_for('ssh')
end
- scenario 'switches to HTTP', js: true do
+ scenario 'switches to HTTP', :js do
visit_project
select_protocol('HTTP')
diff --git a/spec/features/projects/diffs/diff_show_spec.rb b/spec/features/projects/diffs/diff_show_spec.rb
index bc102895aaf..c1307ab640f 100644
--- a/spec/features/projects/diffs/diff_show_spec.rb
+++ b/spec/features/projects/diffs/diff_show_spec.rb
@@ -62,13 +62,43 @@ feature 'Diff file viewer', :js do
end
context 'Image file' do
- before do
- visit_commit('2f63565e7aac07bcdadb654e253078b727143ec4')
+ context 'Replaced' do
+ before do
+ visit_commit('2f63565e7aac07bcdadb654e253078b727143ec4')
+ end
+
+ it 'shows a rendered image' do
+ within('.diff-file[id="e986451b8f7397b617dbb6fffcb5539328c56921"]') do
+ expect(page).to have_css('img[alt="files/images/6049019_460s.jpg"]')
+ end
+ end
+
+ it 'shows view replaced and view file links' do
+ expect(page.all('.file-actions a').length).to eq 2
+ expect(page.all('.file-actions a')[0]).to have_content 'View replaced file @'
+ expect(page.all('.file-actions a')[1]).to have_content 'View file @'
+ end
end
- it 'shows a rendered image' do
- within('.diff-file[id="e986451b8f7397b617dbb6fffcb5539328c56921"]') do
- expect(page).to have_css('img[alt="files/images/6049019_460s.jpg"]')
+ context 'Added' do
+ before do
+ visit_commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9')
+ end
+
+ it 'shows view file link' do
+ expect(page.all('.file-actions a').length).to eq 1
+ expect(page.all('.file-actions a')[0]).to have_content 'View file @'
+ end
+ end
+
+ context 'Deleted' do
+ before do
+ visit_commit('7fd7a459706ee87be6f855fd98ce8c552b15529a')
+ end
+
+ it 'shows view file link' do
+ expect(page.all('.file-actions a').length).to eq 1
+ expect(page.all('.file-actions a')[0]).to have_content 'View file @'
end
end
end
@@ -108,6 +138,19 @@ feature 'Diff file viewer', :js do
end
end
+ context 'renamed file' do
+ before do
+ visit_commit('6907208d755b60ebeacb2e9dfea74c92c3449a1f')
+ end
+
+ it 'shows the filename with diff highlight' do
+ within('.file-header-content') do
+ expect(page).to have_css('.idiff.left.right.deletion')
+ expect(page).to have_content('files/js/commit.coffee')
+ end
+ end
+ end
+
context 'binary file that appears to be text in the first 1024 bytes' do
before do
# The file we're visiting is smaller than 10 KB and we want it collapsed
diff --git a/spec/features/projects/edit_spec.rb b/spec/features/projects/edit_spec.rb
index 17f914c9c17..7a372757523 100644
--- a/spec/features/projects/edit_spec.rb
+++ b/spec/features/projects/edit_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-feature 'Project edit', js: true do
+feature 'Project edit', :js do
let(:admin) { create(:admin) }
let(:user) { create(:user) }
let(:project) { create(:project) }
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 1c59e57c0a4..610f566c0cf 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -10,26 +10,23 @@ feature 'Environments page', :js do
sign_in(user)
end
- given!(:environment) { }
- given!(:deployment) { }
- given!(:action) { }
-
- before do
- visit_environments(project)
- end
-
describe 'page tabs' do
- scenario 'shows "Available" and "Stopped" tab with links' do
+ it 'shows "Available" and "Stopped" tab with links' do
+ visit_environments(project)
+
expect(page).to have_link('Available')
expect(page).to have_link('Stopped')
end
describe 'with one available environment' do
- given(:environment) { create(:environment, project: project, state: :available) }
+ before do
+ create(:environment, project: project, state: :available)
+ end
describe 'in available tab page' do
it 'should show one environment' do
- visit project_environments_path(project, scope: 'available')
+ visit_environments(project, scope: 'available')
+
expect(page).to have_css('.environments-container')
expect(page.all('.environment-name').length).to eq(1)
end
@@ -37,7 +34,8 @@ feature 'Environments page', :js do
describe 'in stopped tab page' do
it 'should show no environments' do
- visit project_environments_path(project, scope: 'stopped')
+ visit_environments(project, scope: 'stopped')
+
expect(page).to have_css('.environments-container')
expect(page).to have_content('You don\'t have any environments right now')
end
@@ -45,11 +43,14 @@ feature 'Environments page', :js do
end
describe 'with one stopped environment' do
- given(:environment) { create(:environment, project: project, state: :stopped) }
+ before do
+ create(:environment, project: project, state: :stopped)
+ end
describe 'in available tab page' do
it 'should show no environments' do
- visit project_environments_path(project, scope: 'available')
+ visit_environments(project, scope: 'available')
+
expect(page).to have_css('.environments-container')
expect(page).to have_content('You don\'t have any environments right now')
end
@@ -57,7 +58,8 @@ feature 'Environments page', :js do
describe 'in stopped tab page' do
it 'should show one environment' do
- visit project_environments_path(project, scope: 'stopped')
+ visit_environments(project, scope: 'stopped')
+
expect(page).to have_css('.environments-container')
expect(page.all('.environment-name').length).to eq(1)
end
@@ -66,86 +68,84 @@ feature 'Environments page', :js do
end
context 'without environments' do
- scenario 'does show no environments' do
- expect(page).to have_content('You don\'t have any environments right now.')
+ before do
+ visit_environments(project)
end
- scenario 'does show 0 as counter for environments in both tabs' do
+ it 'shows no environments and sets the counters to zero' do
+ expect(page).to have_content('You don\'t have any environments right now.')
+
expect(page.find('.js-available-environments-count').text).to eq('0')
expect(page.find('.js-stopped-environments-count').text).to eq('0')
end
end
- describe 'when showing the environment' do
- given(:environment) { create(:environment, project: project) }
-
- scenario 'does show environment name' do
- expect(page).to have_link(environment.name)
- end
-
- scenario 'does show number of available and stopped environments' do
- expect(page.find('.js-available-environments-count').text).to eq('1')
- expect(page.find('.js-stopped-environments-count').text).to eq('0')
+ describe 'environments table' do
+ given!(:environment) do
+ create(:environment, project: project, state: :available)
end
- context 'without deployments' do
- scenario 'does show no deployments' do
- expect(page).to have_content('No deployments yet')
+ context 'when there are no deployments' do
+ before do
+ visit_environments(project)
end
- context 'for available environment' do
- given(:environment) { create(:environment, project: project, state: :available) }
+ it 'shows environments names and counters' do
+ expect(page).to have_link(environment.name)
- scenario 'does not shows stop button' do
- expect(page).not_to have_selector('.stop-env-link')
- end
+ expect(page.find('.js-available-environments-count').text).to eq('1')
+ expect(page.find('.js-stopped-environments-count').text).to eq('0')
end
- context 'for stopped environment' do
- given(:environment) { create(:environment, project: project, state: :stopped) }
+ it 'does not show deployments' do
+ expect(page).to have_content('No deployments yet')
+ end
- scenario 'does not shows stop button' do
- expect(page).not_to have_selector('.stop-env-link')
- end
+ it 'does not show stop button when environment is not stoppable' do
+ expect(page).not_to have_selector('.stop-env-link')
end
end
- context 'with deployments' do
+ context 'when there are deployments' do
given(:project) { create(:project, :repository) }
- given(:deployment) do
+ given!(:deployment) do
create(:deployment, environment: environment,
sha: project.commit.id)
end
- scenario 'does show deployment SHA' do
- expect(page).to have_link(deployment.short_sha)
- end
+ it 'shows deployment SHA and internal ID' do
+ visit_environments(project)
- scenario 'does show deployment internal id' do
+ expect(page).to have_link(deployment.short_sha)
expect(page).to have_content(deployment.iid)
end
- context 'with build and manual actions' do
- given(:pipeline) { create(:ci_pipeline, project: project) }
- given(:build) { create(:ci_build, pipeline: pipeline) }
+ context 'when builds and manual actions are present' do
+ given!(:pipeline) { create(:ci_pipeline, project: project) }
+ given!(:build) { create(:ci_build, pipeline: pipeline) }
- given(:action) do
+ given!(:action) do
create(:ci_build, :manual, pipeline: pipeline, name: 'deploy to production')
end
- given(:deployment) do
+ given!(:deployment) do
create(:deployment, environment: environment,
deployable: build,
sha: project.commit.id)
end
- scenario 'does show a play button' do
+ before do
+ visit_environments(project)
+ end
+
+ it 'shows a play button' do
find('.js-dropdown-play-icon-container').click
+
expect(page).to have_content(action.name.humanize)
end
- scenario 'does allow to play manual action', js: true do
+ it 'allows to play a manual action', :js do
expect(action).to be_manual
find('.js-dropdown-play-icon-container').click
@@ -155,19 +155,19 @@ feature 'Environments page', :js do
.not_to change { Ci::Pipeline.count }
end
- scenario 'does show build name and id' do
+ it 'shows build name and id' do
expect(page).to have_link("#{build.name} ##{build.id}")
end
- scenario 'does not show stop button' do
+ it 'does not show a stop button' do
expect(page).not_to have_selector('.stop-env-link')
end
- scenario 'does not show external link button' do
+ it 'does not show external link button' do
expect(page).not_to have_css('external-url')
end
- scenario 'does not show terminal button' do
+ it 'does not show terminal button' do
expect(page).not_to have_terminal_button
end
@@ -176,7 +176,7 @@ feature 'Environments page', :js do
given(:build) { create(:ci_build, pipeline: pipeline) }
given(:deployment) { create(:deployment, environment: environment, deployable: build) }
- scenario 'does show an external link button' do
+ it 'shows an external link button' do
expect(page).to have_link(nil, href: environment.external_url)
end
end
@@ -192,34 +192,34 @@ feature 'Environments page', :js do
on_stop: 'close_app')
end
- scenario 'does show stop button' do
+ it 'shows a stop button' do
expect(page).to have_selector('.stop-env-link')
end
- context 'for reporter' do
+ context 'when user is a reporter' do
let(:role) { :reporter }
- scenario 'does not show stop button' do
+ it 'does not show stop button' do
expect(page).not_to have_selector('.stop-env-link')
end
end
end
- context 'with terminal' do
+ context 'when kubernetes terminal is available' do
let(:project) { create(:kubernetes_project, :test_repo) }
context 'for project master' do
let(:role) { :master }
- scenario 'it shows the terminal button' do
+ it 'shows the terminal button' do
expect(page).to have_terminal_button
end
end
- context 'for developer' do
+ context 'when user is a developer' do
let(:role) { :developer }
- scenario 'does not show terminal button' do
+ it 'does not show terminal button' do
expect(page).not_to have_terminal_button
end
end
@@ -228,59 +228,77 @@ feature 'Environments page', :js do
end
end
- scenario 'does have a New environment button' do
+ it 'does have a new environment button' do
+ visit_environments(project)
+
expect(page).to have_link('New environment')
end
- describe 'when creating a new environment' do
+ describe 'creating a new environment' do
before do
visit_environments(project)
end
- context 'when logged as developer' do
- before do
- within(".top-area") do
- click_link 'New environment'
- end
- end
+ context 'user is a developer' do
+ given(:role) { :developer }
- context 'for valid name' do
- before do
- fill_in('Name', with: 'production')
- click_on 'Save'
- end
+ scenario 'developer creates a new environment with a valid name' do
+ within(".top-area") { click_link 'New environment' }
+ fill_in('Name', with: 'production')
+ click_on 'Save'
- scenario 'does create a new pipeline' do
- expect(page).to have_content('production')
- end
+ expect(page).to have_content('production')
end
- context 'for invalid name' do
- before do
- fill_in('Name', with: 'name,with,commas')
- click_on 'Save'
- end
+ scenario 'developer creates a new environment with an invalid name' do
+ within(".top-area") { click_link 'New environment' }
+ fill_in('Name', with: 'name,with,commas')
+ click_on 'Save'
- scenario 'does show errors' do
- expect(page).to have_content('Name can contain only letters')
- end
+ expect(page).to have_content('Name can contain only letters')
end
end
- context 'when logged as reporter' do
+ context 'user is a reporter' do
given(:role) { :reporter }
- scenario 'does not have a New environment link' do
+ scenario 'reporter tries to create a new environment' do
expect(page).not_to have_link('New environment')
end
end
end
+ describe 'environments folders' do
+ before do
+ create(:environment, project: project,
+ name: 'staging/review-1',
+ state: :available)
+ create(:environment, project: project,
+ name: 'staging/review-2',
+ state: :available)
+ end
+
+ scenario 'user unfurls an environment folder' do
+ visit_environments(project)
+
+ expect(page).not_to have_content 'review-1'
+ expect(page).not_to have_content 'review-2'
+ expect(page).to have_content 'staging 2'
+
+ within('.folder-row') do
+ find('.folder-name', text: 'staging').click
+ end
+
+ expect(page).to have_content 'review-1'
+ expect(page).to have_content 'review-2'
+ end
+ end
+
def have_terminal_button
have_link(nil, href: terminal_project_environment_path(project, environment))
end
- def visit_environments(project)
- visit project_environments_path(project)
+ def visit_environments(project, **opts)
+ visit project_environments_path(project, **opts)
end
end
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index 57722276d79..e5282b42a4f 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -6,7 +6,7 @@ describe 'Edit Project Settings' do
let!(:issue) { create(:issue, project: project) }
let(:non_member) { create(:user) }
- describe 'project features visibility selectors', js: true do
+ describe 'project features visibility selectors', :js do
before do
project.team << [member, :master]
sign_in(member)
@@ -163,7 +163,7 @@ describe 'Edit Project Settings' do
end
end
- describe 'repository visibility', js: true do
+ describe 'repository visibility', :js do
before do
project.team << [member, :master]
sign_in(member)
diff --git a/spec/features/projects/files/browse_files_spec.rb b/spec/features/projects/files/browse_files_spec.rb
index f62a9edd37e..84197e45dcb 100644
--- a/spec/features/projects/files/browse_files_spec.rb
+++ b/spec/features/projects/files/browse_files_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'user browses project', js: true do
+feature 'user browses project', :js do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/files/dockerfile_dropdown_spec.rb b/spec/features/projects/files/dockerfile_dropdown_spec.rb
index cebb238dda1..3c3a5326538 100644
--- a/spec/features/projects/files/dockerfile_dropdown_spec.rb
+++ b/spec/features/projects/files/dockerfile_dropdown_spec.rb
@@ -16,7 +16,7 @@ feature 'User wants to add a Dockerfile file' do
expect(page).to have_css('.dockerfile-selector')
end
- scenario 'user can pick a Dockerfile file from the dropdown', js: true do
+ scenario 'user can pick a Dockerfile file from the dropdown', :js do
find('.js-dockerfile-selector').click
wait_for_requests
diff --git a/spec/features/projects/files/edit_file_soft_wrap_spec.rb b/spec/features/projects/files/edit_file_soft_wrap_spec.rb
index c7e3f657639..25f7e18ac5c 100644
--- a/spec/features/projects/files/edit_file_soft_wrap_spec.rb
+++ b/spec/features/projects/files/edit_file_soft_wrap_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'User uses soft wrap whilst editing file', js: true do
+feature 'User uses soft wrap whilst editing file', :js do
before do
user = create(:user)
project = create(:project, :repository)
diff --git a/spec/features/projects/files/find_file_keyboard_spec.rb b/spec/features/projects/files/find_file_keyboard_spec.rb
index 7f97fdb8cc9..618725ee781 100644
--- a/spec/features/projects/files/find_file_keyboard_spec.rb
+++ b/spec/features/projects/files/find_file_keyboard_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Find file keyboard shortcuts', js: true do
+feature 'Find file keyboard shortcuts', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
diff --git a/spec/features/projects/files/gitignore_dropdown_spec.rb b/spec/features/projects/files/gitignore_dropdown_spec.rb
index e2044c9d5aa..81d68c3d67c 100644
--- a/spec/features/projects/files/gitignore_dropdown_spec.rb
+++ b/spec/features/projects/files/gitignore_dropdown_spec.rb
@@ -13,7 +13,7 @@ feature 'User wants to add a .gitignore file' do
expect(page).to have_css('.gitignore-selector')
end
- scenario 'user can pick a .gitignore file from the dropdown', js: true do
+ scenario 'user can pick a .gitignore file from the dropdown', :js do
find('.js-gitignore-selector').click
wait_for_requests
within '.gitignore-selector' do
diff --git a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
index ab242b0b0b5..8e58fa7bd56 100644
--- a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
+++ b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
@@ -13,7 +13,7 @@ feature 'User wants to add a .gitlab-ci.yml file' do
expect(page).to have_css('.gitlab-ci-yml-selector')
end
- scenario 'user can pick a template from the dropdown', js: true do
+ scenario 'user can pick a template from the dropdown', :js do
find('.js-gitlab-ci-yml-selector').click
wait_for_requests
within '.gitlab-ci-yml-selector' do
diff --git a/spec/features/projects/files/project_owner_creates_license_file_spec.rb b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
index 95af263bcac..6c5b1086ec1 100644
--- a/spec/features/projects/files/project_owner_creates_license_file_spec.rb
+++ b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'project owner creates a license file', js: true do
+feature 'project owner creates a license file', :js do
let(:project_master) { create(:user) }
let(:project) { create(:project, :repository) }
background do
diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
index 7bcab01c739..6c616bf0456 100644
--- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
+++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'project owner sees a link to create a license file in empty project', js: true do
+feature 'project owner sees a link to create a license file in empty project', :js do
let(:project_master) { create(:user) }
let(:project) { create(:project) }
background do
diff --git a/spec/features/projects/files/template_type_dropdown_spec.rb b/spec/features/projects/files/template_type_dropdown_spec.rb
index 48003eeaa87..f95a60e5194 100644
--- a/spec/features/projects/files/template_type_dropdown_spec.rb
+++ b/spec/features/projects/files/template_type_dropdown_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Template type dropdown selector', js: true do
+feature 'Template type dropdown selector', :js do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/files/undo_template_spec.rb b/spec/features/projects/files/undo_template_spec.rb
index 9bcd5beabb8..64fe350f3dc 100644
--- a/spec/features/projects/files/undo_template_spec.rb
+++ b/spec/features/projects/files/undo_template_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Template Undo Button', js: true do
+feature 'Template Undo Button', :js do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
new file mode 100644
index 00000000000..e10d29e5eea
--- /dev/null
+++ b/spec/features/projects/fork_spec.rb
@@ -0,0 +1,57 @@
+require 'spec_helper'
+
+describe 'Project fork' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ sign_in user
+ end
+
+ it 'allows user to fork project' do
+ visit project_path(project)
+
+ expect(page).not_to have_css('a.disabled', text: 'Fork')
+ end
+
+ it 'disables fork button when user has exceeded project limit' do
+ user.projects_limit = 0
+ user.save!
+
+ visit project_path(project)
+
+ expect(page).to have_css('a.disabled', text: 'Fork')
+ end
+
+ context 'master in group' do
+ before do
+ group = create(:group)
+ group.add_master(user)
+ end
+
+ it 'allows user to fork project to group or to user namespace' do
+ visit project_path(project)
+
+ expect(page).not_to have_css('a.disabled', text: 'Fork')
+
+ click_link 'Fork'
+
+ expect(page).to have_css('.fork-thumbnail', count: 2)
+ expect(page).not_to have_css('.fork-thumbnail.disabled')
+ end
+
+ it 'allows user to fork project to group and not user when exceeded project limit' do
+ user.projects_limit = 0
+ user.save!
+
+ visit project_path(project)
+
+ expect(page).not_to have_css('a.disabled', text: 'Fork')
+
+ click_link 'Fork'
+
+ expect(page).to have_css('.fork-thumbnail', count: 2)
+ expect(page).to have_css('.fork-thumbnail.disabled')
+ end
+ end
+end
diff --git a/spec/features/projects/gfm_autocomplete_load_spec.rb b/spec/features/projects/gfm_autocomplete_load_spec.rb
index cff3b1f5743..1c988726ae6 100644
--- a/spec/features/projects/gfm_autocomplete_load_spec.rb
+++ b/spec/features/projects/gfm_autocomplete_load_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'GFM autocomplete loading', js: true do
+describe 'GFM autocomplete loading', :js do
let(:project) { create(:project) }
before do
diff --git a/spec/features/projects/import_export/export_file_spec.rb b/spec/features/projects/import_export/export_file_spec.rb
index 62d244ff259..05776c50f9d 100644
--- a/spec/features/projects/import_export/export_file_spec.rb
+++ b/spec/features/projects/import_export/export_file_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
# It looks for any sensitive word inside the JSON, so if a sensitive word is found
# we'll have to either include it by adding the model that includes it to the +safe_list+
# or make sure the attribute is blacklisted in the +import_export.yml+ configuration
-feature 'Import/Export - project export integration test', js: true do
+feature 'Import/Export - project export integration test', :js do
include Select2Helper
include ExportFileHelper
diff --git a/spec/features/projects/import_export/import_file_spec.rb b/spec/features/projects/import_export/import_file_spec.rb
index e5c7781a096..c928459f911 100644
--- a/spec/features/projects/import_export/import_file_spec.rb
+++ b/spec/features/projects/import_export/import_file_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Import/Export - project import integration test', js: true do
+feature 'Import/Export - project import integration test', :js do
include Select2Helper
let(:user) { create(:user) }
diff --git a/spec/features/projects/import_export/namespace_export_file_spec.rb b/spec/features/projects/import_export/namespace_export_file_spec.rb
index 691b0e1e4ca..b6a7c3cdcdb 100644
--- a/spec/features/projects/import_export/namespace_export_file_spec.rb
+++ b/spec/features/projects/import_export/namespace_export_file_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Import/Export - Namespace export file cleanup', js: true do
+feature 'Import/Export - Namespace export file cleanup', :js do
let(:export_path) { "#{Dir.tmpdir}/import_file_spec" }
let(:config_hash) { YAML.load_file(Gitlab::ImportExport.config_file).deep_stringify_keys }
diff --git a/spec/features/projects/issuable_templates_spec.rb b/spec/features/projects/issuable_templates_spec.rb
index d2789d0aa52..62b23121c5a 100644
--- a/spec/features/projects/issuable_templates_spec.rb
+++ b/spec/features/projects/issuable_templates_spec.rb
@@ -1,8 +1,11 @@
require 'spec_helper'
-feature 'issuable templates', js: true do
+feature 'issuable templates', :js do
+ include ProjectForksHelper
+
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
+ let(:issue_form_location) { '#content-body .issuable-details .detail-page-description' }
before do
project.team << [user, :master]
@@ -28,14 +31,17 @@ feature 'issuable templates', js: true do
longtemplate_content,
message: 'added issue template',
branch_name: 'master')
- visit edit_project_issue_path project, issue
- fill_in :'issue[title]', with: 'test issue title'
+ visit project_issue_path project, issue
+ page.within('.content .issuable-actions') do
+ click_on 'Edit'
+ end
+ fill_in :'issue-title', with: 'test issue title'
end
scenario 'user selects "bug" template' do
select_template 'bug'
wait_for_requests
- assert_template
+ assert_template(page_part: issue_form_location)
save_changes
end
@@ -43,30 +49,19 @@ feature 'issuable templates', js: true do
select_template 'bug'
wait_for_requests
select_option 'No template'
- assert_template('')
+ assert_template(expected_content: '', page_part: issue_form_location)
save_changes('')
end
scenario 'user selects "bug" template, edits description and then selects "reset template"' do
select_template 'bug'
wait_for_requests
- find_field('issue_description').send_keys(description_addition)
- assert_template(template_content + description_addition)
+ find_field('issue-description').send_keys(description_addition)
+ assert_template(expected_content: template_content + description_addition, page_part: issue_form_location)
select_option 'Reset template'
- assert_template
+ assert_template(page_part: issue_form_location)
save_changes
end
-
- it 'updates height of markdown textarea' do
- start_height = page.evaluate_script('$(".markdown-area").outerHeight()')
-
- select_template 'test'
- wait_for_requests
-
- end_height = page.evaluate_script('$(".markdown-area").outerHeight()')
-
- expect(end_height).not_to eq(start_height)
- end
end
context 'user creates an issue using templates, with a prior description' do
@@ -81,15 +76,18 @@ feature 'issuable templates', js: true do
template_content,
message: 'added issue template',
branch_name: 'master')
- visit edit_project_issue_path project, issue
- fill_in :'issue[title]', with: 'test issue title'
- fill_in :'issue[description]', with: prior_description
+ visit project_issue_path project, issue
+ page.within('.content .issuable-actions') do
+ click_on 'Edit'
+ end
+ fill_in :'issue-title', with: 'test issue title'
+ fill_in :'issue-description', with: prior_description
end
scenario 'user selects "bug" template' do
select_template 'bug'
wait_for_requests
- assert_template("#{template_content}")
+ assert_template(page_part: issue_form_location)
save_changes
end
end
@@ -120,15 +118,13 @@ feature 'issuable templates', js: true do
context 'user creates a merge request from a forked project using templates' do
let(:template_content) { 'this is a test "feature-proposal" template' }
let(:fork_user) { create(:user) }
- let(:fork_project) { create(:project, :public, :repository) }
- let(:merge_request) { create(:merge_request, :with_diffs, source_project: fork_project, target_project: project) }
+ let(:forked_project) { fork_project(project, fork_user) }
+ let(:merge_request) { create(:merge_request, :with_diffs, source_project: forked_project, target_project: project) }
background do
sign_out(:user)
project.team << [fork_user, :developer]
- fork_project.team << [fork_user, :master]
- create(:forked_project_link, forked_to_project: fork_project, forked_from_project: project)
sign_in(fork_user)
@@ -154,8 +150,10 @@ feature 'issuable templates', js: true do
end
end
- def assert_template(expected_content = template_content)
- expect(find('textarea')['value']).to eq(expected_content)
+ def assert_template(expected_content: template_content, page_part: '#content-body')
+ page.within(page_part) do
+ expect(find('textarea')['value']).to eq(expected_content)
+ end
end
def save_changes(expected_content = template_content)
diff --git a/spec/features/projects/jobs/user_browses_job_spec.rb b/spec/features/projects/jobs/user_browses_job_spec.rb
new file mode 100644
index 00000000000..21c9acc7ac0
--- /dev/null
+++ b/spec/features/projects/jobs/user_browses_job_spec.rb
@@ -0,0 +1,37 @@
+require 'spec_helper'
+
+describe 'User browses a job', :js do
+ let!(:build) { create(:ci_build, :coverage, pipeline: pipeline) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.sha, ref: 'master') }
+ let(:project) { create(:project, :repository, namespace: user.namespace) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ project.enable_ci
+ build.success
+ build.trace.set('job trace')
+
+ sign_in(user)
+
+ visit(project_job_path(project, build))
+ end
+
+ it 'erases the job log' do
+ expect(page).to have_content("Job ##{build.id}")
+ expect(page).to have_css('#build-trace')
+
+ click_link('Erase')
+
+ expect(build).not_to have_trace
+ expect(build.artifacts_file.exists?).to be_falsy
+ expect(build.artifacts_metadata.exists?).to be_falsy
+ expect(page).to have_no_css('.artifacts')
+
+ page.within('.erased') do
+ expect(page).to have_content('Job has been erased')
+ end
+
+ expect(build.project.running_or_pending_build_count).to eq(build.project.builds.running_or_pending.count(:all))
+ end
+end
diff --git a/spec/features/projects/jobs/user_browses_jobs_spec.rb b/spec/features/projects/jobs/user_browses_jobs_spec.rb
new file mode 100644
index 00000000000..767777f3bf9
--- /dev/null
+++ b/spec/features/projects/jobs/user_browses_jobs_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+
+describe 'User browses jobs' do
+ let!(:build) { create(:ci_build, :coverage, pipeline: pipeline) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.sha, ref: 'master') }
+ let(:project) { create(:project, :repository, namespace: user.namespace) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ project.enable_ci
+ project.update_attribute(:build_coverage_regex, /Coverage (\d+)%/)
+
+ sign_in(user)
+
+ visit(project_jobs_path(project))
+ end
+
+ it 'shows the coverage' do
+ page.within('td.coverage') do
+ expect(page).to have_content('99.9%')
+ end
+ end
+
+ it 'shows the "CI Lint" button' do
+ page.within('.nav-controls') do
+ ci_lint_tool_link = page.find_link('CI lint')
+
+ expect(ci_lint_tool_link[:href]).to end_with(ci_lint_path)
+ end
+ end
+end
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index 3b5c6966287..71702db860c 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -164,9 +164,9 @@ feature 'Jobs' do
end
it 'links to issues/new with the title and description filled in' do
- button_title = "Build Failed ##{job.id}"
- job_path = project_job_path(project, job)
- options = { issue: { title: button_title, description: job_path } }
+ button_title = "Job Failed ##{job.id}"
+ job_url = project_job_path(project, job)
+ options = { issue: { title: button_title, description: "Job [##{job.id}](#{job_url}) failed for #{job.sha}:\n" } }
href = new_project_issue_path(project, options)
@@ -299,7 +299,7 @@ feature 'Jobs' do
end
shared_examples 'expected variables behavior' do
- it 'shows variable key and value after click', js: true do
+ it 'shows variable key and value after click', :js do
expect(page).to have_css('.reveal-variables')
expect(page).not_to have_css('.js-build-variable')
expect(page).not_to have_css('.js-build-value')
diff --git a/spec/features/projects/labels/subscription_spec.rb b/spec/features/projects/labels/subscription_spec.rb
index 5716d151250..e8c70dec854 100644
--- a/spec/features/projects/labels/subscription_spec.rb
+++ b/spec/features/projects/labels/subscription_spec.rb
@@ -13,7 +13,7 @@ feature 'Labels subscription' do
sign_in user
end
- scenario 'users can subscribe/unsubscribe to labels', js: true do
+ scenario 'users can subscribe/unsubscribe to labels', :js do
visit project_labels_path(project)
expect(page).to have_content('bug')
diff --git a/spec/features/projects/labels/update_prioritization_spec.rb b/spec/features/projects/labels/update_prioritization_spec.rb
index 8f85e972027..d063f5c27b5 100644
--- a/spec/features/projects/labels/update_prioritization_spec.rb
+++ b/spec/features/projects/labels/update_prioritization_spec.rb
@@ -17,7 +17,7 @@ feature 'Prioritize labels' do
sign_in user
end
- scenario 'user can prioritize a group label', js: true do
+ scenario 'user can prioritize a group label', :js do
visit project_labels_path(project)
expect(page).to have_content('Star labels to start sorting by priority')
@@ -34,7 +34,7 @@ feature 'Prioritize labels' do
end
end
- scenario 'user can unprioritize a group label', js: true do
+ scenario 'user can unprioritize a group label', :js do
create(:label_priority, project: project, label: feature, priority: 1)
visit project_labels_path(project)
@@ -52,7 +52,7 @@ feature 'Prioritize labels' do
end
end
- scenario 'user can prioritize a project label', js: true do
+ scenario 'user can prioritize a project label', :js do
visit project_labels_path(project)
expect(page).to have_content('Star labels to start sorting by priority')
@@ -69,7 +69,7 @@ feature 'Prioritize labels' do
end
end
- scenario 'user can unprioritize a project label', js: true do
+ scenario 'user can unprioritize a project label', :js do
create(:label_priority, project: project, label: bug, priority: 1)
visit project_labels_path(project)
@@ -88,7 +88,7 @@ feature 'Prioritize labels' do
end
end
- scenario 'user can sort prioritized labels and persist across reloads', js: true do
+ scenario 'user can sort prioritized labels and persist across reloads', :js do
create(:label_priority, project: project, label: bug, priority: 1)
create(:label_priority, project: project, label: feature, priority: 2)
diff --git a/spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb b/spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb
index c8988aa63a7..6d729f2f85f 100644
--- a/spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb
+++ b/spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Projects > Members > Group requester cannot request access to project', js: true do
+feature 'Projects > Members > Group requester cannot request access to project', :js do
let(:user) { create(:user) }
let(:owner) { create(:user) }
let(:group) { create(:group, :public, :access_requestable) }
diff --git a/spec/features/projects/members/groups_with_access_list_spec.rb b/spec/features/projects/members/groups_with_access_list_spec.rb
index 9950272af08..b1053982eee 100644
--- a/spec/features/projects/members/groups_with_access_list_spec.rb
+++ b/spec/features/projects/members/groups_with_access_list_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Projects > Members > Groups with access list', js: true do
+feature 'Projects > Members > Groups with access list', :js do
let(:user) { create(:user) }
let(:group) { create(:group, :public) }
let(:project) { create(:project, :public) }
diff --git a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
index cd621b6b3ce..5f7b4ee0e77 100644
--- a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Projects > Members > Master adds member with expiration date', js: true do
+feature 'Projects > Members > Master adds member with expiration date', :js do
include Select2Helper
include ActiveSupport::Testing::TimeHelpers
diff --git a/spec/features/projects/merge_requests/user_accepts_merge_request_spec.rb b/spec/features/projects/merge_requests/user_accepts_merge_request_spec.rb
new file mode 100644
index 00000000000..c35ba2d7016
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_accepts_merge_request_spec.rb
@@ -0,0 +1,84 @@
+require 'spec_helper'
+
+describe 'User accepts a merge request', :js do
+ let(:merge_request) { create(:merge_request, :with_diffs, :simple, source_project: project) }
+ let(:project) { create(:project, :public, :repository) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+ context 'with removing the source branch' do
+ before do
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'accepts a merge request' do
+ check('Remove source branch')
+ click_button('Merge')
+
+ expect(page).to have_content('The changes were merged into')
+ expect(page).not_to have_selector('.js-remove-branch-button')
+
+ # Wait for View Resource requests to complete so they don't blow up if they are
+ # only handled after `DatabaseCleaner` has already run.
+ wait_for_requests
+ end
+ end
+
+ context 'without removing the source branch' do
+ before do
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'accepts a merge request' do
+ click_button('Merge')
+
+ expect(page).to have_content('The changes were merged into')
+ expect(page).to have_selector('.js-remove-branch-button')
+
+ # Wait for View Resource requests to complete so they don't blow up if they are
+ # only handled after `DatabaseCleaner` has already run
+ wait_for_requests
+ end
+ end
+
+ context 'when a URL has an anchor' do
+ before do
+ visit(merge_request_path(merge_request, anchor: 'note_123'))
+ end
+
+ it 'accepts a merge request' do
+ check('Remove source branch')
+ click_button('Merge')
+
+ expect(page).to have_content('The changes were merged into')
+ expect(page).not_to have_selector('.js-remove-branch-button')
+
+ # Wait for View Resource requests to complete so they don't blow up if they are
+ # only handled after `DatabaseCleaner` has already run
+ wait_for_requests
+ end
+ end
+
+ context 'when modifying the merge commit message' do
+ before do
+ merge_request.mark_as_mergeable
+
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'accepts a merge request' do
+ click_button('Modify commit message')
+ fill_in('Commit message', with: 'wow such merge')
+
+ click_button('Merge')
+
+ page.within('.status-box') do
+ expect(page).to have_content('Merged')
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_closes_merge_request_spec.rb b/spec/features/projects/merge_requests/user_closes_merge_request_spec.rb
new file mode 100644
index 00000000000..b257f447439
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_closes_merge_request_spec.rb
@@ -0,0 +1,21 @@
+require 'spec_helper'
+
+describe 'User closes a merge request', :js do
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'closes a merge request' do
+ click_link('Close merge request', match: :first)
+
+ expect(page).to have_content(merge_request.title)
+ expect(page).to have_content('Closed by')
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_comments_on_commit_spec.rb b/spec/features/projects/merge_requests/user_comments_on_commit_spec.rb
new file mode 100644
index 00000000000..0a952cfc2a9
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_comments_on_commit_spec.rb
@@ -0,0 +1,19 @@
+require 'spec_helper'
+
+describe 'User comments on a commit', :js do
+ include MergeRequestDiffHelpers
+ include RepoHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_commit_path(project, sample_commit.id))
+ end
+
+ include_examples 'comment on merge request file'
+end
diff --git a/spec/features/projects/merge_requests/user_comments_on_diff_spec.rb b/spec/features/projects/merge_requests/user_comments_on_diff_spec.rb
new file mode 100644
index 00000000000..f34302f25f8
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_comments_on_diff_spec.rb
@@ -0,0 +1,172 @@
+require 'spec_helper'
+
+describe 'User comments on a diff', :js do
+ include MergeRequestDiffHelpers
+ include RepoHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) do
+ create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
+ end
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(diffs_project_merge_request_path(project, merge_request))
+ end
+
+ context 'when viewing comments' do
+ context 'when toggling inline comments' do
+ context 'in a single file' do
+ it 'hides a comment' do
+ click_diff_line(find("[id='#{sample_compare.changes[1][:line_code]}']"))
+
+ page.within('.js-discussion-note-form') do
+ fill_in('note_note', with: 'Line is wrong')
+ click_button('Comment')
+ end
+
+ page.within('.files > div:nth-child(3)') do
+ expect(page).to have_content('Line is wrong')
+
+ find('.js-toggle-diff-comments').trigger('click')
+
+ expect(page).not_to have_content('Line is wrong')
+ end
+ end
+ end
+
+ context 'in multiple files' do
+ it 'toggles comments' do
+ click_diff_line(find("[id='#{sample_compare.changes[0][:line_code]}']"))
+
+ page.within('.js-discussion-note-form') do
+ fill_in('note_note', with: 'Line is correct')
+ click_button('Comment')
+ end
+
+ wait_for_requests
+
+ page.within('.files > div:nth-child(2) .note-body > .note-text') do
+ expect(page).to have_content('Line is correct')
+ end
+
+ click_diff_line(find("[id='#{sample_compare.changes[1][:line_code]}']"))
+
+ page.within('.js-discussion-note-form') do
+ fill_in('note_note', with: 'Line is wrong')
+ click_button('Comment')
+ end
+
+ wait_for_requests
+
+ # Hide the comment.
+ page.within('.files > div:nth-child(3)') do
+ find('.js-toggle-diff-comments').trigger('click')
+
+ expect(page).not_to have_content('Line is wrong')
+ end
+
+ # At this moment a user should see only one comment.
+ # The other one should be hidden.
+ page.within('.files > div:nth-child(2) .note-body > .note-text') do
+ expect(page).to have_content('Line is correct')
+ end
+
+ # Show the comment.
+ page.within('.files > div:nth-child(3)') do
+ find('.js-toggle-diff-comments').trigger('click')
+ end
+
+ # Now both the comments should be shown.
+ page.within('.files > div:nth-child(3) .note-body > .note-text') do
+ expect(page).to have_content('Line is wrong')
+ end
+
+ page.within('.files > div:nth-child(2) .note-body > .note-text') do
+ expect(page).to have_content('Line is correct')
+ end
+
+ # Check the same comments in the side-by-side view.
+ click_link('Side-by-side')
+
+ wait_for_requests
+
+ page.within('.files > div:nth-child(3) .parallel .note-body > .note-text') do
+ expect(page).to have_content('Line is wrong')
+ end
+
+ page.within('.files > div:nth-child(2) .parallel .note-body > .note-text') do
+ expect(page).to have_content('Line is correct')
+ end
+ end
+ end
+ end
+ end
+
+ context 'when adding comments' do
+ include_examples 'comment on merge request file'
+ end
+
+ context 'when editing comments' do
+ it 'edits a comment' do
+ click_diff_line(find("[id='#{sample_commit.line_code}']"))
+
+ page.within('.js-discussion-note-form') do
+ fill_in(:note_note, with: 'Line is wrong')
+ click_button('Comment')
+ end
+
+ page.within('.diff-file:nth-of-type(5) .note') do
+ find('.js-note-edit').click
+
+ page.within('.current-note-edit-form') do
+ fill_in('note_note', with: 'Typo, please fix')
+ click_button('Save comment')
+ end
+
+ expect(page).not_to have_button('Save comment', disabled: true)
+ end
+
+ page.within('.diff-file:nth-of-type(5) .note') do
+ expect(page).to have_content('Typo, please fix').and have_no_content('Line is wrong')
+ end
+ end
+ end
+
+ context 'when deleting comments' do
+ it 'deletes a comment' do
+ click_diff_line(find("[id='#{sample_commit.line_code}']"))
+
+ page.within('.js-discussion-note-form') do
+ fill_in(:note_note, with: 'Line is wrong')
+ click_button('Comment')
+ end
+
+ page.within('.notes-tab .badge') do
+ expect(page).to have_content('1')
+ end
+
+ page.within('.diff-file:nth-of-type(5) .note') do
+ find('.more-actions').click
+ find('.more-actions .dropdown-menu li', match: :first)
+
+ find('.js-note-delete').click
+ end
+
+ page.within('.merge-request-tabs') do
+ find('.notes-tab').trigger('click')
+ end
+
+ wait_for_requests
+
+ expect(page).not_to have_css('.notes .discussion')
+
+ page.within('.notes-tab .badge') do
+ expect(page).to have_content('0')
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_comments_on_merge_request_spec.rb b/spec/features/projects/merge_requests/user_comments_on_merge_request_spec.rb
new file mode 100644
index 00000000000..2eb652147ce
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_comments_on_merge_request_spec.rb
@@ -0,0 +1,50 @@
+require 'spec_helper'
+
+describe 'User comments on a merge request', :js do
+ include RepoHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'adds a comment' do
+ page.within('.js-main-target-form') do
+ fill_in(:note_note, with: '# Comment with a header')
+ click_button('Comment')
+ end
+
+ wait_for_requests
+
+ page.within('.note') do
+ expect(page).to have_content('Comment with a header')
+ expect(page).not_to have_css('#comment-with-a-header')
+ end
+ end
+
+ it 'loads new comment' do
+    # Add a new comment in the background in order to check
+    # whether it is loaded automatically for the current user.
+ create(:diff_note_on_merge_request, project: project, noteable: merge_request, author: user, note: 'Line is wrong')
+
+ # Trigger a refresh of notes.
+ execute_script("$(document).trigger('visibilitychange');")
+ wait_for_requests
+
+ page.within('.notes .discussion') do
+ expect(page).to have_content("#{user.name} #{user.to_reference} started a discussion")
+ expect(page).to have_content(sample_commit.line_code_path)
+ expect(page).to have_content('Line is wrong')
+ end
+
+ page.within('.notes-tab .badge') do
+ expect(page).to have_content('1')
+ end
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_creates_merge_request_spec.rb b/spec/features/projects/merge_requests/user_creates_merge_request_spec.rb
new file mode 100644
index 00000000000..f285c6c8783
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_creates_merge_request_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+
+describe 'User creates a merge request', :js do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_new_merge_request_path(project))
+ end
+
+ it 'creates a merge request' do
+ find('.js-source-branch').click
+ click_link('fix')
+
+ find('.js-target-branch').click
+ click_link('feature')
+
+ click_button('Compare branches')
+
+ fill_in('merge_request_title', with: 'Wiki Feature')
+ click_button('Submit merge request')
+
+ page.within('.merge-request') do
+ expect(page).to have_content('Wiki Feature')
+ end
+
+ wait_for_requests
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_edits_merge_request_spec.rb b/spec/features/projects/merge_requests/user_edits_merge_request_spec.rb
new file mode 100644
index 00000000000..f6e3997383f
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_edits_merge_request_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe 'User edits a merge request', :js do
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(edit_project_merge_request_path(project, merge_request))
+ end
+
+ it 'changes the target branch' do
+ expect(page).to have_content('Target branch')
+
+ first('.target_branch').click
+ select('merge-test', from: 'merge_request_target_branch', visible: false)
+ click_button('Save changes')
+
+ expect(page).to have_content("Request to merge #{merge_request.source_branch} into merge-test")
+ expect(page).to have_content("changed target branch from #{merge_request.target_branch} to merge-test")
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_manages_subscription_spec.rb b/spec/features/projects/merge_requests/user_manages_subscription_spec.rb
new file mode 100644
index 00000000000..30a80f8e652
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_manages_subscription_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+
+describe 'User manages subscription', :js do
+ let(:project) { create(:project, :public, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'toggles subscription' do
+ subscribe_button = find('.issuable-subscribe-button span')
+
+ expect(subscribe_button).to have_content('Subscribe')
+
+ click_on('Subscribe')
+
+ wait_for_requests
+
+ expect(subscribe_button).to have_content('Unsubscribe')
+
+ click_on('Unsubscribe')
+
+ wait_for_requests
+
+ expect(subscribe_button).to have_content('Subscribe')
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_reopens_merge_request_spec.rb b/spec/features/projects/merge_requests/user_reopens_merge_request_spec.rb
new file mode 100644
index 00000000000..ba3c9789da1
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_reopens_merge_request_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+describe 'User reopens a merge request', :js do
+ let(:project) { create(:project, :public, :repository) }
+ let!(:merge_request) { create(:closed_merge_request, source_project: project, target_project: project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'reopens a merge request' do
+ click_link('Reopen merge request', match: :first)
+
+ page.within('.status-box') do
+ expect(page).to have_content('Open')
+ end
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_reverts_merge_request_spec.rb b/spec/features/projects/merge_requests/user_reverts_merge_request_spec.rb
new file mode 100644
index 00000000000..a41d683dbbb
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_reverts_merge_request_spec.rb
@@ -0,0 +1,59 @@
+require 'spec_helper'
+
+describe 'User reverts a merge request', :js do
+ let(:merge_request) { create(:merge_request, :with_diffs, :simple, source_project: project) }
+ let(:project) { create(:project, :public, :repository) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_developer(user)
+ sign_in(user)
+
+ visit(merge_request_path(merge_request))
+
+ click_button('Merge')
+
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'reverts a merge request' do
+ find("a[href='#modal-revert-commit']").click
+
+ page.within('#modal-revert-commit') do
+ uncheck('create_merge_request')
+ click_button('Revert')
+ end
+
+ expect(page).to have_content('The merge request has been successfully reverted.')
+
+ wait_for_requests
+ end
+
+ it 'does not revert a merge request that was previously reverted' do
+ find("a[href='#modal-revert-commit']").click
+
+ page.within('#modal-revert-commit') do
+ uncheck('create_merge_request')
+ click_button('Revert')
+ end
+
+ find("a[href='#modal-revert-commit']").click
+
+ page.within('#modal-revert-commit') do
+ uncheck('create_merge_request')
+ click_button('Revert')
+ end
+
+ expect(page).to have_content('Sorry, we cannot revert this merge request automatically.')
+ end
+
+ it 'reverts a merge request in a new merge request' do
+ find("a[href='#modal-revert-commit']").click
+
+ page.within('#modal-revert-commit') do
+ click_button('Revert')
+ end
+
+ expect(page).to have_content('The merge request has been successfully reverted. You can now submit a merge request to get this change into the original branch.')
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_sorts_merge_requests_spec.rb b/spec/features/projects/merge_requests/user_sorts_merge_requests_spec.rb
new file mode 100644
index 00000000000..d8d9f7e2a8c
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_sorts_merge_requests_spec.rb
@@ -0,0 +1,63 @@
+require 'spec_helper'
+
+describe 'User sorts merge requests' do
+ let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let!(:merge_request2) do
+ create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
+ end
+ let(:project) { create(:project, :public, :repository) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_merge_requests_path(project))
+ end
+
+ it 'keeps the sort option' do
+ find('button.dropdown-toggle').click
+
+ page.within('.content ul.dropdown-menu.dropdown-menu-align-right li') do
+ click_link('Last updated')
+ end
+
+ visit(merge_requests_dashboard_path(assignee_id: user.id))
+
+ expect(find('.issues-filters')).to have_content('Last updated')
+
+ visit(project_merge_requests_path(project))
+
+ expect(find('.issues-filters')).to have_content('Last updated')
+ end
+
+ context 'when merge requests have awards' do
+ before do
+ create_list(:award_emoji, 2, awardable: merge_request)
+ create(:award_emoji, :downvote, awardable: merge_request)
+
+ create(:award_emoji, awardable: merge_request2)
+ create_list(:award_emoji, 2, :downvote, awardable: merge_request2)
+ end
+
+ it 'sorts by popularity' do
+ find('button.dropdown-toggle').click
+
+ page.within('.content ul.dropdown-menu.dropdown-menu-align-right li') do
+ click_link('Popularity')
+ end
+
+ page.within('.mr-list') do
+ page.within('li.merge-request:nth-child(1)') do
+ expect(page).to have_content(merge_request.title)
+ expect(page).to have_content('2 1')
+ end
+
+ page.within('li.merge-request:nth-child(2)') do
+ expect(page).to have_content(merge_request2.title)
+ expect(page).to have_content('1 2')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_views_all_merge_requests_spec.rb b/spec/features/projects/merge_requests/user_views_all_merge_requests_spec.rb
new file mode 100644
index 00000000000..6c695bd7aa9
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_views_all_merge_requests_spec.rb
@@ -0,0 +1,15 @@
+require 'spec_helper'
+
+describe 'User views all merge requests' do
+ let!(:closed_merge_request) { create(:closed_merge_request, source_project: project, target_project: project) }
+ let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:project) { create(:project, :public) }
+
+ before do
+ visit(project_merge_requests_path(project, state: :all))
+ end
+
+ it 'shows all merge requests' do
+ expect(page).to have_content(merge_request.title).and have_content(closed_merge_request.title)
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_views_closed_merge_requests_spec.rb b/spec/features/projects/merge_requests/user_views_closed_merge_requests_spec.rb
new file mode 100644
index 00000000000..853809fe87a
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_views_closed_merge_requests_spec.rb
@@ -0,0 +1,15 @@
+require 'spec_helper'
+
+describe 'User views closed merge requests' do
+ let!(:closed_merge_request) { create(:closed_merge_request, source_project: project, target_project: project) }
+ let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:project) { create(:project, :public) }
+
+ before do
+ visit(project_merge_requests_path(project, state: :closed))
+ end
+
+ it 'shows closed merge requests' do
+ expect(page).to have_content(closed_merge_request.title).and have_no_content(merge_request.title)
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_views_diffs_spec.rb b/spec/features/projects/merge_requests/user_views_diffs_spec.rb
new file mode 100644
index 00000000000..295eb02b625
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_views_diffs_spec.rb
@@ -0,0 +1,46 @@
+require 'spec_helper'
+
+describe 'User views diffs', :js do
+ let(:merge_request) do
+ create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
+ end
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ visit(diffs_project_merge_request_path(project, merge_request))
+
+ wait_for_requests
+ end
+
+ shared_examples 'unfold diffs' do
+ it 'unfolds diffs' do
+ first('.js-unfold').click
+
+ expect(first('.text-file')).to have_content('.bundle')
+ end
+ end
+
+ it 'shows diffs' do
+ expect(page).to have_css('.tab-content #diffs.active')
+ expect(page).to have_css('#parallel-diff-btn', count: 1)
+ expect(page).to have_css('#inline-diff-btn', count: 1)
+ end
+
+ context 'when in the inline view' do
+ include_examples 'unfold diffs'
+ end
+
+ context 'when in the side-by-side view' do
+ before do
+ click_link('Side-by-side')
+
+ wait_for_requests
+ end
+
+ it 'shows diffs in parallel' do
+ expect(page).to have_css('.parallel')
+ end
+
+ include_examples 'unfold diffs'
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_views_merged_merge_requests_spec.rb b/spec/features/projects/merge_requests/user_views_merged_merge_requests_spec.rb
new file mode 100644
index 00000000000..eb012694f1e
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_views_merged_merge_requests_spec.rb
@@ -0,0 +1,15 @@
+require 'spec_helper'
+
+describe 'User views merged merge requests' do
+ let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let!(:merged_merge_request) { create(:merged_merge_request, source_project: project, target_project: project) }
+ let(:project) { create(:project, :public) }
+
+ before do
+ visit(project_merge_requests_path(project, state: :merged))
+ end
+
+ it 'shows merged merge requests' do
+ expect(page).to have_content(merged_merge_request.title).and have_no_content(merge_request.title)
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_views_open_merge_request_spec.rb b/spec/features/projects/merge_requests/user_views_open_merge_request_spec.rb
new file mode 100644
index 00000000000..8970cf54457
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_views_open_merge_request_spec.rb
@@ -0,0 +1,92 @@
+require 'spec_helper'
+
+describe 'User views an open merge request' do
+ let(:merge_request) do
+ create(:merge_request, source_project: project, target_project: project, description: '# Description header')
+ end
+
+ context 'when a merge request does not have repository' do
+ let(:project) { create(:project, :public) }
+
+ before do
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'renders both the title and the description' do
+ node = find('.wiki h1 a#user-content-description-header')
+ expect(node[:href]).to end_with('#description-header')
+
+ # Work around a weird Capybara behavior where calling `parent` on a node
+ # returns the whole document, not the node's actual parent element
+ expect(find(:xpath, "#{node.path}/..").text).to eq(merge_request.description[2..-1])
+
+ expect(page).to have_content(merge_request.title).and have_content(merge_request.description)
+ end
+ end
+
+ context 'when a merge request has repository', :js do
+ let(:project) { create(:project, :public, :repository) }
+
+ context 'when rendering description preview' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(edit_project_merge_request_path(project, merge_request))
+ end
+
+ it 'renders empty description preview' do
+ find('.gfm-form').fill_in(:merge_request_description, with: '')
+
+ page.within('.gfm-form') do
+ click_link('Preview')
+
+ expect(find('.js-md-preview')).to have_content('Nothing to preview.')
+ end
+ end
+
+ it 'renders description preview' do
+ find('.gfm-form').fill_in(:merge_request_description, with: ':+1: Nice')
+
+ page.within('.gfm-form') do
+ click_link('Preview')
+
+ expect(find('.js-md-preview')).to have_css('gl-emoji')
+ end
+
+ expect(find('.gfm-form')).to have_css('.js-md-preview').and have_link('Write')
+ expect(find('#merge_request_description', visible: false)).not_to be_visible
+ end
+ end
+
+ context 'when the branch is rebased on the target' do
+ let(:merge_request) { create(:merge_request, :rebased, source_project: project, target_project: project) }
+
+ before do
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'does not show diverged commits count' do
+ page.within('.mr-source-target') do
+ expect(page).not_to have_content(/([0-9]+ commit[s]? behind)/)
+ end
+ end
+ end
+
+ context 'when the branch is diverged on the target' do
+ let(:merge_request) { create(:merge_request, :diverged, source_project: project, target_project: project) }
+
+ before do
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'shows diverged commits count' do
+ page.within('.mr-source-target') do
+ expect(page).to have_content(/([0-9]+ commits behind)/)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/merge_requests/user_views_open_merge_requests_spec.rb b/spec/features/projects/merge_requests/user_views_open_merge_requests_spec.rb
new file mode 100644
index 00000000000..07b8c1ef479
--- /dev/null
+++ b/spec/features/projects/merge_requests/user_views_open_merge_requests_spec.rb
@@ -0,0 +1,72 @@
+require 'spec_helper'
+
+describe 'User views open merge requests' do
+ let(:project) { create(:project, :public, :repository) }
+
+ context "when the target branch is the project's default branch" do
+ let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let!(:closed_merge_request) { create(:closed_merge_request, source_project: project, target_project: project) }
+
+ before do
+ visit(project_merge_requests_path(project))
+ end
+
+ it 'shows open merge requests' do
+ expect(page).to have_content(merge_request.title).and have_no_content(closed_merge_request.title)
+ end
+
+ it 'does not show target branch name' do
+ expect(page).to have_content(merge_request.title)
+ expect(find('.issuable-info')).not_to have_content(project.default_branch)
+ end
+ end
+
+ context "when the target branch is different from the project's default branch" do
+ let!(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ target_project: project,
+ source_branch: 'fix',
+ target_branch: 'feature_conflict')
+ end
+
+ before do
+ visit(project_merge_requests_path(project))
+ end
+
+ it 'shows target branch name' do
+ expect(page).to have_content(merge_request.target_branch)
+ end
+ end
+
+ context 'when a merge request has pipelines' do
+ let!(:build) { create :ci_build, pipeline: pipeline }
+
+ let(:merge_request) do
+ create(:merge_request_with_diffs,
+ source_project: project,
+ target_project: project,
+ source_branch: 'merge-test')
+ end
+
+ let(:pipeline) do
+ create(:ci_pipeline,
+ project: project,
+ sha: merge_request.diff_head_sha,
+ ref: merge_request.source_branch,
+ head_pipeline_of: merge_request)
+ end
+
+ before do
+ project.enable_ci
+
+ visit(project_merge_requests_path(project))
+ end
+
+ it 'shows pipeline status' do
+ page.within('.mr-list') do
+ expect(page).to have_link('Pipeline: pending')
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/milestones/user_interacts_with_labels_spec.rb b/spec/features/projects/milestones/user_interacts_with_labels_spec.rb
new file mode 100644
index 00000000000..f6a82f80d65
--- /dev/null
+++ b/spec/features/projects/milestones/user_interacts_with_labels_spec.rb
@@ -0,0 +1,40 @@
+require 'spec_helper'
+
+describe 'User interacts with labels' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, namespace: user.namespace) }
+ let(:milestone) { create(:milestone, project: project, title: 'v2.2', description: '# Description header') }
+ let(:issue1) { create(:issue, project: project, title: 'Bugfix1', milestone: milestone) }
+ let(:issue2) { create(:issue, project: project, title: 'Bugfix2', milestone: milestone) }
+ let(:label_bug) { create(:label, project: project, title: 'bug') }
+ let(:label_feature) { create(:label, project: project, title: 'feature') }
+ let(:label_enhancement) { create(:label, project: project, title: 'enhancement') }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ issue1.labels << [label_bug, label_feature]
+ issue2.labels << [label_bug, label_enhancement]
+
+ visit(project_milestones_path(project))
+ end
+
+ it 'shows the list of labels', :js do
+ click_link('v2.2')
+
+ page.within('.nav-sidebar') do
+ page.find(:xpath, "//a[@href='#tab-labels']").click
+ end
+
+ expect(page).to have_selector('ul.manage-labels-list')
+
+ wait_for_requests
+
+ page.within('#tab-labels') do
+ expect(page).to have_content(label_bug.title)
+ expect(page).to have_content(label_enhancement.title)
+ expect(page).to have_content(label_feature.title)
+ end
+ end
+end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index f7b40cb1820..c35b0840248 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -162,6 +162,16 @@ describe 'Pipelines', :js do
expect(page).to have_selector(
%Q{span[data-original-title="#{pipeline.yaml_errors}"]})
end
+
+ it 'contains badge that indicates failure reason' do
+ expect(page).to have_content 'error'
+ end
+
+ it 'contains badge with tooltip which contains failure reason' do
+ expect(pipeline.failure_reason?).to eq true
+ expect(page).to have_selector(
+ %Q{span[data-original-title="#{pipeline.present.failure_reason}"]})
+ end
end
context 'with manual actions' do
@@ -443,7 +453,7 @@ describe 'Pipelines', :js do
visit new_project_pipeline_path(project)
end
- context 'for valid commit', js: true do
+ context 'for valid commit', :js do
before do
click_button project.default_branch
@@ -491,7 +501,7 @@ describe 'Pipelines', :js do
end
describe 'find pipelines' do
- it 'shows filtered pipelines', js: true do
+ it 'shows filtered pipelines', :js do
click_button project.default_branch
page.within '.dropdown-menu' do
diff --git a/spec/features/projects/project_settings_spec.rb b/spec/features/projects/project_settings_spec.rb
index 5d77cd1ccd5..15a5cd9990b 100644
--- a/spec/features/projects/project_settings_spec.rb
+++ b/spec/features/projects/project_settings_spec.rb
@@ -10,7 +10,7 @@ describe 'Edit Project Settings' do
sign_in(user)
end
- describe 'Project settings section', js: true do
+ describe 'Project settings section', :js do
it 'shows errors for invalid project name' do
visit edit_project_path(project)
fill_in 'project_name_edit', with: 'foo&bar'
@@ -32,6 +32,32 @@ describe 'Edit Project Settings' do
end
end
+ describe 'Merge request settings section' do
+ it 'shows "Merge commit" strategy' do
+ visit edit_project_path(project)
+
+ page.within '.merge-requests-feature' do
+ expect(page).to have_content 'Merge commit'
+ end
+ end
+
+ it 'shows "Merge commit with semi-linear history " strategy' do
+ visit edit_project_path(project)
+
+ page.within '.merge-requests-feature' do
+ expect(page).to have_content 'Merge commit with semi-linear history'
+ end
+ end
+
+ it 'shows "Fast-forward merge" strategy' do
+ visit edit_project_path(project)
+
+ page.within '.merge-requests-feature' do
+ expect(page).to have_content 'Fast-forward merge'
+ end
+ end
+ end
+
describe 'Rename repository section' do
context 'with invalid characters' do
it 'shows errors for invalid project path/name' do
@@ -99,7 +125,7 @@ describe 'Edit Project Settings' do
end
end
- describe 'Transfer project section', js: true do
+ describe 'Transfer project section', :js do
let!(:project) { create(:project, :repository, namespace: user.namespace, name: 'gitlabhq') }
let!(:group) { create(:group) }
diff --git a/spec/features/projects/ref_switcher_spec.rb b/spec/features/projects/ref_switcher_spec.rb
index f0a23729220..f8695403857 100644
--- a/spec/features/projects/ref_switcher_spec.rb
+++ b/spec/features/projects/ref_switcher_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-feature 'Ref switcher', js: true do
+feature 'Ref switcher', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/projects/services/slack_service_spec.rb b/spec/features/projects/services/slack_service_spec.rb
deleted file mode 100644
index c10ec5e2987..00000000000
--- a/spec/features/projects/services/slack_service_spec.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-require 'spec_helper'
-
-feature 'Projects > Slack service > Setup events' do
- let(:user) { create(:user) }
- let(:service) { SlackService.new }
- let(:project) { create(:project, slack_service: service) }
-
- background do
- service.fields
- service.update_attributes(push_channel: 1, issue_channel: 2, merge_request_channel: 3, note_channel: 4, tag_push_channel: 5, pipeline_channel: 6, wiki_page_channel: 7)
- project.team << [user, :master]
- sign_in(user)
- end
-
- scenario 'user can filter events by channel' do
- visit edit_project_service_path(project, service)
-
- expect(page.find_field("service_push_channel").value).to have_content '1'
- expect(page.find_field("service_issue_channel").value).to have_content '2'
- expect(page.find_field("service_merge_request_channel").value).to have_content '3'
- expect(page.find_field("service_note_channel").value).to have_content '4'
- expect(page.find_field("service_tag_push_channel").value).to have_content '5'
- expect(page.find_field("service_pipeline_channel").value).to have_content '6'
- expect(page.find_field("service_wiki_page_channel").value).to have_content '7'
- end
-end
diff --git a/spec/features/projects/services/user_activates_asana_spec.rb b/spec/features/projects/services/user_activates_asana_spec.rb
new file mode 100644
index 00000000000..db836d2985c
--- /dev/null
+++ b/spec/features/projects/services/user_activates_asana_spec.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+
+describe 'User activates Asana' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Asana')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Api key', with: 'verySecret')
+ fill_in('Restrict to branch', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('Asana activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_assembla_spec.rb b/spec/features/projects/services/user_activates_assembla_spec.rb
new file mode 100644
index 00000000000..f099b332785
--- /dev/null
+++ b/spec/features/projects/services/user_activates_assembla_spec.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+describe 'User activates Assembla' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Assembla')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Token', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('Assembla activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb b/spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb
new file mode 100644
index 00000000000..a00c2e0ad99
--- /dev/null
+++ b/spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb
@@ -0,0 +1,31 @@
+require 'spec_helper'
+
+describe 'User activates Atlassian Bamboo CI' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Atlassian Bamboo CI')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Bamboo url', with: 'http://bamboo.example.com')
+ fill_in('Build key', with: 'KEY')
+ fill_in('Username', with: 'user')
+ fill_in('Password', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('Atlassian Bamboo CI activated.')
+
+ # Password field should not be filled in.
+ click_link('Atlassian Bamboo CI')
+
+ expect(find_field('Enter new password').value).to be_nil
+ end
+end
diff --git a/spec/features/projects/services/user_activates_emails_on_push_spec.rb b/spec/features/projects/services/user_activates_emails_on_push_spec.rb
new file mode 100644
index 00000000000..3769875b29c
--- /dev/null
+++ b/spec/features/projects/services/user_activates_emails_on_push_spec.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+describe 'User activates Emails on push' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Emails on push')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Recipients', with: 'qa@company.name')
+ click_button('Save')
+
+ expect(page).to have_content('Emails on push activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_flowdock_spec.rb b/spec/features/projects/services/user_activates_flowdock_spec.rb
new file mode 100644
index 00000000000..5298d8acaf5
--- /dev/null
+++ b/spec/features/projects/services/user_activates_flowdock_spec.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+describe 'User activates Flowdock' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Flowdock')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Token', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('Flowdock activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_hipchat_spec.rb b/spec/features/projects/services/user_activates_hipchat_spec.rb
new file mode 100644
index 00000000000..a9bf16642c7
--- /dev/null
+++ b/spec/features/projects/services/user_activates_hipchat_spec.rb
@@ -0,0 +1,38 @@
+require 'spec_helper'
+
+describe 'User activates HipChat' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('HipChat')
+ end
+
+  context 'with standard settings' do
+ it 'activates service' do
+ check('Active')
+ fill_in('Room', with: 'gitlab')
+ fill_in('Token', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('HipChat activated.')
+ end
+ end
+
+ context 'with custom settings' do
+ it 'activates service' do
+ check('Active')
+ fill_in('Room', with: 'gitlab_custom')
+ fill_in('Token', with: 'secretCustom')
+ fill_in('Server', with: 'https://chat.example.com')
+ click_button('Save')
+
+ expect(page).to have_content('HipChat activated.')
+ end
+ end
+end
diff --git a/spec/features/projects/services/user_activates_irker_spec.rb b/spec/features/projects/services/user_activates_irker_spec.rb
new file mode 100644
index 00000000000..435663c818f
--- /dev/null
+++ b/spec/features/projects/services/user_activates_irker_spec.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+
+describe 'User activates Irker (IRC gateway)' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Irker (IRC gateway)')
+ end
+
+ it 'activates service' do
+ check('Active')
+ check('Colorize messages')
+ fill_in('Recipients', with: 'irc://chat.freenode.net/#commits')
+ click_button('Save')
+
+ expect(page).to have_content('Irker (IRC gateway) activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb b/spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb
new file mode 100644
index 00000000000..1048803fde8
--- /dev/null
+++ b/spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb
@@ -0,0 +1,26 @@
+require 'spec_helper'
+
+describe 'User activates JetBrains TeamCity CI' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('JetBrains TeamCity CI')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Teamcity url', with: 'http://teamcity.example.com')
+ fill_in('Build type', with: 'GitlabTest_Build')
+ fill_in('Username', with: 'user')
+ fill_in('Password', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('JetBrains TeamCity CI activated.')
+ end
+end
diff --git a/spec/features/projects/services/jira_service_spec.rb b/spec/features/projects/services/user_activates_jira_spec.rb
index 65e3a487d4b..0a86292ae6c 100644
--- a/spec/features/projects/services/jira_service_spec.rb
+++ b/spec/features/projects/services/user_activates_jira_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Setup Jira service', :js do
+describe 'User activates Jira', :js do
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:service) { project.create_jira_service }
diff --git a/spec/features/projects/services/mattermost_slash_command_spec.rb b/spec/features/projects/services/user_activates_mattermost_slash_command_spec.rb
index 95d5e8b14b9..95d5e8b14b9 100644
--- a/spec/features/projects/services/mattermost_slash_command_spec.rb
+++ b/spec/features/projects/services/user_activates_mattermost_slash_command_spec.rb
diff --git a/spec/features/projects/services/user_activates_pivotaltracker_spec.rb b/spec/features/projects/services/user_activates_pivotaltracker_spec.rb
new file mode 100644
index 00000000000..d5d109ba48b
--- /dev/null
+++ b/spec/features/projects/services/user_activates_pivotaltracker_spec.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+describe 'User activates PivotalTracker' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('PivotalTracker')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Token', with: 'verySecret')
+ click_button('Save')
+
+ expect(page).to have_content('PivotalTracker activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_pushover_spec.rb b/spec/features/projects/services/user_activates_pushover_spec.rb
new file mode 100644
index 00000000000..9b7e8d62792
--- /dev/null
+++ b/spec/features/projects/services/user_activates_pushover_spec.rb
@@ -0,0 +1,27 @@
+require 'spec_helper'
+
+describe 'User activates Pushover' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+
+ click_link('Pushover')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Api key', with: 'verySecret')
+ fill_in('User key', with: 'verySecret')
+ fill_in('Device', with: 'myDevice')
+ select('High Priority', from: 'Priority')
+ select('Bike', from: 'Sound')
+ click_button('Save')
+
+ expect(page).to have_content('Pushover activated.')
+ end
+end
diff --git a/spec/features/projects/services/user_activates_slack_notifications_spec.rb b/spec/features/projects/services/user_activates_slack_notifications_spec.rb
new file mode 100644
index 00000000000..fae9ebd1bd6
--- /dev/null
+++ b/spec/features/projects/services/user_activates_slack_notifications_spec.rb
@@ -0,0 +1,54 @@
+require 'spec_helper'
+
+describe 'User activates Slack notifications' do
+ let(:user) { create(:user) }
+ let(:service) { SlackService.new }
+ let(:project) { create(:project, slack_service: service) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ context 'when service is not configured yet' do
+ before do
+ visit(project_settings_integrations_path(project))
+
+ click_link('Slack notifications')
+ end
+
+ it 'activates service' do
+ check('Active')
+ fill_in('Webhook', with: 'https://hooks.slack.com/services/SVRWFV0VVAR97N/B02R25XN3/ZBqu7xMupaEEICInN685')
+ click_button('Save')
+
+ expect(page).to have_content('Slack notifications activated.')
+ end
+ end
+
+ context 'when service is already configured' do
+ before do
+ service.fields
+ service.update_attributes(
+ push_channel: 1,
+ issue_channel: 2,
+ merge_request_channel: 3,
+ note_channel: 4,
+ tag_push_channel: 5,
+ pipeline_channel: 6,
+ wiki_page_channel: 7)
+
+ visit(edit_project_service_path(project, service))
+ end
+
+ it 'filters events by channel' do
+ expect(page.find_field('service_push_channel').value).to have_content('1')
+ expect(page.find_field('service_issue_channel').value).to have_content('2')
+ expect(page.find_field('service_merge_request_channel').value).to have_content('3')
+ expect(page.find_field('service_note_channel').value).to have_content('4')
+ expect(page.find_field('service_tag_push_channel').value).to have_content('5')
+ expect(page.find_field('service_pipeline_channel').value).to have_content('6')
+ expect(page.find_field('service_wiki_page_channel').value).to have_content('7')
+ end
+ end
+end
diff --git a/spec/features/projects/services/slack_slash_command_spec.rb b/spec/features/projects/services/user_activates_slack_slash_command_spec.rb
index a8baf126269..a8baf126269 100644
--- a/spec/features/projects/services/slack_slash_command_spec.rb
+++ b/spec/features/projects/services/user_activates_slack_slash_command_spec.rb
diff --git a/spec/features/projects/services/user_views_services_spec.rb b/spec/features/projects/services/user_views_services_spec.rb
new file mode 100644
index 00000000000..f86591c2633
--- /dev/null
+++ b/spec/features/projects/services/user_views_services_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe 'User views services' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_settings_integrations_path(project))
+ end
+
+ it 'shows the list of available services' do
+ expect(page).to have_content('Project services')
+ expect(page).to have_content('Campfire')
+ expect(page).to have_content('HipChat')
+ expect(page).to have_content('Assembla')
+ expect(page).to have_content('Pushover')
+ expect(page).to have_content('Atlassian Bamboo')
+ expect(page).to have_content('JetBrains TeamCity')
+ expect(page).to have_content('Asana')
+ expect(page).to have_content('Irker (IRC gateway)')
+ end
+end
diff --git a/spec/features/projects/settings/integration_settings_spec.rb b/spec/features/projects/settings/integration_settings_spec.rb
index d932c4e4d9a..cbdb7973ac8 100644
--- a/spec/features/projects/settings/integration_settings_spec.rb
+++ b/spec/features/projects/settings/integration_settings_spec.rb
@@ -76,7 +76,7 @@ feature 'Integration settings' do
expect(page).to have_content(url)
end
- scenario 'test existing webhook', js: true do
+ scenario 'test existing webhook', :js do
WebMock.stub_request(:post, hook.url)
visit integrations_path
diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb
index 975d204e75e..de8fbb15b9c 100644
--- a/spec/features/projects/settings/pipelines_settings_spec.rb
+++ b/spec/features/projects/settings/pipelines_settings_spec.rb
@@ -22,7 +22,7 @@ feature "Pipelines settings" do
context 'for master' do
given(:role) { :master }
- scenario 'be allowed to change', js: true do
+ scenario 'be allowed to change', :js do
fill_in('Test coverage parsing', with: 'coverage_regex')
click_on 'Save changes'
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index 15180d4b498..a4fefb0d0e7 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -23,7 +23,7 @@ feature 'Repository settings' do
context 'for master' do
given(:role) { :master }
- context 'Deploy Keys', js: true do
+ context 'Deploy Keys', :js do
let(:private_deploy_key) { create(:deploy_key, title: 'private_deploy_key', public: false) }
let(:public_deploy_key) { create(:another_deploy_key, title: 'public_deploy_key', public: true) }
let(:new_ssh_key) { attributes_for(:key)[:key] }
diff --git a/spec/features/projects/settings/user_manages_group_links_spec.rb b/spec/features/projects/settings/user_manages_group_links_spec.rb
new file mode 100644
index 00000000000..91e8059865c
--- /dev/null
+++ b/spec/features/projects/settings/user_manages_group_links_spec.rb
@@ -0,0 +1,41 @@
+require 'spec_helper'
+
+describe 'User manages group links' do
+ include Select2Helper
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project, namespace: user.namespace) }
+ let(:group_ops) { create(:group, name: 'Ops') }
+ let(:group_market) { create(:group, name: 'Market', path: 'market') }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ share_link = project.project_group_links.new(group_access: Gitlab::Access::MASTER)
+ share_link.group_id = group_ops.id
+ share_link.save!
+
+ visit(project_group_links_path(project))
+ end
+
+ it 'shows a list of groups' do
+ page.within('.project-members-groups') do
+ expect(page).to have_content('Ops')
+ expect(page).not_to have_content('Market')
+ end
+ end
+
+ it 'shares a project with a group', :js do
+ click_link('Share with group')
+
+ select2(group_market.id, from: '#link_group_id')
+ select('Master', from: 'link_group_access')
+
+ click_button('Share')
+
+ page.within('.project-members-groups') do
+ expect(page).to have_content('Market')
+ end
+ end
+end
diff --git a/spec/features/projects/settings/user_manages_project_members_spec.rb b/spec/features/projects/settings/user_manages_project_members_spec.rb
new file mode 100644
index 00000000000..2709047b8de
--- /dev/null
+++ b/spec/features/projects/settings/user_manages_project_members_spec.rb
@@ -0,0 +1,68 @@
+require 'spec_helper'
+
+describe 'User manages project members' do
+ let(:group) { create(:group, name: 'OpenSource') }
+ let(:project) { create(:project) }
+ let(:project2) { create(:project) }
+ let(:user) { create(:user) }
+ let(:user_dmitriy) { create(:user, name: 'Dmitriy') }
+ let(:user_mike) { create(:user, name: 'Mike') }
+
+ before do
+ project.add_master(user)
+ project.add_developer(user_dmitriy)
+ sign_in(user)
+ end
+
+ it 'cancels a team member' do
+ visit(project_project_members_path(project))
+
+ project_member = project.project_members.find_by(user_id: user_dmitriy.id)
+
+ page.within("#project_member_#{project_member.id}") do
+ click_link('Remove user from project')
+ end
+
+ visit(project_project_members_path(project))
+
+ expect(page).not_to have_content(user_dmitriy.name)
+ expect(page).not_to have_content(user_dmitriy.username)
+ end
+
+ it 'imports a team from another project' do
+ project2.add_master(user)
+ project2.add_reporter(user_mike)
+
+ visit(project_project_members_path(project))
+
+ page.within('.users-project-form') do
+ click_link('Import')
+ end
+
+ select(project2.name_with_namespace, from: 'source_project_id')
+ click_button('Import')
+
+ project_member = project.project_members.find_by(user_id: user_mike.id)
+
+ page.within("#project_member_#{project_member.id}") do
+ expect(page).to have_content('Mike')
+ expect(page).to have_content('Reporter')
+ end
+ end
+
+ it 'shows all members of project shared group' do
+ group.add_owner(user)
+ group.add_developer(user_dmitriy)
+
+ share_link = project.project_group_links.new(group_access: Gitlab::Access::MASTER)
+ share_link.group_id = group.id
+ share_link.save!
+
+ visit(project_project_members_path(project))
+
+ page.within('.project-members-groups') do
+ expect(page).to have_content('OpenSource')
+ expect(first('.group_member')).to have_content('Master')
+ end
+ end
+end
diff --git a/spec/features/projects/settings/visibility_settings_spec.rb b/spec/features/projects/settings/visibility_settings_spec.rb
index 37ee6255bd1..1c3b84d0114 100644
--- a/spec/features/projects/settings/visibility_settings_spec.rb
+++ b/spec/features/projects/settings/visibility_settings_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Visibility settings', js: true do
+feature 'Visibility settings', :js do
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace, visibility_level: 20) }
diff --git a/spec/features/projects/shortcuts_spec.rb b/spec/features/projects/shortcuts_spec.rb
deleted file mode 100644
index bf18c444c3d..00000000000
--- a/spec/features/projects/shortcuts_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-require 'spec_helper'
-
-feature 'Project shortcuts' do
- let(:project) { create(:project, name: 'Victorialand') }
- let(:user) { create(:user) }
-
- describe 'On a project', js: true do
- before do
- project.team << [user, :master]
- sign_in user
- visit project_path(project)
- end
-
- describe 'pressing "i"' do
- it 'redirects to new issue page' do
- find('body').native.send_key('i')
- expect(page).to have_content('Victorialand')
- end
- end
- end
-end
diff --git a/spec/features/projects/show_project_spec.rb b/spec/features/projects/show_project_spec.rb
index 1bc6fae9e7f..0b94c9eae5d 100644
--- a/spec/features/projects/show_project_spec.rb
+++ b/spec/features/projects/show_project_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Project show page', feature: true do
+describe 'Project show page', :feature do
context 'when project pending delete' do
let(:project) { create(:project, :empty_repo, pending_delete: true) }
diff --git a/spec/features/projects/snippets/user_comments_on_snippet_spec.rb b/spec/features/projects/snippets/user_comments_on_snippet_spec.rb
new file mode 100644
index 00000000000..1bd2098af6d
--- /dev/null
+++ b/spec/features/projects/snippets/user_comments_on_snippet_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe 'User comments on a snippet', :js do
+ let(:project) { create(:project) }
+ let!(:snippet) { create(:project_snippet, project: project, author: user) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_snippet_path(project, snippet))
+ end
+
+ it 'leaves a comment on a snippet' do
+ page.within('.js-main-target-form') do
+ fill_in('note_note', with: 'Good snippet!')
+ click_button('Comment')
+ end
+
+ wait_for_requests
+
+ expect(page).to have_content('Good snippet!')
+ end
+end
diff --git a/spec/features/projects/snippets/user_deletes_snippet_spec.rb b/spec/features/projects/snippets/user_deletes_snippet_spec.rb
new file mode 100644
index 00000000000..ca5f7981c33
--- /dev/null
+++ b/spec/features/projects/snippets/user_deletes_snippet_spec.rb
@@ -0,0 +1,20 @@
+require 'spec_helper'
+
+describe 'User deletes a snippet' do
+ let(:project) { create(:project) }
+ let!(:snippet) { create(:project_snippet, project: project, author: user) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_snippet_path(project, snippet))
+ end
+
+ it 'deletes a snippet' do
+ first(:link, 'Delete').click
+
+ expect(page).not_to have_content(snippet.title)
+ end
+end
diff --git a/spec/features/projects/snippets/user_updates_snippet_spec.rb b/spec/features/projects/snippets/user_updates_snippet_spec.rb
new file mode 100644
index 00000000000..09a390443cf
--- /dev/null
+++ b/spec/features/projects/snippets/user_updates_snippet_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe 'User updates a snippet' do
+ let(:project) { create(:project) }
+ let!(:snippet) { create(:project_snippet, project: project, author: user) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_snippet_path(project, snippet))
+ end
+
+ it 'updates a snippet' do
+ page.within('.detail-page-header') do
+ first(:link, 'Edit').click
+ end
+
+ fill_in('project_snippet_title', with: 'Snippet new title')
+ click_button('Save')
+
+ expect(page).to have_content('Snippet new title')
+ end
+end
diff --git a/spec/features/projects/snippets/user_views_snippets_spec.rb b/spec/features/projects/snippets/user_views_snippets_spec.rb
new file mode 100644
index 00000000000..e9992e00ca8
--- /dev/null
+++ b/spec/features/projects/snippets/user_views_snippets_spec.rb
@@ -0,0 +1,20 @@
+require 'spec_helper'
+
+describe 'User views snippets' do
+ let(:project) { create(:project) }
+ let!(:project_snippet) { create(:project_snippet, project: project, author: user) }
+ let!(:snippet) { create(:snippet, author: user) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_snippets_path(project))
+ end
+
+ it 'shows snippets' do
+ expect(page).to have_content(project_snippet.title)
+ expect(page).not_to have_content(snippet.title)
+ end
+end
diff --git a/spec/features/projects/user_archives_project_spec.rb b/spec/features/projects/user_archives_project_spec.rb
new file mode 100644
index 00000000000..72063d13c2a
--- /dev/null
+++ b/spec/features/projects/user_archives_project_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+
+describe 'User archives a project' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+
+ sign_in(user)
+ end
+
+ context 'when a project is archived' do
+ let(:project) { create(:project, :archived, namespace: user.namespace) }
+
+ before do
+ visit(edit_project_path(project))
+ end
+
+ it 'unarchives a project' do
+ expect(page).to have_content('Unarchive project')
+
+ click_link('Unarchive')
+
+ expect(page).not_to have_content('Archived project')
+ end
+ end
+
+ context 'when a project is unarchived' do
+ let(:project) { create(:project, :repository, namespace: user.namespace) }
+
+ before do
+ visit(edit_project_path(project))
+ end
+
+ it 'archives a project' do
+ expect(page).to have_content('Archive project')
+
+ click_link('Archive')
+
+ expect(page).to have_content('Archived')
+ end
+ end
+end
diff --git a/spec/features/projects/user_browses_a_tree_with_a_folder_containing_only_a_folder.rb b/spec/features/projects/user_browses_a_tree_with_a_folder_containing_only_a_folder.rb
new file mode 100644
index 00000000000..a17e65cc5b9
--- /dev/null
+++ b/spec/features/projects/user_browses_a_tree_with_a_folder_containing_only_a_folder.rb
@@ -0,0 +1,20 @@
+require 'spec_helper'
+
+# This is a regression test for https://gitlab.com/gitlab-org/gitlab-ce/issues/37569
+describe 'User browses a tree with a folder containing only a folder' do
+ let(:project) { create(:project, :empty_repo) }
+ let(:user) { project.creator }
+
+ before do
+ # We need to disable the tree.flat_path provided by Gitaly to reproduce the issue
+ allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(false)
+
+ project.repository.create_dir(user, 'foo/bar', branch_name: 'master', message: 'Add the foo/bar folder')
+ sign_in(user)
+ visit(project_tree_path(project, project.repository.root_ref))
+ end
+
+ it 'shows the nested folder on a single row' do
+ expect(page).to have_content('foo/bar')
+ end
+end
diff --git a/spec/features/projects/user_browses_files_spec.rb b/spec/features/projects/user_browses_files_spec.rb
index b7a0b72db50..f43b11c9485 100644
--- a/spec/features/projects/user_browses_files_spec.rb
+++ b/spec/features/projects/user_browses_files_spec.rb
@@ -76,7 +76,7 @@ describe 'User browses files' do
expect(page).to have_content('LICENSE')
end
- it 'shows files from a repository with apostroph in its name', js: true do
+ it 'shows files from a repository with apostroph in its name', :js do
first('.js-project-refs-dropdown').click
page.within('.project-refs-form') do
@@ -91,7 +91,7 @@ describe 'User browses files' do
expect(page).not_to have_content('Loading commit data...')
end
- it 'shows the code with a leading dot in the directory', js: true do
+ it 'shows the code with a leading dot in the directory', :js do
first('.js-project-refs-dropdown').click
page.within('.project-refs-form') do
@@ -117,7 +117,7 @@ describe 'User browses files' do
click_link('.gitignore')
end
- it 'shows a file content', js: true do
+ it 'shows a file content', :js do
wait_for_requests
expect(page).to have_content('*.rbc')
end
@@ -168,7 +168,7 @@ describe 'User browses files' do
visit(tree_path_root_ref)
end
- it 'shows a preview of a file content', js: true do
+ it 'shows a preview of a file content', :js do
find('.add-to-tree').click
click_link('Upload file')
drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'logo_sample.svg'))
diff --git a/spec/features/projects/user_creates_directory_spec.rb b/spec/features/projects/user_creates_directory_spec.rb
index 1ba5d83eadf..052cb3188c5 100644
--- a/spec/features/projects/user_creates_directory_spec.rb
+++ b/spec/features/projects/user_creates_directory_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'User creates a directory', js: true do
+feature 'User creates a directory', :js do
let(:fork_message) do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
@@ -79,7 +79,7 @@ feature 'User creates a directory', js: true do
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Create directory')
- fork = user.fork_of(project2)
+ fork = user.fork_of(project2.reload)
expect(current_path).to eq(project_new_merge_request_path(fork))
end
diff --git a/spec/features/projects/user_creates_files_spec.rb b/spec/features/projects/user_creates_files_spec.rb
index 3d335687510..cbe70a93942 100644
--- a/spec/features/projects/user_creates_files_spec.rb
+++ b/spec/features/projects/user_creates_files_spec.rb
@@ -59,7 +59,7 @@ describe 'User creates files' do
expect(page).to have_selector('.file-editor')
end
- it 'creates and commit a new file', js: true do
+ it 'creates and commit a new file', :js do
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:file_name, with: 'not_a_file.md')
fill_in(:commit_message, with: 'New commit message', visible: true)
@@ -74,7 +74,7 @@ describe 'User creates files' do
expect(page).to have_content('*.rbca')
end
- it 'creates and commit a new file with new lines at the end of file', js: true do
+ it 'creates and commit a new file with new lines at the end of file', :js do
execute_script('ace.edit("editor").setValue("Sample\n\n\n")')
fill_in(:file_name, with: 'not_a_file.md')
fill_in(:commit_message, with: 'New commit message', visible: true)
@@ -89,7 +89,7 @@ describe 'User creates files' do
expect(evaluate_script('ace.edit("editor").getValue()')).to eq("Sample\n\n\n")
end
- it 'creates and commit a new file with a directory name', js: true do
+ it 'creates and commit a new file with a directory name', :js do
fill_in(:file_name, with: 'foo/bar/baz.txt')
expect(page).to have_selector('.file-editor')
@@ -105,7 +105,7 @@ describe 'User creates files' do
expect(page).to have_content('*.rbca')
end
- it 'creates and commit a new file specifying a new branch', js: true do
+ it 'creates and commit a new file specifying a new branch', :js do
expect(page).to have_selector('.file-editor')
execute_script("ace.edit('editor').setValue('*.rbca')")
@@ -130,7 +130,7 @@ describe 'User creates files' do
visit(project2_tree_path_root_ref)
end
- it 'creates and commit new file in forked project', js: true do
+ it 'creates and commit new file in forked project', :js do
find('.add-to-tree').click
click_link('New file')
@@ -142,7 +142,7 @@ describe 'User creates files' do
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
- fork = user.fork_of(project2)
+ fork = user.fork_of(project2.reload)
expect(current_path).to eq(project_new_merge_request_path(fork))
expect(page).to have_content('New commit message')
diff --git a/spec/features/projects/user_creates_project_spec.rb b/spec/features/projects/user_creates_project_spec.rb
index 1c3791f63ac..4a152572502 100644
--- a/spec/features/projects/user_creates_project_spec.rb
+++ b/spec/features/projects/user_creates_project_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'User creates a project', js: true do
+feature 'User creates a project', :js do
let(:user) { create(:user) }
before do
diff --git a/spec/features/projects/user_deletes_files_spec.rb b/spec/features/projects/user_deletes_files_spec.rb
index 95cd316be0e..9e4e92ec076 100644
--- a/spec/features/projects/user_deletes_files_spec.rb
+++ b/spec/features/projects/user_deletes_files_spec.rb
@@ -21,7 +21,7 @@ describe 'User deletes files' do
visit(project_tree_path_root_ref)
end
- it 'deletes the file', js: true do
+ it 'deletes the file', :js do
click_link('.gitignore')
expect(page).to have_content('.gitignore')
@@ -41,7 +41,7 @@ describe 'User deletes files' do
visit(project2_tree_path_root_ref)
end
- it 'deletes the file in a forked project', js: true do
+ it 'deletes the file in a forked project', :js do
click_link('.gitignore')
expect(page).to have_content('.gitignore')
@@ -59,7 +59,7 @@ describe 'User deletes files' do
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Delete file')
- fork = user.fork_of(project2)
+ fork = user.fork_of(project2.reload)
expect(current_path).to eq(project_new_merge_request_path(fork))
expect(page).to have_content('New commit message')
diff --git a/spec/features/projects/user_edits_files_spec.rb b/spec/features/projects/user_edits_files_spec.rb
index 3129aad8473..e8d83a661d4 100644
--- a/spec/features/projects/user_edits_files_spec.rb
+++ b/spec/features/projects/user_edits_files_spec.rb
@@ -1,6 +1,7 @@
require 'spec_helper'
describe 'User edits files' do
+ include ProjectForksHelper
let(:project) { create(:project, :repository, name: 'Shop') }
let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
let(:project_tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
@@ -17,11 +18,10 @@ describe 'User edits files' do
visit(project_tree_path_root_ref)
end
- it 'inserts a content of a file', js: true do
+ it 'inserts a content of a file', :js do
click_link('.gitignore')
find('.js-edit-blob').click
-
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
@@ -35,11 +35,10 @@ describe 'User edits files' do
expect(page).not_to have_link('edit')
end
- it 'commits an edited file', js: true do
+ it 'commits an edited file', :js do
click_link('.gitignore')
find('.js-edit-blob').click
-
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
@@ -52,11 +51,11 @@ describe 'User edits files' do
expect(page).to have_content('*.rbca')
end
- it 'commits an edited file to a new branch', js: true do
+ it 'commits an edited file to a new branch', :js do
click_link('.gitignore')
find('.js-edit-blob').click
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
@@ -67,15 +66,13 @@ describe 'User edits files' do
click_link('Changes')
- wait_for_requests
expect(page).to have_content('*.rbca')
end
- it 'shows the diff of an edited file', js: true do
+ it 'shows the diff of an edited file', :js do
click_link('.gitignore')
find('.js-edit-blob').click
-
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
click_link('Preview changes')
@@ -90,7 +87,7 @@ describe 'User edits files' do
visit(project2_tree_path_root_ref)
end
- it 'inserts a content of a file in a forked project', js: true do
+ it 'inserts a content of a file in a forked project', :js do
click_link('.gitignore')
find('.js-edit-blob').click
@@ -104,14 +101,14 @@ describe 'User edits files' do
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
)
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
expect(evaluate_script('ace.edit("editor").getValue()')).to eq('*.rbca')
end
- it 'commits an edited file in a forked project', js: true do
+ it 'commits an edited file in a forked project', :js do
click_link('.gitignore')
find('.js-edit-blob').click
@@ -120,13 +117,13 @@ describe 'User edits files' do
click_link('Fork')
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
- fork = user.fork_of(project2)
+ fork = user.fork_of(project2.reload)
expect(current_path).to eq(project_new_merge_request_path(fork))
@@ -134,5 +131,34 @@ describe 'User edits files' do
expect(page).to have_content('New commit message')
end
+
+ context 'when the user already had a fork of the project', :js do
+ let!(:forked_project) { fork_project(project2, user, namespace: user.namespace, repository: true) }
+ before do
+ visit(project2_tree_path_root_ref)
+ end
+
+ it 'links to the forked project for editing' do
+ click_link('.gitignore')
+ find('.js-edit-blob').click
+
+ expect(page).not_to have_link('Fork')
+ expect(page).not_to have_button('Cancel')
+
+ execute_script("ace.edit('editor').setValue('*.rbca')")
+ fill_in(:commit_message, with: 'Another commit', visible: true)
+ click_button('Commit changes')
+
+ fork = user.fork_of(project2)
+
+ expect(current_path).to eq(project_new_merge_request_path(fork))
+
+ wait_for_requests
+
+ expect(page).to have_content('Another commit')
+ expect(page).to have_content("From #{forked_project.full_path}")
+ expect(page).to have_content("into #{project2.full_path}")
+ end
+ end
end
end
diff --git a/spec/features/projects/user_interacts_with_stars_spec.rb b/spec/features/projects/user_interacts_with_stars_spec.rb
index 0ac3f8181fa..d9d2e0ab171 100644
--- a/spec/features/projects/user_interacts_with_stars_spec.rb
+++ b/spec/features/projects/user_interacts_with_stars_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe 'User interacts with project stars' do
let(:project) { create(:project, :public, :repository) }
- context 'when user is signed in', js: true do
+ context 'when user is signed in', :js do
let(:user) { create(:user) }
before do
diff --git a/spec/features/projects/user_replaces_files_spec.rb b/spec/features/projects/user_replaces_files_spec.rb
index e284fdefd4f..245b6aa285b 100644
--- a/spec/features/projects/user_replaces_files_spec.rb
+++ b/spec/features/projects/user_replaces_files_spec.rb
@@ -23,7 +23,7 @@ describe 'User replaces files' do
visit(project_tree_path_root_ref)
end
- it 'replaces an existed file with a new one', js: true do
+ it 'replaces an existed file with a new one', :js do
click_link('.gitignore')
expect(page).to have_content('.gitignore')
@@ -49,7 +49,7 @@ describe 'User replaces files' do
visit(project2_tree_path_root_ref)
end
- it 'replaces an existed file with a new one in a forked project', js: true do
+ it 'replaces an existed file with a new one in a forked project', :js do
click_link('.gitignore')
expect(page).to have_content('.gitignore')
@@ -74,7 +74,7 @@ describe 'User replaces files' do
expect(page).to have_content('Replacement file commit message')
- fork = user.fork_of(project2)
+ fork = user.fork_of(project2.reload)
expect(current_path).to eq(project_new_merge_request_path(fork))
diff --git a/spec/features/projects/user_uploads_files_spec.rb b/spec/features/projects/user_uploads_files_spec.rb
index 98871317ca3..ae51901adc6 100644
--- a/spec/features/projects/user_uploads_files_spec.rb
+++ b/spec/features/projects/user_uploads_files_spec.rb
@@ -23,7 +23,7 @@ describe 'User uploads files' do
visit(project_tree_path_root_ref)
end
- it 'uploads and commit a new file', js: true do
+ it 'uploads and commit a new file', :js do
find('.add-to-tree').click
click_link('Upload file')
drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
@@ -39,6 +39,9 @@ describe 'User uploads files' do
expect(current_path).to eq(project_new_merge_request_path(project))
click_link('Changes')
+ find("a[data-action='diffs']", text: 'Changes').click
+
+ wait_for_requests
expect(page).to have_content('Lorem ipsum dolor sit amet')
expect(page).to have_content('Sed ut perspiciatis unde omnis')
@@ -51,7 +54,7 @@ describe 'User uploads files' do
visit(project2_tree_path_root_ref)
end
- it 'uploads and commit a new fileto a forked project', js: true do
+ it 'uploads and commit a new file to a forked project', :js do
find('.add-to-tree').click
click_link('Upload file')
@@ -69,11 +72,13 @@ describe 'User uploads files' do
expect(page).to have_content('New commit message')
- fork = user.fork_of(project2)
+ fork = user.fork_of(project2.reload)
expect(current_path).to eq(project_new_merge_request_path(fork))
- click_link('Changes')
+ find("a[data-action='diffs']", text: 'Changes').click
+
+ wait_for_requests
expect(page).to have_content('Lorem ipsum dolor sit amet')
expect(page).to have_content('Sed ut perspiciatis unde omnis')
diff --git a/spec/features/projects/user_uses_shortcuts_spec.rb b/spec/features/projects/user_uses_shortcuts_spec.rb
new file mode 100644
index 00000000000..fb0d8c766fe
--- /dev/null
+++ b/spec/features/projects/user_uses_shortcuts_spec.rb
@@ -0,0 +1,108 @@
+require 'spec_helper'
+
+describe 'User uses shortcuts', :js do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(project_path(project))
+ end
+
+ context 'when navigating to the Overview pages' do
+ it 'redirects to the details page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('p')
+
+ expect(page).to have_active_navigation('Overview')
+ expect(page).to have_active_sub_navigation('Details')
+ end
+
+ it 'redirects to the activity page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('e')
+
+ expect(page).to have_active_navigation('Overview')
+ expect(page).to have_active_sub_navigation('Activity')
+ end
+ end
+
+ context 'when navigating to the Repository pages' do
+ it 'redirects to the repository files page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('f')
+
+ expect(page).to have_active_navigation('Repository')
+ expect(page).to have_active_sub_navigation('Files')
+ end
+
+ it 'redirects to the repository commits page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('c')
+
+ expect(page).to have_active_navigation('Repository')
+ expect(page).to have_active_sub_navigation('Commits')
+ end
+
+ it 'redirects to the repository graph page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('n')
+
+ expect(page).to have_active_navigation('Repository')
+ expect(page).to have_active_sub_navigation('Graph')
+ end
+
+ it 'redirects to the repository charts page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('d')
+
+ expect(page).to have_active_navigation('Repository')
+ expect(page).to have_active_sub_navigation('Charts')
+ end
+ end
+
+ context 'when navigating to the Issues pages' do
+ it 'redirects to the issues list page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('i')
+
+ expect(page).to have_active_navigation('Issues')
+ expect(page).to have_active_sub_navigation('List')
+ end
+
+ it 'redirects to the new issue page' do
+ find('body').native.send_key('i')
+
+ expect(page).to have_content(project.title)
+ end
+ end
+
+ context 'when navigating to the Merge Requests pages' do
+ it 'redirects to the merge requests page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('m')
+
+ expect(page).to have_active_navigation('Merge Requests')
+ end
+ end
+
+ context 'when navigating to the Snippets pages' do
+ it 'redirects to the snippets page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('s')
+
+ expect(page).to have_active_navigation('Snippets')
+ end
+ end
+
+ context 'when navigating to the Wiki pages' do
+ it 'redirects to the wiki page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('w')
+
+ expect(page).to have_active_navigation('Wiki')
+ end
+ end
+end
diff --git a/spec/features/projects/view_on_env_spec.rb b/spec/features/projects/view_on_env_spec.rb
index 2a316a0d0db..7f547a4ca1f 100644
--- a/spec/features/projects/view_on_env_spec.rb
+++ b/spec/features/projects/view_on_env_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'View on environment', js: true do
+describe 'View on environment', :js do
let(:branch_name) { 'feature' }
let(:file_path) { 'files/ruby/feature.rb' }
let(:project) { create(:project, :repository) }
diff --git a/spec/features/projects/wiki/markdown_preview_spec.rb b/spec/features/projects/wiki/markdown_preview_spec.rb
index 9a4ccf3c54d..78c350c8ee4 100644
--- a/spec/features/projects/wiki/markdown_preview_spec.rb
+++ b/spec/features/projects/wiki/markdown_preview_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Projects > Wiki > User previews markdown changes', js: true do
+feature 'Projects > Wiki > User previews markdown changes', :js do
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace) }
let(:wiki_content) do
diff --git a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
index 9d66f482c8d..e72b7dc0dd5 100644
--- a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
@@ -1,38 +1,75 @@
require 'spec_helper'
-feature 'Projects > Wiki > User creates wiki page', :js do
+describe 'User creates wiki page' do
let(:user) { create(:user) }
- background do
- project.team << [user, :master]
+ before do
+ project.add_master(user)
sign_in(user)
- visit project_path(project)
+ visit(project_wikis_path(project))
end
- context 'in the user namespace' do
- let(:project) { create(:project, namespace: user.namespace) }
+ context 'when wiki is empty' do
+ context 'in a user namespace' do
+ let(:project) { create(:project, namespace: user.namespace) }
- context 'when wiki is empty' do
- before do
- find('.shortcuts-wiki').trigger('click')
+ it 'shows validation error message' do
+ page.within('.wiki-form') do
+ fill_in(:wiki_content, with: '')
+ click_on('Create page')
+ end
+
+ expect(page).to have_content('The form contains the following error:')
+ expect(page).to have_content("Content can't be blank")
+
+ page.within('.wiki-form') do
+ fill_in(:wiki_content, with: '[link test](test)')
+ click_on('Create page')
+ end
+
+ expect(page).to have_content('Home')
+ expect(page).to have_content('link test')
+
+ click_link('link test')
+
+ expect(page).to have_content('Create Page')
+ end
+
+ it 'shows non-escaped link in the pages list', :js do
+ click_link('New page')
+
+ page.within('#modal-new-wiki') do
+ fill_in(:new_wiki_path, with: 'one/two/three-test')
+ click_on('Create page')
+ end
+
+ page.within('.wiki-form') do
+ fill_in(:wiki_content, with: 'wiki content')
+ click_on('Create page')
+ end
+
+ expect(current_path).to include('one/two/three-test')
+ expect(page).to have_xpath("//a[@href='/#{project.full_path}/wikis/one/two/three-test']")
end
- scenario 'commit message field has value "Create home"' do
+ it 'has "Create home" as a commit message' do
expect(page).to have_field('wiki[message]', with: 'Create home')
end
- scenario 'directly from the wiki home page' do
- fill_in :wiki_content, with: 'My awesome wiki!'
- page.within '.wiki-form' do
- click_button 'Create page'
+ it 'creates a page from the home page' do
+ fill_in(:wiki_content, with: 'My awesome wiki!')
+
+ page.within('.wiki-form') do
+ click_button('Create page')
end
+
expect(page).to have_content('Home')
expect(page).to have_content("Last edited by #{user.name}")
expect(page).to have_content('My awesome wiki!')
end
- scenario 'creates ASCII wiki with LaTeX blocks' do
+ it 'creates ASCII wiki with LaTeX blocks', :js do
stub_application_setting(plantuml_url: 'http://localhost', plantuml_enabled: true)
ascii_content = <<~MD
@@ -54,10 +91,10 @@ feature 'Projects > Wiki > User creates wiki page', :js do
MD
find('#wiki_format option[value=asciidoc]').select_option
- fill_in :wiki_content, with: ascii_content
+ fill_in(:wiki_content, with: ascii_content)
- page.within '.wiki-form' do
- click_button 'Create page'
+ page.within('.wiki-form') do
+ click_button('Create page')
end
page.within '.wiki' do
@@ -67,27 +104,49 @@ feature 'Projects > Wiki > User creates wiki page', :js do
end
end
- context 'when wiki is not empty' do
- before do
- WikiPages::CreateService.new(project, user, title: 'home', content: 'Home page').execute
- find('.shortcuts-wiki').trigger('click')
+ context 'in a group namespace', :js do
+ let(:project) { create(:project, namespace: create(:group, :public)) }
+
+ it 'has "Create home" as a commit message' do
+ expect(page).to have_field('wiki[message]', with: 'Create home')
+ end
+
+ it 'creates a page from the home page' do
+ page.within('.wiki-form') do
+ fill_in(:wiki_content, with: 'My awesome wiki!')
+ click_button('Create page')
+ end
+
+ expect(page).to have_content('Home')
+ expect(page).to have_content("Last edited by #{user.name}")
+ expect(page).to have_content('My awesome wiki!')
end
+ end
+ end
+
+ context 'when wiki is not empty', :js do
+ before do
+ create(:wiki_page, wiki: create(:project, namespace: user.namespace).wiki, attrs: { title: 'home', content: 'Home page' })
+ end
+
+ context 'in a user namespace' do
+ let(:project) { create(:project, namespace: user.namespace) }
context 'via the "new wiki page" page' do
- scenario 'when the wiki page has a single word name' do
- click_link 'New page'
+ it 'creates a page with a single word' do
+ click_link('New page')
- page.within '#modal-new-wiki' do
- fill_in :new_wiki_path, with: 'foo'
- click_button 'Create page'
+ page.within('#modal-new-wiki') do
+ fill_in(:new_wiki_path, with: 'foo')
+ click_button('Create page')
end
# Commit message field should have correct value.
expect(page).to have_field('wiki[message]', with: 'Create foo')
- page.within '.wiki-form' do
- fill_in :wiki_content, with: 'My awesome wiki!'
- click_button 'Create page'
+ page.within('.wiki-form') do
+ fill_in(:wiki_content, with: 'My awesome wiki!')
+ click_button('Create page')
end
expect(page).to have_content('Foo')
@@ -95,20 +154,20 @@ feature 'Projects > Wiki > User creates wiki page', :js do
expect(page).to have_content('My awesome wiki!')
end
- scenario 'when the wiki page has spaces in the name' do
- click_link 'New page'
+ it 'creates a page with spaces in the name' do
+ click_link('New page')
- page.within '#modal-new-wiki' do
- fill_in :new_wiki_path, with: 'Spaces in the name'
- click_button 'Create page'
+ page.within('#modal-new-wiki') do
+ fill_in(:new_wiki_path, with: 'Spaces in the name')
+ click_button('Create page')
end
# Commit message field should have correct value.
expect(page).to have_field('wiki[message]', with: 'Create spaces in the name')
- page.within '.wiki-form' do
- fill_in :wiki_content, with: 'My awesome wiki!'
- click_button 'Create page'
+ page.within('.wiki-form') do
+ fill_in(:wiki_content, with: 'My awesome wiki!')
+ click_button('Create page')
end
expect(page).to have_content('Spaces in the name')
@@ -116,20 +175,20 @@ feature 'Projects > Wiki > User creates wiki page', :js do
expect(page).to have_content('My awesome wiki!')
end
- scenario 'when the wiki page has hyphens in the name' do
- click_link 'New page'
+ it 'creates a page with hyphens in the name' do
+ click_link('New page')
- page.within '#modal-new-wiki' do
- fill_in :new_wiki_path, with: 'hyphens-in-the-name'
- click_button 'Create page'
+ page.within('#modal-new-wiki') do
+ fill_in(:new_wiki_path, with: 'hyphens-in-the-name')
+ click_button('Create page')
end
# Commit message field should have correct value.
expect(page).to have_field('wiki[message]', with: 'Create hyphens in the name')
- page.within '.wiki-form' do
- fill_in :wiki_content, with: 'My awesome wiki!'
- click_button 'Create page'
+ page.within('.wiki-form') do
+ fill_in(:wiki_content, with: 'My awesome wiki!')
+ click_button('Create page')
end
expect(page).to have_content('Hyphens in the name')
@@ -138,73 +197,47 @@ feature 'Projects > Wiki > User creates wiki page', :js do
end
end
- scenario 'content has autocomplete' do
- click_link 'New page'
+ it 'shows the autocompletion dropdown' do
+ click_link('New page')
- page.within '#modal-new-wiki' do
- fill_in :new_wiki_path, with: 'test-autocomplete'
- click_button 'Create page'
+ page.within('#modal-new-wiki') do
+ fill_in(:new_wiki_path, with: 'test-autocomplete')
+ click_button('Create page')
end
- page.within '.wiki-form' do
+ page.within('.wiki-form') do
find('#wiki_content').native.send_keys('')
- fill_in :wiki_content, with: '@'
+ fill_in(:wiki_content, with: '@')
end
expect(page).to have_selector('.atwho-view')
end
end
- end
-
- context 'in a group namespace' do
- let(:project) { create(:project, namespace: create(:group, :public)) }
- context 'when wiki is empty' do
- before do
- find('.shortcuts-wiki').trigger('click')
- end
-
- scenario 'commit message field has value "Create home"' do
- expect(page).to have_field('wiki[message]', with: 'Create home')
- end
+ context 'in a group namespace' do
+ let(:project) { create(:project, namespace: create(:group, :public)) }
- scenario 'directly from the wiki home page' do
- fill_in :wiki_content, with: 'My awesome wiki!'
- page.within '.wiki-form' do
- click_button 'Create page'
- end
-
- expect(page).to have_content('Home')
- expect(page).to have_content("Last edited by #{user.name}")
- expect(page).to have_content('My awesome wiki!')
- end
- end
-
- context 'when wiki is not empty' do
- before do
- WikiPages::CreateService.new(project, user, title: 'home', content: 'Home page').execute
- find('.shortcuts-wiki').trigger('click')
- end
+ context 'via the "new wiki page" page' do
+ it 'creates a page' do
+ click_link('New page')
- scenario 'via the "new wiki page" page' do
- click_link 'New page'
+ page.within('#modal-new-wiki') do
+ fill_in(:new_wiki_path, with: 'foo')
+ click_button('Create page')
+ end
- page.within '#modal-new-wiki' do
- fill_in :new_wiki_path, with: 'foo'
- click_button 'Create page'
- end
+ # Commit message field should have correct value.
+ expect(page).to have_field('wiki[message]', with: 'Create foo')
- # Commit message field should have correct value.
- expect(page).to have_field('wiki[message]', with: 'Create foo')
+ page.within('.wiki-form') do
+ fill_in(:wiki_content, with: 'My awesome wiki!')
+ click_button('Create page')
+ end
- page.within '.wiki-form' do
- fill_in :wiki_content, with: 'My awesome wiki!'
- click_button 'Create page'
+ expect(page).to have_content('Foo')
+ expect(page).to have_content("Last edited by #{user.name}")
+ expect(page).to have_content('My awesome wiki!')
end
-
- expect(page).to have_content('Foo')
- expect(page).to have_content("Last edited by #{user.name}")
- expect(page).to have_content('My awesome wiki!')
end
end
end
diff --git a/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb b/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb
new file mode 100644
index 00000000000..605e332196b
--- /dev/null
+++ b/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb
@@ -0,0 +1,19 @@
+require 'spec_helper'
+
+feature 'User deletes wiki page' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, namespace: user.namespace) }
+ let(:wiki_page) { create(:wiki_page, wiki: project.wiki) }
+
+ before do
+ sign_in(user)
+ visit(project_wiki_path(project, wiki_page))
+ end
+
+ it 'deletes a page' do
+ click_on('Edit')
+ click_on('Delete')
+
+ expect(page).to have_content('Page was successfully deleted')
+ end
+end
diff --git a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
index 64a80aec205..949d90a50ff 100644
--- a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
@@ -1,83 +1,154 @@
require 'spec_helper'
-feature 'Projects > Wiki > User updates wiki page' do
+describe 'User updates wiki page' do
let(:user) { create(:user) }
- let!(:wiki_page) { WikiPages::CreateService.new(project, user, title: 'home', content: 'Home page').execute }
- background do
- project.team << [user, :master]
+ before do
+ project.add_master(user)
sign_in(user)
+ end
+
+ context 'when wiki is empty' do
+ before do
+ visit(project_wikis_path(project))
+ end
+
+ context 'in a user namespace' do
+ let(:project) { create(:project, namespace: user.namespace) }
+
+ it 'redirects back to the home edit page' do
+ page.within(:css, '.wiki-form .form-actions') do
+ click_on('Cancel')
+ end
+
+ expect(current_path).to eq project_wiki_path(project, :home)
+ end
+
+ it 'updates a page that has a path', :js do
+ click_on('New page')
+
+ page.within('#modal-new-wiki') do
+ fill_in(:new_wiki_path, with: 'one/two/three-test')
+ click_on('Create page')
+ end
+
+ page.within('.wiki-form') do
+ fill_in(:wiki_content, with: 'wiki content')
+ click_on('Create page')
+ end
- visit project_wikis_path(project)
+ expect(current_path).to include('one/two/three-test')
+ expect(find('.wiki-pages')).to have_content('Three')
+
+ first(:link, text: 'Three').click
+
+ expect(find('.nav-text')).to have_content('Three')
+
+ click_on('Edit')
+
+ expect(current_path).to include('one/two/three-test')
+ expect(page).to have_content('Edit Page')
+
+ fill_in('Content', with: 'Updated Wiki Content')
+ click_on('Save changes')
+
+ expect(page).to have_content('Updated Wiki Content')
+ end
+ end
end
- context 'in the user namespace' do
- let(:project) { create(:project, namespace: user.namespace) }
+ context 'when wiki is not empty' do
+ let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) }
+ let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, attrs: { title: 'home', content: 'Home page' }) }
- context 'the home page' do
- scenario 'success when the wiki content is not empty' do
- click_link 'Edit'
+ before do
+ visit(project_wikis_path(project))
+ end
+
+ context 'in a user namespace' do
+ let(:project) { create(:project, namespace: user.namespace) }
+
+ it 'updates a page' do
+ click_link('Edit')
# Commit message field should have correct value.
expect(page).to have_field('wiki[message]', with: 'Update home')
- fill_in :wiki_content, with: 'My awesome wiki!'
- click_button 'Save changes'
+ fill_in(:wiki_content, with: 'My awesome wiki!')
+ click_button('Save changes')
expect(page).to have_content('Home')
expect(page).to have_content("Last edited by #{user.name}")
expect(page).to have_content('My awesome wiki!')
end
- scenario 'failure when the wiki content is empty' do
- click_link 'Edit'
+ it 'shows a validation error message' do
+ click_link('Edit')
- fill_in :wiki_content, with: ''
- click_button 'Save changes'
+ fill_in(:wiki_content, with: '')
+ click_button('Save changes')
expect(page).to have_selector('.wiki-form')
expect(page).to have_content('Edit Page')
expect(page).to have_content('The form contains the following error:')
- expect(page).to have_content('Content can\'t be blank')
- expect(find('textarea#wiki_content').value).to eq ''
+ expect(page).to have_content("Content can't be blank")
+ expect(find('textarea#wiki_content').value).to eq('')
end
- scenario 'content has autocomplete', :js do
- click_link 'Edit'
+ it 'shows the autocompletion dropdown', :js do
+ click_link('Edit')
find('#wiki_content').native.send_keys('')
- fill_in :wiki_content, with: '@'
+ fill_in(:wiki_content, with: '@')
expect(page).to have_selector('.atwho-view')
end
- end
- scenario 'page has been updated since the user opened the edit page' do
- click_link 'Edit'
+ it 'shows a conflict message when the page was changed by someone else' do
+ click_link('Edit')
+
+ wiki_page.update(content: 'Update')
- wiki_page.update(content: 'Update')
+ click_button('Save changes')
+
+ expect(page).to have_content('Someone edited the page the same time you did.')
+ end
+
+ it 'updates a page' do
+ click_on('Edit')
+ fill_in('Content', with: 'Updated Wiki Content')
+ click_on('Save changes')
+
+ expect(page).to have_content('Updated Wiki Content')
+ end
- click_button 'Save changes'
+ it 'cancels editing of a page' do
+ click_on('Edit')
- expect(page).to have_content 'Someone edited the page the same time you did.'
+ page.within(:css, '.wiki-form .form-actions') do
+ click_on('Cancel')
+ end
+
+ expect(current_path).to eq(project_wiki_path(project, wiki_page))
+ end
end
- end
- context 'in a group namespace' do
- let(:project) { create(:project, namespace: create(:group, :public)) }
+ context 'in a group namespace' do
+ let(:project) { create(:project, namespace: create(:group, :public)) }
- scenario 'the home page' do
- click_link 'Edit'
+ it 'updates a page' do
+ click_link('Edit')
- # Commit message field should have correct value.
- expect(page).to have_field('wiki[message]', with: 'Update home')
+ # Commit message field should have correct value.
+ expect(page).to have_field('wiki[message]', with: 'Update home')
- fill_in :wiki_content, with: 'My awesome wiki!'
- click_button 'Save changes'
+ fill_in(:wiki_content, with: 'My awesome wiki!')
+ click_button('Save changes')
- expect(page).to have_content('Home')
- expect(page).to have_content("Last edited by #{user.name}")
- expect(page).to have_content('My awesome wiki!')
+ expect(page).to have_content('Home')
+ expect(page).to have_content("Last edited by #{user.name}")
+ expect(page).to have_content('My awesome wiki!')
+ end
end
end
end
diff --git a/spec/features/projects/wiki/user_views_project_wiki_page_spec.rb b/spec/features/projects/wiki/user_views_project_wiki_page_spec.rb
deleted file mode 100644
index 92e96f11219..00000000000
--- a/spec/features/projects/wiki/user_views_project_wiki_page_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-require 'spec_helper'
-
-feature 'Projects > Wiki > User views the wiki page' do
- let(:user) { create(:user) }
- let(:project) { create(:project, :public) }
- let(:old_page_version_id) { wiki_page.versions.last.id }
- let(:wiki_page) do
- WikiPages::CreateService.new(
- project,
- user,
- title: 'home',
- content: '[some link](other-page)'
- ).execute
- end
-
- background do
- project.team << [user, :master]
- sign_in(user)
- WikiPages::UpdateService.new(
- project,
- user,
- message: 'updated home',
- content: 'updated [some link](other-page)',
- format: :markdown
- ).execute(wiki_page)
- end
-
- scenario 'Visit Wiki Page Current Commit' do
- visit project_wiki_path(project, wiki_page)
-
- expect(page).to have_selector('a.btn', text: 'Edit')
- end
-
- scenario 'Visit Wiki Page Historical Commit' do
- visit project_wiki_path(project, wiki_page, version_id: old_page_version_id)
-
- expect(page).not_to have_selector('a.btn', text: 'Edit')
- end
-end
diff --git a/spec/features/projects/wiki/user_views_wiki_page_spec.rb b/spec/features/projects/wiki/user_views_wiki_page_spec.rb
new file mode 100644
index 00000000000..470391dc66b
--- /dev/null
+++ b/spec/features/projects/wiki/user_views_wiki_page_spec.rb
@@ -0,0 +1,140 @@
+require 'spec_helper'
+
+describe 'User views a wiki page' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, namespace: user.namespace) }
+ let(:wiki_page) do
+ create(:wiki_page,
+ wiki: project.wiki,
+ attrs: { title: 'home', content: 'Look at this [image](image.jpg)\n\n ![alt text](image.jpg)' })
+ end
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ context 'when wiki is empty' do
+ before do
+ visit(project_wikis_path(project))
+
+ click_on('New page')
+
+ page.within('#modal-new-wiki') do
+ fill_in(:new_wiki_path, with: 'one/two/three-test')
+ click_on('Create page')
+ end
+
+ page.within('.wiki-form') do
+ fill_in(:wiki_content, with: 'wiki content')
+ click_on('Create page')
+ end
+ end
+
+ it 'shows the history of a page that has a path', :js do
+ expect(current_path).to include('one/two/three-test')
+
+ first(:link, text: 'Three').click
+ click_on('Page history')
+
+ expect(current_path).to include('one/two/three-test')
+
+ page.within(:css, '.nav-text') do
+ expect(page).to have_content('History')
+ end
+ end
+
+ it 'shows an old version of a page', :js do
+ expect(current_path).to include('one/two/three-test')
+ expect(find('.wiki-pages')).to have_content('Three')
+
+ first(:link, text: 'Three').click
+
+ expect(find('.nav-text')).to have_content('Three')
+
+ click_on('Edit')
+
+ expect(current_path).to include('one/two/three-test')
+ expect(page).to have_content('Edit Page')
+
+ fill_in('Content', with: 'Updated Wiki Content')
+
+ click_on('Save changes')
+ click_on('Page history')
+
+ page.within(:css, '.nav-text') do
+ expect(page).to have_content('History')
+ end
+
+ find('a[href*="?version_id"]')
+ end
+ end
+
+ context 'when a page does not have history' do
+ before do
+ visit(project_wiki_path(project, wiki_page))
+ end
+
+ it 'shows all the pages' do
+ expect(page).to have_content(user.name)
+ expect(find('.wiki-pages')).to have_content(wiki_page.title.capitalize)
+ end
+
+ it 'shows a file stored in a page' do
+ file = Gollum::File.new(project.wiki)
+
+ allow_any_instance_of(Gollum::Wiki).to receive(:file).with('image.jpg', 'master').and_return(file)
+ allow_any_instance_of(Gollum::File).to receive(:mime_type).and_return('image/jpeg')
+
+ expect(page).to have_xpath('//img[@data-src="image.jpg"]')
+ expect(page).to have_link('image', href: "#{project.wiki.wiki_base_path}/image.jpg")
+
+ click_on('image')
+
+ expect(current_path).to match('wikis/image.jpg')
+ expect(page).not_to have_xpath('/html') # Page should render the image which means there is no html involved
+ end
+
+ it 'shows the creation page if file does not exist' do
+ expect(page).to have_link('image', href: "#{project.wiki.wiki_base_path}/image.jpg")
+
+ click_on('image')
+
+ expect(current_path).to match('wikis/image.jpg')
+ expect(page).to have_content('New Wiki Page')
+ expect(page).to have_content('Create page')
+ end
+ end
+
+ context 'when a page has history' do
+ before do
+ wiki_page.update(message: 'updated home', content: 'updated [some link](other-page)')
+ end
+
+ it 'shows the page history' do
+ visit(project_wiki_path(project, wiki_page))
+
+ expect(page).to have_selector('a.btn', text: 'Edit')
+
+ click_on('Page history')
+
+ expect(page).to have_content(user.name)
+ expect(page).to have_content("#{user.username} created page: home")
+ expect(page).to have_content('updated home')
+ end
+
+ it 'does not show the "Edit" button' do
+ visit(project_wiki_path(project, wiki_page, version_id: wiki_page.versions.last.id))
+
+ expect(page).not_to have_selector('a.btn', text: 'Edit')
+ end
+ end
+
+ it 'opens a default wiki page', :js do
+ visit(project_path(project))
+
+ find('.shortcuts-wiki').trigger('click')
+
+ expect(page).to have_content('Home · Create Page')
+ end
+end
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index 81f7ab80a04..ac62280e4ca 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
feature 'Project' do
+ include ProjectForksHelper
+
describe 'creating from template' do
let(:user) { create(:user) }
let(:template) { Gitlab::ProjectTemplate.find(:rails) }
@@ -55,13 +57,12 @@ feature 'Project' do
end
end
- describe 'remove forked relationship', js: true do
+ describe 'remove forked relationship', :js do
let(:user) { create(:user) }
- let(:project) { create(:project, namespace: user.namespace) }
+ let(:project) { fork_project(create(:project, :public), user, namespace_id: user.namespace) }
before do
sign_in user
- create(:forked_project_link, forked_to_project: project)
visit edit_project_path(project)
end
@@ -71,12 +72,61 @@ feature 'Project' do
remove_with_confirm('Remove fork relationship', project.path)
expect(page).to have_content 'The fork relationship has been removed.'
- expect(project.forked?).to be_falsey
+ expect(project.reload.forked?).to be_falsey
expect(page).not_to have_content 'Remove fork relationship'
end
end
- describe 'removal', js: true do
+ describe 'showing information about source of a project fork' do
+ let(:user) { create(:user) }
+ let(:base_project) { create(:project, :public, :repository) }
+ let(:forked_project) { fork_project(base_project, user, repository: true) }
+
+ before do
+ sign_in user
+ end
+
+ it 'shows a link to the source project when it is available' do
+ visit project_path(forked_project)
+
+ expect(page).to have_content('Forked from')
+ expect(page).to have_link(base_project.full_name)
+ end
+
+ it 'does not contain fork network information for the root project' do
+ forked_project
+
+ visit project_path(base_project)
+
+ expect(page).not_to have_content('In fork network of')
+ expect(page).not_to have_content('Forked from')
+ end
+
+ it 'shows the name of the deleted project when the source was deleted' do
+ forked_project
+ Projects::DestroyService.new(base_project, base_project.owner).execute
+
+ visit project_path(forked_project)
+
+ expect(page).to have_content("Forked from #{base_project.full_name} (deleted)")
+ end
+
+ context 'a fork of a fork' do
+ let(:fork_of_fork) { fork_project(forked_project, user, repository: true) }
+
+ it 'links to the base project if the source project is removed' do
+ fork_of_fork
+ Projects::DestroyService.new(forked_project, user).execute
+
+ visit project_path(fork_of_fork)
+
+ expect(page).to have_content("Forked from")
+ expect(page).to have_link(base_project.full_name)
+ end
+ end
+ end
+
+ describe 'removal', :js do
let(:user) { create(:user, username: 'test', name: 'test') }
let(:project) { create(:project, namespace: user.namespace, name: 'project1') }
diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb
index 3677bf38724..2ab1eda90f1 100644
--- a/spec/features/protected_branches_spec.rb
+++ b/spec/features/protected_branches_spec.rb
@@ -1,93 +1,178 @@
require 'spec_helper'
-feature 'Protected Branches', js: true do
- let(:user) { create(:user, :admin) }
+feature 'Protected Branches', :js do
+ let(:user) { create(:user) }
+ let(:admin) { create(:admin) }
let(:project) { create(:project, :repository) }
- before do
- sign_in(user)
- end
+ context 'logged in as developer' do
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
- def set_protected_branch_name(branch_name)
- find(".js-protected-branch-select").trigger('click')
- find(".dropdown-input-field").set(branch_name)
- click_on("Create wildcard #{branch_name}")
- end
+ describe 'Delete protected branch' do
+ before do
+ create(:protected_branch, project: project, name: 'fix')
+ expect(ProtectedBranch.count).to eq(1)
+ end
+
+ it 'does not allow a developer to remove a protected branch' do
+ visit project_branches_path(project)
- describe "explicit protected branches" do
- it "allows creating explicit protected branches" do
- visit project_protected_branches_path(project)
- set_protected_branch_name('some-branch')
- click_on "Protect"
+ fill_in 'branch-search', with: 'fix'
+ find('#branch-search').native.send_keys(:enter)
- within(".protected-branches-list") { expect(page).to have_content('some-branch') }
- expect(ProtectedBranch.count).to eq(1)
- expect(ProtectedBranch.last.name).to eq('some-branch')
+ expect(page).to have_css('.btn-remove.disabled')
+ end
end
+ end
- it "displays the last commit on the matching branch if it exists" do
- commit = create(:commit, project: project)
- project.repository.add_branch(user, 'some-branch', commit.id)
+ context 'logged in as master' do
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
- visit project_protected_branches_path(project)
- set_protected_branch_name('some-branch')
- click_on "Protect"
+ describe 'Delete protected branch' do
+ before do
+ create(:protected_branch, project: project, name: 'fix')
+ expect(ProtectedBranch.count).to eq(1)
+ end
- within(".protected-branches-list") { expect(page).to have_content(commit.id[0..7]) }
- end
+ it 'removes branch after modal confirmation' do
+ visit project_branches_path(project)
+
+ fill_in 'branch-search', with: 'fix'
+ find('#branch-search').native.send_keys(:enter)
+
+ expect(page).to have_content('fix')
+ expect(find('.all-branches')).to have_selector('li', count: 1)
+ page.find('[data-target="#modal-delete-branch"]').trigger(:click)
- it "displays an error message if the named branch does not exist" do
- visit project_protected_branches_path(project)
- set_protected_branch_name('some-branch')
- click_on "Protect"
+ expect(page).to have_css('.js-delete-branch[disabled]')
+ fill_in 'delete_branch_input', with: 'fix'
+ click_link 'Delete protected branch'
- within(".protected-branches-list") { expect(page).to have_content('branch was removed') }
+ fill_in 'branch-search', with: 'fix'
+ find('#branch-search').native.send_keys(:enter)
+
+ expect(page).to have_content('No branches to show')
+ end
end
- end
- describe "wildcard protected branches" do
- it "allows creating protected branches with a wildcard" do
- visit project_protected_branches_path(project)
- set_protected_branch_name('*-stable')
- click_on "Protect"
+ describe "Saved defaults" do
+ it "keeps the allowed to merge and push dropdowns defaults based on the previous selection" do
+ visit project_protected_branches_path(project)
+ form = '.js-new-protected-branch'
+
+ within form do
+ find(".js-allowed-to-merge").trigger('click')
+ click_link 'No one'
+ find(".js-allowed-to-push").trigger('click')
+ click_link 'Developers + Masters'
+ end
+
+ visit project_protected_branches_path(project)
+
+ within form do
+ page.within(".js-allowed-to-merge") do
+ expect(page.find(".dropdown-toggle-text")).to have_content("No one")
+ end
+ page.within(".js-allowed-to-push") do
+ expect(page.find(".dropdown-toggle-text")).to have_content("Developers + Masters")
+ end
+ end
+ end
+ end
+ end
- within(".protected-branches-list") { expect(page).to have_content('*-stable') }
- expect(ProtectedBranch.count).to eq(1)
- expect(ProtectedBranch.last.name).to eq('*-stable')
+ context 'logged in as admin' do
+ before do
+ sign_in(admin)
end
- it "displays the number of matching branches" do
- project.repository.add_branch(user, 'production-stable', 'master')
- project.repository.add_branch(user, 'staging-stable', 'master')
+ describe "explicit protected branches" do
+ it "allows creating explicit protected branches" do
+ visit project_protected_branches_path(project)
+ set_protected_branch_name('some-branch')
+ click_on "Protect"
+
+ within(".protected-branches-list") { expect(page).to have_content('some-branch') }
+ expect(ProtectedBranch.count).to eq(1)
+ expect(ProtectedBranch.last.name).to eq('some-branch')
+ end
+
+ it "displays the last commit on the matching branch if it exists" do
+ commit = create(:commit, project: project)
+ project.repository.add_branch(admin, 'some-branch', commit.id)
+
+ visit project_protected_branches_path(project)
+ set_protected_branch_name('some-branch')
+ click_on "Protect"
- visit project_protected_branches_path(project)
- set_protected_branch_name('*-stable')
- click_on "Protect"
+ within(".protected-branches-list") { expect(page).to have_content(commit.id[0..7]) }
+ end
- within(".protected-branches-list") { expect(page).to have_content("2 matching branches") }
+ it "displays an error message if the named branch does not exist" do
+ visit project_protected_branches_path(project)
+ set_protected_branch_name('some-branch')
+ click_on "Protect"
+
+ within(".protected-branches-list") { expect(page).to have_content('branch was removed') }
+ end
end
- it "displays all the branches matching the wildcard" do
- project.repository.add_branch(user, 'production-stable', 'master')
- project.repository.add_branch(user, 'staging-stable', 'master')
- project.repository.add_branch(user, 'development', 'master')
+ describe "wildcard protected branches" do
+ it "allows creating protected branches with a wildcard" do
+ visit project_protected_branches_path(project)
+ set_protected_branch_name('*-stable')
+ click_on "Protect"
- visit project_protected_branches_path(project)
- set_protected_branch_name('*-stable')
- click_on "Protect"
+ within(".protected-branches-list") { expect(page).to have_content('*-stable') }
+ expect(ProtectedBranch.count).to eq(1)
+ expect(ProtectedBranch.last.name).to eq('*-stable')
+ end
- visit project_protected_branches_path(project)
- click_on "2 matching branches"
+ it "displays the number of matching branches" do
+ project.repository.add_branch(admin, 'production-stable', 'master')
+ project.repository.add_branch(admin, 'staging-stable', 'master')
- within(".protected-branches-list") do
- expect(page).to have_content("production-stable")
- expect(page).to have_content("staging-stable")
- expect(page).not_to have_content("development")
+ visit project_protected_branches_path(project)
+ set_protected_branch_name('*-stable')
+ click_on "Protect"
+
+ within(".protected-branches-list") { expect(page).to have_content("2 matching branches") }
end
+
+ it "displays all the branches matching the wildcard" do
+ project.repository.add_branch(admin, 'production-stable', 'master')
+ project.repository.add_branch(admin, 'staging-stable', 'master')
+ project.repository.add_branch(admin, 'development', 'master')
+
+ visit project_protected_branches_path(project)
+ set_protected_branch_name('*-stable')
+ click_on "Protect"
+
+ visit project_protected_branches_path(project)
+ click_on "2 matching branches"
+
+ within(".protected-branches-list") do
+ expect(page).to have_content("production-stable")
+ expect(page).to have_content("staging-stable")
+ expect(page).not_to have_content("development")
+ end
+ end
+ end
+
+ describe "access control" do
+ include_examples "protected branches > access control > CE"
end
end
- describe "access control" do
- include_examples "protected branches > access control > CE"
+ def set_protected_branch_name(branch_name)
+ find(".js-protected-branch-select").trigger('click')
+ find(".dropdown-input-field").set(branch_name)
+ click_on("Create wildcard #{branch_name}")
end
end
diff --git a/spec/features/protected_tags_spec.rb b/spec/features/protected_tags_spec.rb
index 8abd4403065..8cc6f17b8d9 100644
--- a/spec/features/protected_tags_spec.rb
+++ b/spec/features/protected_tags_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Protected Tags', js: true do
+feature 'Protected Tags', :js do
let(:user) { create(:user, :admin) }
let(:project) { create(:project, :repository) }
diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb
new file mode 100644
index 00000000000..0ed797a62ea
--- /dev/null
+++ b/spec/features/search/user_searches_for_code_spec.rb
@@ -0,0 +1,67 @@
+require 'spec_helper'
+
+describe 'User searches for code' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository, namespace: user.namespace) }
+
+ context 'when signed in' do
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ it 'finds a file' do
+ visit(project_path(project))
+
+ page.within('.search') do
+ fill_in('search', with: 'application.js')
+ click_button('Go')
+ end
+
+ click_link('Code')
+
+ expect(page).to have_selector('.file-content .code')
+ expect(page).to have_selector("span.line[lang='javascript']")
+ end
+
+ context 'when on a project page', :js do
+ before do
+ visit(search_path)
+ end
+
+ include_examples 'top right search form'
+
+ it 'finds code' do
+ find('.js-search-project-dropdown').trigger('click')
+
+ page.within('.project-filter') do
+ click_link(project.name_with_namespace)
+ end
+
+ fill_in('dashboard_search', with: 'rspec')
+ find('.btn-search').trigger('click')
+
+ page.within('.results') do
+ expect(find(:css, '.search-results')).to have_content('Update capybara, rspec-rails, poltergeist to recent versions')
+ end
+ end
+ end
+ end
+
+ context 'when signed out' do
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ visit(project_path(project))
+ end
+
+ it 'finds code' do
+ fill_in('search', with: 'rspec')
+ click_button('Go')
+
+ page.within('.results') do
+ expect(find(:css, '.search-results')).to have_content('Update capybara, rspec-rails, poltergeist to recent versions')
+ end
+ end
+ end
+end
diff --git a/spec/features/search/user_searches_for_comments_spec.rb b/spec/features/search/user_searches_for_comments_spec.rb
new file mode 100644
index 00000000000..c7c469a262c
--- /dev/null
+++ b/spec/features/search/user_searches_for_comments_spec.rb
@@ -0,0 +1,47 @@
+require 'spec_helper'
+
+describe 'User searches for comments' do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_reporter(user)
+ sign_in(user)
+
+ visit(project_path(project))
+ end
+
+ context 'when a comment is in commits' do
+ context 'when comment belongs to an invalid commit' do
+ let(:comment) { create(:note_on_commit, author: user, project: project, commit_id: 12345678, note: 'Bug here') }
+
+ it 'finds a commit' do
+ page.within('.search') do
+ fill_in('search', with: comment.note)
+ click_button('Go')
+ end
+
+ click_link('Comments')
+
+ expect(page).to have_text('Commit deleted')
+ expect(page).to have_text('12345678')
+ end
+ end
+ end
+
+ context 'when a comment is in a snippet' do
+ let(:snippet) { create(:project_snippet, :private, project: project, author: user, title: 'Some title') }
+ let(:comment) { create(:note, noteable: snippet, author: user, note: 'Supercalifragilisticexpialidocious', project: project) }
+
+ it 'finds a snippet' do
+ page.within('.search') do
+ fill_in('search', with: comment.note)
+ click_button('Go')
+ end
+
+ click_link('Comments')
+
+ expect(page).to have_link(snippet.title)
+ end
+ end
+end
diff --git a/spec/features/search/user_searches_for_commits_spec.rb b/spec/features/search/user_searches_for_commits_spec.rb
new file mode 100644
index 00000000000..28cae444588
--- /dev/null
+++ b/spec/features/search/user_searches_for_commits_spec.rb
@@ -0,0 +1,49 @@
+require 'spec_helper'
+
+describe 'User searches for commits' do
+ let(:project) { create(:project, :repository) }
+ let(:sha) { '6d394385cf567f80a8fd85055db1ab4c5295806f' }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_reporter(user)
+ sign_in(user)
+
+ visit(search_path(project_id: project.id))
+ end
+
+ context 'when searching by SHA' do
+ it 'finds a commit and redirects to its page' do
+ fill_in('search', with: sha)
+ click_button('Search')
+
+ expect(page).to have_current_path(project_commit_path(project, sha))
+ end
+
+ it 'finds a commit in uppercase and redirects to its page' do
+ fill_in('search', with: sha.upcase)
+ click_button('Search')
+
+ expect(page).to have_current_path(project_commit_path(project, sha))
+ end
+ end
+
+ context 'when searching by message' do
+ it 'finds a commit and stays on the /search page' do
+ create_commit('Message referencing another sha: "deadbeef"', project, user, 'master')
+
+ fill_in('search', with: 'deadbeef')
+ click_button('Search')
+
+ expect(page).to have_current_path('/search', only_path: true)
+ end
+
+ it 'finds multiple commits' do
+ fill_in('search', with: 'See merge request')
+ click_button('Search')
+ click_link('Commits')
+
+ expect(page).to have_selector('.commit-row-description', count: 9)
+ end
+ end
+end
diff --git a/spec/features/search/user_searches_for_issues_spec.rb b/spec/features/search/user_searches_for_issues_spec.rb
new file mode 100644
index 00000000000..630a81b1c5e
--- /dev/null
+++ b/spec/features/search/user_searches_for_issues_spec.rb
@@ -0,0 +1,76 @@
+require 'spec_helper'
+
+describe 'User searches for issues', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, namespace: user.namespace) }
+ let!(:issue1) { create(:issue, title: 'Foo', project: project) }
+ let!(:issue2) { create(:issue, title: 'Bar', project: project) }
+
+ context 'when signed in' do
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(search_path)
+ end
+
+ include_examples 'top right search form'
+
+ it 'finds an issue' do
+ fill_in('dashboard_search', with: issue1.title)
+ find('.btn-search').trigger('click')
+
+ page.within('.search-filter') do
+ click_link('Issues')
+ end
+
+ page.within('.results') do
+ expect(find(:css, '.search-results')).to have_link(issue1.title).and have_no_link(issue2.title)
+ end
+ end
+
+ context 'when on a project page' do
+ it 'finds an issue' do
+ find('.js-search-project-dropdown').trigger('click')
+
+ page.within('.project-filter') do
+ click_link(project.name_with_namespace)
+ end
+
+ fill_in('dashboard_search', with: issue1.title)
+ find('.btn-search').trigger('click')
+
+ page.within('.search-filter') do
+ click_link('Issues')
+ end
+
+ page.within('.results') do
+ expect(find(:css, '.search-results')).to have_link(issue1.title).and have_no_link(issue2.title)
+ end
+ end
+ end
+ end
+
+ context 'when signed out' do
+ let(:project) { create(:project, :public) }
+
+ before do
+ visit(search_path)
+ end
+
+ include_examples 'top right search form'
+
+ it 'finds an issue' do
+ fill_in('dashboard_search', with: issue1.title)
+ find('.btn-search').trigger('click')
+
+ page.within('.search-filter') do
+ click_link('Issues')
+ end
+
+ page.within('.results') do
+ expect(find(:css, '.search-results')).to have_link(issue1.title).and have_no_link(issue2.title)
+ end
+ end
+ end
+end
diff --git a/spec/features/search/user_searches_for_merge_requests_spec.rb b/spec/features/search/user_searches_for_merge_requests_spec.rb
new file mode 100644
index 00000000000..116256682f4
--- /dev/null
+++ b/spec/features/search/user_searches_for_merge_requests_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+describe 'User searches for merge requests', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, namespace: user.namespace) }
+ let!(:merge_request1) { create(:merge_request, title: 'Foo', source_project: project, target_project: project) }
+ let!(:merge_request2) { create(:merge_request, :simple, title: 'Bar', source_project: project, target_project: project) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(search_path)
+ end
+
+ include_examples 'top right search form'
+
+ it 'finds a merge request' do
+ fill_in('dashboard_search', with: merge_request1.title)
+ find('.btn-search').trigger('click')
+
+ page.within('.search-filter') do
+ click_link('Merge requests')
+ end
+
+ page.within('.results') do
+ expect(find(:css, '.search-results')).to have_link(merge_request1.title).and have_no_link(merge_request2.title)
+ end
+ end
+
+ context 'when on a project page' do
+ it 'finds a merge request' do
+ find('.js-search-project-dropdown').trigger('click')
+
+ page.within('.project-filter') do
+ click_link(project.name_with_namespace)
+ end
+
+ fill_in('dashboard_search', with: merge_request1.title)
+ find('.btn-search').trigger('click')
+
+ page.within('.search-filter') do
+ click_link('Merge requests')
+ end
+
+ page.within('.results') do
+ expect(find(:css, '.search-results')).to have_link(merge_request1.title).and have_no_link(merge_request2.title)
+ end
+ end
+ end
+end
diff --git a/spec/features/search/user_searches_for_milestones_spec.rb b/spec/features/search/user_searches_for_milestones_spec.rb
new file mode 100644
index 00000000000..4fa9fe9ce8c
--- /dev/null
+++ b/spec/features/search/user_searches_for_milestones_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+describe 'User searches for milestones', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, namespace: user.namespace) }
+ let!(:milestone1) { create(:milestone, title: 'Foo', project: project) }
+ let!(:milestone2) { create(:milestone, title: 'Bar', project: project) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(search_path)
+ end
+
+ include_examples 'top right search form'
+
+ it 'finds a milestone' do
+ fill_in('dashboard_search', with: milestone1.title)
+ find('.btn-search').trigger('click')
+
+ page.within('.search-filter') do
+ click_link('Milestones')
+ end
+
+ page.within('.results') do
+ expect(find(:css, '.search-results')).to have_link(milestone1.title).and have_no_link(milestone2.title)
+ end
+ end
+
+ context 'when on a project page' do
+ it 'finds a milestone' do
+ find('.js-search-project-dropdown').trigger('click')
+
+ page.within('.project-filter') do
+ click_link(project.name_with_namespace)
+ end
+
+ fill_in('dashboard_search', with: milestone1.title)
+ find('.btn-search').trigger('click')
+
+ page.within('.search-filter') do
+ click_link('Milestones')
+ end
+
+ page.within('.results') do
+ expect(find(:css, '.search-results')).to have_link(milestone1.title).and have_no_link(milestone2.title)
+ end
+ end
+ end
+end
diff --git a/spec/features/search/user_searches_for_projects_spec.rb b/spec/features/search/user_searches_for_projects_spec.rb
new file mode 100644
index 00000000000..242e437e41c
--- /dev/null
+++ b/spec/features/search/user_searches_for_projects_spec.rb
@@ -0,0 +1,36 @@
+require 'spec_helper'
+
+describe 'User searches for projects' do
+ let!(:project) { create(:project, :public, name: 'Shop') }
+
+ context 'when signed out' do
+ include_examples 'top right search form'
+
+ it 'finds a project' do
+ visit(search_path)
+
+ fill_in('dashboard_search', with: project.name[0..3])
+ click_button('Search')
+
+ expect(page).to have_link(project.name)
+ end
+
+ it 'preserves the group being searched in' do
+ visit(search_path(group_id: project.namespace.id))
+
+ fill_in('search', with: 'foo')
+ click_button('Search')
+
+ expect(find('#group_id', visible: false).value).to eq(project.namespace.id.to_s)
+ end
+
+ it 'preserves the project being searched in' do
+ visit(search_path(project_id: project.id))
+
+ fill_in('search', with: 'foo')
+ click_button('Search')
+
+ expect(find('#project_id', visible: false).value).to eq(project.id.to_s)
+ end
+ end
+end
diff --git a/spec/features/search/user_searches_for_wiki_pages_spec.rb b/spec/features/search/user_searches_for_wiki_pages_spec.rb
new file mode 100644
index 00000000000..1ea56479ecc
--- /dev/null
+++ b/spec/features/search/user_searches_for_wiki_pages_spec.rb
@@ -0,0 +1,35 @@
+require 'spec_helper'
+
+describe 'User searches for wiki pages', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, namespace: user.namespace) }
+ let!(:wiki_page) { create(:wiki_page, wiki: project.wiki, attrs: { title: 'test_wiki', content: 'Some Wiki content' }) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+
+ visit(search_path)
+ end
+
+ include_examples 'top right search form'
+
+ it 'finds a page' do
+ find('.js-search-project-dropdown').trigger('click')
+
+ page.within('.project-filter') do
+ click_link(project.name_with_namespace)
+ end
+
+ fill_in('dashboard_search', with: 'content')
+ find('.btn-search').trigger('click')
+
+ page.within('.search-filter') do
+ click_link('Wiki')
+ end
+
+ page.within('.results') do
+ expect(find(:css, '.search-results')).to have_link(wiki_page.title)
+ end
+ end
+end
diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb
new file mode 100644
index 00000000000..5ddea36add5
--- /dev/null
+++ b/spec/features/search/user_uses_header_search_field_spec.rb
@@ -0,0 +1,90 @@
+require 'spec_helper'
+
+describe 'User uses header search field' do
+ include FilteredSearchHelpers
+
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_reporter(user)
+ sign_in(user)
+
+ visit(project_path(project))
+ end
+
+ it 'starts searching by pressing the enter key', :js do
+ fill_in('search', with: 'gitlab')
+ find('#search').native.send_keys(:enter)
+
+ page.within('.breadcrumbs-sub-title') do
+ expect(page).to have_content('Search')
+ end
+ end
+
+ it 'contains location badge' do
+ expect(page).to have_selector('.has-location-badge')
+ end
+
+ context 'when clicking the search field', :js do
+ before do
+ page.find('#search').click
+ end
+
+ it 'shows category search dropdown' do
+ expect(page).to have_selector('.dropdown-header', text: /#{project.name}/i)
+ end
+
+ context 'when clicking issues' do
+ let!(:issue) { create(:issue, project: project, author: user, assignees: [user]) }
+
+ it 'shows assigned issues' do
+ find('.dropdown-menu').click_link('Issues assigned to me')
+
+ expect(page).to have_selector('.filtered-search')
+ expect_tokens([assignee_token(user.name)])
+ expect_filtered_search_input_empty
+ end
+
+ it 'shows created issues' do
+ find('.dropdown-menu').click_link("Issues I've created")
+
+ expect(page).to have_selector('.filtered-search')
+ expect_tokens([author_token(user.name)])
+ expect_filtered_search_input_empty
+ end
+ end
+
+ context 'when clicking merge requests' do
+ let!(:merge_request) { create(:merge_request, source_project: project, author: user, assignee: user) }
+
+ it 'shows assigned merge requests' do
+ find('.dropdown-menu').click_link('Merge requests assigned to me')
+
+ expect(page).to have_selector('.merge-requests-holder')
+ expect_tokens([assignee_token(user.name)])
+ expect_filtered_search_input_empty
+ end
+
+ it 'shows created merge requests' do
+ find('.dropdown-menu').click_link("Merge requests I've created")
+
+ expect(page).to have_selector('.merge-requests-holder')
+ expect_tokens([author_token(user.name)])
+ expect_filtered_search_input_empty
+ end
+ end
+ end
+
+ context 'when entering text into the search field', :js do
+ before do
+ page.within('.search-input-wrap') do
+ fill_in('search', with: project.name[0..3])
+ end
+ end
+
+ it 'does not display the category search dropdown' do
+ expect(page).not_to have_selector('.dropdown-header', text: /#{project.name}/i)
+ end
+ end
+end
diff --git a/spec/features/search/user_uses_search_filters_spec.rb b/spec/features/search/user_uses_search_filters_spec.rb
new file mode 100644
index 00000000000..95f3eb5e805
--- /dev/null
+++ b/spec/features/search/user_uses_search_filters_spec.rb
@@ -0,0 +1,52 @@
+require 'spec_helper'
+
+describe 'User uses search filters', :js do
+ let(:group) { create(:group) }
+ let!(:group_project) { create(:project, group: group) }
+ let(:project) { create(:project, namespace: user.namespace) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_reporter(user)
+ group.add_owner(user)
+ sign_in(user)
+
+ visit(search_path)
+ end
+
+ context 'when filtering by group' do
+ it 'shows group projects' do
+ find('.js-search-group-dropdown').trigger('click')
+
+ wait_for_requests
+
+ page.within('.search-holder') do
+ click_link(group.name)
+ end
+
+ expect(find('.js-search-group-dropdown')).to have_content(group.name)
+
+ page.within('.project-filter') do
+ find('.js-search-project-dropdown').trigger('click')
+
+ wait_for_requests
+
+ expect(page).to have_link(group_project.name_with_namespace)
+ end
+ end
+ end
+
+ context 'when filtering by project' do
+ it 'shows a project' do
+ page.within('.project-filter') do
+ find('.js-search-project-dropdown').trigger('click')
+
+ wait_for_requests
+
+ click_link(project.name_with_namespace)
+ end
+
+ expect(find('.js-search-project-dropdown')).to have_content(project.name_with_namespace)
+ end
+ end
+end
diff --git a/spec/features/search_spec.rb b/spec/features/search_spec.rb
deleted file mode 100644
index 8f6d0bb9d1b..00000000000
--- a/spec/features/search_spec.rb
+++ /dev/null
@@ -1,310 +0,0 @@
-require 'spec_helper'
-
-describe "Search" do
- include FilteredSearchHelpers
-
- let(:user) { create(:user) }
- let(:project) { create(:project, namespace: user.namespace) }
- let!(:issue) { create(:issue, project: project, assignees: [user]) }
- let!(:issue2) { create(:issue, project: project, author: user) }
-
- before do
- sign_in(user)
- project.team << [user, :reporter]
- visit search_path
- end
-
- it 'does not show top right search form' do
- expect(page).not_to have_selector('.search')
- end
-
- context 'search filters', js: true do
- let(:group) { create(:group) }
- let!(:group_project) { create(:project, group: group) }
-
- before do
- group.add_owner(user)
- end
-
- it 'shows group name after filtering' do
- find('.js-search-group-dropdown').trigger('click')
- wait_for_requests
-
- page.within '.search-holder' do
- click_link group.name
- end
-
- expect(find('.js-search-group-dropdown')).to have_content(group.name)
- end
-
- it 'filters by group projects after filtering by group' do
- find('.js-search-group-dropdown').trigger('click')
- wait_for_requests
-
- page.within '.search-holder' do
- click_link group.name
- end
-
- expect(find('.js-search-group-dropdown')).to have_content(group.name)
-
- page.within('.project-filter') do
- find('.js-search-project-dropdown').trigger('click')
- wait_for_requests
-
- expect(page).to have_link(group_project.name_with_namespace)
- end
- end
-
- it 'shows project name after filtering' do
- page.within('.project-filter') do
- find('.js-search-project-dropdown').trigger('click')
- wait_for_requests
-
- click_link project.name_with_namespace
- end
-
- expect(find('.js-search-project-dropdown')).to have_content(project.name_with_namespace)
- end
- end
-
- describe 'searching for Projects' do
- it 'finds a project' do
- page.within '.search-holder' do
- fill_in "search", with: project.name[0..3]
- click_button "Search"
- end
-
- expect(page).to have_content project.name
- end
- end
-
- context 'search for comments' do
- context 'when comment belongs to a invalid commit' do
- let(:project) { create(:project, :repository) }
- let(:note) { create(:note_on_commit, author: user, project: project, commit_id: project.repository.commit.id, note: 'Bug here') }
-
- before do
- note.update_attributes(commit_id: 12345678)
- end
-
- it 'finds comment' do
- visit project_path(project)
-
- page.within '.search' do
- fill_in 'search', with: note.note
- click_button 'Go'
- end
-
- click_link 'Comments'
-
- expect(page).to have_text("Commit deleted")
- expect(page).to have_text("12345678")
- end
- end
-
- it 'finds a snippet' do
- snippet = create(:project_snippet, :private, project: project, author: user, title: 'Some title')
- note = create(:note,
- noteable: snippet,
- author: user,
- note: 'Supercalifragilisticexpialidocious',
- project: project)
- # Must visit project dashboard since global search won't search
- # everything (e.g. comments, snippets, etc.)
- visit project_path(project)
-
- page.within '.search' do
- fill_in 'search', with: note.note
- click_button 'Go'
- end
-
- click_link 'Comments'
-
- expect(page).to have_link(snippet.title)
- end
-
- it 'finds a commit' do
- project = create(:project, :repository) { |p| p.add_reporter(user) }
- visit project_path(project)
-
- page.within '.search' do
- fill_in 'search', with: 'add'
- click_button 'Go'
- end
-
- click_link "Commits"
-
- expect(page).to have_selector('.commit-row-description')
- end
-
- it 'finds a code' do
- project = create(:project, :repository) { |p| p.add_reporter(user) }
- visit project_path(project)
-
- page.within '.search' do
- fill_in 'search', with: 'application.js'
- click_button 'Go'
- end
-
- click_link "Code"
-
- expect(page).to have_selector('.file-content .code')
-
- expect(page).to have_selector("span.line[lang='javascript']")
- end
- end
-
- describe 'Right header search field' do
- it 'allows enter key to search', js: true do
- visit project_path(project)
- fill_in 'search', with: 'gitlab'
- find('#search').native.send_keys(:enter)
-
- page.within '.breadcrumbs-sub-title' do
- expect(page).to have_content 'Search'
- end
- end
-
- describe 'Search in project page' do
- before do
- visit project_path(project)
- end
-
- it 'shows top right search form' do
- expect(page).to have_selector('#search')
- end
-
- it 'contains location badge in top right search form' do
- expect(page).to have_selector('.has-location-badge')
- end
-
- context 'clicking the search field', js: true do
- it 'shows category search dropdown' do
- page.find('#search').click
-
- expect(page).to have_selector('.dropdown-header', text: /#{project.name}/i)
- end
- end
-
- context 'click the links in the category search dropdown', js: true do
- let!(:merge_request) { create(:merge_request, source_project: project, author: user, assignee: user) }
-
- before do
- page.find('#search').click
- end
-
- it 'takes user to her issues page when issues assigned is clicked' do
- find('.dropdown-menu').click_link 'Issues assigned to me'
-
- expect(page).to have_selector('.filtered-search')
- expect_tokens([assignee_token(user.name)])
- expect_filtered_search_input_empty
- end
-
- it 'takes user to her issues page when issues authored is clicked' do
- find('.dropdown-menu').click_link "Issues I've created"
-
- expect(page).to have_selector('.filtered-search')
- expect_tokens([author_token(user.name)])
- expect_filtered_search_input_empty
- end
-
- it 'takes user to her MR page when MR assigned is clicked' do
- find('.dropdown-menu').click_link 'Merge requests assigned to me'
-
- expect(page).to have_selector('.merge-requests-holder')
- expect_tokens([assignee_token(user.name)])
- expect_filtered_search_input_empty
- end
-
- it 'takes user to her MR page when MR authored is clicked' do
- find('.dropdown-menu').click_link "Merge requests I've created"
-
- expect(page).to have_selector('.merge-requests-holder')
- expect_tokens([author_token(user.name)])
- expect_filtered_search_input_empty
- end
- end
-
- context 'entering text into the search field', js: true do
- before do
- page.within '.search-input-wrap' do
- fill_in "search", with: project.name[0..3]
- end
- end
-
- it 'does not display the category search dropdown' do
- expect(page).not_to have_selector('.dropdown-header', text: /#{project.name}/i)
- end
- end
- end
- end
-
- describe 'search for commits' do
- let(:project) { create(:project, :repository) }
-
- before do
- visit search_path(project_id: project.id)
- end
-
- it 'redirects to commit page when search by sha and only commit found' do
- fill_in 'search', with: '6d394385cf567f80a8fd85055db1ab4c5295806f'
-
- click_button 'Search'
-
- expect(page).to have_current_path(project_commit_path(project, '6d394385cf567f80a8fd85055db1ab4c5295806f'))
- end
-
- it 'redirects to single commit regardless of query case' do
- fill_in 'search', with: '6D394385cf'
-
- click_button 'Search'
-
- expect(page).to have_current_path(project_commit_path(project, '6d394385cf567f80a8fd85055db1ab4c5295806f'))
- end
-
- it 'holds on /search page when the only commit is found by message' do
- create_commit('Message referencing another sha: "deadbeef" ', project, user, 'master')
-
- fill_in 'search', with: 'deadbeef'
- click_button 'Search'
-
- expect(page).to have_current_path('/search', only_path: true)
- end
-
- it 'shows multiple matching commits' do
- fill_in 'search', with: 'See merge request'
-
- click_button 'Search'
- click_link 'Commits'
-
- expect(page).to have_selector('.commit-row-description', count: 9)
- end
- end
-
- context 'anonymous user' do
- let(:project) { create(:project, :public) }
-
- before do
- sign_out(user)
- end
-
- it 'preserves the group being searched in' do
- visit search_path(group_id: project.namespace.id)
-
- fill_in 'search', with: 'foo'
- click_button 'Search'
-
- expect(find('#group_id', visible: false).value).to eq(project.namespace.id.to_s)
- end
-
- it 'preserves the project being searched in' do
- visit search_path(project_id: project.id)
-
- fill_in 'search', with: 'foo'
- click_button 'Search'
-
- expect(find('#project_id', visible: false).value).to eq(project.id.to_s)
- end
- end
-end
diff --git a/spec/features/security/project/internal_access_spec.rb b/spec/features/security/project/internal_access_spec.rb
index a7928857b7d..d70cf1527e7 100644
--- a/spec/features/security/project/internal_access_spec.rb
+++ b/spec/features/security/project/internal_access_spec.rb
@@ -181,21 +181,6 @@ describe "Internal Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/issues/:id/edit" do
- let(:issue) { create(:issue, project: project) }
- subject { edit_project_issue_path(project, issue) }
-
- it { is_expected.to be_allowed_for(:admin) }
- it { is_expected.to be_allowed_for(:owner).of(project) }
- it { is_expected.to be_allowed_for(:master).of(project) }
- it { is_expected.to be_allowed_for(:developer).of(project) }
- it { is_expected.to be_allowed_for(:reporter).of(project) }
- it { is_expected.to be_denied_for(:guest).of(project) }
- it { is_expected.to be_denied_for(:user) }
- it { is_expected.to be_denied_for(:external) }
- it { is_expected.to be_denied_for(:visitor) }
- end
-
describe "GET /:project_path/snippets" do
subject { project_snippets_path(project) }
diff --git a/spec/features/security/project/private_access_spec.rb b/spec/features/security/project/private_access_spec.rb
index a4396b20afd..ea130606545 100644
--- a/spec/features/security/project/private_access_spec.rb
+++ b/spec/features/security/project/private_access_spec.rb
@@ -181,21 +181,6 @@ describe "Private Project Access" do
it { is_expected.to be_denied_for(:visitor) }
end
- describe "GET /:project_path/issues/:id/edit" do
- let(:issue) { create(:issue, project: project) }
- subject { edit_project_issue_path(project, issue) }
-
- it { is_expected.to be_allowed_for(:admin) }
- it { is_expected.to be_allowed_for(:owner).of(project) }
- it { is_expected.to be_allowed_for(:master).of(project) }
- it { is_expected.to be_allowed_for(:developer).of(project) }
- it { is_expected.to be_allowed_for(:reporter).of(project) }
- it { is_expected.to be_denied_for(:guest).of(project) }
- it { is_expected.to be_denied_for(:user) }
- it { is_expected.to be_denied_for(:external) }
- it { is_expected.to be_denied_for(:visitor) }
- end
-
describe "GET /:project_path/snippets" do
subject { project_snippets_path(project) }
diff --git a/spec/features/security/project/public_access_spec.rb b/spec/features/security/project/public_access_spec.rb
index fccdeb0e5b7..d15f5af66c9 100644
--- a/spec/features/security/project/public_access_spec.rb
+++ b/spec/features/security/project/public_access_spec.rb
@@ -394,21 +394,6 @@ describe "Public Project Access" do
it { is_expected.to be_allowed_for(:visitor) }
end
- describe "GET /:project_path/issues/:id/edit" do
- let(:issue) { create(:issue, project: project) }
- subject { edit_project_issue_path(project, issue) }
-
- it { is_expected.to be_allowed_for(:admin) }
- it { is_expected.to be_allowed_for(:owner).of(project) }
- it { is_expected.to be_allowed_for(:master).of(project) }
- it { is_expected.to be_allowed_for(:developer).of(project) }
- it { is_expected.to be_allowed_for(:reporter).of(project) }
- it { is_expected.to be_denied_for(:guest).of(project) }
- it { is_expected.to be_denied_for(:user) }
- it { is_expected.to be_denied_for(:external) }
- it { is_expected.to be_denied_for(:visitor) }
- end
-
describe "GET /:project_path/snippets" do
subject { project_snippets_path(project) }
diff --git a/spec/features/signup_spec.rb b/spec/features/signup_spec.rb
index b6367b88e17..917fad74ef1 100644
--- a/spec/features/signup_spec.rb
+++ b/spec/features/signup_spec.rb
@@ -24,6 +24,24 @@ feature 'Signup' do
end
end
+ context "when sigining up with different cased emails" do
+ it "creates the user successfully" do
+ user = build(:user)
+
+ visit root_path
+
+ fill_in 'new_user_name', with: user.name
+ fill_in 'new_user_username', with: user.username
+ fill_in 'new_user_email', with: user.email
+ fill_in 'new_user_email_confirmation', with: user.email.capitalize
+ fill_in 'new_user_password', with: user.password
+ click_button "Register"
+
+ expect(current_path).to eq dashboard_projects_path
+ expect(page).to have_content("Welcome! You have signed up successfully.")
+ end
+ end
+
context "when not sending confirmation email" do
before do
stub_application_setting(send_user_confirmation_email: false)
diff --git a/spec/features/snippets/internal_snippet_spec.rb b/spec/features/snippets/internal_snippet_spec.rb
index 3a229612235..3a2768c424f 100644
--- a/spec/features/snippets/internal_snippet_spec.rb
+++ b/spec/features/snippets/internal_snippet_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-feature 'Internal Snippets', js: true do
+feature 'Internal Snippets', :js do
let(:internal_snippet) { create(:personal_snippet, :internal) }
describe 'normal user' do
diff --git a/spec/features/tags/master_creates_tag_spec.rb b/spec/features/tags/master_creates_tag_spec.rb
index 39d79a3327b..1455345bd56 100644
--- a/spec/features/tags/master_creates_tag_spec.rb
+++ b/spec/features/tags/master_creates_tag_spec.rb
@@ -55,7 +55,7 @@ feature 'Master creates tag' do
end
end
- scenario 'opens dropdown for ref', js: true do
+ scenario 'opens dropdown for ref', :js do
click_link 'New tag'
ref_row = find('.form-group:nth-of-type(2) .col-sm-10')
page.within ref_row do
diff --git a/spec/features/tags/master_deletes_tag_spec.rb b/spec/features/tags/master_deletes_tag_spec.rb
index d6a6b8fc7d5..f5b3774122b 100644
--- a/spec/features/tags/master_deletes_tag_spec.rb
+++ b/spec/features/tags/master_deletes_tag_spec.rb
@@ -10,7 +10,7 @@ feature 'Master deletes tag' do
visit project_tags_path(project)
end
- context 'from the tags list page', js: true do
+ context 'from the tags list page', :js do
scenario 'deletes the tag' do
expect(page).to have_content 'v1.1.0'
@@ -34,16 +34,31 @@ feature 'Master deletes tag' do
end
end
- context 'when pre-receive hook fails', js: true do
- before do
- allow_any_instance_of(Gitlab::Git::HooksService).to receive(:execute)
- .and_raise(Gitlab::Git::HooksService::PreReceiveError, 'Do not delete tags')
+ context 'when pre-receive hook fails', :js do
+ context 'when Gitaly operation_user_delete_tag feature is enabled' do
+ before do
+ allow_any_instance_of(Gitlab::GitalyClient::OperationService).to receive(:rm_tag)
+ .and_raise(Gitlab::Git::HooksService::PreReceiveError, 'Do not delete tags')
+ end
+
+ scenario 'shows the error message' do
+ delete_first_tag
+
+ expect(page).to have_content('Do not delete tags')
+ end
end
- scenario 'shows the error message' do
- delete_first_tag
+ context 'when Gitaly operation_user_delete_tag feature is disabled', :skip_gitaly_mock do
+ before do
+ allow_any_instance_of(Gitlab::Git::HooksService).to receive(:execute)
+ .and_raise(Gitlab::Git::HooksService::PreReceiveError, 'Do not delete tags')
+ end
+
+ scenario 'shows the error message' do
+ delete_first_tag
- expect(page).to have_content('Do not delete tags')
+ expect(page).to have_content('Do not delete tags')
+ end
end
end
diff --git a/spec/features/task_lists_spec.rb b/spec/features/task_lists_spec.rb
index aeb0534b733..485b0b287ad 100644
--- a/spec/features/task_lists_spec.rb
+++ b/spec/features/task_lists_spec.rb
@@ -63,7 +63,7 @@ feature 'Task Lists' do
end
describe 'for Issues' do
- describe 'multiple tasks', js: true do
+ describe 'multiple tasks', :js do
let!(:issue) { create(:issue, description: markdown, author: user, project: project) }
it 'renders' do
@@ -103,7 +103,7 @@ feature 'Task Lists' do
end
end
- describe 'single incomplete task', js: true do
+ describe 'single incomplete task', :js do
let!(:issue) { create(:issue, description: singleIncompleteMarkdown, author: user, project: project) }
it 'renders' do
@@ -122,7 +122,7 @@ feature 'Task Lists' do
end
end
- describe 'single complete task', js: true do
+ describe 'single complete task', :js do
let!(:issue) { create(:issue, description: singleCompleteMarkdown, author: user, project: project) }
it 'renders' do
@@ -141,7 +141,7 @@ feature 'Task Lists' do
end
end
- describe 'nested tasks', js: true do
+ describe 'nested tasks', :js do
let(:issue) { create(:issue, description: nested_tasks_markdown, author: user, project: project) }
before do
diff --git a/spec/features/triggers_spec.rb b/spec/features/triggers_spec.rb
index 47664de469a..548d8372a07 100644
--- a/spec/features/triggers_spec.rb
+++ b/spec/features/triggers_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Triggers', js: true do
+feature 'Triggers', :js do
let(:trigger_title) { 'trigger desc' }
let(:user) { create(:user) }
let(:user2) { create(:user) }
diff --git a/spec/features/uploads/user_uploads_file_to_note_spec.rb b/spec/features/uploads/user_uploads_file_to_note_spec.rb
index e1c95590af1..1261ffdc2ee 100644
--- a/spec/features/uploads/user_uploads_file_to_note_spec.rb
+++ b/spec/features/uploads/user_uploads_file_to_note_spec.rb
@@ -14,20 +14,20 @@ feature 'User uploads file to note' do
end
context 'before uploading' do
- it 'shows "Attach a file" button', js: true do
+ it 'shows "Attach a file" button', :js do
expect(page).to have_button('Attach a file')
expect(page).not_to have_selector('.uploading-progress-container', visible: true)
end
end
context 'uploading is in progress' do
- it 'shows "Cancel" button on uploading', js: true do
+ it 'shows "Cancel" button on uploading', :js do
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
expect(page).to have_button('Cancel')
end
- it 'cancels uploading on clicking to "Cancel" button', js: true do
+ it 'cancels uploading on clicking to "Cancel" button', :js do
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
click_button 'Cancel'
@@ -37,20 +37,20 @@ feature 'User uploads file to note' do
expect(page).not_to have_selector('.uploading-progress-container', visible: true)
end
- it 'shows "Attaching a file" message on uploading 1 file', js: true do
+ it 'shows "Attaching a file" message on uploading 1 file', :js do
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
expect(page).to have_selector('.attaching-file-message', visible: true, text: 'Attaching a file -')
end
- it 'shows "Attaching 2 files" message on uploading 2 file', js: true do
+ it 'shows "Attaching 2 files" message on uploading 2 file', :js do
dropzone_file([Rails.root.join('spec', 'fixtures', 'video_sample.mp4'),
Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
expect(page).to have_selector('.attaching-file-message', visible: true, text: 'Attaching 2 files -')
end
- it 'shows error message, "retry" and "attach a new file" link a if file is too big', js: true do
+ it 'shows error message, "retry" and "attach a new file" link if file is too big', :js do
dropzone_file([Rails.root.join('spec', 'fixtures', 'video_sample.mp4')], 0.01)
error_text = 'File is too big (0.06MiB). Max filesize: 0.01MiB.'
@@ -63,7 +63,7 @@ feature 'User uploads file to note' do
end
context 'uploading is complete' do
- it 'shows "Attach a file" button on uploading complete', js: true do
+ it 'shows "Attach a file" button on uploading complete', :js do
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')])
wait_for_requests
@@ -71,7 +71,7 @@ feature 'User uploads file to note' do
expect(page).not_to have_selector('.uploading-progress-container', visible: true)
end
- scenario 'they see the attached file', js: true do
+ scenario 'they see the attached file', :js do
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')])
click_button 'Comment'
wait_for_requests
diff --git a/spec/features/user_callout_spec.rb b/spec/features/user_callout_spec.rb
deleted file mode 100644
index 37d66b618af..00000000000
--- a/spec/features/user_callout_spec.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-require 'spec_helper'
-
-describe 'User Callouts', js: true do
- let(:user) { create(:user) }
- let(:another_user) { create(:user) }
- let(:project) { create(:project, path: 'gitlab', name: 'sample') }
-
- before do
- sign_in(user)
- project.team << [user, :master]
- end
-
- it 'takes you to the profile preferences when the link is clicked' do
- visit dashboard_projects_path
- click_link 'Check it out'
- expect(current_path).to eq profile_preferences_path
- end
-
- it 'does not show when cookie is set' do
- visit dashboard_projects_path
-
- within('.user-callout') do
- find('.close').trigger('click')
- end
-
- visit dashboard_projects_path
-
- expect(page).not_to have_selector('.user-callout')
- end
-
- describe 'user callout should appear in two routes' do
- it 'shows up on the user profile' do
- visit user_path(user)
- expect(find('.user-callout')).to have_content 'Customize your experience'
- end
-
- it 'shows up on the dashboard projects' do
- visit dashboard_projects_path
- expect(find('.user-callout')).to have_content 'Customize your experience'
- end
- end
-
- it 'hides the user callout when click on the dismiss icon' do
- visit user_path(user)
- within('.user-callout') do
- find('.close').click
- end
- expect(page).not_to have_selector('.user-callout')
- end
-
- it 'does not show callout on another users profile' do
- visit user_path(another_user)
- expect(page).not_to have_selector('.user-callout')
- end
-end
diff --git a/spec/features/users/snippets_spec.rb b/spec/features/users/snippets_spec.rb
index 13760b4c2fc..8c697e33436 100644
--- a/spec/features/users/snippets_spec.rb
+++ b/spec/features/users/snippets_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Snippets tab on a user profile', js: true do
+describe 'Snippets tab on a user profile', :js do
context 'when the user has snippets' do
let(:user) { create(:user) }
diff --git a/spec/features/users_spec.rb b/spec/features/users_spec.rb
index 15b89dac572..0252c957c95 100644
--- a/spec/features/users_spec.rb
+++ b/spec/features/users_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Users', js: true do
+feature 'Users', :js do
let(:user) { create(:user, username: 'user1', name: 'User 1', email: 'user1@gitlab.com') }
scenario 'GET /users/sign_in creates a new user account' do
diff --git a/spec/features/variables_spec.rb b/spec/features/variables_spec.rb
index 6794bf4f4ba..5d8e818f7bf 100644
--- a/spec/features/variables_spec.rb
+++ b/spec/features/variables_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Project variables', js: true do
+describe 'Project variables', :js do
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:variable) { create(:ci_variable, key: 'test_key', value: 'test value') }
diff --git a/spec/finders/autocomplete_users_finder_spec.rb b/spec/finders/autocomplete_users_finder_spec.rb
new file mode 100644
index 00000000000..684af74d750
--- /dev/null
+++ b/spec/finders/autocomplete_users_finder_spec.rb
@@ -0,0 +1,97 @@
+require 'spec_helper'
+
+describe AutocompleteUsersFinder do
+ describe '#execute' do
+ let!(:user1) { create(:user, username: 'johndoe') }
+ let!(:user2) { create(:user, :blocked, username: 'notsorandom') }
+ let!(:external_user) { create(:user, :external) }
+ let!(:omniauth_user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
+ let(:current_user) { create(:user) }
+ let(:params) { {} }
+
+ let(:project) { nil }
+ let(:group) { nil }
+
+ subject { described_class.new(params: params, current_user: current_user, project: project, group: group).execute.to_a }
+
+ context 'when current_user not passed or nil' do
+ let(:current_user) { nil }
+
+ it { is_expected.to match_array([]) }
+ end
+
+ context 'when project passed' do
+ let(:project) { create(:project) }
+
+ it { is_expected.to match_array([project.owner]) }
+
+ context 'when author_id passed' do
+ let(:params) { { author_id: user2.id } }
+
+ it { is_expected.to match_array([project.owner, user2]) }
+ end
+ end
+
+ context 'when group passed and project not passed' do
+ let(:group) { create(:group, :public) }
+
+ before do
+ group.add_users([user1], GroupMember::DEVELOPER)
+ end
+
+ it { is_expected.to match_array([user1]) }
+ end
+
+ it { is_expected.to match_array([user1, external_user, omniauth_user, current_user]) }
+
+ context 'when filtered by search' do
+ let(:params) { { search: 'johndoe' } }
+
+ it { is_expected.to match_array([user1]) }
+ end
+
+ context 'when filtered by skip_users' do
+ let(:params) { { skip_users: [omniauth_user.id, current_user.id] } }
+
+ it { is_expected.to match_array([user1, external_user]) }
+ end
+
+ context 'when todos exist' do
+ let!(:pending_todo1) { create(:todo, user: current_user, author: user1, state: :pending) }
+ let!(:pending_todo2) { create(:todo, user: external_user, author: omniauth_user, state: :pending) }
+ let!(:done_todo1) { create(:todo, user: current_user, author: external_user, state: :done) }
+ let!(:done_todo2) { create(:todo, user: user1, author: external_user, state: :done) }
+
+ context 'when filtered by todo_filter without todo_state_filter' do
+ let(:params) { { todo_filter: true } }
+
+ it { is_expected.to match_array([]) }
+ end
+
+ context 'when filtered by todo_filter with pending todo_state_filter' do
+ let(:params) { { todo_filter: true, todo_state_filter: 'pending' } }
+
+ it { is_expected.to match_array([user1]) }
+ end
+
+ context 'when filtered by todo_filter with done todo_state_filter' do
+ let(:params) { { todo_filter: true, todo_state_filter: 'done' } }
+
+ it { is_expected.to match_array([external_user]) }
+ end
+ end
+
+ context 'when filtered by current_user' do
+ let(:current_user) { user2 }
+ let(:params) { { current_user: true } }
+
+ it { is_expected.to match_array([user2, user1, external_user, omniauth_user]) }
+ end
+
+ context 'when filtered by author_id' do
+ let(:params) { { author_id: user2.id } }
+
+ it { is_expected.to match_array([user2, user1, external_user, omniauth_user, current_user]) }
+ end
+ end
+end
diff --git a/spec/finders/fork_projects_finder_spec.rb b/spec/finders/fork_projects_finder_spec.rb
new file mode 100644
index 00000000000..f0cef7ea406
--- /dev/null
+++ b/spec/finders/fork_projects_finder_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+
+describe ForkProjectsFinder do
+ let(:source_project) { create(:project, :empty_repo) }
+ let(:private_fork) { create(:project, :private, :empty_repo, name: 'A') }
+ let(:internal_fork) { create(:project, :internal, :empty_repo, name: 'B') }
+ let(:public_fork) { create(:project, :public, :empty_repo, name: 'C') }
+
+ let(:non_member) { create(:user) }
+ let(:private_fork_member) { create(:user) }
+
+ before do
+ private_fork.add_developer(private_fork_member)
+
+ source_project.forks << private_fork
+ source_project.forks << internal_fork
+ source_project.forks << public_fork
+ end
+
+ describe '#execute' do
+ let(:finder) { described_class.new(source_project, params: {}, current_user: current_user) }
+
+ subject { finder.execute }
+
+ describe 'without a user' do
+ let(:current_user) { nil }
+
+ it { is_expected.to eq([public_fork]) }
+ end
+
+ describe 'with a user' do
+ let(:current_user) { non_member }
+
+ it { is_expected.to eq([public_fork, internal_fork]) }
+ end
+
+ describe 'with a member' do
+ let(:current_user) { private_fork_member }
+
+ it { is_expected.to eq([public_fork, internal_fork, private_fork]) }
+ end
+ end
+end
diff --git a/spec/finders/merge_request_target_project_finder_spec.rb b/spec/finders/merge_request_target_project_finder_spec.rb
new file mode 100644
index 00000000000..c81bfd7932c
--- /dev/null
+++ b/spec/finders/merge_request_target_project_finder_spec.rb
@@ -0,0 +1,54 @@
+require 'spec_helper'
+
+describe MergeRequestTargetProjectFinder do
+ include ProjectForksHelper
+
+ let(:user) { create(:user) }
+ subject(:finder) { described_class.new(current_user: user, source_project: forked_project) }
+
+ shared_examples 'finding related projects' do
+ it 'finds sibling projects and base project' do
+ other_fork
+
+ expect(finder.execute).to contain_exactly(base_project, other_fork, forked_project)
+ end
+
+ it 'does not include projects that have merge requests turned off' do
+ other_fork.project_feature.update!(merge_requests_access_level: ProjectFeature::DISABLED)
+ base_project.project_feature.update!(merge_requests_access_level: ProjectFeature::DISABLED)
+
+ expect(finder.execute).to contain_exactly(forked_project)
+ end
+ end
+
+ context 'public projects' do
+ let(:base_project) { create(:project, :public, path: 'base') }
+ let(:forked_project) { fork_project(base_project) }
+ let(:other_fork) { fork_project(base_project) }
+
+ it_behaves_like 'finding related projects'
+ end
+
+ context 'private projects' do
+ let(:base_project) { create(:project, :private, path: 'base') }
+ let(:forked_project) { fork_project(base_project, base_project.owner) }
+ let(:other_fork) { fork_project(base_project, base_project.owner) }
+
+ context 'when the user is a member of all projects' do
+ before do
+ base_project.add_developer(user)
+ forked_project.add_developer(user)
+ other_fork.add_developer(user)
+ end
+
+ it_behaves_like 'finding related projects'
+ end
+
+ it 'only finds the projects the user is a member of' do
+ other_fork.add_developer(user)
+ base_project.add_developer(user)
+
+ expect(finder.execute).to contain_exactly(other_fork, base_project)
+ end
+ end
+end
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 95f445e7905..883bdf3746a 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -1,12 +1,18 @@
require 'spec_helper'
describe MergeRequestsFinder do
+ include ProjectForksHelper
+
let(:user) { create :user }
let(:user2) { create :user }
- let(:project1) { create(:project) }
- let(:project2) { create(:project, forked_from_project: project1) }
- let(:project3) { create(:project, :archived, forked_from_project: project1) }
+ let(:project1) { create(:project, :public) }
+ let(:project2) { fork_project(project1, user) }
+ let(:project3) do
+ p = fork_project(project1, user)
+ p.update!(archived: true)
+ p
+ end
let!(:merge_request1) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project1) }
let!(:merge_request2) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project1, state: 'closed') }
diff --git a/spec/finders/users_finder_spec.rb b/spec/finders/users_finder_spec.rb
index 1bab6d64388..4249c52c481 100644
--- a/spec/finders/users_finder_spec.rb
+++ b/spec/finders/users_finder_spec.rb
@@ -56,6 +56,15 @@ describe UsersFinder do
expect(users.map(&:username)).not_to include([filtered_user_before.username, filtered_user_after.username])
end
+
+ it 'does not filter by custom attributes' do
+ users = described_class.new(
+ user,
+ custom_attributes: { foo: 'bar' }
+ ).execute
+
+ expect(users).to contain_exactly(user, user1, user2, omniauth_user)
+ end
end
context 'with an admin user' do
@@ -72,6 +81,19 @@ describe UsersFinder do
expect(users).to contain_exactly(admin, user1, user2, external_user, omniauth_user)
end
+
+ it 'filters by custom attributes' do
+ create :user_custom_attribute, user: user1, key: 'foo', value: 'foo'
+ create :user_custom_attribute, user: user1, key: 'bar', value: 'bar'
+ create :user_custom_attribute, user: user2, key: 'foo', value: 'foo'
+
+ users = described_class.new(
+ admin,
+ custom_attributes: { foo: 'foo', bar: 'bar' }
+ ).execute
+
+ expect(users).to contain_exactly(user1)
+ end
end
end
end
diff --git a/spec/fixtures/api/schemas/cluster_status.json b/spec/fixtures/api/schemas/cluster_status.json
new file mode 100644
index 00000000000..1f255a17881
--- /dev/null
+++ b/spec/fixtures/api/schemas/cluster_status.json
@@ -0,0 +1,11 @@
+{
+ "type": "object",
+ "required" : [
+ "status"
+ ],
+ "properties" : {
+ "status": { "type": "string" },
+ "status_reason": { "type": ["string", "null"] }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/entities/merge_request.json b/spec/fixtures/api/schemas/entities/merge_request.json
index 1030f323a1f..ba094ba1657 100644
--- a/spec/fixtures/api/schemas/entities/merge_request.json
+++ b/spec/fixtures/api/schemas/entities/merge_request.json
@@ -46,6 +46,7 @@
"branch_missing": { "type": "boolean" },
"has_conflicts": { "type": "boolean" },
"can_be_merged": { "type": "boolean" },
+ "mergeable": { "type": "boolean" },
"project_archived": { "type": "boolean" },
"only_allow_merge_if_pipeline_succeeds": { "type": "boolean" },
"has_ci": { "type": "boolean" },
@@ -93,10 +94,12 @@
"merge_commit_message_with_description": { "type": "string" },
"diverged_commits_count": { "type": "integer" },
"commit_change_content_path": { "type": "string" },
- "remove_wip_path": { "type": "string" },
+ "remove_wip_path": { "type": ["string", "null"] },
"commits_count": { "type": "integer" },
"remove_source_branch": { "type": ["boolean", "null"] },
- "merge_ongoing": { "type": "boolean" }
+ "merge_ongoing": { "type": "boolean" },
+ "ff_only_enabled": { "type": ["boolean", false] },
+ "should_be_rebased": { "type": "boolean" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/commit/detail.json b/spec/fixtures/api/schemas/public_api/v4/commit/detail.json
index b7b2535c204..88a3cad62f6 100644
--- a/spec/fixtures/api/schemas/public_api/v4/commit/detail.json
+++ b/spec/fixtures/api/schemas/public_api/v4/commit/detail.json
@@ -5,11 +5,18 @@
{
"required" : [
"stats",
- "status"
+ "status",
+ "last_pipeline"
],
"properties": {
"stats": { "$ref": "../commit_stats.json" },
- "status": { "type": ["string", "null"] }
+ "status": { "type": ["string", "null"] },
+ "last_pipeline": {
+ "oneOf": [
+ { "type": "null" },
+ { "$ref": "../pipeline/basic.json" }
+ ]
+ }
}
}
]
diff --git a/spec/fixtures/api/schemas/public_api/v4/issues.json b/spec/fixtures/api/schemas/public_api/v4/issues.json
index 8acd9488215..5c08dbc3b96 100644
--- a/spec/fixtures/api/schemas/public_api/v4/issues.json
+++ b/spec/fixtures/api/schemas/public_api/v4/issues.json
@@ -9,6 +9,8 @@
"title": { "type": "string" },
"description": { "type": ["string", "null"] },
"state": { "type": "string" },
+ "discussion_locked": { "type": ["boolean", "null"] },
+ "closed_at": { "type": "date" },
"created_at": { "type": "date" },
"updated_at": { "type": "date" },
"labels": {
diff --git a/spec/fixtures/api/schemas/public_api/v4/merge_requests.json b/spec/fixtures/api/schemas/public_api/v4/merge_requests.json
index 31b3f4ba946..5828be5255b 100644
--- a/spec/fixtures/api/schemas/public_api/v4/merge_requests.json
+++ b/spec/fixtures/api/schemas/public_api/v4/merge_requests.json
@@ -72,6 +72,7 @@
"user_notes_count": { "type": "integer" },
"should_remove_source_branch": { "type": ["boolean", "null"] },
"force_remove_source_branch": { "type": ["boolean", "null"] },
+ "discussion_locked": { "type": ["boolean", "null"] },
"web_url": { "type": "uri" },
"time_stats": {
"time_estimate": { "type": "integer" },
diff --git a/spec/fixtures/api/schemas/public_api/v4/pipeline/basic.json b/spec/fixtures/api/schemas/public_api/v4/pipeline/basic.json
new file mode 100644
index 00000000000..0d127dc5297
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/pipeline/basic.json
@@ -0,0 +1,16 @@
+{
+ "type": "object",
+ "required" : [
+ "id",
+ "sha",
+ "ref",
+ "status"
+ ],
+ "properties" : {
+ "id": { "type": "integer" },
+ "sha": { "type": "string" },
+ "ref": { "type": "string" },
+ "status": { "type": "string" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/user/admins.json b/spec/fixtures/api/schemas/public_api/v4/user/admins.json
new file mode 100644
index 00000000000..4a107f0ddbe
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/user/admins.json
@@ -0,0 +1,4 @@
+{
+ "type": "array",
+ "items": { "$ref": "admin.json" }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/user/basics.json b/spec/fixtures/api/schemas/public_api/v4/user/basics.json
new file mode 100644
index 00000000000..6f7cf42229d
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/user/basics.json
@@ -0,0 +1,4 @@
+{
+ "type": "array",
+ "items": { "$ref": "basic.json" }
+}
diff --git a/spec/fixtures/api/schemas/registry/repositories.json b/spec/fixtures/api/schemas/registry/repositories.json
new file mode 100644
index 00000000000..4978bd89cda
--- /dev/null
+++ b/spec/fixtures/api/schemas/registry/repositories.json
@@ -0,0 +1,6 @@
+{
+ "type": "array",
+ "items": {
+ "$ref": "repository.json"
+ }
+}
diff --git a/spec/fixtures/api/schemas/registry/repository.json b/spec/fixtures/api/schemas/registry/repository.json
new file mode 100644
index 00000000000..4175642eb00
--- /dev/null
+++ b/spec/fixtures/api/schemas/registry/repository.json
@@ -0,0 +1,27 @@
+{
+ "type": "object",
+ "required" : [
+ "id",
+ "path",
+ "location",
+ "tags_path"
+ ],
+ "properties" : {
+ "id": {
+ "type": "integer"
+ },
+ "path": {
+ "type": "string"
+ },
+ "location": {
+ "type": "string"
+ },
+ "tags_path": {
+ "type": "string"
+ },
+ "destroy_path": {
+ "type": "string"
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/registry/tag.json b/spec/fixtures/api/schemas/registry/tag.json
new file mode 100644
index 00000000000..5bc307e0e64
--- /dev/null
+++ b/spec/fixtures/api/schemas/registry/tag.json
@@ -0,0 +1,28 @@
+{
+ "type": "object",
+ "required" : [
+ "name",
+ "location"
+ ],
+ "properties" : {
+ "name": {
+ "type": "string"
+ },
+ "location": {
+ "type": "string"
+ },
+ "revision": {
+ "type": "string"
+ },
+ "total_size": {
+ "type": "integer"
+ },
+ "created_at": {
+ "type": "date"
+ },
+ "destroy_path": {
+ "type": "string"
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/registry/tags.json b/spec/fixtures/api/schemas/registry/tags.json
new file mode 100644
index 00000000000..c72f957459a
--- /dev/null
+++ b/spec/fixtures/api/schemas/registry/tags.json
@@ -0,0 +1,6 @@
+{
+ "type": "array",
+ "items": {
+ "$ref": "tag.json"
+ }
+}
diff --git a/spec/fixtures/config/kubeconfig.yml b/spec/fixtures/config/kubeconfig.yml
index c4e8e573c32..5152dae0104 100644
--- a/spec/fixtures/config/kubeconfig.yml
+++ b/spec/fixtures/config/kubeconfig.yml
@@ -4,7 +4,7 @@ clusters:
- name: gitlab-deploy
cluster:
server: https://kube.domain.com
- certificate-authority-data: "UEVN\n"
+ certificate-authority-data: "UEVN"
contexts:
- name: gitlab-deploy
context:
diff --git a/spec/fixtures/trace/trace_with_sections b/spec/fixtures/trace/trace_with_sections
new file mode 100644
index 00000000000..21dff3928c3
--- /dev/null
+++ b/spec/fixtures/trace/trace_with_sections
@@ -0,0 +1,15 @@
+Running with gitlab-runner dev (HEAD)
+ on kitsune minikube (a21b584f)
+WARNING: Namespace is empty, therefore assuming 'default'.
+Using Kubernetes namespace: default
+Using Kubernetes executor with image alpine:3.4 ...
+section_start:1506004954:prepare_script Waiting for pod default/runner-a21b584f-project-1208199-concurrent-0sg03f to be running, status is Pending
+Running on runner-a21b584f-project-1208199-concurrent-0sg03f via kitsune.local...
+section_end:1506004957:prepare_script section_start:1506004957:get_sources Cloning repository...
+Cloning into '/nolith/ci-tests'...
+Checking out dddd7a6e as master...
+Skipping Git submodules setup
+section_end:1506004958:get_sources section_start:1506004958:restore_cache section_end:1506004958:restore_cache section_start:1506004958:download_artifacts section_end:1506004958:download_artifacts section_start:1506004958:build_script $ whoami
+root
+section_end:1506004959:build_script section_start:1506004959:after_script section_end:1506004959:after_script section_start:1506004959:archive_cache section_end:1506004959:archive_cache section_start:1506004959:upload_artifacts section_end:1506004959:upload_artifacts Job succeeded
+
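The fixture above interleaves section_start:<unix timestamp>:<name> and section_end:<unix timestamp>:<name> markers with the regular job log. A rough Ruby sketch of pulling per-section durations out of such a trace follows; the regex and the returned hash shape are assumptions for illustration, not the parser used by the application.
MARKER = /section_(?<event>start|end):(?<timestamp>\d+):(?<name>[a-z_]+)/

def trace_sections(trace)
  starts = {}
  sections = []
  trace.scan(MARKER) do
    match = Regexp.last_match
    time = Integer(match[:timestamp])
    if match[:event] == 'start'
      starts[match[:name]] = time
    elsif starts.key?(match[:name])
      sections << { name: match[:name], duration: time - starts.delete(match[:name]) }
    end
  end
  sections
end

# trace_sections(File.read('spec/fixtures/trace/trace_with_sections'))
# => [{ name: "prepare_script", duration: 3 }, { name: "get_sources", duration: 1 }, ...]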
diff --git a/spec/helpers/auto_devops_helper_spec.rb b/spec/helpers/auto_devops_helper_spec.rb
index b6d892548ef..5e272af6073 100644
--- a/spec/helpers/auto_devops_helper_spec.rb
+++ b/spec/helpers/auto_devops_helper_spec.rb
@@ -10,6 +10,8 @@ describe AutoDevopsHelper do
before do
allow(helper).to receive(:can?).with(user, :admin_pipeline, project) { allowed }
allow(helper).to receive(:current_user) { user }
+
+ Feature.get(:auto_devops_banner_disabled).disable
end
subject { helper.show_auto_devops_callout?(project) }
@@ -18,6 +20,14 @@ describe AutoDevopsHelper do
it { is_expected.to eq(true) }
end
+ context 'when the banner is disabled by feature flag' do
+ it 'allows the feature flag to disable' do
+ Feature.get(:auto_devops_banner_disabled).enable
+
+ expect(subject).to be(false)
+ end
+ end
+
context 'when dismissed' do
before do
helper.request.cookies[:auto_devops_settings_dismissed] = 'true'
@@ -55,5 +65,21 @@ describe AutoDevopsHelper do
it { is_expected.to eq(false) }
end
+
+ context 'when master contains a .gitlab-ci.yml file' do
+ before do
+ allow(project.repository).to receive(:gitlab_ci_yml).and_return("script: ['test']")
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when another service is enabled' do
+ before do
+ create(:service, project: project, category: :ci, active: true)
+ end
+
+ it { is_expected.to eq(false) }
+ end
end
end
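The new examples above pin down three suppression conditions for the callout: the :auto_devops_banner_disabled feature flag, an existing .gitlab-ci.yml on master, and another active CI service. A hedged sketch of a guard with that behaviour is below; every name not appearing in the spec (such as the service lookup) is an illustrative assumption.
def show_auto_devops_callout?(project)
  return false if Feature.get(:auto_devops_banner_disabled).enabled?
  return false if project.repository.gitlab_ci_yml.present?
  # Assumed lookup: any active service in the :ci category hides the banner.
  return false if project.services.any? { |service| service.category == :ci && service.active? }

  cookies[:auto_devops_settings_dismissed] != 'true' &&
    can?(current_user, :admin_pipeline, project)
end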
diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb
index 4632c679972..f44e7ef6843 100644
--- a/spec/helpers/avatars_helper_spec.rb
+++ b/spec/helpers/avatars_helper_spec.rb
@@ -26,12 +26,13 @@ describe AvatarsHelper do
subject { helper.user_avatar_without_link(options) }
it 'displays user avatar' do
- is_expected.to eq image_tag(
- LazyImageTagHelper.placeholder_image,
- class: 'avatar s16 has-tooltip lazy',
+ is_expected.to eq tag(
+ :img,
alt: "#{user.name}'s avatar",
- title: user.name,
- data: { container: 'body', src: avatar_icon(user, 16) }
+ src: avatar_icon(user, 16),
+ data: { container: 'body' },
+ class: 'avatar s16 has-tooltip',
+ title: user.name
)
end
@@ -39,12 +40,13 @@ describe AvatarsHelper do
let(:options) { { user: user, css_class: '.cat-pics' } }
it 'uses provided css_class' do
- is_expected.to eq image_tag(
- LazyImageTagHelper.placeholder_image,
- class: "avatar s16 #{options[:css_class]} has-tooltip lazy",
+ is_expected.to eq tag(
+ :img,
alt: "#{user.name}'s avatar",
- title: user.name,
- data: { container: 'body', src: avatar_icon(user, 16) }
+ src: avatar_icon(user, 16),
+ data: { container: 'body' },
+ class: "avatar s16 #{options[:css_class]} has-tooltip",
+ title: user.name
)
end
end
@@ -53,12 +55,13 @@ describe AvatarsHelper do
let(:options) { { user: user, size: 99 } }
it 'uses provided size' do
- is_expected.to eq image_tag(
- LazyImageTagHelper.placeholder_image,
- class: "avatar s#{options[:size]} has-tooltip lazy",
+ is_expected.to eq tag(
+ :img,
alt: "#{user.name}'s avatar",
- title: user.name,
- data: { container: 'body', src: avatar_icon(user, options[:size]) }
+ src: avatar_icon(user, options[:size]),
+ data: { container: 'body' },
+ class: "avatar s#{options[:size]} has-tooltip",
+ title: user.name
)
end
end
@@ -67,12 +70,28 @@ describe AvatarsHelper do
let(:options) { { user: user, url: '/over/the/rainbow.png' } }
it 'uses provided url' do
- is_expected.to eq image_tag(
- LazyImageTagHelper.placeholder_image,
- class: 'avatar s16 has-tooltip lazy',
+ is_expected.to eq tag(
+ :img,
alt: "#{user.name}'s avatar",
- title: user.name,
- data: { container: 'body', src: options[:url] }
+ src: options[:url],
+ data: { container: 'body' },
+ class: "avatar s16 has-tooltip",
+ title: user.name
+ )
+ end
+ end
+
+ context 'with lazy parameter' do
+ let(:options) { { user: user, lazy: true } }
+
+ it 'adds `lazy` class to class list, sets `data-src` with avatar URL and `src` with placeholder image' do
+ is_expected.to eq tag(
+ :img,
+ alt: "#{user.name}'s avatar",
+ src: LazyImageTagHelper.placeholder_image,
+ data: { container: 'body', src: avatar_icon(user, 16) },
+ class: "avatar s16 has-tooltip lazy",
+ title: user.name
)
end
end
@@ -82,12 +101,13 @@ describe AvatarsHelper do
let(:options) { { user: user, has_tooltip: true } }
it 'adds has-tooltip' do
- is_expected.to eq image_tag(
- LazyImageTagHelper.placeholder_image,
- class: 'avatar s16 has-tooltip lazy',
+ is_expected.to eq tag(
+ :img,
alt: "#{user.name}'s avatar",
- title: user.name,
- data: { container: 'body', src: avatar_icon(user, 16) }
+ src: avatar_icon(user, 16),
+ data: { container: 'body' },
+ class: "avatar s16 has-tooltip",
+ title: user.name
)
end
end
@@ -96,12 +116,12 @@ describe AvatarsHelper do
let(:options) { { user: user, has_tooltip: false } }
it 'does not add has-tooltip or data container' do
- is_expected.to eq image_tag(
- LazyImageTagHelper.placeholder_image,
- class: 'avatar s16 lazy',
+ is_expected.to eq tag(
+ :img,
alt: "#{user.name}'s avatar",
- title: user.name,
- data: { src: avatar_icon(user, 16) }
+ src: avatar_icon(user, 16),
+ class: "avatar s16",
+ title: user.name
)
end
end
@@ -114,23 +134,25 @@ describe AvatarsHelper do
let(:options) { { user: user, user_name: 'Tinky Winky' } }
it 'prefers user parameter' do
- is_expected.to eq image_tag(
- LazyImageTagHelper.placeholder_image,
- class: 'avatar s16 has-tooltip lazy',
+ is_expected.to eq tag(
+ :img,
alt: "#{user.name}'s avatar",
- title: user.name,
- data: { container: 'body', src: avatar_icon(user, 16) }
+ src: avatar_icon(user, 16),
+ data: { container: 'body' },
+ class: "avatar s16 has-tooltip",
+ title: user.name
)
end
end
it 'uses user_name and user_email parameter if user is not present' do
- is_expected.to eq image_tag(
- LazyImageTagHelper.placeholder_image,
- class: 'avatar s16 has-tooltip lazy',
+ is_expected.to eq tag(
+ :img,
alt: "#{options[:user_name]}'s avatar",
- title: options[:user_name],
- data: { container: 'body', src: avatar_icon(options[:user_email], 16) }
+ src: avatar_icon(options[:user_email], 16),
+ data: { container: 'body' },
+ class: "avatar s16 has-tooltip",
+ title: options[:user_name]
)
end
end
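The reworked expectations above all build the same img options hash; only the lazy: true case moves the real URL into data-src and points src at a placeholder. A small sketch of that option-building step, assuming a helper shaped like the one under test (names not used in the spec are illustrative):
def avatar_image_options(url, size: 16, css_class: nil, has_tooltip: true, lazy: false)
  classes = ['avatar', "s#{size}", css_class]
  classes << 'has-tooltip' if has_tooltip
  classes << 'lazy' if lazy

  options = { src: url, class: classes.compact.join(' ') }
  options[:data] = { container: 'body' } if has_tooltip

  if lazy
    # Defer the real URL to data-src; src shows the shared placeholder image.
    (options[:data] ||= {})[:src] = url
    options[:src] = LazyImageTagHelper.placeholder_image
  end

  options
end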
diff --git a/spec/helpers/commits_helper_spec.rb b/spec/helpers/commits_helper_spec.rb
index 7179185285c..4b6c7c33e5b 100644
--- a/spec/helpers/commits_helper_spec.rb
+++ b/spec/helpers/commits_helper_spec.rb
@@ -12,6 +12,17 @@ describe CommitsHelper do
expect(helper.commit_author_link(commit))
.not_to include('onmouseover="alert(1)"')
end
+
+ it 'escapes the author name' do
+ user = build_stubbed(:user, name: 'Foo <script>alert("XSS")</script>')
+
+ commit = double(author: user, author_name: '', author_email: '')
+
+ expect(helper.commit_author_link(commit))
+ .to include('Foo &lt;script&gt;')
+ expect(helper.commit_author_link(commit, avatar: true))
+ .to include('commit-author-name', 'Foo &lt;script&gt;')
+ end
end
describe 'commit_committer_link' do
@@ -25,6 +36,17 @@ describe CommitsHelper do
expect(helper.commit_committer_link(commit))
.not_to include('onmouseover="alert(1)"')
end
+
+ it 'escapes the committer name' do
+ user = build_stubbed(:user, name: 'Foo <script>alert("XSS")</script>')
+
+ commit = double(committer: user, committer_name: '', committer_email: '')
+
+ expect(helper.commit_committer_link(commit))
+ .to include('Foo &lt;script&gt;')
+ expect(helper.commit_committer_link(commit, avatar: true))
+ .to include('commit-committer-name', 'Foo &lt;script&gt;')
+ end
end
describe '#view_on_environment_button' do
diff --git a/spec/helpers/diff_helper_spec.rb b/spec/helpers/diff_helper_spec.rb
index 0deea0ff6a3..f9c31ac61d8 100644
--- a/spec/helpers/diff_helper_spec.rb
+++ b/spec/helpers/diff_helper_spec.rb
@@ -136,9 +136,9 @@ describe DiffHelper do
marked_old_line, marked_new_line = mark_inline_diffs(old_line, new_line)
expect(marked_old_line).to eq(%q{abc <span class="idiff left right deletion">'def'</span>})
- expect(marked_old_line).not_to be_html_safe
+ expect(marked_old_line).to be_html_safe
expect(marked_new_line).to eq(%q{abc <span class="idiff left right addition">"def"</span>})
- expect(marked_new_line).not_to be_html_safe
+ expect(marked_new_line).to be_html_safe
end
end
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index 36031ac1a28..76e5964ccf7 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -17,7 +17,7 @@ describe GroupsHelper do
it 'gives default avatar_icon when no avatar is present' do
group = create(:group)
group.save!
- expect(group_icon(group.path)).to match('group_avatar.png')
+ expect(group_icon(group.path)).to match_asset_path('group_avatar.png')
end
end
diff --git a/spec/helpers/icons_helper_spec.rb b/spec/helpers/icons_helper_spec.rb
index 91c8faea7fd..3d79dac284f 100644
--- a/spec/helpers/icons_helper_spec.rb
+++ b/spec/helpers/icons_helper_spec.rb
@@ -16,6 +16,25 @@ describe IconsHelper do
end
end
+ describe 'sprite_icon' do
+ icon_name = 'clock'
+
+ it 'returns svg icon html' do
+ expect(sprite_icon(icon_name).to_s)
+ .to eq "<svg><use xlink:href=\"/images/icons.svg##{icon_name}\"></use></svg>"
+ end
+
+ it 'returns svg icon html + size classes' do
+ expect(sprite_icon(icon_name, size: 72).to_s)
+ .to eq "<svg class=\"s72\"><use xlink:href=\"/images/icons.svg##{icon_name}\"></use></svg>"
+ end
+
+ it 'returns svg icon html + size classes + additional class' do
+ expect(sprite_icon(icon_name, size: 72, css_class: 'icon-danger').to_s)
+ .to eq "<svg class=\"s72 icon-danger\"><use xlink:href=\"/images/icons.svg##{icon_name}\"></use></svg>"
+ end
+ end
+
describe 'file_type_icon_class' do
it 'returns folder class' do
expect(file_type_icon_class('folder', 0, 'folder_name')).to eq 'folder'
diff --git a/spec/helpers/merge_requests_helper_spec.rb b/spec/helpers/merge_requests_helper_spec.rb
index 7d1c17909bf..fd7900c32f4 100644
--- a/spec/helpers/merge_requests_helper_spec.rb
+++ b/spec/helpers/merge_requests_helper_spec.rb
@@ -1,6 +1,7 @@
require 'spec_helper'
describe MergeRequestsHelper do
+ include ProjectForksHelper
describe 'ci_build_details_path' do
let(:project) { create(:project) }
let(:merge_request) { MergeRequest.new }
@@ -31,10 +32,10 @@ describe MergeRequestsHelper do
describe 'within different projects' do
let(:project) { create(:project) }
- let(:fork_project) { create(:project, forked_from_project: project) }
- let(:merge_request) { create(:merge_request, source_project: fork_project, target_project: project) }
+ let(:forked_project) { fork_project(project) }
+ let(:merge_request) { create(:merge_request, source_project: forked_project, target_project: project) }
subject { format_mr_branch_names(merge_request) }
- let(:source_title) { "#{fork_project.full_path}:#{merge_request.source_branch}" }
+ let(:source_title) { "#{forked_project.full_path}:#{merge_request.source_branch}" }
let(:target_title) { "#{project.full_path}:#{merge_request.target_branch}" }
it { is_expected.to eq([source_title, target_title]) }
diff --git a/spec/helpers/page_layout_helper_spec.rb b/spec/helpers/page_layout_helper_spec.rb
index 9aca3987657..baf927a9acc 100644
--- a/spec/helpers/page_layout_helper_spec.rb
+++ b/spec/helpers/page_layout_helper_spec.rb
@@ -54,7 +54,7 @@ describe PageLayoutHelper do
describe 'page_image' do
it 'defaults to the GitLab logo' do
- expect(helper.page_image).to end_with 'assets/gitlab_logo.png'
+ expect(helper.page_image).to match_asset_path 'assets/gitlab_logo.png'
end
%w(project user group).each do |type|
@@ -70,13 +70,13 @@ describe PageLayoutHelper do
object = double(avatar_url: nil)
assign(type, object)
- expect(helper.page_image).to end_with 'assets/gitlab_logo.png'
+ expect(helper.page_image).to match_asset_path 'assets/gitlab_logo.png'
end
end
context "with no assignments" do
it 'falls back to the default' do
- expect(helper.page_image).to end_with 'assets/gitlab_logo.png'
+ expect(helper.page_image).to match_asset_path 'assets/gitlab_logo.png'
end
end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 49cb7c954b4..5777b5c4025 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -63,7 +63,7 @@ describe ProjectsHelper do
end
end
- describe "#project_list_cache_key", clean_gitlab_redis_shared_state: true do
+ describe "#project_list_cache_key", :clean_gitlab_redis_shared_state do
let(:project) { create(:project, :repository) }
it "includes the route" do
@@ -200,13 +200,13 @@ describe ProjectsHelper do
end
it 'returns image tag for member avatar' do
- expect(helper).to receive(:image_tag).with(expected, { width: 16, class: ["avatar", "avatar-inline", "s16"], alt: "" })
+ expect(helper).to receive(:image_tag).with(expected, { width: 16, class: ["avatar", "avatar-inline", "s16"], alt: "", "data-src" => anything })
helper.link_to_member_avatar(user)
end
it 'returns image tag with avatar class' do
- expect(helper).to receive(:image_tag).with(expected, { width: 16, class: ["avatar", "avatar-inline", "s16", "any-avatar-class"], alt: "" })
+ expect(helper).to receive(:image_tag).with(expected, { width: 16, class: ["avatar", "avatar-inline", "s16", "any-avatar-class"], alt: "", "data-src" => anything })
helper.link_to_member_avatar(user, avatar_class: "any-avatar-class")
end
@@ -313,23 +313,10 @@ describe ProjectsHelper do
it 'returns recent push on the current project' do
event = double(:event)
- expect(user).to receive(:recent_push).with([project.id]).and_return(event)
+ expect(user).to receive(:recent_push).with(project).and_return(event)
expect(helper.last_push_event).to eq(event)
end
-
- context 'when current user has a fork of the current project' do
- let(:fork) { double(:fork, id: 2) }
-
- it 'returns recent push considering fork events' do
- expect(user).to receive(:fork_of).with(project).and_return(fork)
-
- event_on_fork = double(:event)
- expect(user).to receive(:recent_push).with([project.id, fork.id]).and_return(event_on_fork)
-
- expect(helper.last_push_event).to eq(event_on_fork)
- end
- end
end
describe "#project_feature_access_select" do
@@ -433,22 +420,26 @@ describe ProjectsHelper do
end
end
- describe '#has_projects_or_name?' do
+ describe '#show_projects' do
let(:projects) do
create(:project)
Project.all
end
it 'returns true when there are projects' do
- expect(helper.has_projects_or_name?(projects, {})).to eq(true)
+ expect(helper.show_projects?(projects, {})).to eq(true)
end
it 'returns true when there are no projects but a name is given' do
- expect(helper.has_projects_or_name?(Project.none, name: 'foo')).to eq(true)
+ expect(helper.show_projects?(Project.none, name: 'foo')).to eq(true)
+ end
+
+ it 'returns true when there are no projects but personal is present' do
+ expect(helper.show_projects?(Project.none, personal: 'true')).to eq(true)
end
it 'returns false when there are no projects and there is no name' do
- expect(helper.has_projects_or_name?(Project.none, {})).to eq(false)
+ expect(helper.show_projects?(Project.none, {})).to eq(false)
end
end
@@ -482,4 +473,15 @@ describe ProjectsHelper do
expect(recorder.count).to eq(1)
end
end
+
+ describe '#git_user_name' do
+ let(:user) { double(:user, name: 'John "A" Doe53') }
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
+ it 'parses quotes in name' do
+ expect(helper.send(:git_user_name)).to eq('John \"A\" Doe53')
+ end
+ end
end
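The #git_user_name example above only shows the expected output (double quotes escaped with a backslash). One minimal way to get that behaviour, assuming the helper simply reads the current user's name, is sketched here; the block form of gsub is used so the backslash in the replacement is taken literally.
def git_user_name
  # 'John "A" Doe53' => 'John \"A\" Doe53', safe to embed inside a
  # double-quoted git/shell argument.
  current_user.name.gsub('"') { '\"' }
end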
diff --git a/spec/helpers/submodule_helper_spec.rb b/spec/helpers/submodule_helper_spec.rb
index c4f4e0d21dc..5a2e4b34069 100644
--- a/spec/helpers/submodule_helper_spec.rb
+++ b/spec/helpers/submodule_helper_spec.rb
@@ -147,6 +147,12 @@ describe SubmoduleHelper do
expect(helper.submodule_links(submodule_item)).to eq([nil, nil])
end
+ it 'sanitizes invalid URL with extended ASCII' do
+ stub_url('é')
+
+ expect(helper.submodule_links(submodule_item)).to eq([nil, nil])
+ end
+
it 'returns original' do
stub_url('http://mygitserver.com/gitlab-org/gitlab-ce')
expect(submodule_links(submodule_item)).to eq([repo.submodule_url_for, nil])
diff --git a/spec/initializers/doorkeeper_spec.rb b/spec/initializers/doorkeeper_spec.rb
index 74bdbb01166..1a78196e33d 100644
--- a/spec/initializers/doorkeeper_spec.rb
+++ b/spec/initializers/doorkeeper_spec.rb
@@ -9,8 +9,8 @@ describe Doorkeeper.configuration do
end
describe '#optional_scopes' do
- it 'matches Gitlab::Auth::OPTIONAL_SCOPES' do
- expect(subject.optional_scopes).to eq Gitlab::Auth::OPTIONAL_SCOPES
+ it 'matches Gitlab::Auth.optional_scopes' do
+ expect(subject.optional_scopes).to eq Gitlab::Auth.optional_scopes - Gitlab::Auth::REGISTRY_SCOPES
end
end
diff --git a/spec/initializers/secret_token_spec.rb b/spec/initializers/secret_token_spec.rb
index 84ad55e9f98..d56e14e0e0b 100644
--- a/spec/initializers/secret_token_spec.rb
+++ b/spec/initializers/secret_token_spec.rb
@@ -36,10 +36,10 @@ describe 'create_tokens' do
expect(keys).to all(match(HEX_KEY))
end
- it 'generates an RSA key for jws_private_key' do
+ it 'generates an RSA key for openid_connect_signing_key' do
create_tokens
- keys = secrets.values_at(:jws_private_key)
+ keys = secrets.values_at(:openid_connect_signing_key)
expect(keys.uniq).to eq(keys)
expect(keys).to all(match(RSA_KEY))
@@ -49,7 +49,7 @@ describe 'create_tokens' do
expect(self).to receive(:warn_missing_secret).with('secret_key_base')
expect(self).to receive(:warn_missing_secret).with('otp_key_base')
expect(self).to receive(:warn_missing_secret).with('db_key_base')
- expect(self).to receive(:warn_missing_secret).with('jws_private_key')
+ expect(self).to receive(:warn_missing_secret).with('openid_connect_signing_key')
create_tokens
end
@@ -61,7 +61,7 @@ describe 'create_tokens' do
expect(new_secrets['secret_key_base']).to eq(secrets.secret_key_base)
expect(new_secrets['otp_key_base']).to eq(secrets.otp_key_base)
expect(new_secrets['db_key_base']).to eq(secrets.db_key_base)
- expect(new_secrets['jws_private_key']).to eq(secrets.jws_private_key)
+ expect(new_secrets['openid_connect_signing_key']).to eq(secrets.openid_connect_signing_key)
end
create_tokens
@@ -77,7 +77,7 @@ describe 'create_tokens' do
context 'when the other secrets all exist' do
before do
secrets.db_key_base = 'db_key_base'
- secrets.jws_private_key = 'jws_private_key'
+ secrets.openid_connect_signing_key = 'openid_connect_signing_key'
allow(File).to receive(:exist?).with('.secret').and_return(true)
allow(File).to receive(:read).with('.secret').and_return('file_key')
@@ -88,7 +88,7 @@ describe 'create_tokens' do
stub_env('SECRET_KEY_BASE', 'env_key')
secrets.secret_key_base = 'secret_key_base'
secrets.otp_key_base = 'otp_key_base'
- secrets.jws_private_key = 'jws_private_key'
+ secrets.openid_connect_signing_key = 'openid_connect_signing_key'
end
it 'does not issue a warning' do
@@ -114,7 +114,7 @@ describe 'create_tokens' do
before do
secrets.secret_key_base = 'secret_key_base'
secrets.otp_key_base = 'otp_key_base'
- secrets.jws_private_key = 'jws_private_key'
+ secrets.openid_connect_signing_key = 'openid_connect_signing_key'
end
it 'does not write any files' do
@@ -129,7 +129,7 @@ describe 'create_tokens' do
expect(secrets.secret_key_base).to eq('secret_key_base')
expect(secrets.otp_key_base).to eq('otp_key_base')
expect(secrets.db_key_base).to eq('db_key_base')
- expect(secrets.jws_private_key).to eq('jws_private_key')
+ expect(secrets.openid_connect_signing_key).to eq('openid_connect_signing_key')
end
it 'deletes the .secret file' do
@@ -153,7 +153,7 @@ describe 'create_tokens' do
expect(new_secrets['secret_key_base']).to eq('file_key')
expect(new_secrets['otp_key_base']).to eq('file_key')
expect(new_secrets['db_key_base']).to eq('db_key_base')
- expect(new_secrets['jws_private_key']).to eq('jws_private_key')
+ expect(new_secrets['openid_connect_signing_key']).to eq('openid_connect_signing_key')
end
create_tokens
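The renamed openid_connect_signing_key secret asserted above is expected to match RSA_KEY, i.e. an RSA private key in PEM form. A minimal sketch of producing such a value (the key size and target file are assumptions, not the create_tokens implementation itself):
require 'openssl'
require 'yaml'

signing_key = OpenSSL::PKey::RSA.new(2048).to_pem

secrets = { 'production' => { 'openid_connect_signing_key' => signing_key } }
File.write('config/secrets.yml', YAML.dump(secrets))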
diff --git a/spec/javascripts/abuse_reports_spec.js b/spec/javascripts/abuse_reports_spec.js
index 13cab81dd60..7f6b5873011 100644
--- a/spec/javascripts/abuse_reports_spec.js
+++ b/spec/javascripts/abuse_reports_spec.js
@@ -1,43 +1,41 @@
import '~/lib/utils/text_utility';
-import '~/abuse_reports';
-
-((global) => {
- describe('Abuse Reports', () => {
- const FIXTURE = 'abuse_reports/abuse_reports_list.html.raw';
- const MAX_MESSAGE_LENGTH = 500;
-
- let $messages;
-
- const assertMaxLength = $message => expect($message.text().length).toEqual(MAX_MESSAGE_LENGTH);
- const findMessage = searchText => $messages.filter(
- (index, element) => element.innerText.indexOf(searchText) > -1,
- ).first();
-
- preloadFixtures(FIXTURE);
-
- beforeEach(function () {
- loadFixtures(FIXTURE);
- this.abuseReports = new global.AbuseReports();
- $messages = $('.abuse-reports .message');
- });
-
- it('should truncate long messages', () => {
- const $longMessage = findMessage('LONG MESSAGE');
- expect($longMessage.data('original-message')).toEqual(jasmine.anything());
- assertMaxLength($longMessage);
- });
-
- it('should not truncate short messages', () => {
- const $shortMessage = findMessage('SHORT MESSAGE');
- expect($shortMessage.data('original-message')).not.toEqual(jasmine.anything());
- });
-
- it('should allow clicking a truncated message to expand and collapse the full message', () => {
- const $longMessage = findMessage('LONG MESSAGE');
- $longMessage.click();
- expect($longMessage.data('original-message').length).toEqual($longMessage.text().length);
- $longMessage.click();
- assertMaxLength($longMessage);
- });
+import AbuseReports from '~/abuse_reports';
+
+describe('Abuse Reports', () => {
+ const FIXTURE = 'abuse_reports/abuse_reports_list.html.raw';
+ const MAX_MESSAGE_LENGTH = 500;
+
+ let $messages;
+
+ const assertMaxLength = $message => expect($message.text().length).toEqual(MAX_MESSAGE_LENGTH);
+ const findMessage = searchText => $messages.filter(
+ (index, element) => element.innerText.indexOf(searchText) > -1,
+ ).first();
+
+ preloadFixtures(FIXTURE);
+
+ beforeEach(function () {
+ loadFixtures(FIXTURE);
+ this.abuseReports = new AbuseReports();
+ $messages = $('.abuse-reports .message');
+ });
+
+ it('should truncate long messages', () => {
+ const $longMessage = findMessage('LONG MESSAGE');
+ expect($longMessage.data('original-message')).toEqual(jasmine.anything());
+ assertMaxLength($longMessage);
+ });
+
+ it('should not truncate short messages', () => {
+ const $shortMessage = findMessage('SHORT MESSAGE');
+ expect($shortMessage.data('original-message')).not.toEqual(jasmine.anything());
+ });
+
+ it('should allow clicking a truncated message to expand and collapse the full message', () => {
+ const $longMessage = findMessage('LONG MESSAGE');
+ $longMessage.click();
+ expect($longMessage.data('original-message').length).toEqual($longMessage.text().length);
+ $longMessage.click();
+ assertMaxLength($longMessage);
});
-})(window.gl);
+});
diff --git a/spec/javascripts/ajax_loading_spinner_spec.js b/spec/javascripts/ajax_loading_spinner_spec.js
index 46e072a8ebb..c93b7cc6cac 100644
--- a/spec/javascripts/ajax_loading_spinner_spec.js
+++ b/spec/javascripts/ajax_loading_spinner_spec.js
@@ -1,6 +1,6 @@
import 'jquery';
import 'jquery-ujs';
-import '~/ajax_loading_spinner';
+import AjaxLoadingSpinner from '~/ajax_loading_spinner';
describe('Ajax Loading Spinner', () => {
const fixtureTemplate = 'static/ajax_loading_spinner.html.raw';
@@ -8,7 +8,7 @@ describe('Ajax Loading Spinner', () => {
beforeEach(() => {
loadFixtures(fixtureTemplate);
- gl.AjaxLoadingSpinner.init();
+ AjaxLoadingSpinner.init();
});
it('change current icon with spinner icon and disable link while waiting ajax response', (done) => {
diff --git a/spec/javascripts/blob/notebook/index_spec.js b/spec/javascripts/blob/notebook/index_spec.js
index 11f2a950678..c3e67550f05 100644
--- a/spec/javascripts/blob/notebook/index_spec.js
+++ b/spec/javascripts/blob/notebook/index_spec.js
@@ -117,7 +117,7 @@ describe('iPython notebook renderer', () => {
it('shows error message', () => {
expect(
document.querySelector('.md').textContent.trim(),
- ).toBe('An error occured whilst parsing the file.');
+ ).toBe('An error occurred whilst parsing the file.');
});
});
@@ -153,7 +153,7 @@ describe('iPython notebook renderer', () => {
it('shows error message', () => {
expect(
document.querySelector('.md').textContent.trim(),
- ).toBe('An error occured whilst loading the file. Please try again later.');
+ ).toBe('An error occurred whilst loading the file. Please try again later.');
});
});
});
diff --git a/spec/javascripts/blob/pdf/index_spec.js b/spec/javascripts/blob/pdf/index_spec.js
index bbeaf95e68d..51bf3086627 100644
--- a/spec/javascripts/blob/pdf/index_spec.js
+++ b/spec/javascripts/blob/pdf/index_spec.js
@@ -76,7 +76,7 @@ describe('PDF renderer', () => {
it('shows error message', () => {
expect(
document.querySelector('.md').textContent.trim(),
- ).toBe('An error occured whilst loading the file. Please try again later.');
+ ).toBe('An error occurred whilst loading the file. Please try again later.');
});
});
});
diff --git a/spec/javascripts/build_spec.js b/spec/javascripts/build_spec.js
index 35149611095..d5b0f23e7b7 100644
--- a/spec/javascripts/build_spec.js
+++ b/spec/javascripts/build_spec.js
@@ -289,4 +289,18 @@ describe('Build', () => {
});
});
});
+
+ describe('getBuildTrace', () => {
+ it('should request build trace with state parameter', (done) => {
+ spyOn(jQuery, 'ajax').and.callThrough();
+ new Build();
+
+ setTimeout(() => {
+ expect(jQuery.ajax).toHaveBeenCalledWith(
+ { url: `${BUILD_URL}/trace.json`, data: { state: '' } },
+ );
+ done();
+ }, 0);
+ });
+ });
});
diff --git a/spec/javascripts/clusters_spec.js b/spec/javascripts/clusters_spec.js
new file mode 100644
index 00000000000..eb1cd6eb804
--- /dev/null
+++ b/spec/javascripts/clusters_spec.js
@@ -0,0 +1,79 @@
+import Clusters from '~/clusters';
+
+describe('Clusters', () => {
+ let cluster;
+ preloadFixtures('clusters/show_cluster.html.raw');
+
+ beforeEach(() => {
+ loadFixtures('clusters/show_cluster.html.raw');
+ cluster = new Clusters();
+ });
+
+ describe('toggle', () => {
+ it('should update the button and the input field on click', () => {
+ cluster.toggleButton.click();
+
+ expect(
+ cluster.toggleButton.classList,
+ ).not.toContain('checked');
+
+ expect(
+ cluster.toggleInput.getAttribute('value'),
+ ).toEqual('false');
+ });
+ });
+
+ describe('updateContainer', () => {
+ describe('when creating cluster', () => {
+ it('should show the creating container', () => {
+ cluster.updateContainer('creating');
+
+ expect(
+ cluster.creatingContainer.classList.contains('hidden'),
+ ).toBeFalsy();
+ expect(
+ cluster.successContainer.classList.contains('hidden'),
+ ).toBeTruthy();
+ expect(
+ cluster.errorContainer.classList.contains('hidden'),
+ ).toBeTruthy();
+ });
+ });
+
+ describe('when cluster is created', () => {
+ it('should show the success container', () => {
+ cluster.updateContainer('created');
+
+ expect(
+ cluster.creatingContainer.classList.contains('hidden'),
+ ).toBeTruthy();
+ expect(
+ cluster.successContainer.classList.contains('hidden'),
+ ).toBeFalsy();
+ expect(
+ cluster.errorContainer.classList.contains('hidden'),
+ ).toBeTruthy();
+ });
+ });
+
+ describe('when cluster has error', () => {
+ it('should show the error container', () => {
+ cluster.updateContainer('errored', 'this is an error');
+
+ expect(
+ cluster.creatingContainer.classList.contains('hidden'),
+ ).toBeTruthy();
+ expect(
+ cluster.successContainer.classList.contains('hidden'),
+ ).toBeTruthy();
+ expect(
+ cluster.errorContainer.classList.contains('hidden'),
+ ).toBeFalsy();
+
+ expect(
+ cluster.errorReasonContainer.textContent,
+ ).toContain('this is an error');
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/commit/pipelines/pipelines_spec.js b/spec/javascripts/commit/pipelines/pipelines_spec.js
index 454f187ccbc..9fc047b1f5e 100644
--- a/spec/javascripts/commit/pipelines/pipelines_spec.js
+++ b/spec/javascripts/commit/pipelines/pipelines_spec.js
@@ -29,6 +29,8 @@ describe('Pipelines table in Commits and Merge requests', () => {
propsData: {
endpoint: 'endpoint',
helpPagePath: 'foo',
+ emptyStateSvgPath: 'foo',
+ errorStateSvgPath: 'foo',
autoDevopsHelpPath: 'foo',
},
}).$mount();
@@ -65,6 +67,8 @@ describe('Pipelines table in Commits and Merge requests', () => {
propsData: {
endpoint: 'endpoint',
helpPagePath: 'foo',
+ emptyStateSvgPath: 'foo',
+ errorStateSvgPath: 'foo',
autoDevopsHelpPath: 'foo',
},
}).$mount();
@@ -117,6 +121,8 @@ describe('Pipelines table in Commits and Merge requests', () => {
propsData: {
endpoint: 'endpoint',
helpPagePath: 'foo',
+ emptyStateSvgPath: 'foo',
+ errorStateSvgPath: 'foo',
autoDevopsHelpPath: 'foo',
},
}).$mount();
@@ -139,6 +145,8 @@ describe('Pipelines table in Commits and Merge requests', () => {
propsData: {
endpoint: 'endpoint',
helpPagePath: 'foo',
+ emptyStateSvgPath: 'foo',
+ errorStateSvgPath: 'foo',
autoDevopsHelpPath: 'foo',
},
}).$mount();
diff --git a/spec/javascripts/commits_spec.js b/spec/javascripts/commits_spec.js
index ace95000468..e5a5e3293b9 100644
--- a/spec/javascripts/commits_spec.js
+++ b/spec/javascripts/commits_spec.js
@@ -1,77 +1,73 @@
-/* global CommitsList */
-
import 'vendor/jquery.endless-scroll';
import '~/pager';
-import '~/commits';
-
-(() => {
- describe('Commits List', () => {
- beforeEach(() => {
- setFixtures(`
- <form class="commits-search-form" action="/h5bp/html5-boilerplate/commits/master">
- <input id="commits-search">
- </form>
- <ol id="commits-list"></ol>
- `);
- });
+import CommitsList from '~/commits';
- it('should be defined', () => {
- expect(CommitsList).toBeDefined();
- });
+describe('Commits List', () => {
+ beforeEach(() => {
+ setFixtures(`
+ <form class="commits-search-form" action="/h5bp/html5-boilerplate/commits/master">
+ <input id="commits-search">
+ </form>
+ <ol id="commits-list"></ol>
+ `);
+ });
- describe('processCommits', () => {
- it('should join commit headers', () => {
- CommitsList.$contentList = $(`
- <div>
- <li class="commit-header" data-day="2016-09-20">
- <span class="day">20 Sep, 2016</span>
- <span class="commits-count">1 commit</span>
- </li>
- <li class="commit"></li>
- </div>
- `);
+ it('should be defined', () => {
+ expect(CommitsList).toBeDefined();
+ });
- const data = `
+ describe('processCommits', () => {
+ it('should join commit headers', () => {
+ CommitsList.$contentList = $(`
+ <div>
<li class="commit-header" data-day="2016-09-20">
<span class="day">20 Sep, 2016</span>
<span class="commits-count">1 commit</span>
</li>
<li class="commit"></li>
- `;
+ </div>
+ `);
- // The last commit header should be removed
- // since the previous one has the same data-day value.
- expect(CommitsList.processCommits(data).find('li.commit-header').length).toBe(0);
- });
+ const data = `
+ <li class="commit-header" data-day="2016-09-20">
+ <span class="day">20 Sep, 2016</span>
+ <span class="commits-count">1 commit</span>
+ </li>
+ <li class="commit"></li>
+ `;
+
+ // The last commit header should be removed
+ // since the previous one has the same data-day value.
+ expect(CommitsList.processCommits(data).find('li.commit-header').length).toBe(0);
});
+ });
- describe('on entering input', () => {
- let ajaxSpy;
+ describe('on entering input', () => {
+ let ajaxSpy;
- beforeEach(() => {
- CommitsList.init(25);
- CommitsList.searchField.val('');
+ beforeEach(() => {
+ CommitsList.init(25);
+ CommitsList.searchField.val('');
- spyOn(history, 'replaceState').and.stub();
- ajaxSpy = spyOn(jQuery, 'ajax').and.callFake((req) => {
- req.success({
- data: '<li>Result</li>',
- });
+ spyOn(history, 'replaceState').and.stub();
+ ajaxSpy = spyOn(jQuery, 'ajax').and.callFake((req) => {
+ req.success({
+ data: '<li>Result</li>',
});
});
+ });
- it('should save the last search string', () => {
- CommitsList.searchField.val('GitLab');
- CommitsList.filterResults();
- expect(ajaxSpy).toHaveBeenCalled();
- expect(CommitsList.lastSearch).toEqual('GitLab');
- });
+ it('should save the last search string', () => {
+ CommitsList.searchField.val('GitLab');
+ CommitsList.filterResults();
+ expect(ajaxSpy).toHaveBeenCalled();
+ expect(CommitsList.lastSearch).toEqual('GitLab');
+ });
- it('should not make ajax call if the input does not change', () => {
- CommitsList.filterResults();
- expect(ajaxSpy).not.toHaveBeenCalled();
- expect(CommitsList.lastSearch).toEqual('');
- });
+ it('should not make ajax call if the input does not change', () => {
+ CommitsList.filterResults();
+ expect(ajaxSpy).not.toHaveBeenCalled();
+ expect(CommitsList.lastSearch).toEqual('');
});
});
-})();
+});
diff --git a/spec/javascripts/cycle_analytics/banner_spec.js b/spec/javascripts/cycle_analytics/banner_spec.js
new file mode 100644
index 00000000000..fb6b7fee168
--- /dev/null
+++ b/spec/javascripts/cycle_analytics/banner_spec.js
@@ -0,0 +1,41 @@
+import Vue from 'vue';
+import banner from '~/cycle_analytics/components/banner.vue';
+import mountComponent from '../helpers/vue_mount_component_helper';
+
+describe('Cycle analytics banner', () => {
+ let vm;
+
+ beforeEach(() => {
+ const Component = Vue.extend(banner);
+ vm = mountComponent(Component, {
+ documentationLink: 'path',
+ });
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('should render cycle analytics information', () => {
+ expect(
+ vm.$el.querySelector('h4').textContent.trim(),
+ ).toEqual('Introducing Cycle Analytics');
+ expect(
+ vm.$el.querySelector('p').textContent.trim(),
+ ).toContain('Cycle Analytics gives an overview of how much time it takes to go from idea to production in your project.');
+ expect(
+ vm.$el.querySelector('a').textContent.trim(),
+ ).toEqual('Read more');
+ expect(
+ vm.$el.querySelector('a').getAttribute('href'),
+ ).toEqual('path');
+ });
+
+ it('should emit an event when close button is clicked', () => {
+ spyOn(vm, '$emit');
+
+ vm.$el.querySelector('.js-ca-dismiss-button').click();
+
+ expect(vm.$emit).toHaveBeenCalled();
+ });
+});
diff --git a/spec/javascripts/cycle_analytics/limit_warning_component_spec.js b/spec/javascripts/cycle_analytics/limit_warning_component_spec.js
index 2fb9eb0ca85..13e9fe00a00 100644
--- a/spec/javascripts/cycle_analytics/limit_warning_component_spec.js
+++ b/spec/javascripts/cycle_analytics/limit_warning_component_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import Translate from '~/vue_shared/translate';
-import limitWarningComp from '~/cycle_analytics/components/limit_warning_component';
+import limitWarningComp from '~/cycle_analytics/components/limit_warning_component.vue';
Vue.use(Translate);
diff --git a/spec/javascripts/cycle_analytics/total_time_component_spec.js b/spec/javascripts/cycle_analytics/total_time_component_spec.js
new file mode 100644
index 00000000000..31b65fd1cde
--- /dev/null
+++ b/spec/javascripts/cycle_analytics/total_time_component_spec.js
@@ -0,0 +1,58 @@
+import Vue from 'vue';
+import component from '~/cycle_analytics/components/total_time_component.vue';
+import mountComponent from '../helpers/vue_mount_component_helper';
+
+describe('Total time component', () => {
+ let vm;
+ let Component;
+
+ beforeEach(() => {
+ Component = Vue.extend(component);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('With data', () => {
+ it('should render information for days and hours', () => {
+ vm = mountComponent(Component, {
+ time: {
+ days: 3,
+ hours: 4,
+ },
+ });
+
+ expect(vm.$el.textContent.trim()).toEqual('3 days 4 hrs');
+ });
+
+ it('should render information for hours and minutes', () => {
+ vm = mountComponent(Component, {
+ time: {
+ hours: 4,
+ mins: 35,
+ },
+ });
+
+ expect(vm.$el.textContent.trim()).toEqual('4 hrs 35 mins');
+ });
+
+ it('should render information for seconds', () => {
+ vm = mountComponent(Component, {
+ time: {
+ seconds: 45,
+ },
+ });
+
+ expect(vm.$el.textContent.trim()).toEqual('45 s');
+ });
+ });
+
+ describe('Without data', () => {
+ it('should render no information', () => {
+ vm = mountComponent(Component);
+
+ expect(vm.$el.textContent.trim()).toEqual('--');
+ });
+ });
+});
diff --git a/spec/javascripts/environments/folder/environments_folder_view_spec.js b/spec/javascripts/environments/folder/environments_folder_view_spec.js
index fdaea5c0b0c..7e62d356bd2 100644
--- a/spec/javascripts/environments/folder/environments_folder_view_spec.js
+++ b/spec/javascripts/environments/folder/environments_folder_view_spec.js
@@ -14,6 +14,10 @@ describe('Environments Folder View', () => {
window.history.pushState({}, null, 'environments/folders/build');
});
+ afterEach(() => {
+ window.history.pushState({}, null, '/');
+ });
+
let component;
describe('successfull request', () => {
diff --git a/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js b/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js
deleted file mode 100644
index 114d282e48a..00000000000
--- a/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js
+++ /dev/null
@@ -1,219 +0,0 @@
-import Cookies from 'js-cookie';
-import {
- getCookieName,
- getSelector,
- showPopover,
- hidePopover,
- dismiss,
- mouseleave,
- mouseenter,
- setupDismissButton,
-} from '~/feature_highlight/feature_highlight_helper';
-
-describe('feature highlight helper', () => {
- describe('getCookieName', () => {
- it('returns `feature-highlighted-` prefix', () => {
- const cookieId = 'cookieId';
- expect(getCookieName(cookieId)).toEqual(`feature-highlighted-${cookieId}`);
- });
- });
-
- describe('getSelector', () => {
- it('returns js-feature-highlight selector', () => {
- const highlightId = 'highlightId';
- expect(getSelector(highlightId)).toEqual(`.js-feature-highlight[data-highlight=${highlightId}]`);
- });
- });
-
- describe('showPopover', () => {
- it('returns true when popover is shown', () => {
- const context = {
- hasClass: () => false,
- popover: () => {},
- addClass: () => {},
- };
-
- expect(showPopover.call(context)).toEqual(true);
- });
-
- it('returns false when popover is already shown', () => {
- const context = {
- hasClass: () => true,
- };
-
- expect(showPopover.call(context)).toEqual(false);
- });
-
- it('shows popover', (done) => {
- const context = {
- hasClass: () => false,
- popover: () => {},
- addClass: () => {},
- };
-
- spyOn(context, 'popover').and.callFake((method) => {
- expect(method).toEqual('show');
- done();
- });
-
- showPopover.call(context);
- });
-
- it('adds disable-animation and js-popover-show class', (done) => {
- const context = {
- hasClass: () => false,
- popover: () => {},
- addClass: () => {},
- };
-
- spyOn(context, 'addClass').and.callFake((classNames) => {
- expect(classNames).toEqual('disable-animation js-popover-show');
- done();
- });
-
- showPopover.call(context);
- });
- });
-
- describe('hidePopover', () => {
- it('returns true when popover is hidden', () => {
- const context = {
- hasClass: () => true,
- popover: () => {},
- removeClass: () => {},
- };
-
- expect(hidePopover.call(context)).toEqual(true);
- });
-
- it('returns false when popover is already hidden', () => {
- const context = {
- hasClass: () => false,
- };
-
- expect(hidePopover.call(context)).toEqual(false);
- });
-
- it('hides popover', (done) => {
- const context = {
- hasClass: () => true,
- popover: () => {},
- removeClass: () => {},
- };
-
- spyOn(context, 'popover').and.callFake((method) => {
- expect(method).toEqual('hide');
- done();
- });
-
- hidePopover.call(context);
- });
-
- it('removes disable-animation and js-popover-show class', (done) => {
- const context = {
- hasClass: () => true,
- popover: () => {},
- removeClass: () => {},
- };
-
- spyOn(context, 'removeClass').and.callFake((classNames) => {
- expect(classNames).toEqual('disable-animation js-popover-show');
- done();
- });
-
- hidePopover.call(context);
- });
- });
-
- describe('dismiss', () => {
- const context = {
- hide: () => {},
- };
-
- beforeEach(() => {
- spyOn(Cookies, 'set').and.callFake(() => {});
- spyOn(hidePopover, 'call').and.callFake(() => {});
- spyOn(context, 'hide').and.callFake(() => {});
- dismiss.call(context);
- });
-
- it('sets cookie to true', () => {
- expect(Cookies.set).toHaveBeenCalled();
- });
-
- it('calls hide popover', () => {
- expect(hidePopover.call).toHaveBeenCalled();
- });
-
- it('calls hide', () => {
- expect(context.hide).toHaveBeenCalled();
- });
- });
-
- describe('mouseleave', () => {
- it('calls hide popover if .popover:hover is false', () => {
- const fakeJquery = {
- length: 0,
- };
-
- spyOn($.fn, 'init').and.callFake(selector => (selector === '.popover:hover' ? fakeJquery : $.fn));
- spyOn(hidePopover, 'call');
- mouseleave();
- expect(hidePopover.call).toHaveBeenCalled();
- });
-
- it('does not call hide popover if .popover:hover is true', () => {
- const fakeJquery = {
- length: 1,
- };
-
- spyOn($.fn, 'init').and.callFake(selector => (selector === '.popover:hover' ? fakeJquery : $.fn));
- spyOn(hidePopover, 'call');
- mouseleave();
- expect(hidePopover.call).not.toHaveBeenCalled();
- });
- });
-
- describe('mouseenter', () => {
- const context = {};
-
- it('shows popover', () => {
- spyOn(showPopover, 'call').and.returnValue(false);
- mouseenter.call(context);
- expect(showPopover.call).toHaveBeenCalled();
- });
-
- it('registers mouseleave event if popover is showed', (done) => {
- spyOn(showPopover, 'call').and.returnValue(true);
- spyOn($.fn, 'on').and.callFake((eventName) => {
- expect(eventName).toEqual('mouseleave');
- done();
- });
- mouseenter.call(context);
- });
-
- it('does not register mouseleave event if popover is not showed', () => {
- spyOn(showPopover, 'call').and.returnValue(false);
- const spy = spyOn($.fn, 'on').and.callFake(() => {});
- mouseenter.call(context);
- expect(spy).not.toHaveBeenCalled();
- });
- });
-
- describe('setupDismissButton', () => {
- it('registers click event callback', (done) => {
- const context = {
- getAttribute: () => 'popoverId',
- dataset: {
- highlight: 'cookieId',
- },
- };
-
- spyOn($.fn, 'on').and.callFake((event) => {
- expect(event).toEqual('click');
- done();
- });
- setupDismissButton.call(context);
- });
- });
-});
diff --git a/spec/javascripts/feature_highlight/feature_highlight_options_spec.js b/spec/javascripts/feature_highlight/feature_highlight_options_spec.js
deleted file mode 100644
index 7feb361edec..00000000000
--- a/spec/javascripts/feature_highlight/feature_highlight_options_spec.js
+++ /dev/null
@@ -1,45 +0,0 @@
-import domContentLoaded from '~/feature_highlight/feature_highlight_options';
-import bp from '~/breakpoints';
-
-describe('feature highlight options', () => {
- describe('domContentLoaded', () => {
- const highlightOrder = [];
-
- beforeEach(() => {
- // Check for when highlightFeatures is called
- spyOn(highlightOrder, 'find').and.callFake(() => {});
- });
-
- it('should not call highlightFeatures when breakpoint is xs', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('xs');
-
- domContentLoaded(highlightOrder);
- expect(bp.getBreakpointSize).toHaveBeenCalled();
- expect(highlightOrder.find).not.toHaveBeenCalled();
- });
-
- it('should not call highlightFeatures when breakpoint is sm', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('sm');
-
- domContentLoaded(highlightOrder);
- expect(bp.getBreakpointSize).toHaveBeenCalled();
- expect(highlightOrder.find).not.toHaveBeenCalled();
- });
-
- it('should not call highlightFeatures when breakpoint is md', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('md');
-
- domContentLoaded(highlightOrder);
- expect(bp.getBreakpointSize).toHaveBeenCalled();
- expect(highlightOrder.find).not.toHaveBeenCalled();
- });
-
- it('should call highlightFeatures when breakpoint is lg', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('lg');
-
- domContentLoaded(highlightOrder);
- expect(bp.getBreakpointSize).toHaveBeenCalled();
- expect(highlightOrder.find).toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/javascripts/feature_highlight/feature_highlight_spec.js b/spec/javascripts/feature_highlight/feature_highlight_spec.js
deleted file mode 100644
index 6abe8425ee7..00000000000
--- a/spec/javascripts/feature_highlight/feature_highlight_spec.js
+++ /dev/null
@@ -1,122 +0,0 @@
-import Cookies from 'js-cookie';
-import * as featureHighlightHelper from '~/feature_highlight/feature_highlight_helper';
-import * as featureHighlight from '~/feature_highlight/feature_highlight';
-
-describe('feature highlight', () => {
- describe('setupFeatureHighlightPopover', () => {
- const selector = '.js-feature-highlight[data-highlight=test]';
- beforeEach(() => {
- setFixtures(`
- <div>
- <div class="js-feature-highlight" data-highlight="test" disabled>
- Trigger
- </div>
- </div>
- <div class="feature-highlight-popover-content">
- Content
- <div class="dismiss-feature-highlight">
- Dismiss
- </div>
- </div>
- `);
- spyOn(window, 'addEventListener');
- spyOn(window, 'removeEventListener');
- featureHighlight.setupFeatureHighlightPopover('test', 0);
- });
-
- it('setups popover content', () => {
- const $popoverContent = $('.feature-highlight-popover-content');
- const outerHTML = $popoverContent.prop('outerHTML');
-
- expect($(selector).data('content')).toEqual(outerHTML);
- });
-
- it('setups mouseenter', () => {
- const showSpy = spyOn(featureHighlightHelper.showPopover, 'call');
- $(selector).trigger('mouseenter');
-
- expect(showSpy).toHaveBeenCalled();
- });
-
- it('setups debounced mouseleave', (done) => {
- const hideSpy = spyOn(featureHighlightHelper.hidePopover, 'call');
- $(selector).trigger('mouseleave');
-
- // Even though we've set the debounce to 0ms, setTimeout is needed for the debounce
- setTimeout(() => {
- expect(hideSpy).toHaveBeenCalled();
- done();
- }, 0);
- });
-
- it('setups inserted.bs.popover', () => {
- $(selector).trigger('mouseenter');
- const popoverId = $(selector).attr('aria-describedby');
- const spyEvent = spyOnEvent(`#${popoverId} .dismiss-feature-highlight`, 'click');
-
- $(`#${popoverId} .dismiss-feature-highlight`).click();
- expect(spyEvent).toHaveBeenTriggered();
- });
-
- it('setups show.bs.popover', () => {
- $(selector).trigger('show.bs.popover');
- expect(window.addEventListener).toHaveBeenCalledWith('scroll', jasmine.any(Function));
- });
-
- it('setups hide.bs.popover', () => {
- $(selector).trigger('hide.bs.popover');
- expect(window.removeEventListener).toHaveBeenCalledWith('scroll', jasmine.any(Function));
- });
-
- it('removes disabled attribute', () => {
- expect($('.js-feature-highlight').is(':disabled')).toEqual(false);
- });
-
- it('displays popover', () => {
- expect($(selector).attr('aria-describedby')).toBeFalsy();
- $(selector).trigger('mouseenter');
- expect($(selector).attr('aria-describedby')).toBeTruthy();
- });
- });
-
- describe('shouldHighlightFeature', () => {
- it('should return false if element is not found', () => {
- spyOn(document, 'querySelector').and.returnValue(null);
- spyOn(Cookies, 'get').and.returnValue(null);
-
- expect(featureHighlight.shouldHighlightFeature()).toBeFalsy();
- });
-
- it('should return false if previouslyDismissed', () => {
- spyOn(document, 'querySelector').and.returnValue(document.createElement('div'));
- spyOn(Cookies, 'get').and.returnValue('true');
-
- expect(featureHighlight.shouldHighlightFeature()).toBeFalsy();
- });
-
- it('should return true if element is found and not previouslyDismissed', () => {
- spyOn(document, 'querySelector').and.returnValue(document.createElement('div'));
- spyOn(Cookies, 'get').and.returnValue(null);
-
- expect(featureHighlight.shouldHighlightFeature()).toBeTruthy();
- });
- });
-
- describe('highlightFeatures', () => {
- it('calls setupFeatureHighlightPopover if shouldHighlightFeature returns true', () => {
- // Mimic shouldHighlightFeature set to true
- const highlightOrder = ['issue-boards'];
- spyOn(highlightOrder, 'find').and.returnValue(highlightOrder[0]);
-
- expect(featureHighlight.highlightFeatures(highlightOrder)).toEqual(true);
- });
-
- it('does not call setupFeatureHighlightPopover if shouldHighlightFeature returns false', () => {
- // Mimic shouldHighlightFeature set to false
- const highlightOrder = ['issue-boards'];
- spyOn(highlightOrder, 'find').and.returnValue(null);
-
- expect(featureHighlight.highlightFeatures(highlightOrder)).toEqual(false);
- });
- });
-});
diff --git a/spec/javascripts/filtered_search/dropdown_user_spec.js b/spec/javascripts/filtered_search/dropdown_user_spec.js
index b3c9bca64cc..02415485d19 100644
--- a/spec/javascripts/filtered_search/dropdown_user_spec.js
+++ b/spec/javascripts/filtered_search/dropdown_user_spec.js
@@ -10,6 +10,7 @@ describe('Dropdown User', () => {
beforeEach(() => {
spyOn(gl.DropdownUser.prototype, 'bindEvents').and.callFake(() => {});
spyOn(gl.DropdownUser.prototype, 'getProjectId').and.callFake(() => {});
+ spyOn(gl.DropdownUser.prototype, 'getGroupId').and.callFake(() => {});
spyOn(gl.DropdownUtils, 'getSearchInput').and.callFake(() => {});
dropdownUser = new gl.DropdownUser({
@@ -38,6 +39,7 @@ describe('Dropdown User', () => {
beforeEach(() => {
spyOn(gl.DropdownUser.prototype, 'bindEvents').and.callFake(() => {});
spyOn(gl.DropdownUser.prototype, 'getProjectId').and.callFake(() => {});
+ spyOn(gl.DropdownUser.prototype, 'getGroupId').and.callFake(() => {});
});
it('should return endpoint', () => {
diff --git a/spec/javascripts/filtered_search/filtered_search_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
index 16ae649ee60..f209328dee1 100644
--- a/spec/javascripts/filtered_search/filtered_search_manager_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
@@ -411,4 +411,26 @@ describe('Filtered Search Manager', () => {
expect(document.querySelector('.filtered-search-box').classList.contains('focus')).toEqual(false);
});
});
+
+ describe('getAllParams', () => {
+ beforeEach(() => {
+ this.paramsArr = ['key=value', 'otherkey=othervalue'];
+
+ initializeManager();
+ });
+
+ it('correctly modifies params when custom modifier is passed', () => {
+ const modifiedParams = manager.getAllParams.call({
+ modifyUrlParams: paramsArr => paramsArr.reverse(),
+ }, [].concat(this.paramsArr));
+
+ expect(modifiedParams[0]).toBe(this.paramsArr[1]);
+ });
+
+ it('does not modify params when no custom modifier is passed', () => {
+ const modifiedParams = manager.getAllParams.call({}, this.paramsArr);
+
+ expect(modifiedParams[1]).toBe(this.paramsArr[1]);
+ });
+ });
});
diff --git a/spec/javascripts/fixtures/clusters.rb b/spec/javascripts/fixtures/clusters.rb
new file mode 100644
index 00000000000..5774f36f026
--- /dev/null
+++ b/spec/javascripts/fixtures/clusters.rb
@@ -0,0 +1,34 @@
+require 'spec_helper'
+
+describe Projects::ClustersController, '(JavaScript fixtures)', type: :controller do
+ include JavaScriptFixturesHelpers
+
+ let(:admin) { create(:admin) }
+ let(:namespace) { create(:namespace, name: 'frontend-fixtures') }
+ let(:project) { create(:project, :repository, namespace: namespace) }
+ let(:cluster) { project.create_cluster!(gcp_cluster_name: 'gke-test-creation-1', gcp_project_id: 'gitlab-internal-153318', gcp_cluster_zone: 'us-central1-a', gcp_cluster_size: '1', project_namespace: 'aaa', gcp_machine_type: 'n1-standard-1') }
+
+ render_views
+
+ before(:all) do
+ clean_frontend_fixtures('clusters/')
+ end
+
+ before do
+ sign_in(admin)
+ end
+
+ after do
+ remove_repository(project)
+ end
+
+ it 'clusters/show_cluster.html.raw' do |example|
+ get :show,
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: cluster
+
+ expect(response).to be_success
+ store_frontend_fixture(response, example.description)
+ end
+end
diff --git a/spec/javascripts/fixtures/dashboard.rb b/spec/javascripts/fixtures/dashboard.rb
deleted file mode 100644
index 7fa351680c9..00000000000
--- a/spec/javascripts/fixtures/dashboard.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-require 'spec_helper'
-
-describe Dashboard::ProjectsController, '(JavaScript fixtures)', type: :controller do
- include JavaScriptFixturesHelpers
-
- let(:admin) { create(:admin) }
- let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
- let(:project) { create(:project, namespace: namespace, path: 'builds-project') }
-
- render_views
-
- before(:all) do
- clean_frontend_fixtures('dashboard/')
- end
-
- before do
- sign_in(admin)
- end
-
- after do
- remove_repository(project)
- end
-
- it 'dashboard/user-callout.html.raw' do |example|
- rendered = render_template('shared/_user_callout')
- store_frontend_fixture(rendered, example.description)
- end
-
- private
-
- def render_template(template_file_name)
- controller.prepend_view_path(JavaScriptFixturesHelpers::FIXTURE_PATH)
- controller.render_to_string(template_file_name, layout: false)
- end
-end
diff --git a/spec/javascripts/fixtures/merge_requests.rb b/spec/javascripts/fixtures/merge_requests.rb
index 4bc2205e642..3fd16d76f51 100644
--- a/spec/javascripts/fixtures/merge_requests.rb
+++ b/spec/javascripts/fixtures/merge_requests.rb
@@ -41,6 +41,12 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont
remove_repository(project)
end
+ it 'merge_requests/merge_request_of_current_user.html.raw' do |example|
+ merge_request.update(author: admin)
+
+ render_merge_request(example.description, merge_request)
+ end
+
it 'merge_requests/merge_request_with_task_list.html.raw' do |example|
create(:ci_build, :pending, pipeline: pipeline)
diff --git a/spec/javascripts/fixtures/pipelines.html.haml b/spec/javascripts/fixtures/pipelines.html.haml
index 418a38a0e2e..97b0c25c923 100644
--- a/spec/javascripts/fixtures/pipelines.html.haml
+++ b/spec/javascripts/fixtures/pipelines.html.haml
@@ -2,6 +2,8 @@
#pipelines-list-vue{ data: { endpoint: 'foo',
"css-class" => 'foo',
"help-page-path" => 'foo',
+ "empty-state-svg-path" => 'foo',
+ "error-state-svg-path" => 'foo',
"new-pipeline-path" => 'foo',
"can-create-pipeline" => 'true',
"all-path" => 'foo',
diff --git a/spec/javascripts/fly_out_nav_spec.js b/spec/javascripts/fly_out_nav_spec.js
index f8b37c0edde..4f20e31f511 100644
--- a/spec/javascripts/fly_out_nav_spec.js
+++ b/spec/javascripts/fly_out_nav_spec.js
@@ -73,6 +73,12 @@ describe('Fly out sidebar navigation', () => {
).toBe(0);
});
+ it('returns 0 if mousePos is empty', () => {
+ expect(
+ getHideSubItemsInterval(),
+ ).toBe(0);
+ });
+
it('returns 0 when mouse above sub-items', () => {
showSubLevelItems(el);
documentMouseMove({
@@ -271,12 +277,19 @@ describe('Fly out sidebar navigation', () => {
});
it('sets transform of sub-items', () => {
+ const sidebar = document.createElement('div');
const subItems = el.querySelector('.sidebar-sub-level-items');
+
+ sidebar.style.width = '200px';
+
+ document.body.appendChild(sidebar);
+
+ setSidebar(sidebar);
showSubLevelItems(el);
expect(
subItems.style.transform,
- ).toBe(`translate3d(0px, ${Math.floor(el.getBoundingClientRect().top) - getHeaderHeight()}px, 0px)`);
+ ).toBe(`translate3d(200px, ${Math.floor(el.getBoundingClientRect().top) - getHeaderHeight()}px, 0px)`);
});
it('sets is-above when element is above', () => {
diff --git a/spec/javascripts/notes/stores/helpers.js b/spec/javascripts/helpers/vuex_action_helper.js
index 2d386fe1da5..2d386fe1da5 100644
--- a/spec/javascripts/notes/stores/helpers.js
+++ b/spec/javascripts/helpers/vuex_action_helper.js
diff --git a/spec/javascripts/image_diff/helpers/badge_helper_spec.js b/spec/javascripts/image_diff/helpers/badge_helper_spec.js
new file mode 100644
index 00000000000..fb9c7e59031
--- /dev/null
+++ b/spec/javascripts/image_diff/helpers/badge_helper_spec.js
@@ -0,0 +1,132 @@
+import * as badgeHelper from '~/image_diff/helpers/badge_helper';
+import * as mockData from '../mock_data';
+
+describe('badge helper', () => {
+ const { coordinate, noteId, badgeText, badgeNumber } = mockData;
+ let containerEl;
+ let buttonEl;
+
+ beforeEach(() => {
+ containerEl = document.createElement('div');
+ });
+
+ describe('createImageBadge', () => {
+ beforeEach(() => {
+ buttonEl = badgeHelper.createImageBadge(noteId, coordinate);
+ });
+
+ it('should create button', () => {
+ expect(buttonEl.tagName).toEqual('BUTTON');
+ expect(buttonEl.getAttribute('type')).toEqual('button');
+ });
+
+ it('should set disabled attribute', () => {
+ expect(buttonEl.hasAttribute('disabled')).toEqual(true);
+ });
+
+ it('should set noteId', () => {
+ expect(buttonEl.dataset.noteId).toEqual(noteId);
+ });
+
+ it('should set coordinate', () => {
+ expect(buttonEl.style.left).toEqual(`${coordinate.x}px`);
+ expect(buttonEl.style.top).toEqual(`${coordinate.y}px`);
+ });
+
+ describe('classNames', () => {
+ it('should set .js-image-badge by default', () => {
+ expect(buttonEl.className).toEqual('js-image-badge');
+ });
+
+ it('should add additional class names if parameter is passed', () => {
+ const classNames = ['first-class', 'second-class'];
+ buttonEl = badgeHelper.createImageBadge(noteId, coordinate, classNames);
+
+ expect(buttonEl.className).toEqual(classNames.concat('js-image-badge').join(' '));
+ });
+ });
+ });
+
+ describe('addImageBadge', () => {
+ beforeEach(() => {
+ badgeHelper.addImageBadge(containerEl, {
+ coordinate,
+ badgeText,
+ noteId,
+ });
+ buttonEl = containerEl.querySelector('button');
+ });
+
+ it('should append button to container', () => {
+ expect(buttonEl).toBeDefined();
+ });
+
+ it('should set the badge text', () => {
+ expect(buttonEl.innerText).toEqual(badgeText);
+ });
+
+ it('should set the button coordinates', () => {
+ expect(buttonEl.style.left).toEqual(`${coordinate.x}px`);
+ expect(buttonEl.style.top).toEqual(`${coordinate.y}px`);
+ });
+
+ it('should set the button noteId', () => {
+ expect(buttonEl.dataset.noteId).toEqual(noteId);
+ });
+ });
+
+ describe('addImageCommentBadge', () => {
+ beforeEach(() => {
+ badgeHelper.addImageCommentBadge(containerEl, {
+ coordinate,
+ noteId,
+ });
+ buttonEl = containerEl.querySelector('button');
+ });
+
+ it('should append icon button to container', () => {
+ expect(buttonEl).toBeDefined();
+ });
+
+ it('should create icon comment button', () => {
+ const iconEl = buttonEl.querySelector('i');
+ expect(iconEl).toBeDefined();
+ expect(iconEl.classList.contains('fa')).toEqual(true);
+ expect(iconEl.classList.contains('fa-comment-o')).toEqual(true);
+ });
+
+ it('should have .image-comment-badge.inverted in button class', () => {
+ expect(buttonEl.classList.contains('image-comment-badge')).toEqual(true);
+ expect(buttonEl.classList.contains('inverted')).toEqual(true);
+ });
+ });
+
+ describe('addAvatarBadge', () => {
+ let avatarBadgeEl;
+
+ beforeEach(() => {
+ containerEl.innerHTML = `
+ <div id="${noteId}">
+ <div class="badge hidden">
+ </div>
+ </div>
+ `;
+
+ badgeHelper.addAvatarBadge(containerEl, {
+ detail: {
+ noteId,
+ badgeNumber,
+ },
+ });
+ avatarBadgeEl = containerEl.querySelector(`#${noteId} .badge`);
+ });
+
+ it('should update badge number', () => {
+ expect(avatarBadgeEl.innerText).toEqual(badgeNumber.toString());
+ });
+
+ it('should remove hidden class', () => {
+ expect(avatarBadgeEl.classList.contains('hidden')).toEqual(false);
+ });
+ });
+});
diff --git a/spec/javascripts/image_diff/helpers/comment_indicator_helper_spec.js b/spec/javascripts/image_diff/helpers/comment_indicator_helper_spec.js
new file mode 100644
index 00000000000..a284b981d2a
--- /dev/null
+++ b/spec/javascripts/image_diff/helpers/comment_indicator_helper_spec.js
@@ -0,0 +1,139 @@
+import * as commentIndicatorHelper from '~/image_diff/helpers/comment_indicator_helper';
+import * as mockData from '../mock_data';
+
+describe('commentIndicatorHelper', () => {
+ const { coordinate } = mockData;
+ let containerEl;
+
+ beforeEach(() => {
+ containerEl = document.createElement('div');
+ });
+
+ describe('addCommentIndicator', () => {
+ let buttonEl;
+
+ beforeEach(() => {
+ commentIndicatorHelper.addCommentIndicator(containerEl, coordinate);
+ buttonEl = containerEl.querySelector('button');
+ });
+
+ it('should append button to container', () => {
+ expect(buttonEl).toBeDefined();
+ });
+
+ describe('button', () => {
+ it('should set coordinate', () => {
+ expect(buttonEl.style.left).toEqual(`${coordinate.x}px`);
+ expect(buttonEl.style.top).toEqual(`${coordinate.y}px`);
+ });
+
+ it('should contain image-comment-dark svg', () => {
+ const svgEl = buttonEl.querySelector('svg');
+ expect(svgEl).toBeDefined();
+
+ const svgLink = svgEl.querySelector('use').getAttribute('xlink:href');
+ expect(svgLink.indexOf('image-comment-dark') !== -1).toEqual(true);
+ });
+ });
+ });
+
+ describe('removeCommentIndicator', () => {
+ it('should return removed false if there is no comment-indicator', () => {
+ const result = commentIndicatorHelper.removeCommentIndicator(containerEl);
+ expect(result.removed).toEqual(false);
+ });
+
+ describe('has comment indicator', () => {
+ let result;
+
+ beforeEach(() => {
+ containerEl.innerHTML = `
+ <div class="comment-indicator" style="left:${coordinate.x}px; top: ${coordinate.y}px;">
+ <img src="${gl.TEST_HOST}/image.png">
+ </div>
+ `;
+ result = commentIndicatorHelper.removeCommentIndicator(containerEl);
+ });
+
+ it('should remove comment indicator', () => {
+ expect(containerEl.querySelector('.comment-indicator')).toBeNull();
+ });
+
+ it('should return removed true', () => {
+ expect(result.removed).toEqual(true);
+ });
+
+ it('should return indicator meta', () => {
+ expect(result.x).toEqual(coordinate.x);
+ expect(result.y).toEqual(coordinate.y);
+ expect(result.image).toBeDefined();
+ expect(result.image.width).toBeDefined();
+ expect(result.image.height).toBeDefined();
+ });
+ });
+ });
+
+ describe('showCommentIndicator', () => {
+ describe('commentIndicator exists', () => {
+ beforeEach(() => {
+ containerEl.innerHTML = `
+ <button class="comment-indicator"></button>
+ `;
+ commentIndicatorHelper.showCommentIndicator(containerEl, coordinate);
+ });
+
+ it('should set commentIndicator coordinates', () => {
+ const commentIndicatorEl = containerEl.querySelector('.comment-indicator');
+ expect(commentIndicatorEl.style.left).toEqual(`${coordinate.x}px`);
+ expect(commentIndicatorEl.style.top).toEqual(`${coordinate.y}px`);
+ });
+ });
+
+ describe('commentIndicator does not exist', () => {
+ beforeEach(() => {
+ commentIndicatorHelper.showCommentIndicator(containerEl, coordinate);
+ });
+
+ it('should addCommentIndicator', () => {
+ const buttonEl = containerEl.querySelector('.comment-indicator');
+ expect(buttonEl).toBeDefined();
+ expect(buttonEl.style.left).toEqual(`${coordinate.x}px`);
+ expect(buttonEl.style.top).toEqual(`${coordinate.y}px`);
+ });
+ });
+ });
+
+ describe('commentIndicatorOnClick', () => {
+ let event;
+ let textAreaEl;
+
+ beforeEach(() => {
+ containerEl.innerHTML = `
+ <div class="diff-viewer">
+ <button></button>
+ <div class="note-container">
+ <textarea class="note-textarea"></textarea>
+ </div>
+ </div>
+ `;
+ textAreaEl = containerEl.querySelector('textarea');
+
+ event = {
+ stopPropagation: () => {},
+ currentTarget: containerEl.querySelector('button'),
+ };
+
+ spyOn(event, 'stopPropagation');
+ spyOn(textAreaEl, 'focus');
+ commentIndicatorHelper.commentIndicatorOnClick(event);
+ });
+
+ it('should stopPropagation', () => {
+ expect(event.stopPropagation).toHaveBeenCalled();
+ });
+
+ it('should focus textAreaEl', () => {
+ expect(textAreaEl.focus).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/javascripts/image_diff/helpers/dom_helper_spec.js b/spec/javascripts/image_diff/helpers/dom_helper_spec.js
new file mode 100644
index 00000000000..8dde924e8ae
--- /dev/null
+++ b/spec/javascripts/image_diff/helpers/dom_helper_spec.js
@@ -0,0 +1,118 @@
+import * as domHelper from '~/image_diff/helpers/dom_helper';
+import * as mockData from '../mock_data';
+
+describe('domHelper', () => {
+ const { imageMeta, badgeNumber } = mockData;
+
+ describe('setPositionDataAttribute', () => {
+ let containerEl;
+ let attributeAfterCall;
+ const position = {
+ myProperty: 'myProperty',
+ };
+
+ beforeEach(() => {
+ containerEl = document.createElement('div');
+ containerEl.dataset.position = JSON.stringify(position);
+ domHelper.setPositionDataAttribute(containerEl, imageMeta);
+ attributeAfterCall = JSON.parse(containerEl.dataset.position);
+ });
+
+ it('should set x, y, width, height', () => {
+ expect(attributeAfterCall.x).toEqual(imageMeta.x);
+ expect(attributeAfterCall.y).toEqual(imageMeta.y);
+ expect(attributeAfterCall.width).toEqual(imageMeta.width);
+ expect(attributeAfterCall.height).toEqual(imageMeta.height);
+ });
+
+ it('should not override other properties', () => {
+ expect(attributeAfterCall.myProperty).toEqual('myProperty');
+ });
+ });
+
+ describe('updateDiscussionAvatarBadgeNumber', () => {
+ let discussionEl;
+
+ beforeEach(() => {
+ discussionEl = document.createElement('div');
+ discussionEl.innerHTML = `
+ <a href="#" class="image-diff-avatar-link">
+ <div class="badge"></div>
+ </a>
+ `;
+ domHelper.updateDiscussionAvatarBadgeNumber(discussionEl, badgeNumber);
+ });
+
+ it('should update avatar badge number', () => {
+ expect(discussionEl.querySelector('.badge').innerText).toEqual(badgeNumber.toString());
+ });
+ });
+
+ describe('updateDiscussionBadgeNumber', () => {
+ let discussionEl;
+
+ beforeEach(() => {
+ discussionEl = document.createElement('div');
+ discussionEl.innerHTML = `
+ <div class="badge"></div>
+ `;
+ domHelper.updateDiscussionBadgeNumber(discussionEl, badgeNumber);
+ });
+
+ it('should update discussion badge number', () => {
+ expect(discussionEl.querySelector('.badge').innerText).toEqual(badgeNumber.toString());
+ });
+ });
+
+ describe('toggleCollapsed', () => {
+ let element;
+ let discussionNotesEl;
+
+ beforeEach(() => {
+ element = document.createElement('div');
+ element.innerHTML = `
+ <div class="discussion-notes">
+ <button></button>
+ <form class="discussion-form"></form>
+ </div>
+ `;
+ discussionNotesEl = element.querySelector('.discussion-notes');
+ });
+
+ describe('not collapsed', () => {
+ beforeEach(() => {
+ domHelper.toggleCollapsed({
+ currentTarget: element.querySelector('button'),
+ });
+ });
+
+ it('should add collapsed class', () => {
+ expect(discussionNotesEl.classList.contains('collapsed')).toEqual(true);
+ });
+
+ it('should force formEl to display none', () => {
+ const formEl = element.querySelector('.discussion-form');
+ expect(formEl.style.display).toEqual('none');
+ });
+ });
+
+ describe('collapsed', () => {
+ beforeEach(() => {
+ discussionNotesEl.classList.add('collapsed');
+
+ domHelper.toggleCollapsed({
+ currentTarget: element.querySelector('button'),
+ });
+ });
+
+ it('should remove collapsed class', () => {
+ expect(discussionNotesEl.classList.contains('collapsed')).toEqual(false);
+ });
+
+ it('should force formEl to display block', () => {
+ const formEl = element.querySelector('.discussion-form');
+ expect(formEl.style.display).toEqual('block');
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/image_diff/helpers/utils_helper_spec.js b/spec/javascripts/image_diff/helpers/utils_helper_spec.js
new file mode 100644
index 00000000000..56d77a05c4c
--- /dev/null
+++ b/spec/javascripts/image_diff/helpers/utils_helper_spec.js
@@ -0,0 +1,207 @@
+import * as utilsHelper from '~/image_diff/helpers/utils_helper';
+import ImageDiff from '~/image_diff/image_diff';
+import ReplacedImageDiff from '~/image_diff/replaced_image_diff';
+import ImageBadge from '~/image_diff/image_badge';
+import * as mockData from '../mock_data';
+
+describe('utilsHelper', () => {
+ const {
+ noteId,
+ discussionId,
+ image,
+ imageProperties,
+ imageMeta,
+ } = mockData;
+
+ describe('resizeCoordinatesToImageElement', () => {
+ let result;
+
+ beforeEach(() => {
+ result = utilsHelper.resizeCoordinatesToImageElement(image, imageMeta);
+ });
+
+ it('should return x based on widthRatio', () => {
+ expect(result.x).toEqual(imageMeta.x * 0.5);
+ });
+
+ it('should return y based on heightRatio', () => {
+ expect(result.y).toEqual(imageMeta.y * 0.5);
+ });
+
+ it('should return image width', () => {
+ expect(result.width).toEqual(image.width);
+ });
+
+ it('should return image height', () => {
+ expect(result.height).toEqual(image.height);
+ });
+ });
+
+ describe('generateBadgeFromDiscussionDOM', () => {
+ let discussionEl;
+ let result;
+
+ beforeEach(() => {
+ const imageFrameEl = document.createElement('div');
+ imageFrameEl.innerHTML = `
+ <img src="${gl.TEST_HOST}/image.png">
+ `;
+ discussionEl = document.createElement('div');
+ discussionEl.dataset.discussionId = discussionId;
+ discussionEl.innerHTML = `
+ <div class="note" id="${noteId}"></div>
+ `;
+ discussionEl.dataset.position = JSON.stringify(imageMeta);
+ result = utilsHelper.generateBadgeFromDiscussionDOM(imageFrameEl, discussionEl);
+ });
+
+ it('should return actual image properties', () => {
+ const { actual } = result;
+ expect(actual.x).toEqual(imageMeta.x);
+ expect(actual.y).toEqual(imageMeta.y);
+ expect(actual.width).toEqual(imageMeta.width);
+ expect(actual.height).toEqual(imageMeta.height);
+ });
+
+ it('should return browser image properties', () => {
+ const { browser } = result;
+ expect(browser.x).toBeDefined();
+ expect(browser.y).toBeDefined();
+ expect(browser.width).toBeDefined();
+ expect(browser.height).toBeDefined();
+ });
+
+ it('should return instance of ImageBadge', () => {
+ expect(result instanceof ImageBadge).toEqual(true);
+ });
+
+ it('should return noteId', () => {
+ expect(result.noteId).toEqual(noteId);
+ });
+
+ it('should return discussionId', () => {
+ expect(result.discussionId).toEqual(discussionId);
+ });
+ });
+
+ describe('getTargetSelection', () => {
+ let containerEl;
+
+ beforeEach(() => {
+ containerEl = {
+ querySelector: () => imageProperties,
+ };
+ });
+
+ function generateEvent(offsetX, offsetY) {
+ return {
+ currentTarget: containerEl,
+ offsetX,
+ offsetY,
+ };
+ }
+
+ it('should return browser properties', () => {
+ const event = generateEvent(25, 25);
+ const result = utilsHelper.getTargetSelection(event);
+
+ const { browser } = result;
+ expect(browser.x).toEqual(event.offsetX);
+ expect(browser.y).toEqual(event.offsetY);
+ expect(browser.width).toEqual(imageProperties.width);
+ expect(browser.height).toEqual(imageProperties.height);
+ });
+
+ it('should return resized actual image properties', () => {
+ const event = generateEvent(50, 50);
+ const result = utilsHelper.getTargetSelection(event);
+
+ const { actual } = result;
+ expect(actual.x).toEqual(100);
+ expect(actual.y).toEqual(100);
+ expect(actual.width).toEqual(imageProperties.naturalWidth);
+ expect(actual.height).toEqual(imageProperties.naturalHeight);
+ });
+
+ describe('normalize coordinates', () => {
+ it('should return x = 0 if x < 0', () => {
+ const event = generateEvent(-5, 50);
+ const result = utilsHelper.getTargetSelection(event);
+ expect(result.browser.x).toEqual(0);
+ });
+
+ it('should return x = width if x > width', () => {
+ const event = generateEvent(1000, 50);
+ const result = utilsHelper.getTargetSelection(event);
+ expect(result.browser.x).toEqual(imageProperties.width);
+ });
+
+ it('should return y = 0 if y < 0', () => {
+ const event = generateEvent(50, -10);
+ const result = utilsHelper.getTargetSelection(event);
+ expect(result.browser.y).toEqual(0);
+ });
+
+ it('should return y = height if y > height', () => {
+ const event = generateEvent(50, 1000);
+ const result = utilsHelper.getTargetSelection(event);
+ expect(result.browser.y).toEqual(imageProperties.height);
+ });
+ });
+ });
+
+ describe('initImageDiff', () => {
+ let glCache;
+ let fileEl;
+
+ beforeEach(() => {
+ window.gl = window.gl || {};
+ glCache = window.gl;
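+ // stub gl.ImageFile so initImageDiff can call it without pulling in the legacy implementation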
+ window.gl.ImageFile = () => {};
+ fileEl = document.createElement('div');
+ fileEl.innerHTML = `
+ <div class="diff-file"></div>
+ `;
+
+ spyOn(ImageDiff.prototype, 'init').and.callFake(() => {});
+ spyOn(ReplacedImageDiff.prototype, 'init').and.callFake(() => {});
+ });
+
+ afterEach(() => {
+ window.gl = glCache;
+ });
+
+ it('should initialize gl.ImageFile', () => {
+ spyOn(window.gl, 'ImageFile');
+
+ utilsHelper.initImageDiff(fileEl, false, false);
+ expect(gl.ImageFile).toHaveBeenCalled();
+ });
+
+ it('should initialize ImageDiff if js-single-image', () => {
+ const diffFileEl = fileEl.querySelector('.diff-file');
+ diffFileEl.innerHTML = `
+ <div class="js-single-image">
+ </div>
+ `;
+
+ const imageDiff = utilsHelper.initImageDiff(fileEl, true, false);
+ expect(ImageDiff.prototype.init).toHaveBeenCalled();
+ expect(imageDiff.canCreateNote).toEqual(true);
+ expect(imageDiff.renderCommentBadge).toEqual(false);
+ });
+
+ it('should initialize ReplacedImageDiff if js-replaced-image', () => {
+ const diffFileEl = fileEl.querySelector('.diff-file');
+ diffFileEl.innerHTML = `
+ <div class="js-replaced-image">
+ </div>
+ `;
+
+ const replacedImageDiff = utilsHelper.initImageDiff(fileEl, false, true);
+ expect(ReplacedImageDiff.prototype.init).toHaveBeenCalled();
+ expect(replacedImageDiff.canCreateNote).toEqual(false);
+ expect(replacedImageDiff.renderCommentBadge).toEqual(true);
+ });
+ });
+});
diff --git a/spec/javascripts/image_diff/image_badge_spec.js b/spec/javascripts/image_diff/image_badge_spec.js
new file mode 100644
index 00000000000..87f98fc0926
--- /dev/null
+++ b/spec/javascripts/image_diff/image_badge_spec.js
@@ -0,0 +1,84 @@
+import ImageBadge from '~/image_diff/image_badge';
+import imageDiffHelper from '~/image_diff/helpers/index';
+import * as mockData from './mock_data';
+
+describe('ImageBadge', () => {
+ const { noteId, discussionId, imageMeta } = mockData;
+ const options = {
+ noteId,
+ discussionId,
+ };
+
+ it('should save actual property', () => {
+ const imageBadge = new ImageBadge(Object.assign({}, options, {
+ actual: imageMeta,
+ }));
+
+ const { actual } = imageBadge;
+ expect(actual.x).toEqual(imageMeta.x);
+ expect(actual.y).toEqual(imageMeta.y);
+ expect(actual.width).toEqual(imageMeta.width);
+ expect(actual.height).toEqual(imageMeta.height);
+ });
+
+ it('should save browser property', () => {
+ const imageBadge = new ImageBadge(Object.assign({}, options, {
+ browser: imageMeta,
+ }));
+
+ const { browser } = imageBadge;
+ expect(browser.x).toEqual(imageMeta.x);
+ expect(browser.y).toEqual(imageMeta.y);
+ expect(browser.width).toEqual(imageMeta.width);
+ expect(browser.height).toEqual(imageMeta.height);
+ });
+
+ it('should save noteId', () => {
+ const imageBadge = new ImageBadge(options);
+ expect(imageBadge.noteId).toEqual(noteId);
+ });
+
+ it('should save discussionId', () => {
+ const imageBadge = new ImageBadge(options);
+ expect(imageBadge.discussionId).toEqual(discussionId);
+ });
+
+ describe('default values', () => {
+ let imageBadge;
+
+ beforeEach(() => {
+ imageBadge = new ImageBadge(options);
+ });
+
+ it('should return default imageMeta if actual property is not provided', () => {
+ const { actual } = imageBadge;
+ expect(actual.x).toEqual(0);
+ expect(actual.y).toEqual(0);
+ expect(actual.width).toEqual(0);
+ expect(actual.height).toEqual(0);
+ });
+
+ it('should return default imageMeta if browser property is not provided', () => {
+ const { browser } = imageBadge;
+ expect(browser.x).toEqual(0);
+ expect(browser.y).toEqual(0);
+ expect(browser.width).toEqual(0);
+ expect(browser.height).toEqual(0);
+ });
+ });
+
+ describe('imageEl property is provided and not browser property', () => {
+ beforeEach(() => {
+ spyOn(imageDiffHelper, 'resizeCoordinatesToImageElement').and.returnValue(true);
+ });
+
+ it('should generate browser property', () => {
+ const imageBadge = new ImageBadge(Object.assign({}, options, {
+ imageEl: document.createElement('img'),
+ }));
+
+ expect(imageDiffHelper.resizeCoordinatesToImageElement).toHaveBeenCalled();
+ expect(imageBadge.browser).toEqual(true);
+ });
+ });
+});
diff --git a/spec/javascripts/image_diff/image_diff_spec.js b/spec/javascripts/image_diff/image_diff_spec.js
new file mode 100644
index 00000000000..346282328c7
--- /dev/null
+++ b/spec/javascripts/image_diff/image_diff_spec.js
@@ -0,0 +1,361 @@
+import ImageDiff from '~/image_diff/image_diff';
+import * as imageUtility from '~/lib/utils/image_utility';
+import imageDiffHelper from '~/image_diff/helpers/index';
+import * as mockData from './mock_data';
+
+describe('ImageDiff', () => {
+ let element;
+ let imageDiff;
+
+ beforeEach(() => {
+ setFixtures(`
+ <div id="element">
+ <div class="diff-file">
+ <div class="js-image-frame">
+ <img src="${gl.TEST_HOST}/image.png">
+ <div class="comment-indicator"></div>
+ <div id="badge-1" class="badge">1</div>
+ <div id="badge-2" class="badge">2</div>
+ <div id="badge-3" class="badge">3</div>
+ </div>
+ <div class="note-container">
+ <div class="discussion-notes">
+ <div class="js-diff-notes-toggle"></div>
+ <div class="notes"></div>
+ </div>
+ <div class="discussion-notes">
+ <div class="js-diff-notes-toggle"></div>
+ <div class="notes"></div>
+ </div>
+ </div>
+ </div>
+ </div>
+ `);
+ element = document.getElementById('element');
+ });
+
+ describe('constructor', () => {
+ beforeEach(() => {
+ imageDiff = new ImageDiff(element, {
+ canCreateNote: true,
+ renderCommentBadge: true,
+ });
+ });
+
+ it('should set el', () => {
+ expect(imageDiff.el).toEqual(element);
+ });
+
+ it('should set canCreateNote', () => {
+ expect(imageDiff.canCreateNote).toEqual(true);
+ });
+
+ it('should set renderCommentBadge', () => {
+ expect(imageDiff.renderCommentBadge).toEqual(true);
+ });
+
+ it('should set $noteContainer', () => {
+ expect(imageDiff.$noteContainer[0]).toEqual(element.querySelector('.note-container'));
+ });
+
+ describe('default', () => {
+ beforeEach(() => {
+ imageDiff = new ImageDiff(element);
+ });
+
+ it('should set canCreateNote as false', () => {
+ expect(imageDiff.canCreateNote).toEqual(false);
+ });
+
+ it('should set renderCommentBadge as false', () => {
+ expect(imageDiff.renderCommentBadge).toEqual(false);
+ });
+ });
+ });
+
+ describe('init', () => {
+ beforeEach(() => {
+ spyOn(ImageDiff.prototype, 'bindEvents').and.callFake(() => {});
+ imageDiff = new ImageDiff(element);
+ imageDiff.init();
+ });
+
+ it('should set imageFrameEl', () => {
+ expect(imageDiff.imageFrameEl).toEqual(element.querySelector('.diff-file .js-image-frame'));
+ });
+
+ it('should set imageEl', () => {
+ expect(imageDiff.imageEl).toEqual(element.querySelector('.diff-file .js-image-frame img'));
+ });
+
+ it('should call bindEvents', () => {
+ expect(imageDiff.bindEvents).toHaveBeenCalled();
+ });
+ });
+
+ describe('bindEvents', () => {
+ let imageEl;
+
+ beforeEach(() => {
+ spyOn(imageDiffHelper, 'toggleCollapsed').and.callFake(() => {});
+ spyOn(imageDiffHelper, 'commentIndicatorOnClick').and.callFake(() => {});
+ spyOn(imageDiffHelper, 'removeCommentIndicator').and.callFake(() => {});
+ spyOn(ImageDiff.prototype, 'imageClicked').and.callFake(() => {});
+ spyOn(ImageDiff.prototype, 'addBadge').and.callFake(() => {});
+ spyOn(ImageDiff.prototype, 'removeBadge').and.callFake(() => {});
+ spyOn(ImageDiff.prototype, 'renderBadges').and.callFake(() => {});
+ imageEl = element.querySelector('.diff-file .js-image-frame img');
+ });
+
+ describe('default', () => {
+ beforeEach(() => {
+ spyOn(imageUtility, 'isImageLoaded').and.returnValue(false);
+ imageDiff = new ImageDiff(element);
+ imageDiff.imageEl = imageEl;
+ imageDiff.bindEvents();
+ });
+
+ it('should register click event delegation to js-diff-notes-toggle', () => {
+ element.querySelector('.js-diff-notes-toggle').click();
+ expect(imageDiffHelper.toggleCollapsed).toHaveBeenCalled();
+ });
+
+ it('should register click event delegation to comment-indicator', () => {
+ element.querySelector('.comment-indicator').click();
+ expect(imageDiffHelper.commentIndicatorOnClick).toHaveBeenCalled();
+ });
+ });
+
+ describe('image loaded', () => {
+ beforeEach(() => {
+ spyOn(imageUtility, 'isImageLoaded').and.returnValue(true);
+ imageDiff = new ImageDiff(element);
+ imageDiff.imageEl = imageEl;
+ imageDiff.bindEvents();
+ });
+
+ it('should call renderBadges if the image is already loaded', () => {
+ expect(imageDiff.renderBadges).toHaveBeenCalled();
+ });
+ });
+
+ describe('image not loaded', () => {
+ beforeEach(() => {
+ spyOn(imageUtility, 'isImageLoaded').and.returnValue(false);
+ imageDiff = new ImageDiff(element);
+ imageDiff.imageEl = imageEl;
+ imageDiff.bindEvents();
+ });
+
+ it('should register load event listener', () => {
+ const loadEvent = new Event('load');
+ imageEl.dispatchEvent(loadEvent);
+ expect(imageDiff.renderBadges).toHaveBeenCalled();
+ });
+ });
+
+ describe('canCreateNote', () => {
+ beforeEach(() => {
+ spyOn(imageUtility, 'isImageLoaded').and.returnValue(false);
+ imageDiff = new ImageDiff(element, {
+ canCreateNote: true,
+ });
+ imageDiff.imageEl = imageEl;
+ imageDiff.bindEvents();
+ });
+
+ it('should register click.imageDiff event', () => {
+ const event = new CustomEvent('click.imageDiff');
+ element.dispatchEvent(event);
+ expect(imageDiff.imageClicked).toHaveBeenCalled();
+ });
+
+ it('should register blur.imageDiff event', () => {
+ const event = new CustomEvent('blur.imageDiff');
+ element.dispatchEvent(event);
+ expect(imageDiffHelper.removeCommentIndicator).toHaveBeenCalled();
+ });
+
+ it('should register addBadge.imageDiff event', () => {
+ const event = new CustomEvent('addBadge.imageDiff');
+ element.dispatchEvent(event);
+ expect(imageDiff.addBadge).toHaveBeenCalled();
+ });
+
+ it('should register removeBadge.imageDiff event', () => {
+ const event = new CustomEvent('removeBadge.imageDiff');
+ element.dispatchEvent(event);
+ expect(imageDiff.removeBadge).toHaveBeenCalled();
+ });
+ });
+
+ describe('canCreateNote is false', () => {
+ beforeEach(() => {
+ spyOn(imageUtility, 'isImageLoaded').and.returnValue(false);
+ imageDiff = new ImageDiff(element);
+ imageDiff.imageEl = imageEl;
+ imageDiff.bindEvents();
+ });
+
+ it('should not register click.imageDiff event', () => {
+ const event = new CustomEvent('click.imageDiff');
+ element.dispatchEvent(event);
+ expect(imageDiff.imageClicked).not.toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('imageClicked', () => {
+ beforeEach(() => {
+ spyOn(imageDiffHelper, 'getTargetSelection').and.returnValue({
+ actual: {},
+ browser: {},
+ });
+ spyOn(imageDiffHelper, 'setPositionDataAttribute').and.callFake(() => {});
+ spyOn(imageDiffHelper, 'showCommentIndicator').and.callFake(() => {});
+ imageDiff = new ImageDiff(element);
+ imageDiff.imageClicked({
+ detail: {
+ currentTarget: {},
+ },
+ });
+ });
+
+ it('should call getTargetSelection', () => {
+ expect(imageDiffHelper.getTargetSelection).toHaveBeenCalled();
+ });
+
+ it('should call setPositionDataAttribute', () => {
+ expect(imageDiffHelper.setPositionDataAttribute).toHaveBeenCalled();
+ });
+
+ it('should call showCommentIndicator', () => {
+ expect(imageDiffHelper.showCommentIndicator).toHaveBeenCalled();
+ });
+ });
+
+ describe('renderBadges', () => {
+ beforeEach(() => {
+ spyOn(ImageDiff.prototype, 'renderBadge').and.callFake(() => {});
+ imageDiff = new ImageDiff(element);
+ imageDiff.renderBadges();
+ });
+
+ it('should call renderBadge for each discussionEl', () => {
+ const discussionEls = element.querySelectorAll('.note-container .discussion-notes .notes');
+ expect(imageDiff.renderBadge.calls.count()).toEqual(discussionEls.length);
+ });
+ });
+
+ describe('renderBadge', () => {
+ let discussionEls;
+
+ beforeEach(() => {
+ spyOn(imageDiffHelper, 'addImageBadge').and.callFake(() => {});
+ spyOn(imageDiffHelper, 'addImageCommentBadge').and.callFake(() => {});
+ spyOn(imageDiffHelper, 'generateBadgeFromDiscussionDOM').and.returnValue({
+ browser: {},
+ noteId: 'noteId',
+ });
+ discussionEls = element.querySelectorAll('.note-container .discussion-notes .notes');
+ imageDiff = new ImageDiff(element);
+ imageDiff.renderBadge(discussionEls[0], 0);
+ });
+
+ it('should populate imageBadges', () => {
+ expect(imageDiff.imageBadges.length).toEqual(1);
+ });
+
+ describe('renderCommentBadge', () => {
+ beforeEach(() => {
+ imageDiff.renderCommentBadge = true;
+ imageDiff.renderBadge(discussionEls[0], 0);
+ });
+
+ it('should call addImageCommentBadge', () => {
+ expect(imageDiffHelper.addImageCommentBadge).toHaveBeenCalled();
+ });
+ });
+
+ describe('renderCommentBadge is false', () => {
+ it('should call addImageBadge', () => {
+ expect(imageDiffHelper.addImageBadge).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('addBadge', () => {
+ beforeEach(() => {
+ spyOn(imageDiffHelper, 'addImageBadge').and.callFake(() => {});
+ spyOn(imageDiffHelper, 'addAvatarBadge').and.callFake(() => {});
+ spyOn(imageDiffHelper, 'updateDiscussionBadgeNumber').and.callFake(() => {});
+ imageDiff = new ImageDiff(element);
+ imageDiff.imageFrameEl = element.querySelector('.diff-file .js-image-frame');
+ imageDiff.addBadge({
+ detail: {
+ x: 0,
+ y: 1,
+ width: 25,
+ height: 50,
+ noteId: 'noteId',
+ discussionId: 'discussionId',
+ },
+ });
+ });
+
+ it('should add imageBadge to imageBadges', () => {
+ expect(imageDiff.imageBadges.length).toEqual(1);
+ });
+
+ it('should call addImageBadge', () => {
+ expect(imageDiffHelper.addImageBadge).toHaveBeenCalled();
+ });
+
+ it('should call addAvatarBadge', () => {
+ expect(imageDiffHelper.addAvatarBadge).toHaveBeenCalled();
+ });
+
+ it('should call updateDiscussionBadgeNumber', () => {
+ expect(imageDiffHelper.updateDiscussionBadgeNumber).toHaveBeenCalled();
+ });
+ });
+
+ describe('removeBadge', () => {
+ beforeEach(() => {
+ const { imageMeta } = mockData;
+
+ spyOn(imageDiffHelper, 'updateDiscussionBadgeNumber').and.callFake(() => {});
+ spyOn(imageDiffHelper, 'updateDiscussionAvatarBadgeNumber').and.callFake(() => {});
+ imageDiff = new ImageDiff(element);
+ imageDiff.imageBadges = [imageMeta, imageMeta, imageMeta];
+ imageDiff.imageFrameEl = element.querySelector('.diff-file .js-image-frame');
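+ // removing badge number 2 should renumber the remaining DOM badges and drop one entry from imageBadges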
+ imageDiff.removeBadge({
+ detail: {
+ badgeNumber: 2,
+ },
+ });
+ });
+
+ describe('cascade badge count', () => {
+ it('should update next imageBadgeEl value', () => {
+ const imageBadgeEls = imageDiff.imageFrameEl.querySelectorAll('.badge');
+ expect(imageBadgeEls[0].innerText).toEqual('1');
+ expect(imageBadgeEls[1].innerText).toEqual('2');
+ expect(imageBadgeEls.length).toEqual(2);
+ });
+
+ it('should call updateDiscussionBadgeNumber', () => {
+ expect(imageDiffHelper.updateDiscussionBadgeNumber).toHaveBeenCalled();
+ });
+
+ it('should call updateDiscussionAvatarBadgeNumber', () => {
+ expect(imageDiffHelper.updateDiscussionAvatarBadgeNumber).toHaveBeenCalled();
+ });
+ });
+
+ it('should remove badge from imageBadges', () => {
+ expect(imageDiff.imageBadges.length).toEqual(2);
+ });
+
+ it('should remove imageBadgeEl', () => {
+ expect(imageDiff.imageFrameEl.querySelector('#badge-2')).toBeNull();
+ });
+ });
+});
diff --git a/spec/javascripts/image_diff/init_discussion_tab_spec.js b/spec/javascripts/image_diff/init_discussion_tab_spec.js
new file mode 100644
index 00000000000..7c447d6f70d
--- /dev/null
+++ b/spec/javascripts/image_diff/init_discussion_tab_spec.js
@@ -0,0 +1,37 @@
+import initDiscussionTab from '~/image_diff/init_discussion_tab';
+import imageDiffHelper from '~/image_diff/helpers/index';
+
+describe('initDiscussionTab', () => {
+ beforeEach(() => {
+ setFixtures(`
+ <div class="timeline-content">
+ <div class="diff-file js-image-file"></div>
+ <div class="diff-file js-image-file"></div>
+ </div>
+ `);
+ });
+
+ it('should pass canCreateNote as false to initImageDiff', (done) => {
+ spyOn(imageDiffHelper, 'initImageDiff').and.callFake((diffFileEl, canCreateNote) => {
+ expect(canCreateNote).toEqual(false);
+ done();
+ });
+
+ initDiscussionTab();
+ });
+
+ it('should pass renderCommentBadge as true to initImageDiff', (done) => {
+ spyOn(imageDiffHelper, 'initImageDiff').and.callFake((diffFileEl, canCreateNote, renderCommentBadge) => {
+ expect(renderCommentBadge).toEqual(true);
+ done();
+ });
+
+ initDiscussionTab();
+ });
+
+ it('should call initImageDiff for each diffFileEl', () => {
+ spyOn(imageDiffHelper, 'initImageDiff').and.callFake(() => {});
+ initDiscussionTab();
+ expect(imageDiffHelper.initImageDiff.calls.count()).toEqual(2);
+ });
+});
diff --git a/spec/javascripts/image_diff/mock_data.js b/spec/javascripts/image_diff/mock_data.js
new file mode 100644
index 00000000000..a0d1732dd0a
--- /dev/null
+++ b/spec/javascripts/image_diff/mock_data.js
@@ -0,0 +1,28 @@
+export const noteId = 'noteId';
+export const discussionId = 'discussionId';
+export const badgeText = 'badgeText';
+export const badgeNumber = 5;
+
+export const coordinate = {
+ x: 100,
+ y: 100,
+};
+
+export const image = {
+ width: 100,
+ height: 100,
+};
+
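+// naturalWidth/naturalHeight are twice the rendered size, so specs that resize coordinates expect a 0.5 ratio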
+export const imageProperties = {
+ width: image.width,
+ height: image.height,
+ naturalWidth: image.width * 2,
+ naturalHeight: image.height * 2,
+};
+
+export const imageMeta = {
+ x: coordinate.x,
+ y: coordinate.y,
+ width: imageProperties.naturalWidth,
+ height: imageProperties.naturalHeight,
+};
diff --git a/spec/javascripts/image_diff/replaced_image_diff_spec.js b/spec/javascripts/image_diff/replaced_image_diff_spec.js
new file mode 100644
index 00000000000..5f8cd7c531a
--- /dev/null
+++ b/spec/javascripts/image_diff/replaced_image_diff_spec.js
@@ -0,0 +1,312 @@
+import ReplacedImageDiff from '~/image_diff/replaced_image_diff';
+import ImageDiff from '~/image_diff/image_diff';
+import { viewTypes } from '~/image_diff/view_types';
+import imageDiffHelper from '~/image_diff/helpers/index';
+
+describe('ReplacedImageDiff', () => {
+ let element;
+ let replacedImageDiff;
+
+ beforeEach(() => {
+ setFixtures(`
+ <div id="element">
+ <div class="two-up">
+ <div class="js-image-frame">
+ <img src="${gl.TEST_HOST}/image.png">
+ </div>
+ </div>
+ <div class="swipe">
+ <div class="js-image-frame">
+ <img src="${gl.TEST_HOST}/image.png">
+ </div>
+ </div>
+ <div class="onion-skin">
+ <div class="js-image-frame">
+ <img src="${gl.TEST_HOST}/image.png">
+ </div>
+ </div>
+ <div class="view-modes-menu">
+ <div class="two-up">2-up</div>
+ <div class="swipe">Swipe</div>
+ <div class="onion-skin">Onion skin</div>
+ </div>
+ </div>
+ `);
+ element = document.getElementById('element');
+ });
+
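+ // these helpers populate the per-view element caches by hand, since most specs below never call init()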
+ function setupImageFrameEls() {
+ replacedImageDiff.imageFrameEls = [];
+ replacedImageDiff.imageFrameEls[viewTypes.TWO_UP] = element.querySelector('.two-up .js-image-frame');
+ replacedImageDiff.imageFrameEls[viewTypes.SWIPE] = element.querySelector('.swipe .js-image-frame');
+ replacedImageDiff.imageFrameEls[viewTypes.ONION_SKIN] = element.querySelector('.onion-skin .js-image-frame');
+ }
+
+ function setupViewModesEls() {
+ replacedImageDiff.viewModesEls = [];
+ replacedImageDiff.viewModesEls[viewTypes.TWO_UP] = element.querySelector('.view-modes-menu .two-up');
+ replacedImageDiff.viewModesEls[viewTypes.SWIPE] = element.querySelector('.view-modes-menu .swipe');
+ replacedImageDiff.viewModesEls[viewTypes.ONION_SKIN] = element.querySelector('.view-modes-menu .onion-skin');
+ }
+
+ function setupImageEls() {
+ replacedImageDiff.imageEls = [];
+ replacedImageDiff.imageEls[viewTypes.TWO_UP] = element.querySelector('.two-up img');
+ replacedImageDiff.imageEls[viewTypes.SWIPE] = element.querySelector('.swipe img');
+ replacedImageDiff.imageEls[viewTypes.ONION_SKIN] = element.querySelector('.onion-skin img');
+ }
+
+ it('should extend ImageDiff', () => {
+ replacedImageDiff = new ReplacedImageDiff(element);
+ expect(replacedImageDiff instanceof ImageDiff).toEqual(true);
+ });
+
+ describe('init', () => {
+ beforeEach(() => {
+ spyOn(ReplacedImageDiff.prototype, 'bindEvents').and.callFake(() => {});
+ spyOn(ReplacedImageDiff.prototype, 'generateImageEls').and.callFake(() => {});
+
+ replacedImageDiff = new ReplacedImageDiff(element);
+ replacedImageDiff.init();
+ });
+
+ it('should set imageFrameEls', () => {
+ const { imageFrameEls } = replacedImageDiff;
+ expect(imageFrameEls).toBeDefined();
+ expect(imageFrameEls[viewTypes.TWO_UP]).toEqual(element.querySelector('.two-up .js-image-frame'));
+ expect(imageFrameEls[viewTypes.SWIPE]).toEqual(element.querySelector('.swipe .js-image-frame'));
+ expect(imageFrameEls[viewTypes.ONION_SKIN]).toEqual(element.querySelector('.onion-skin .js-image-frame'));
+ });
+
+ it('should set viewModesEls', () => {
+ const { viewModesEls } = replacedImageDiff;
+ expect(viewModesEls).toBeDefined();
+ expect(viewModesEls[viewTypes.TWO_UP]).toEqual(element.querySelector('.view-modes-menu .two-up'));
+ expect(viewModesEls[viewTypes.SWIPE]).toEqual(element.querySelector('.view-modes-menu .swipe'));
+ expect(viewModesEls[viewTypes.ONION_SKIN]).toEqual(element.querySelector('.view-modes-menu .onion-skin'));
+ });
+
+ it('should generateImageEls', () => {
+ expect(ReplacedImageDiff.prototype.generateImageEls).toHaveBeenCalled();
+ });
+
+ it('should bindEvents', () => {
+ expect(ReplacedImageDiff.prototype.bindEvents).toHaveBeenCalled();
+ });
+
+ describe('currentView', () => {
+ it('should set currentView', () => {
+ replacedImageDiff.init(viewTypes.ONION_SKIN);
+ expect(replacedImageDiff.currentView).toEqual(viewTypes.ONION_SKIN);
+ });
+
+ it('should default to viewTypes.TWO_UP', () => {
+ expect(replacedImageDiff.currentView).toEqual(viewTypes.TWO_UP);
+ });
+ });
+ });
+
+ describe('generateImageEls', () => {
+ beforeEach(() => {
+ spyOn(ReplacedImageDiff.prototype, 'bindEvents').and.callFake(() => {});
+
+ replacedImageDiff = new ReplacedImageDiff(element, {
+ canCreateNote: false,
+ renderCommentBadge: false,
+ });
+
+ setupImageFrameEls();
+ });
+
+ it('should set imageEls', () => {
+ replacedImageDiff.generateImageEls();
+ const { imageEls } = replacedImageDiff;
+ expect(imageEls).toBeDefined();
+ expect(imageEls[viewTypes.TWO_UP]).toEqual(element.querySelector('.two-up img'));
+ expect(imageEls[viewTypes.SWIPE]).toEqual(element.querySelector('.swipe img'));
+ expect(imageEls[viewTypes.ONION_SKIN]).toEqual(element.querySelector('.onion-skin img'));
+ });
+ });
+
+ describe('bindEvents', () => {
+ beforeEach(() => {
+ spyOn(ImageDiff.prototype, 'bindEvents').and.callFake(() => {});
+ replacedImageDiff = new ReplacedImageDiff(element);
+
+ setupViewModesEls();
+ });
+
+ it('should call super.bindEvents', () => {
+ replacedImageDiff.bindEvents();
+ expect(ImageDiff.prototype.bindEvents).toHaveBeenCalled();
+ });
+
+ it('should register click event listener to 2-up view mode', (done) => {
+ spyOn(ReplacedImageDiff.prototype, 'changeView').and.callFake((viewMode) => {
+ expect(viewMode).toEqual(viewTypes.TWO_UP);
+ done();
+ });
+
+ replacedImageDiff.bindEvents();
+ replacedImageDiff.viewModesEls[viewTypes.TWO_UP].click();
+ });
+
+ it('should register click event listener to swipe view mode', (done) => {
+ spyOn(ReplacedImageDiff.prototype, 'changeView').and.callFake((viewMode) => {
+ expect(viewMode).toEqual(viewTypes.SWIPE);
+ done();
+ });
+
+ replacedImageDiff.bindEvents();
+ replacedImageDiff.viewModesEls[viewTypes.SWIPE].click();
+ });
+
+ it('should register click event listener to onion skin view mode', (done) => {
+ spyOn(ReplacedImageDiff.prototype, 'changeView').and.callFake((viewMode) => {
+ expect(viewMode).toEqual(viewTypes.ONION_SKIN);
+ done();
+ });
+
+ replacedImageDiff.bindEvents();
+ replacedImageDiff.viewModesEls[viewTypes.ONION_SKIN].click();
+ });
+ });
+
+ describe('getters', () => {
+ describe('imageEl', () => {
+ beforeEach(() => {
+ replacedImageDiff = new ReplacedImageDiff(element);
+ replacedImageDiff.currentView = viewTypes.TWO_UP;
+ setupImageEls();
+ });
+
+ it('should return imageEl based on currentView', () => {
+ expect(replacedImageDiff.imageEl).toEqual(element.querySelector('.two-up img'));
+
+ replacedImageDiff.currentView = viewTypes.SWIPE;
+ expect(replacedImageDiff.imageEl).toEqual(element.querySelector('.swipe img'));
+ });
+ });
+
+ describe('imageFrameEl', () => {
+ beforeEach(() => {
+ replacedImageDiff = new ReplacedImageDiff(element);
+ replacedImageDiff.currentView = viewTypes.TWO_UP;
+ setupImageFrameEls();
+ });
+
+ it('should return imageFrameEl based on currentView', () => {
+ expect(replacedImageDiff.imageFrameEl).toEqual(element.querySelector('.two-up .js-image-frame'));
+
+ replacedImageDiff.currentView = viewTypes.ONION_SKIN;
+ expect(replacedImageDiff.imageFrameEl).toEqual(element.querySelector('.onion-skin .js-image-frame'));
+ });
+ });
+ });
+
+ describe('changeView', () => {
+ beforeEach(() => {
+ replacedImageDiff = new ReplacedImageDiff(element);
+ spyOn(imageDiffHelper, 'removeCommentIndicator').and.returnValue({
+ removed: false,
+ });
+ setupImageFrameEls();
+ });
+
+ describe('invalid viewType', () => {
+ beforeEach(() => {
+ replacedImageDiff.changeView('some-view-name');
+ });
+
+ it('should not call removeCommentIndicator', () => {
+ expect(imageDiffHelper.removeCommentIndicator).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('valid viewType', () => {
+ beforeEach(() => {
+ jasmine.clock().install();
+ spyOn(ReplacedImageDiff.prototype, 'renderNewView').and.callFake(() => {});
+ replacedImageDiff.changeView(viewTypes.ONION_SKIN);
+ });
+
+ afterEach(() => {
+ jasmine.clock().uninstall();
+ });
+
+ it('should call removeCommentIndicator', () => {
+ expect(imageDiffHelper.removeCommentIndicator).toHaveBeenCalled();
+ });
+
+ it('should update currentView to newView', () => {
+ expect(replacedImageDiff.currentView).toEqual(viewTypes.ONION_SKIN);
+ });
+
+ it('should clear imageBadges', () => {
+ expect(replacedImageDiff.imageBadges.length).toEqual(0);
+ });
+
+ it('should call renderNewView', () => {
+ jasmine.clock().tick(251);
+ expect(replacedImageDiff.renderNewView).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('renderNewView', () => {
+ beforeEach(() => {
+ replacedImageDiff = new ReplacedImageDiff(element);
+ });
+
+ it('should call renderBadges', () => {
+ spyOn(ReplacedImageDiff.prototype, 'renderBadges').and.callFake(() => {});
+
+ replacedImageDiff.renderNewView({
+ removed: false,
+ });
+
+ expect(replacedImageDiff.renderBadges).toHaveBeenCalled();
+ });
+
+ describe('removeIndicator', () => {
+ const indicator = {
+ removed: true,
+ x: 0,
+ y: 1,
+ image: {
+ width: 50,
+ height: 100,
+ },
+ };
+
+ beforeEach(() => {
+ setupImageEls();
+ setupImageFrameEls();
+ });
+
+ it('should pass showCommentIndicator normalized indicator values', (done) => {
+ spyOn(imageDiffHelper, 'showCommentIndicator').and.callFake(() => {});
+ spyOn(imageDiffHelper, 'resizeCoordinatesToImageElement').and.callFake((imageEl, meta) => {
+ expect(meta.x).toEqual(indicator.x);
+ expect(meta.y).toEqual(indicator.y);
+ expect(meta.width).toEqual(indicator.image.width);
+ expect(meta.height).toEqual(indicator.image.height);
+ done();
+ });
+ replacedImageDiff.renderNewView(indicator);
+ });
+
+ it('should call showCommentIndicator', (done) => {
+ const normalized = {
+ normalized: true,
+ };
+ spyOn(imageDiffHelper, 'resizeCoordinatesToImageElement').and.returnValue(normalized);
+ spyOn(imageDiffHelper, 'showCommentIndicator').and.callFake((imageFrameEl, normalizedIndicator) => {
+ expect(normalizedIndicator).toEqual(normalized);
+ done();
+ });
+ replacedImageDiff.renderNewView(indicator);
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/image_diff/view_types_spec.js b/spec/javascripts/image_diff/view_types_spec.js
new file mode 100644
index 00000000000..e9639f46497
--- /dev/null
+++ b/spec/javascripts/image_diff/view_types_spec.js
@@ -0,0 +1,24 @@
+import { viewTypes, isValidViewType } from '~/image_diff/view_types';
+
+describe('viewTypes', () => {
+ describe('isValidViewType', () => {
+ it('should return true for TWO_UP', () => {
+ expect(isValidViewType(viewTypes.TWO_UP)).toEqual(true);
+ });
+
+ it('should return true for SWIPE', () => {
+ expect(isValidViewType(viewTypes.SWIPE)).toEqual(true);
+ });
+
+ it('should return true for ONION_SKIN', () => {
+ expect(isValidViewType(viewTypes.ONION_SKIN)).toEqual(true);
+ });
+
+ it('should return false for non view types', () => {
+ expect(isValidViewType('some-view-type')).toEqual(false);
+ expect(isValidViewType(null)).toEqual(false);
+ expect(isValidViewType(undefined)).toEqual(false);
+ expect(isValidViewType('')).toEqual(false);
+ });
+ });
+});
diff --git a/spec/javascripts/issuable_context_spec.js b/spec/javascripts/issuable_context_spec.js
new file mode 100644
index 00000000000..bcb2b7b24a0
--- /dev/null
+++ b/spec/javascripts/issuable_context_spec.js
@@ -0,0 +1,34 @@
+/* global IssuableContext */
+import '~/issuable_context';
+import $ from 'jquery';
+
+describe('IssuableContext', () => {
+ describe('toggleHiddenParticipants', () => {
+ const event = jasmine.createSpyObj('event', ['preventDefault']);
+
+ beforeEach(() => {
+ spyOn($.fn, 'data').and.returnValue('data');
+ spyOn($.fn, 'text').and.returnValue('data');
+ });
+
+ afterEach(() => {
+ gl.lazyLoader = undefined;
+ });
+
+ it('calls loadCheck if lazyLoader is set', () => {
+ gl.lazyLoader = jasmine.createSpyObj('lazyLoader', ['loadCheck']);
+
+ IssuableContext.prototype.toggleHiddenParticipants(event);
+
+ expect(gl.lazyLoader.loadCheck).toHaveBeenCalled();
+ });
+
+ it('does not throw if lazyLoader is not defined', () => {
+ gl.lazyLoader = undefined;
+
+ const toggle = IssuableContext.prototype.toggleHiddenParticipants.bind(null, event);
+
+ expect(toggle).not.toThrow();
+ });
+ });
+});
diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js
index 39065814bc2..583a3a74d77 100644
--- a/spec/javascripts/issue_show/components/app_spec.js
+++ b/spec/javascripts/issue_show/components/app_spec.js
@@ -42,7 +42,6 @@ describe('Issuable output', () => {
initialDescriptionText: '',
markdownPreviewPath: '/',
markdownDocsPath: '/',
- isConfidential: false,
projectNamespace: '/',
projectPath: '/',
},
@@ -157,30 +156,6 @@ describe('Issuable output', () => {
});
});
- it('reloads the page if the confidential status has changed', (done) => {
- spyOn(gl.utils, 'visitUrl');
- spyOn(vm.service, 'updateIssuable').and.callFake(() => new Promise((resolve) => {
- resolve({
- json() {
- return {
- confidential: true,
- web_url: location.pathname,
- };
- },
- });
- }));
-
- vm.updateIssuable();
-
- setTimeout(() => {
- expect(
- gl.utils.visitUrl,
- ).toHaveBeenCalledWith(location.pathname);
-
- done();
- });
- });
-
it('correctly updates issuable data', (done) => {
spyOn(vm.service, 'updateIssuable').and.callFake(() => new Promise((resolve) => {
resolve();
diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js
index a6ad250bd86..f86f2f260c3 100644
--- a/spec/javascripts/lib/utils/common_utils_spec.js
+++ b/spec/javascripts/lib/utils/common_utils_spec.js
@@ -1,398 +1,481 @@
/* eslint-disable promise/catch-or-return */
-import '~/lib/utils/common_utils';
+import * as commonUtils from '~/lib/utils/common_utils';
-(() => {
- describe('common_utils', () => {
- describe('gl.utils.parseUrl', () => {
- it('returns an anchor tag with url', () => {
- expect(gl.utils.parseUrl('/some/absolute/url').pathname).toContain('some/absolute/url');
- });
- it('url is escaped', () => {
- // IE11 will return a relative pathname while other browsers will return a full pathname.
- // parseUrl uses an anchor element for parsing an url. With relative urls, the anchor
- // element will create an absolute url relative to the current execution context.
- // The JavaScript test suite is executed at '/' which will lead to an absolute url
- // starting with '/'.
- expect(gl.utils.parseUrl('" test="asf"').pathname).toContain('/%22%20test=%22asf%22');
- });
+describe('common_utils', () => {
+ describe('parseUrl', () => {
+ it('returns an anchor tag with url', () => {
+ expect(commonUtils.parseUrl('/some/absolute/url').pathname).toContain('some/absolute/url');
});
+ it('url is escaped', () => {
+ // IE11 will return a relative pathname while other browsers will return a full pathname.
+ // parseUrl uses an anchor element for parsing a url. With relative urls, the anchor
+ // element will create an absolute url relative to the current execution context.
+ // The JavaScript test suite is executed at '/' which will lead to an absolute url
+ // starting with '/'.
+ expect(commonUtils.parseUrl('" test="asf"').pathname).toContain('/%22%20test=%22asf%22');
+ });
+ });
- describe('gl.utils.parseUrlPathname', () => {
- beforeEach(() => {
- spyOn(gl.utils, 'parseUrl').and.callFake(url => ({
- pathname: url,
- }));
- });
- it('returns an absolute url when given an absolute url', () => {
- expect(gl.utils.parseUrlPathname('/some/absolute/url')).toEqual('/some/absolute/url');
- });
- it('returns an absolute url when given a relative url', () => {
- expect(gl.utils.parseUrlPathname('some/relative/url')).toEqual('/some/relative/url');
- });
+ describe('parseUrlPathname', () => {
+ it('returns an absolute url when given an absolute url', () => {
+ expect(commonUtils.parseUrlPathname('/some/absolute/url')).toEqual('/some/absolute/url');
});
- describe('gl.utils.getUrlParamsArray', () => {
- it('should return params array', () => {
- expect(gl.utils.getUrlParamsArray() instanceof Array).toBe(true);
- });
+ it('returns an absolute url when given a relative url', () => {
+ expect(commonUtils.parseUrlPathname('some/relative/url')).toEqual('/some/relative/url');
+ });
+ });
- it('should remove the question mark from the search params', () => {
- const paramsArray = gl.utils.getUrlParamsArray();
- expect(paramsArray[0][0] !== '?').toBe(true);
- });
+ describe('getUrlParamsArray', () => {
+ it('should return params array', () => {
+ expect(commonUtils.getUrlParamsArray() instanceof Array).toBe(true);
+ });
+
+ it('should remove the question mark from the search params', () => {
+ const paramsArray = commonUtils.getUrlParamsArray();
+ expect(paramsArray[0][0] !== '?').toBe(true);
+ });
- it('should decode params', () => {
- history.pushState('', '', '?label_name%5B%5D=test');
+ it('should decode params', () => {
+ history.pushState('', '', '?label_name%5B%5D=test');
- expect(
- gl.utils.getUrlParamsArray()[0],
- ).toBe('label_name[]=test');
+ expect(
+ commonUtils.getUrlParamsArray()[0],
+ ).toBe('label_name[]=test');
- history.pushState('', '', '?');
- });
+ history.pushState('', '', '?');
});
+ });
- describe('gl.utils.handleLocationHash', () => {
- beforeEach(() => {
- spyOn(window.document, 'getElementById').and.callThrough();
- });
+ describe('handleLocationHash', () => {
+ beforeEach(() => {
+ spyOn(window.document, 'getElementById').and.callThrough();
+ });
- afterEach(() => {
- window.history.pushState({}, null, '');
- });
+ afterEach(() => {
+ window.history.pushState({}, null, '');
+ });
- function expectGetElementIdToHaveBeenCalledWith(elementId) {
- expect(window.document.getElementById).toHaveBeenCalledWith(elementId);
- }
+ function expectGetElementIdToHaveBeenCalledWith(elementId) {
+ expect(window.document.getElementById).toHaveBeenCalledWith(elementId);
+ }
- it('decodes hash parameter', () => {
- window.history.pushState({}, null, '#random-hash');
- gl.utils.handleLocationHash();
+ it('decodes hash parameter', () => {
+ window.history.pushState({}, null, '#random-hash');
+ commonUtils.handleLocationHash();
- expectGetElementIdToHaveBeenCalledWith('random-hash');
- expectGetElementIdToHaveBeenCalledWith('user-content-random-hash');
- });
+ expectGetElementIdToHaveBeenCalledWith('random-hash');
+ expectGetElementIdToHaveBeenCalledWith('user-content-random-hash');
+ });
- it('decodes cyrillic hash parameter', () => {
- window.history.pushState({}, null, '#definição');
- gl.utils.handleLocationHash();
+ it('decodes non-ASCII hash parameter', () => {
+ window.history.pushState({}, null, '#definição');
+ commonUtils.handleLocationHash();
- expectGetElementIdToHaveBeenCalledWith('definição');
- expectGetElementIdToHaveBeenCalledWith('user-content-definição');
- });
+ expectGetElementIdToHaveBeenCalledWith('definição');
+ expectGetElementIdToHaveBeenCalledWith('user-content-definição');
+ });
- it('decodes encoded cyrillic hash parameter', () => {
- window.history.pushState({}, null, '#defini%C3%A7%C3%A3o');
- gl.utils.handleLocationHash();
+ it('decodes encoded non-ASCII hash parameter', () => {
+ window.history.pushState({}, null, '#defini%C3%A7%C3%A3o');
+ commonUtils.handleLocationHash();
- expectGetElementIdToHaveBeenCalledWith('definição');
- expectGetElementIdToHaveBeenCalledWith('user-content-definição');
- });
+ expectGetElementIdToHaveBeenCalledWith('definição');
+ expectGetElementIdToHaveBeenCalledWith('user-content-definição');
});
- describe('gl.utils.setParamInURL', () => {
- afterEach(() => {
- window.history.pushState({}, null, '');
- });
+ it('scrolls element into view', () => {
+ document.body.innerHTML += `
+ <div id="parent">
+ <div style="height: 2000px;"></div>
+ <div id="test" style="height: 2000px;"></div>
+ </div>
+ `;
- it('should return the parameter', () => {
- window.history.replaceState({}, null, '');
+ window.history.pushState({}, null, '#test');
+ commonUtils.handleLocationHash();
- expect(gl.utils.setParamInURL('page', 156)).toBe('?page=156');
- expect(gl.utils.setParamInURL('page', '156')).toBe('?page=156');
- });
+ expectGetElementIdToHaveBeenCalledWith('test');
+ expect(window.scrollY).toBe(document.getElementById('test').offsetTop);
- it('should update the existing parameter when its a number', () => {
- window.history.pushState({}, null, '?page=15');
+ document.getElementById('parent').remove();
+ });
- expect(gl.utils.setParamInURL('page', 16)).toBe('?page=16');
- expect(gl.utils.setParamInURL('page', '16')).toBe('?page=16');
- expect(gl.utils.setParamInURL('page', true)).toBe('?page=true');
- });
+ it('scrolls user content element into view', () => {
+ document.body.innerHTML += `
+ <div id="parent">
+ <div style="height: 2000px;"></div>
+ <div id="user-content-test" style="height: 2000px;"></div>
+ </div>
+ `;
- it('should update the existing parameter when its a string', () => {
- window.history.pushState({}, null, '?scope=all');
+ window.history.pushState({}, null, '#test');
+ commonUtils.handleLocationHash();
- expect(gl.utils.setParamInURL('scope', 'finished')).toBe('?scope=finished');
- });
+ expectGetElementIdToHaveBeenCalledWith('test');
+ expectGetElementIdToHaveBeenCalledWith('user-content-test');
+ expect(window.scrollY).toBe(document.getElementById('user-content-test').offsetTop);
- it('should update the existing parameter when more than one parameter exists', () => {
- window.history.pushState({}, null, '?scope=all&page=15');
+ document.getElementById('parent').remove();
+ });
- expect(gl.utils.setParamInURL('scope', 'finished')).toBe('?scope=finished&page=15');
- });
+ it('scrolls to element with offset from navbar', () => {
+ spyOn(window, 'scrollBy').and.callThrough();
+ document.body.innerHTML += `
+ <div id="parent">
+ <div class="navbar-gitlab" style="position: fixed; top: 0; height: 50px;"></div>
+ <div style="height: 2000px; margin-top: 50px;"></div>
+ <div id="user-content-test" style="height: 2000px;"></div>
+ </div>
+ `;
+
+ window.history.pushState({}, null, '#test');
+ commonUtils.handleLocationHash();
+
+ expectGetElementIdToHaveBeenCalledWith('test');
+ expectGetElementIdToHaveBeenCalledWith('user-content-test');
+ expect(window.scrollY).toBe(document.getElementById('user-content-test').offsetTop - 50);
+ expect(window.scrollBy).toHaveBeenCalledWith(0, -50);
+
+ document.getElementById('parent').remove();
+ });
+ });
+
+ describe('setParamInURL', () => {
+ afterEach(() => {
+ window.history.pushState({}, null, '');
+ });
- it('should add a new parameter to the end of the existing ones', () => {
- window.history.pushState({}, null, '?scope=all');
+ it('should return the parameter', () => {
+ window.history.replaceState({}, null, '');
- expect(gl.utils.setParamInURL('page', 16)).toBe('?scope=all&page=16');
- expect(gl.utils.setParamInURL('page', '16')).toBe('?scope=all&page=16');
- expect(gl.utils.setParamInURL('page', true)).toBe('?scope=all&page=true');
- });
+ expect(commonUtils.setParamInURL('page', 156)).toBe('?page=156');
+ expect(commonUtils.setParamInURL('page', '156')).toBe('?page=156');
});
- describe('gl.utils.getParameterByName', () => {
- beforeEach(() => {
- window.history.pushState({}, null, '?scope=all&p=2');
- });
+ it('should update the existing parameter when it's a number', () => {
+ window.history.pushState({}, null, '?page=15');
- afterEach(() => {
- window.history.replaceState({}, null, null);
- });
+ expect(commonUtils.setParamInURL('page', 16)).toBe('?page=16');
+ expect(commonUtils.setParamInURL('page', '16')).toBe('?page=16');
+ expect(commonUtils.setParamInURL('page', true)).toBe('?page=true');
+ });
- it('should return valid parameter', () => {
- const value = gl.utils.getParameterByName('scope');
- expect(gl.utils.getParameterByName('p')).toEqual('2');
- expect(value).toBe('all');
- });
+ it('should update the existing parameter when it's a string', () => {
+ window.history.pushState({}, null, '?scope=all');
- it('should return invalid parameter', () => {
- const value = gl.utils.getParameterByName('fakeParameter');
- expect(value).toBe(null);
- });
+ expect(commonUtils.setParamInURL('scope', 'finished')).toBe('?scope=finished');
+ });
- it('should return valid paramentes if URL is provided', () => {
- let value = gl.utils.getParameterByName('foo', 'http://cocteau.twins/?foo=bar');
- expect(value).toBe('bar');
+ it('should update the existing parameter when more than one parameter exists', () => {
+ window.history.pushState({}, null, '?scope=all&page=15');
- value = gl.utils.getParameterByName('manan', 'http://cocteau.twins/?foo=bar&manan=canchu');
- expect(value).toBe('canchu');
- });
+ expect(commonUtils.setParamInURL('scope', 'finished')).toBe('?scope=finished&page=15');
});
- describe('gl.utils.normalizedHeaders', () => {
- it('should upperCase all the header keys to keep them consistent', () => {
- const apiHeaders = {
- 'X-Something-Workhorse': { workhorse: 'ok' },
- 'x-something-nginx': { nginx: 'ok' },
- };
+ it('should add a new parameter to the end of the existing ones', () => {
+ window.history.pushState({}, null, '?scope=all');
- const normalized = gl.utils.normalizeHeaders(apiHeaders);
+ expect(commonUtils.setParamInURL('page', 16)).toBe('?scope=all&page=16');
+ expect(commonUtils.setParamInURL('page', '16')).toBe('?scope=all&page=16');
+ expect(commonUtils.setParamInURL('page', true)).toBe('?scope=all&page=true');
+ });
+ });
- const WORKHORSE = 'X-SOMETHING-WORKHORSE';
- const NGINX = 'X-SOMETHING-NGINX';
+ describe('getParameterByName', () => {
+ beforeEach(() => {
+ window.history.pushState({}, null, '?scope=all&p=2');
+ });
- expect(normalized[WORKHORSE].workhorse).toBe('ok');
- expect(normalized[NGINX].nginx).toBe('ok');
- });
+ afterEach(() => {
+ window.history.replaceState({}, null, null);
});
- describe('gl.utils.normalizeCRLFHeaders', () => {
- beforeEach(function () {
- this.CLRFHeaders = 'a-header: a-value\nAnother-Header: ANOTHER-VALUE\nLaSt-HeAdEr: last-VALUE';
+ it('should return valid parameter', () => {
+ const value = commonUtils.getParameterByName('scope');
+ expect(commonUtils.getParameterByName('p')).toEqual('2');
+ expect(value).toBe('all');
+ });
- spyOn(String.prototype, 'split').and.callThrough();
- spyOn(gl.utils, 'normalizeHeaders').and.callThrough();
+ it('should return invalid parameter', () => {
+ const value = commonUtils.getParameterByName('fakeParameter');
+ expect(value).toBe(null);
+ });
- this.normalizeCRLFHeaders = gl.utils.normalizeCRLFHeaders(this.CLRFHeaders);
- });
+ it('should return valid parameters if a URL is provided', () => {
+ let value = commonUtils.getParameterByName('foo', 'http://cocteau.twins/?foo=bar');
+ expect(value).toBe('bar');
- it('should split by newline', function () {
- expect(String.prototype.split).toHaveBeenCalledWith('\n');
- });
+ value = commonUtils.getParameterByName('manan', 'http://cocteau.twins/?foo=bar&manan=canchu');
+ expect(value).toBe('canchu');
+ });
+ });
- it('should split by colon+space for each header', function () {
- expect(String.prototype.split.calls.allArgs().filter(args => args[0] === ': ').length).toBe(3);
- });
+ describe('normalizeHeaders', () => {
+ it('should upperCase all the header keys to keep them consistent', () => {
+ const apiHeaders = {
+ 'X-Something-Workhorse': { workhorse: 'ok' },
+ 'x-something-nginx': { nginx: 'ok' },
+ };
- it('should call gl.utils.normalizeHeaders with a parsed headers object', function () {
- expect(gl.utils.normalizeHeaders).toHaveBeenCalledWith(jasmine.any(Object));
- });
+ const normalized = commonUtils.normalizeHeaders(apiHeaders);
- it('should return a normalized headers object', function () {
- expect(this.normalizeCRLFHeaders).toEqual({
- 'A-HEADER': 'a-value',
- 'ANOTHER-HEADER': 'ANOTHER-VALUE',
- 'LAST-HEADER': 'last-VALUE',
- });
- });
+ const WORKHORSE = 'X-SOMETHING-WORKHORSE';
+ const NGINX = 'X-SOMETHING-NGINX';
+
+ expect(normalized[WORKHORSE].workhorse).toBe('ok');
+ expect(normalized[NGINX].nginx).toBe('ok');
});
+ });
- describe('gl.utils.parseIntPagination', () => {
- it('should parse to integers all string values and return pagination object', () => {
- const pagination = {
- 'X-PER-PAGE': 10,
- 'X-PAGE': 2,
- 'X-TOTAL': 30,
- 'X-TOTAL-PAGES': 3,
- 'X-NEXT-PAGE': 3,
- 'X-PREV-PAGE': 1,
- };
-
- const expectedPagination = {
- perPage: 10,
- page: 2,
- total: 30,
- totalPages: 3,
- nextPage: 3,
- previousPage: 1,
- };
-
- expect(gl.utils.parseIntPagination(pagination)).toEqual(expectedPagination);
- });
+ describe('normalizeCRLFHeaders', () => {
+ beforeEach(function () {
+ this.CRLFHeaders = 'a-header: a-value\nAnother-Header: ANOTHER-VALUE\nLaSt-HeAdEr: last-VALUE';
+ spyOn(String.prototype, 'split').and.callThrough();
+ this.normalizeCRLFHeaders = commonUtils.normalizeCRLFHeaders(this.CRLFHeaders);
+ });
+
+ it('should split by newline', function () {
+ expect(String.prototype.split).toHaveBeenCalledWith('\n');
});
- describe('gl.utils.isMetaClick', () => {
- it('should identify meta click on Windows/Linux', () => {
- const e = {
- metaKey: false,
- ctrlKey: true,
- which: 1,
- };
+ it('should split by colon+space for each header', function () {
+ expect(String.prototype.split.calls.allArgs().filter(args => args[0] === ': ').length).toBe(3);
+ });
- expect(gl.utils.isMetaClick(e)).toBe(true);
+ it('should return a normalized headers object', function () {
+ expect(this.normalizeCRLFHeaders).toEqual({
+ 'A-HEADER': 'a-value',
+ 'ANOTHER-HEADER': 'ANOTHER-VALUE',
+ 'LAST-HEADER': 'last-VALUE',
});
+ });
+ });
+
+ describe('parseIntPagination', () => {
+ it('should parse to integers all string values and return pagination object', () => {
+ const pagination = {
+ 'X-PER-PAGE': 10,
+ 'X-PAGE': 2,
+ 'X-TOTAL': 30,
+ 'X-TOTAL-PAGES': 3,
+ 'X-NEXT-PAGE': 3,
+ 'X-PREV-PAGE': 1,
+ };
+
+ const expectedPagination = {
+ perPage: 10,
+ page: 2,
+ total: 30,
+ totalPages: 3,
+ nextPage: 3,
+ previousPage: 1,
+ };
+
+ expect(commonUtils.parseIntPagination(pagination)).toEqual(expectedPagination);
+ });
+ });
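A sketch of the header-to-pagination mapping the spec above checks (assumed shape, hypothetical name):

function parseIntPaginationSketch(headers) {
  // Each X-* header arrives as a string and is coerced to an integer.
  return {
    perPage: parseInt(headers['X-PER-PAGE'], 10),
    page: parseInt(headers['X-PAGE'], 10),
    total: parseInt(headers['X-TOTAL'], 10),
    totalPages: parseInt(headers['X-TOTAL-PAGES'], 10),
    nextPage: parseInt(headers['X-NEXT-PAGE'], 10),
    previousPage: parseInt(headers['X-PREV-PAGE'], 10),
  };
}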
- it('should identify meta click on macOS', () => {
- const e = {
- metaKey: true,
- ctrlKey: false,
- which: 1,
- };
+ describe('isMetaClick', () => {
+ it('should identify meta click on Windows/Linux', () => {
+ const e = {
+ metaKey: false,
+ ctrlKey: true,
+ which: 1,
+ };
- expect(gl.utils.isMetaClick(e)).toBe(true);
- });
+ expect(commonUtils.isMetaClick(e)).toBe(true);
+ });
- it('should identify as meta click on middle-click or Mouse-wheel click', () => {
- const e = {
- metaKey: false,
- ctrlKey: false,
- which: 2,
- };
+ it('should identify meta click on macOS', () => {
+ const e = {
+ metaKey: true,
+ ctrlKey: false,
+ which: 1,
+ };
- expect(gl.utils.isMetaClick(e)).toBe(true);
+ expect(commonUtils.isMetaClick(e)).toBe(true);
+ });
+
+ it('should identify as meta click on middle-click or Mouse-wheel click', () => {
+ const e = {
+ metaKey: false,
+ ctrlKey: false,
+ which: 2,
+ };
+
+ expect(commonUtils.isMetaClick(e)).toBe(true);
+ });
+ });
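The three cases above reduce to a single predicate; a sketch of the assumed logic (illustrative only):

function isMetaClickSketch(e) {
  // Cmd on macOS, Ctrl on Windows/Linux, or a middle/mouse-wheel click (which === 2).
  return e.metaKey || e.ctrlKey || e.which === 2;
}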
+
+ describe('convertPermissionToBoolean', () => {
+ it('should convert a boolean string to a boolean', () => {
+ expect(commonUtils.convertPermissionToBoolean('true')).toEqual(true);
+ expect(commonUtils.convertPermissionToBoolean('false')).toEqual(false);
+ });
+ });
+
+ describe('backOff', () => {
+ beforeEach(() => {
+ // shortcut our timeouts otherwise these tests will take a long time to finish
+ const origSetTimeout = window.setTimeout;
+ spyOn(window, 'setTimeout').and.callFake(cb => origSetTimeout(cb, 0));
+ });
+
+ it('solves the promise from the callback', (done) => {
+ const expectedResponseValue = 'Success!';
+ commonUtils.backOff((next, stop) => (
+ new Promise((resolve) => {
+ resolve(expectedResponseValue);
+ }).then((resp) => {
+ stop(resp);
+ })
+ )).then((respBackoff) => {
+ expect(respBackoff).toBe(expectedResponseValue);
+ done();
});
});
- describe('gl.utils.backOff', () => {
- beforeEach(() => {
- // shortcut our timeouts otherwise these tests will take a long time to finish
- const origSetTimeout = window.setTimeout;
- spyOn(window, 'setTimeout').and.callFake(cb => origSetTimeout(cb, 0));
+ it('catches the rejected promise from the callback', (done) => {
+ const errorMessage = 'Mistakes were made!';
+ commonUtils.backOff((next, stop) => {
+ new Promise((resolve, reject) => {
+ reject(new Error(errorMessage));
+ }).then((resp) => {
+ stop(resp);
+ }).catch(err => stop(err));
+ }).catch((errBackoffResp) => {
+ expect(errBackoffResp instanceof Error).toBe(true);
+ expect(errBackoffResp.message).toBe(errorMessage);
+ done();
});
+ });
- it('solves the promise from the callback', (done) => {
- const expectedResponseValue = 'Success!';
- gl.utils.backOff((next, stop) => (
- new Promise((resolve) => {
- resolve(expectedResponseValue);
- }).then((resp) => {
- stop(resp);
+ it('solves the promise correctly after retrying a third time', (done) => {
+ let numberOfCalls = 1;
+ const expectedResponseValue = 'Success!';
+ commonUtils.backOff((next, stop) => (
+ Promise.resolve(expectedResponseValue)
+ .then((resp) => {
+ if (numberOfCalls < 3) {
+ numberOfCalls += 1;
+ next();
+ } else {
+ stop(resp);
+ }
})
- )).then((respBackoff) => {
- expect(respBackoff).toBe(expectedResponseValue);
- done();
- });
+ )).then((respBackoff) => {
+ const timeouts = window.setTimeout.calls.allArgs().map(([, timeout]) => timeout);
+ expect(timeouts).toEqual([2000, 4000]);
+ expect(respBackoff).toBe(expectedResponseValue);
+ done();
});
+ });
- it('catches the rejected promise from the callback ', (done) => {
- const errorMessage = 'Mistakes were made!';
- gl.utils.backOff((next, stop) => {
- new Promise((resolve, reject) => {
- reject(new Error(errorMessage));
- }).then((resp) => {
- stop(resp);
- }).catch(err => stop(err));
- }).catch((errBackoffResp) => {
+ it('rejects the backOff promise after timing out', (done) => {
+ commonUtils.backOff(next => next(), 64000)
+ .catch((errBackoffResp) => {
+ const timeouts = window.setTimeout.calls.allArgs().map(([, timeout]) => timeout);
+ expect(timeouts).toEqual([2000, 4000, 8000, 16000, 32000, 32000]);
expect(errBackoffResp instanceof Error).toBe(true);
- expect(errBackoffResp.message).toBe(errorMessage);
+ expect(errBackoffResp.message).toBe('BACKOFF_TIMEOUT');
done();
});
- });
+ });
+ });
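A sketch of the exponential back-off these specs describe, reconstructed from the expected timeout sequence (2s doubling up to a 32s cap, rejecting with 'BACKOFF_TIMEOUT' once the deadline is exceeded); the actual implementation may differ in detail:

function backOffSketch(fn, timeout = 60000) {
  const maxInterval = 32000;
  let nextInterval = 2000;
  let elapsed = 0;

  return new Promise((resolve, reject) => {
    // stop() settles the promise; next() schedules another attempt with a doubled delay.
    const stop = arg => (arg instanceof Error ? reject(arg) : resolve(arg));
    const next = () => {
      if (elapsed < timeout) {
        setTimeout(() => {
          elapsed += nextInterval;
          nextInterval = Math.min(nextInterval * 2, maxInterval);
          fn(next, stop);
        }, nextInterval);
      } else {
        reject(new Error('BACKOFF_TIMEOUT'));
      }
    };
    fn(next, stop);
  });
}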
- it('solves the promise correctly after retrying a third time', (done) => {
- let numberOfCalls = 1;
- const expectedResponseValue = 'Success!';
- gl.utils.backOff((next, stop) => (
- Promise.resolve(expectedResponseValue)
- .then((resp) => {
- if (numberOfCalls < 3) {
- numberOfCalls += 1;
- next();
- } else {
- stop(resp);
- }
- })
- )).then((respBackoff) => {
- const timeouts = window.setTimeout.calls.allArgs().map(([, timeout]) => timeout);
- expect(timeouts).toEqual([2000, 4000]);
- expect(respBackoff).toBe(expectedResponseValue);
- done();
- });
- });
+ describe('setFavicon', () => {
+ beforeEach(() => {
+ const favicon = document.createElement('link');
+ favicon.setAttribute('id', 'favicon');
+ favicon.setAttribute('href', 'default/favicon');
+ document.body.appendChild(favicon);
+ });
- it('rejects the backOff promise after timing out', (done) => {
- gl.utils.backOff(next => next(), 64000)
- .catch((errBackoffResp) => {
- const timeouts = window.setTimeout.calls.allArgs().map(([, timeout]) => timeout);
- expect(timeouts).toEqual([2000, 4000, 8000, 16000, 32000, 32000]);
- expect(errBackoffResp instanceof Error).toBe(true);
- expect(errBackoffResp.message).toBe('BACKOFF_TIMEOUT');
- done();
- });
- });
+ afterEach(() => {
+ document.body.removeChild(document.getElementById('favicon'));
});
+ it('should set page favicon to provided favicon', () => {
+ const faviconPath = '//custom_favicon';
+ commonUtils.setFavicon(faviconPath);
- describe('gl.utils.setFavicon', () => {
- it('should set page favicon to provided favicon', () => {
- const faviconPath = '//custom_favicon';
- const fakeLink = {
- setAttribute() {},
- };
+ expect(document.getElementById('favicon').getAttribute('href')).toEqual(faviconPath);
+ });
+ });
- spyOn(window.document, 'getElementById').and.callFake(() => fakeLink);
- spyOn(fakeLink, 'setAttribute').and.callFake((attr, val) => {
- expect(attr).toEqual('href');
- expect(val.indexOf(faviconPath) > -1).toBe(true);
- });
- gl.utils.setFavicon(faviconPath);
- });
+ describe('resetFavicon', () => {
+ beforeEach(() => {
+ const favicon = document.createElement('link');
+ favicon.setAttribute('id', 'favicon');
+ favicon.setAttribute('href', 'default/favicon');
+ document.body.appendChild(favicon);
});
- describe('gl.utils.resetFavicon', () => {
- it('should reset page favicon to tanuki', () => {
- const fakeLink = {
- setAttribute() {},
- };
+ afterEach(() => {
+ document.body.removeChild(document.getElementById('favicon'));
+ });
- spyOn(window.document, 'getElementById').and.callFake(() => fakeLink);
- spyOn(fakeLink, 'setAttribute').and.callFake((attr, val) => {
- expect(attr).toEqual('href');
- expect(val).toMatch(/favicon/);
- });
- gl.utils.resetFavicon();
+ it('should reset page favicon to tanuki', () => {
+ commonUtils.resetFavicon();
+ expect(document.getElementById('favicon').getAttribute('href')).toEqual('default/favicon');
+ });
+ });
+
+ describe('setCiStatusFavicon', () => {
+ const BUILD_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/-/jobs/1/status.json`;
+
+ beforeEach(() => {
+ const favicon = document.createElement('link');
+ favicon.setAttribute('id', 'favicon');
+ document.body.appendChild(favicon);
+ });
+
+ afterEach(() => {
+ document.body.removeChild(document.getElementById('favicon'));
+ });
+
+ it('should reset favicon in case of error', () => {
+ const favicon = document.getElementById('favicon');
+ spyOn($, 'ajax').and.callFake(function (options) {
+ options.error();
+ expect(favicon.getAttribute('href')).toEqual('null');
});
+
+ commonUtils.setCiStatusFavicon(BUILD_URL);
});
- describe('gl.utils.setCiStatusFavicon', () => {
- it('should set page favicon to CI status favicon based on provided status', () => {
- const BUILD_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/-/jobs/1/status.json`;
- const FAVICON_PATH = '//icon_status_success';
- const spySetFavicon = spyOn(gl.utils, 'setFavicon').and.stub();
- const spyResetFavicon = spyOn(gl.utils, 'resetFavicon').and.stub();
- spyOn($, 'ajax').and.callFake(function (options) {
- options.success({ favicon: FAVICON_PATH });
- expect(spySetFavicon).toHaveBeenCalledWith(FAVICON_PATH);
- options.success();
- expect(spyResetFavicon).toHaveBeenCalled();
- options.error();
- expect(spyResetFavicon).toHaveBeenCalled();
- });
+ it('should set page favicon to CI status favicon based on provided status', () => {
+ const FAVICON_PATH = '//icon_status_success';
+ const favicon = document.getElementById('favicon');
- gl.utils.setCiStatusFavicon(BUILD_URL);
+ spyOn($, 'ajax').and.callFake(function (options) {
+ options.success({ favicon: FAVICON_PATH });
+ expect(favicon.getAttribute('href')).toEqual(FAVICON_PATH);
});
+
+ commonUtils.setCiStatusFavicon(BUILD_URL);
});
+ });
- describe('gl.utils.ajaxPost', () => {
- it('should perform `$.ajax` call and do `POST` request', () => {
- const requestURL = '/some/random/api';
- const data = { keyname: 'value' };
- const ajaxSpy = spyOn($, 'ajax').and.callFake(() => {});
+ describe('ajaxPost', () => {
+ it('should perform `$.ajax` call and do `POST` request', () => {
+ const requestURL = '/some/random/api';
+ const data = { keyname: 'value' };
+ const ajaxSpy = spyOn($, 'ajax').and.callFake(() => {});
+
+ commonUtils.ajaxPost(requestURL, data);
+ expect(ajaxSpy.calls.allArgs()[0][0].type).toEqual('POST');
+ });
+
+ describe('gl.utils.spriteIcon', () => {
+ beforeEach(() => {
+ window.gon.sprite_icons = 'icons.svg';
+ });
- gl.utils.ajaxPost(requestURL, data);
- expect(ajaxSpy.calls.allArgs()[0][0].type).toEqual('POST');
+ it('should return the svg for a linked icon', () => {
+ expect(gl.utils.spriteIcon('test')).toEqual('<svg><use xlink:href="icons.svg#test" /></svg>');
});
});
});
-})();
+});
diff --git a/spec/javascripts/lib/utils/csrf_token_spec.js b/spec/javascripts/lib/utils/csrf_token_spec.js
new file mode 100644
index 00000000000..c484213df8e
--- /dev/null
+++ b/spec/javascripts/lib/utils/csrf_token_spec.js
@@ -0,0 +1,49 @@
+import csrf from '~/lib/utils/csrf';
+
+describe('csrf', () => {
+ beforeEach(() => {
+ this.tokenKey = 'X-CSRF-Token';
+ this.token = 'pH1cvjnP9grx2oKlhWEDvUZnJ8x2eXsIs1qzyHkF3DugSG5yTxR76CWeEZRhML2D1IeVB7NEW0t5l/axE4iJpQ==';
+ });
+
+ it('returns the correct headerKey', () => {
+ expect(csrf.headerKey).toBe(this.tokenKey);
+ });
+
+ describe('when csrf token is in the DOM', () => {
+ beforeEach(() => {
+ setFixtures(`
+ <meta name="csrf-token" content="${this.token}">
+ `);
+
+ csrf.init();
+ });
+
+ it('returns the csrf token', () => {
+ expect(csrf.token).toBe(this.token);
+ });
+
+ it('returns the csrf headers object', () => {
+ expect(csrf.headers[this.tokenKey]).toBe(this.token);
+ });
+ });
+
+ describe('when csrf token is not in the DOM', () => {
+ beforeEach(() => {
+ setFixtures(`
+ <meta name="some-other-token">
+ `);
+
+ csrf.init();
+ });
+
+ it('returns null for token', () => {
+ expect(csrf.token).toBeNull();
+ });
+
+ it('returns empty object for headers', () => {
+ expect(typeof csrf.headers).toBe('object');
+ expect(Object.keys(csrf.headers).length).toBe(0);
+ });
+ });
+});
diff --git a/spec/javascripts/lib/utils/image_utility_spec.js b/spec/javascripts/lib/utils/image_utility_spec.js
new file mode 100644
index 00000000000..75addfcc833
--- /dev/null
+++ b/spec/javascripts/lib/utils/image_utility_spec.js
@@ -0,0 +1,32 @@
+import * as imageUtility from '~/lib/utils/image_utility';
+
+describe('imageUtility', () => {
+ describe('isImageLoaded', () => {
+ it('should return false when image.complete is false', () => {
+ const element = {
+ complete: false,
+ naturalHeight: 100,
+ };
+
+ expect(imageUtility.isImageLoaded(element)).toEqual(false);
+ });
+
+ it('should return false when naturalHeight = 0', () => {
+ const element = {
+ complete: true,
+ naturalHeight: 0,
+ };
+
+ expect(imageUtility.isImageLoaded(element)).toEqual(false);
+ });
+
+ it('should return true when image.complete and naturalHeight != 0', () => {
+ const element = {
+ complete: true,
+ naturalHeight: 100,
+ };
+
+ expect(imageUtility.isImageLoaded(element)).toEqual(true);
+ });
+ });
+});
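The three cases above boil down to one check; a sketch of the assumed helper (hypothetical name):

// An <img> counts as loaded once the browser marks it complete and it has real
// dimensions (naturalHeight is 0 for broken or still-loading images).
export function isImageLoadedSketch(element) {
  return element.complete && element.naturalHeight !== 0;
}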
diff --git a/spec/javascripts/lib/utils/sticky_spec.js b/spec/javascripts/lib/utils/sticky_spec.js
index c3ee3ef9825..b87c836654d 100644
--- a/spec/javascripts/lib/utils/sticky_spec.js
+++ b/spec/javascripts/lib/utils/sticky_spec.js
@@ -1,52 +1,79 @@
import { isSticky } from '~/lib/utils/sticky';
describe('sticky', () => {
- const el = {
- offsetTop: 0,
- classList: {},
- };
+ let el;
beforeEach(() => {
- el.offsetTop = 0;
- el.classList.add = jasmine.createSpy('spy');
- el.classList.remove = jasmine.createSpy('spy');
+ document.body.innerHTML += `
+ <div class="parent">
+ <div id="js-sticky"></div>
+ </div>
+ `;
+
+ el = document.getElementById('js-sticky');
});
- describe('classList.remove', () => {
- it('does not call classList.remove when stuck', () => {
- isSticky(el, 0, 0);
+ afterEach(() => {
+ el.parentNode.remove();
+ });
+
+ describe('when stuck', () => {
+ it('does not remove is-stuck class', () => {
+ isSticky(el, 0, el.offsetTop);
+ isSticky(el, 0, el.offsetTop);
expect(
- el.classList.remove,
- ).not.toHaveBeenCalled();
+ el.classList.contains('is-stuck'),
+ ).toBeTruthy();
});
- it('calls classList.remove when not stuck', () => {
- el.offsetTop = 10;
- isSticky(el, 0, 0);
+ it('adds is-stuck class', () => {
+ isSticky(el, 0, el.offsetTop);
expect(
- el.classList.remove,
- ).toHaveBeenCalledWith('is-stuck');
+ el.classList.contains('is-stuck'),
+ ).toBeTruthy();
+ });
+
+ it('inserts placeholder element', () => {
+ isSticky(el, 0, el.offsetTop, true);
+
+ expect(
+ document.querySelector('.sticky-placeholder'),
+ ).not.toBeNull();
});
});
- describe('classList.add', () => {
- it('calls classList.add when stuck', () => {
+ describe('when not stuck', () => {
+ it('removes is-stuck class', () => {
+ spyOn(el.classList, 'remove').and.callThrough();
+
+ isSticky(el, 0, el.offsetTop);
isSticky(el, 0, 0);
expect(
- el.classList.add,
+ el.classList.remove,
).toHaveBeenCalledWith('is-stuck');
+ expect(
+ el.classList.contains('is-stuck'),
+ ).toBeFalsy();
});
- it('does not call classList.add when not stuck', () => {
- el.offsetTop = 10;
+ it('does not add is-stuck class', () => {
isSticky(el, 0, 0);
expect(
- el.classList.add,
- ).not.toHaveBeenCalled();
+ el.classList.contains('is-stuck'),
+ ).toBeFalsy();
+ });
+
+ it('removes placeholder', () => {
+ isSticky(el, 0, el.offsetTop, true);
+ isSticky(el, 0, 0, true);
+
+ expect(
+ document.querySelector('.sticky-placeholder'),
+ ).toBeNull();
});
});
});
diff --git a/spec/javascripts/line_highlighter_spec.js b/spec/javascripts/line_highlighter_spec.js
index aee274641e8..645664a5219 100644
--- a/spec/javascripts/line_highlighter_spec.js
+++ b/spec/javascripts/line_highlighter_spec.js
@@ -18,19 +18,25 @@ import '~/line_highlighter';
beforeEach(function() {
loadFixtures('static/line_highlighter.html.raw');
this["class"] = new LineHighlighter();
- this.css = this["class"].highlightClass;
+ this.css = this["class"].highlightLineClass;
return this.spies = {
__setLocationHash__: spyOn(this["class"], '__setLocationHash__').and.callFake(function() {})
};
});
describe('behavior', function() {
it('highlights one line given in the URL hash', function() {
- new LineHighlighter('#L13');
+ new LineHighlighter({ hash: '#L13' });
return expect($('#LC13')).toHaveClass(this.css);
});
+ it('highlights one line given in the URL hash with given CSS class name', function() {
+ const hiliter = new LineHighlighter({ hash: '#L13', highlightLineClass: 'hilite' });
+ expect(hiliter.highlightLineClass).toBe('hilite');
+ expect($('#LC13')).toHaveClass('hilite');
+ expect($('#LC13')).not.toHaveClass('hll');
+ });
it('highlights a range of lines given in the URL hash', function() {
var line, results;
- new LineHighlighter('#L5-25');
+ new LineHighlighter({ hash: '#L5-25' });
expect($("." + this.css).length).toBe(21);
results = [];
for (line = 5; line <= 25; line += 1) {
@@ -41,7 +47,7 @@ import '~/line_highlighter';
it('scrolls to the first highlighted line on initial load', function() {
var spy;
spy = spyOn($, 'scrollTo');
- new LineHighlighter('#L5-25');
+ new LineHighlighter({ hash: '#L5-25' });
return expect(spy).toHaveBeenCalledWith('#L5', jasmine.anything());
});
it('discards click events', function() {
@@ -50,10 +56,10 @@ import '~/line_highlighter';
clickLine(13);
return expect(spy).toHaveBeenPrevented();
});
- return it('handles garbage input from the hash', function() {
+ it('handles garbage input from the hash', function() {
var func;
func = function() {
- return new LineHighlighter('#blob-content-holder');
+ return new LineHighlighter({ fileHolderSelector: '#blob-content-holder' });
};
return expect(func).not.toThrow();
});
diff --git a/spec/javascripts/locale/sprintf_spec.js b/spec/javascripts/locale/sprintf_spec.js
new file mode 100644
index 00000000000..52e903b819f
--- /dev/null
+++ b/spec/javascripts/locale/sprintf_spec.js
@@ -0,0 +1,74 @@
+import sprintf from '~/locale/sprintf';
+
+describe('locale', () => {
+ describe('sprintf', () => {
+ it('does not modify string without parameters', () => {
+ const input = 'No parameters';
+
+ const output = sprintf(input);
+
+ expect(output).toBe(input);
+ });
+
+ it('ignores extraneous parameters', () => {
+ const input = 'No parameters';
+
+ const output = sprintf(input, { ignore: 'this' });
+
+ expect(output).toBe(input);
+ });
+
+ it('ignores extraneous placeholders', () => {
+ const input = 'No %{parameters}';
+
+ const output = sprintf(input);
+
+ expect(output).toBe(input);
+ });
+
+ it('replaces parameters', () => {
+ const input = '%{name} has %{count} parameters';
+ const parameters = {
+ name: 'this',
+ count: 2,
+ };
+
+ const output = sprintf(input, parameters);
+
+ expect(output).toBe('this has 2 parameters');
+ });
+
+ it('replaces multiple occurrences', () => {
+ const input = 'to %{verb} or not to %{verb}';
+ const parameters = {
+ verb: 'be',
+ };
+
+ const output = sprintf(input, parameters);
+
+ expect(output).toBe('to be or not to be');
+ });
+
+ it('escapes parameters', () => {
+ const input = 'contains %{userContent}';
+ const parameters = {
+ userContent: '<script>alert("malicious!")</script>',
+ };
+
+ const output = sprintf(input, parameters);
+
+ expect(output).toBe('contains &lt;script&gt;alert(&quot;malicious!&quot;)&lt;/script&gt;');
+ });
+
+ it('does not escape parameters for escapeParameters = false', () => {
+ const input = 'contains %{safeContent}';
+ const parameters = {
+ safeContent: '<strong>bold attempt</strong>',
+ };
+
+ const output = sprintf(input, parameters, false);
+
+ expect(output).toBe('contains <strong>bold attempt</strong>');
+ });
+ });
+});
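A sketch of the placeholder substitution and escaping behaviour exercised above (assumed implementation with a hypothetical name; the real module may escape via a shared utility):

function sprintfSketch(input, parameters, escapeParameters = true) {
  const escapeHtml = str => String(str)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;');

  let output = input;
  if (parameters) {
    Object.keys(parameters).forEach((key) => {
      const value = escapeParameters ? escapeHtml(parameters[key]) : String(parameters[key]);
      // Replace every occurrence of the %{key} placeholder.
      output = output.split(`%{${key}}`).join(value);
    });
  }
  return output;
}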
diff --git a/spec/javascripts/merge_request_spec.js b/spec/javascripts/merge_request_spec.js
index 6ff42e2378d..3ab901da6b6 100644
--- a/spec/javascripts/merge_request_spec.js
+++ b/spec/javascripts/merge_request_spec.js
@@ -58,5 +58,44 @@ import IssuablesHelper from '~/helpers/issuables_helper';
expect(CloseReopenReportToggle.prototype.initDroplab).toHaveBeenCalled();
});
});
+
+ describe('hideCloseButton', () => {
+ describe('merge request of another user', () => {
+ beforeEach(() => {
+ loadFixtures('merge_requests/merge_request_with_task_list.html.raw');
+ this.el = document.querySelector('.merge-request .issuable-actions');
+ const merge = new MergeRequest();
+ merge.hideCloseButton();
+ });
+
+ it('hides the dropdown close item and selects the next item', () => {
+ const closeItem = this.el.querySelector('li.close-item');
+ const smallCloseItem = this.el.querySelector('.js-close-item');
+ const reportItem = this.el.querySelector('li.report-item');
+
+ expect(closeItem).toHaveClass('hidden');
+ expect(smallCloseItem).toHaveClass('hidden');
+ expect(reportItem).toHaveClass('droplab-item-selected');
+ expect(reportItem).not.toHaveClass('hidden');
+ });
+ });
+
+ describe('merge request of current_user', () => {
+ beforeEach(() => {
+ loadFixtures('merge_requests/merge_request_of_current_user.html.raw');
+ this.el = document.querySelector('.merge-request .issuable-actions');
+ const merge = new MergeRequest();
+ merge.hideCloseButton();
+ });
+
+ it('hides the close button', () => {
+ const closeButton = this.el.querySelector('.btn-close');
+ const smallCloseItem = this.el.querySelector('.js-close-item');
+
+ expect(closeButton).toHaveClass('hidden');
+ expect(smallCloseItem).toHaveClass('hidden');
+ });
+ });
+ });
});
}).call(window);
diff --git a/spec/javascripts/merge_request_tabs_spec.js b/spec/javascripts/merge_request_tabs_spec.js
index 8830a2d29e5..ccdbfcba692 100644
--- a/spec/javascripts/merge_request_tabs_spec.js
+++ b/spec/javascripts/merge_request_tabs_spec.js
@@ -78,8 +78,9 @@ import 'vendor/jquery.scrollTo';
});
describe('meta click', () => {
+ let metakeyEvent;
beforeEach(function () {
- spyOn(gl.utils, 'isMetaClick').and.returnValue(true);
+ metakeyEvent = $.Event('click', { keyCode: 91, ctrlKey: true });
});
it('opens page when commits link is clicked', function () {
@@ -89,7 +90,7 @@ import 'vendor/jquery.scrollTo';
});
this.class.bindEvents();
- document.querySelector('.merge-request-tabs .commits-tab a').click();
+ $('.merge-request-tabs .commits-tab a').trigger(metakeyEvent);
});
it('opens page when commits badge is clicked', function () {
@@ -99,7 +100,7 @@ import 'vendor/jquery.scrollTo';
});
this.class.bindEvents();
- document.querySelector('.merge-request-tabs .commits-tab a .badge').click();
+ $('.merge-request-tabs .commits-tab a .badge').trigger(metakeyEvent);
});
});
@@ -415,5 +416,28 @@ import 'vendor/jquery.scrollTo';
});
});
});
+
+ describe('expandViewContainer', function () {
+ beforeEach(() => {
+ $('body').append('<div class="content-wrapper"><div class="container-fluid container-limited"></div></div>');
+ });
+
+ afterEach(() => {
+ $('.content-wrapper').remove();
+ });
+
+ it('removes container-limited from containers', function () {
+ this.class.expandViewContainer();
+
+ expect($('.content-wrapper')).not.toContainElement('.container-limited');
+ });
+
+ it('does not remove container-limited from breadcrumbs', function () {
+ $('.container-limited').addClass('breadcrumbs');
+ this.class.expandViewContainer();
+
+ expect($('.content-wrapper')).toContainElement('.container-limited');
+ });
+ });
});
}).call(window);
diff --git a/spec/javascripts/monitoring/dashboard_state_spec.js b/spec/javascripts/monitoring/dashboard_state_spec.js
index e8f7042e131..3319eeb3f31 100644
--- a/spec/javascripts/monitoring/dashboard_state_spec.js
+++ b/spec/javascripts/monitoring/dashboard_state_spec.js
@@ -21,6 +21,9 @@ describe('EmptyState', () => {
selectedState: 'gettingStarted',
settingsPath: statePaths.settingsPath,
documentationPath: statePaths.documentationPath,
+ emptyGettingStartedSvgPath: 'foo',
+ emptyLoadingSvgPath: 'foo',
+ emptyUnableToConnectSvgPath: 'foo',
});
expect(component.currentState).toBe(component.states.gettingStarted);
@@ -31,6 +34,9 @@ describe('EmptyState', () => {
selectedState: 'gettingStarted',
settingsPath: statePaths.settingsPath,
documentationPath: statePaths.documentationPath,
+ emptyGettingStartedSvgPath: 'foo',
+ emptyLoadingSvgPath: 'foo',
+ emptyUnableToConnectSvgPath: 'foo',
});
expect(component.buttonPath).toEqual(statePaths.settingsPath);
@@ -42,6 +48,9 @@ describe('EmptyState', () => {
selectedState: 'loading',
settingsPath: statePaths.settingsPath,
documentationPath: statePaths.documentationPath,
+ emptyGettingStartedSvgPath: 'foo',
+ emptyLoadingSvgPath: 'foo',
+ emptyUnableToConnectSvgPath: 'foo',
});
expect(component.buttonPath).toEqual(statePaths.documentationPath);
@@ -53,6 +62,9 @@ describe('EmptyState', () => {
selectedState: 'unableToConnect',
settingsPath: statePaths.settingsPath,
documentationPath: statePaths.documentationPath,
+ emptyGettingStartedSvgPath: 'foo',
+ emptyLoadingSvgPath: 'foo',
+ emptyUnableToConnectSvgPath: 'foo',
});
expect(component.showButtonDescription).toEqual(true);
@@ -63,6 +75,9 @@ describe('EmptyState', () => {
selectedState: 'loading',
settingsPath: statePaths.settingsPath,
documentationPath: statePaths.documentationPath,
+ emptyGettingStartedSvgPath: 'foo',
+ emptyLoadingSvgPath: 'foo',
+ emptyUnableToConnectSvgPath: 'foo',
});
expect(component.showButtonDescription).toEqual(false);
@@ -74,6 +89,9 @@ describe('EmptyState', () => {
selectedState: 'gettingStarted',
settingsPath: statePaths.settingsPath,
documentationPath: statePaths.documentationPath,
+ emptyGettingStartedSvgPath: 'foo',
+ emptyLoadingSvgPath: 'foo',
+ emptyUnableToConnectSvgPath: 'foo',
});
expect(component.$el.querySelector('svg')).toBeDefined();
@@ -87,6 +105,9 @@ describe('EmptyState', () => {
selectedState: 'loading',
settingsPath: statePaths.settingsPath,
documentationPath: statePaths.documentationPath,
+ emptyGettingStartedSvgPath: 'foo',
+ emptyLoadingSvgPath: 'foo',
+ emptyUnableToConnectSvgPath: 'foo',
});
expect(component.$el.querySelector('svg')).toBeDefined();
@@ -100,6 +121,9 @@ describe('EmptyState', () => {
selectedState: 'unableToConnect',
settingsPath: statePaths.settingsPath,
documentationPath: statePaths.documentationPath,
+ emptyGettingStartedSvgPath: 'foo',
+ emptyLoadingSvgPath: 'foo',
+ emptyUnableToConnectSvgPath: 'foo',
});
expect(component.$el.querySelector('svg')).toBeDefined();
diff --git a/spec/javascripts/monitoring/graph/deployment_spec.js b/spec/javascripts/monitoring/graph/deployment_spec.js
index c2ff38ffab9..dea42d755d4 100644
--- a/spec/javascripts/monitoring/graph/deployment_spec.js
+++ b/spec/javascripts/monitoring/graph/deployment_spec.js
@@ -21,6 +21,7 @@ describe('MonitoringDeployment', () => {
const component = createComponent({
showDeployInfo: false,
deploymentData: reducedDeploymentData,
+ graphWidth: 440,
graphHeight: 300,
graphHeightOffset: 120,
});
@@ -36,6 +37,7 @@ describe('MonitoringDeployment', () => {
showDeployInfo: false,
deploymentData: reducedDeploymentData,
graphHeight: 300,
+ graphWidth: 440,
graphHeightOffset: 120,
});
@@ -49,6 +51,7 @@ describe('MonitoringDeployment', () => {
showDeployInfo: false,
deploymentData: reducedDeploymentData,
graphHeight: 300,
+ graphWidth: 440,
graphHeightOffset: 120,
});
@@ -62,6 +65,7 @@ describe('MonitoringDeployment', () => {
showDeployInfo: false,
deploymentData: reducedDeploymentData,
graphHeight: 300,
+ graphWidth: 440,
graphHeightOffset: 120,
});
@@ -75,6 +79,7 @@ describe('MonitoringDeployment', () => {
const component = createComponent({
showDeployInfo: true,
deploymentData: reducedDeploymentData,
+ graphWidth: 440,
graphHeight: 300,
graphHeightOffset: 120,
});
@@ -82,12 +87,29 @@ describe('MonitoringDeployment', () => {
expect(component.$el.querySelector('.js-deploy-info-box')).toBeNull();
});
+ it('positions the flag to the left when the xPos is too far right', () => {
+ reducedDeploymentData[0].showDeploymentFlag = false;
+ reducedDeploymentData[0].xPos = 250;
+ const component = createComponent({
+ showDeployInfo: true,
+ deploymentData: reducedDeploymentData,
+ graphWidth: 440,
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ });
+
+ expect(
+ component.positionFlag(reducedDeploymentData[0]),
+ ).toBeLessThan(0);
+ });
+
it('shows the deployment flag', () => {
reducedDeploymentData[0].showDeploymentFlag = true;
const component = createComponent({
showDeployInfo: true,
deploymentData: reducedDeploymentData,
graphHeight: 300,
+ graphWidth: 440,
graphHeightOffset: 120,
});
@@ -102,6 +124,7 @@ describe('MonitoringDeployment', () => {
showDeployInfo: true,
deploymentData: reducedDeploymentData,
graphHeight: 300,
+ graphWidth: 440,
graphHeightOffset: 120,
});
@@ -115,6 +138,7 @@ describe('MonitoringDeployment', () => {
showDeployInfo: true,
deploymentData: reducedDeploymentData,
graphHeight: 300,
+ graphWidth: 440,
graphHeightOffset: 120,
});
@@ -127,6 +151,7 @@ describe('MonitoringDeployment', () => {
showDeployInfo: true,
deploymentData: reducedDeploymentData,
graphHeight: 300,
+ graphWidth: 440,
graphHeightOffset: 120,
});
diff --git a/spec/javascripts/monitoring/graph/flag_spec.js b/spec/javascripts/monitoring/graph/flag_spec.js
index 14794cbfd50..8ee1171419d 100644
--- a/spec/javascripts/monitoring/graph/flag_spec.js
+++ b/spec/javascripts/monitoring/graph/flag_spec.js
@@ -14,19 +14,22 @@ function getCoordinate(component, selector, coordinate) {
return parseInt(coordinateVal, 10);
}
+const defaultValuesComponent = {
+ currentXCoordinate: 200,
+ currentYCoordinate: 100,
+ currentFlagPosition: 100,
+ currentData: {
+ time: new Date('2017-06-04T18:17:33.501Z'),
+ value: '1.49609375',
+ },
+ graphHeight: 300,
+ graphHeightOffset: 120,
+ showFlagContent: true,
+};
+
describe('GraphFlag', () => {
it('has a line and a circle located at the currentXCoordinate and currentYCoordinate', () => {
- const component = createComponent({
- currentXCoordinate: 200,
- currentYCoordinate: 100,
- currentFlagPosition: 100,
- currentData: {
- time: new Date('2017-06-04T18:17:33.501Z'),
- value: '1.49609375',
- },
- graphHeight: 300,
- graphHeightOffset: 120,
- });
+ const component = createComponent(defaultValuesComponent);
expect(getCoordinate(component, '.selected-metric-line', 'x1'))
.toEqual(component.currentXCoordinate);
@@ -35,17 +38,7 @@ describe('GraphFlag', () => {
});
it('has a SVG with the class rect-text-metric at the currentFlagPosition', () => {
- const component = createComponent({
- currentXCoordinate: 200,
- currentYCoordinate: 100,
- currentFlagPosition: 100,
- currentData: {
- time: new Date('2017-06-04T18:17:33.501Z'),
- value: '1.49609375',
- },
- graphHeight: 300,
- graphHeightOffset: 120,
- });
+ const component = createComponent(defaultValuesComponent);
const svg = component.$el.querySelector('.rect-text-metric');
expect(svg.tagName).toEqual('svg');
@@ -54,17 +47,7 @@ describe('GraphFlag', () => {
describe('Computed props', () => {
it('calculatedHeight', () => {
- const component = createComponent({
- currentXCoordinate: 200,
- currentYCoordinate: 100,
- currentFlagPosition: 100,
- currentData: {
- time: new Date('2017-06-04T18:17:33.501Z'),
- value: '1.49609375',
- },
- graphHeight: 300,
- graphHeightOffset: 120,
- });
+ const component = createComponent(defaultValuesComponent);
expect(component.calculatedHeight).toEqual(180);
});
diff --git a/spec/javascripts/monitoring/graph/legend_spec.js b/spec/javascripts/monitoring/graph/legend_spec.js
index da2fbd26e23..2571b7ef869 100644
--- a/spec/javascripts/monitoring/graph/legend_spec.js
+++ b/spec/javascripts/monitoring/graph/legend_spec.js
@@ -28,7 +28,7 @@ const defaultValuesComponent = {
currentDataIndex: 0,
};
-const timeSeries = createTimeSeries(convertedMetrics[0].queries[0].result,
+const timeSeries = createTimeSeries(convertedMetrics[0].queries[0],
defaultValuesComponent.graphWidth, defaultValuesComponent.graphHeight,
defaultValuesComponent.graphHeightOffset);
@@ -89,13 +89,12 @@ describe('GraphLegend', () => {
expect(component.$el.querySelectorAll('.rect-axis-text').length).toEqual(2);
});
- it('contains text to signal the usage, title and time', () => {
+ it('contains text to signal the usage, title and time with multiple time series', () => {
const component = createComponent(defaultValuesComponent);
const titles = component.$el.querySelectorAll('.legend-metric-title');
- expect(getTextFromNode(component, '.legend-metric-title').indexOf(component.legendTitle)).not.toEqual(-1);
- expect(titles[0].textContent.indexOf('Title')).not.toEqual(-1);
- expect(titles[1].textContent.indexOf('Series')).not.toEqual(-1);
+ expect(titles[0].textContent.indexOf('1xx')).not.toEqual(-1);
+ expect(titles[1].textContent.indexOf('2xx')).not.toEqual(-1);
expect(getTextFromNode(component, '.y-label-text')).toEqual(component.yAxisLabel);
});
diff --git a/spec/javascripts/monitoring/monitoring_paths_spec.js b/spec/javascripts/monitoring/graph_path_spec.js
index d39db945e17..81825a3ae87 100644
--- a/spec/javascripts/monitoring/monitoring_paths_spec.js
+++ b/spec/javascripts/monitoring/graph_path_spec.js
@@ -1,10 +1,10 @@
import Vue from 'vue';
-import MonitoringPaths from '~/monitoring/components/monitoring_paths.vue';
+import GraphPath from '~/monitoring/components/graph/path.vue';
import createTimeSeries from '~/monitoring/utils/multiple_time_series';
import { singleRowMetricsMultipleSeries, convertDatesMultipleSeries } from './mock_data';
const createComponent = (propsData) => {
- const Component = Vue.extend(MonitoringPaths);
+ const Component = Vue.extend(GraphPath);
return new Component({
propsData,
@@ -13,22 +13,23 @@ const createComponent = (propsData) => {
const convertedMetrics = convertDatesMultipleSeries(singleRowMetricsMultipleSeries);
-const timeSeries = createTimeSeries(convertedMetrics[0].queries[0].result, 428, 272, 120);
+const timeSeries = createTimeSeries(convertedMetrics[0].queries[0], 428, 272, 120);
+const firstTimeSeries = timeSeries[0];
describe('Monitoring Paths', () => {
it('renders two paths to represent a line and the area underneath it', () => {
const component = createComponent({
- generatedLinePath: timeSeries[0].linePath,
- generatedAreaPath: timeSeries[0].areaPath,
- lineColor: '#ccc',
- areaColor: '#fff',
+ generatedLinePath: firstTimeSeries.linePath,
+ generatedAreaPath: firstTimeSeries.areaPath,
+ lineColor: firstTimeSeries.lineColor,
+ areaColor: firstTimeSeries.areaColor,
});
const metricArea = component.$el.querySelector('.metric-area');
const metricLine = component.$el.querySelector('.metric-line');
- expect(metricArea.getAttribute('fill')).toBe('#fff');
- expect(metricArea.getAttribute('d')).toBe(timeSeries[0].areaPath);
- expect(metricLine.getAttribute('stroke')).toBe('#ccc');
- expect(metricLine.getAttribute('d')).toBe(timeSeries[0].linePath);
+ expect(metricArea.getAttribute('fill')).toBe('#8fbce8');
+ expect(metricArea.getAttribute('d')).toBe(firstTimeSeries.areaPath);
+ expect(metricLine.getAttribute('stroke')).toBe('#1f78d1');
+ expect(metricLine.getAttribute('d')).toBe(firstTimeSeries.linePath);
});
});
diff --git a/spec/javascripts/monitoring/graph_spec.js b/spec/javascripts/monitoring/graph_spec.js
index 7d8b0744af1..fd79abe241a 100644
--- a/spec/javascripts/monitoring/graph_spec.js
+++ b/spec/javascripts/monitoring/graph_spec.js
@@ -44,7 +44,7 @@ describe('Graph', () => {
.not.toEqual(-1);
});
- it('outterViewBox gets a width and height property based on the DOM size of the element', () => {
+ it('outerViewBox gets a width and height property based on the DOM size of the element', () => {
const component = createComponent({
graphData: convertedMetrics[1],
classType: 'col-md-6',
@@ -52,8 +52,8 @@ describe('Graph', () => {
deploymentData,
});
- const viewBoxArray = component.outterViewBox.split(' ');
- expect(typeof component.outterViewBox).toEqual('string');
+ const viewBoxArray = component.outerViewBox.split(' ');
+ expect(typeof component.outerViewBox).toEqual('string');
expect(viewBoxArray[2]).toEqual(component.graphWidth.toString());
expect(viewBoxArray[3]).toEqual(component.graphHeight.toString());
});
@@ -86,4 +86,22 @@ describe('Graph', () => {
expect(component.yAxisLabel).toEqual(component.graphData.y_label);
expect(component.legendTitle).toEqual(component.graphData.queries[0].label);
});
+
+ it('sets the currentData object based on the hovered data index', () => {
+ const component = createComponent({
+ graphData: convertedMetrics[1],
+ classType: 'col-md-6',
+ updateAspectRatio: false,
+ deploymentData,
+ graphIdentifier: 0,
+ hoverData: {
+ hoveredDate: new Date('Sun Aug 27 2017 06:11:51 GMT-0500 (CDT)'),
+ currentDeployXPos: null,
+ },
+ });
+
+ component.positionFlag();
+ expect(component.currentData).toBe(component.timeSeries[0].values[10]);
+ expect(component.currentDataIndex).toEqual(10);
+ });
});
diff --git a/spec/javascripts/monitoring/mock_data.js b/spec/javascripts/monitoring/mock_data.js
index 3d399f2bb95..7ceab657464 100644
--- a/spec/javascripts/monitoring/mock_data.js
+++ b/spec/javascripts/monitoring/mock_data.js
@@ -6346,7 +6346,13 @@ export const singleRowMetricsMultipleSeries = [
}
]
},
- ]
+ ],
+ 'when': [
+ {
+ 'value': 'hundred(s)',
+ 'color': 'green',
+ },
+ ],
}
]
},
diff --git a/spec/javascripts/monitoring/utils/multiple_time_series_spec.js b/spec/javascripts/monitoring/utils/multiple_time_series_spec.js
index 3daf6bf82df..7e44a9ade9e 100644
--- a/spec/javascripts/monitoring/utils/multiple_time_series_spec.js
+++ b/spec/javascripts/monitoring/utils/multiple_time_series_spec.js
@@ -2,16 +2,17 @@ import createTimeSeries from '~/monitoring/utils/multiple_time_series';
import { convertDatesMultipleSeries, singleRowMetricsMultipleSeries } from '../mock_data';
const convertedMetrics = convertDatesMultipleSeries(singleRowMetricsMultipleSeries);
-const timeSeries = createTimeSeries(convertedMetrics[0].queries[0].result, 428, 272, 120);
+const timeSeries = createTimeSeries(convertedMetrics[0].queries[0], 428, 272, 120);
+const firstTimeSeries = timeSeries[0];
describe('Multiple time series', () => {
it('createTimeSeries returned array contains an object for each element', () => {
- expect(typeof timeSeries[0].linePath).toEqual('string');
- expect(typeof timeSeries[0].areaPath).toEqual('string');
- expect(typeof timeSeries[0].timeSeriesScaleX).toEqual('function');
- expect(typeof timeSeries[0].areaColor).toEqual('string');
- expect(typeof timeSeries[0].lineColor).toEqual('string');
- expect(timeSeries[0].values instanceof Array).toEqual(true);
+ expect(typeof firstTimeSeries.linePath).toEqual('string');
+ expect(typeof firstTimeSeries.areaPath).toEqual('string');
+ expect(typeof firstTimeSeries.timeSeriesScaleX).toEqual('function');
+ expect(typeof firstTimeSeries.areaColor).toEqual('string');
+ expect(typeof firstTimeSeries.lineColor).toEqual('string');
+ expect(firstTimeSeries.values instanceof Array).toEqual(true);
});
it('createTimeSeries returns an array', () => {
diff --git a/spec/javascripts/notes/components/issue_comment_form_spec.js b/spec/javascripts/notes/components/issue_comment_form_spec.js
index cca5ec887a3..3f659af5c3b 100644
--- a/spec/javascripts/notes/components/issue_comment_form_spec.js
+++ b/spec/javascripts/notes/components/issue_comment_form_spec.js
@@ -1,4 +1,5 @@
import Vue from 'vue';
+import autosize from 'vendor/autosize';
import store from '~/notes/stores';
import issueCommentForm from '~/notes/components/issue_comment_form.vue';
import { loggedOutIssueData, notesDataMock, userDataMock, issueDataMock } from '../mock_data';
@@ -32,6 +33,30 @@ describe('issue_comment_form component', () => {
expect(vm.$el.querySelector('.timeline-icon .user-avatar-link').getAttribute('href')).toEqual(userDataMock.path);
});
+ describe('handleSave', () => {
+ it('should request to save note when note is entered', () => {
+ vm.note = 'hello world';
+ spyOn(vm, 'saveNote').and.returnValue(new Promise(() => {}));
+ spyOn(vm, 'resizeTextarea');
+ spyOn(vm, 'stopPolling');
+
+ vm.handleSave();
+ expect(vm.isSubmitting).toEqual(true);
+ expect(vm.note).toEqual('');
+ expect(vm.saveNote).toHaveBeenCalled();
+ expect(vm.stopPolling).toHaveBeenCalled();
+ expect(vm.resizeTextarea).toHaveBeenCalled();
+ });
+
+ it('should toggle issue state when no note', () => {
+ spyOn(vm, 'toggleIssueState');
+
+ vm.handleSave();
+
+ expect(vm.toggleIssueState).toHaveBeenCalled();
+ });
+ });
+
describe('textarea', () => {
it('should render textarea with placeholder', () => {
expect(
@@ -39,6 +64,22 @@ describe('issue_comment_form component', () => {
).toEqual('Write a comment or drag your files here...');
});
+ it('should make textarea disabled while requesting', (done) => {
+ const $submitButton = $(vm.$el.querySelector('.js-comment-submit-button'));
+ vm.note = 'hello world';
+ spyOn(vm, 'stopPolling');
+ spyOn(vm, 'saveNote').and.returnValue(new Promise(() => {}));
+
+ vm.$nextTick(() => { // Wait for the vm.note change to render; it should enable $submitButton.
+ $submitButton.trigger('click');
+
+ vm.$nextTick(() => { // Wait for vm.isSubmitting to take effect; it should disable the textarea.
+ expect(vm.$el.querySelector('.js-main-target-form textarea').disabled).toBeTruthy();
+ done();
+ });
+ });
+ });
+
it('should support quick actions', () => {
expect(
vm.$el.querySelector('.js-main-target-form textarea').getAttribute('data-supports-quick-actions'),
@@ -55,6 +96,19 @@ describe('issue_comment_form component', () => {
expect(vm.$el.querySelector(`a[href="${quickActionsDocsPath}"]`).textContent.trim()).toEqual('quick actions');
});
+ it('should resize textarea after note discarded', (done) => {
+ spyOn(autosize, 'update');
+ spyOn(vm, 'discard').and.callThrough();
+
+ vm.note = 'foo';
+ vm.discard();
+
+ Vue.nextTick(() => {
+ expect(autosize.update).toHaveBeenCalled();
+ done();
+ });
+ });
+
describe('edit mode', () => {
it('should enter edit mode when arrow up is pressed', () => {
spyOn(vm, 'editCurrentUserLastNote').and.callThrough();
diff --git a/spec/javascripts/notes/stores/actions_spec.js b/spec/javascripts/notes/stores/actions_spec.js
index 72d362acb2f..3d1ca870ca4 100644
--- a/spec/javascripts/notes/stores/actions_spec.js
+++ b/spec/javascripts/notes/stores/actions_spec.js
@@ -1,6 +1,5 @@
-
import * as actions from '~/notes/stores/actions';
-import testAction from './helpers';
+import testAction from '../../helpers/vuex_action_helper';
import { discussionMock, notesDataMock, userDataMock, issueDataMock, individualNote } from '../mock_data';
describe('Actions Notes Store', () => {
diff --git a/spec/javascripts/notes/stores/mutation_spec.js b/spec/javascripts/notes/stores/mutation_spec.js
index a38f29c1e39..1e22e03e178 100644
--- a/spec/javascripts/notes/stores/mutation_spec.js
+++ b/spec/javascripts/notes/stores/mutation_spec.js
@@ -3,19 +3,31 @@ import { note, discussionMock, notesDataMock, userDataMock, issueDataMock, indiv
describe('Mutation Notes Store', () => {
describe('ADD_NEW_NOTE', () => {
- it('should add a new note to an array of notes', () => {
- const state = { notes: [] };
+ let state;
+ let noteData;
+
+ beforeEach(() => {
+ state = { notes: [] };
+ noteData = {
+ expanded: true,
+ id: note.discussion_id,
+ individual_note: true,
+ notes: [note],
+ reply_id: note.discussion_id,
+ };
mutations.ADD_NEW_NOTE(state, note);
+ });
+ it('should add a new note to an array of notes', () => {
expect(state).toEqual({
- notes: [{
- expanded: true,
- id: note.discussion_id,
- individual_note: true,
- notes: [note],
- reply_id: note.discussion_id,
- }],
+ notes: [noteData],
});
+ expect(state.notes.length).toBe(1);
+ });
+
+ it('should not add the same note to the notes array', () => {
+ mutations.ADD_NEW_NOTE(state, note);
+ expect(state.notes.length).toBe(1);
});
});
diff --git a/spec/javascripts/notes_spec.js b/spec/javascripts/notes_spec.js
index 8c5ad8914b0..3e791a31604 100644
--- a/spec/javascripts/notes_spec.js
+++ b/spec/javascripts/notes_spec.js
@@ -770,6 +770,20 @@ import '~/notes';
expect($tempNote.prop('nodeName')).toEqual('LI');
expect($tempNote.find('.timeline-content').hasClass('discussion')).toBeTruthy();
});
+
+ it('should return an escaped user name', () => {
+ const currentUserFullnameXSS = 'Foo <script>alert("XSS")</script>';
+ const $tempNote = this.notes.createPlaceholderNote({
+ formContent: sampleComment,
+ uniqueId,
+ isDiscussionNote: false,
+ currentUsername,
+ currentUserFullname: currentUserFullnameXSS,
+ currentUserAvatar,
+ });
+ const $tempNoteHeader = $tempNote.find('.note-header');
+ expect($tempNoteHeader.find('.hidden-xs').text().trim()).toEqual('Foo &lt;script&gt;alert(&quot;XSS&quot;)&lt;/script&gt;');
+ });
});
describe('createPlaceholderSystemNote', () => {
diff --git a/spec/javascripts/pipelines/empty_state_spec.js b/spec/javascripts/pipelines/empty_state_spec.js
index bb47a28d9fe..6611b74594f 100644
--- a/spec/javascripts/pipelines/empty_state_spec.js
+++ b/spec/javascripts/pipelines/empty_state_spec.js
@@ -11,6 +11,7 @@ describe('Pipelines Empty State', () => {
component = new EmptyStateComponent({
propsData: {
helpPagePath: 'foo',
+ emptyStateSvgPath: 'foo',
},
}).$mount();
});
diff --git a/spec/javascripts/pipelines/error_state_spec.js b/spec/javascripts/pipelines/error_state_spec.js
index f667d351f72..a402857a4d1 100644
--- a/spec/javascripts/pipelines/error_state_spec.js
+++ b/spec/javascripts/pipelines/error_state_spec.js
@@ -8,7 +8,11 @@ describe('Pipelines Error State', () => {
beforeEach(() => {
ErrorStateComponent = Vue.extend(errorStateComp);
- component = new ErrorStateComponent().$mount();
+ component = new ErrorStateComponent({
+ propsData: {
+ errorStateSvgPath: 'foo',
+ },
+ }).$mount();
});
it('should render error state SVG', () => {
diff --git a/spec/javascripts/pipelines/pipeline_url_spec.js b/spec/javascripts/pipelines/pipeline_url_spec.js
index 256fdbe743c..4a4f2259d23 100644
--- a/spec/javascripts/pipelines/pipeline_url_spec.js
+++ b/spec/javascripts/pipelines/pipeline_url_spec.js
@@ -125,4 +125,23 @@ describe('Pipeline Url Component', () => {
component.$el.querySelector('.js-pipeline-url-autodevops').textContent.trim(),
).toEqual('Auto DevOps');
});
+
+ it('should render error badge when pipeline has a failure reason set', () => {
+ const component = new PipelineUrlComponent({
+ propsData: {
+ pipeline: {
+ id: 1,
+ path: 'foo',
+ flags: {
+ failure_reason: true,
+ },
+ failure_reason: 'some reason',
+ },
+ autoDevopsHelpPath: 'foo',
+ },
+ }).$mount();
+
+ expect(component.$el.querySelector('.js-pipeline-url-failure').textContent).toContain('error');
+ expect(component.$el.querySelector('.js-pipeline-url-failure').getAttribute('data-original-title')).toContain('some reason');
+ });
});
diff --git a/spec/javascripts/pipelines/pipelines_artifacts_spec.js b/spec/javascripts/pipelines/pipelines_artifacts_spec.js
index acb67d0ec21..a8a8e3e2cff 100644
--- a/spec/javascripts/pipelines/pipelines_artifacts_spec.js
+++ b/spec/javascripts/pipelines/pipelines_artifacts_spec.js
@@ -34,7 +34,7 @@ describe('Pipelines Artifacts dropdown', () => {
).toEqual(artifacts[0].path);
expect(
- component.$el.querySelector('.dropdown-menu li a span').textContent,
+ component.$el.querySelector('.dropdown-menu li a').textContent,
).toContain(artifacts[0].name);
});
});
diff --git a/spec/javascripts/pretty_time_spec.js b/spec/javascripts/pretty_time_spec.js
index 0a6c479a95b..084ffe08917 100644
--- a/spec/javascripts/pretty_time_spec.js
+++ b/spec/javascripts/pretty_time_spec.js
@@ -1,215 +1,133 @@
-import '~/lib/utils/pretty_time';
+import { parseSeconds, abbreviateTime, stringifyTime } from '~/lib/utils/pretty_time';
-(() => {
- const prettyTime = gl.utils.prettyTime;
+function assertTimeUnits(obj, minutes, hours, days, weeks) {
+ expect(obj.minutes).toBe(minutes);
+ expect(obj.hours).toBe(hours);
+ expect(obj.days).toBe(days);
+ expect(obj.weeks).toBe(weeks);
+}
- describe('prettyTime methods', function () {
- describe('parseSeconds', function () {
- it('should correctly parse a negative value', function () {
- const parser = prettyTime.parseSeconds;
+describe('prettyTime methods', () => {
+ describe('parseSeconds', () => {
+ it('should correctly parse a negative value', () => {
+ const zeroSeconds = parseSeconds(-1000);
- const zeroSeconds = parser(-1000);
-
- expect(zeroSeconds.minutes).toBe(16);
- expect(zeroSeconds.hours).toBe(0);
- expect(zeroSeconds.days).toBe(0);
- expect(zeroSeconds.weeks).toBe(0);
- });
-
- it('should correctly parse a zero value', function () {
- const parser = prettyTime.parseSeconds;
-
- const zeroSeconds = parser(0);
-
- expect(zeroSeconds.minutes).toBe(0);
- expect(zeroSeconds.hours).toBe(0);
- expect(zeroSeconds.days).toBe(0);
- expect(zeroSeconds.weeks).toBe(0);
- });
-
- it('should correctly parse a small non-zero second values', function () {
- const parser = prettyTime.parseSeconds;
-
- const subOneMinute = parser(10);
-
- expect(subOneMinute.minutes).toBe(0);
- expect(subOneMinute.hours).toBe(0);
- expect(subOneMinute.days).toBe(0);
- expect(subOneMinute.weeks).toBe(0);
-
- const aboveOneMinute = parser(100);
-
- expect(aboveOneMinute.minutes).toBe(1);
- expect(aboveOneMinute.hours).toBe(0);
- expect(aboveOneMinute.days).toBe(0);
- expect(aboveOneMinute.weeks).toBe(0);
-
- const manyMinutes = parser(1000);
-
- expect(manyMinutes.minutes).toBe(16);
- expect(manyMinutes.hours).toBe(0);
- expect(manyMinutes.days).toBe(0);
- expect(manyMinutes.weeks).toBe(0);
- });
-
- it('should correctly parse large second values', function () {
- const parser = prettyTime.parseSeconds;
-
- const aboveOneHour = parser(4800);
-
- expect(aboveOneHour.minutes).toBe(20);
- expect(aboveOneHour.hours).toBe(1);
- expect(aboveOneHour.days).toBe(0);
- expect(aboveOneHour.weeks).toBe(0);
-
- const aboveOneDay = parser(110000);
-
- expect(aboveOneDay.minutes).toBe(33);
- expect(aboveOneDay.hours).toBe(6);
- expect(aboveOneDay.days).toBe(3);
- expect(aboveOneDay.weeks).toBe(0);
-
- const aboveOneWeek = parser(25000000);
-
- expect(aboveOneWeek.minutes).toBe(26);
- expect(aboveOneWeek.hours).toBe(0);
- expect(aboveOneWeek.days).toBe(3);
- expect(aboveOneWeek.weeks).toBe(173);
- });
+ assertTimeUnits(zeroSeconds, 16, 0, 0, 0);
+ });
- it('should correctly accept a custom param for hoursPerDay', function () {
- const parser = prettyTime.parseSeconds;
- const config = { hoursPerDay: 24 };
+ it('should correctly parse a zero value', () => {
+ const zeroSeconds = parseSeconds(0);
- const aboveOneHour = parser(4800, config);
+ assertTimeUnits(zeroSeconds, 0, 0, 0, 0);
+ });
- expect(aboveOneHour.minutes).toBe(20);
- expect(aboveOneHour.hours).toBe(1);
- expect(aboveOneHour.days).toBe(0);
- expect(aboveOneHour.weeks).toBe(0);
+    it('should correctly parse small non-zero second values', () => {
+ const subOneMinute = parseSeconds(10);
+ const aboveOneMinute = parseSeconds(100);
+ const manyMinutes = parseSeconds(1000);
- const aboveOneDay = parser(110000, config);
+ assertTimeUnits(subOneMinute, 0, 0, 0, 0);
+ assertTimeUnits(aboveOneMinute, 1, 0, 0, 0);
+ assertTimeUnits(manyMinutes, 16, 0, 0, 0);
+ });
- expect(aboveOneDay.minutes).toBe(33);
- expect(aboveOneDay.hours).toBe(6);
- expect(aboveOneDay.days).toBe(1);
- expect(aboveOneDay.weeks).toBe(0);
+ it('should correctly parse large second values', () => {
+ const aboveOneHour = parseSeconds(4800);
+ const aboveOneDay = parseSeconds(110000);
+ const aboveOneWeek = parseSeconds(25000000);
- const aboveOneWeek = parser(25000000, config);
+ assertTimeUnits(aboveOneHour, 20, 1, 0, 0);
+ assertTimeUnits(aboveOneDay, 33, 6, 3, 0);
+ assertTimeUnits(aboveOneWeek, 26, 0, 3, 173);
+ });
- expect(aboveOneWeek.minutes).toBe(26);
- expect(aboveOneWeek.hours).toBe(8);
- expect(aboveOneWeek.days).toBe(4);
+ it('should correctly accept a custom param for hoursPerDay', () => {
+ const config = { hoursPerDay: 24 };
- expect(aboveOneWeek.weeks).toBe(57);
- });
+ const aboveOneHour = parseSeconds(4800, config);
+ const aboveOneDay = parseSeconds(110000, config);
+ const aboveOneWeek = parseSeconds(25000000, config);
- it('should correctly accept a custom param for daysPerWeek', function () {
- const parser = prettyTime.parseSeconds;
- const config = { daysPerWeek: 7 };
+ assertTimeUnits(aboveOneHour, 20, 1, 0, 0);
+ assertTimeUnits(aboveOneDay, 33, 6, 1, 0);
+ assertTimeUnits(aboveOneWeek, 26, 8, 4, 57);
+ });
- const aboveOneHour = parser(4800, config);
+ it('should correctly accept a custom param for daysPerWeek', () => {
+ const config = { daysPerWeek: 7 };
- expect(aboveOneHour.minutes).toBe(20);
- expect(aboveOneHour.hours).toBe(1);
- expect(aboveOneHour.days).toBe(0);
- expect(aboveOneHour.weeks).toBe(0);
+ const aboveOneHour = parseSeconds(4800, config);
+ const aboveOneDay = parseSeconds(110000, config);
+ const aboveOneWeek = parseSeconds(25000000, config);
- const aboveOneDay = parser(110000, config);
+ assertTimeUnits(aboveOneHour, 20, 1, 0, 0);
+ assertTimeUnits(aboveOneDay, 33, 6, 3, 0);
+ assertTimeUnits(aboveOneWeek, 26, 0, 0, 124);
+ });
- expect(aboveOneDay.minutes).toBe(33);
- expect(aboveOneDay.hours).toBe(6);
- expect(aboveOneDay.days).toBe(3);
- expect(aboveOneDay.weeks).toBe(0);
+ it('should correctly accept custom params for daysPerWeek and hoursPerDay', () => {
+ const config = { daysPerWeek: 55, hoursPerDay: 14 };
- const aboveOneWeek = parser(25000000, config);
+ const aboveOneHour = parseSeconds(4800, config);
+ const aboveOneDay = parseSeconds(110000, config);
+ const aboveOneWeek = parseSeconds(25000000, config);
- expect(aboveOneWeek.minutes).toBe(26);
- expect(aboveOneWeek.hours).toBe(0);
- expect(aboveOneWeek.days).toBe(0);
+ assertTimeUnits(aboveOneHour, 20, 1, 0, 0);
+ assertTimeUnits(aboveOneDay, 33, 2, 2, 0);
+ assertTimeUnits(aboveOneWeek, 26, 0, 1, 9);
+ });
+ });
- expect(aboveOneWeek.weeks).toBe(124);
- });
+ describe('stringifyTime', () => {
+ it('should stringify values with all non-zero units', () => {
+ const timeObject = {
+ weeks: 1,
+ days: 4,
+ hours: 7,
+ minutes: 20,
+ };
- it('should correctly accept custom params for daysPerWeek and hoursPerDay', function () {
- const parser = prettyTime.parseSeconds;
- const config = { daysPerWeek: 55, hoursPerDay: 14 };
+ const timeString = stringifyTime(timeObject);
- const aboveOneHour = parser(4800, config);
+ expect(timeString).toBe('1w 4d 7h 20m');
+ });
- expect(aboveOneHour.minutes).toBe(20);
- expect(aboveOneHour.hours).toBe(1);
- expect(aboveOneHour.days).toBe(0);
- expect(aboveOneHour.weeks).toBe(0);
+ it('should stringify values with some non-zero units', () => {
+ const timeObject = {
+ weeks: 0,
+ days: 4,
+ hours: 0,
+ minutes: 20,
+ };
- const aboveOneDay = parser(110000, config);
+ const timeString = stringifyTime(timeObject);
- expect(aboveOneDay.minutes).toBe(33);
- expect(aboveOneDay.hours).toBe(2);
- expect(aboveOneDay.days).toBe(2);
- expect(aboveOneDay.weeks).toBe(0);
+ expect(timeString).toBe('4d 20m');
+ });
- const aboveOneWeek = parser(25000000, config);
+ it('should stringify values with no non-zero units', () => {
+ const timeObject = {
+ weeks: 0,
+ days: 0,
+ hours: 0,
+ minutes: 0,
+ };
- expect(aboveOneWeek.minutes).toBe(26);
- expect(aboveOneWeek.hours).toBe(0);
- expect(aboveOneWeek.days).toBe(1);
+ const timeString = stringifyTime(timeObject);
- expect(aboveOneWeek.weeks).toBe(9);
- });
+ expect(timeString).toBe('0m');
});
+ });
- describe('stringifyTime', function () {
- it('should stringify values with all non-zero units', function () {
- const timeObject = {
- weeks: 1,
- days: 4,
- hours: 7,
- minutes: 20,
- };
-
- const timeString = prettyTime.stringifyTime(timeObject);
-
- expect(timeString).toBe('1w 4d 7h 20m');
- });
-
- it('should stringify values with some non-zero units', function () {
- const timeObject = {
- weeks: 0,
- days: 4,
- hours: 0,
- minutes: 20,
- };
-
- const timeString = prettyTime.stringifyTime(timeObject);
-
- expect(timeString).toBe('4d 20m');
- });
-
- it('should stringify values with no non-zero units', function () {
- const timeObject = {
- weeks: 0,
- days: 0,
- hours: 0,
- minutes: 0,
- };
-
- const timeString = prettyTime.stringifyTime(timeObject);
-
- expect(timeString).toBe('0m');
- });
+ describe('abbreviateTime', () => {
+ it('should abbreviate stringified times for weeks', () => {
+ const fullTimeString = '1w 3d 4h 5m';
+ expect(abbreviateTime(fullTimeString)).toBe('1w');
});
- describe('abbreviateTime', function () {
- it('should abbreviate stringified times for weeks', function () {
- const fullTimeString = '1w 3d 4h 5m';
- expect(prettyTime.abbreviateTime(fullTimeString)).toBe('1w');
- });
-
- it('should abbreviate stringified times for non-weeks', function () {
- const fullTimeString = '0w 3d 4h 5m';
- expect(prettyTime.abbreviateTime(fullTimeString)).toBe('3d');
- });
+ it('should abbreviate stringified times for non-weeks', () => {
+ const fullTimeString = '0w 3d 4h 5m';
+ expect(abbreviateTime(fullTimeString)).toBe('3d');
});
});
-})(window.gl || (window.gl = {}));
+});
diff --git a/spec/javascripts/profile/account/components/delete_account_modal_spec.js b/spec/javascripts/profile/account/components/delete_account_modal_spec.js
new file mode 100644
index 00000000000..2e94948cfb2
--- /dev/null
+++ b/spec/javascripts/profile/account/components/delete_account_modal_spec.js
@@ -0,0 +1,129 @@
+import Vue from 'vue';
+
+import deleteAccountModal from '~/profile/account/components/delete_account_modal.vue';
+
+import mountComponent from '../../../helpers/vue_mount_component_helper';
+
+describe('DeleteAccountModal component', () => {
+ const actionUrl = `${gl.TEST_HOST}/delete/user`;
+ const username = 'hasnoname';
+ let Component;
+ let vm;
+
+ beforeEach(() => {
+ Component = Vue.extend(deleteAccountModal);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ const findElements = () => {
+ const confirmation = vm.confirmWithPassword ? 'password' : 'username';
+ return {
+ form: vm.$refs.form,
+ input: vm.$el.querySelector(`[name="${confirmation}"]`),
+ submitButton: vm.$el.querySelector('.btn-danger'),
+ };
+ };
+
+ describe('with password confirmation', () => {
+ beforeEach((done) => {
+ vm = mountComponent(Component, {
+ actionUrl,
+ confirmWithPassword: true,
+ username,
+ });
+
+ vm.isOpen = true;
+
+ Vue.nextTick()
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('does not accept empty password', (done) => {
+ const { form, input, submitButton } = findElements();
+ spyOn(form, 'submit');
+ input.value = '';
+ input.dispatchEvent(new Event('input'));
+
+ Vue.nextTick()
+ .then(() => {
+ expect(vm.enteredPassword).toBe(input.value);
+ expect(submitButton).toHaveClass('disabled');
+ submitButton.click();
+ expect(form.submit).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('submits form with password', (done) => {
+ const { form, input, submitButton } = findElements();
+ spyOn(form, 'submit');
+ input.value = 'anything';
+ input.dispatchEvent(new Event('input'));
+
+ Vue.nextTick()
+ .then(() => {
+ expect(vm.enteredPassword).toBe(input.value);
+ expect(submitButton).not.toHaveClass('disabled');
+ submitButton.click();
+ expect(form.submit).toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('with username confirmation', () => {
+ beforeEach((done) => {
+ vm = mountComponent(Component, {
+ actionUrl,
+ confirmWithPassword: false,
+ username,
+ });
+
+ vm.isOpen = true;
+
+ Vue.nextTick()
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('does not accept wrong username', (done) => {
+ const { form, input, submitButton } = findElements();
+ spyOn(form, 'submit');
+ input.value = 'this is wrong';
+ input.dispatchEvent(new Event('input'));
+
+ Vue.nextTick()
+ .then(() => {
+ expect(vm.enteredUsername).toBe(input.value);
+ expect(submitButton).toHaveClass('disabled');
+ submitButton.click();
+ expect(form.submit).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('submits form with correct username', (done) => {
+ const { form, input, submitButton } = findElements();
+ spyOn(form, 'submit');
+ input.value = username;
+ input.dispatchEvent(new Event('input'));
+
+ Vue.nextTick()
+ .then(() => {
+ expect(vm.enteredUsername).toBe(input.value);
+ expect(submitButton).not.toHaveClass('disabled');
+ submitButton.click();
+ expect(form.submit).toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+});
diff --git a/spec/javascripts/projects_dropdown/service/projects_service_spec.js b/spec/javascripts/projects_dropdown/service/projects_service_spec.js
index d5dd8b3449a..cfd1bb7d24f 100644
--- a/spec/javascripts/projects_dropdown/service/projects_service_spec.js
+++ b/spec/javascripts/projects_dropdown/service/projects_service_spec.js
@@ -34,7 +34,7 @@ describe('ProjectsService', () => {
const searchQuery = 'lab';
const queryParams = {
- simple: false,
+ simple: true,
per_page: 20,
membership: true,
order_by: 'last_activity_at',
diff --git a/spec/javascripts/registry/components/app_spec.js b/spec/javascripts/registry/components/app_spec.js
new file mode 100644
index 00000000000..43e7d9e1224
--- /dev/null
+++ b/spec/javascripts/registry/components/app_spec.js
@@ -0,0 +1,122 @@
+import Vue from 'vue';
+import registry from '~/registry/components/app.vue';
+import mountComponent from '../../helpers/vue_mount_component_helper';
+import { reposServerResponse } from '../mock_data';
+
+describe('Registry List', () => {
+ let vm;
+ let Component;
+
+ beforeEach(() => {
+ Component = Vue.extend(registry);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('with data', () => {
+ const interceptor = (request, next) => {
+ next(request.respondWith(JSON.stringify(reposServerResponse), {
+ status: 200,
+ }));
+ };
+
+ beforeEach(() => {
+ Vue.http.interceptors.push(interceptor);
+ vm = mountComponent(Component, { endpoint: 'foo' });
+ });
+
+ afterEach(() => {
+ Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor);
+ });
+
+ it('should render a list of repos', (done) => {
+ setTimeout(() => {
+ expect(vm.$store.state.repos.length).toEqual(reposServerResponse.length);
+
+ Vue.nextTick(() => {
+ expect(
+ vm.$el.querySelectorAll('.container-image').length,
+ ).toEqual(reposServerResponse.length);
+ done();
+ });
+ }, 0);
+ });
+
+ describe('delete repository', () => {
+ it('should be possible to delete a repo', (done) => {
+ setTimeout(() => {
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.container-image-head .js-remove-repo')).toBeDefined();
+ done();
+ });
+ }, 0);
+ });
+ });
+
+ describe('toggle repository', () => {
+ it('should open the container', (done) => {
+ setTimeout(() => {
+ Vue.nextTick(() => {
+ vm.$el.querySelector('.js-toggle-repo').click();
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.js-toggle-repo i').className).toEqual('fa fa-chevron-up');
+ done();
+ });
+ });
+ }, 0);
+ });
+ });
+ });
+
+ describe('without data', () => {
+ const interceptor = (request, next) => {
+ next(request.respondWith(JSON.stringify([]), {
+ status: 200,
+ }));
+ };
+
+ beforeEach(() => {
+ Vue.http.interceptors.push(interceptor);
+ vm = mountComponent(Component, { endpoint: 'foo' });
+ });
+
+ afterEach(() => {
+ Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor);
+ });
+
+ it('should render empty message', (done) => {
+ setTimeout(() => {
+ expect(
+ vm.$el.querySelector('p').textContent.trim(),
+ ).toEqual('No container images stored for this project. Add one by following the instructions above.');
+ done();
+ }, 0);
+ });
+ });
+
+ describe('while loading data', () => {
+ const interceptor = (request, next) => {
+ next(request.respondWith(JSON.stringify(reposServerResponse), {
+ status: 200,
+ }));
+ };
+
+ beforeEach(() => {
+ Vue.http.interceptors.push(interceptor);
+ vm = mountComponent(Component, { endpoint: 'foo' });
+ });
+
+ afterEach(() => {
+ Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor);
+ });
+
+ it('should render a loading spinner', (done) => {
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.fa-spinner')).not.toBe(null);
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/registry/components/collapsible_container_spec.js b/spec/javascripts/registry/components/collapsible_container_spec.js
new file mode 100644
index 00000000000..5891921318a
--- /dev/null
+++ b/spec/javascripts/registry/components/collapsible_container_spec.js
@@ -0,0 +1,58 @@
+import Vue from 'vue';
+import collapsibleComponent from '~/registry/components/collapsible_container.vue';
+import store from '~/registry/stores';
+import { repoPropsData } from '../mock_data';
+
+describe('collapsible registry container', () => {
+ let vm;
+ let Component;
+
+ beforeEach(() => {
+ Component = Vue.extend(collapsibleComponent);
+ vm = new Component({
+ store,
+ propsData: {
+ repo: repoPropsData,
+ },
+ }).$mount();
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('toggle', () => {
+ it('should be closed by default', () => {
+ expect(vm.$el.querySelector('.container-image-tags')).toBe(null);
+ expect(vm.$el.querySelector('.container-image-head i').className).toEqual('fa fa-chevron-right');
+ });
+
+ it('should be open when user clicks on closed repo', (done) => {
+ vm.$el.querySelector('.js-toggle-repo').click();
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.container-image-tags')).toBeDefined();
+ expect(vm.$el.querySelector('.container-image-head i').className).toEqual('fa fa-chevron-up');
+ done();
+ });
+ });
+
+ it('should be closed when the user clicks on an opened repo', (done) => {
+ vm.$el.querySelector('.js-toggle-repo').click();
+
+ Vue.nextTick(() => {
+ vm.$el.querySelector('.js-toggle-repo').click();
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.container-image-tags')).toBe(null);
+ expect(vm.$el.querySelector('.container-image-head i').className).toEqual('fa fa-chevron-right');
+ done();
+ });
+ });
+ });
+ });
+
+ describe('delete repo', () => {
+ it('should be possible to delete a repo', () => {
+ expect(vm.$el.querySelector('.js-remove-repo')).toBeDefined();
+ });
+ });
+});
diff --git a/spec/javascripts/registry/components/table_registry_spec.js b/spec/javascripts/registry/components/table_registry_spec.js
new file mode 100644
index 00000000000..6aa61afc445
--- /dev/null
+++ b/spec/javascripts/registry/components/table_registry_spec.js
@@ -0,0 +1,49 @@
+import Vue from 'vue';
+import tableRegistry from '~/registry/components/table_registry.vue';
+import store from '~/registry/stores';
+import { repoPropsData } from '../mock_data';
+
+describe('table registry', () => {
+ let vm;
+ let Component;
+
+ beforeEach(() => {
+ Component = Vue.extend(tableRegistry);
+ vm = new Component({
+ store,
+ propsData: {
+ repo: repoPropsData,
+ },
+ }).$mount();
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('should render a table with the registry list', () => {
+ expect(
+ vm.$el.querySelectorAll('table tbody tr').length,
+ ).toEqual(repoPropsData.list.length);
+ });
+
+ it('should render registry tag', () => {
+ const textRendered = vm.$el.querySelector('.table tbody tr').textContent.trim().replace(/\s\s+/g, ' ');
+ expect(textRendered).toContain(repoPropsData.list[0].tag);
+ expect(textRendered).toContain(repoPropsData.list[0].shortRevision);
+ expect(textRendered).toContain(repoPropsData.list[0].layers);
+ expect(textRendered).toContain(repoPropsData.list[0].size);
+ });
+
+ it('should be possible to delete a registry', () => {
+ expect(
+ vm.$el.querySelector('.table tbody tr .js-delete-registry'),
+ ).toBeDefined();
+ });
+
+ describe('pagination', () => {
+ it('should be possible to change the page', () => {
+ expect(vm.$el.querySelector('.gl-pagination')).toBeDefined();
+ });
+ });
+});
diff --git a/spec/javascripts/registry/getters_spec.js b/spec/javascripts/registry/getters_spec.js
new file mode 100644
index 00000000000..3d989541881
--- /dev/null
+++ b/spec/javascripts/registry/getters_spec.js
@@ -0,0 +1,43 @@
+import * as getters from '~/registry/stores/getters';
+
+describe('Getters Registry Store', () => {
+ let state;
+
+ beforeEach(() => {
+ state = {
+ isLoading: false,
+ endpoint: '/root/empty-project/container_registry.json',
+ repos: [{
+ canDelete: true,
+ destroyPath: 'bar',
+ id: '134',
+ isLoading: false,
+ list: [],
+ location: 'foo',
+ name: 'gitlab-org/omnibus-gitlab/foo',
+ tagsPath: 'foo',
+ }, {
+ canDelete: true,
+ destroyPath: 'bar',
+ id: '123',
+ isLoading: false,
+ list: [],
+ location: 'foo',
+ name: 'gitlab-org/omnibus-gitlab',
+ tagsPath: 'foo',
+ }],
+ };
+ });
+
+ describe('isLoading', () => {
+ it('should return the isLoading property', () => {
+ expect(getters.isLoading(state)).toEqual(state.isLoading);
+ });
+ });
+
+ describe('repos', () => {
+ it('should return the repos', () => {
+ expect(getters.repos(state)).toEqual(state.repos);
+ });
+ });
+});
diff --git a/spec/javascripts/registry/mock_data.js b/spec/javascripts/registry/mock_data.js
new file mode 100644
index 00000000000..18600d00bff
--- /dev/null
+++ b/spec/javascripts/registry/mock_data.js
@@ -0,0 +1,122 @@
+export const defaultState = {
+ isLoading: false,
+ endpoint: '',
+ repos: [],
+};
+
+export const reposServerResponse = [
+ {
+ destroy_path: 'path',
+ id: '123',
+ location: 'location',
+ path: 'foo',
+ tags_path: 'tags_path',
+ },
+ {
+ destroy_path: 'path_',
+ id: '456',
+ location: 'location_',
+ path: 'bar',
+ tags_path: 'tags_path_',
+ },
+];
+
+export const registryServerResponse = [
+ {
+ name: 'centos7',
+ short_revision: 'b118ab5b0',
+ revision: 'b118ab5b0e90b7cb5127db31d5321ac14961d097516a8e0e72084b6cdc783b43',
+ size: 679,
+ layers: 19,
+ location: 'location',
+ created_at: 1505828744434,
+ destroy_path: 'path_',
+ },
+ {
+ name: 'centos6',
+ short_revision: 'b118ab5b0',
+ revision: 'b118ab5b0e90b7cb5127db31d5321ac14961d097516a8e0e72084b6cdc783b43',
+ size: 679,
+ layers: 19,
+ location: 'location',
+ created_at: 1505828744434,
+ }];
+
+export const parsedReposServerResponse = [
+ {
+ canDelete: true,
+ destroyPath: reposServerResponse[0].destroy_path,
+ id: reposServerResponse[0].id,
+ isLoading: false,
+ list: [],
+ location: reposServerResponse[0].location,
+ name: reposServerResponse[0].path,
+ tagsPath: reposServerResponse[0].tags_path,
+ },
+ {
+ canDelete: true,
+ destroyPath: reposServerResponse[1].destroy_path,
+ id: reposServerResponse[1].id,
+ isLoading: false,
+ list: [],
+ location: reposServerResponse[1].location,
+ name: reposServerResponse[1].path,
+ tagsPath: reposServerResponse[1].tags_path,
+ },
+];
+
+export const parsedRegistryServerResponse = [
+ {
+ tag: registryServerResponse[0].name,
+ revision: registryServerResponse[0].revision,
+ shortRevision: registryServerResponse[0].short_revision,
+ size: registryServerResponse[0].size,
+ layers: registryServerResponse[0].layers,
+ location: registryServerResponse[0].location,
+ createdAt: registryServerResponse[0].created_at,
+ destroyPath: registryServerResponse[0].destroy_path,
+ canDelete: true,
+ },
+ {
+ tag: registryServerResponse[1].name,
+ revision: registryServerResponse[1].revision,
+ shortRevision: registryServerResponse[1].short_revision,
+ size: registryServerResponse[1].size,
+ layers: registryServerResponse[1].layers,
+ location: registryServerResponse[1].location,
+ createdAt: registryServerResponse[1].created_at,
+ destroyPath: registryServerResponse[1].destroy_path,
+ canDelete: false,
+ },
+];
+
+export const repoPropsData = {
+ canDelete: true,
+ destroyPath: 'path',
+ id: '123',
+ isLoading: false,
+ list: [
+ {
+ tag: 'centos6',
+ revision: 'b118ab5b0e90b7cb5127db31d5321ac14961d097516a8e0e72084b6cdc783b43',
+ shortRevision: 'b118ab5b0',
+ size: 19,
+ layers: 10,
+ location: 'location',
+ createdAt: 1505828744434,
+ destroyPath: 'path',
+ canDelete: true,
+ },
+ ],
+ location: 'location',
+ name: 'foo',
+ tagsPath: 'path',
+ pagination: {
+ perPage: 5,
+ page: 1,
+ total: 13,
+ totalPages: 1,
+ nextPage: null,
+ previousPage: null,
+ },
+};
diff --git a/spec/javascripts/registry/stores/actions_spec.js b/spec/javascripts/registry/stores/actions_spec.js
new file mode 100644
index 00000000000..3c9da4f107b
--- /dev/null
+++ b/spec/javascripts/registry/stores/actions_spec.js
@@ -0,0 +1,85 @@
+import Vue from 'vue';
+import VueResource from 'vue-resource';
+import _ from 'underscore';
+import * as actions from '~/registry/stores/actions';
+import * as types from '~/registry/stores/mutation_types';
+import testAction from '../../helpers/vuex_action_helper';
+import {
+ defaultState,
+ reposServerResponse,
+ registryServerResponse,
+ parsedReposServerResponse,
+} from '../mock_data';
+
+Vue.use(VueResource);
+
+describe('Actions Registry Store', () => {
+ let interceptor;
+ let mockedState;
+
+ beforeEach(() => {
+ mockedState = defaultState;
+ });
+
+ describe('server requests', () => {
+ afterEach(() => {
+ Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor);
+ });
+
+ describe('fetchRepos', () => {
+ beforeEach(() => {
+ interceptor = (request, next) => {
+ next(request.respondWith(JSON.stringify(reposServerResponse), {
+ status: 200,
+ }));
+ };
+
+ Vue.http.interceptors.push(interceptor);
+ });
+
+      it('should set received repos', (done) => {
+ testAction(actions.fetchRepos, null, mockedState, [
+ { type: types.TOGGLE_MAIN_LOADING },
+ { type: types.SET_REPOS_LIST, payload: reposServerResponse },
+ ], done);
+ });
+ });
+
+ describe('fetchList', () => {
+ beforeEach(() => {
+ interceptor = (request, next) => {
+ next(request.respondWith(JSON.stringify(registryServerResponse), {
+ status: 200,
+ }));
+ };
+
+ Vue.http.interceptors.push(interceptor);
+ });
+
+ it('should set received list', (done) => {
+ mockedState.repos = parsedReposServerResponse;
+
+ testAction(actions.fetchList, { repo: mockedState.repos[1] }, mockedState, [
+ { type: types.TOGGLE_REGISTRY_LIST_LOADING },
+ { type: types.SET_REGISTRY_LIST, payload: registryServerResponse },
+ ], done);
+ });
+ });
+ });
+
+ describe('setMainEndpoint', () => {
+ it('should commit set main endpoint', (done) => {
+ testAction(actions.setMainEndpoint, 'endpoint', mockedState, [
+ { type: types.SET_MAIN_ENDPOINT, payload: 'endpoint' },
+ ], done);
+ });
+ });
+
+ describe('toggleLoading', () => {
+ it('should commit toggle main loading', (done) => {
+ testAction(actions.toggleLoading, null, mockedState, [
+ { type: types.TOGGLE_MAIN_LOADING },
+ ], done);
+ });
+ });
+});
diff --git a/spec/javascripts/registry/stores/mutations_spec.js b/spec/javascripts/registry/stores/mutations_spec.js
new file mode 100644
index 00000000000..2e4c0659daa
--- /dev/null
+++ b/spec/javascripts/registry/stores/mutations_spec.js
@@ -0,0 +1,81 @@
+import mutations from '~/registry/stores/mutations';
+import * as types from '~/registry/stores/mutation_types';
+import {
+ defaultState,
+ reposServerResponse,
+ registryServerResponse,
+ parsedReposServerResponse,
+ parsedRegistryServerResponse,
+} from '../mock_data';
+
+describe('Mutations Registry Store', () => {
+ let mockState;
+ beforeEach(() => {
+ mockState = defaultState;
+ });
+
+ describe('SET_MAIN_ENDPOINT', () => {
+ it('should set the main endpoint', () => {
+ const expectedState = Object.assign({}, mockState, { endpoint: 'foo' });
+ mutations[types.SET_MAIN_ENDPOINT](mockState, 'foo');
+ expect(mockState).toEqual(expectedState);
+ });
+ });
+
+ describe('SET_REPOS_LIST', () => {
+ it('should set a parsed repository list', () => {
+ mutations[types.SET_REPOS_LIST](mockState, reposServerResponse);
+ expect(mockState.repos).toEqual(parsedReposServerResponse);
+ });
+ });
+
+ describe('TOGGLE_MAIN_LOADING', () => {
+    it('should toggle the isLoading property', () => {
+ mutations[types.TOGGLE_MAIN_LOADING](mockState);
+ expect(mockState.isLoading).toEqual(true);
+ });
+ });
+
+ describe('SET_REGISTRY_LIST', () => {
+ it('should set a list of registries in a specific repository', () => {
+ mutations[types.SET_REPOS_LIST](mockState, reposServerResponse);
+ mutations[types.SET_REGISTRY_LIST](mockState, {
+ repo: mockState.repos[0],
+ resp: registryServerResponse,
+ headers: {
+ 'x-per-page': 2,
+ 'x-page': 1,
+ 'x-total': 10,
+ },
+ });
+
+ expect(mockState.repos[0].list).toEqual(parsedRegistryServerResponse);
+ expect(mockState.repos[0].pagination).toEqual({
+ perPage: 2,
+ page: 1,
+ total: 10,
+ totalPages: NaN,
+ nextPage: NaN,
+ previousPage: NaN,
+ });
+ });
+ });
+
+ describe('TOGGLE_REGISTRY_LIST_LOADING', () => {
+ it('should toggle isLoading property for a specific repository', () => {
+ mutations[types.SET_REPOS_LIST](mockState, reposServerResponse);
+ mutations[types.SET_REGISTRY_LIST](mockState, {
+ repo: mockState.repos[0],
+ resp: registryServerResponse,
+ headers: {
+ 'x-per-page': 2,
+ 'x-page': 1,
+ 'x-total': 10,
+ },
+ });
+
+ mutations[types.TOGGLE_REGISTRY_LIST_LOADING](mockState, mockState.repos[0]);
+ expect(mockState.repos[0].isLoading).toEqual(true);
+ });
+ });
+});
diff --git a/spec/javascripts/repo/components/repo_edit_button_spec.js b/spec/javascripts/repo/components/repo_edit_button_spec.js
index 29dc2d21e4b..411514009dc 100644
--- a/spec/javascripts/repo/components/repo_edit_button_spec.js
+++ b/spec/javascripts/repo/components/repo_edit_button_spec.js
@@ -21,13 +21,11 @@ describe('RepoEditButton', () => {
expect(vm.$el.textContent).toMatch('Edit');
spyOn(vm, 'editCancelClicked').and.callThrough();
- spyOn(vm, 'toggleProjectRefsForm');
vm.$el.click();
Vue.nextTick(() => {
expect(vm.editCancelClicked).toHaveBeenCalled();
- expect(vm.toggleProjectRefsForm).toHaveBeenCalled();
expect(vm.$el.textContent).toMatch('Cancel edit');
done();
});
diff --git a/spec/javascripts/repo/components/repo_file_spec.js b/spec/javascripts/repo/components/repo_file_spec.js
index 518a2d25ecf..620b604f404 100644
--- a/spec/javascripts/repo/components/repo_file_spec.js
+++ b/spec/javascripts/repo/components/repo_file_spec.js
@@ -1,5 +1,6 @@
import Vue from 'vue';
import repoFile from '~/repo/components/repo_file.vue';
+import RepoStore from '~/repo/stores/repo_store';
describe('RepoFile', () => {
const updated = 'updated';
@@ -12,8 +13,13 @@ describe('RepoFile', () => {
level: 10,
};
const activeFile = {
+ pageTitle: 'pageTitle',
url: 'url',
};
+ const otherFile = {
+ html: '<p class="file-content">html</p>',
+ pageTitle: 'otherpageTitle',
+ };
function createComponent(propsData) {
const RepoFile = Vue.extend(repoFile);
@@ -23,15 +29,17 @@ describe('RepoFile', () => {
}).$mount();
}
- beforeEach(() => {
- spyOn(repoFile.mixins[0].methods, 'timeFormated').and.returnValue(updated);
- });
-
it('renders link, icon, name and last commit details', () => {
- const vm = createComponent({
- file,
- activeFile,
+ const RepoFile = Vue.extend(repoFile);
+ const vm = new RepoFile({
+ propsData: {
+ file,
+ activeFile,
+ },
});
+ spyOn(vm, 'timeFormated').and.returnValue(updated);
+ vm.$mount();
+
const name = vm.$el.querySelector('.repo-file-name');
const fileIcon = vm.$el.querySelector('.file-icon');
@@ -60,6 +68,12 @@ describe('RepoFile', () => {
expect(vm.$el.querySelector('.fa-spin.fa-spinner')).toBeFalsy();
});
+ it('sets the document title correctly', () => {
+ RepoStore.setActiveFiles(otherFile);
+
+ expect(document.title.trim()).toEqual(otherFile.pageTitle);
+ });
+
it('renders a spinner if the file is loading', () => {
file.loading = true;
const vm = createComponent({
diff --git a/spec/javascripts/repo/components/repo_sidebar_spec.js b/spec/javascripts/repo/components/repo_sidebar_spec.js
index abcff8e537e..35d2b37ac2a 100644
--- a/spec/javascripts/repo/components/repo_sidebar_spec.js
+++ b/spec/javascripts/repo/components/repo_sidebar_spec.js
@@ -5,26 +5,34 @@ import RepoStore from '~/repo/stores/repo_store';
import repoSidebar from '~/repo/components/repo_sidebar.vue';
describe('RepoSidebar', () => {
+ let vm;
+
function createComponent() {
const RepoSidebar = Vue.extend(repoSidebar);
return new RepoSidebar().$mount();
}
+ afterEach(() => {
+ vm.$destroy();
+ });
+
it('renders a sidebar', () => {
RepoStore.files = [{
id: 0,
}];
RepoStore.openedFiles = [];
- const vm = createComponent();
+ RepoStore.isRoot = false;
+
+ vm = createComponent();
const thead = vm.$el.querySelector('thead');
const tbody = vm.$el.querySelector('tbody');
expect(vm.$el.id).toEqual('sidebar');
expect(vm.$el.classList.contains('sidebar-mini')).toBeFalsy();
expect(thead.querySelector('.name').textContent).toEqual('Name');
- expect(thead.querySelector('.last-commit').textContent).toEqual('Last Commit');
- expect(thead.querySelector('.last-update').textContent).toEqual('Last Update');
+ expect(thead.querySelector('.last-commit').textContent).toEqual('Last commit');
+ expect(thead.querySelector('.last-update').textContent).toEqual('Last update');
expect(tbody.querySelector('.repo-file-options')).toBeFalsy();
expect(tbody.querySelector('.prev-directory')).toBeFalsy();
expect(tbody.querySelector('.loading-file')).toBeFalsy();
@@ -35,7 +43,7 @@ describe('RepoSidebar', () => {
RepoStore.openedFiles = [{
id: 0,
}];
- const vm = createComponent();
+ vm = createComponent();
expect(vm.$el.classList.contains('sidebar-mini')).toBeTruthy();
expect(vm.$el.querySelector('thead')).toBeFalsy();
@@ -47,7 +55,7 @@ describe('RepoSidebar', () => {
tree: true,
};
RepoStore.files = [];
- const vm = createComponent();
+ vm = createComponent();
expect(vm.$el.querySelectorAll('tbody .loading-file').length).toEqual(5);
});
@@ -57,7 +65,7 @@ describe('RepoSidebar', () => {
id: 0,
}];
RepoStore.isRoot = true;
- const vm = createComponent();
+ vm = createComponent();
expect(vm.$el.querySelector('tbody .prev-directory')).toBeTruthy();
});
@@ -72,13 +80,27 @@ describe('RepoSidebar', () => {
};
RepoStore.files = [file1];
RepoStore.isRoot = true;
- const vm = createComponent();
+ vm = createComponent();
vm.fileClicked(file1);
expect(Helper.getContent).toHaveBeenCalledWith(file1);
});
+ it('should not fetch data for already opened files', () => {
+ const file = {
+ id: 42,
+ url: 'foo',
+ };
+
+ spyOn(Helper, 'getFileFromPath').and.returnValue(file);
+ spyOn(RepoStore, 'setActiveFiles');
+ vm = createComponent();
+ vm.fileClicked(file);
+
+ expect(RepoStore.setActiveFiles).toHaveBeenCalledWith(file);
+ });
+
it('should hide files in directory if already open', () => {
spyOn(RepoStore, 'removeChildFilesOfTree').and.callThrough();
const file1 = {
@@ -89,7 +111,7 @@ describe('RepoSidebar', () => {
};
RepoStore.files = [file1];
RepoStore.isRoot = true;
- const vm = createComponent();
+ vm = createComponent();
vm.fileClicked(file1);
@@ -100,12 +122,48 @@ describe('RepoSidebar', () => {
describe('goToPreviousDirectoryClicked', () => {
it('should hide files in directory if already open', () => {
const prevUrl = 'foo/bar';
- const vm = createComponent();
+ vm = createComponent();
vm.goToPreviousDirectoryClicked(prevUrl);
expect(RepoService.url).toEqual(prevUrl);
});
});
+
+ describe('back button', () => {
+ const file1 = {
+ id: 1,
+ url: 'file1',
+ };
+ const file2 = {
+ id: 2,
+ url: 'file2',
+ };
+ RepoStore.files = [file1, file2];
+ RepoStore.openedFiles = [file1, file2];
+ RepoStore.isRoot = true;
+
+ vm = createComponent();
+ vm.fileClicked(file1);
+
+    it('renders previous file when using back button', () => {
+ spyOn(Helper, 'getContent').and.callThrough();
+
+ vm.fileClicked(file2);
+ expect(Helper.getContent).toHaveBeenCalledWith(file2);
+ Helper.getContent.calls.reset();
+
+ history.pushState({
+ key: Math.random(),
+ }, '', file1.url);
+ const popEvent = document.createEvent('Event');
+ popEvent.initEvent('popstate', true, true);
+ window.dispatchEvent(popEvent);
+
+ expect(Helper.getContent.calls.mostRecent().args[0].url).toContain(file1.url);
+
+ window.history.pushState({}, null, '/');
+ });
+ });
});
});
diff --git a/spec/javascripts/right_sidebar_spec.js b/spec/javascripts/right_sidebar_spec.js
index f2072a6f350..5505f983d71 100644
--- a/spec/javascripts/right_sidebar_spec.js
+++ b/spec/javascripts/right_sidebar_spec.js
@@ -32,56 +32,86 @@ import '~/right_sidebar';
};
describe('RightSidebar', function() {
- var fixtureName = 'issues/open-issue.html.raw';
- preloadFixtures(fixtureName);
- loadJSONFixtures('todos/todos.json');
-
- beforeEach(function() {
- loadFixtures(fixtureName);
- this.sidebar = new Sidebar;
- $aside = $('.right-sidebar');
- $page = $('.page-with-sidebar');
- $icon = $aside.find('i');
- $toggle = $aside.find('.js-sidebar-toggle');
- return $labelsIcon = $aside.find('.sidebar-collapsed-icon');
- });
- it('should expand/collapse the sidebar when arrow is clicked', function() {
- assertSidebarState('expanded');
- $toggle.click();
- assertSidebarState('collapsed');
- $toggle.click();
- assertSidebarState('expanded');
- });
- it('should float over the page and when sidebar icons clicked', function() {
- $labelsIcon.click();
- return assertSidebarState('expanded');
- });
- it('should collapse when the icon arrow clicked while it is floating on page', function() {
- $labelsIcon.click();
- assertSidebarState('expanded');
- $toggle.click();
- return assertSidebarState('collapsed');
+ describe('fixture tests', () => {
+ var fixtureName = 'issues/open-issue.html.raw';
+ preloadFixtures(fixtureName);
+ loadJSONFixtures('todos/todos.json');
+
+ beforeEach(function() {
+ loadFixtures(fixtureName);
+ this.sidebar = new Sidebar;
+ $aside = $('.right-sidebar');
+ $page = $('.page-with-sidebar');
+ $icon = $aside.find('i');
+ $toggle = $aside.find('.js-sidebar-toggle');
+ return $labelsIcon = $aside.find('.sidebar-collapsed-icon');
+ });
+ it('should expand/collapse the sidebar when arrow is clicked', function() {
+ assertSidebarState('expanded');
+ $toggle.click();
+ assertSidebarState('collapsed');
+ $toggle.click();
+ assertSidebarState('expanded');
+ });
+ it('should float over the page and when sidebar icons clicked', function() {
+ $labelsIcon.click();
+ return assertSidebarState('expanded');
+ });
+ it('should collapse when the icon arrow clicked while it is floating on page', function() {
+ $labelsIcon.click();
+ assertSidebarState('expanded');
+ $toggle.click();
+ return assertSidebarState('collapsed');
+ });
+
+ it('should broadcast todo:toggle event when add todo clicked', function() {
+ var todos = getJSONFixture('todos/todos.json');
+ spyOn(jQuery, 'ajax').and.callFake(function() {
+ var d = $.Deferred();
+ var response = todos;
+ d.resolve(response);
+ return d.promise();
+ });
+
+ var todoToggleSpy = spyOnEvent(document, 'todo:toggle');
+
+ $('.issuable-sidebar-header .js-issuable-todo').click();
+
+ expect(todoToggleSpy.calls.count()).toEqual(1);
+ });
+
+ it('should not hide collapsed icons', () => {
+ [].forEach.call(document.querySelectorAll('.sidebar-collapsed-icon'), (el) => {
+ expect(el.querySelector('.fa, svg').classList.contains('hidden')).toBeFalsy();
+ });
+ });
});
- it('should broadcast todo:toggle event when add todo clicked', function() {
- var todos = getJSONFixture('todos/todos.json');
- spyOn(jQuery, 'ajax').and.callFake(function() {
- var d = $.Deferred();
- var response = todos;
- d.resolve(response);
- return d.promise();
+ describe('sidebarToggleClicked', () => {
+ const event = jasmine.createSpyObj('event', ['preventDefault']);
+
+ beforeEach(() => {
+ spyOn($.fn, 'hasClass').and.returnValue(false);
+ });
+
+ afterEach(() => {
+ gl.lazyLoader = undefined;
});
- var todoToggleSpy = spyOnEvent(document, 'todo:toggle');
+ it('calls loadCheck if lazyLoader is set', () => {
+ gl.lazyLoader = jasmine.createSpyObj('lazyLoader', ['loadCheck']);
- $('.issuable-sidebar-header .js-issuable-todo').click();
+ Sidebar.prototype.sidebarToggleClicked(event);
- expect(todoToggleSpy.calls.count()).toEqual(1);
- });
+ expect(gl.lazyLoader.loadCheck).toHaveBeenCalled();
+ });
+
+ it('does not throw if lazyLoader is not defined', () => {
+ gl.lazyLoader = undefined;
+
+ const toggle = Sidebar.prototype.sidebarToggleClicked.bind(null, event);
- it('should not hide collapsed icons', () => {
- [].forEach.call(document.querySelectorAll('.sidebar-collapsed-icon'), (el) => {
- expect(el.querySelector('.fa, svg').classList.contains('hidden')).toBeFalsy();
+ expect(toggle).not.toThrow();
});
});
});
diff --git a/spec/javascripts/sidebar/lock/edit_form_buttons_spec.js b/spec/javascripts/sidebar/lock/edit_form_buttons_spec.js
new file mode 100644
index 00000000000..b0ea8ae0206
--- /dev/null
+++ b/spec/javascripts/sidebar/lock/edit_form_buttons_spec.js
@@ -0,0 +1,36 @@
+import Vue from 'vue';
+import editFormButtons from '~/sidebar/components/lock/edit_form_buttons.vue';
+import mountComponent from '../../helpers/vue_mount_component_helper';
+
+describe('EditFormButtons', () => {
+ let vm1;
+ let vm2;
+
+ beforeEach(() => {
+ const Component = Vue.extend(editFormButtons);
+ const toggleForm = () => { };
+ const updateLockedAttribute = () => { };
+
+ vm1 = mountComponent(Component, {
+ isLocked: true,
+ toggleForm,
+ updateLockedAttribute,
+ });
+
+ vm2 = mountComponent(Component, {
+ isLocked: false,
+ toggleForm,
+ updateLockedAttribute,
+ });
+ });
+
+ it('renders unlock or lock text based on locked state', () => {
+ expect(
+ vm1.$el.innerHTML.includes('Unlock'),
+ ).toBe(true);
+
+ expect(
+ vm2.$el.innerHTML.includes('Lock'),
+ ).toBe(true);
+ });
+});
diff --git a/spec/javascripts/sidebar/lock/edit_form_spec.js b/spec/javascripts/sidebar/lock/edit_form_spec.js
new file mode 100644
index 00000000000..7abd6997a18
--- /dev/null
+++ b/spec/javascripts/sidebar/lock/edit_form_spec.js
@@ -0,0 +1,41 @@
+import Vue from 'vue';
+import editForm from '~/sidebar/components/lock/edit_form.vue';
+
+describe('EditForm', () => {
+ let vm1;
+ let vm2;
+
+ beforeEach(() => {
+ const Component = Vue.extend(editForm);
+ const toggleForm = () => { };
+ const updateLockedAttribute = () => { };
+
+ vm1 = new Component({
+ propsData: {
+ isLocked: true,
+ toggleForm,
+ updateLockedAttribute,
+ issuableType: 'issue',
+ },
+ }).$mount();
+
+ vm2 = new Component({
+ propsData: {
+ isLocked: false,
+ toggleForm,
+ updateLockedAttribute,
+ issuableType: 'merge_request',
+ },
+ }).$mount();
+ });
+
+  it('renders the appropriate warning text', () => {
+ expect(
+ vm1.$el.innerHTML.includes('Unlock this issue?'),
+ ).toBe(true);
+
+ expect(
+ vm2.$el.innerHTML.includes('Lock this merge request?'),
+ ).toBe(true);
+ });
+});
diff --git a/spec/javascripts/sidebar/lock/lock_issue_sidebar_spec.js b/spec/javascripts/sidebar/lock/lock_issue_sidebar_spec.js
new file mode 100644
index 00000000000..696fca516bc
--- /dev/null
+++ b/spec/javascripts/sidebar/lock/lock_issue_sidebar_spec.js
@@ -0,0 +1,71 @@
+import Vue from 'vue';
+import lockIssueSidebar from '~/sidebar/components/lock/lock_issue_sidebar.vue';
+
+describe('LockIssueSidebar', () => {
+ let vm1;
+ let vm2;
+
+ beforeEach(() => {
+ const Component = Vue.extend(lockIssueSidebar);
+
+ const mediator = {
+ service: {
+ update: Promise.resolve(true),
+ },
+
+ store: {
+ isLockDialogOpen: false,
+ },
+ };
+
+ vm1 = new Component({
+ propsData: {
+ isLocked: true,
+ isEditable: true,
+ mediator,
+ issuableType: 'issue',
+ },
+ }).$mount();
+
+ vm2 = new Component({
+ propsData: {
+ isLocked: false,
+ isEditable: false,
+ mediator,
+ issuableType: 'merge_request',
+ },
+ }).$mount();
+ });
+
+ it('shows if locked and/or editable', () => {
+ expect(
+ vm1.$el.innerHTML.includes('Edit'),
+ ).toBe(true);
+
+ expect(
+ vm1.$el.innerHTML.includes('Locked'),
+ ).toBe(true);
+
+ expect(
+ vm2.$el.innerHTML.includes('Unlocked'),
+ ).toBe(true);
+ });
+
+ it('displays the edit form when editable', (done) => {
+ expect(vm1.isLockDialogOpen).toBe(false);
+
+ vm1.$el.querySelector('.lock-edit').click();
+
+ expect(vm1.isLockDialogOpen).toBe(true);
+
+ vm1.$nextTick(() => {
+ expect(
+ vm1.$el
+ .innerHTML
+ .includes('Unlock this issue?'),
+ ).toBe(true);
+
+ done();
+ });
+ });
+});
diff --git a/spec/javascripts/todos_spec.js b/spec/javascripts/todos_spec.js
index fd492159081..7d3c9319a11 100644
--- a/spec/javascripts/todos_spec.js
+++ b/spec/javascripts/todos_spec.js
@@ -26,37 +26,30 @@ describe('Todos', () => {
describe('meta click', () => {
let visitUrlSpy;
+ let windowOpenSpy;
+ let metakeyEvent;
beforeEach(() => {
- spyOn(gl.utils, 'isMetaClick').and.returnValue(true);
+ metakeyEvent = $.Event('click', { keyCode: 91, ctrlKey: true });
visitUrlSpy = spyOn(gl.utils, 'visitUrl').and.callFake(() => {});
+ windowOpenSpy = spyOn(window, 'open').and.callFake(() => {});
});
- it('opens the todo url in another tab', (done) => {
+ it('opens the todo url in another tab', () => {
const todoLink = todoItem.dataset.url;
- spyOn(window, 'open').and.callFake((url, target) => {
- expect(todoLink).toEqual(url);
- expect(target).toEqual('_blank');
- done();
- });
+ $('.todos-list .todo').trigger(metakeyEvent);
- todoItem.click();
expect(visitUrlSpy).not.toHaveBeenCalled();
+ expect(windowOpenSpy).toHaveBeenCalledWith(todoLink, '_blank');
});
- it('opens the avatar\'s url in another tab when the avatar is clicked', (done) => {
- const avatarImage = todoItem.querySelector('img');
- const avatarUrl = avatarImage.parentElement.getAttribute('href');
+    it('runs native functionality when avatar is clicked', () => {
+ $('.todos-list a').on('click', e => e.preventDefault());
+ $('.todos-list img').trigger(metakeyEvent);
- spyOn(window, 'open').and.callFake((url, target) => {
- expect(avatarUrl).toEqual(url);
- expect(target).toEqual('_blank');
- done();
- });
-
- avatarImage.click();
expect(visitUrlSpy).not.toHaveBeenCalled();
+ expect(windowOpenSpy).not.toHaveBeenCalled();
});
});
});
diff --git a/spec/javascripts/user_callout_spec.js b/spec/javascripts/user_callout_spec.js
deleted file mode 100644
index 69cb93bd850..00000000000
--- a/spec/javascripts/user_callout_spec.js
+++ /dev/null
@@ -1,49 +0,0 @@
-import Cookies from 'js-cookie';
-import UserCallout from '~/user_callout';
-
-const USER_CALLOUT_COOKIE = 'user_callout_dismissed';
-
-describe('UserCallout', function () {
- const fixtureName = 'dashboard/user-callout.html.raw';
- preloadFixtures(fixtureName);
-
- beforeEach(() => {
- loadFixtures(fixtureName);
- Cookies.remove(USER_CALLOUT_COOKIE);
-
- this.userCallout = new UserCallout();
- this.closeButton = $('.js-close-callout.close');
- this.userCalloutBtn = $('.js-close-callout:not(.close)');
- });
-
- it('hides when user clicks on the dismiss-icon', (done) => {
- this.closeButton.click();
- expect(Cookies.get(USER_CALLOUT_COOKIE)).toBe('true');
-
- setTimeout(() => {
- expect(
- document.querySelector('.user-callout'),
- ).toBeNull();
-
- done();
- });
- });
-
- it('hides when user clicks on the "check it out" button', () => {
- this.userCalloutBtn.click();
- expect(Cookies.get(USER_CALLOUT_COOKIE)).toBe('true');
- });
-
- describe('Sets cookie with setCalloutPerProject', () => {
- beforeEach(() => {
- spyOn(Cookies, 'set').and.callFake(() => {});
- document.querySelector('.user-callout').setAttribute('data-project-path', 'foo/bar');
- this.userCallout = new UserCallout({ setCalloutPerProject: true });
- });
-
- it('sets a cookie when the user clicks the close button', () => {
- this.userCalloutBtn.click();
- expect(Cookies.set).toHaveBeenCalledWith('user_callout_dismissed', 'true', Object({ expires: 365, path: 'foo/bar' }));
- });
- });
-});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
index c763487d12f..690665ae12c 100644
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
@@ -37,6 +37,26 @@ describe('MRWidgetPipeline', () => {
});
});
+ describe('hasPipeline', () => {
+ it('should return true when there is a pipeline', () => {
+ expect(Object.keys(mockData.pipeline).length).toBeGreaterThan(0);
+
+ const vm = createComponent({
+ pipeline: mockData.pipeline,
+ });
+
+ expect(vm.hasPipeline).toBeTruthy();
+ });
+
+ it('should return false when there is no pipeline', () => {
+ const vm = createComponent({
+ pipeline: null,
+ });
+
+ expect(vm.hasPipeline).toBeFalsy();
+ });
+ });
+
describe('hasCIError', () => {
it('should return false when there is no CI error', () => {
const vm = createComponent({
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js
index 47303d1e80f..d23b558f4ea 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js
@@ -4,11 +4,15 @@ import closedComponent from '~/vue_merge_request_widget/components/states/mr_wid
const mr = {
targetBranch: 'good-branch',
targetBranchPath: '/good-branch',
- closedBy: {
- name: 'Fatih Acet',
- username: 'fatihacet',
+ closedEvent: {
+ author: {
+ name: 'Fatih Acet',
+ username: 'fatihacet',
+ },
+ updatedAt: 'closedEventUpdatedAt',
+ formattedUpdatedAt: '',
},
- updatedAt: '2017-03-23T20:08:08.845Z',
+ updatedAt: 'mrUpdatedAt',
closedAt: '1 day ago',
};
@@ -18,7 +22,7 @@ const createComponent = () => {
return new Component({
el: document.createElement('div'),
propsData: { mr },
- }).$el;
+ });
};
describe('MRWidgetClosed', () => {
@@ -38,14 +42,30 @@ describe('MRWidgetClosed', () => {
});
describe('template', () => {
- it('should have correct elements', () => {
- const el = createComponent();
+ let vm;
+ let el;
+ beforeEach(() => {
+ vm = createComponent();
+ el = vm.$el;
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('should have correct elements', () => {
expect(el.querySelector('h4').textContent).toContain('Closed by');
- expect(el.querySelector('h4').textContent).toContain(mr.closedBy.name);
+ expect(el.querySelector('h4').textContent).toContain(mr.closedEvent.author.name);
expect(el.textContent).toContain('The changes were not merged into');
expect(el.querySelector('.label-branch').getAttribute('href')).toEqual(mr.targetBranchPath);
expect(el.querySelector('.label-branch').textContent).toContain(mr.targetBranch);
});
+
+ it('should use closedEvent updatedAt as tooltip title', () => {
+ expect(
+ el.querySelector('time').getAttribute('title'),
+ ).toBe('closedEventUpdatedAt');
+ });
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
index 3b7b7d93662..5d4c7ec09dc 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
@@ -1,20 +1,9 @@
import Vue from 'vue';
import conflictsComponent from '~/vue_merge_request_widget/components/states/mr_widget_conflicts';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
+const ConflictsComponent = Vue.extend(conflictsComponent);
const path = '/conflicts';
-const createComponent = () => {
- const Component = Vue.extend(conflictsComponent);
-
- return new Component({
- el: document.createElement('div'),
- propsData: {
- mr: {
- canMerge: true,
- conflictResolutionPath: path,
- },
- },
- });
-};
describe('MRWidgetConflicts', () => {
describe('props', () => {
@@ -27,44 +16,90 @@ describe('MRWidgetConflicts', () => {
});
describe('template', () => {
- it('should have correct elements', () => {
- const el = createComponent().$el;
- const resolveButton = el.querySelector('.js-resolve-conflicts-button');
- const mergeButton = el.querySelector('.mr-widget-body .btn');
- const mergeLocallyButton = el.querySelector('.js-merge-locally-button');
-
- expect(el.textContent).toContain('There are merge conflicts');
- expect(el.textContent).not.toContain('ask someone with write access');
- expect(el.querySelector('.btn-success').disabled).toBeTruthy();
- expect(resolveButton.textContent).toContain('Resolve conflicts');
- expect(resolveButton.getAttribute('href')).toEqual(path);
- expect(mergeButton.textContent).toContain('Merge');
- expect(mergeLocallyButton.textContent).toContain('Merge locally');
+ describe('when allowed to merge', () => {
+ let vm;
+
+ beforeEach(() => {
+ vm = mountComponent(ConflictsComponent, {
+ mr: {
+ canMerge: true,
+ conflictResolutionPath: path,
+ },
+ });
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('should tell you about conflicts without bothering other people', () => {
+ expect(vm.$el.textContent).toContain('There are merge conflicts');
+ expect(vm.$el.textContent).not.toContain('ask someone with write access');
+ });
+
+ it('should allow you to resolve the conflicts', () => {
+ const resolveButton = vm.$el.querySelector('.js-resolve-conflicts-button');
+
+ expect(resolveButton.textContent).toContain('Resolve conflicts');
+ expect(resolveButton.getAttribute('href')).toEqual(path);
+ });
+
+ it('should have merge buttons', () => {
+ const mergeButton = vm.$el.querySelector('.js-disabled-merge-button');
+ const mergeLocallyButton = vm.$el.querySelector('.js-merge-locally-button');
+
+ expect(mergeButton.textContent).toContain('Merge');
+ expect(mergeButton.disabled).toBeTruthy();
+ expect(mergeButton.classList.contains('btn-success')).toEqual(true);
+ expect(mergeLocallyButton.textContent).toContain('Merge locally');
+ });
});
describe('when user does not have permission to merge', () => {
let vm;
beforeEach(() => {
- vm = createComponent();
- vm.mr.canMerge = false;
+ vm = mountComponent(ConflictsComponent, {
+ mr: {
+ canMerge: false,
+ },
+ });
});
- it('should show proper message', (done) => {
- Vue.nextTick(() => {
- expect(vm.$el.textContent).toContain('ask someone with write access');
- done();
- });
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('should show proper message', () => {
+ expect(vm.$el.textContent).toContain('ask someone with write access');
+ });
+
+ it('should not have action buttons', () => {
+ expect(vm.$el.querySelector('.js-disabled-merge-button')).toBeDefined();
+ expect(vm.$el.querySelector('.js-resolve-conflicts-button')).toBeNull();
+ expect(vm.$el.querySelector('.js-merge-locally-button')).toBeNull();
});
+ });
- it('should not have action buttons', (done) => {
- Vue.nextTick(() => {
- expect(vm.$el.querySelectorAll('.btn').length).toBe(1);
- expect(vm.$el.querySelector('.js-resolve-conflicts-button')).toEqual(null);
- expect(vm.$el.querySelector('.js-merge-locally-button')).toEqual(null);
- done();
+ describe('when fast-forward or semi-linear merge enabled', () => {
+ let vm;
+
+ beforeEach(() => {
+ vm = mountComponent(ConflictsComponent, {
+ mr: {
+ shouldBeRebased: true,
+ },
});
});
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('should tell you to rebase locally', () => {
+ expect(vm.$el.textContent).toContain('Fast-forward merge is not possible.');
+ expect(vm.$el.textContent).toContain('To merge this request, first rebase locally');
+ });
});
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js
index afaa750199a..2714e8294fa 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js
@@ -14,9 +14,12 @@ const createComponent = () => {
canRevertInCurrentMR: true,
canRemoveSourceBranch: true,
sourceBranchRemoved: true,
- mergedBy: {},
- mergedAt: '',
- updatedAt: '',
+ mergedEvent: {
+ author: {},
+ updatedAt: 'mergedUpdatedAt',
+ formattedUpdatedAt: '',
+ },
+ updatedAt: 'mrUpdatedAt',
targetBranch,
};
@@ -170,5 +173,11 @@ describe('MRWidgetMerged', () => {
done();
});
});
+
+ it('should use mergedEvent updatedAt as tooltip title', () => {
+ expect(
+ el.querySelector('time').getAttribute('title'),
+ ).toBe('mergedUpdatedAt');
+ });
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index c607c9746a4..d7019ea408b 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -11,6 +11,8 @@ const createComponent = (customConfig = {}) => {
isPipelineActive: false,
pipeline: null,
isPipelineFailed: false,
+ isPipelinePassing: false,
+ isMergeAllowed: true,
onlyAllowMergeIfPipelineSucceeds: false,
hasCI: false,
ciStatus: null,
@@ -68,6 +70,18 @@ describe('MRWidgetReadyToMerge', () => {
});
describe('computed', () => {
+ describe('shouldShowMergeWhenPipelineSucceedsText', () => {
+ it('should return true with active pipeline', () => {
+ vm.mr.isPipelineActive = true;
+ expect(vm.shouldShowMergeWhenPipelineSucceedsText).toBeTruthy();
+ });
+
+ it('should return false with inactive pipeline', () => {
+ vm.mr.isPipelineActive = false;
+ expect(vm.shouldShowMergeWhenPipelineSucceedsText).toBeFalsy();
+ });
+ });
+
describe('commitMessageLinkTitle', () => {
const withDesc = 'Include description in commit message';
const withoutDesc = "Don't include description in commit message";
@@ -82,35 +96,84 @@ describe('MRWidgetReadyToMerge', () => {
});
});
+ describe('status', () => {
+ it('defaults to success', () => {
+ vm.mr.pipeline = true;
+ expect(vm.status).toEqual('success');
+ });
+
+ it('returns failed when MR has CI but also has an unknown status', () => {
+ vm.mr.hasCI = true;
+ expect(vm.status).toEqual('failed');
+ });
+
+ it('returns default when MR has no pipeline', () => {
+ expect(vm.status).toEqual('success');
+ });
+
+ it('returns pending when pipeline is active', () => {
+ vm.mr.pipeline = {};
+ vm.mr.isPipelineActive = true;
+ expect(vm.status).toEqual('pending');
+ });
+
+ it('returns failed when pipeline is failed', () => {
+ vm.mr.pipeline = {};
+ vm.mr.isPipelineFailed = true;
+ expect(vm.status).toEqual('failed');
+ });
+ });
+
describe('mergeButtonClass', () => {
- const defaultClass = 'btn btn-small btn-success accept-merge-request';
+ const defaultClass = 'btn btn-sm btn-success accept-merge-request';
const failedClass = `${defaultClass} btn-danger`;
const inActionClass = `${defaultClass} btn-info`;
- it('should return default class', () => {
+ it('defaults to success class', () => {
+ expect(vm.mergeButtonClass).toEqual(defaultClass);
+ });
+
+ it('returns success class for success status', () => {
vm.mr.pipeline = true;
expect(vm.mergeButtonClass).toEqual(defaultClass);
});
- it('should return failed class when MR has CI but also has an unknown status', () => {
+ it('returns info class for pending status', () => {
+ vm.mr.pipeline = {};
+ vm.mr.isPipelineActive = true;
+ expect(vm.mergeButtonClass).toEqual(inActionClass);
+ });
+
+ it('returns failed class for failed status', () => {
vm.mr.hasCI = true;
expect(vm.mergeButtonClass).toEqual(failedClass);
});
+ });
- it('should return default class when MR has no pipeline', () => {
- expect(vm.mergeButtonClass).toEqual(defaultClass);
+ describe('status icon', () => {
+ it('defaults to tick icon', () => {
+ expect(vm.iconClass).toEqual('success');
+ });
+
+ it('shows tick for success status', () => {
+ vm.mr.pipeline = true;
+ expect(vm.iconClass).toEqual('success');
});
- it('should return in action class when pipeline is active', () => {
+ it('shows tick for pending status', () => {
vm.mr.pipeline = {};
vm.mr.isPipelineActive = true;
- expect(vm.mergeButtonClass).toEqual(inActionClass);
+ expect(vm.iconClass).toEqual('success');
});
- it('should return failed class when pipeline is failed', () => {
- vm.mr.pipeline = {};
- vm.mr.isPipelineFailed = true;
- expect(vm.mergeButtonClass).toEqual(failedClass);
+ it('shows x for failed status', () => {
+ vm.mr.hasCI = true;
+ expect(vm.iconClass).toEqual('failed');
+ });
+
+ it('shows x for merge not allowed', () => {
+ vm.mr.hasCI = true;
+ expect(vm.iconClass).toEqual('failed');
});
});
@@ -150,72 +213,54 @@ describe('MRWidgetReadyToMerge', () => {
describe('isMergeButtonDisabled', () => {
it('should return false with initial data', () => {
+ vm.mr.isMergeAllowed = true;
expect(vm.isMergeButtonDisabled).toBeFalsy();
});
it('should return true when there is no commit message', () => {
+ vm.mr.isMergeAllowed = true;
vm.commitMessage = '';
expect(vm.isMergeButtonDisabled).toBeTruthy();
});
it('should return true if merge is not allowed', () => {
+ vm.mr.isMergeAllowed = false;
vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
- vm.mr.isPipelineFailed = true;
expect(vm.isMergeButtonDisabled).toBeTruthy();
});
- it('should return true when there vm instance is making request', () => {
+ it('should return true when the vm instance is making request', () => {
+ vm.mr.isMergeAllowed = true;
vm.isMakingRequest = true;
expect(vm.isMergeButtonDisabled).toBeTruthy();
});
});
-
- describe('Remove source branch checkbox', () => {
- describe('when user can merge but cannot delete branch', () => {
- it('isRemoveSourceBranchButtonDisabled should be true', () => {
- expect(vm.isRemoveSourceBranchButtonDisabled).toBe(true);
- });
-
- it('should be disabled in the rendered output', () => {
- const checkboxElement = vm.$el.querySelector('#remove-source-branch-input');
- expect(checkboxElement.getAttribute('disabled')).toBe('disabled');
- });
- });
-
- describe('when user can merge and can delete branch', () => {
- beforeEach(() => {
- this.customVm = createComponent({
- mr: { canRemoveSourceBranch: true },
- });
- });
-
- it('isRemoveSourceBranchButtonDisabled should be false', () => {
- expect(this.customVm.isRemoveSourceBranchButtonDisabled).toBe(false);
- });
-
- it('should be enabled in rendered output', () => {
- const checkboxElement = this.customVm.$el.querySelector('#remove-source-branch-input');
- expect(checkboxElement.getAttribute('disabled')).toBeNull();
- });
- });
- });
});
describe('methods', () => {
- describe('isMergeAllowed', () => {
- it('should return false with initial data', () => {
- expect(vm.isMergeAllowed()).toBeTruthy();
+ describe('shouldShowMergeControls', () => {
+ it('should return false when an external pipeline is running and required to succeed', () => {
+ vm.mr.isMergeAllowed = false;
+ vm.mr.isPipelineActive = false;
+ expect(vm.shouldShowMergeControls()).toBeFalsy();
});
- it('should return false when MR is set only merge when pipeline succeeds', () => {
- vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
- expect(vm.isMergeAllowed()).toBeTruthy();
+ it('should return true when the build succeeded or is not required to succeed', () => {
+ vm.mr.isMergeAllowed = true;
+ vm.mr.isPipelineActive = false;
+ expect(vm.shouldShowMergeControls()).toBeTruthy();
});
- it('should return false when merge requires a passing pipeline and the pipeline failed', () => {
- vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
- vm.mr.isPipelineFailed = true;
- expect(vm.isMergeAllowed()).toBeFalsy();
+ it('should return true when showing the MWPS button and a pipeline is running that needs to be successful', () => {
+ vm.mr.isMergeAllowed = false;
+ vm.mr.isPipelineActive = true;
+ expect(vm.shouldShowMergeControls()).toBeTruthy();
+ });
+
+ it('should return true when showing the MWPS button but not required for the pipeline to succeed', () => {
+ vm.mr.isMergeAllowed = true;
+ vm.mr.isPipelineActive = true;
+ expect(vm.shouldShowMergeControls()).toBeTruthy();
});
});
@@ -419,4 +464,54 @@ describe('MRWidgetReadyToMerge', () => {
});
});
});
+
+ describe('Remove source branch checkbox', () => {
+ describe('when user can merge but cannot delete branch', () => {
+ it('isRemoveSourceBranchButtonDisabled should be true', () => {
+ expect(vm.isRemoveSourceBranchButtonDisabled).toBe(true);
+ });
+
+ it('should be disabled in the rendered output', () => {
+ const checkboxElement = vm.$el.querySelector('#remove-source-branch-input');
+ expect(checkboxElement.getAttribute('disabled')).toBe('disabled');
+ });
+ });
+
+ describe('when user can merge and can delete branch', () => {
+ beforeEach(() => {
+ this.customVm = createComponent({
+ mr: { canRemoveSourceBranch: true },
+ });
+ });
+
+ it('isRemoveSourceBranchButtonDisabled should be false', () => {
+ expect(this.customVm.isRemoveSourceBranchButtonDisabled).toBe(false);
+ });
+
+ it('should be enabled in rendered output', () => {
+ const checkboxElement = this.customVm.$el.querySelector('#remove-source-branch-input');
+ expect(checkboxElement.getAttribute('disabled')).toBeNull();
+ });
+ });
+ });
+
+ describe('Commit message area', () => {
+ it('when using merge commits, should show "Modify commit message" button', () => {
+ const customVm = createComponent({
+ mr: { ffOnlyEnabled: false },
+ });
+
+ expect(customVm.$el.querySelector('.js-fast-forward-message')).toBeNull();
+ expect(customVm.$el.querySelector('.js-modify-commit-message-button')).toBeDefined();
+ });
+
+ it('when fast-forward merge is enabled, only show fast-forward message', () => {
+ const customVm = createComponent({
+ mr: { ffOnlyEnabled: true },
+ });
+
+ expect(customVm.$el.querySelector('.js-fast-forward-message')).toBeDefined();
+ expect(customVm.$el.querySelector('.js-modify-commit-message-button')).toBeNull();
+ });
+ });
});
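For context, the `createComponent(customConfig)` factory at the top of this spec (partially visible in the first hunk above) builds a default `mr` object and then applies per-test overrides before mounting. A rough sketch of that pattern; the component import path, the merge step, and the empty `service` stub are assumptions rather than the exact implementation:

    import Vue from 'vue';
    import readyToMergeComponent from '~/vue_mr_widget/components/states/mr_widget_ready_to_merge'; // assumed path

    const createComponent = (customConfig = {}) => {
      const Component = Vue.extend(readyToMergeComponent);
      const mr = {
        isPipelineActive: false,
        pipeline: null,
        isPipelineFailed: false,
        isPipelinePassing: false,
        isMergeAllowed: true,
        onlyAllowMergeIfPipelineSucceeds: false,
        hasCI: false,
        ciStatus: null,
        ...customConfig.mr, // per-test overrides, e.g. { ffOnlyEnabled: true }
      };
      return new Component({ propsData: { mr, service: {} } }).$mount();
    };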
diff --git a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
index 669ee248bf1..e4324e91502 100644
--- a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
@@ -59,23 +59,15 @@ describe('mrWidgetOptions', () => {
});
describe('shouldRenderPipelines', () => {
- it('should return true for the initial data', () => {
- expect(vm.shouldRenderPipelines).toBeTruthy();
- });
+ it('should return true when hasCI is true', () => {
+ vm.mr.hasCI = true;
- it('should return true when pipeline is empty but MR.hasCI is set to true', () => {
- vm.mr.pipeline = {};
expect(vm.shouldRenderPipelines).toBeTruthy();
});
- it('should return true when pipeline available', () => {
+ it('should return false when hasCI is false', () => {
vm.mr.hasCI = false;
- expect(vm.shouldRenderPipelines).toBeTruthy();
- });
- it('should return false when there is no pipeline', () => {
- vm.mr.pipeline = {};
- vm.mr.hasCI = false;
expect(vm.shouldRenderPipelines).toBeFalsy();
});
});
@@ -232,29 +224,41 @@ describe('mrWidgetOptions', () => {
describe('handleMounted', () => {
it('should call required methods to do the initial kick-off', () => {
spyOn(vm, 'initDeploymentsPolling');
- spyOn(vm, 'setFavicon');
+ spyOn(vm, 'setFaviconHelper');
vm.handleMounted();
- expect(vm.setFavicon).toHaveBeenCalled();
+ expect(vm.setFaviconHelper).toHaveBeenCalled();
expect(vm.initDeploymentsPolling).toHaveBeenCalled();
});
});
describe('setFavicon', () => {
+ let faviconElement;
+
+ beforeEach(() => {
+ const favicon = document.createElement('link');
+ favicon.setAttribute('id', 'favicon');
+ document.body.appendChild(favicon);
+
+ faviconElement = document.getElementById('favicon');
+ });
+
+ afterEach(() => {
+ document.body.removeChild(document.getElementById('favicon'));
+ });
+
it('should call setFavicon method', () => {
- spyOn(gl.utils, 'setFavicon');
- vm.setFavicon();
+ vm.setFaviconHelper();
- expect(gl.utils.setFavicon).toHaveBeenCalledWith(vm.mr.ciStatusFaviconPath);
+ expect(faviconElement.getAttribute('href')).toEqual(vm.mr.ciStatusFaviconPath);
});
it('should not call setFavicon when there is no ciStatusFaviconPath', () => {
- spyOn(gl.utils, 'setFavicon');
vm.mr.ciStatusFaviconPath = null;
- vm.setFavicon();
+ vm.setFaviconHelper();
- expect(gl.utils.setFavicon).not.toHaveBeenCalled();
+ expect(faviconElement.getAttribute('href')).toEqual(null);
});
});
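The rewritten favicon tests above exercise a `setFaviconHelper` method by asserting against a real `<link id="favicon">` element instead of spying on `gl.utils.setFavicon`. A minimal sketch of the behaviour those assertions imply; how the widget actually looks up the element is an assumption:

    // Sketch of a component method consistent with the specs above.
    function setFaviconHelper() {
      if (!this.mr.ciStatusFaviconPath) return; // leaves the favicon href untouched (null in the spec)

      const faviconElement = document.getElementById('favicon');
      if (faviconElement) {
        faviconElement.setAttribute('href', this.mr.ciStatusFaviconPath);
      }
    }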
diff --git a/spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js b/spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js
index b63633c03b8..e667b4b3677 100644
--- a/spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js
+++ b/spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js
@@ -31,6 +31,7 @@ describe('MRWidgetService', () => {
});
it('should have methods defined', () => {
+ window.history.pushState({}, null, '/');
const service = new MRWidgetService(mr);
expect(service.merge()).toBeDefined();
diff --git a/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js b/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js
index 56dd0198ae2..8e5614b20f0 100644
--- a/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js
+++ b/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js
@@ -18,5 +18,39 @@ describe('MergeRequestStore', () => {
store.setData({ ...mockData, work_in_progress: !mockData.work_in_progress });
expect(store.hasSHAChanged).toBe(false);
});
+
+ describe('isPipelinePassing', () => {
+ it('is true when the CI status is `success`', () => {
+ store.setData({ ...mockData, ci_status: 'success' });
+ expect(store.isPipelinePassing).toBe(true);
+ });
+
+ it('is true when the CI status is `success_with_warnings`', () => {
+ store.setData({ ...mockData, ci_status: 'success_with_warnings' });
+ expect(store.isPipelinePassing).toBe(true);
+ });
+
+ it('is false when the CI status is `failed`', () => {
+ store.setData({ ...mockData, ci_status: 'failed' });
+ expect(store.isPipelinePassing).toBe(false);
+ });
+
+ it('is false when the CI status is anything except `success`', () => {
+ store.setData({ ...mockData, ci_status: 'foobarbaz' });
+ expect(store.isPipelinePassing).toBe(false);
+ });
+ });
+
+ describe('isPipelineSkipped', () => {
+ it('should set isPipelineSkipped=true when the CI status is `skipped`', () => {
+ store.setData({ ...mockData, ci_status: 'skipped' });
+ expect(store.isPipelineSkipped).toBe(true);
+ });
+
+ it('should set isPipelineSkipped=false when the CI status is anything except `skipped`', () => {
+ store.setData({ ...mockData, ci_status: 'foobarbaz' });
+ expect(store.isPipelineSkipped).toBe(false);
+ });
+ });
});
});
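The new store specs pin down how `isPipelinePassing` and `isPipelineSkipped` are derived from `ci_status`. A sketch of the derivation they describe, assuming it happens inside `MergeRequestStore#setData` (the real method assigns many more fields than shown):

    setData(data) {
      this.ciStatus = data.ci_status;
      this.isPipelinePassing =
        this.ciStatus === 'success' || this.ciStatus === 'success_with_warnings';
      this.isPipelineSkipped = this.ciStatus === 'skipped';
      // ...remaining fields omitted in this sketch
    }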
diff --git a/spec/javascripts/vue_shared/components/issue/confidential_issue_warning_spec.js b/spec/javascripts/vue_shared/components/issue/confidential_issue_warning_spec.js
deleted file mode 100644
index 6df08f3ebe7..00000000000
--- a/spec/javascripts/vue_shared/components/issue/confidential_issue_warning_spec.js
+++ /dev/null
@@ -1,20 +0,0 @@
-import Vue from 'vue';
-import confidentialIssue from '~/vue_shared/components/issue/confidential_issue_warning.vue';
-
-describe('Confidential Issue Warning Component', () => {
- let vm;
-
- beforeEach(() => {
- const Component = Vue.extend(confidentialIssue);
- vm = new Component().$mount();
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- it('should render confidential issue warning information', () => {
- expect(vm.$el.querySelector('i').className).toEqual('fa fa-eye-slash');
- expect(vm.$el.querySelector('span').textContent.trim()).toEqual('This is a confidential issue. Your comment will not be visible to the public.');
- });
-});
diff --git a/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js b/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js
new file mode 100644
index 00000000000..2cf4d8e00ed
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js
@@ -0,0 +1,46 @@
+import Vue from 'vue';
+import issueWarning from '~/vue_shared/components/issue/issue_warning.vue';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
+
+const IssueWarning = Vue.extend(issueWarning);
+
+function formatWarning(string) {
+ // Replace newlines with a space then replace multiple spaces with one space
+ return string.trim().replace(/\n/g, ' ').replace(/\s\s+/g, ' ');
+}
+
+describe('Issue Warning Component', () => {
+ describe('isLocked', () => {
+ it('should render locked issue warning information', () => {
+ const vm = mountComponent(IssueWarning, {
+ isLocked: true,
+ });
+
+ expect(vm.$el.querySelector('i').className).toEqual('fa icon fa-lock');
+ expect(formatWarning(vm.$el.querySelector('span').textContent)).toEqual('This issue is locked. Only project members can comment.');
+ });
+ });
+
+ describe('isConfidential', () => {
+ it('should render confidential issue warning information', () => {
+ const vm = mountComponent(IssueWarning, {
+ isConfidential: true,
+ });
+
+ expect(vm.$el.querySelector('i').className).toEqual('fa icon fa-eye-slash');
+ expect(formatWarning(vm.$el.querySelector('span').textContent)).toEqual('This is a confidential issue. Your comment will not be visible to the public.');
+ });
+ });
+
+ describe('isLocked and isConfidential', () => {
+ it('should render locked and confidential issue warning information', () => {
+ const vm = mountComponent(IssueWarning, {
+ isLocked: true,
+ isConfidential: true,
+ });
+
+ expect(vm.$el.querySelector('i')).toBeFalsy();
+ expect(formatWarning(vm.$el.querySelector('span').textContent)).toEqual('This issue is confidential and locked. People without permission will never get a notification and won\'t be able to comment.');
+ });
+ });
+});
diff --git a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
index ebd6c79077e..fe7a8c84c9e 100644
--- a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
@@ -296,7 +296,7 @@ describe Banzai::Filter::MilestoneReferenceFilter do
context 'project milestones' do
let(:milestone) { create(:milestone, project: project) }
- let(:reference) { milestone.to_reference }
+ let(:reference) { milestone.to_reference(format: :iid) }
include_examples 'reference parsing'
diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb b/spec/lib/banzai/filter/sanitization_filter_spec.rb
index 35a32a46eff..5f41e28fece 100644
--- a/spec/lib/banzai/filter/sanitization_filter_spec.rb
+++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb
@@ -47,9 +47,11 @@ describe Banzai::Filter::SanitizationFilter do
describe 'custom whitelist' do
it 'customizes the whitelist only once' do
instance = described_class.new('Foo')
+ control_count = instance.whitelist[:transformers].size
+
3.times { instance.whitelist }
- expect(instance.whitelist[:transformers].size).to eq 4
+ expect(instance.whitelist[:transformers].size).to eq control_count
end
it 'sanitizes `class` attribute from all elements' do
@@ -63,8 +65,8 @@ describe Banzai::Filter::SanitizationFilter do
expect(filter(act).to_html).to eq %q{<span>def</span>}
end
- it 'allows `style` attribute on table elements' do
- html = <<-HTML.strip_heredoc
+ it 'allows `text-align` property in `style` attribute on table elements' do
+ html = <<~HTML
<table>
<tr><th style="text-align: center">Head</th></tr>
<tr><td style="text-align: right">Body</th></tr>
@@ -77,6 +79,20 @@ describe Banzai::Filter::SanitizationFilter do
expect(doc.at_css('td')['style']).to eq 'text-align: right'
end
+ it 'disallows other properties in `style` attribute on table elements' do
+ html = <<~HTML
+ <table>
+ <tr><th style="text-align: foo">Head</th></tr>
+ <tr><td style="position: fixed; height: 50px; width: 50px; background: red; z-index: 999; font-size: 36px; text-align: center">Body</th></tr>
+ </table>
+ HTML
+
+ doc = filter(html)
+
+ expect(doc.at_css('th')['style']).to be_nil
+ expect(doc.at_css('td')['style']).to eq 'text-align: center'
+ end
+
it 'allows `span` elements' do
exp = act = %q{<span>Hello</span>}
expect(filter(act).to_html).to eq exp
@@ -87,6 +103,20 @@ describe Banzai::Filter::SanitizationFilter do
expect(filter(act).to_html).to eq exp
end
+ it 'disallows the `name` attribute globally, allows on `a`' do
+ html = <<~HTML
+ <img name="getElementById" src="">
+ <span name="foo" class="bar">Hi</span>
+ <a name="foo" class="bar">Bye</a>
+ HTML
+
+ doc = filter(html)
+
+ expect(doc.at_css('img')).not_to have_attribute('name')
+ expect(doc.at_css('span')).not_to have_attribute('name')
+ expect(doc.at_css('a')).to have_attribute('name')
+ end
+
it 'allows `summary` elements' do
exp = act = '<summary>summary line</summary>'
expect(filter(act).to_html).to eq exp
diff --git a/spec/lib/banzai/pipeline/email_pipeline_spec.rb b/spec/lib/banzai/pipeline/email_pipeline_spec.rb
new file mode 100644
index 00000000000..6a11ca2f9d5
--- /dev/null
+++ b/spec/lib/banzai/pipeline/email_pipeline_spec.rb
@@ -0,0 +1,14 @@
+require 'rails_helper'
+
+describe Banzai::Pipeline::EmailPipeline do
+ describe '.filters' do
+ it 'returns the expected type' do
+ expect(described_class.filters).to be_kind_of(Banzai::FilterArray)
+ end
+
+ it 'excludes ImageLazyLoadFilter' do
+ expect(described_class.filters).not_to be_empty
+ expect(described_class.filters).not_to include(Banzai::Filter::ImageLazyLoadFilter)
+ end
+ end
+end
diff --git a/spec/lib/banzai/renderer_spec.rb b/spec/lib/banzai/renderer_spec.rb
index da42272bbef..81a04a2d46d 100644
--- a/spec/lib/banzai/renderer_spec.rb
+++ b/spec/lib/banzai/renderer_spec.rb
@@ -31,7 +31,14 @@ describe Banzai::Renderer do
let(:object) { fake_object(fresh: false) }
it 'caches and returns the result' do
- expect(object).to receive(:refresh_markdown_cache!).with(do_update: true)
+ expect(object).to receive(:refresh_markdown_cache!)
+
+ is_expected.to eq('field_html')
+ end
+
+ it "skips database caching on a GitLab read-only instance" do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ expect(object).to receive(:refresh_markdown_cache!)
is_expected.to eq('field_html')
end
diff --git a/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb b/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb
deleted file mode 100644
index 1efd3113a43..00000000000
--- a/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb
+++ /dev/null
@@ -1,1697 +0,0 @@
-require 'spec_helper'
-
-module Ci
- describe GitlabCiYamlProcessor, :lib do
- subject { described_class.new(config, path) }
- let(:path) { 'path' }
-
- describe 'our current .gitlab-ci.yml' do
- let(:config) { File.read("#{Rails.root}/.gitlab-ci.yml") }
-
- it 'is valid' do
- error_message = described_class.validation_message(config)
-
- expect(error_message).to be_nil
- end
- end
-
- describe '#build_attributes' do
- subject { described_class.new(config, path).build_attributes(:rspec) }
-
- describe 'coverage entry' do
- describe 'code coverage regexp' do
- let(:config) do
- YAML.dump(rspec: { script: 'rspec',
- coverage: '/Code coverage: \d+\.\d+/' })
- end
-
- it 'includes coverage regexp in build attributes' do
- expect(subject)
- .to include(coverage_regex: 'Code coverage: \d+\.\d+')
- end
- end
- end
-
- describe 'retry entry' do
- context 'when retry count is specified' do
- let(:config) do
- YAML.dump(rspec: { script: 'rspec', retry: 1 })
- end
-
- it 'includes retry count in build options attribute' do
- expect(subject[:options]).to include(retry: 1)
- end
- end
-
- context 'when retry count is not specified' do
- let(:config) do
- YAML.dump(rspec: { script: 'rspec' })
- end
-
- it 'does not persist retry count in the database' do
- expect(subject[:options]).not_to have_key(:retry)
- end
- end
- end
-
- describe 'allow failure entry' do
- context 'when job is a manual action' do
- context 'when allow_failure is defined' do
- let(:config) do
- YAML.dump(rspec: { script: 'rspec',
- when: 'manual',
- allow_failure: false })
- end
-
- it 'is not allowed to fail' do
- expect(subject[:allow_failure]).to be false
- end
- end
-
- context 'when allow_failure is not defined' do
- let(:config) do
- YAML.dump(rspec: { script: 'rspec',
- when: 'manual' })
- end
-
- it 'is allowed to fail' do
- expect(subject[:allow_failure]).to be true
- end
- end
- end
-
- context 'when job is not a manual action' do
- context 'when allow_failure is defined' do
- let(:config) do
- YAML.dump(rspec: { script: 'rspec',
- allow_failure: false })
- end
-
- it 'is not allowed to fail' do
- expect(subject[:allow_failure]).to be false
- end
- end
-
- context 'when allow_failure is not defined' do
- let(:config) do
- YAML.dump(rspec: { script: 'rspec' })
- end
-
- it 'is not allowed to fail' do
- expect(subject[:allow_failure]).to be false
- end
- end
- end
- end
- end
-
- describe '#stage_seeds' do
- context 'when no refs policy is specified' do
- let(:config) do
- YAML.dump(production: { stage: 'deploy', script: 'cap prod' },
- rspec: { stage: 'test', script: 'rspec' },
- spinach: { stage: 'test', script: 'spinach' })
- end
-
- let(:pipeline) { create(:ci_empty_pipeline) }
-
- it 'correctly fabricates a stage seeds object' do
- seeds = subject.stage_seeds(pipeline)
-
- expect(seeds.size).to eq 2
- expect(seeds.first.stage[:name]).to eq 'test'
- expect(seeds.second.stage[:name]).to eq 'deploy'
- expect(seeds.first.builds.dig(0, :name)).to eq 'rspec'
- expect(seeds.first.builds.dig(1, :name)).to eq 'spinach'
- expect(seeds.second.builds.dig(0, :name)).to eq 'production'
- end
- end
-
- context 'when refs policy is specified' do
- let(:config) do
- YAML.dump(production: { stage: 'deploy', script: 'cap prod', only: ['master'] },
- spinach: { stage: 'test', script: 'spinach', only: ['tags'] })
- end
-
- let(:pipeline) do
- create(:ci_empty_pipeline, ref: 'feature', tag: true)
- end
-
- it 'returns stage seeds only assigned to master' do
- seeds = subject.stage_seeds(pipeline)
-
- expect(seeds.size).to eq 1
- expect(seeds.first.stage[:name]).to eq 'test'
- expect(seeds.first.builds.dig(0, :name)).to eq 'spinach'
- end
- end
-
- context 'when source policy is specified' do
- let(:config) do
- YAML.dump(production: { stage: 'deploy', script: 'cap prod', only: ['triggers'] },
- spinach: { stage: 'test', script: 'spinach', only: ['schedules'] })
- end
-
- let(:pipeline) do
- create(:ci_empty_pipeline, source: :schedule)
- end
-
- it 'returns stage seeds only assigned to schedules' do
- seeds = subject.stage_seeds(pipeline)
-
- expect(seeds.size).to eq 1
- expect(seeds.first.stage[:name]).to eq 'test'
- expect(seeds.first.builds.dig(0, :name)).to eq 'spinach'
- end
- end
-
- context 'when kubernetes policy is specified' do
- let(:pipeline) { create(:ci_empty_pipeline) }
-
- let(:config) do
- YAML.dump(
- spinach: { stage: 'test', script: 'spinach' },
- production: {
- stage: 'deploy',
- script: 'cap',
- only: { kubernetes: 'active' }
- }
- )
- end
-
- context 'when kubernetes is active' do
- let(:project) { create(:kubernetes_project) }
- let(:pipeline) { create(:ci_empty_pipeline, project: project) }
-
- it 'returns seeds for kubernetes dependent job' do
- seeds = subject.stage_seeds(pipeline)
-
- expect(seeds.size).to eq 2
- expect(seeds.first.builds.dig(0, :name)).to eq 'spinach'
- expect(seeds.second.builds.dig(0, :name)).to eq 'production'
- end
- end
-
- context 'when kubernetes is not active' do
- it 'does not return seeds for kubernetes dependent job' do
- seeds = subject.stage_seeds(pipeline)
-
- expect(seeds.size).to eq 1
- expect(seeds.first.builds.dig(0, :name)).to eq 'spinach'
- end
- end
- end
- end
-
- describe "#builds_for_stage_and_ref" do
- let(:type) { 'test' }
-
- it "returns builds if no branch specified" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec" }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref(type, "master").first).to eq({
- stage: "test",
- stage_idx: 1,
- name: "rspec",
- commands: "pwd\nrspec",
- coverage_regex: nil,
- tag_list: [],
- options: {
- before_script: ["pwd"],
- script: ["rspec"]
- },
- allow_failure: false,
- when: "on_success",
- environment: nil,
- yaml_variables: []
- })
- end
-
- describe 'only' do
- it "does not return builds if only has another branch" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", only: ["deploy"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(0)
- end
-
- it "does not return builds if only has regexp with another branch" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", only: ["/^deploy$/"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(0)
- end
-
- it "returns builds if only has specified this branch" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", only: ["master"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1)
- end
-
- it "returns builds if only has a list of branches including specified" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, only: %w(master deploy) }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
- end
-
- it "returns builds if only has a branches keyword specified" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, only: ["branches"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
- end
-
- it "does not return builds if only has a tags keyword" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, only: ["tags"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
- end
-
- it "returns builds if only has special keywords specified and source matches" do
- possibilities = [{ keyword: 'pushes', source: 'push' },
- { keyword: 'web', source: 'web' },
- { keyword: 'triggers', source: 'trigger' },
- { keyword: 'schedules', source: 'schedule' },
- { keyword: 'api', source: 'api' },
- { keyword: 'external', source: 'external' }]
-
- possibilities.each do |possibility|
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, only: [possibility[:keyword]] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, possibility[:source]).size).to eq(1)
- end
- end
-
- it "does not return builds if only has special keywords specified and source doesn't match" do
- possibilities = [{ keyword: 'pushes', source: 'web' },
- { keyword: 'web', source: 'push' },
- { keyword: 'triggers', source: 'schedule' },
- { keyword: 'schedules', source: 'external' },
- { keyword: 'api', source: 'trigger' },
- { keyword: 'external', source: 'api' }]
-
- possibilities.each do |possibility|
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, only: [possibility[:keyword]] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, possibility[:source]).size).to eq(0)
- end
- end
-
- it "returns builds if only has current repository path" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, only: ["branches@path"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
- end
-
- it "does not return builds if only has different repository path" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, only: ["branches@fork"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
- end
-
- it "returns build only for specified type" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: "test", only: %w(master deploy) },
- staging: { script: "deploy", type: "deploy", only: %w(master deploy) },
- production: { script: "deploy", type: "deploy", only: ["master@path", "deploy"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, 'fork')
-
- expect(config_processor.builds_for_stage_and_ref("deploy", "deploy").size).to eq(2)
- expect(config_processor.builds_for_stage_and_ref("test", "deploy").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("deploy", "master").size).to eq(1)
- end
-
- context 'for invalid value' do
- let(:config) { { rspec: { script: "rspec", type: "test", only: only } } }
- let(:processor) { GitlabCiYamlProcessor.new(YAML.dump(config)) }
-
- context 'when it is integer' do
- let(:only) { 1 }
-
- it do
- expect { processor }.to raise_error(GitlabCiYamlProcessor::ValidationError,
- 'jobs:rspec:only has to be either an array of conditions or a hash')
- end
- end
-
- context 'when it is an array of integers' do
- let(:only) { [1, 1] }
-
- it do
- expect { processor }.to raise_error(GitlabCiYamlProcessor::ValidationError,
- 'jobs:rspec:only config should be an array of strings or regexps')
- end
- end
-
- context 'when it is invalid regex' do
- let(:only) { ["/*invalid/"] }
-
- it do
- expect { processor }.to raise_error(GitlabCiYamlProcessor::ValidationError,
- 'jobs:rspec:only config should be an array of strings or regexps')
- end
- end
- end
- end
-
- describe 'except' do
- it "returns builds if except has another branch" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", except: ["deploy"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1)
- end
-
- it "returns builds if except has regexp with another branch" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", except: ["/^deploy$/"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1)
- end
-
- it "does not return builds if except has specified this branch" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", except: ["master"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(0)
- end
-
- it "does not return builds if except has a list of branches including specified" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, except: %w(master deploy) }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
- end
-
- it "does not return builds if except has a branches keyword specified" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, except: ["branches"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
- end
-
- it "returns builds if except has a tags keyword" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, except: ["tags"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
- end
-
- it "does not return builds if except has special keywords specified and source matches" do
- possibilities = [{ keyword: 'pushes', source: 'push' },
- { keyword: 'web', source: 'web' },
- { keyword: 'triggers', source: 'trigger' },
- { keyword: 'schedules', source: 'schedule' },
- { keyword: 'api', source: 'api' },
- { keyword: 'external', source: 'external' }]
-
- possibilities.each do |possibility|
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, except: [possibility[:keyword]] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, possibility[:source]).size).to eq(0)
- end
- end
-
- it "returns builds if except has special keywords specified and source doesn't match" do
- possibilities = [{ keyword: 'pushes', source: 'web' },
- { keyword: 'web', source: 'push' },
- { keyword: 'triggers', source: 'schedule' },
- { keyword: 'schedules', source: 'external' },
- { keyword: 'api', source: 'trigger' },
- { keyword: 'external', source: 'api' }]
-
- possibilities.each do |possibility|
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, except: [possibility[:keyword]] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, possibility[:source]).size).to eq(1)
- end
- end
-
- it "does not return builds if except has current repository path" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, except: ["branches@path"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
- end
-
- it "returns builds if except has different repository path" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: type, except: ["branches@fork"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
- end
-
- it "returns build except specified type" do
- config = YAML.dump({
- before_script: ["pwd"],
- rspec: { script: "rspec", type: "test", except: ["master", "deploy", "test@fork"] },
- staging: { script: "deploy", type: "deploy", except: ["master"] },
- production: { script: "deploy", type: "deploy", except: ["master@fork"] }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, 'fork')
-
- expect(config_processor.builds_for_stage_and_ref("deploy", "deploy").size).to eq(2)
- expect(config_processor.builds_for_stage_and_ref("test", "test").size).to eq(0)
- expect(config_processor.builds_for_stage_and_ref("deploy", "master").size).to eq(0)
- end
-
- context 'for invalid value' do
- let(:config) { { rspec: { script: "rspec", except: except } } }
- let(:processor) { GitlabCiYamlProcessor.new(YAML.dump(config)) }
-
- context 'when it is integer' do
- let(:except) { 1 }
-
- it do
- expect { processor }.to raise_error(GitlabCiYamlProcessor::ValidationError,
- 'jobs:rspec:except has to be either an array of conditions or a hash')
- end
- end
-
- context 'when it is an array of integers' do
- let(:except) { [1, 1] }
-
- it do
- expect { processor }.to raise_error(GitlabCiYamlProcessor::ValidationError,
- 'jobs:rspec:except config should be an array of strings or regexps')
- end
- end
-
- context 'when it is invalid regex' do
- let(:except) { ["/*invalid/"] }
-
- it do
- expect { processor }.to raise_error(GitlabCiYamlProcessor::ValidationError,
- 'jobs:rspec:except config should be an array of strings or regexps')
- end
- end
- end
- end
- end
-
- describe "Scripts handling" do
- let(:config_data) { YAML.dump(config) }
- let(:config_processor) { GitlabCiYamlProcessor.new(config_data, path) }
-
- subject { config_processor.builds_for_stage_and_ref("test", "master").first }
-
- describe "before_script" do
- context "in global context" do
- let(:config) do
- {
- before_script: ["global script"],
- test: { script: ["script"] }
- }
- end
-
- it "return commands with scripts concencaced" do
- expect(subject[:commands]).to eq("global script\nscript")
- end
- end
-
- context "overwritten in local context" do
- let(:config) do
- {
- before_script: ["global script"],
- test: { before_script: ["local script"], script: ["script"] }
- }
- end
-
- it "return commands with scripts concencaced" do
- expect(subject[:commands]).to eq("local script\nscript")
- end
- end
- end
-
- describe "script" do
- let(:config) do
- {
- test: { script: ["script"] }
- }
- end
-
- it "return commands with scripts concencaced" do
- expect(subject[:commands]).to eq("script")
- end
- end
-
- describe "after_script" do
- context "in global context" do
- let(:config) do
- {
- after_script: ["after_script"],
- test: { script: ["script"] }
- }
- end
-
- it "return after_script in options" do
- expect(subject[:options][:after_script]).to eq(["after_script"])
- end
- end
-
- context "overwritten in local context" do
- let(:config) do
- {
- after_script: ["local after_script"],
- test: { after_script: ["local after_script"], script: ["script"] }
- }
- end
-
- it "return after_script in options" do
- expect(subject[:options][:after_script]).to eq(["local after_script"])
- end
- end
- end
- end
-
- describe "Image and service handling" do
- context "when extended docker configuration is used" do
- it "returns image and service when defined" do
- config = YAML.dump({ image: { name: "ruby:2.1", entrypoint: ["/usr/local/bin/init", "run"] },
- services: ["mysql", { name: "docker:dind", alias: "docker",
- entrypoint: ["/usr/local/bin/init", "run"],
- command: ["/usr/local/bin/init", "run"] }],
- before_script: ["pwd"],
- rspec: { script: "rspec" } })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
- stage: "test",
- stage_idx: 1,
- name: "rspec",
- commands: "pwd\nrspec",
- coverage_regex: nil,
- tag_list: [],
- options: {
- before_script: ["pwd"],
- script: ["rspec"],
- image: { name: "ruby:2.1", entrypoint: ["/usr/local/bin/init", "run"] },
- services: [{ name: "mysql" },
- { name: "docker:dind", alias: "docker", entrypoint: ["/usr/local/bin/init", "run"],
- command: ["/usr/local/bin/init", "run"] }]
- },
- allow_failure: false,
- when: "on_success",
- environment: nil,
- yaml_variables: []
- })
- end
-
- it "returns image and service when overridden for job" do
- config = YAML.dump({ image: "ruby:2.1",
- services: ["mysql"],
- before_script: ["pwd"],
- rspec: { image: { name: "ruby:2.5", entrypoint: ["/usr/local/bin/init", "run"] },
- services: [{ name: "postgresql", alias: "db-pg",
- entrypoint: ["/usr/local/bin/init", "run"],
- command: ["/usr/local/bin/init", "run"] }, "docker:dind"],
- script: "rspec" } })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
- stage: "test",
- stage_idx: 1,
- name: "rspec",
- commands: "pwd\nrspec",
- coverage_regex: nil,
- tag_list: [],
- options: {
- before_script: ["pwd"],
- script: ["rspec"],
- image: { name: "ruby:2.5", entrypoint: ["/usr/local/bin/init", "run"] },
- services: [{ name: "postgresql", alias: "db-pg", entrypoint: ["/usr/local/bin/init", "run"],
- command: ["/usr/local/bin/init", "run"] },
- { name: "docker:dind" }]
- },
- allow_failure: false,
- when: "on_success",
- environment: nil,
- yaml_variables: []
- })
- end
- end
-
- context "when etended docker configuration is not used" do
- it "returns image and service when defined" do
- config = YAML.dump({ image: "ruby:2.1",
- services: ["mysql", "docker:dind"],
- before_script: ["pwd"],
- rspec: { script: "rspec" } })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
- stage: "test",
- stage_idx: 1,
- name: "rspec",
- commands: "pwd\nrspec",
- coverage_regex: nil,
- tag_list: [],
- options: {
- before_script: ["pwd"],
- script: ["rspec"],
- image: { name: "ruby:2.1" },
- services: [{ name: "mysql" }, { name: "docker:dind" }]
- },
- allow_failure: false,
- when: "on_success",
- environment: nil,
- yaml_variables: []
- })
- end
-
- it "returns image and service when overridden for job" do
- config = YAML.dump({ image: "ruby:2.1",
- services: ["mysql"],
- before_script: ["pwd"],
- rspec: { image: "ruby:2.5", services: ["postgresql", "docker:dind"], script: "rspec" } })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
- stage: "test",
- stage_idx: 1,
- name: "rspec",
- commands: "pwd\nrspec",
- coverage_regex: nil,
- tag_list: [],
- options: {
- before_script: ["pwd"],
- script: ["rspec"],
- image: { name: "ruby:2.5" },
- services: [{ name: "postgresql" }, { name: "docker:dind" }]
- },
- allow_failure: false,
- when: "on_success",
- environment: nil,
- yaml_variables: []
- })
- end
- end
- end
-
- describe 'Variables' do
- let(:config_processor) { GitlabCiYamlProcessor.new(YAML.dump(config), path) }
-
- subject { config_processor.builds.first[:yaml_variables] }
-
- context 'when global variables are defined' do
- let(:variables) do
- { 'VAR1' => 'value1', 'VAR2' => 'value2' }
- end
- let(:config) do
- {
- variables: variables,
- before_script: ['pwd'],
- rspec: { script: 'rspec' }
- }
- end
-
- it 'returns global variables' do
- expect(subject).to contain_exactly(
- { key: 'VAR1', value: 'value1', public: true },
- { key: 'VAR2', value: 'value2', public: true }
- )
- end
- end
-
- context 'when job and global variables are defined' do
- let(:global_variables) do
- { 'VAR1' => 'global1', 'VAR3' => 'global3' }
- end
- let(:job_variables) do
- { 'VAR1' => 'value1', 'VAR2' => 'value2' }
- end
- let(:config) do
- {
- before_script: ['pwd'],
- variables: global_variables,
- rspec: { script: 'rspec', variables: job_variables }
- }
- end
-
- it 'returns all unique variables' do
- expect(subject).to contain_exactly(
- { key: 'VAR3', value: 'global3', public: true },
- { key: 'VAR1', value: 'value1', public: true },
- { key: 'VAR2', value: 'value2', public: true }
- )
- end
- end
-
- context 'when job variables are defined' do
- let(:config) do
- {
- before_script: ['pwd'],
- rspec: { script: 'rspec', variables: variables }
- }
- end
-
- context 'when syntax is correct' do
- let(:variables) do
- { 'VAR1' => 'value1', 'VAR2' => 'value2' }
- end
-
- it 'returns job variables' do
- expect(subject).to contain_exactly(
- { key: 'VAR1', value: 'value1', public: true },
- { key: 'VAR2', value: 'value2', public: true }
- )
- end
- end
-
- context 'when syntax is incorrect' do
- context 'when variables defined but invalid' do
- let(:variables) do
- %w(VAR1 value1 VAR2 value2)
- end
-
- it 'raises error' do
- expect { subject }
- .to raise_error(GitlabCiYamlProcessor::ValidationError,
- /jobs:rspec:variables config should be a hash of key value pairs/)
- end
- end
-
- context 'when variables key defined but value not specified' do
- let(:variables) do
- nil
- end
-
- it 'returns empty array' do
- ##
- # When variables config is empty, we assume this is a valid
- # configuration, see issue #18775
- #
- expect(subject).to be_an_instance_of(Array)
- expect(subject).to be_empty
- end
- end
- end
- end
-
- context 'when job variables are not defined' do
- let(:config) do
- {
- before_script: ['pwd'],
- rspec: { script: 'rspec' }
- }
- end
-
- it 'returns empty array' do
- expect(subject).to be_an_instance_of(Array)
- expect(subject).to be_empty
- end
- end
- end
-
- describe "When" do
- %w(on_success on_failure always).each do |when_state|
- it "returns #{when_state} when defined" do
- config = YAML.dump({
- rspec: { script: "rspec", when: when_state }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- builds = config_processor.builds_for_stage_and_ref("test", "master")
- expect(builds.size).to eq(1)
- expect(builds.first[:when]).to eq(when_state)
- end
- end
- end
-
- describe 'cache' do
- context 'when cache definition has unknown keys' do
- it 'raises relevant validation error' do
- config = YAML.dump(
- { cache: { untracked: true, invalid: 'key' },
- rspec: { script: 'rspec' } })
-
- expect { GitlabCiYamlProcessor.new(config) }.to raise_error(
- GitlabCiYamlProcessor::ValidationError,
- 'cache config contains unknown keys: invalid'
- )
- end
- end
-
- it "returns cache when defined globally" do
- config = YAML.dump({
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' },
- rspec: {
- script: "rspec"
- }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config)
-
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq(
- paths: ["logs/", "binaries/"],
- untracked: true,
- key: 'key',
- policy: 'pull-push'
- )
- end
-
- it "returns cache when defined in a job" do
- config = YAML.dump({
- rspec: {
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' },
- script: "rspec"
- }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config)
-
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq(
- paths: ["logs/", "binaries/"],
- untracked: true,
- key: 'key',
- policy: 'pull-push'
- )
- end
-
- it "overwrite cache when defined for a job and globally" do
- config = YAML.dump({
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' },
- rspec: {
- script: "rspec",
- cache: { paths: ["test/"], untracked: false, key: 'local' }
- }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config)
-
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq(
- paths: ["test/"],
- untracked: false,
- key: 'local',
- policy: 'pull-push'
- )
- end
- end
-
- describe "Artifacts" do
- it "returns artifacts when defined" do
- config = YAML.dump({
- image: "ruby:2.1",
- services: ["mysql"],
- before_script: ["pwd"],
- rspec: {
- artifacts: {
- paths: ["logs/", "binaries/"],
- untracked: true,
- name: "custom_name",
- expire_in: "7d"
- },
- script: "rspec"
- }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config)
-
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
- stage: "test",
- stage_idx: 1,
- name: "rspec",
- commands: "pwd\nrspec",
- coverage_regex: nil,
- tag_list: [],
- options: {
- before_script: ["pwd"],
- script: ["rspec"],
- image: { name: "ruby:2.1" },
- services: [{ name: "mysql" }],
- artifacts: {
- name: "custom_name",
- paths: ["logs/", "binaries/"],
- untracked: true,
- expire_in: "7d"
- }
- },
- when: "on_success",
- allow_failure: false,
- environment: nil,
- yaml_variables: []
- })
- end
-
- %w[on_success on_failure always].each do |when_state|
- it "returns artifacts for when #{when_state} defined" do
- config = YAML.dump({
- rspec: {
- script: "rspec",
- artifacts: { paths: ["logs/", "binaries/"], when: when_state }
- }
- })
-
- config_processor = GitlabCiYamlProcessor.new(config, path)
-
- builds = config_processor.builds_for_stage_and_ref("test", "master")
- expect(builds.size).to eq(1)
- expect(builds.first[:options][:artifacts][:when]).to eq(when_state)
- end
- end
- end
-
- describe '#environment' do
- let(:config) do
- {
- deploy_to_production: { stage: 'deploy', script: 'test', environment: environment }
- }
- end
-
- let(:processor) { GitlabCiYamlProcessor.new(YAML.dump(config)) }
- let(:builds) { processor.builds_for_stage_and_ref('deploy', 'master') }
-
- context 'when a production environment is specified' do
- let(:environment) { 'production' }
-
- it 'does return production' do
- expect(builds.size).to eq(1)
- expect(builds.first[:environment]).to eq(environment)
- expect(builds.first[:options]).to include(environment: { name: environment, action: "start" })
- end
- end
-
- context 'when hash is specified' do
- let(:environment) do
- { name: 'production',
- url: 'http://production.gitlab.com' }
- end
-
- it 'does return production and URL' do
- expect(builds.size).to eq(1)
- expect(builds.first[:environment]).to eq(environment[:name])
- expect(builds.first[:options]).to include(environment: environment)
- end
-
- context 'the url has a port as variable' do
- let(:environment) do
- { name: 'production',
- url: 'http://production.gitlab.com:$PORT' }
- end
-
- it 'allows a variable for the port' do
- expect(builds.size).to eq(1)
- expect(builds.first[:environment]).to eq(environment[:name])
- expect(builds.first[:options]).to include(environment: environment)
- end
- end
- end
-
- context 'when no environment is specified' do
- let(:environment) { nil }
-
- it 'does return nil environment' do
- expect(builds.size).to eq(1)
- expect(builds.first[:environment]).to be_nil
- end
- end
-
- context 'is not a string' do
- let(:environment) { 1 }
-
- it 'raises error' do
- expect { builds }.to raise_error(
- 'jobs:deploy_to_production:environment config should be a hash or a string')
- end
- end
-
- context 'is not a valid string' do
- let(:environment) { 'production:staging' }
-
- it 'raises error' do
- expect { builds }.to raise_error("jobs:deploy_to_production:environment name #{Gitlab::Regex.environment_name_regex_message}")
- end
- end
-
- context 'when on_stop is specified' do
- let(:review) { { stage: 'deploy', script: 'test', environment: { name: 'review', on_stop: 'close_review' } } }
- let(:config) { { review: review, close_review: close_review }.compact }
-
- context 'with matching job' do
- let(:close_review) { { stage: 'deploy', script: 'test', environment: { name: 'review', action: 'stop' } } }
-
- it 'does return a list of builds' do
- expect(builds.size).to eq(2)
- expect(builds.first[:environment]).to eq('review')
- end
- end
-
- context 'without matching job' do
- let(:close_review) { nil }
-
- it 'raises error' do
- expect { builds }.to raise_error('review job: on_stop job close_review is not defined')
- end
- end
-
- context 'with close job without environment' do
- let(:close_review) { { stage: 'deploy', script: 'test' } }
-
- it 'raises error' do
- expect { builds }.to raise_error('review job: on_stop job close_review does not have environment defined')
- end
- end
-
- context 'with close job for different environment' do
- let(:close_review) { { stage: 'deploy', script: 'test', environment: 'production' } }
-
- it 'raises error' do
- expect { builds }.to raise_error('review job: on_stop job close_review have different environment name')
- end
- end
-
- context 'with close job without stop action' do
- let(:close_review) { { stage: 'deploy', script: 'test', environment: { name: 'review' } } }
-
- it 'raises error' do
- expect { builds }.to raise_error('review job: on_stop job close_review needs to have action stop defined')
- end
- end
- end
- end
-
- describe "Dependencies" do
- let(:config) do
- {
- build1: { stage: 'build', script: 'test' },
- build2: { stage: 'build', script: 'test' },
- test1: { stage: 'test', script: 'test', dependencies: dependencies },
- test2: { stage: 'test', script: 'test' },
- deploy: { stage: 'test', script: 'test' }
- }
- end
-
- subject { GitlabCiYamlProcessor.new(YAML.dump(config)) }
-
- context 'no dependencies' do
- let(:dependencies) { }
-
- it { expect { subject }.not_to raise_error }
- end
-
- context 'dependencies to builds' do
- let(:dependencies) { %w(build1 build2) }
-
- it { expect { subject }.not_to raise_error }
- end
-
- context 'dependencies to builds defined as symbols' do
- let(:dependencies) { [:build1, :build2] }
-
- it { expect { subject }.not_to raise_error }
- end
-
- context 'undefined dependency' do
- let(:dependencies) { ['undefined'] }
-
- it { expect { subject }.to raise_error(GitlabCiYamlProcessor::ValidationError, 'test1 job: undefined dependency: undefined') }
- end
-
- context 'dependencies to deploy' do
- let(:dependencies) { ['deploy'] }
-
- it { expect { subject }.to raise_error(GitlabCiYamlProcessor::ValidationError, 'test1 job: dependency deploy is not defined in prior stages') }
- end
- end
-
- describe "Hidden jobs" do
- let(:config_processor) { GitlabCiYamlProcessor.new(config) }
- subject { config_processor.builds_for_stage_and_ref("test", "master") }
-
- shared_examples 'hidden_job_handling' do
- it "doesn't create jobs that start with dot" do
- expect(subject.size).to eq(1)
- expect(subject.first).to eq({
- stage: "test",
- stage_idx: 1,
- name: "normal_job",
- commands: "test",
- coverage_regex: nil,
- tag_list: [],
- options: {
- script: ["test"]
- },
- when: "on_success",
- allow_failure: false,
- environment: nil,
- yaml_variables: []
- })
- end
- end
-
- context 'when hidden job have a script definition' do
- let(:config) do
- YAML.dump({
- '.hidden_job' => { image: 'ruby:2.1', script: 'test' },
- 'normal_job' => { script: 'test' }
- })
- end
-
- it_behaves_like 'hidden_job_handling'
- end
-
- context "when hidden job doesn't have a script definition" do
- let(:config) do
- YAML.dump({
- '.hidden_job' => { image: 'ruby:2.1' },
- 'normal_job' => { script: 'test' }
- })
- end
-
- it_behaves_like 'hidden_job_handling'
- end
- end
-
- describe "YAML Alias/Anchor" do
- let(:config_processor) { GitlabCiYamlProcessor.new(config) }
- subject { config_processor.builds_for_stage_and_ref("build", "master") }
-
- shared_examples 'job_templates_handling' do
- it "is correctly supported for jobs" do
- expect(subject.size).to eq(2)
- expect(subject.first).to eq({
- stage: "build",
- stage_idx: 0,
- name: "job1",
- commands: "execute-script-for-job",
- coverage_regex: nil,
- tag_list: [],
- options: {
- script: ["execute-script-for-job"]
- },
- when: "on_success",
- allow_failure: false,
- environment: nil,
- yaml_variables: []
- })
- expect(subject.second).to eq({
- stage: "build",
- stage_idx: 0,
- name: "job2",
- commands: "execute-script-for-job",
- coverage_regex: nil,
- tag_list: [],
- options: {
- script: ["execute-script-for-job"]
- },
- when: "on_success",
- allow_failure: false,
- environment: nil,
- yaml_variables: []
- })
- end
- end
-
- context 'when template is a job' do
- let(:config) do
- <<EOT
-job1: &JOBTMPL
- stage: build
- script: execute-script-for-job
-
-job2: *JOBTMPL
-EOT
- end
-
- it_behaves_like 'job_templates_handling'
- end
-
- context 'when template is a hidden job' do
- let(:config) do
- <<EOT
-.template: &JOBTMPL
- stage: build
- script: execute-script-for-job
-
-job1: *JOBTMPL
-
-job2: *JOBTMPL
-EOT
- end
-
- it_behaves_like 'job_templates_handling'
- end
-
- context 'when job adds its own keys to a template definition' do
- let(:config) do
- <<EOT
-.template: &JOBTMPL
- stage: build
-
-job1:
- <<: *JOBTMPL
- script: execute-script-for-job
-
-job2:
- <<: *JOBTMPL
- script: execute-script-for-job
-EOT
- end
-
- it_behaves_like 'job_templates_handling'
- end
- end
-
- describe "Error handling" do
- it "fails to parse YAML" do
- expect {GitlabCiYamlProcessor.new("invalid: yaml: test")}.to raise_error(Psych::SyntaxError)
- end
-
- it "indicates that object is invalid" do
- expect {GitlabCiYamlProcessor.new("invalid_yaml")}.to raise_error(GitlabCiYamlProcessor::ValidationError)
- end
-
- it "returns errors if tags parameter is invalid" do
- config = YAML.dump({ rspec: { script: "test", tags: "mysql" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec tags should be an array of strings")
- end
-
- it "returns errors if before_script parameter is invalid" do
- config = YAML.dump({ before_script: "bundle update", rspec: { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "before_script config should be an array of strings")
- end
-
- it "returns errors if job before_script parameter is not an array of strings" do
- config = YAML.dump({ rspec: { script: "test", before_script: [10, "test"] } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:before_script config should be an array of strings")
- end
-
- it "returns errors if after_script parameter is invalid" do
- config = YAML.dump({ after_script: "bundle update", rspec: { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "after_script config should be an array of strings")
- end
-
- it "returns errors if job after_script parameter is not an array of strings" do
- config = YAML.dump({ rspec: { script: "test", after_script: [10, "test"] } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:after_script config should be an array of strings")
- end
-
- it "returns errors if image parameter is invalid" do
- config = YAML.dump({ image: ["test"], rspec: { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "image config should be a hash or a string")
- end
-
- it "returns errors if job name is blank" do
- config = YAML.dump({ '' => { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:job name can't be blank")
- end
-
- it "returns errors if job name is non-string" do
- config = YAML.dump({ 10 => { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:10 name should be a symbol")
- end
-
- it "returns errors if job image parameter is invalid" do
- config = YAML.dump({ rspec: { script: "test", image: ["test"] } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:image config should be a hash or a string")
- end
-
- it "returns errors if services parameter is not an array" do
- config = YAML.dump({ services: "test", rspec: { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "services config should be a array")
- end
-
- it "returns errors if services parameter is not an array of strings" do
- config = YAML.dump({ services: [10, "test"], rspec: { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "service config should be a hash or a string")
- end
-
- it "returns errors if job services parameter is not an array" do
- config = YAML.dump({ rspec: { script: "test", services: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:services config should be a array")
- end
-
- it "returns errors if job services parameter is not an array of strings" do
- config = YAML.dump({ rspec: { script: "test", services: [10, "test"] } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "service config should be a hash or a string")
- end
-
- it "returns error if job configuration is invalid" do
- config = YAML.dump({ extra: "bundle update" })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:extra config should be a hash")
- end
-
- it "returns errors if services configuration is not correct" do
- config = YAML.dump({ extra: { script: 'rspec', services: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:extra:services config should be a array")
- end
-
- it "returns errors if there are no jobs defined" do
- config = YAML.dump({ before_script: ["bundle update"] })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs config should contain at least one visible job")
- end
-
- it "returns errors if there are no visible jobs defined" do
- config = YAML.dump({ before_script: ["bundle update"], '.hidden'.to_sym => { script: 'ls' } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs config should contain at least one visible job")
- end
-
- it "returns errors if job allow_failure parameter is not an boolean" do
- config = YAML.dump({ rspec: { script: "test", allow_failure: "string" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec allow failure should be a boolean value")
- end
-
- it "returns errors if job stage is not a string" do
- config = YAML.dump({ rspec: { script: "test", type: 1 } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:type config should be a string")
- end
-
- it "returns errors if job stage is not a pre-defined stage" do
- config = YAML.dump({ rspec: { script: "test", type: "acceptance" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: stage parameter should be build, test, deploy")
- end
-
- it "returns errors if job stage is not a defined stage" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", type: "acceptance" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "rspec job: stage parameter should be build, test")
- end
-
- it "returns errors if stages is not an array" do
- config = YAML.dump({ stages: "test", rspec: { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "stages config should be an array of strings")
- end
-
- it "returns errors if stages is not an array of strings" do
- config = YAML.dump({ stages: [true, "test"], rspec: { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "stages config should be an array of strings")
- end
-
- it "returns errors if variables is not a map" do
- config = YAML.dump({ variables: "test", rspec: { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "variables config should be a hash of key value pairs")
- end
-
- it "returns errors if variables is not a map of key-value strings" do
- config = YAML.dump({ variables: { test: false }, rspec: { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "variables config should be a hash of key value pairs")
- end
-
- it "returns errors if job when is not on_success, on_failure or always" do
- config = YAML.dump({ rspec: { script: "test", when: 1 } })
- expect do
- GitlabCiYamlProcessor.new(config, path)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec when should be on_success, on_failure, always or manual")
- end
-
- it "returns errors if job artifacts:name is not an a string" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { name: 1 } } })
- expect do
- GitlabCiYamlProcessor.new(config)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:artifacts name should be a string")
- end
-
- it "returns errors if job artifacts:when is not an a predefined value" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { when: 1 } } })
- expect do
- GitlabCiYamlProcessor.new(config)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:artifacts when should be on_success, on_failure or always")
- end
-
- it "returns errors if job artifacts:expire_in is not an a string" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { expire_in: 1 } } })
- expect do
- GitlabCiYamlProcessor.new(config)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:artifacts expire in should be a duration")
- end
-
- it "returns errors if job artifacts:expire_in is not an a valid duration" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { expire_in: "7 elephants" } } })
- expect do
- GitlabCiYamlProcessor.new(config)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:artifacts expire in should be a duration")
- end
-
- it "returns errors if job artifacts:untracked is not an array of strings" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { untracked: "string" } } })
- expect do
- GitlabCiYamlProcessor.new(config)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:artifacts untracked should be a boolean value")
- end
-
- it "returns errors if job artifacts:paths is not an array of strings" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { paths: "string" } } })
- expect do
- GitlabCiYamlProcessor.new(config)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:artifacts paths should be an array of strings")
- end
-
- it "returns errors if cache:untracked is not an array of strings" do
- config = YAML.dump({ cache: { untracked: "string" }, rspec: { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "cache:untracked config should be a boolean value")
- end
-
- it "returns errors if cache:paths is not an array of strings" do
- config = YAML.dump({ cache: { paths: "string" }, rspec: { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "cache:paths config should be an array of strings")
- end
-
- it "returns errors if cache:key is not a string" do
- config = YAML.dump({ cache: { key: 1 }, rspec: { script: "test" } })
- expect do
- GitlabCiYamlProcessor.new(config)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "cache:key config should be a string or symbol")
- end
-
- it "returns errors if job cache:key is not an a string" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: 1 } } })
- expect do
- GitlabCiYamlProcessor.new(config)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:cache:key config should be a string or symbol")
- end
-
- it "returns errors if job cache:untracked is not an array of strings" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { untracked: "string" } } })
- expect do
- GitlabCiYamlProcessor.new(config)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:cache:untracked config should be a boolean value")
- end
-
- it "returns errors if job cache:paths is not an array of strings" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { paths: "string" } } })
- expect do
- GitlabCiYamlProcessor.new(config)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec:cache:paths config should be an array of strings")
- end
-
- it "returns errors if job dependencies is not an array of strings" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", dependencies: "string" } })
- expect do
- GitlabCiYamlProcessor.new(config)
- end.to raise_error(GitlabCiYamlProcessor::ValidationError, "jobs:rspec dependencies should be an array of strings")
- end
- end
-
- describe "Validate configuration templates" do
- templates = Dir.glob("#{Rails.root.join('vendor/gitlab-ci-yml')}/**/*.gitlab-ci.yml")
-
- templates.each do |file|
- it "does not return errors for #{file}" do
- file = File.read(file)
-
- expect { GitlabCiYamlProcessor.new(file) }.not_to raise_error
- end
- end
- end
-
- describe "#validation_message" do
- context "when the YAML could not be parsed" do
- it "returns an error about invalid configutaion" do
- content = YAML.dump("invalid: yaml: test")
-
- expect(GitlabCiYamlProcessor.validation_message(content))
- .to eq "Invalid configuration format"
- end
- end
-
- context "when the tags parameter is invalid" do
- it "returns an error about invalid tags" do
- content = YAML.dump({ rspec: { script: "test", tags: "mysql" } })
-
- expect(GitlabCiYamlProcessor.validation_message(content))
- .to eq "jobs:rspec tags should be an array of strings"
- end
- end
-
- context "when YAML content is empty" do
- it "returns an error about missing content" do
- expect(GitlabCiYamlProcessor.validation_message(''))
- .to eq "Please provide content of .gitlab-ci.yml"
- end
- end
-
- context "when the YAML is valid" do
- it "does not return any errors" do
- content = File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
-
- expect(GitlabCiYamlProcessor.validation_message(content)).to be_nil
- end
- end
- end
- end
-end
diff --git a/spec/lib/github/client_spec.rb b/spec/lib/github/client_spec.rb
new file mode 100644
index 00000000000..b846096fe25
--- /dev/null
+++ b/spec/lib/github/client_spec.rb
@@ -0,0 +1,34 @@
+require 'spec_helper'
+
+describe Github::Client do
+ let(:connection) { spy }
+ let(:rate_limit) { double(get: [false, 1]) }
+ let(:client) { described_class.new({}) }
+ let(:results) { double }
+ let(:response) { double }
+
+ before do
+ allow(Faraday).to receive(:new).and_return(connection)
+ allow(Github::RateLimit).to receive(:new).with(connection).and_return(rate_limit)
+ end
+
+ describe '#get' do
+ before do
+ allow(Github::Response).to receive(:new).with(results).and_return(response)
+ end
+
+ it 'uses a default per_page param' do
+ expect(connection).to receive(:get).with('/foo', per_page: 100).and_return(results)
+
+ expect(client.get('/foo')).to eq(response)
+ end
+
+ context 'with per_page given' do
+ it 'overwrites the default per_page' do
+ expect(connection).to receive(:get).with('/foo', per_page: 30).and_return(results)
+
+ expect(client.get('/foo', per_page: 30)).to eq(response)
+ end
+ end
+ end
+end
diff --git a/spec/lib/github/import/legacy_diff_note_spec.rb b/spec/lib/github/import/legacy_diff_note_spec.rb
new file mode 100644
index 00000000000..8c50b46cacb
--- /dev/null
+++ b/spec/lib/github/import/legacy_diff_note_spec.rb
@@ -0,0 +1,9 @@
+require 'spec_helper'
+
+describe Github::Import::LegacyDiffNote do
+ describe '#type' do
+ it 'returns the original note type' do
+ expect(described_class.new.type).to eq('LegacyDiffNote')
+ end
+ end
+end
diff --git a/spec/lib/github/import/note_spec.rb b/spec/lib/github/import/note_spec.rb
new file mode 100644
index 00000000000..fcdccd9e097
--- /dev/null
+++ b/spec/lib/github/import/note_spec.rb
@@ -0,0 +1,9 @@
+require 'spec_helper'
+
+describe Github::Import::Note do
+ describe '#type' do
+ it 'returns the original note type' do
+ expect(described_class.new.type).to eq('Note')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index f685bb83d0d..af1db2c3455 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -16,12 +16,32 @@ describe Gitlab::Auth do
expect(subject::DEFAULT_SCOPES).to eq [:api]
end
- it 'OPTIONAL_SCOPES contains all non-default scopes' do
- expect(subject::OPTIONAL_SCOPES).to eq %i[read_user read_registry openid]
+ it 'optional_scopes contains all non-default scopes' do
+ stub_container_registry_config(enabled: true)
+
+ expect(subject.optional_scopes).to eq %i[read_user read_registry openid]
end
- it 'REGISTRY_SCOPES contains all registry related scopes' do
- expect(subject::REGISTRY_SCOPES).to eq %i[read_registry]
+ context 'registry_scopes' do
+ context 'when registry is disabled' do
+ before do
+ stub_container_registry_config(enabled: false)
+ end
+
+ it 'is empty' do
+ expect(subject.registry_scopes).to eq []
+ end
+ end
+
+ context 'when registry is enabled' do
+ before do
+ stub_container_registry_config(enabled: true)
+ end
+
+ it 'contains all registry related scopes' do
+ expect(subject.registry_scopes).to eq %i[read_registry]
+ end
+ end
end
end
@@ -147,11 +167,17 @@ describe Gitlab::Auth do
expect(gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(personal_access_token.user, nil, :personal_token, full_authentication_abilities))
end
- it 'succeeds for personal access tokens with the `read_registry` scope' do
- personal_access_token = create(:personal_access_token, scopes: ['read_registry'])
+ context 'when registry is enabled' do
+ before do
+ stub_container_registry_config(enabled: true)
+ end
+
+ it 'succeeds for personal access tokens with the `read_registry` scope' do
+ personal_access_token = create(:personal_access_token, scopes: ['read_registry'])
- expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: '')
- expect(gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(personal_access_token.user, nil, :personal_token, [:read_container_image]))
+ expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: '')
+ expect(gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(personal_access_token.user, nil, :personal_token, [:read_container_image]))
+ end
end
it 'succeeds if it is an impersonation token' do
diff --git a/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb b/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
new file mode 100644
index 00000000000..1a4ea2bac48
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
@@ -0,0 +1,117 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange, :migration, schema: 20170929131201 do
+ let(:migration) { described_class.new }
+
+ let(:base1) { create(:project) }
+ let(:base1_fork1) { create(:project) }
+ let(:base1_fork2) { create(:project) }
+
+ let(:base2) { create(:project) }
+ let(:base2_fork1) { create(:project) }
+ let(:base2_fork2) { create(:project) }
+
+ let(:fork_of_fork) { create(:project) }
+ let(:fork_of_fork2) { create(:project) }
+ let(:second_level_fork) { create(:project) }
+ let(:third_level_fork) { create(:project) }
+
+ let(:fork_network1) { fork_networks.find_by(root_project_id: base1.id) }
+ let(:fork_network2) { fork_networks.find_by(root_project_id: base2.id) }
+
+ let!(:forked_project_links) { table(:forked_project_links) }
+ let!(:fork_networks) { table(:fork_networks) }
+ let!(:fork_network_members) { table(:fork_network_members) }
+
+ before do
+ # The fork-network relation created for the forked project
+ fork_networks.create(id: 1, root_project_id: base1.id)
+ fork_network_members.create(project_id: base1.id, fork_network_id: 1)
+ fork_networks.create(id: 2, root_project_id: base2.id)
+ fork_network_members.create(project_id: base2.id, fork_network_id: 2)
+
+ # Normal fork links
+ forked_project_links.create(id: 1, forked_from_project_id: base1.id, forked_to_project_id: base1_fork1.id)
+ forked_project_links.create(id: 2, forked_from_project_id: base1.id, forked_to_project_id: base1_fork2.id)
+ forked_project_links.create(id: 3, forked_from_project_id: base2.id, forked_to_project_id: base2_fork1.id)
+ forked_project_links.create(id: 4, forked_from_project_id: base2.id, forked_to_project_id: base2_fork2.id)
+
+ # Fork links
+ forked_project_links.create(id: 5, forked_from_project_id: base1_fork1.id, forked_to_project_id: fork_of_fork.id)
+ forked_project_links.create(id: 6, forked_from_project_id: base1_fork1.id, forked_to_project_id: fork_of_fork2.id)
+
+ # Forks 3 levels down
+ forked_project_links.create(id: 7, forked_from_project_id: fork_of_fork.id, forked_to_project_id: second_level_fork.id)
+ forked_project_links.create(id: 8, forked_from_project_id: second_level_fork.id, forked_to_project_id: third_level_fork.id)
+
+ migration.perform(1, 8)
+ end
+
+ it 'creates memberships for the direct forks' do
+ base1_fork1_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
+ project_id: base1_fork1.id)
+ base1_fork2_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
+ project_id: base1_fork2.id)
+ base2_fork1_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
+ project_id: base2_fork1.id)
+ base2_fork2_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
+ project_id: base2_fork2.id)
+
+ expect(base1_fork1_membership.forked_from_project_id).to eq(base1.id)
+ expect(base1_fork2_membership.forked_from_project_id).to eq(base1.id)
+ expect(base2_fork1_membership.forked_from_project_id).to eq(base2.id)
+ expect(base2_fork2_membership.forked_from_project_id).to eq(base2.id)
+ end
+
+ it 'adds the fork network members for forks of forks' do
+ fork_of_fork_membership = fork_network_members.find_by(project_id: fork_of_fork.id,
+ fork_network_id: fork_network1.id)
+ fork_of_fork2_membership = fork_network_members.find_by(project_id: fork_of_fork2.id,
+ fork_network_id: fork_network1.id)
+ second_level_fork_membership = fork_network_members.find_by(project_id: second_level_fork.id,
+ fork_network_id: fork_network1.id)
+ third_level_fork_membership = fork_network_members.find_by(project_id: third_level_fork.id,
+ fork_network_id: fork_network1.id)
+
+ expect(fork_of_fork_membership.forked_from_project_id).to eq(base1_fork1.id)
+ expect(fork_of_fork2_membership.forked_from_project_id).to eq(base1_fork1.id)
+ expect(second_level_fork_membership.forked_from_project_id).to eq(fork_of_fork.id)
+ expect(third_level_fork_membership.forked_from_project_id).to eq(second_level_fork.id)
+ end
+
+ it 'reschedules itself when there are missing members' do
+ allow(migration).to receive(:missing_members?).and_return(true)
+
+ expect(BackgroundMigrationWorker)
+ .to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, "CreateForkNetworkMembershipsRange", [1, 3])
+
+ migration.perform(1, 3)
+ end
+
+ it 'can be repeated without effect' do
+ expect { migration.perform(1, 7) }.not_to change { fork_network_members.count }
+ end
+
+ it 'knows it is finished for this range' do
+ expect(migration.missing_members?(1, 7)).to be_falsy
+ end
+
+ context 'with more forks' do
+ before do
+ forked_project_links.create(id: 9, forked_from_project_id: fork_of_fork.id, forked_to_project_id: create(:project).id)
+ forked_project_links.create(id: 10, forked_from_project_id: fork_of_fork.id, forked_to_project_id: create(:project).id)
+ end
+
+ it 'only processes a single batch of links at a time' do
+ expect(fork_network_members.count).to eq(10)
+
+ migration.perform(8, 10)
+
+ expect(fork_network_members.count).to eq(12)
+ end
+
+ it 'knows when not all memberships within a batch have been created' do
+ expect(migration.missing_members?(8, 10)).to be_truthy
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys_spec.rb b/spec/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys_spec.rb
new file mode 100644
index 00000000000..26d48cc8201
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::CreateGpgKeySubkeysFromGpgKeys, :migration, schema: 20171005130944 do
+ context 'when GpgKey exists' do
+ let!(:gpg_key) { create(:gpg_key, key: GpgHelpers::User3.public_key) }
+
+ before do
+ GpgKeySubkey.destroy_all
+ end
+
+ it 'generates the subkeys' do
+ expect do
+ described_class.new.perform(gpg_key.id)
+ end.to change { gpg_key.subkeys.count }.from(0).to(2)
+ end
+
+ it 'schedules the signature update worker' do
+ expect(InvalidGpgSignatureUpdateWorker).to receive(:perform_async).with(gpg_key.id)
+
+ described_class.new.perform(gpg_key.id)
+ end
+ end
+
+ context 'when GpgKey does not exist' do
+ it 'does not do anything' do
+ expect(Gitlab::Gpg).not_to receive(:subkeys_from_key)
+ expect(InvalidGpgSignatureUpdateWorker).not_to receive(:perform_async)
+
+ described_class.new.perform(123)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb b/spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb
new file mode 100644
index 00000000000..5c471cbdeda
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb
@@ -0,0 +1,40 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::DeleteConflictingRedirectRoutesRange, :migration, schema: 20170907170235 do
+ let!(:redirect_routes) { table(:redirect_routes) }
+ let!(:routes) { table(:routes) }
+
+ before do
+ routes.create!(id: 1, source_id: 1, source_type: 'Namespace', path: 'foo1')
+ routes.create!(id: 2, source_id: 2, source_type: 'Namespace', path: 'foo2')
+ routes.create!(id: 3, source_id: 3, source_type: 'Namespace', path: 'foo3')
+ routes.create!(id: 4, source_id: 4, source_type: 'Namespace', path: 'foo4')
+ routes.create!(id: 5, source_id: 5, source_type: 'Namespace', path: 'foo5')
+
+ # Valid redirects
+ redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar')
+ redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar2')
+ redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'bar3')
+
+ # Conflicting redirects
+ redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'foo1')
+ redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo2')
+ redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo3')
+ redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo4')
+ redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo5')
+ end
+
+ it 'deletes the conflicting redirect_routes in the range' do
+ expect(redirect_routes.count).to eq(8)
+
+ expect do
+ described_class.new.perform(1, 3)
+ end.to change { redirect_routes.where("path like 'foo%'").count }.from(5).to(2)
+
+ expect do
+ described_class.new.perform(4, 5)
+ end.to change { redirect_routes.where("path like 'foo%'").count }.from(2).to(0)
+
+ expect(redirect_routes.count).to eq(3)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
index c0427639746..d2e7243ee05 100644
--- a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
+++ b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
@@ -1,9 +1,9 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits do
+describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :truncate do
describe '#perform' do
- set(:merge_request) { create(:merge_request) }
- set(:merge_request_diff) { merge_request.merge_request_diff }
+ let(:merge_request) { create(:merge_request) }
+ let(:merge_request_diff) { merge_request.merge_request_diff }
let(:updated_merge_request_diff) { MergeRequestDiff.find(merge_request_diff.id) }
def diffs_to_hashes(diffs)
@@ -70,8 +70,8 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits do
before do
merge_request.reload_diff(true)
- convert_to_yaml(start_id, merge_request_diff.commits, merge_request_diff.diffs)
- convert_to_yaml(stop_id, updated_merge_request_diff.commits, updated_merge_request_diff.diffs)
+ convert_to_yaml(start_id, merge_request_diff.commits, diffs_to_hashes(merge_request_diff.merge_request_diff_files))
+ convert_to_yaml(stop_id, updated_merge_request_diff.commits, diffs_to_hashes(updated_merge_request_diff.merge_request_diff_files))
MergeRequestDiffCommit.delete_all
MergeRequestDiffFile.delete_all
@@ -80,10 +80,32 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits do
context 'when BUFFER_ROWS is exceeded' do
before do
stub_const("#{described_class}::BUFFER_ROWS", 1)
+
+ allow(Gitlab::Database).to receive(:bulk_insert).and_call_original
+ end
+
+ it 'inserts commit rows in chunks of BUFFER_ROWS' do
+ # There are 29 commits in each diff, so we should have slices of 20 + 9 + 20 + 9.
+ stub_const("#{described_class}::BUFFER_ROWS", 20)
+
+ expect(Gitlab::Database).to receive(:bulk_insert)
+ .with('merge_request_diff_commits', anything)
+ .exactly(4)
+ .times
+ .and_call_original
+
+ subject.perform(start_id, stop_id)
end
- it 'updates and continues' do
- expect(described_class::MergeRequestDiff).to receive(:transaction).twice
+ it 'inserts diff rows in chunks of DIFF_FILE_BUFFER_ROWS' do
+ # There are 20 files in each diff, so we should have slices of 20 + 20.
+ stub_const("#{described_class}::DIFF_FILE_BUFFER_ROWS", 20)
+
+ expect(Gitlab::Database).to receive(:bulk_insert)
+ .with('merge_request_diff_files', anything)
+ .exactly(2)
+ .times
+ .and_call_original
subject.perform(start_id, stop_id)
end
@@ -91,27 +113,87 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits do
context 'when BUFFER_ROWS is not exceeded' do
it 'only updates once' do
- expect(described_class::MergeRequestDiff).to receive(:transaction).once
+ expect(Gitlab::Database).to receive(:bulk_insert)
+ .with('merge_request_diff_commits', anything)
+ .once
+ .and_call_original
+
+ expect(Gitlab::Database).to receive(:bulk_insert)
+ .with('merge_request_diff_files', anything)
+ .once
+ .and_call_original
subject.perform(start_id, stop_id)
end
end
- end
- context 'when the merge request diff update fails' do
- before do
- allow(described_class::MergeRequestDiff)
- .to receive(:update_all).and_raise(ActiveRecord::Rollback)
- end
+ context 'when some rows were already inserted due to a previous failure' do
+ before do
+ subject.perform(start_id, stop_id)
- it 'does not add any diff commits' do
- expect { subject.perform(merge_request_diff.id, merge_request_diff.id) }
- .not_to change { MergeRequestDiffCommit.count }
+ convert_to_yaml(start_id, merge_request_diff.commits, diffs_to_hashes(merge_request_diff.merge_request_diff_files))
+ convert_to_yaml(stop_id, updated_merge_request_diff.commits, diffs_to_hashes(updated_merge_request_diff.merge_request_diff_files))
+ end
+
+ it 'does not raise' do
+ expect { subject.perform(start_id, stop_id) }.not_to raise_exception
+ end
+
+ it 'logs a message' do
+ expect(Rails.logger).to receive(:info)
+ .with(
+ a_string_matching(described_class.name).and(matching([start_id, stop_id].inspect))
+ )
+ .twice
+
+ subject.perform(start_id, stop_id)
+ end
+
+ it 'ends up with the correct rows' do
+ expect(updated_merge_request_diff.commits.count).to eq(29)
+ expect(updated_merge_request_diff.raw_diffs.count).to eq(20)
+ end
end
- it 'does not add any diff files' do
- expect { subject.perform(merge_request_diff.id, merge_request_diff.id) }
- .not_to change { MergeRequestDiffFile.count }
+ context 'when the merge request diff update fails' do
+ let(:exception) { ActiveRecord::RecordNotFound }
+
+ let(:perform_ignoring_exceptions) do
+ begin
+ subject.perform(start_id, stop_id)
+ rescue described_class::Error
+ end
+ end
+
+ before do
+ allow_any_instance_of(described_class::MergeRequestDiff::ActiveRecord_Relation)
+ .to receive(:update_all).and_raise(exception)
+ end
+
+ it 'raises an error' do
+ expect { subject.perform(start_id, stop_id) }
+ .to raise_exception(described_class::Error)
+ end
+
+ it 'logs the error' do
+ expect(Rails.logger).to receive(:info).with(
+ a_string_matching(described_class.name)
+ .and(matching([start_id, stop_id].inspect))
+ .and(matching(exception.name))
+ )
+
+ perform_ignoring_exceptions
+ end
+
+ it 'still adds diff commits' do
+ expect { perform_ignoring_exceptions }
+ .to change { MergeRequestDiffCommit.count }
+ end
+
+ it 'still adds diff files' do
+ expect { perform_ignoring_exceptions }
+ .to change { MergeRequestDiffFile.count }
+ end
end
end
diff --git a/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb b/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb
index 59f69d1e4b1..7b5a00c6111 100644
--- a/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb
@@ -8,7 +8,7 @@ describe Gitlab::BackgroundMigration::MigrateSystemUploadsToNewFolder do
end
describe '#perform' do
- it 'renames the path of system-uploads', truncate: true do
+ it 'renames the path of system-uploads', :truncate do
upload = create(:upload, model: create(:project), path: 'uploads/system/project/avatar.jpg')
migration.perform('uploads/system/', 'uploads/-/system/')
diff --git a/spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb b/spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb
new file mode 100644
index 00000000000..dfbf1bb681a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb
@@ -0,0 +1,36 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::NormalizeLdapExternUidsRange, :migration, schema: 20170921101004 do
+ let!(:identities) { table(:identities) }
+
+ before do
+ # LDAP identities
+ (1..4).each do |i|
+ identities.create!(id: i, provider: 'ldapmain', extern_uid: " uid = foo #{i}, ou = People, dc = example, dc = com ", user_id: i)
+ end
+
+ # Non-LDAP identity
+ identities.create!(id: 5, provider: 'foo', extern_uid: " uid = foo 5, ou = People, dc = example, dc = com ", user_id: 5)
+
+ # Another LDAP identity
+ identities.create!(id: 6, provider: 'ldapmain', extern_uid: " uid = foo 6, ou = People, dc = example, dc = com ", user_id: 6)
+ end
+
+ it 'normalizes the LDAP identities in the range' do
+ described_class.new.perform(1, 3)
+ expect(identities.find(1).extern_uid).to eq("uid=foo 1,ou=people,dc=example,dc=com")
+ expect(identities.find(2).extern_uid).to eq("uid=foo 2,ou=people,dc=example,dc=com")
+ expect(identities.find(3).extern_uid).to eq("uid=foo 3,ou=people,dc=example,dc=com")
+ expect(identities.find(4).extern_uid).to eq(" uid = foo 4, ou = People, dc = example, dc = com ")
+ expect(identities.find(5).extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ")
+ expect(identities.find(6).extern_uid).to eq(" uid = foo 6, ou = People, dc = example, dc = com ")
+
+ described_class.new.perform(4, 6)
+ expect(identities.find(1).extern_uid).to eq("uid=foo 1,ou=people,dc=example,dc=com")
+ expect(identities.find(2).extern_uid).to eq("uid=foo 2,ou=people,dc=example,dc=com")
+ expect(identities.find(3).extern_uid).to eq("uid=foo 3,ou=people,dc=example,dc=com")
+ expect(identities.find(4).extern_uid).to eq("uid=foo 4,ou=people,dc=example,dc=com")
+ expect(identities.find(5).extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ")
+ expect(identities.find(6).extern_uid).to eq("uid=foo 6,ou=people,dc=example,dc=com")
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb b/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
new file mode 100644
index 00000000000..2c2684a6fc9
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
@@ -0,0 +1,93 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, schema: 20170929131201 do
+ let(:migration) { described_class.new }
+ let(:base1) { create(:project) }
+ let(:base1_fork1) { create(:project) }
+ let(:base1_fork2) { create(:project) }
+
+ let(:base2) { create(:project) }
+ let(:base2_fork1) { create(:project) }
+ let(:base2_fork2) { create(:project) }
+
+ let!(:forked_project_links) { table(:forked_project_links) }
+ let!(:fork_networks) { table(:fork_networks) }
+ let!(:fork_network_members) { table(:fork_network_members) }
+
+ let(:fork_network1) { fork_networks.find_by(root_project_id: base1.id) }
+ let(:fork_network2) { fork_networks.find_by(root_project_id: base2.id) }
+
+ before do
+ # A normal fork link
+ forked_project_links.create(id: 1,
+ forked_from_project_id: base1.id,
+ forked_to_project_id: base1_fork1.id)
+ forked_project_links.create(id: 2,
+ forked_from_project_id: base1.id,
+ forked_to_project_id: base1_fork2.id)
+
+ forked_project_links.create(id: 3,
+ forked_from_project_id: base2.id,
+ forked_to_project_id: base2_fork1.id)
+ forked_project_links.create(id: 4,
+ forked_from_project_id: base2_fork1.id,
+ forked_to_project_id: create(:project).id)
+
+ forked_project_links.create(id: 5,
+ forked_from_project_id: base2.id,
+ forked_to_project_id: base2_fork2.id)
+
+ migration.perform(1, 3)
+ end
+
+ it 'creates the fork networks' do
+ expect(fork_network1).not_to be_nil
+ expect(fork_network2).not_to be_nil
+ end
+
+ it 'does not create a fork network for a fork-of-fork' do
+ # perform the entire batch
+ migration.perform(1, 5)
+
+ expect(fork_networks.find_by(root_project_id: base2_fork1.id)).to be_nil
+ end
+
+ it 'creates memberships for the root of fork networks' do
+ base1_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
+ project_id: base1.id)
+ base2_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
+ project_id: base2.id)
+
+ expect(base1_membership).not_to be_nil
+ expect(base2_membership).not_to be_nil
+ end
+
+ it 'skips links that had their source project deleted' do
+ forked_project_links.create(id: 6, forked_from_project_id: 99999, forked_to_project_id: create(:project).id)
+
+ migration.perform(5, 8)
+
+ expect(fork_networks.find_by(root_project_id: 99999)).to be_nil
+ end
+
+ it 'schedules a job for inserting memberships for forks-of-forks' do
+ delay = Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange::RESCHEDULE_DELAY
+
+ expect(BackgroundMigrationWorker)
+ .to receive(:perform_in).with(delay, "CreateForkNetworkMembershipsRange", [1, 3])
+
+ migration.perform(1, 3)
+ end
+
+ it 'only processes a single batch of links at a time' do
+ expect(fork_network_members.count).to eq(5)
+
+ migration.perform(3, 5)
+
+ expect(fork_network_members.count).to eq(7)
+ end
+
+ it 'can be repeated without effect' do
+ expect { migration.perform(1, 3) }.not_to change { fork_network_members.count }
+ end
+end
diff --git a/spec/lib/gitlab/backup/manager_spec.rb b/spec/lib/gitlab/backup/manager_spec.rb
index 8772d3d5ada..422f2af7266 100644
--- a/spec/lib/gitlab/backup/manager_spec.rb
+++ b/spec/lib/gitlab/backup/manager_spec.rb
@@ -26,6 +26,9 @@ describe Backup::Manager do
[
'1451606400_2016_01_01_1.2.3_gitlab_backup.tar',
'1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-pre_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-rc1_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-pre-ee_gitlab_backup.tar',
'1451510000_2015_12_30_gitlab_backup.tar',
'1450742400_2015_12_22_gitlab_backup.tar',
'1449878400_gitlab_backup.tar',
@@ -57,6 +60,30 @@ describe Backup::Manager do
end
end
+ context 'when no valid file is found' do
+ let(:files) do
+ [
+ '14516064000_2016_01_01_1.2.3_gitlab_backup.tar',
+ 'foo_1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-foo_gitlab_backup.tar'
+ ]
+ end
+
+ before do
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
+
+ subject.remove_old
+ end
+
+ it 'removes no files' do
+ expect(FileUtils).not_to have_received(:rm)
+ end
+
+ it 'prints a done message' do
+ expect(progress).to have_received(:puts).with('done. (0 removed)')
+ end
+ end
+
context 'when there are no files older than keep_time' do
before do
# Set to 30 days
@@ -84,16 +111,22 @@ describe Backup::Manager do
it 'removes matching files with a human-readable versioned timestamp' do
expect(FileUtils).to have_received(:rm).with(files[1])
- end
-
- it 'removes matching files with a human-readable non-versioned timestamp' do
expect(FileUtils).to have_received(:rm).with(files[2])
expect(FileUtils).to have_received(:rm).with(files[3])
end
- it 'removes matching files without a human-readable timestamp' do
+ it 'removes matching files with a human-readable versioned timestamp with tagged EE' do
expect(FileUtils).to have_received(:rm).with(files[4])
+ end
+
+ it 'removes matching files with a human-readable non-versioned timestamp' do
expect(FileUtils).to have_received(:rm).with(files[5])
+ expect(FileUtils).to have_received(:rm).with(files[6])
+ end
+
+ it 'removes matching files without a human-readable timestamp' do
+ expect(FileUtils).to have_received(:rm).with(files[7])
+ expect(FileUtils).to have_received(:rm).with(files[8])
end
it 'does not remove files that are not old enough' do
@@ -101,11 +134,11 @@ describe Backup::Manager do
end
it 'does not remove non-matching files' do
- expect(FileUtils).not_to have_received(:rm).with(files[6])
+ expect(FileUtils).not_to have_received(:rm).with(files[9])
end
it 'prints a done message' do
- expect(progress).to have_received(:puts).with('done. (5 removed)')
+ expect(progress).to have_received(:puts).with('done. (8 removed)')
end
end
@@ -121,14 +154,15 @@ describe Backup::Manager do
end
it 'removes the remaining expected files' do
- expect(FileUtils).to have_received(:rm).with(files[2])
- expect(FileUtils).to have_received(:rm).with(files[3])
expect(FileUtils).to have_received(:rm).with(files[4])
expect(FileUtils).to have_received(:rm).with(files[5])
+ expect(FileUtils).to have_received(:rm).with(files[6])
+ expect(FileUtils).to have_received(:rm).with(files[7])
+ expect(FileUtils).to have_received(:rm).with(files[8])
end
it 'sets the correct removed count' do
- expect(progress).to have_received(:puts).with('done. (4 removed)')
+ expect(progress).to have_received(:puts).with('done. (7 removed)')
end
it 'prints the error from file that could not be removed' do
diff --git a/spec/lib/gitlab/checks/force_push_spec.rb b/spec/lib/gitlab/checks/force_push_spec.rb
index f8c8b83a3ac..633e319f46d 100644
--- a/spec/lib/gitlab/checks/force_push_spec.rb
+++ b/spec/lib/gitlab/checks/force_push_spec.rb
@@ -3,15 +3,15 @@ require 'spec_helper'
describe Gitlab::Checks::ForcePush do
let(:project) { create(:project, :repository) }
- context "exit code checking", skip_gitaly_mock: true do
+ context "exit code checking", :skip_gitaly_mock do
it "does not raise a runtime error if the `popen` call to git returns a zero exit code" do
- allow(Gitlab::Popen).to receive(:popen).and_return(['normal output', 0])
+ allow_any_instance_of(Gitlab::Git::RevList).to receive(:popen).and_return(['normal output', 0])
expect { described_class.force_push?(project, 'oldrev', 'newrev') }.not_to raise_error
end
it "raises a runtime error if the `popen` call to git returns a non-zero exit code" do
- allow(Gitlab::Popen).to receive(:popen).and_return(['error', 1])
+ allow_any_instance_of(Gitlab::Git::RevList).to receive(:popen).and_return(['error', 1])
expect { described_class.force_push?(project, 'oldrev', 'newrev') }.to raise_error(RuntimeError)
end
diff --git a/spec/lib/ci/ansi2html_spec.rb b/spec/lib/gitlab/ci/ansi2html_spec.rb
index e49ecadde20..33540eab5d6 100644
--- a/spec/lib/ci/ansi2html_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2html_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Ci::Ansi2html do
+describe Gitlab::Ci::Ansi2html do
subject { described_class }
it "prints non-ansi as-is" do
@@ -195,6 +195,32 @@ describe Ci::Ansi2html do
end
end
+ context "with section markers" do
+ let(:section_name) { 'test_section' }
+ let(:section_start_time) { Time.new(2017, 9, 20).utc }
+ let(:section_duration) { 3.seconds }
+ let(:section_end_time) { section_start_time + section_duration }
+ let(:section_start) { "section_start:#{section_start_time.to_i}:#{section_name}\r\033[0K"}
+ let(:section_end) { "section_end:#{section_end_time.to_i}:#{section_name}\r\033[0K"}
+ let(:section_start_html) do
+ '<div class="hidden" data-action="start"'\
+ " data-timestamp=\"#{section_start_time.to_i}\" data-section=\"#{section_name}\">"\
+ "#{section_start[0...-5]}</div>"
+ end
+ let(:section_end_html) do
+ '<div class="hidden" data-action="end"'\
+ " data-timestamp=\"#{section_end_time.to_i}\" data-section=\"#{section_name}\">"\
+ "#{section_end[0...-5]}</div>"
+ end
+
+ it "prints light red" do
+ text = "#{section_start}\e[91mHello\e[0m\n#{section_end}"
+ html = %{#{section_start_html}<span class="term-fg-l-red">Hello</span><br>#{section_end_html}}
+
+ expect(convert_html(text)).to eq(html)
+ end
+ end
+
describe "truncates" do
let(:text) { "Hello World" }
let(:stream) { StringIO.new(text) }
diff --git a/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb b/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
new file mode 100644
index 00000000000..15eb01eb472
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
@@ -0,0 +1,30 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Policy::Kubernetes do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ context 'when kubernetes service is active' do
+ set(:project) { create(:kubernetes_project) }
+
+ it 'is satisfied by a kubernetes pipeline' do
+ expect(described_class.new('active'))
+ .to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when kubernetes service is inactive' do
+ set(:project) { create(:project) }
+
+ it 'is not satisfied by a pipeline without kubernetes available' do
+ expect(described_class.new('active'))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when kubernetes policy is invalid' do
+ it 'raises an error' do
+ expect { described_class.new('unknown') }
+ .to raise_error(described_class::UnknownPolicyError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/policy/refs_spec.rb b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
new file mode 100644
index 00000000000..7211187e511
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
@@ -0,0 +1,87 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Policy::Refs do
+ describe '#satisfied_by?' do
+ context 'when matching ref' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, ref: 'master') }
+
+ it 'is satisfied when pipeline branch matches' do
+ expect(described_class.new(%w[master deploy]))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied when pipeline branch does not match' do
+ expect(described_class.new(%w[feature fix]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when matching tags' do
+ context 'when pipeline runs for a tag' do
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, ref: 'feature', tag: true)
+ end
+
+ it 'is satisfied when tags matcher is specified' do
+ expect(described_class.new(%w[master tags]))
+ .to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when pipeline is not created for a tag' do
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, ref: 'feature', tag: false)
+ end
+
+ it 'is not satisfied when tag match is specified' do
+ expect(described_class.new(%w[master tags]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+ end
+
+ context 'when also matching a path' do
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, ref: 'master')
+ end
+
+ it 'is satisfied when provided path matches the specified one' do
+ expect(described_class.new(%W[master@#{pipeline.project_full_path}]))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied when path differs' do
+ expect(described_class.new(%w[master@some/fork/repository]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when matching a source' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, source: :push) }
+
+ it 'is satisfied when provided source keyword matches' do
+ expect(described_class.new(%w[pushes]))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied when provided source keyword does not match' do
+ expect(described_class.new(%w[triggers]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when matching a ref by a regular expression' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, ref: 'docs-something') }
+
+ it 'is satisfied when regexp matches pipeline ref' do
+ expect(described_class.new(['/docs-.*/']))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied when regexp does not match pipeline ref' do
+ expect(described_class.new(['/fix-.*/']))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/policy_spec.rb b/spec/lib/gitlab/ci/build/policy_spec.rb
new file mode 100644
index 00000000000..20ee3dd3e89
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/policy_spec.rb
@@ -0,0 +1,37 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Policy do
+ let(:policy) { spy('policy specification') }
+
+ before do
+ stub_const("#{described_class}::Something", policy)
+ end
+
+ describe '.fabricate' do
+ context 'when policy exists' do
+ it 'fabricates and initializes relevant policy' do
+ specs = described_class.fabricate(something: 'some value')
+
+ expect(specs).to be_an Array
+ expect(specs).to be_one
+ expect(policy).to have_received(:new).with('some value')
+ end
+ end
+
+ context 'when some policies are not defined' do
+ it 'raises an error for an undefined policy' do
+ expect { described_class.fabricate(unknown: 'first') }
+ .to raise_error(NameError)
+ end
+ end
+
+ context 'when passing a nil value as specs' do
+ it 'returns an empty array' do
+ specs = described_class.fabricate(nil)
+
+ expect(specs).to be_an Array
+ expect(specs).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/lib/ci/charts_spec.rb b/spec/lib/gitlab/ci/charts_spec.rb
index f0769deef21..f8188675013 100644
--- a/spec/lib/ci/charts_spec.rb
+++ b/spec/lib/gitlab/ci/charts_spec.rb
@@ -1,9 +1,9 @@
require 'spec_helper'
-describe Ci::Charts do
+describe Gitlab::Ci::Charts do
context "pipeline_times" do
let(:project) { create(:project) }
- let(:chart) { Ci::Charts::PipelineTime.new(project) }
+ let(:chart) { Gitlab::Ci::Charts::PipelineTime.new(project) }
subject { chart.pipeline_times }
diff --git a/spec/lib/ci/mask_secret_spec.rb b/spec/lib/gitlab/ci/mask_secret_spec.rb
index f7b753b022b..3789a142248 100644
--- a/spec/lib/ci/mask_secret_spec.rb
+++ b/spec/lib/gitlab/ci/mask_secret_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Ci::MaskSecret do
+describe Gitlab::Ci::MaskSecret do
subject { described_class }
describe '#mask' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
new file mode 100644
index 00000000000..f54e2326b06
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
@@ -0,0 +1,66 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Pipeline::Chain::Create do
+ set(:project) { create(:project) }
+ set(:user) { create(:user) }
+
+ let(:pipeline) do
+ build(:ci_pipeline_with_one_job, project: project,
+ ref: 'master')
+ end
+
+ let(:command) do
+ double('command', project: project,
+ current_user: user,
+ seeds_block: nil)
+ end
+
+ let(:step) { described_class.new(pipeline, command) }
+
+ before do
+ step.perform!
+ end
+
+ context 'when pipeline is ready to be saved' do
+ it 'saves a pipeline' do
+ expect(pipeline).to be_persisted
+ end
+
+ it 'does not break the chain' do
+ expect(step.break?).to be false
+ end
+
+ it 'creates stages' do
+ expect(pipeline.reload.stages).to be_one
+ end
+ end
+
+ context 'when pipeline has validation errors' do
+ let(:pipeline) do
+ build(:ci_pipeline, project: project, ref: nil)
+ end
+
+ it 'breaks the chain' do
+ expect(step.break?).to be true
+ end
+
+ it 'appends validation error' do
+ expect(pipeline.errors.to_a)
+ .to include /Failed to persist the pipeline/
+ end
+ end
+
+ context 'when there is a seed block present' do
+ let(:seeds) { spy('pipeline seeds') }
+
+ let(:command) do
+ double('command', project: project,
+ current_user: user,
+ seeds_block: seeds)
+ end
+
+ it 'executes the block' do
+ expect(seeds).to have_received(:call).with(pipeline)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
new file mode 100644
index 00000000000..e165e0fac2a
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
@@ -0,0 +1,55 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Pipeline::Chain::Sequence do
+ set(:project) { create(:project) }
+ set(:user) { create(:user) }
+
+ let(:pipeline) { build_stubbed(:ci_pipeline) }
+ let(:command) { double('command' ) }
+ let(:first_step) { spy('first step') }
+ let(:second_step) { spy('second step') }
+ let(:sequence) { [first_step, second_step] }
+
+ subject do
+ described_class.new(pipeline, command, sequence)
+ end
+
+ context 'when one of steps breaks the chain' do
+ before do
+ allow(first_step).to receive(:break?).and_return(true)
+ end
+
+ it 'does not process the second step' do
+ subject.build! do |pipeline, sequence|
+ expect(sequence).not_to be_complete
+ end
+
+ expect(second_step).not_to have_received(:perform!)
+ end
+
+ it 'returns a pipeline object' do
+ expect(subject.build!).to eq pipeline
+ end
+ end
+
+ context 'when all chains are executed correctly' do
+ before do
+ sequence.each do |step|
+ allow(step).to receive(:break?).and_return(false)
+ end
+ end
+
+ it 'iterates through entire sequence' do
+ subject.build! do |pipeline, sequence|
+ expect(sequence).to be_complete
+ end
+
+ expect(first_step).to have_received(:perform!)
+ expect(second_step).to have_received(:perform!)
+ end
+
+ it 'returns a pipeline object' do
+ expect(subject.build!).to eq pipeline
+ end
+ end
+end
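
Taken together, these examples describe the runner's control flow: steps are performed in order, processing stops at the first step whose `break?` is true, `build!` yields the pipeline and the sequence to an optional block, and it always returns the pipeline. A condensed sketch matching only what the assertions require (a hypothetical runner, not the implementation shipped in this patch):

# sequence_sketch.rb -- illustrative chain runner for the behaviour
# asserted above.
class StepSequence
  def initialize(pipeline, command, steps)
    @pipeline = pipeline
    @completed = []
    @steps = steps.map { |step| step.new(pipeline, command) }
  end

  def build!
    @steps.each do |step|
      step.perform!
      break if step.break?   # later steps are never performed
      @completed << step
    end

    yield @pipeline, self if block_given?

    @pipeline                # always returns the pipeline object
  end

  def complete?
    @completed.size == @steps.size
  end
end
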
diff --git a/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb
new file mode 100644
index 00000000000..32bd5de829b
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb
@@ -0,0 +1,85 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Pipeline::Chain::Skip do
+ set(:project) { create(:project) }
+ set(:user) { create(:user) }
+ set(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let(:command) do
+ double('command', project: project,
+ current_user: user,
+ ignore_skip_ci: false,
+ save_incompleted: true)
+ end
+
+ let(:step) { described_class.new(pipeline, command) }
+
+ context 'when pipeline has been skipped by a user' do
+ before do
+ allow(pipeline).to receive(:git_commit_message)
+ .and_return('commit message [ci skip]')
+
+ step.perform!
+ end
+
+ it 'breaks the chain' do
+ expect(step.break?).to be true
+ end
+
+ it 'skips the pipeline' do
+ expect(pipeline.reload).to be_skipped
+ end
+ end
+
+ context 'when pipeline has not been skipped' do
+ before do
+ step.perform!
+ end
+
+ it 'does not break the chain' do
+ expect(step.break?).to be false
+ end
+
+ it 'does not skip the pipeline' do
+ expect(pipeline.reload).not_to be_skipped
+ end
+ end
+
+ context 'when [ci skip] should be ignored' do
+ let(:command) do
+ double('command', project: project,
+ current_user: user,
+ ignore_skip_ci: true)
+ end
+
+ it 'does not break the chain' do
+ step.perform!
+
+ expect(step.break?).to be false
+ end
+ end
+
+ context 'when pipeline should be skipped but not persisted' do
+ let(:command) do
+ double('command', project: project,
+ current_user: user,
+ ignore_skip_ci: false,
+ save_incompleted: false)
+ end
+
+ before do
+ allow(pipeline).to receive(:git_commit_message)
+ .and_return('commit message [ci skip]')
+
+ step.perform!
+ end
+
+ it 'breaks the chain' do
+ expect(step.break?).to be true
+ end
+
+ it 'does not skip pipeline' do
+ expect(pipeline.reload).not_to be_skipped
+ end
+ end
+end
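
These examples revolve around one check: whether the commit message carries a `[ci skip]` marker, combined with the command's `ignore_skip_ci` and `save_incompleted` flags. A tiny sketch of that detection, assuming only the behaviour described above (the regexp and method name are illustrative, not taken from this patch):

# ci_skip_sketch.rb -- illustrative [ci skip] detection.
CI_SKIP_PATTERN = /\[(ci[ _-]skip|skip[ _-]ci)\]/i

def skipped_by_commit_message?(message)
  # a pipeline counts as user-skipped when the commit message contains
  # a [ci skip] (or [skip ci]) marker, case-insensitively
  !!(message =~ CI_SKIP_PATTERN)
end

skipped_by_commit_message?('commit message [ci skip]') # => true
skipped_by_commit_message?('regular commit message')   # => false
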
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb
new file mode 100644
index 00000000000..0bbdd23f4d6
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb
@@ -0,0 +1,142 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Pipeline::Chain::Validate::Abilities do
+ set(:project) { create(:project, :repository) }
+ set(:user) { create(:user) }
+
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, ref: ref, project: project)
+ end
+
+ let(:command) do
+ double('command', project: project, current_user: user)
+ end
+
+ let(:step) { described_class.new(pipeline, command) }
+
+ let(:ref) { 'master' }
+
+ context 'when user has no ability to run a pipeline' do
+ before do
+ step.perform!
+ end
+
+ it 'adds an error about insufficient permissions' do
+ expect(pipeline.errors.to_a)
+ .to include /Insufficient permissions/
+ end
+
+ it 'breaks the pipeline builder chain' do
+ expect(step.break?).to eq true
+ end
+ end
+
+ context 'when user has ability to create a pipeline' do
+ before do
+ project.add_developer(user)
+
+ step.perform!
+ end
+
+ it 'does not invalidate the pipeline' do
+ expect(pipeline).to be_valid
+ end
+
+ it 'does not break the chain' do
+ expect(step.break?).to eq false
+ end
+ end
+
+ describe '#allowed_to_create?' do
+ subject { step.allowed_to_create? }
+
+ context 'when user is a developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to be_truthy }
+
+ context 'when the branch is protected' do
+ let!(:protected_branch) do
+ create(:protected_branch, project: project, name: ref)
+ end
+
+ it { is_expected.to be_falsey }
+
+ context 'when developers are allowed to merge' do
+ let!(:protected_branch) do
+ create(:protected_branch,
+ :developers_can_merge,
+ project: project,
+ name: ref)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ context 'when the tag is protected' do
+ let(:ref) { 'v1.0.0' }
+
+ let!(:protected_tag) do
+ create(:protected_tag, project: project, name: ref)
+ end
+
+ it { is_expected.to be_falsey }
+
+ context 'when developers are allowed to create the tag' do
+ let!(:protected_tag) do
+ create(:protected_tag,
+ :developers_can_create,
+ project: project,
+ name: ref)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+
+ context 'when user is a master' do
+ before do
+ project.add_master(user)
+ end
+
+ it { is_expected.to be_truthy }
+
+ context 'when the branch is protected' do
+ let!(:protected_branch) do
+ create(:protected_branch, project: project, name: ref)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when the tag is protected' do
+ let(:ref) { 'v1.0.0' }
+
+ let!(:protected_tag) do
+ create(:protected_tag, project: project, name: ref)
+ end
+
+ it { is_expected.to be_truthy }
+
+ context 'when no one can create the tag' do
+ let!(:protected_tag) do
+ create(:protected_tag,
+ :no_one_can_create,
+ project: project,
+ name: ref)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+
+ context 'when owner cannot create pipeline' do
+ it { is_expected.to be_falsey }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb
new file mode 100644
index 00000000000..8357af38f92
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb
@@ -0,0 +1,130 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Pipeline::Chain::Validate::Config do
+ set(:project) { create(:project) }
+ set(:user) { create(:user) }
+
+ let(:command) do
+ double('command', project: project,
+ current_user: user,
+ save_incompleted: true)
+ end
+
+ let!(:step) { described_class.new(pipeline, command) }
+
+ before do
+ step.perform!
+ end
+
+ context 'when pipeline has no YAML configuration' do
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, project: project)
+ end
+
+ it 'appends errors about missing configuration' do
+ expect(pipeline.errors.to_a)
+ .to include 'Missing .gitlab-ci.yml file'
+ end
+
+ it 'breaks the chain' do
+ expect(step.break?).to be true
+ end
+ end
+
+ context 'when YAML configuration contains errors' do
+ let(:pipeline) do
+ build(:ci_pipeline, project: project, config: 'invalid YAML')
+ end
+
+ it 'appends errors about YAML errors' do
+ expect(pipeline.errors.to_a)
+ .to include 'Invalid configuration format'
+ end
+
+ it 'breaks the chain' do
+ expect(step.break?).to be true
+ end
+
+ context 'when saving incomplete pipeline is allowed' do
+ let(:command) do
+ double('command', project: project,
+ current_user: user,
+ save_incompleted: true)
+ end
+
+ it 'fails the pipeline' do
+ expect(pipeline.reload).to be_failed
+ end
+
+ it 'sets a config error failure reason' do
+ expect(pipeline.reload.config_error?).to eq true
+ end
+ end
+
+ context 'when saving incomplete pipeline is not allowed' do
+ let(:command) do
+ double('command', project: project,
+ current_user: user,
+ save_incompleted: false)
+ end
+
+ it 'does not drop pipeline' do
+ expect(pipeline).not_to be_failed
+ expect(pipeline).not_to be_persisted
+ end
+ end
+ end
+
+ context 'when pipeline has no stages / jobs' do
+ let(:config) do
+ { rspec: {
+ script: 'ls',
+ only: ['something']
+ } }
+ end
+
+ let(:pipeline) do
+ build(:ci_pipeline, project: project, config: config)
+ end
+
+ it 'appends an error about missing stages' do
+ expect(pipeline.errors.to_a)
+ .to include 'No stages / jobs for this pipeline.'
+ end
+
+ it 'breaks the chain' do
+ expect(step.break?).to be true
+ end
+ end
+
+ context 'when pipeline contains configuration validation errors' do
+ let(:config) { { rspec: {} } }
+
+ let(:pipeline) do
+ build(:ci_pipeline, project: project, config: config)
+ end
+
+ it 'appends configuration validation errors to pipeline errors' do
+ expect(pipeline.errors.to_a)
+ .to include "jobs:rspec config can't be blank"
+ end
+
+ it 'breaks the chain' do
+ expect(step.break?).to be true
+ end
+ end
+
+ context 'when pipeline is correct and complete' do
+ let(:pipeline) do
+ build(:ci_pipeline_with_one_job, project: project)
+ end
+
+ it 'does not invalidate the pipeline' do
+ expect(pipeline).to be_valid
+ end
+
+ it 'does not break the chain' do
+ expect(step.break?).to be false
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
new file mode 100644
index 00000000000..bb356efe9ad
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
@@ -0,0 +1,60 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Pipeline::Chain::Validate::Repository do
+ set(:project) { create(:project, :repository) }
+ set(:user) { create(:user) }
+
+ let(:command) do
+ double('command', project: project, current_user: user)
+ end
+
+ let!(:step) { described_class.new(pipeline, command) }
+
+ before do
+ step.perform!
+ end
+
+ context 'when pipeline ref and sha exist' do
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, ref: 'master', sha: '123', project: project)
+ end
+
+ it 'does not break the chain' do
+ expect(step.break?).to be false
+ end
+
+ it 'does not append pipeline errors' do
+ expect(pipeline.errors).to be_empty
+ end
+ end
+
+ context 'when pipeline ref does not exist' do
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, ref: 'something', project: project)
+ end
+
+ it 'breaks the chain' do
+ expect(step.break?).to be true
+ end
+
+ it 'adds an error about missing ref' do
+ expect(pipeline.errors.to_a)
+ .to include 'Reference not found'
+ end
+ end
+
+ context 'when pipeline does not have SHA set' do
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, ref: 'master', sha: nil, project: project)
+ end
+
+ it 'breaks the chain' do
+ expect(step.break?).to be true
+ end
+
+ it 'adds an error about missing SHA' do
+ expect(pipeline.errors.to_a)
+ .to include 'Commit not found'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline_duration_spec.rb b/spec/lib/gitlab/ci/pipeline/duration_spec.rb
index b26728a843c..7c9836e2da6 100644
--- a/spec/lib/gitlab/ci/pipeline_duration_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/duration_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::Ci::PipelineDuration do
+describe Gitlab::Ci::Pipeline::Duration do
let(:calculated_duration) { calculate(data) }
shared_examples 'calculating duration' do
@@ -107,9 +107,9 @@ describe Gitlab::Ci::PipelineDuration do
def calculate(data)
periods = data.shuffle.map do |(first, last)|
- Gitlab::Ci::PipelineDuration::Period.new(first, last)
+ described_class::Period.new(first, last)
end
- Gitlab::Ci::PipelineDuration.from_periods(periods.sort_by(&:first))
+ described_class.from_periods(periods.sort_by(&:first))
end
end
diff --git a/spec/lib/gitlab/ci/stage/seed_spec.rb b/spec/lib/gitlab/ci/stage/seed_spec.rb
index 9ecd128faca..3fe8d50c49a 100644
--- a/spec/lib/gitlab/ci/stage/seed_spec.rb
+++ b/spec/lib/gitlab/ci/stage/seed_spec.rb
@@ -11,6 +11,12 @@ describe Gitlab::Ci::Stage::Seed do
described_class.new(pipeline, 'test', builds)
end
+ describe '#size' do
+ it 'returns the number of jobs in the stage' do
+ expect(subject.size).to eq 2
+ end
+ end
+
describe '#stage' do
it 'returns hash attributes of a stage' do
expect(subject.stage).to be_a Hash
diff --git a/spec/lib/gitlab/ci/trace/section_parser_spec.rb b/spec/lib/gitlab/ci/trace/section_parser_spec.rb
new file mode 100644
index 00000000000..ca53ff87c6f
--- /dev/null
+++ b/spec/lib/gitlab/ci/trace/section_parser_spec.rb
@@ -0,0 +1,87 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Trace::SectionParser do
+ def lines_with_pos(text)
+ pos = 0
+ StringIO.new(text).each_line do |line|
+ yield line, pos
+ pos += line.bytesize + 1 # newline
+ end
+ end
+
+ def build_lines(text)
+ to_enum(:lines_with_pos, text)
+ end
+
+ def section(name, start, duration, text)
+ end_ = start + duration
+ "section_start:#{start.to_i}:#{name}\r\033[0K#{text}section_end:#{end_.to_i}:#{name}\r\033[0K"
+ end
+
+ let(:lines) { build_lines('') }
+ subject { described_class.new(lines) }
+
+ describe '#sections' do
+ before do
+ subject.parse!
+ end
+
+ context 'empty trace' do
+ let(:lines) { build_lines('') }
+
+ it { expect(subject.sections).to be_empty }
+ end
+
+ context 'with a sectionless trace' do
+ let(:lines) { build_lines("line 1\nline 2\n") }
+
+ it { expect(subject.sections).to be_empty }
+ end
+
+ context 'with trace markers' do
+ let(:start_time) { Time.new(2017, 10, 5).utc }
+ let(:section_b_duration) { 1.second }
+ let(:section_a) { section('a', start_time, 0, 'a line') }
+ let(:section_b) { section('b', start_time, section_b_duration, "another line\n") }
+ let(:lines) { build_lines(section_a + section_b) }
+
+ it { expect(subject.sections.size).to eq(2) }
+ it { expect(subject.sections[1][:name]).to eq('b') }
+ it { expect(subject.sections[1][:date_start]).to eq(start_time) }
+ it { expect(subject.sections[1][:date_end]).to eq(start_time + section_b_duration) }
+ end
+ end
+
+ describe '#parse!' do
+ context 'multiple "section_" but no complete markers' do
+ let(:lines) { build_lines('section_section_section_') }
+
+ it 'must find 3 possible section starts but no complete sections' do
+ expect(subject).to receive(:find_next_marker).exactly(3).times.and_call_original
+
+ subject.parse!
+
+ expect(subject.sections).to be_empty
+ end
+ end
+
+ context 'trace with UTF-8 chars' do
+ let(:line) { 'GitLab ❤️ 狸 (tanukis)\n' }
+ let(:trace) { section('test_section', Time.new(2017, 10, 5).utc, 3.seconds, line) }
+ let(:lines) { build_lines(trace) }
+
+ it 'must handle byte positioning correctly' do
+ expect(subject).to receive(:find_next_marker).exactly(2).times.and_call_original
+
+ subject.parse!
+
+ sections = subject.sections
+
+ expect(sections.size).to eq(1)
+ s = sections[0]
+ len = s[:byte_end] - s[:byte_start]
+ expect(trace.byteslice(s[:byte_start], len)).to eq(line)
+ end
+ end
+ end
+end
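
The `section` helper above shows the marker format the parser consumes: `section_start:<unix timestamp>:<name>` and a matching `section_end` marker, each terminated by a carriage return and the `\e[0K` erase-line escape. As a rough illustration of that format only (the real parser works on lines and byte offsets; this regexp scan is an assumption, not code from this patch):

# section_marker_sketch.rb -- scan a trace for section markers.
MARKER = /section_(start|end):(\d+):([a-zA-Z0-9_.-]+)\r\e\[0K/

def scan_markers(trace)
  trace.scan(MARKER).map do |kind, timestamp, name|
    { kind: kind, time: Time.at(timestamp.to_i), name: name }
  end
end

trace = "section_start:1506417476:a_section\r\e[0Ksome logs\n" \
        "section_end:1506417477:a_section\r\e[0K"

scan_markers(trace).map { |m| [m[:kind], m[:name]] }
# => [["start", "a_section"], ["end", "a_section"]]
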
diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb
index 9cb0b62590a..3546532b9b4 100644
--- a/spec/lib/gitlab/ci/trace_spec.rb
+++ b/spec/lib/gitlab/ci/trace_spec.rb
@@ -61,6 +61,93 @@ describe Gitlab::Ci::Trace do
end
end
+ describe '#extract_sections' do
+ let(:log) { 'No sections' }
+ let(:sections) { trace.extract_sections }
+
+ before do
+ trace.set(log)
+ end
+
+ context 'no sections' do
+ it 'returns []' do
+ expect(trace.extract_sections).to eq([])
+ end
+ end
+
+ context 'multiple sections available' do
+ let(:log) { File.read(expand_fixture_path('trace/trace_with_sections')) }
+ let(:sections_data) do
+ [
+ { name: 'prepare_script', lines: 2, duration: 3.seconds },
+ { name: 'get_sources', lines: 4, duration: 1.second },
+ { name: 'restore_cache', lines: 0, duration: 0.seconds },
+ { name: 'download_artifacts', lines: 0, duration: 0.seconds },
+ { name: 'build_script', lines: 2, duration: 1.second },
+ { name: 'after_script', lines: 0, duration: 0.seconds },
+ { name: 'archive_cache', lines: 0, duration: 0.seconds },
+ { name: 'upload_artifacts', lines: 0, duration: 0.seconds }
+ ]
+ end
+
+ it "returns valid sections" do
+ expect(sections).not_to be_empty
+ expect(sections.size).to eq(sections_data.size),
+ "expected #{sections_data.size} sections, got #{sections.size}"
+
+ buff = StringIO.new(log)
+ sections.each_with_index do |s, i|
+ expected = sections_data[i]
+
+ expect(s[:name]).to eq(expected[:name])
+ expect(s[:date_end] - s[:date_start]).to eq(expected[:duration])
+
+ buff.seek(s[:byte_start], IO::SEEK_SET)
+ length = s[:byte_end] - s[:byte_start]
+ lines = buff.read(length).count("\n")
+ expect(lines).to eq(expected[:lines])
+ end
+ end
+ end
+
+ context 'log contains "section_start"' do
+ let(:log) { "section_start:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_end:1506417477:a_section\r\033[0K"}
+
+ it "returns only one section" do
+ expect(sections).not_to be_empty
+ expect(sections.size).to eq(1)
+
+ section = sections[0]
+ expect(section[:name]).to eq('a_section')
+ expect(section[:byte_start]).not_to eq(section[:byte_end]), "got an empty section"
+ end
+ end
+
+ context 'missing section_end' do
+ let(:log) { "section_start:1506417476:a_section\r\033[0KSome logs\nNo section_end\n"}
+
+ it "returns no sections" do
+ expect(sections).to be_empty
+ end
+ end
+
+ context 'missing section_start' do
+ let(:log) { "Some logs\nNo section_start\nsection_end:1506417476:a_section\r\033[0K"}
+
+ it "returns no sections" do
+ expect(sections).to be_empty
+ end
+ end
+
+ context 'inverted section_start section_end' do
+ let(:log) { "section_end:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_start:1506417477:a_section\r\033[0K"}
+
+ it "returns no sections" do
+ expect(sections).to be_empty
+ end
+ end
+ end
+
describe '#set' do
before do
trace.set("12")
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
new file mode 100644
index 00000000000..d72f8553f55
--- /dev/null
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -0,0 +1,1716 @@
+require 'spec_helper'
+
+module Gitlab
+ module Ci
+ describe YamlProcessor, :lib do
+ subject { described_class.new(config) }
+
+ describe 'our current .gitlab-ci.yml' do
+ let(:config) { File.read("#{Rails.root}/.gitlab-ci.yml") }
+
+ it 'is valid' do
+ error_message = described_class.validation_message(config)
+
+ expect(error_message).to be_nil
+ end
+ end
+
+ describe '#build_attributes' do
+ subject { described_class.new(config).build_attributes(:rspec) }
+
+ describe 'coverage entry' do
+ describe 'code coverage regexp' do
+ let(:config) do
+ YAML.dump(rspec: { script: 'rspec',
+ coverage: '/Code coverage: \d+\.\d+/' })
+ end
+
+ it 'includes coverage regexp in build attributes' do
+ expect(subject)
+ .to include(coverage_regex: 'Code coverage: \d+\.\d+')
+ end
+ end
+ end
+
+ describe 'retry entry' do
+ context 'when retry count is specified' do
+ let(:config) do
+ YAML.dump(rspec: { script: 'rspec', retry: 1 })
+ end
+
+ it 'includes retry count in build options attribute' do
+ expect(subject[:options]).to include(retry: 1)
+ end
+ end
+
+ context 'when retry count is not specified' do
+ let(:config) do
+ YAML.dump(rspec: { script: 'rspec' })
+ end
+
+ it 'does not persist retry count in the database' do
+ expect(subject[:options]).not_to have_key(:retry)
+ end
+ end
+ end
+
+ describe 'allow failure entry' do
+ context 'when job is a manual action' do
+ context 'when allow_failure is defined' do
+ let(:config) do
+ YAML.dump(rspec: { script: 'rspec',
+ when: 'manual',
+ allow_failure: false })
+ end
+
+ it 'is not allowed to fail' do
+ expect(subject[:allow_failure]).to be false
+ end
+ end
+
+ context 'when allow_failure is not defined' do
+ let(:config) do
+ YAML.dump(rspec: { script: 'rspec',
+ when: 'manual' })
+ end
+
+ it 'is allowed to fail' do
+ expect(subject[:allow_failure]).to be true
+ end
+ end
+ end
+
+ context 'when job is not a manual action' do
+ context 'when allow_failure is defined' do
+ let(:config) do
+ YAML.dump(rspec: { script: 'rspec',
+ allow_failure: false })
+ end
+
+ it 'is not allowed to fail' do
+ expect(subject[:allow_failure]).to be false
+ end
+ end
+
+ context 'when allow_failure is not defined' do
+ let(:config) do
+ YAML.dump(rspec: { script: 'rspec' })
+ end
+
+ it 'is not allowed to fail' do
+ expect(subject[:allow_failure]).to be false
+ end
+ end
+ end
+ end
+ end
+
+ describe '#stage_seeds' do
+ context 'when no refs policy is specified' do
+ let(:config) do
+ YAML.dump(production: { stage: 'deploy', script: 'cap prod' },
+ rspec: { stage: 'test', script: 'rspec' },
+ spinach: { stage: 'test', script: 'spinach' })
+ end
+
+ let(:pipeline) { create(:ci_empty_pipeline) }
+
+ it 'correctly fabricates a stage seeds object' do
+ seeds = subject.stage_seeds(pipeline)
+
+ expect(seeds.size).to eq 2
+ expect(seeds.first.stage[:name]).to eq 'test'
+ expect(seeds.second.stage[:name]).to eq 'deploy'
+ expect(seeds.first.builds.dig(0, :name)).to eq 'rspec'
+ expect(seeds.first.builds.dig(1, :name)).to eq 'spinach'
+ expect(seeds.second.builds.dig(0, :name)).to eq 'production'
+ end
+ end
+
+ context 'when refs policy is specified' do
+ let(:config) do
+ YAML.dump(production: { stage: 'deploy', script: 'cap prod', only: ['master'] },
+ spinach: { stage: 'test', script: 'spinach', only: ['tags'] })
+ end
+
+ let(:pipeline) do
+ create(:ci_empty_pipeline, ref: 'feature', tag: true)
+ end
+
+ it 'returns stage seeds only assigned to master' do
+ seeds = subject.stage_seeds(pipeline)
+
+ expect(seeds.size).to eq 1
+ expect(seeds.first.stage[:name]).to eq 'test'
+ expect(seeds.first.builds.dig(0, :name)).to eq 'spinach'
+ end
+ end
+
+ context 'when source policy is specified' do
+ let(:config) do
+ YAML.dump(production: { stage: 'deploy', script: 'cap prod', only: ['triggers'] },
+ spinach: { stage: 'test', script: 'spinach', only: ['schedules'] })
+ end
+
+ let(:pipeline) do
+ create(:ci_empty_pipeline, source: :schedule)
+ end
+
+ it 'returns stage seeds only assigned to schedules' do
+ seeds = subject.stage_seeds(pipeline)
+
+ expect(seeds.size).to eq 1
+ expect(seeds.first.stage[:name]).to eq 'test'
+ expect(seeds.first.builds.dig(0, :name)).to eq 'spinach'
+ end
+ end
+
+ context 'when kubernetes policy is specified' do
+ let(:config) do
+ YAML.dump(
+ spinach: { stage: 'test', script: 'spinach' },
+ production: {
+ stage: 'deploy',
+ script: 'cap',
+ only: { kubernetes: 'active' }
+ }
+ )
+ end
+
+ context 'when kubernetes is active' do
+ let(:project) { create(:kubernetes_project) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project) }
+
+ it 'returns seeds for kubernetes dependent job' do
+ seeds = subject.stage_seeds(pipeline)
+
+ expect(seeds.size).to eq 2
+ expect(seeds.first.builds.dig(0, :name)).to eq 'spinach'
+ expect(seeds.second.builds.dig(0, :name)).to eq 'production'
+ end
+ end
+
+ context 'when kubernetes is not active' do
+ it 'does not return seeds for kubernetes dependent job' do
+ seeds = subject.stage_seeds(pipeline)
+
+ expect(seeds.size).to eq 1
+ expect(seeds.first.builds.dig(0, :name)).to eq 'spinach'
+ end
+ end
+ end
+ end
+
+ describe "#pipeline_stage_builds" do
+ let(:type) { 'test' }
+
+ it "returns builds if no branch specified" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec" }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).first).to eq({
+ stage: "test",
+ stage_idx: 1,
+ name: "rspec",
+ commands: "pwd\nrspec",
+ coverage_regex: nil,
+ tag_list: [],
+ options: {
+ before_script: ["pwd"],
+ script: ["rspec"]
+ },
+ allow_failure: false,
+ when: "on_success",
+ environment: nil,
+ yaml_variables: []
+ })
+ end
+
+ describe 'only' do
+ it "does not return builds if only has another branch" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", only: ["deploy"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0)
+ end
+
+ it "does not return builds if only has regexp with another branch" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", only: ["/^deploy$/"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0)
+ end
+
+ it "returns builds if only has specified this branch" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", only: ["master"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1)
+ end
+
+ it "returns builds if only has a list of branches including specified" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: type, only: %w(master deploy) }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1)
+ end
+
+ it "returns builds if only has a branches keyword specified" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: type, only: ["branches"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1)
+ end
+
+ it "does not return builds if only has a tags keyword" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: type, only: ["tags"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0)
+ end
+
+ it "returns builds if only has special keywords specified and source matches" do
+ possibilities = [{ keyword: 'pushes', source: 'push' },
+ { keyword: 'web', source: 'web' },
+ { keyword: 'triggers', source: 'trigger' },
+ { keyword: 'schedules', source: 'schedule' },
+ { keyword: 'api', source: 'api' },
+ { keyword: 'external', source: 'external' }]
+
+ possibilities.each do |possibility|
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: type, only: [possibility[:keyword]] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(1)
+ end
+ end
+
+ it "does not return builds if only has special keywords specified and source doesn't match" do
+ possibilities = [{ keyword: 'pushes', source: 'web' },
+ { keyword: 'web', source: 'push' },
+ { keyword: 'triggers', source: 'schedule' },
+ { keyword: 'schedules', source: 'external' },
+ { keyword: 'api', source: 'trigger' },
+ { keyword: 'external', source: 'api' }]
+
+ possibilities.each do |possibility|
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: type, only: [possibility[:keyword]] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(0)
+ end
+ end
+
+ it "returns builds if only has current repository path" do
+ seed_pipeline = pipeline(ref: 'deploy')
+
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: {
+ script: "rspec",
+ type: type,
+ only: ["branches@#{seed_pipeline.project_full_path}"]
+ }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, seed_pipeline).size).to eq(1)
+ end
+
+ it "does not return builds if only has different repository path" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: type, only: ["branches@fork"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0)
+ end
+
+ it "returns build only for specified type" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: "test", only: %w(master deploy) },
+ staging: { script: "deploy", type: "deploy", only: %w(master deploy) },
+ production: { script: "deploy", type: "deploy", only: ["master@path", "deploy"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds("deploy", pipeline(ref: "deploy")).size).to eq(2)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "deploy")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("deploy", pipeline(ref: "master")).size).to eq(1)
+ end
+
+ context 'for invalid value' do
+ let(:config) { { rspec: { script: "rspec", type: "test", only: only } } }
+ let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+
+ context 'when it is integer' do
+ let(:only) { 1 }
+
+ it do
+ expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
+ 'jobs:rspec:only has to be either an array of conditions or a hash')
+ end
+ end
+
+ context 'when it is an array of integers' do
+ let(:only) { [1, 1] }
+
+ it do
+ expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
+ 'jobs:rspec:only config should be an array of strings or regexps')
+ end
+ end
+
+ context 'when it is invalid regex' do
+ let(:only) { ["/*invalid/"] }
+
+ it do
+ expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
+ 'jobs:rspec:only config should be an array of strings or regexps')
+ end
+ end
+ end
+ end
+
+ describe 'except' do
+ it "returns builds if except has another branch" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", except: ["deploy"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1)
+ end
+
+ it "returns builds if except has regexp with another branch" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", except: ["/^deploy$/"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1)
+ end
+
+ it "does not return builds if except has specified this branch" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", except: ["master"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0)
+ end
+
+ it "does not return builds if except has a list of branches including specified" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: type, except: %w(master deploy) }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0)
+ end
+
+ it "does not return builds if except has a branches keyword specified" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: type, except: ["branches"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0)
+ end
+
+ it "returns builds if except has a tags keyword" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: type, except: ["tags"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1)
+ end
+
+ it "does not return builds if except has special keywords specified and source matches" do
+ possibilities = [{ keyword: 'pushes', source: 'push' },
+ { keyword: 'web', source: 'web' },
+ { keyword: 'triggers', source: 'trigger' },
+ { keyword: 'schedules', source: 'schedule' },
+ { keyword: 'api', source: 'api' },
+ { keyword: 'external', source: 'external' }]
+
+ possibilities.each do |possibility|
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: type, except: [possibility[:keyword]] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(0)
+ end
+ end
+
+ it "returns builds if except has special keywords specified and source doesn't match" do
+ possibilities = [{ keyword: 'pushes', source: 'web' },
+ { keyword: 'web', source: 'push' },
+ { keyword: 'triggers', source: 'schedule' },
+ { keyword: 'schedules', source: 'external' },
+ { keyword: 'api', source: 'trigger' },
+ { keyword: 'external', source: 'api' }]
+
+ possibilities.each do |possibility|
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: type, except: [possibility[:keyword]] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(1)
+ end
+ end
+
+ it "does not return builds if except has current repository path" do
+ seed_pipeline = pipeline(ref: 'deploy')
+
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: {
+ script: "rspec",
+ type: type,
+ except: ["branches@#{seed_pipeline.project_full_path}"]
+ }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, seed_pipeline).size).to eq(0)
+ end
+
+ it "returns builds if except has different repository path" do
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: type, except: ["branches@fork"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1)
+ end
+
+ it "returns build except specified type" do
+ master_pipeline = pipeline(ref: 'master')
+ test_pipeline = pipeline(ref: 'test')
+ deploy_pipeline = pipeline(ref: 'deploy')
+
+ config = YAML.dump({
+ before_script: ["pwd"],
+ rspec: { script: "rspec", type: "test", except: ["master", "deploy", "test@#{test_pipeline.project_full_path}"] },
+ staging: { script: "deploy", type: "deploy", except: ["master"] },
+ production: { script: "deploy", type: "deploy", except: ["master@#{master_pipeline.project_full_path}"] }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds("deploy", deploy_pipeline).size).to eq(2)
+ expect(config_processor.pipeline_stage_builds("test", test_pipeline).size).to eq(0)
+ expect(config_processor.pipeline_stage_builds("deploy", master_pipeline).size).to eq(0)
+ end
+
+ context 'for invalid value' do
+ let(:config) { { rspec: { script: "rspec", except: except } } }
+ let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+
+ context 'when it is integer' do
+ let(:except) { 1 }
+
+ it do
+ expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
+ 'jobs:rspec:except has to be either an array of conditions or a hash')
+ end
+ end
+
+ context 'when it is an array of integers' do
+ let(:except) { [1, 1] }
+
+ it do
+ expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
+ 'jobs:rspec:except config should be an array of strings or regexps')
+ end
+ end
+
+ context 'when it is invalid regex' do
+ let(:except) { ["/*invalid/"] }
+
+ it do
+ expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
+ 'jobs:rspec:except config should be an array of strings or regexps')
+ end
+ end
+ end
+ end
+ end
+
+ describe "Scripts handling" do
+ let(:config_data) { YAML.dump(config) }
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config_data) }
+
+ subject { config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first }
+
+ describe "before_script" do
+ context "in global context" do
+ let(:config) do
+ {
+ before_script: ["global script"],
+ test: { script: ["script"] }
+ }
+ end
+
+ it "return commands with scripts concencaced" do
+ expect(subject[:commands]).to eq("global script\nscript")
+ end
+ end
+
+ context "overwritten in local context" do
+ let(:config) do
+ {
+ before_script: ["global script"],
+ test: { before_script: ["local script"], script: ["script"] }
+ }
+ end
+
+ it "return commands with scripts concencaced" do
+ expect(subject[:commands]).to eq("local script\nscript")
+ end
+ end
+ end
+
+ describe "script" do
+ let(:config) do
+ {
+ test: { script: ["script"] }
+ }
+ end
+
+ it "return commands with scripts concencaced" do
+ expect(subject[:commands]).to eq("script")
+ end
+ end
+
+ describe "after_script" do
+ context "in global context" do
+ let(:config) do
+ {
+ after_script: ["after_script"],
+ test: { script: ["script"] }
+ }
+ end
+
+ it "return after_script in options" do
+ expect(subject[:options][:after_script]).to eq(["after_script"])
+ end
+ end
+
+ context "overwritten in local context" do
+ let(:config) do
+ {
+ after_script: ["local after_script"],
+ test: { after_script: ["local after_script"], script: ["script"] }
+ }
+ end
+
+ it "return after_script in options" do
+ expect(subject[:options][:after_script]).to eq(["local after_script"])
+ end
+ end
+ end
+ end
+
+ describe "Image and service handling" do
+ context "when extended docker configuration is used" do
+ it "returns image and service when defined" do
+ config = YAML.dump({ image: { name: "ruby:2.1", entrypoint: ["/usr/local/bin/init", "run"] },
+ services: ["mysql", { name: "docker:dind", alias: "docker",
+ entrypoint: ["/usr/local/bin/init", "run"],
+ command: ["/usr/local/bin/init", "run"] }],
+ before_script: ["pwd"],
+ rspec: { script: "rspec" } })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
+ stage: "test",
+ stage_idx: 1,
+ name: "rspec",
+ commands: "pwd\nrspec",
+ coverage_regex: nil,
+ tag_list: [],
+ options: {
+ before_script: ["pwd"],
+ script: ["rspec"],
+ image: { name: "ruby:2.1", entrypoint: ["/usr/local/bin/init", "run"] },
+ services: [{ name: "mysql" },
+ { name: "docker:dind", alias: "docker", entrypoint: ["/usr/local/bin/init", "run"],
+ command: ["/usr/local/bin/init", "run"] }]
+ },
+ allow_failure: false,
+ when: "on_success",
+ environment: nil,
+ yaml_variables: []
+ })
+ end
+
+ it "returns image and service when overridden for job" do
+ config = YAML.dump({ image: "ruby:2.1",
+ services: ["mysql"],
+ before_script: ["pwd"],
+ rspec: { image: { name: "ruby:2.5", entrypoint: ["/usr/local/bin/init", "run"] },
+ services: [{ name: "postgresql", alias: "db-pg",
+ entrypoint: ["/usr/local/bin/init", "run"],
+ command: ["/usr/local/bin/init", "run"] }, "docker:dind"],
+ script: "rspec" } })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
+ stage: "test",
+ stage_idx: 1,
+ name: "rspec",
+ commands: "pwd\nrspec",
+ coverage_regex: nil,
+ tag_list: [],
+ options: {
+ before_script: ["pwd"],
+ script: ["rspec"],
+ image: { name: "ruby:2.5", entrypoint: ["/usr/local/bin/init", "run"] },
+ services: [{ name: "postgresql", alias: "db-pg", entrypoint: ["/usr/local/bin/init", "run"],
+ command: ["/usr/local/bin/init", "run"] },
+ { name: "docker:dind" }]
+ },
+ allow_failure: false,
+ when: "on_success",
+ environment: nil,
+ yaml_variables: []
+ })
+ end
+ end
+
+ context "when etended docker configuration is not used" do
+ it "returns image and service when defined" do
+ config = YAML.dump({ image: "ruby:2.1",
+ services: ["mysql", "docker:dind"],
+ before_script: ["pwd"],
+ rspec: { script: "rspec" } })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
+ stage: "test",
+ stage_idx: 1,
+ name: "rspec",
+ commands: "pwd\nrspec",
+ coverage_regex: nil,
+ tag_list: [],
+ options: {
+ before_script: ["pwd"],
+ script: ["rspec"],
+ image: { name: "ruby:2.1" },
+ services: [{ name: "mysql" }, { name: "docker:dind" }]
+ },
+ allow_failure: false,
+ when: "on_success",
+ environment: nil,
+ yaml_variables: []
+ })
+ end
+
+ it "returns image and service when overridden for job" do
+ config = YAML.dump({ image: "ruby:2.1",
+ services: ["mysql"],
+ before_script: ["pwd"],
+ rspec: { image: "ruby:2.5", services: ["postgresql", "docker:dind"], script: "rspec" } })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
+ stage: "test",
+ stage_idx: 1,
+ name: "rspec",
+ commands: "pwd\nrspec",
+ coverage_regex: nil,
+ tag_list: [],
+ options: {
+ before_script: ["pwd"],
+ script: ["rspec"],
+ image: { name: "ruby:2.5" },
+ services: [{ name: "postgresql" }, { name: "docker:dind" }]
+ },
+ allow_failure: false,
+ when: "on_success",
+ environment: nil,
+ yaml_variables: []
+ })
+ end
+ end
+ end
+
+ describe 'Variables' do
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+
+ subject { config_processor.builds.first[:yaml_variables] }
+
+ context 'when global variables are defined' do
+ let(:variables) do
+ { 'VAR1' => 'value1', 'VAR2' => 'value2' }
+ end
+ let(:config) do
+ {
+ variables: variables,
+ before_script: ['pwd'],
+ rspec: { script: 'rspec' }
+ }
+ end
+
+ it 'returns global variables' do
+ expect(subject).to contain_exactly(
+ { key: 'VAR1', value: 'value1', public: true },
+ { key: 'VAR2', value: 'value2', public: true }
+ )
+ end
+ end
+
+ context 'when job and global variables are defined' do
+ let(:global_variables) do
+ { 'VAR1' => 'global1', 'VAR3' => 'global3' }
+ end
+ let(:job_variables) do
+ { 'VAR1' => 'value1', 'VAR2' => 'value2' }
+ end
+ let(:config) do
+ {
+ before_script: ['pwd'],
+ variables: global_variables,
+ rspec: { script: 'rspec', variables: job_variables }
+ }
+ end
+
+ it 'returns all unique variables' do
+ expect(subject).to contain_exactly(
+ { key: 'VAR3', value: 'global3', public: true },
+ { key: 'VAR1', value: 'value1', public: true },
+ { key: 'VAR2', value: 'value2', public: true }
+ )
+ end
+ end
+
+ context 'when job variables are defined' do
+ let(:config) do
+ {
+ before_script: ['pwd'],
+ rspec: { script: 'rspec', variables: variables }
+ }
+ end
+
+ context 'when syntax is correct' do
+ let(:variables) do
+ { 'VAR1' => 'value1', 'VAR2' => 'value2' }
+ end
+
+ it 'returns job variables' do
+ expect(subject).to contain_exactly(
+ { key: 'VAR1', value: 'value1', public: true },
+ { key: 'VAR2', value: 'value2', public: true }
+ )
+ end
+ end
+
+ context 'when syntax is incorrect' do
+ context 'when variables are defined but invalid' do
+ let(:variables) do
+ %w(VAR1 value1 VAR2 value2)
+ end
+
+ it 'raises error' do
+ expect { subject }
+ .to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
+ /jobs:rspec:variables config should be a hash of key value pairs/)
+ end
+ end
+
+ context 'when variables key defined but value not specified' do
+ let(:variables) do
+ nil
+ end
+
+ it 'returns empty array' do
+ ##
+ # When variables config is empty, we assume this is a valid
+ # configuration, see issue #18775
+ #
+ expect(subject).to be_an_instance_of(Array)
+ expect(subject).to be_empty
+ end
+ end
+ end
+ end
+
+ context 'when job variables are not defined' do
+ let(:config) do
+ {
+ before_script: ['pwd'],
+ rspec: { script: 'rspec' }
+ }
+ end
+
+ it 'returns empty array' do
+ expect(subject).to be_an_instance_of(Array)
+ expect(subject).to be_empty
+ end
+ end
+ end
+
+ describe "When" do
+ %w(on_success on_failure always).each do |when_state|
+ it "returns #{when_state} when defined" do
+ config = YAML.dump({
+ rspec: { script: "rspec", when: when_state }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ builds = config_processor.pipeline_stage_builds("test", pipeline(ref: "master"))
+ expect(builds.size).to eq(1)
+ expect(builds.first[:when]).to eq(when_state)
+ end
+ end
+ end
+
+ describe 'cache' do
+ context 'when cache definition has unknown keys' do
+ it 'raises relevant validation error' do
+ config = YAML.dump(
+ { cache: { untracked: true, invalid: 'key' },
+ rspec: { script: 'rspec' } })
+
+ expect { Gitlab::Ci::YamlProcessor.new(config) }.to raise_error(
+ Gitlab::Ci::YamlProcessor::ValidationError,
+ 'cache config contains unknown keys: invalid'
+ )
+ end
+ end
+
+ it "returns cache when defined globally" do
+ config = YAML.dump({
+ cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' },
+ rspec: {
+ script: "rspec"
+ }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq(
+ paths: ["logs/", "binaries/"],
+ untracked: true,
+ key: 'key',
+ policy: 'pull-push'
+ )
+ end
+
+ it "returns cache when defined in a job" do
+ config = YAML.dump({
+ rspec: {
+ cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' },
+ script: "rspec"
+ }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq(
+ paths: ["logs/", "binaries/"],
+ untracked: true,
+ key: 'key',
+ policy: 'pull-push'
+ )
+ end
+
+ it "overwrite cache when defined for a job and globally" do
+ config = YAML.dump({
+ cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' },
+ rspec: {
+ script: "rspec",
+ cache: { paths: ["test/"], untracked: false, key: 'local' }
+ }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq(
+ paths: ["test/"],
+ untracked: false,
+ key: 'local',
+ policy: 'pull-push'
+ )
+ end
+ end
+
+ describe "Artifacts" do
+ it "returns artifacts when defined" do
+ config = YAML.dump({
+ image: "ruby:2.1",
+ services: ["mysql"],
+ before_script: ["pwd"],
+ rspec: {
+ artifacts: {
+ paths: ["logs/", "binaries/"],
+ untracked: true,
+ name: "custom_name",
+ expire_in: "7d"
+ },
+ script: "rspec"
+ }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
+ stage: "test",
+ stage_idx: 1,
+ name: "rspec",
+ commands: "pwd\nrspec",
+ coverage_regex: nil,
+ tag_list: [],
+ options: {
+ before_script: ["pwd"],
+ script: ["rspec"],
+ image: { name: "ruby:2.1" },
+ services: [{ name: "mysql" }],
+ artifacts: {
+ name: "custom_name",
+ paths: ["logs/", "binaries/"],
+ untracked: true,
+ expire_in: "7d"
+ }
+ },
+ when: "on_success",
+ allow_failure: false,
+ environment: nil,
+ yaml_variables: []
+ })
+ end
+
+ %w[on_success on_failure always].each do |when_state|
+ it "returns artifacts for when #{when_state} defined" do
+ config = YAML.dump({
+ rspec: {
+ script: "rspec",
+ artifacts: { paths: ["logs/", "binaries/"], when: when_state }
+ }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+
+ builds = config_processor.pipeline_stage_builds("test", pipeline(ref: "master"))
+ expect(builds.size).to eq(1)
+ expect(builds.first[:options][:artifacts][:when]).to eq(when_state)
+ end
+ end
+ end
+
+ describe '#environment' do
+ let(:config) do
+ {
+ deploy_to_production: { stage: 'deploy', script: 'test', environment: environment }
+ }
+ end
+
+ let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+ let(:builds) { processor.pipeline_stage_builds('deploy', pipeline(ref: 'master')) }
+
+ context 'when a production environment is specified' do
+ let(:environment) { 'production' }
+
+ it 'does return production' do
+ expect(builds.size).to eq(1)
+ expect(builds.first[:environment]).to eq(environment)
+ expect(builds.first[:options]).to include(environment: { name: environment, action: "start" })
+ end
+ end
+
+ context 'when hash is specified' do
+ let(:environment) do
+ { name: 'production',
+ url: 'http://production.gitlab.com' }
+ end
+
+ it 'does return production and URL' do
+ expect(builds.size).to eq(1)
+ expect(builds.first[:environment]).to eq(environment[:name])
+ expect(builds.first[:options]).to include(environment: environment)
+ end
+
+ context 'when the url has a port as a variable' do
+ let(:environment) do
+ { name: 'production',
+ url: 'http://production.gitlab.com:$PORT' }
+ end
+
+ it 'allows a variable for the port' do
+ expect(builds.size).to eq(1)
+ expect(builds.first[:environment]).to eq(environment[:name])
+ expect(builds.first[:options]).to include(environment: environment)
+ end
+ end
+ end
+
+ context 'when no environment is specified' do
+ let(:environment) { nil }
+
+ it 'does return nil environment' do
+ expect(builds.size).to eq(1)
+ expect(builds.first[:environment]).to be_nil
+ end
+ end
+
+ context 'is not a string' do
+ let(:environment) { 1 }
+
+ it 'raises error' do
+ expect { builds }.to raise_error(
+ 'jobs:deploy_to_production:environment config should be a hash or a string')
+ end
+ end
+
+ context 'is not a valid string' do
+ let(:environment) { 'production:staging' }
+
+ it 'raises error' do
+ expect { builds }.to raise_error("jobs:deploy_to_production:environment name #{Gitlab::Regex.environment_name_regex_message}")
+ end
+ end
+
+ context 'when on_stop is specified' do
+ let(:review) { { stage: 'deploy', script: 'test', environment: { name: 'review', on_stop: 'close_review' } } }
+ let(:config) { { review: review, close_review: close_review }.compact }
+
+ context 'with matching job' do
+ let(:close_review) { { stage: 'deploy', script: 'test', environment: { name: 'review', action: 'stop' } } }
+
+ it 'does return a list of builds' do
+ expect(builds.size).to eq(2)
+ expect(builds.first[:environment]).to eq('review')
+ end
+ end
+
+ context 'without matching job' do
+ let(:close_review) { nil }
+
+ it 'raises error' do
+ expect { builds }.to raise_error('review job: on_stop job close_review is not defined')
+ end
+ end
+
+ context 'with close job without environment' do
+ let(:close_review) { { stage: 'deploy', script: 'test' } }
+
+ it 'raises error' do
+ expect { builds }.to raise_error('review job: on_stop job close_review does not have environment defined')
+ end
+ end
+
+ context 'with close job for different environment' do
+ let(:close_review) { { stage: 'deploy', script: 'test', environment: 'production' } }
+
+ it 'raises error' do
+ expect { builds }.to raise_error('review job: on_stop job close_review have different environment name')
+ end
+ end
+
+ context 'with close job without stop action' do
+ let(:close_review) { { stage: 'deploy', script: 'test', environment: { name: 'review' } } }
+
+ it 'raises error' do
+ expect { builds }.to raise_error('review job: on_stop job close_review needs to have action stop defined')
+ end
+ end
+ end
+ end
+
+ describe "Dependencies" do
+ let(:config) do
+ {
+ build1: { stage: 'build', script: 'test' },
+ build2: { stage: 'build', script: 'test' },
+ test1: { stage: 'test', script: 'test', dependencies: dependencies },
+ test2: { stage: 'test', script: 'test' },
+ deploy: { stage: 'test', script: 'test' }
+ }
+ end
+
+ subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+
+ context 'no dependencies' do
+ let(:dependencies) { }
+
+ it { expect { subject }.not_to raise_error }
+ end
+
+ context 'dependencies to builds' do
+ let(:dependencies) { %w(build1 build2) }
+
+ it { expect { subject }.not_to raise_error }
+ end
+
+ context 'dependencies to builds defined as symbols' do
+ let(:dependencies) { [:build1, :build2] }
+
+ it { expect { subject }.not_to raise_error }
+ end
+
+ context 'undefined dependency' do
+ let(:dependencies) { ['undefined'] }
+
+ it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'test1 job: undefined dependency: undefined') }
+ end
+
+ context 'dependencies to deploy' do
+ let(:dependencies) { ['deploy'] }
+
+ it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'test1 job: dependency deploy is not defined in prior stages') }
+ end
+ end
+
+ describe "Hidden jobs" do
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
+ subject { config_processor.pipeline_stage_builds("test", pipeline(ref: "master")) }
+
+ shared_examples 'hidden_job_handling' do
+ it "doesn't create jobs that start with dot" do
+ expect(subject.size).to eq(1)
+ expect(subject.first).to eq({
+ stage: "test",
+ stage_idx: 1,
+ name: "normal_job",
+ commands: "test",
+ coverage_regex: nil,
+ tag_list: [],
+ options: {
+ script: ["test"]
+ },
+ when: "on_success",
+ allow_failure: false,
+ environment: nil,
+ yaml_variables: []
+ })
+ end
+ end
+
+ context 'when hidden job has a script definition' do
+ let(:config) do
+ YAML.dump({
+ '.hidden_job' => { image: 'ruby:2.1', script: 'test' },
+ 'normal_job' => { script: 'test' }
+ })
+ end
+
+ it_behaves_like 'hidden_job_handling'
+ end
+
+ context "when hidden job doesn't have a script definition" do
+ let(:config) do
+ YAML.dump({
+ '.hidden_job' => { image: 'ruby:2.1' },
+ 'normal_job' => { script: 'test' }
+ })
+ end
+
+ it_behaves_like 'hidden_job_handling'
+ end
+ end
+
+ describe "YAML Alias/Anchor" do
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
+ subject { config_processor.pipeline_stage_builds("build", pipeline(ref: "master")) }
+
+ shared_examples 'job_templates_handling' do
+ it "is correctly supported for jobs" do
+ expect(subject.size).to eq(2)
+ expect(subject.first).to eq({
+ stage: "build",
+ stage_idx: 0,
+ name: "job1",
+ commands: "execute-script-for-job",
+ coverage_regex: nil,
+ tag_list: [],
+ options: {
+ script: ["execute-script-for-job"]
+ },
+ when: "on_success",
+ allow_failure: false,
+ environment: nil,
+ yaml_variables: []
+ })
+ expect(subject.second).to eq({
+ stage: "build",
+ stage_idx: 0,
+ name: "job2",
+ commands: "execute-script-for-job",
+ coverage_regex: nil,
+ tag_list: [],
+ options: {
+ script: ["execute-script-for-job"]
+ },
+ when: "on_success",
+ allow_failure: false,
+ environment: nil,
+ yaml_variables: []
+ })
+ end
+ end
+
+ context 'when template is a job' do
+ let(:config) do
+ <<EOT
+job1: &JOBTMPL
+ stage: build
+ script: execute-script-for-job
+
+job2: *JOBTMPL
+EOT
+ end
+
+ it_behaves_like 'job_templates_handling'
+ end
+
+ context 'when template is a hidden job' do
+ let(:config) do
+ <<EOT
+.template: &JOBTMPL
+ stage: build
+ script: execute-script-for-job
+
+job1: *JOBTMPL
+
+job2: *JOBTMPL
+EOT
+ end
+
+ it_behaves_like 'job_templates_handling'
+ end
+
+ context 'when job adds its own keys to a template definition' do
+ let(:config) do
+ <<EOT
+.template: &JOBTMPL
+ stage: build
+
+job1:
+ <<: *JOBTMPL
+ script: execute-script-for-job
+
+job2:
+ <<: *JOBTMPL
+ script: execute-script-for-job
+EOT
+ end
+
+ it_behaves_like 'job_templates_handling'
+ end
+ end
+
+ describe "Error handling" do
+ it "fails to parse YAML" do
+ expect {Gitlab::Ci::YamlProcessor.new("invalid: yaml: test")}.to raise_error(Psych::SyntaxError)
+ end
+
+ it "indicates that object is invalid" do
+ expect {Gitlab::Ci::YamlProcessor.new("invalid_yaml")}.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError)
+ end
+
+ it "returns errors if tags parameter is invalid" do
+ config = YAML.dump({ rspec: { script: "test", tags: "mysql" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec tags should be an array of strings")
+ end
+
+ it "returns errors if before_script parameter is invalid" do
+ config = YAML.dump({ before_script: "bundle update", rspec: { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "before_script config should be an array of strings")
+ end
+
+ it "returns errors if job before_script parameter is not an array of strings" do
+ config = YAML.dump({ rspec: { script: "test", before_script: [10, "test"] } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:before_script config should be an array of strings")
+ end
+
+ it "returns errors if after_script parameter is invalid" do
+ config = YAML.dump({ after_script: "bundle update", rspec: { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "after_script config should be an array of strings")
+ end
+
+ it "returns errors if job after_script parameter is not an array of strings" do
+ config = YAML.dump({ rspec: { script: "test", after_script: [10, "test"] } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:after_script config should be an array of strings")
+ end
+
+ it "returns errors if image parameter is invalid" do
+ config = YAML.dump({ image: ["test"], rspec: { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "image config should be a hash or a string")
+ end
+
+ it "returns errors if job name is blank" do
+ config = YAML.dump({ '' => { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:job name can't be blank")
+ end
+
+ it "returns errors if job name is non-string" do
+ config = YAML.dump({ 10 => { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:10 name should be a symbol")
+ end
+
+ it "returns errors if job image parameter is invalid" do
+ config = YAML.dump({ rspec: { script: "test", image: ["test"] } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:image config should be a hash or a string")
+ end
+
+ it "returns errors if services parameter is not an array" do
+ config = YAML.dump({ services: "test", rspec: { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "services config should be a array")
+ end
+
+ it "returns errors if services parameter is not an array of strings" do
+ config = YAML.dump({ services: [10, "test"], rspec: { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "service config should be a hash or a string")
+ end
+
+ it "returns errors if job services parameter is not an array" do
+ config = YAML.dump({ rspec: { script: "test", services: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:services config should be a array")
+ end
+
+ it "returns errors if job services parameter is not an array of strings" do
+ config = YAML.dump({ rspec: { script: "test", services: [10, "test"] } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "service config should be a hash or a string")
+ end
+
+ it "returns error if job configuration is invalid" do
+ config = YAML.dump({ extra: "bundle update" })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:extra config should be a hash")
+ end
+
+ it "returns errors if services configuration is not correct" do
+ config = YAML.dump({ extra: { script: 'rspec', services: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:extra:services config should be a array")
+ end
+
+ it "returns errors if there are no jobs defined" do
+ config = YAML.dump({ before_script: ["bundle update"] })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs config should contain at least one visible job")
+ end
+
+ it "returns errors if there are no visible jobs defined" do
+ config = YAML.dump({ before_script: ["bundle update"], '.hidden'.to_sym => { script: 'ls' } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs config should contain at least one visible job")
+ end
+
+ it "returns errors if job allow_failure parameter is not an boolean" do
+ config = YAML.dump({ rspec: { script: "test", allow_failure: "string" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec allow failure should be a boolean value")
+ end
+
+ it "returns errors if job stage is not a string" do
+ config = YAML.dump({ rspec: { script: "test", type: 1 } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:type config should be a string")
+ end
+
+ it "returns errors if job stage is not a pre-defined stage" do
+ config = YAML.dump({ rspec: { script: "test", type: "acceptance" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: stage parameter should be build, test, deploy")
+ end
+
+ it "returns errors if job stage is not a defined stage" do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", type: "acceptance" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: stage parameter should be build, test")
+ end
+
+ it "returns errors if stages is not an array" do
+ config = YAML.dump({ stages: "test", rspec: { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "stages config should be an array of strings")
+ end
+
+ it "returns errors if stages is not an array of strings" do
+ config = YAML.dump({ stages: [true, "test"], rspec: { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "stages config should be an array of strings")
+ end
+
+ it "returns errors if variables is not a map" do
+ config = YAML.dump({ variables: "test", rspec: { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "variables config should be a hash of key value pairs")
+ end
+
+ it "returns errors if variables is not a map of key-value strings" do
+ config = YAML.dump({ variables: { test: false }, rspec: { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "variables config should be a hash of key value pairs")
+ end
+
+ it "returns errors if job when is not on_success, on_failure or always" do
+ config = YAML.dump({ rspec: { script: "test", when: 1 } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec when should be on_success, on_failure, always or manual")
+ end
+
+ it "returns errors if job artifacts:name is not an a string" do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { name: 1 } } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts name should be a string")
+ end
+
+ it "returns errors if job artifacts:when is not an a predefined value" do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { when: 1 } } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts when should be on_success, on_failure or always")
+ end
+
+ it "returns errors if job artifacts:expire_in is not an a string" do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { expire_in: 1 } } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts expire in should be a duration")
+ end
+
+ it "returns errors if job artifacts:expire_in is not an a valid duration" do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { expire_in: "7 elephants" } } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts expire in should be a duration")
+ end
+
+ it "returns errors if job artifacts:untracked is not an array of strings" do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { untracked: "string" } } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts untracked should be a boolean value")
+ end
+
+ it "returns errors if job artifacts:paths is not an array of strings" do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { paths: "string" } } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts paths should be an array of strings")
+ end
+
+ it "returns errors if cache:untracked is not an array of strings" do
+ config = YAML.dump({ cache: { untracked: "string" }, rspec: { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "cache:untracked config should be a boolean value")
+ end
+
+ it "returns errors if cache:paths is not an array of strings" do
+ config = YAML.dump({ cache: { paths: "string" }, rspec: { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "cache:paths config should be an array of strings")
+ end
+
+ it "returns errors if cache:key is not a string" do
+ config = YAML.dump({ cache: { key: 1 }, rspec: { script: "test" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "cache:key config should be a string or symbol")
+ end
+
+ it "returns errors if job cache:key is not an a string" do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: 1 } } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:cache:key config should be a string or symbol")
+ end
+
+ it "returns errors if job cache:untracked is not an array of strings" do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { untracked: "string" } } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:cache:untracked config should be a boolean value")
+ end
+
+ it "returns errors if job cache:paths is not an array of strings" do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { paths: "string" } } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:cache:paths config should be an array of strings")
+ end
+
+ it "returns errors if job dependencies is not an array of strings" do
+ config = YAML.dump({ types: %w(build test), rspec: { script: "test", dependencies: "string" } })
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec dependencies should be an array of strings")
+ end
+ end
+
+ describe "Validate configuration templates" do
+ templates = Dir.glob("#{Rails.root.join('vendor/gitlab-ci-yml')}/**/*.gitlab-ci.yml")
+
+ templates.each do |file|
+ it "does not return errors for #{file}" do
+ file = File.read(file)
+
+ expect { Gitlab::Ci::YamlProcessor.new(file) }.not_to raise_error
+ end
+ end
+ end
+
+ describe "#validation_message" do
+ context "when the YAML could not be parsed" do
+ it "returns an error about invalid configutaion" do
+ content = YAML.dump("invalid: yaml: test")
+
+ expect(Gitlab::Ci::YamlProcessor.validation_message(content))
+ .to eq "Invalid configuration format"
+ end
+ end
+
+ context "when the tags parameter is invalid" do
+ it "returns an error about invalid tags" do
+ content = YAML.dump({ rspec: { script: "test", tags: "mysql" } })
+
+ expect(Gitlab::Ci::YamlProcessor.validation_message(content))
+ .to eq "jobs:rspec tags should be an array of strings"
+ end
+ end
+
+ context "when YAML content is empty" do
+ it "returns an error about missing content" do
+ expect(Gitlab::Ci::YamlProcessor.validation_message(''))
+ .to eq "Please provide content of .gitlab-ci.yml"
+ end
+ end
+
+ context "when the YAML is valid" do
+ it "does not return any errors" do
+ content = File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
+
+ expect(Gitlab::Ci::YamlProcessor.validation_message(content)).to be_nil
+ end
+ end
+ end
+
+ def pipeline(**attributes)
+ build_stubbed(:ci_empty_pipeline, **attributes)
+ end
+ end
+ end
+end
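For reviewers skimming the template specs above, a minimal sketch of what the anchor/alias configs expand to once parsed. The config and the processor call are copied from the spec; the build count in the trailing comment is the value the 'job_templates_handling' shared example asserts, not re-verified here.

require 'yaml'

config = <<EOT
.template: &JOBTMPL
  stage: build
  script: execute-script-for-job

job1: *JOBTMPL

job2: *JOBTMPL
EOT

# Psych resolves the alias while parsing, so job1 and job2 both carry the template keys:
YAML.load(config)
# => { ".template" => { "stage" => "build", "script" => "execute-script-for-job" },
#      "job1"      => { "stage" => "build", "script" => "execute-script-for-job" },
#      "job2"      => { "stage" => "build", "script" => "execute-script-for-job" } }

# The processor then drops the hidden ".template" job and returns two builds,
# as asserted by the shared example above:
# Gitlab::Ci::YamlProcessor.new(config).pipeline_stage_builds("build", pipeline).size # => 2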
diff --git a/spec/lib/gitlab/closing_issue_extractor_spec.rb b/spec/lib/gitlab/closing_issue_extractor_spec.rb
index 15012495247..ef7d766a13d 100644
--- a/spec/lib/gitlab/closing_issue_extractor_spec.rb
+++ b/spec/lib/gitlab/closing_issue_extractor_spec.rb
@@ -254,6 +254,46 @@ describe Gitlab::ClosingIssueExtractor do
expect(subject.closed_by_message(message)).to eq([issue])
end
+ it do
+ message = "Implement: #{reference}"
+ expect(subject.closed_by_message(message)).to eq([issue])
+ end
+
+ it do
+ message = "Implements: #{reference}"
+ expect(subject.closed_by_message(message)).to eq([issue])
+ end
+
+ it do
+ message = "Implemented: #{reference}"
+ expect(subject.closed_by_message(message)).to eq([issue])
+ end
+
+ it do
+ message = "Implementing: #{reference}"
+ expect(subject.closed_by_message(message)).to eq([issue])
+ end
+
+ it do
+ message = "implement: #{reference}"
+ expect(subject.closed_by_message(message)).to eq([issue])
+ end
+
+ it do
+ message = "implements: #{reference}"
+ expect(subject.closed_by_message(message)).to eq([issue])
+ end
+
+ it do
+ message = "implemented: #{reference}"
+ expect(subject.closed_by_message(message)).to eq([issue])
+ end
+
+ it do
+ message = "implementing: #{reference}"
+ expect(subject.closed_by_message(message)).to eq([issue])
+ end
+
context 'with an external issue tracker reference' do
it 'extracts the referenced issue' do
jira_project = create(:jira_project, name: 'JIRA_EXT1')
@@ -347,10 +387,10 @@ describe Gitlab::ClosingIssueExtractor do
end
it "fetches cross-project URL references" do
- message = "Closes #{urls.project_issue_url(issue2.project, issue2)} and #{reference}"
+ message = "Closes #{urls.project_issue_url(issue2.project, issue2)}, #{reference} and #{urls.project_issue_url(other_issue.project, other_issue)}"
expect(subject.closed_by_message(message))
- .to match_array([issue, issue2])
+ .to match_array([issue, issue2, other_issue])
end
it "ignores invalid cross-project URL references" do
diff --git a/spec/lib/gitlab/data_builder/push_spec.rb b/spec/lib/gitlab/data_builder/push_spec.rb
index cb430b47463..befdc18d1aa 100644
--- a/spec/lib/gitlab/data_builder/push_spec.rb
+++ b/spec/lib/gitlab/data_builder/push_spec.rb
@@ -47,7 +47,7 @@ describe Gitlab::DataBuilder::Push do
include_examples 'deprecated repository hook data'
it 'does not raise an error when given nil commits' do
- expect { described_class.build(spy, spy, spy, spy, spy, nil) }
+ expect { described_class.build(spy, spy, spy, spy, 'refs/tags/v1.1.0', nil) }
.not_to raise_error
end
end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 1bcdc369c44..3c8350b3aad 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -914,4 +914,126 @@ describe Gitlab::Database::MigrationHelpers do
.to raise_error(RuntimeError, /Your database user is not allowed/)
end
end
+
+ describe '#bulk_queue_background_migration_jobs_by_range', :sidekiq do
+ context 'when the model has an ID column' do
+ let!(:id1) { create(:user).id }
+ let!(:id2) { create(:user).id }
+ let!(:id3) { create(:user).id }
+
+ before do
+ User.class_eval do
+ include EachBatch
+ end
+ end
+
+ context 'with enough rows to bulk queue jobs more than once' do
+ before do
+ stub_const('Gitlab::Database::MigrationHelpers::BACKGROUND_MIGRATION_JOB_BUFFER_SIZE', 1)
+ end
+
+ it 'queues jobs correctly' do
+ Sidekiq::Testing.fake! do
+ model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
+
+ expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id2]])
+ expect(BackgroundMigrationWorker.jobs[1]['args']).to eq(['FooJob', [id3, id3]])
+ end
+ end
+
+ it 'queues jobs in groups of buffer size 1' do
+ expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id1, id2]]])
+ expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id3, id3]]])
+
+ model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
+ end
+ end
+
+ context 'with not enough rows to bulk queue jobs more than once' do
+ it 'queues jobs correctly' do
+ Sidekiq::Testing.fake! do
+ model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
+
+ expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id2]])
+ expect(BackgroundMigrationWorker.jobs[1]['args']).to eq(['FooJob', [id3, id3]])
+ end
+ end
+
+ it 'queues jobs in bulk all at once (big buffer size)' do
+ expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id1, id2]],
+ ['FooJob', [id3, id3]]])
+
+ model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
+ end
+ end
+
+ context 'without specifying batch_size' do
+ it 'queues jobs correctly' do
+ Sidekiq::Testing.fake! do
+ model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob')
+
+ expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3]])
+ end
+ end
+ end
+ end
+
+ context "when the model doesn't have an ID column" do
+ it 'raises error (for now)' do
+ expect do
+ model.bulk_queue_background_migration_jobs_by_range(ProjectAuthorization, 'FooJob')
+ end.to raise_error(StandardError, /does not have an ID/)
+ end
+ end
+ end
+
+ describe '#queue_background_migration_jobs_by_range_at_intervals', :sidekiq do
+ context 'when the model has an ID column' do
+ let!(:id1) { create(:user).id }
+ let!(:id2) { create(:user).id }
+ let!(:id3) { create(:user).id }
+
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+ before do
+ User.class_eval do
+ include EachBatch
+ end
+ end
+
+ context 'with batch_size option' do
+ it 'queues jobs correctly' do
+ Sidekiq::Testing.fake! do
+ model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.seconds, batch_size: 2)
+
+ expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id2]])
+ expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.seconds.from_now.to_f)
+ expect(BackgroundMigrationWorker.jobs[1]['args']).to eq(['FooJob', [id3, id3]])
+ expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(20.seconds.from_now.to_f)
+ end
+ end
+ end
+
+ context 'without batch_size option' do
+ it 'queues jobs correctly' do
+ Sidekiq::Testing.fake! do
+ model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.seconds)
+
+ expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3]])
+ expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.seconds.from_now.to_f)
+ end
+ end
+ end
+ end
+
+ context "when the model doesn't have an ID column" do
+ it 'raises error (for now)' do
+ expect do
+ model.queue_background_migration_jobs_by_range_at_intervals(ProjectAuthorization, 'FooJob', 10.seconds)
+ end.to raise_error(StandardError, /does not have an ID/)
+ end
+ end
+ end
end
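As a usage sketch for the two helpers specced above: the helper name, the 'FooJob' argument and the 10-second interval come straight from the spec, while the surrounding post-deployment migration class is an assumption for illustration only.

# Sketch only; not a migration that exists in the repository.
class ScheduleFooJob < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false
  disable_ddl_transaction!

  def up
    # Enqueues ['FooJob', [start_id, end_id]] for each batch of users,
    # delaying successive batches by 10 seconds, as the spec asserts.
    queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.seconds, batch_size: 2)
  end

  def down
    # Background migrations are not automatically reversible.
  end
end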
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
index 90aa4f63dd5..596cc435bd9 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
@@ -229,7 +229,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :trunca
end
end
- describe '#track_rename', redis: true do
+ describe '#track_rename', :redis do
it 'tracks a rename in redis' do
key = 'rename:FakeRenameReservedPathMigrationV1:namespace'
@@ -246,7 +246,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :trunca
end
end
- describe '#reverts_for_type', redis: true do
+ describe '#reverts_for_type', :redis do
it 'yields for each tracked rename' do
subject.track_rename('project', 'old_path', 'new_path')
subject.track_rename('project', 'old_path2', 'new_path2')
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
index 32ac0b88a9b..1143182531f 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
@@ -241,7 +241,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
end
end
- describe '#revert_renames', redis: true do
+ describe '#revert_renames', :redis do
it 'renames the routes back to the previous values' do
project = create(:project, :repository, path: 'a-project', namespace: namespace)
subject.rename_namespace(namespace)
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
index 595e06a9748..8922370b0a0 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
@@ -115,7 +115,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :tr
end
end
- describe '#revert_renames', redis: true do
+ describe '#revert_renames', :redis do
it 'renames the routes back to the previous values' do
subject.rename_project(project)
diff --git a/spec/lib/gitlab/diff/diff_refs_spec.rb b/spec/lib/gitlab/diff/diff_refs_spec.rb
index c73708d90a8..f9bfb4c469e 100644
--- a/spec/lib/gitlab/diff/diff_refs_spec.rb
+++ b/spec/lib/gitlab/diff/diff_refs_spec.rb
@@ -3,6 +3,61 @@ require 'spec_helper'
describe Gitlab::Diff::DiffRefs do
let(:project) { create(:project, :repository) }
+ describe '#==' do
+ let(:commit) { project.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863') }
+ subject { commit.diff_refs }
+
+ context 'when shas are missing' do
+ let(:other) { described_class.new(base_sha: subject.base_sha, start_sha: subject.start_sha, head_sha: nil) }
+
+ it 'returns false' do
+ expect(subject).not_to eq(other)
+ end
+ end
+
+ context 'when shas are equal' do
+ let(:other) { described_class.new(base_sha: subject.base_sha, start_sha: subject.start_sha, head_sha: subject.head_sha) }
+
+ it 'returns true' do
+ expect(subject).to eq(other)
+ end
+ end
+
+ context 'when shas are unequal' do
+ let(:other) { described_class.new(base_sha: subject.base_sha, start_sha: subject.start_sha, head_sha: subject.head_sha.reverse) }
+
+ it 'returns false' do
+ expect(subject).not_to eq(other)
+ end
+ end
+
+ context 'when shas are truncated' do
+ context 'when sha prefixes are too short' do
+ let(:other) { described_class.new(base_sha: subject.base_sha[0, 4], start_sha: subject.start_sha[0, 4], head_sha: subject.head_sha[0, 4]) }
+
+ it 'returns false' do
+ expect(subject).not_to eq(other)
+ end
+ end
+
+ context 'when sha prefixes are equal' do
+ let(:other) { described_class.new(base_sha: subject.base_sha[0, 10], start_sha: subject.start_sha[0, 10], head_sha: subject.head_sha[0, 10]) }
+
+ it 'returns true' do
+ expect(subject).to eq(other)
+ end
+ end
+
+ context 'when sha prefixes are unequal' do
+ let(:other) { described_class.new(base_sha: subject.base_sha[0, 10], start_sha: subject.start_sha[0, 10], head_sha: subject.head_sha[0, 10].reverse) }
+
+ it 'returns false' do
+ expect(subject).not_to eq(other)
+ end
+ end
+ end
+ end
+
describe '#compare_in' do
context 'with diff refs for the initial commit' do
let(:commit) { project.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863') }
diff --git a/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb b/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb
new file mode 100644
index 00000000000..2f99febe04e
--- /dev/null
+++ b/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb
@@ -0,0 +1,20 @@
+require 'spec_helper'
+
+describe Gitlab::Diff::Formatters::ImageFormatter do
+ it_behaves_like "position formatter" do
+ let(:base_attrs) do
+ {
+ base_sha: 123,
+ start_sha: 456,
+ head_sha: 789,
+ old_path: 'old_image.png',
+ new_path: 'new_image.png',
+ position_type: 'image'
+ }
+ end
+
+ let(:attrs) do
+ base_attrs.merge(width: 100, height: 100, x: 1, y: 2)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
new file mode 100644
index 00000000000..897dc917f6a
--- /dev/null
+++ b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
@@ -0,0 +1,42 @@
+require 'spec_helper'
+
+describe Gitlab::Diff::Formatters::TextFormatter do
+ let!(:base) do
+ {
+ base_sha: 123,
+ start_sha: 456,
+ head_sha: 789,
+ old_path: 'old_path.txt',
+ new_path: 'new_path.txt'
+ }
+ end
+
+ let!(:complete) do
+ base.merge(old_line: 1, new_line: 2)
+ end
+
+ it_behaves_like "position formatter" do
+ let(:base_attrs) { base }
+
+ let(:attrs) { complete }
+ end
+
+ # Specific text formatter examples
+ let!(:formatter) { described_class.new(attrs) }
+
+ describe '#line_age' do
+ subject { formatter.line_age }
+
+ context 'when there is only new_line' do

+ let(:attrs) { base.merge(new_line: 1) }
+
+ it { is_expected.to eq('new') }
+ end
+
+ context 'when there is only old_line' do
+ let(:attrs) { base.merge(old_line: 1) }
+
+ it { is_expected.to eq('old') }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/position_spec.rb b/spec/lib/gitlab/diff/position_spec.rb
index d4a2a852c12..9bf54fdecc4 100644
--- a/spec/lib/gitlab/diff/position_spec.rb
+++ b/spec/lib/gitlab/diff/position_spec.rb
@@ -5,7 +5,7 @@ describe Gitlab::Diff::Position do
let(:project) { create(:project, :repository) }
- describe "position for an added file" do
+ describe "position for an added text file" do
let(:commit) { project.commit("2ea1f3dec713d940208fb5ce4a38765ecb5d3f73") }
subject do
@@ -47,6 +47,31 @@ describe Gitlab::Diff::Position do
end
end
+ describe "position for an added image file" do
+ let(:commit) { project.commit("33f3729a45c02fc67d00adb1b8bca394b0e761d9") }
+
+ subject do
+ described_class.new(
+ old_path: "files/images/6049019_460s.jpg",
+ new_path: "files/images/6049019_460s.jpg",
+ width: 100,
+ height: 100,
+ x: 1,
+ y: 100,
+ diff_refs: commit.diff_refs,
+ position_type: "image"
+ )
+ end
+
+ it "returns the correct diff file" do
+ diff_file = subject.diff_file(project.repository)
+
+ expect(diff_file.new_file?).to be true
+ expect(diff_file.new_path).to eq(subject.new_path)
+ expect(diff_file.diff_refs).to eq(subject.diff_refs)
+ end
+ end
+
describe "position for a changed file" do
let(:commit) { project.commit("570e7b2abdd848b95f2f578043fc23bd6f6fd24d") }
@@ -429,27 +454,93 @@ describe Gitlab::Diff::Position do
end
end
- describe "#to_json" do
- let(:hash) do
- {
+ describe '#==' do
+ let(:commit) { project.commit("570e7b2abdd848b95f2f578043fc23bd6f6fd24d") }
+
+ subject do
+ described_class.new(
old_path: "files/ruby/popen.rb",
new_path: "files/ruby/popen.rb",
old_line: nil,
new_line: 14,
- base_sha: nil,
- head_sha: nil,
- start_sha: nil
- }
+ diff_refs: commit.diff_refs
+ )
+ end
+
+ context 'when positions are equal' do
+ let(:other) { described_class.new(subject.to_h) }
+
+ it 'returns true' do
+ expect(subject).to eq(other)
+ end
end
- let(:diff_position) { described_class.new(hash) }
+ context 'when positions are equal, except for truncated shas' do
+ let(:other) { described_class.new(subject.to_h.merge(start_sha: subject.start_sha[0, 10])) }
+
+ it 'returns true' do
+ expect(subject).to eq(other)
+ end
+ end
+
+ context 'when positions are unequal' do
+ let(:other) { described_class.new(subject.to_h.merge(start_sha: subject.start_sha.reverse)) }
+
+ it 'returns false' do
+ expect(subject).not_to eq(other)
+ end
+ end
+ end
+
+ describe "#to_json" do
+ shared_examples "diff position json" do
+ it "returns the position as JSON" do
+ expect(JSON.parse(diff_position.to_json)).to eq(hash.stringify_keys)
+ end
+
+ it "works when nested under another hash" do
+ expect(JSON.parse(JSON.generate(pos: diff_position))).to eq('pos' => hash.stringify_keys)
+ end
+ end
+
+ context "for text positon" do
+ let(:hash) do
+ {
+ old_path: "files/ruby/popen.rb",
+ new_path: "files/ruby/popen.rb",
+ old_line: nil,
+ new_line: 14,
+ base_sha: nil,
+ head_sha: nil,
+ start_sha: nil,
+ position_type: "text"
+ }
+ end
+
+ let(:diff_position) { described_class.new(hash) }
- it "returns the position as JSON" do
- expect(JSON.parse(diff_position.to_json)).to eq(hash.stringify_keys)
+ it_behaves_like "diff position json"
end
- it "works when nested under another hash" do
- expect(JSON.parse(JSON.generate(pos: diff_position))).to eq('pos' => hash.stringify_keys)
+ context "for image positon" do
+ let(:hash) do
+ {
+ old_path: "files/any.img",
+ new_path: "files/any.img",
+ base_sha: nil,
+ head_sha: nil,
+ start_sha: nil,
+ width: 100,
+ height: 100,
+ x: 1,
+ y: 100,
+ position_type: "image"
+ }
+ end
+
+ let(:diff_position) { described_class.new(hash) }
+
+ it_behaves_like "diff position json"
end
end
end
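To summarise the two position shapes compared in the JSON examples above, a small sketch. The attribute names are exactly those used in the spec; `commit` is assumed to be any commit exposing diff refs.

# Text positions carry line numbers; image positions carry pixel geometry.
text_position = Gitlab::Diff::Position.new(
  old_path: "files/ruby/popen.rb",
  new_path: "files/ruby/popen.rb",
  old_line: nil,
  new_line: 14,
  diff_refs: commit.diff_refs
)

image_position = Gitlab::Diff::Position.new(
  old_path: "files/images/6049019_460s.jpg",
  new_path: "files/images/6049019_460s.jpg",
  width: 100, height: 100, x: 1, y: 100,
  diff_refs: commit.diff_refs,
  position_type: "image"
)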
diff --git a/spec/lib/gitlab/diff/position_tracer_spec.rb b/spec/lib/gitlab/diff/position_tracer_spec.rb
index 8beebc10040..e5138705443 100644
--- a/spec/lib/gitlab/diff/position_tracer_spec.rb
+++ b/spec/lib/gitlab/diff/position_tracer_spec.rb
@@ -71,6 +71,10 @@ describe Gitlab::Diff::PositionTracer do
Gitlab::Diff::DiffRefs.new(base_sha: base_commit.id, head_sha: head_commit.id)
end
+ def text_position_attrs
+ [:old_line, :new_line]
+ end
+
def position(attrs = {})
attrs.reverse_merge!(
diff_refs: old_diff_refs
@@ -91,7 +95,11 @@ describe Gitlab::Diff::PositionTracer do
expect(new_position.diff_refs).to eq(new_diff_refs)
attrs.each do |attr, value|
- expect(new_position.send(attr)).to eq(value)
+ if text_position_attrs.include?(attr)
+ expect(new_position.formatter.send(attr)).to eq(value)
+ else
+ expect(new_position.send(attr)).to eq(value)
+ end
end
end
end
@@ -110,7 +118,11 @@ describe Gitlab::Diff::PositionTracer do
expect(change_position.diff_refs).to eq(change_diff_refs)
attrs.each do |attr, value|
- expect(change_position.send(attr)).to eq(value)
+ if text_position_attrs.include?(attr)
+ expect(change_position.formatter.send(attr)).to eq(value)
+ else
+ expect(change_position.send(attr)).to eq(value)
+ end
end
end
end
@@ -1761,17 +1773,9 @@ describe Gitlab::Diff::PositionTracer do
let(:merge_commit) do
update_file_again_commit
- committer = repository.user_to_committer(current_user)
-
- options = {
- message: "Merge branches",
- author: committer,
- committer: committer
- }
-
merge_request = create(:merge_request, source_branch: second_create_file_commit.sha, target_branch: branch_name, source_project: project)
- repository.merge(current_user, merge_request.diff_head_sha, merge_request, options)
+ repository.merge(current_user, merge_request.diff_head_sha, merge_request, "Merge branches")
project.commit(branch_name)
end
diff --git a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
index a3d323fe28a..7dc06c90078 100644
--- a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
@@ -1,11 +1,14 @@
require 'spec_helper'
describe Gitlab::Gfm::ReferenceRewriter do
- let(:text) { 'some text' }
- let(:old_project) { create(:project, name: 'old-project') }
- let(:new_project) { create(:project, name: 'new-project') }
+ let(:group) { create(:group) }
+ let(:old_project) { create(:project, name: 'old-project', group: group) }
+ let(:new_project) { create(:project, name: 'new-project', group: group) }
let(:user) { create(:user) }
+ let(:old_project_ref) { old_project.to_reference(new_project) }
+ let(:text) { 'some text' }
+
before do
old_project.team << [user, :reporter]
end
@@ -39,7 +42,7 @@ describe Gitlab::Gfm::ReferenceRewriter do
it { is_expected.not_to include merge_request.to_reference(new_project) }
end
- context 'description ambigous elements' do
+ context 'rewrite ambiguous references' do
context 'url' do
let(:url) { 'http://gitlab.com/#1' }
let(:text) { "This references #1, but not #{url}" }
@@ -66,23 +69,21 @@ describe Gitlab::Gfm::ReferenceRewriter do
context 'description with project labels' do
let!(:label) { create(:label, id: 123, name: 'test', project: old_project) }
- let(:project_ref) { old_project.to_reference(new_project) }
context 'label referenced by id' do
let(:text) { '#1 and ~123' }
- it { is_expected.to eq %Q{#{project_ref}#1 and #{project_ref}~123} }
+ it { is_expected.to eq %Q{#{old_project_ref}#1 and #{old_project_ref}~123} }
end
context 'label referenced by text' do
let(:text) { '#1 and ~"test"' }
- it { is_expected.to eq %Q{#{project_ref}#1 and #{project_ref}~123} }
+ it { is_expected.to eq %Q{#{old_project_ref}#1 and #{old_project_ref}~123} }
end
end
context 'description with group labels' do
let(:old_group) { create(:group) }
let!(:group_label) { create(:group_label, id: 321, name: 'group label', group: old_group) }
- let(:project_ref) { old_project.to_reference(new_project) }
before do
old_project.update(namespace: old_group)
@@ -90,21 +91,53 @@ describe Gitlab::Gfm::ReferenceRewriter do
context 'label referenced by id' do
let(:text) { '#1 and ~321' }
- it { is_expected.to eq %Q{#{project_ref}#1 and #{project_ref}~321} }
+ it { is_expected.to eq %Q{#{old_project_ref}#1 and #{old_project_ref}~321} }
end
context 'label referenced by text' do
let(:text) { '#1 and ~"group label"' }
- it { is_expected.to eq %Q{#{project_ref}#1 and #{project_ref}~321} }
+ it { is_expected.to eq %Q{#{old_project_ref}#1 and #{old_project_ref}~321} }
end
end
end
+ end
+
+ context 'reference contains project milestone' do
+ let!(:milestone) do
+ create(:milestone, title: '9.0', project: old_project)
+ end
+
+ let(:text) { 'milestone: %"9.0"' }
+
+ it { is_expected.to eq %Q[milestone: #{old_project_ref}%"9.0"] }
+ end
+
+ context 'when referring to group milestone' do
+ let!(:milestone) do
+ create(:milestone, title: '10.0', group: group)
+ end
+
+ let(:text) { 'milestone %"10.0"' }
+
+ it { is_expected.to eq text }
+ end
+
+ context 'when referable has a nil reference' do
+ before do
+ create(:milestone, title: '9.0', project: old_project)
+
+ allow_any_instance_of(Milestone)
+ .to receive(:to_reference)
+ .and_return(nil)
+ end
- context 'reference contains milestone' do
- let(:milestone) { create(:milestone) }
- let(:text) { "milestone ref: #{milestone.to_reference}" }
+ let(:text) { 'milestone: %"9.0"' }
- it { is_expected.to eq text }
+ it 'raises an error that should be fixed' do
+ expect { subject }.to raise_error(
+ described_class::RewriteError,
+ 'Unspecified reference detected for Milestone'
+ )
end
end
end
diff --git a/spec/lib/gitlab/git/blame_spec.rb b/spec/lib/gitlab/git/blame_spec.rb
index 465c2012b05..793228701cf 100644
--- a/spec/lib/gitlab/git/blame_spec.rb
+++ b/spec/lib/gitlab/git/blame_spec.rb
@@ -73,7 +73,7 @@ describe Gitlab::Git::Blame, seed_helper: true do
it_behaves_like 'blaming a file'
end
- context 'when Gitaly blame feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly blame feature is disabled', :skip_gitaly_mock do
it_behaves_like 'blaming a file'
end
end
diff --git a/spec/lib/gitlab/git/blob_spec.rb b/spec/lib/gitlab/git/blob_spec.rb
index 66ba00acb7d..412a0093d97 100644
--- a/spec/lib/gitlab/git/blob_spec.rb
+++ b/spec/lib/gitlab/git/blob_spec.rb
@@ -112,17 +112,20 @@ describe Gitlab::Git::Blob, seed_helper: true do
it_behaves_like 'finding blobs'
end
- context 'when project_raw_show Gitaly feature is disabled', skip_gitaly_mock: true do
+ context 'when project_raw_show Gitaly feature is disabled', :skip_gitaly_mock do
it_behaves_like 'finding blobs'
end
end
shared_examples 'finding blobs by ID' do
let(:raw_blob) { Gitlab::Git::Blob.raw(repository, SeedRepo::RubyBlob::ID) }
+ let(:bad_blob) { Gitlab::Git::Blob.raw(repository, SeedRepo::BigCommit::ID) }
+
it { expect(raw_blob.id).to eq(SeedRepo::RubyBlob::ID) }
it { expect(raw_blob.data[0..10]).to eq("require \'fi") }
it { expect(raw_blob.size).to eq(669) }
it { expect(raw_blob.truncated?).to be_falsey }
+ it { expect(bad_blob).to be_nil }
context 'large file' do
it 'limits the size of a large file' do
@@ -147,7 +150,7 @@ describe Gitlab::Git::Blob, seed_helper: true do
it_behaves_like 'finding blobs by ID'
end
- context 'when the blob_raw Gitaly feature is disabled', skip_gitaly_mock: true do
+ context 'when the blob_raw Gitaly feature is disabled', :skip_gitaly_mock do
it_behaves_like 'finding blobs by ID'
end
end
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 14d64d8c4da..9f4e3c49adc 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -65,34 +65,12 @@ describe Gitlab::Git::Commit, seed_helper: true do
end
describe "Commit info from gitaly commit" do
- let(:id) { 'f00' }
- let(:parent_ids) { %w(b45 b46) }
let(:subject) { "My commit".force_encoding('ASCII-8BIT') }
let(:body) { subject + "My body".force_encoding('ASCII-8BIT') }
- let(:committer) do
- Gitaly::CommitAuthor.new(
- name: generate(:name),
- email: generate(:email),
- date: Google::Protobuf::Timestamp.new(seconds: 123)
- )
- end
- let(:author) do
- Gitaly::CommitAuthor.new(
- name: generate(:name),
- email: generate(:email),
- date: Google::Protobuf::Timestamp.new(seconds: 456)
- )
- end
- let(:gitaly_commit) do
- Gitaly::GitCommit.new(
- id: id,
- subject: subject,
- body: body,
- author: author,
- committer: committer,
- parent_ids: parent_ids
- )
- end
+ let(:gitaly_commit) { build(:gitaly_commit, subject: subject, body: body) }
+ let(:id) { gitaly_commit.id }
+ let(:committer) { gitaly_commit.committer }
+ let(:author) { gitaly_commit.author }
let(:commit) { described_class.new(repository, gitaly_commit) }
it { expect(commit.short_id).to eq(id[0..10]) }
@@ -104,7 +82,7 @@ describe Gitlab::Git::Commit, seed_helper: true do
it { expect(commit.author_name).to eq(author.name) }
it { expect(commit.committer_name).to eq(committer.name) }
it { expect(commit.committer_email).to eq(committer.email) }
- it { expect(commit.parent_ids).to eq(parent_ids) }
+ it { expect(commit.parent_ids).to eq(gitaly_commit.parent_ids) }
context 'no body' do
let(:body) { "".force_encoding('ASCII-8BIT') }
@@ -181,7 +159,7 @@ describe Gitlab::Git::Commit, seed_helper: true do
end
end
- describe '.where' do
+ shared_examples '.where' do
context 'path is empty string' do
subject do
commits = described_class.where(
@@ -279,6 +257,14 @@ describe Gitlab::Git::Commit, seed_helper: true do
end
end
+ describe '.where with gitaly' do
+ it_should_behave_like '.where'
+ end
+
+ describe '.where without gitaly', :skip_gitaly_mock do
+ it_should_behave_like '.where'
+ end
+
describe '.between' do
subject do
commits = described_class.between(repository, SeedRepo::Commit::PARENT_ID, SeedRepo::Commit::ID)
@@ -350,7 +336,7 @@ describe Gitlab::Git::Commit, seed_helper: true do
it_behaves_like 'finding all commits'
end
- context 'when Gitaly find_all_commits feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly find_all_commits feature is disabled', :skip_gitaly_mock do
it_behaves_like 'finding all commits'
context 'while applying a sort order based on the `order` option' do
@@ -401,7 +387,7 @@ describe Gitlab::Git::Commit, seed_helper: true do
end
end
- describe '#stats' do
+ shared_examples '#stats' do
subject { commit.stats }
describe '#additions' do
@@ -415,6 +401,14 @@ describe Gitlab::Git::Commit, seed_helper: true do
end
end
+ describe '#stats with gitaly on' do
+ it_should_behave_like '#stats'
+ end
+
+ describe '#stats with gitaly disabled', :skip_gitaly_mock do
+ it_should_behave_like '#stats'
+ end
+
describe '#to_diff' do
subject { commit.to_diff }
diff --git a/spec/lib/gitlab/git/committer_spec.rb b/spec/lib/gitlab/git/committer_spec.rb
deleted file mode 100644
index b0ddbb51449..00000000000
--- a/spec/lib/gitlab/git/committer_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::Git::Committer do
- let(:name) { 'Jane Doe' }
- let(:email) { 'janedoe@example.com' }
- let(:gl_id) { 'user-123' }
-
- subject { described_class.new(name, email, gl_id) }
-
- describe '#==' do
- def eq_other(name, email, gl_id)
- eq(described_class.new(name, email, gl_id))
- end
-
- it { expect(subject).to eq_other(name, email, gl_id) }
-
- it { expect(subject).not_to eq_other(nil, nil, nil) }
- it { expect(subject).not_to eq_other(name + 'x', email, gl_id) }
- it { expect(subject).not_to eq_other(name, email + 'x', gl_id) }
- it { expect(subject).not_to eq_other(name, email, gl_id + 'x') }
- end
-end
diff --git a/spec/lib/gitlab/git/diff_collection_spec.rb b/spec/lib/gitlab/git/diff_collection_spec.rb
index 3494f0cc98d..ee657101f4c 100644
--- a/spec/lib/gitlab/git/diff_collection_spec.rb
+++ b/spec/lib/gitlab/git/diff_collection_spec.rb
@@ -341,8 +341,7 @@ describe Gitlab::Git::DiffCollection, seed_helper: true do
end
context 'when diff is quite large will collapse by default' do
- let(:iterator) { [{ diff: 'a' * (Gitlab::Git::Diff.collapse_limit + 1) }] }
- let(:max_files) { 100 }
+ let(:iterator) { [{ diff: 'a' * 20480 }] }
context 'when no collapse is set' do
let(:expanded) { true }
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index d39b33a0c05..4a7b06003fc 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -31,36 +31,6 @@ EOT
[".gitmodules"]).patches.first
end
- describe 'size limit feature toggles' do
- context 'when the feature gitlab_git_diff_size_limit_increase is enabled' do
- before do
- stub_feature_flags(gitlab_git_diff_size_limit_increase: true)
- end
-
- it 'returns 200 KB for size_limit' do
- expect(described_class.size_limit).to eq(200.kilobytes)
- end
-
- it 'returns 100 KB for collapse_limit' do
- expect(described_class.collapse_limit).to eq(100.kilobytes)
- end
- end
-
- context 'when the feature gitlab_git_diff_size_limit_increase is disabled' do
- before do
- stub_feature_flags(gitlab_git_diff_size_limit_increase: false)
- end
-
- it 'returns 100 KB for size_limit' do
- expect(described_class.size_limit).to eq(100.kilobytes)
- end
-
- it 'returns 10 KB for collapse_limit' do
- expect(described_class.collapse_limit).to eq(10.kilobytes)
- end
- end
- end
-
describe '.new' do
context 'using a Hash' do
context 'with a small diff' do
@@ -77,7 +47,7 @@ EOT
context 'using a diff that is too large' do
it 'prunes the diff' do
- diff = described_class.new(diff: 'a' * (described_class.size_limit + 1))
+ diff = described_class.new(diff: 'a' * 204800)
expect(diff.diff).to be_empty
expect(diff).to be_too_large
@@ -115,8 +85,8 @@ EOT
# The patch total size is 200, with lines between 21 and 54.
# This is a quick-and-dirty way to test this. Ideally, a new patch is
# added to the test repo with a size that falls between the real limits.
- allow(Gitlab::Git::Diff).to receive(:size_limit).and_return(150)
- allow(Gitlab::Git::Diff).to receive(:collapse_limit).and_return(100)
+ stub_const("#{described_class}::SIZE_LIMIT", 150)
+ stub_const("#{described_class}::COLLAPSE_LIMIT", 100)
end
it 'prunes the diff as a large diff instead of as a collapsed diff' do
@@ -356,7 +326,7 @@ EOT
describe '#collapsed?' do
it 'returns true for a diff that is quite large' do
- diff = described_class.new({ diff: 'a' * (described_class.collapse_limit + 1) }, expanded: false)
+ diff = described_class.new({ diff: 'a' * 20480 }, expanded: false)
expect(diff).to be_collapsed
end
diff --git a/spec/lib/gitlab/git/hook_spec.rb b/spec/lib/gitlab/git/hook_spec.rb
index ea3e4680b1d..2fe1f5603ce 100644
--- a/spec/lib/gitlab/git/hook_spec.rb
+++ b/spec/lib/gitlab/git/hook_spec.rb
@@ -14,6 +14,7 @@ describe Gitlab::Git::Hook do
let(:repo_path) { repository.path }
let(:user) { create(:user) }
let(:gl_id) { Gitlab::GlId.gl_id(user) }
+ let(:gl_username) { user.username }
def create_hook(name)
FileUtils.mkdir_p(File.join(repo_path, 'hooks'))
@@ -28,6 +29,7 @@ describe Gitlab::Git::Hook do
f.write(<<-HOOK)
echo 'regular message from the hook'
echo 'error message from the hook' 1>&2
+ echo 'error message from the hook line 2' 1>&2
exit 1
HOOK
end
@@ -41,6 +43,7 @@ describe Gitlab::Git::Hook do
let(:env) do
{
'GL_ID' => gl_id,
+ 'GL_USERNAME' => gl_username,
'PWD' => repo_path,
'GL_PROTOCOL' => 'web',
'GL_REPOSITORY' => gl_repository
@@ -58,7 +61,7 @@ describe Gitlab::Git::Hook do
.with(env, hook_path, chdir: repo_path).and_call_original
end
- status, errors = hook.trigger(gl_id, blank, blank, ref)
+ status, errors = hook.trigger(gl_id, gl_username, blank, blank, ref)
expect(status).to be true
expect(errors).to be_blank
end
@@ -71,9 +74,9 @@ describe Gitlab::Git::Hook do
blank = Gitlab::Git::BLANK_SHA
ref = Gitlab::Git::BRANCH_REF_PREFIX + 'new_branch'
- status, errors = hook.trigger(gl_id, blank, blank, ref)
+ status, errors = hook.trigger(gl_id, gl_username, blank, blank, ref)
expect(status).to be false
- expect(errors).to eq("error message from the hook\n")
+ expect(errors).to eq("error message from the hook<br>error message from the hook line 2<br>")
end
end
end
@@ -85,7 +88,7 @@ describe Gitlab::Git::Hook do
blank = Gitlab::Git::BLANK_SHA
ref = Gitlab::Git::BRANCH_REF_PREFIX + 'new_branch'
- status, errors = hook.trigger(gl_id, blank, blank, ref)
+ status, errors = hook.trigger(gl_id, gl_username, blank, blank, ref)
expect(status).to be true
expect(errors).to be_nil
end
diff --git a/spec/lib/gitlab/git/hooks_service_spec.rb b/spec/lib/gitlab/git/hooks_service_spec.rb
index e9c0209fe3b..51e4e3fdad1 100644
--- a/spec/lib/gitlab/git/hooks_service_spec.rb
+++ b/spec/lib/gitlab/git/hooks_service_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe Gitlab::Git::HooksService, seed_helper: true do
- let(:committer) { Gitlab::Git::Committer.new('Jane Doe', 'janedoe@example.com', 'user-456') }
+ let(:user) { Gitlab::Git::User.new('janedoe', 'Jane Doe', 'janedoe@example.com', 'user-456') }
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, 'project-123') }
let(:service) { described_class.new }
@@ -18,7 +18,7 @@ describe Gitlab::Git::HooksService, seed_helper: true do
hook = double(trigger: [true, nil])
expect(Gitlab::Git::Hook).to receive(:new).exactly(3).times.and_return(hook)
- service.execute(committer, repository, @blankrev, @newrev, @ref) { }
+ service.execute(user, repository, @blankrev, @newrev, @ref) { }
end
end
@@ -28,7 +28,7 @@ describe Gitlab::Git::HooksService, seed_helper: true do
expect(service).not_to receive(:run_hook).with('post-receive')
expect do
- service.execute(committer, repository, @blankrev, @newrev, @ref)
+ service.execute(user, repository, @blankrev, @newrev, @ref)
end.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
end
end
@@ -40,7 +40,7 @@ describe Gitlab::Git::HooksService, seed_helper: true do
expect(service).not_to receive(:run_hook).with('post-receive')
expect do
- service.execute(committer, repository, @blankrev, @newrev, @ref)
+ service.execute(user, repository, @blankrev, @newrev, @ref)
end.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
end
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 556a148c3bc..1ee4acfd193 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -54,7 +54,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe "#rugged" do
- describe 'when storage is broken', broken_storage: true do
+ describe 'when storage is broken', :broken_storage do
it 'raises a storage exception when storage is not available' do
broken_repo = described_class.new('broken', 'a/path.git', '')
@@ -384,11 +384,45 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
- context 'when Gitaly commit_count feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly commit_count feature is disabled', :skip_gitaly_mock do
it_behaves_like 'simple commit counting'
end
end
+ describe '#has_local_branches?' do
+ shared_examples 'check for local branches' do
+ it { expect(repository.has_local_branches?).to eq(true) }
+
+ context 'mutable' do
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') }
+
+ after do
+ ensure_seeds
+ end
+
+ it 'returns false when there are no branches' do
+ # Sanity check
+ expect(repository.has_local_branches?).to eq(true)
+
+ FileUtils.rm_rf(File.join(repository.path, 'packed-refs'))
+ heads_dir = File.join(repository.path, 'refs/heads')
+ FileUtils.rm_rf(heads_dir)
+ FileUtils.mkdir_p(heads_dir)
+
+ expect(repository.has_local_branches?).to eq(false)
+ end
+ end
+ end
+
+ context 'with gitaly' do
+ it_behaves_like 'check for local branches'
+ end
+
+ context 'without gitaly', :skip_gitaly_mock do
+ it_behaves_like 'check for local branches'
+ end
+ end
+
describe "#delete_branch" do
shared_examples "deleting a branch" do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') }
@@ -419,7 +453,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like "deleting a branch"
end
- context "when Gitaly delete_branch is disabled", skip_gitaly_mock: true do
+ context "when Gitaly delete_branch is disabled", :skip_gitaly_mock do
it_behaves_like "deleting a branch"
end
end
@@ -455,7 +489,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'creating a branch'
end
- context 'when Gitaly create_branch feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly create_branch feature is disabled', :skip_gitaly_mock do
it_behaves_like 'creating a branch'
end
end
@@ -481,7 +515,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
it 'raises an error if it failed' do
- expect(Gitlab::Popen).to receive(:popen).and_return(['Error', 1])
+ expect(@repo).to receive(:popen).and_return(['Error', 1])
expect do
@repo.delete_refs('refs/heads/fix')
@@ -895,7 +929,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'extended commit counting'
end
- context 'when Gitaly count_commits feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly count_commits feature is disabled', :skip_gitaly_mock do
it_behaves_like 'extended commit counting'
end
end
@@ -962,7 +996,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'finding a branch'
end
- context 'when Gitaly find_branch feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly find_branch feature is disabled', :skip_gitaly_mock do
it_behaves_like 'finding a branch'
it 'should reload Rugged::Repository and return master' do
@@ -1204,7 +1238,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'checks the existence of refs'
end
- context 'when Gitaly ref_exists feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly ref_exists feature is disabled', :skip_gitaly_mock do
it_behaves_like 'checks the existence of refs'
end
end
@@ -1226,7 +1260,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'checks the existence of tags'
end
- context 'when Gitaly ref_exists_tags feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly ref_exists_tags feature is disabled', :skip_gitaly_mock do
it_behaves_like 'checks the existence of tags'
end
end
@@ -1250,7 +1284,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'checks the existence of branches'
end
- context 'when Gitaly ref_exists_branches feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly ref_exists_branches feature is disabled', :skip_gitaly_mock do
it_behaves_like 'checks the existence of branches'
end
end
@@ -1327,11 +1361,134 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'languages'
- context 'with rugged', skip_gitaly_mock: true do
+ context 'with rugged', :skip_gitaly_mock do
it_behaves_like 'languages'
end
end
+ describe '#with_repo_branch_commit' do
+ context 'when comparing with the same repository' do
+ let(:start_repository) { repository }
+
+ context 'when the branch exists' do
+ let(:start_branch_name) { 'master' }
+
+ it 'yields the commit' do
+ expect { |b| repository.with_repo_branch_commit(start_repository, start_branch_name, &b) }
+ .to yield_with_args(an_instance_of(Gitlab::Git::Commit))
+ end
+ end
+
+ context 'when the branch does not exist' do
+ let(:start_branch_name) { 'definitely-not-master' }
+
+ it 'yields nil' do
+ expect { |b| repository.with_repo_branch_commit(start_repository, start_branch_name, &b) }
+ .to yield_with_args(nil)
+ end
+ end
+ end
+
+ context 'when comparing with another repository' do
+ let(:start_repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') }
+
+ context 'when the branch exists' do
+ let(:start_branch_name) { 'master' }
+
+ it 'yields the commit' do
+ expect { |b| repository.with_repo_branch_commit(start_repository, start_branch_name, &b) }
+ .to yield_with_args(an_instance_of(Gitlab::Git::Commit))
+ end
+ end
+
+ context 'when the branch does not exist' do
+ let(:start_branch_name) { 'definitely-not-master' }
+
+ it 'yields nil' do
+ expect { |b| repository.with_repo_branch_commit(start_repository, start_branch_name, &b) }
+ .to yield_with_args(nil)
+ end
+ end
+ end
+ end
+
+ describe '#fetch_source_branch' do
+ let(:local_ref) { 'refs/merge-requests/1/head' }
+
+ context 'when the branch exists' do
+ let(:source_branch) { 'master' }
+
+ it 'writes the ref' do
+ expect(repository).to receive(:write_ref).with(local_ref, /\h{40}/)
+
+ repository.fetch_source_branch(repository, source_branch, local_ref)
+ end
+
+ it 'returns true' do
+ expect(repository.fetch_source_branch(repository, source_branch, local_ref)).to eq(true)
+ end
+ end
+
+ context 'when the branch does not exist' do
+ let(:source_branch) { 'definitely-not-master' }
+
+ it 'does not write the ref' do
+ expect(repository).not_to receive(:write_ref)
+
+ repository.fetch_source_branch(repository, source_branch, local_ref)
+ end
+
+ it 'returns false' do
+ expect(repository.fetch_source_branch(repository, source_branch, local_ref)).to eq(false)
+ end
+ end
+ end
+
+ describe '#rm_branch' do
+ shared_examples "user deleting a branch" do
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository.raw }
+ let(:user) { create(:user) }
+ let(:branch_name) { "to-be-deleted-soon" }
+
+ before do
+ project.team << [user, :developer]
+ repository.create_branch(branch_name)
+ end
+
+ it "removes the branch from the repo" do
+ repository.rm_branch(branch_name, user: user)
+
+ expect(repository.rugged.branches[branch_name]).to be_nil
+ end
+ end
+
+ context "when Gitaly user_delete_branch is enabled" do
+ it_behaves_like "user deleting a branch"
+ end
+
+ context "when Gitaly user_delete_branch is disabled", :skip_gitaly_mock do
+ it_behaves_like "user deleting a branch"
+ end
+ end
+
+ describe '#write_ref' do
+ context 'validations' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:ref_path, :ref) do
+ 'foo bar' | '123'
+ 'foobar' | "12\x003"
+ end
+
+ with_them do
+ it 'raises ArgumentError' do
+ expect { repository.write_ref(ref_path, ref) }.to raise_error(ArgumentError)
+ end
+ end
+ end
+ end
+
def create_remote_branch(repository, remote_name, branch_name, source_branch_name)
source_branch = repository.branches.find { |branch| branch.name == source_branch_name }
rugged = repository.rugged
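A quick sketch of the #write_ref validations table-tested above. The two inputs are the rows of the `where` table; the comments describe the failure the spec asserts, not the exact error messages.

# Both inputs are rejected before anything touches the repository:
repository.write_ref('foo bar', '123')    # raises ArgumentError (space in the ref path)
repository.write_ref('foobar', "12\x003") # raises ArgumentError (NUL byte in the ref value)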
diff --git a/spec/lib/gitlab/git/rev_list_spec.rb b/spec/lib/gitlab/git/rev_list_spec.rb
index b051a088171..c0eac98d718 100644
--- a/spec/lib/gitlab/git/rev_list_spec.rb
+++ b/spec/lib/gitlab/git/rev_list_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::Git::RevList do
let(:rev_list) { described_class.new(newrev: 'newrev', path_to_repo: project.repository.path_to_repo) }
it 'calls out to `popen`' do
- expect(Gitlab::Popen).to receive(:popen).with([
+ expect(rev_list).to receive(:popen).with([
Gitlab.config.git.bin_path,
"--git-dir=#{project.repository.path_to_repo}",
'rev-list',
@@ -36,7 +36,7 @@ describe Gitlab::Git::RevList do
let(:rev_list) { described_class.new(oldrev: 'oldrev', newrev: 'newrev', path_to_repo: project.repository.path_to_repo) }
it 'calls out to `popen`' do
- expect(Gitlab::Popen).to receive(:popen).with([
+ expect(rev_list).to receive(:popen).with([
Gitlab.config.git.bin_path,
"--git-dir=#{project.repository.path_to_repo}",
'rev-list',
diff --git a/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb b/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
index c86353abb7c..98cf7966dad 100644
--- a/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
+++ b/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state: true, broken_storage: true do
let(:storage_name) { 'default' }
- let(:circuit_breaker) { described_class.new(storage_name) }
+ let(:circuit_breaker) { described_class.new(storage_name, hostname) }
let(:hostname) { Gitlab::Environment.hostname }
let(:cache_key) { "storage_accessible:#{storage_name}:#{hostname}" }
@@ -22,7 +22,8 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
'failure_wait_time' => 30,
'failure_reset_time' => 1800,
'storage_timeout' => 5
- }
+ },
+ 'nopath' => { 'path' => nil }
)
end
@@ -59,6 +60,14 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
expect(breaker).to be_a(described_class)
expect(described_class.for_storage('default')).to eq(breaker)
end
+
+ it 'returns a broken circuit breaker for an unknown storage' do
+ expect(described_class.for_storage('unknown').circuit_broken?).to be_truthy
+ end
+
+ it 'returns a broken circuit breaker when the path is not set' do
+ expect(described_class.for_storage('nopath').circuit_broken?).to be_truthy
+ end
end
describe '#initialize' do
diff --git a/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb b/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb
new file mode 100644
index 00000000000..0e645008c88
--- /dev/null
+++ b/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb
@@ -0,0 +1,77 @@
+require 'spec_helper'
+
+describe Gitlab::Git::Storage::NullCircuitBreaker do
+ let(:storage) { 'default' }
+ let(:hostname) { 'localhost' }
+ let(:error) { nil }
+
+ subject(:breaker) { described_class.new(storage, hostname, error: error) }
+
+ context 'with an error' do
+ let(:error) { Gitlab::Git::Storage::Misconfiguration.new('error') }
+
+ describe '#perform' do
+ it { expect { breaker.perform { 'ok' } }.to raise_error(error) }
+ end
+
+ describe '#circuit_broken?' do
+ it { expect(breaker.circuit_broken?).to be_truthy }
+ end
+
+ describe '#last_failure' do
+ it { Timecop.freeze { expect(breaker.last_failure).to eq(Time.now) } }
+ end
+
+ describe '#failure_count' do
+ it { expect(breaker.failure_count).to eq(breaker.failure_count_threshold) }
+ end
+
+ describe '#failure_info' do
+ it { Timecop.freeze { expect(breaker.failure_info).to eq(Gitlab::Git::Storage::CircuitBreaker::FailureInfo.new(Time.now, breaker.failure_count_threshold)) } }
+ end
+ end
+
+ context 'not broken' do
+ describe '#perform' do
+ it { expect(breaker.perform { 'ok' }).to eq('ok') }
+ end
+
+ describe '#circuit_broken?' do
+ it { expect(breaker.circuit_broken?).to be_falsy }
+ end
+
+ describe '#last_failure' do
+ it { expect(breaker.last_failure).to be_nil }
+ end
+
+ describe '#failure_count' do
+ it { expect(breaker.failure_count).to eq(0) }
+ end
+
+ describe '#failure_info' do
+ it { expect(breaker.failure_info).to eq(Gitlab::Git::Storage::CircuitBreaker::FailureInfo.new(nil, 0)) }
+ end
+ end
+
+ describe '#failure_count_threshold' do
+ it { expect(breaker.failure_count_threshold).to eq(1) }
+ end
+
+ it 'implements the CircuitBreaker interface' do
+ ours = described_class.public_instance_methods
+ theirs = Gitlab::Git::Storage::CircuitBreaker.public_instance_methods
+
+ # These methods are not part of the public API, but are public to allow the
+ # CircuitBreaker specs to operate. They should be made private over time.
+ exceptions = %i[
+ cache_key
+ check_storage_accessible!
+ no_failures?
+ storage_available?
+ track_storage_accessible
+ track_storage_inaccessible
+ ]
+
+ expect(theirs - ours).to contain_exactly(*exceptions)
+ end
+end
diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb
index cc10679ef1e..6c4f538bf01 100644
--- a/spec/lib/gitlab/git/tag_spec.rb
+++ b/spec/lib/gitlab/git/tag_spec.rb
@@ -29,7 +29,7 @@ describe Gitlab::Git::Tag, seed_helper: true do
it_behaves_like 'Gitlab::Git::Repository#tags'
end
- context 'when Gitaly tags feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly tags feature is disabled', :skip_gitaly_mock do
it_behaves_like 'Gitlab::Git::Repository#tags'
end
end
diff --git a/spec/lib/gitlab/git/user_spec.rb b/spec/lib/gitlab/git/user_spec.rb
new file mode 100644
index 00000000000..31d5f59a562
--- /dev/null
+++ b/spec/lib/gitlab/git/user_spec.rb
@@ -0,0 +1,38 @@
+require 'spec_helper'
+
+describe Gitlab::Git::User do
+ let(:username) { 'janedo' }
+ let(:name) { 'Jane Doe' }
+ let(:email) { 'janedoe@example.com' }
+ let(:gl_id) { 'user-123' }
+
+ subject { described_class.new(username, name, email, gl_id) }
+
+ describe '.from_gitaly' do
+ let(:gitaly_user) { Gitaly::User.new(name: name, email: email, gl_id: gl_id) }
+ subject { described_class.from_gitaly(gitaly_user) }
+
+ it { expect(subject).to eq(described_class.new('', name, email, gl_id)) }
+ end
+
+ describe '.from_gitlab' do
+ let(:user) { build(:user) }
+ subject { described_class.from_gitlab(user) }
+
+ it { expect(subject).to eq(described_class.new(user.username, user.name, user.email, 'user-')) }
+ end
+
+ describe '#==' do
+ def eq_other(username, name, email, gl_id)
+ eq(described_class.new(username, name, email, gl_id))
+ end
+
+ it { expect(subject).to eq_other(username, name, email, gl_id) }
+
+ it { expect(subject).not_to eq_other(nil, nil, nil, nil) }
+ it { expect(subject).not_to eq_other(username + 'x', name, email, gl_id) }
+ it { expect(subject).not_to eq_other(username, name + 'x', email, gl_id) }
+ it { expect(subject).not_to eq_other(username, name, email + 'x', gl_id) }
+ it { expect(subject).not_to eq_other(username, name, email, gl_id + 'x') }
+ end
+end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 458627ee4de..c9643c5da47 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -165,7 +165,7 @@ describe Gitlab::GitAccess do
stub_application_setting(rsa_key_restriction: 4096)
end
- it 'does not allow keys which are too small', aggregate_failures: true do
+ it 'does not allow keys which are too small', :aggregate_failures do
expect(actor).not_to be_valid
expect { pull_access_check }.to raise_unauthorized('Your SSH key must be at least 4096 bits.')
expect { push_access_check }.to raise_unauthorized('Your SSH key must be at least 4096 bits.')
@@ -177,7 +177,7 @@ describe Gitlab::GitAccess do
stub_application_setting(rsa_key_restriction: ApplicationSetting::FORBIDDEN_KEY_VALUE)
end
- it 'does not allow keys which are too small', aggregate_failures: true do
+ it 'does not allow keys which are too small', :aggregate_failures do
expect(actor).not_to be_valid
expect { pull_access_check }.to raise_unauthorized(/Your SSH key type is forbidden/)
expect { push_access_check }.to raise_unauthorized(/Your SSH key type is forbidden/)
@@ -598,6 +598,19 @@ describe Gitlab::GitAccess do
admin: { push_protected_branch: false, push_all: false, merge_into_protected_branch: false }))
end
end
+
+ context "when in a read-only GitLab instance" do
+ before do
+ create(:protected_branch, name: 'feature', project: project)
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+ end
+
+ # Only check admin; if an admin can't do it, other roles can't either
+ matrix = permissions_matrix[:admin].dup
+ matrix.each { |key, _| matrix[key] = false }
+
+ run_permission_checks(admin: matrix)
+ end
end
describe 'build authentication abilities' do
@@ -632,6 +645,16 @@ describe Gitlab::GitAccess do
end
end
+ context 'when the repository is read only' do
+ let(:project) { create(:project, :repository, :read_only) }
+
+ it 'denies push access' do
+ project.add_master(user)
+
+ expect { push_access_check }.to raise_unauthorized('The repository is temporarily read-only. Please try again later.')
+ end
+ end
+
describe 'deploy key permissions' do
let(:key) { create(:deploy_key, user: user, can_push: can_push) }
let(:actor) { key }
diff --git a/spec/lib/gitlab/git_access_wiki_spec.rb b/spec/lib/gitlab/git_access_wiki_spec.rb
index 0376b4ee783..1056074264a 100644
--- a/spec/lib/gitlab/git_access_wiki_spec.rb
+++ b/spec/lib/gitlab/git_access_wiki_spec.rb
@@ -4,6 +4,7 @@ describe Gitlab::GitAccessWiki do
let(:access) { described_class.new(user, project, 'web', authentication_abilities: authentication_abilities, redirected_path: redirected_path) }
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
+ let(:changes) { ['6f6d7e7ed 570e7b2ab refs/heads/master'] }
let(:redirected_path) { nil }
let(:authentication_abilities) do
[
@@ -13,19 +14,27 @@ describe Gitlab::GitAccessWiki do
]
end
- describe 'push_allowed?' do
- before do
- create(:protected_branch, name: 'master', project: project)
- project.team << [user, :developer]
- end
+ describe '#push_access_check' do
+ context 'when user can :create_wiki' do
+ before do
+ create(:protected_branch, name: 'master', project: project)
+ project.team << [user, :developer]
+ end
- subject { access.check('git-receive-pack', changes) }
+ subject { access.check('git-receive-pack', changes) }
- it { expect { subject }.not_to raise_error }
- end
+ it { expect { subject }.not_to raise_error }
+
+ context 'when in a read-only GitLab instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+ end
- def changes
- ['6f6d7e7ed 570e7b2ab refs/heads/master']
+ it 'does not give access to upload wiki code' do
+ expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "You can't push code to a read-only GitLab instance.")
+ end
+ end
+ end
end
describe '#access_check_download!' do
diff --git a/spec/lib/gitlab/git_ref_validator_spec.rb b/spec/lib/gitlab/git_ref_validator_spec.rb
index e1fa8ae03f8..ba7fb168a3b 100644
--- a/spec/lib/gitlab/git_ref_validator_spec.rb
+++ b/spec/lib/gitlab/git_ref_validator_spec.rb
@@ -4,6 +4,7 @@ describe Gitlab::GitRefValidator do
it { expect(described_class.validate('feature/new')).to be_truthy }
it { expect(described_class.validate('implement_@all')).to be_truthy }
it { expect(described_class.validate('my_new_feature')).to be_truthy }
+ it { expect(described_class.validate('my-branch')).to be_truthy }
it { expect(described_class.validate('#1')).to be_truthy }
it { expect(described_class.validate('feature/refs/heads/foo')).to be_truthy }
it { expect(described_class.validate('feature/~new/')).to be_falsey }
@@ -22,4 +23,8 @@ describe Gitlab::GitRefValidator do
it { expect(described_class.validate('refs/remotes/')).to be_falsey }
it { expect(described_class.validate('refs/heads/feature')).to be_falsey }
it { expect(described_class.validate('refs/remotes/origin')).to be_falsey }
+ it { expect(described_class.validate('-')).to be_falsey }
+ it { expect(described_class.validate('-branch')).to be_falsey }
+ it { expect(described_class.validate('.tag')).to be_falsey }
+ it { expect(described_class.validate('my branch')).to be_falsey }
end
diff --git a/spec/lib/gitlab/git_spec.rb b/spec/lib/gitlab/git_spec.rb
index 4702a978f19..494dfe0e595 100644
--- a/spec/lib/gitlab/git_spec.rb
+++ b/spec/lib/gitlab/git_spec.rb
@@ -1,3 +1,4 @@
+# coding: utf-8
require 'spec_helper'
describe Gitlab::Git do
@@ -29,4 +30,12 @@ describe Gitlab::Git do
end
end
end
+
+ describe '.ref_name' do
+    it 'ensures ref is a valid UTF-8 string' do
+ utf8_invalid_ref = Gitlab::Git::BRANCH_REF_PREFIX + "an_invalid_ref_\xE5"
+
+ expect(described_class.ref_name(utf8_invalid_ref)).to eq("an_invalid_ref_å")
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index f32fe5d8150..b2275119a04 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -51,6 +51,10 @@ describe Gitlab::GitalyClient::CommitService do
expect(ret).to be_kind_of(Gitlab::GitalyClient::DiffStitcher)
end
+
+ it 'encodes paths correctly' do
+ expect { client.diff_from_parent(commit, paths: ['encoding/test.txt', 'encoding/テスト.txt', nil]) }.not_to raise_error
+ end
end
describe '#commit_deltas' do
@@ -165,4 +169,29 @@ describe Gitlab::GitalyClient::CommitService do
expect(subject).to eq("my diff")
end
end
+
+ describe '#commit_stats' do
+ let(:request) do
+ Gitaly::CommitStatsRequest.new(
+ repository: repository_message, revision: revision
+ )
+ end
+ let(:response) do
+ Gitaly::CommitStatsResponse.new(
+ oid: revision,
+ additions: 11,
+ deletions: 15
+ )
+ end
+
+ subject { described_class.new(repository).commit_stats(revision) }
+
+ it 'sends an RPC request' do
+ expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:commit_stats)
+ .with(request, kind_of(Hash)).and_return(response)
+
+ expect(subject.additions).to eq(11)
+ expect(subject.deletions).to eq(15)
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
new file mode 100644
index 00000000000..7bd6a7fa842
--- /dev/null
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -0,0 +1,92 @@
+require 'spec_helper'
+
+describe Gitlab::GitalyClient::OperationService do
+ let(:project) { create(:project) }
+ let(:repository) { project.repository.raw }
+ let(:client) { described_class.new(repository) }
+ let(:user) { create(:user) }
+ let(:gitaly_user) { Gitlab::GitalyClient::Util.gitaly_user(user) }
+
+ describe '#user_create_branch' do
+ let(:branch_name) { 'new' }
+ let(:start_point) { 'master' }
+ let(:request) do
+ Gitaly::UserCreateBranchRequest.new(
+ repository: repository.gitaly_repository,
+ branch_name: branch_name,
+ start_point: start_point,
+ user: gitaly_user
+ )
+ end
+ let(:gitaly_commit) { build(:gitaly_commit) }
+ let(:commit_id) { gitaly_commit.id }
+ let(:gitaly_branch) do
+ Gitaly::Branch.new(name: branch_name, target_commit: gitaly_commit)
+ end
+ let(:response) { Gitaly::UserCreateBranchResponse.new(branch: gitaly_branch) }
+ let(:commit) { Gitlab::Git::Commit.new(repository, gitaly_commit) }
+
+ subject { client.user_create_branch(branch_name, user, start_point) }
+
+    it 'sends a user_create_branch message and returns a Gitlab::Git::Branch' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_create_branch).with(request, kind_of(Hash))
+ .and_return(response)
+
+ expect(subject.name).to eq(branch_name)
+ expect(subject.dereferenced_target).to eq(commit)
+ end
+
+ context "when pre_receive_error is present" do
+ let(:response) do
+ Gitaly::UserCreateBranchResponse.new(pre_receive_error: "something failed")
+ end
+
+ it "throws a PreReceive exception" do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_create_branch).with(request, kind_of(Hash))
+ .and_return(response)
+
+ expect { subject }.to raise_error(
+ Gitlab::Git::HooksService::PreReceiveError, "something failed")
+ end
+ end
+ end
+
+ describe '#user_delete_branch' do
+ let(:branch_name) { 'my-branch' }
+ let(:request) do
+ Gitaly::UserDeleteBranchRequest.new(
+ repository: repository.gitaly_repository,
+ branch_name: branch_name,
+ user: gitaly_user
+ )
+ end
+ let(:response) { Gitaly::UserDeleteBranchResponse.new }
+
+ subject { client.user_delete_branch(branch_name, user) }
+
+ it 'sends a user_delete_branch message' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_delete_branch).with(request, kind_of(Hash))
+ .and_return(response)
+
+ subject
+ end
+
+ context "when pre_receive_error is present" do
+ let(:response) do
+ Gitaly::UserDeleteBranchResponse.new(pre_receive_error: "something failed")
+ end
+
+ it "throws a PreReceive exception" do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_delete_branch).with(request, kind_of(Hash))
+ .and_return(response)
+
+ expect { subject }.to raise_error(
+ Gitlab::Git::HooksService::PreReceiveError, "something failed")
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index 6f59750b4da..8127b4842b7 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -84,14 +84,14 @@ describe Gitlab::GitalyClient::RefService do
end
end
- describe '#find_ref_name', seed_helper: true do
+ describe '#find_ref_name', :seed_helper do
subject { client.find_ref_name(SeedRepo::Commit::ID, 'refs/heads/master') }
it { is_expected.to be_utf8 }
it { is_expected.to eq('refs/heads/master') }
end
- describe '#ref_exists?', seed_helper: true do
+ describe '#ref_exists?', :seed_helper do
it 'finds the master branch ref' do
expect(client.ref_exists?('refs/heads/master')).to eq(true)
end
diff --git a/spec/lib/gitlab/gitaly_client/util_spec.rb b/spec/lib/gitlab/gitaly_client/util_spec.rb
new file mode 100644
index 00000000000..498f6886bee
--- /dev/null
+++ b/spec/lib/gitlab/gitaly_client/util_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+
+describe Gitlab::GitalyClient::Util do
+ describe '.repository' do
+ let(:repository_storage) { 'default' }
+ let(:relative_path) { 'my/repo.git' }
+ let(:gl_repository) { 'project-1' }
+ let(:git_object_directory) { '.git/objects' }
+ let(:git_alternate_object_directory) { '/dir/one:/dir/two' }
+
+ subject do
+ described_class.repository(repository_storage, relative_path, gl_repository)
+ end
+
+ it 'creates a Gitaly::Repository with the given data' do
+ expect(Gitlab::Git::Env).to receive(:[]).with('GIT_OBJECT_DIRECTORY')
+ .and_return(git_object_directory)
+ expect(Gitlab::Git::Env).to receive(:[]).with('GIT_ALTERNATE_OBJECT_DIRECTORIES')
+ .and_return(git_alternate_object_directory)
+
+ expect(subject).to be_a(Gitaly::Repository)
+ expect(subject.storage_name).to eq(repository_storage)
+ expect(subject.relative_path).to eq(relative_path)
+ expect(subject.gl_repository).to eq(gl_repository)
+ expect(subject.git_object_directory).to eq(git_object_directory)
+ expect(subject.git_alternate_object_directories).to eq([git_alternate_object_directory])
+ end
+ end
+
+ describe '.gitaly_user' do
+ let(:user) { create(:user) }
+ let(:gl_id) { Gitlab::GlId.gl_id(user) }
+
+ subject { described_class.gitaly_user(user) }
+
+ it 'creates a Gitaly::User from a GitLab user' do
+ expect(subject).to be_a(Gitaly::User)
+ expect(subject.name).to eq(user.name)
+ expect(subject.email).to eq(user.email)
+ expect(subject.gl_id).to eq(gl_id)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index a9b861fcff2..a1f4e65b8d4 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -38,6 +38,144 @@ describe Gitlab::GitalyClient, skip_gitaly_mock: true do
end
end
+ describe 'encode' do
+ [
+ [nil, ""],
+ ["", ""],
+ [" ", " "],
+ %w(a1 a1),
+ ["编码", "\xE7\xBC\x96\xE7\xA0\x81".b]
+ ].each do |input, result|
+ it "encodes #{input.inspect} to #{result.inspect}" do
+ expect(described_class.encode(input)).to eq result
+ end
+ end
+ end
+
+ describe 'allow_n_plus_1_calls' do
+ context 'when RequestStore is enabled', :request_store do
+ it 'returns the result of the allow_n_plus_1_calls block' do
+ expect(described_class.allow_n_plus_1_calls { "result" }).to eq("result")
+ end
+ end
+
+ context 'when RequestStore is not active' do
+ it 'returns the result of the allow_n_plus_1_calls block' do
+ expect(described_class.allow_n_plus_1_calls { "something" }).to eq("something")
+ end
+ end
+ end
+
+ describe 'enforce_gitaly_request_limits?' do
+ def call_gitaly(count = 1)
+ (1..count).each do
+ described_class.enforce_gitaly_request_limits(:test)
+ end
+ end
+
+ context 'when RequestStore is enabled', :request_store do
+      it 'allows up to the maximum number of allowed calls' do
+ expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS) }.not_to raise_error
+ end
+
+ context 'when the maximum number of calls has been reached' do
+ before do
+ call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS)
+ end
+
+ it 'fails on the next call' do
+ expect { call_gitaly(1) }.to raise_error(Gitlab::GitalyClient::TooManyInvocationsError)
+ end
+ end
+
+ it 'allows the maximum number of calls to be exceeded within an allow_n_plus_1_calls block' do
+ expect do
+ described_class.allow_n_plus_1_calls do
+ call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS + 1)
+ end
+ end.not_to raise_error
+ end
+
+ context 'when the maximum number of calls has been reached within an allow_n_plus_1_calls block' do
+ before do
+ described_class.allow_n_plus_1_calls do
+ call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS)
+ end
+ end
+
+ it 'allows up to the maximum number of calls outside of an allow_n_plus_1_calls block' do
+ expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS) }.not_to raise_error
+ end
+
+ it 'does not allow the maximum number of calls to be exceeded outside of an allow_n_plus_1_calls block' do
+ expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS + 1) }.to raise_error(Gitlab::GitalyClient::TooManyInvocationsError)
+ end
+ end
+ end
+
+ context 'when RequestStore is not active' do
+ it 'does not raise errors when the maximum number of allowed calls is exceeded' do
+ expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS + 2) }.not_to raise_error
+ end
+
+ it 'does not fail when the maximum number of calls is exceeded within an allow_n_plus_1_calls block' do
+ expect do
+ described_class.allow_n_plus_1_calls do
+ call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS + 1)
+ end
+ end.not_to raise_error
+ end
+ end
+ end
+
+ describe 'get_request_count' do
+ context 'when RequestStore is enabled', :request_store do
+ context 'when enforce_gitaly_request_limits is called outside of allow_n_plus_1_calls blocks' do
+ before do
+ described_class.enforce_gitaly_request_limits(:call)
+ end
+
+ it 'counts gitaly calls' do
+ expect(described_class.get_request_count).to eq(1)
+ end
+ end
+
+ context 'when enforce_gitaly_request_limits is called inside and outside of allow_n_plus_1_calls blocks' do
+ before do
+ described_class.enforce_gitaly_request_limits(:call)
+ described_class.allow_n_plus_1_calls do
+ described_class.enforce_gitaly_request_limits(:call)
+ end
+ end
+
+ it 'counts gitaly calls' do
+ expect(described_class.get_request_count).to eq(2)
+ end
+ end
+
+ context 'when reset_counts is called' do
+ before do
+ described_class.enforce_gitaly_request_limits(:call)
+ described_class.reset_counts
+ end
+
+ it 'resets counts' do
+ expect(described_class.get_request_count).to eq(0)
+ end
+ end
+ end
+
+ context 'when RequestStore is not active' do
+ before do
+ described_class.enforce_gitaly_request_limits(:call)
+ end
+
+ it 'returns zero' do
+ expect(described_class.get_request_count).to eq(0)
+ end
+ end
+ end
+
describe 'feature_enabled?' do
let(:feature_name) { 'my_feature' }
let(:real_feature_name) { "gitaly_#{feature_name}" }
diff --git a/spec/lib/gitlab/gpg/commit_spec.rb b/spec/lib/gitlab/gpg/commit_spec.rb
index b07462e4978..a6c99bc07d4 100644
--- a/spec/lib/gitlab/gpg/commit_spec.rb
+++ b/spec/lib/gitlab/gpg/commit_spec.rb
@@ -63,6 +63,45 @@ describe Gitlab::Gpg::Commit do
it_behaves_like 'returns the cached signature on second call'
end
+ context 'commit signed with a subkey' do
+ let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User3.emails.first }
+
+ let!(:user) { create(:user, email: GpgHelpers::User3.emails.first) }
+
+ let!(:gpg_key) do
+ create :gpg_key, key: GpgHelpers::User3.public_key, user: user
+ end
+
+ let(:gpg_key_subkey) do
+ gpg_key.subkeys.find_by(fingerprint: '0522DD29B98F167CD8421752E38FFCAF75ABD92A')
+ end
+
+ before do
+ allow(Rugged::Commit).to receive(:extract_signature)
+ .with(Rugged::Repository, commit_sha)
+ .and_return(
+ [
+ GpgHelpers::User3.signed_commit_signature,
+ GpgHelpers::User3.signed_commit_base_data
+ ]
+ )
+ end
+
+ it 'returns a valid signature' do
+ expect(described_class.new(commit).signature).to have_attributes(
+ commit_sha: commit_sha,
+ project: project,
+ gpg_key: gpg_key_subkey,
+ gpg_key_primary_keyid: gpg_key_subkey.keyid,
+ gpg_key_user_name: GpgHelpers::User3.names.first,
+ gpg_key_user_email: GpgHelpers::User3.emails.first,
+ verification_status: 'verified'
+ )
+ end
+
+ it_behaves_like 'returns the cached signature on second call'
+ end
+
context 'user email does not match the committer email, but is the same user' do
let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User2.emails.first }
diff --git a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
index b9fd4d02156..d6000af0ecd 100644
--- a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
+++ b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
@@ -2,17 +2,16 @@ require 'rails_helper'
RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
describe '#run' do
- let!(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
- let!(:project) { create :project, :repository, path: 'sample-project' }
+ let(:signature) { [GpgHelpers::User1.signed_commit_signature, GpgHelpers::User1.signed_commit_base_data] }
+ let(:committer_email) { GpgHelpers::User1.emails.first }
+ let!(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
+ let!(:project) { create :project, :repository, path: 'sample-project' }
let!(:raw_commit) do
raw_commit = double(
:raw_commit,
- signature: [
- GpgHelpers::User1.signed_commit_signature,
- GpgHelpers::User1.signed_commit_base_data
- ],
+ signature: signature,
sha: commit_sha,
- committer_email: GpgHelpers::User1.emails.first
+ committer_email: committer_email
)
allow(raw_commit).to receive :save!
@@ -29,12 +28,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
allow(Rugged::Commit).to receive(:extract_signature)
.with(Rugged::Repository, commit_sha)
- .and_return(
- [
- GpgHelpers::User1.signed_commit_signature,
- GpgHelpers::User1.signed_commit_base_data
- ]
- )
+ .and_return(signature)
end
context 'gpg signature did have an associated gpg key which was removed later' do
@@ -183,5 +177,34 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
)
end
end
+
+ context 'gpg signature did not have an associated gpg subkey' do
+ let(:signature) { [GpgHelpers::User3.signed_commit_signature, GpgHelpers::User3.signed_commit_base_data] }
+ let(:committer_email) { GpgHelpers::User3.emails.first }
+ let!(:user) { create :user, email: GpgHelpers::User3.emails.first }
+
+ let!(:invalid_gpg_signature) do
+ create :gpg_signature,
+ project: project,
+ commit_sha: commit_sha,
+ gpg_key: nil,
+ gpg_key_primary_keyid: GpgHelpers::User3.subkey_fingerprints.last[24..-1],
+ verification_status: 'unknown_key'
+ end
+
+      it 'updates the signature to be valid when the missing gpg key is added' do
+ # InvalidGpgSignatureUpdater is called by the after_create hook
+ gpg_key = create(:gpg_key, key: GpgHelpers::User3.public_key, user: user)
+ subkey = gpg_key.subkeys.last
+
+ expect(invalid_gpg_signature.reload).to have_attributes(
+ project: project,
+ commit_sha: commit_sha,
+ gpg_key_subkey_id: subkey.id,
+ gpg_key_primary_keyid: subkey.keyid,
+ verification_status: 'verified'
+ )
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/gpg_spec.rb b/spec/lib/gitlab/gpg_spec.rb
index 11a2aea1915..ab9a166db00 100644
--- a/spec/lib/gitlab/gpg_spec.rb
+++ b/spec/lib/gitlab/gpg_spec.rb
@@ -28,6 +28,23 @@ describe Gitlab::Gpg do
end
end
+ describe '.subkeys_from_key' do
+ it 'returns the subkeys by primary key' do
+ all_subkeys = described_class.subkeys_from_key(GpgHelpers::User1.public_key)
+ subkeys = all_subkeys[GpgHelpers::User1.primary_keyid]
+
+ expect(subkeys).to be_present
+ expect(subkeys.first[:keyid]).to be_present
+ expect(subkeys.first[:fingerprint]).to be_present
+ end
+
+    it 'returns an empty array when there are no subkeys' do
+ all_subkeys = described_class.subkeys_from_key(GpgHelpers::User4.public_key)
+
+ expect(all_subkeys[GpgHelpers::User4.primary_keyid]).to be_empty
+ end
+ end
+
describe '.user_infos_from_key' do
it 'returns the names and emails' do
user_infos = described_class.user_infos_from_key(GpgHelpers::User1.public_key)
diff --git a/spec/lib/gitlab/group_hierarchy_spec.rb b/spec/lib/gitlab/group_hierarchy_spec.rb
index 08010c2d0e2..8dc83a6db7f 100644
--- a/spec/lib/gitlab/group_hierarchy_spec.rb
+++ b/spec/lib/gitlab/group_hierarchy_spec.rb
@@ -23,6 +23,11 @@ describe Gitlab::GroupHierarchy, :postgresql do
expect(relation).to include(parent, child1, child2)
end
+
+ it 'does not allow the use of #update_all' do
+ expect { relation.update_all(share_with_group_lock: false) }
+ .to raise_error(ActiveRecord::ReadOnlyRecord)
+ end
end
describe '#base_and_descendants' do
@@ -43,6 +48,11 @@ describe Gitlab::GroupHierarchy, :postgresql do
expect(relation).to include(parent, child1, child2)
end
+
+ it 'does not allow the use of #update_all' do
+ expect { relation.update_all(share_with_group_lock: false) }
+ .to raise_error(ActiveRecord::ReadOnlyRecord)
+ end
end
describe '#all_groups' do
@@ -73,5 +83,10 @@ describe Gitlab::GroupHierarchy, :postgresql do
expect(relation).to include(child2)
end
+
+ it 'does not allow the use of #update_all' do
+ expect { relation.update_all(share_with_group_lock: false) }
+ .to raise_error(ActiveRecord::ReadOnlyRecord)
+ end
end
end
diff --git a/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb b/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb
index f5c9680bf59..4c1ca4349ea 100644
--- a/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb
+++ b/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb
@@ -21,7 +21,7 @@ describe Gitlab::HealthChecks::FsShardsCheck do
let(:metric_class) { Gitlab::HealthChecks::Metric }
let(:result_class) { Gitlab::HealthChecks::Result }
- let(:repository_storages) { [:default] }
+ let(:repository_storages) { ['default'] }
let(:tmp_dir) { Dir.mktmpdir }
let(:storages_paths) do
@@ -44,7 +44,7 @@ describe Gitlab::HealthChecks::FsShardsCheck do
describe '#readiness' do
subject { described_class.readiness }
- context 'storage has a tripped circuitbreaker', broken_storage: true do
+ context 'storage has a tripped circuitbreaker', :broken_storage do
let(:repository_storages) { ['broken'] }
let(:storages_paths) do
Gitlab.config.repositories.storages
@@ -64,7 +64,7 @@ describe Gitlab::HealthChecks::FsShardsCheck do
allow(described_class).to receive(:storage_circuitbreaker_test) { true }
end
- it { is_expected.to include(result_class.new(false, 'cannot stat storage', shard: :default)) }
+ it { is_expected.to include(result_class.new(false, 'cannot stat storage', shard: 'default')) }
end
context 'storage points to directory that has both read and write rights' do
@@ -72,7 +72,7 @@ describe Gitlab::HealthChecks::FsShardsCheck do
FileUtils.chmod_R(0755, tmp_dir)
end
- it { is_expected.to include(result_class.new(true, nil, shard: :default)) }
+ it { is_expected.to include(result_class.new(true, nil, shard: 'default')) }
it 'cleans up files used for testing' do
expect(described_class).to receive(:storage_write_test).with(any_args).and_call_original
@@ -85,7 +85,7 @@ describe Gitlab::HealthChecks::FsShardsCheck do
allow(described_class).to receive(:storage_read_test).with(any_args).and_return(false)
end
- it { is_expected.to include(result_class.new(false, 'cannot read from storage', shard: :default)) }
+ it { is_expected.to include(result_class.new(false, 'cannot read from storage', shard: 'default')) }
end
context 'write test fails' do
@@ -93,7 +93,7 @@ describe Gitlab::HealthChecks::FsShardsCheck do
allow(described_class).to receive(:storage_write_test).with(any_args).and_return(false)
end
- it { is_expected.to include(result_class.new(false, 'cannot write to storage', shard: :default)) }
+ it { is_expected.to include(result_class.new(false, 'cannot write to storage', shard: 'default')) }
end
end
end
@@ -109,7 +109,7 @@ describe Gitlab::HealthChecks::FsShardsCheck do
it 'provides metrics' do
metrics = described_class.metrics
- expect(metrics).to all(have_attributes(labels: { shard: :default }))
+ expect(metrics).to all(have_attributes(labels: { shard: 'default' }))
expect(metrics).to include(an_object_having_attributes(name: :filesystem_accessible, value: 0))
expect(metrics).to include(an_object_having_attributes(name: :filesystem_readable, value: 0))
expect(metrics).to include(an_object_having_attributes(name: :filesystem_writable, value: 0))
@@ -128,7 +128,7 @@ describe Gitlab::HealthChecks::FsShardsCheck do
it 'provides metrics' do
metrics = described_class.metrics
- expect(metrics).to all(have_attributes(labels: { shard: :default }))
+ expect(metrics).to all(have_attributes(labels: { shard: 'default' }))
expect(metrics).to include(an_object_having_attributes(name: :filesystem_accessible, value: 1))
expect(metrics).to include(an_object_having_attributes(name: :filesystem_readable, value: 1))
expect(metrics).to include(an_object_having_attributes(name: :filesystem_writable, value: 1))
@@ -156,14 +156,14 @@ describe Gitlab::HealthChecks::FsShardsCheck do
describe '#readiness' do
subject { described_class.readiness }
- it { is_expected.to include(result_class.new(false, 'cannot stat storage', shard: :default)) }
+ it { is_expected.to include(result_class.new(false, 'cannot stat storage', shard: 'default')) }
end
describe '#metrics' do
it 'provides metrics' do
metrics = described_class.metrics
- expect(metrics).to all(have_attributes(labels: { shard: :default }))
+ expect(metrics).to all(have_attributes(labels: { shard: 'default' }))
expect(metrics).to include(an_object_having_attributes(name: :filesystem_accessible, value: 0))
expect(metrics).to include(an_object_having_attributes(name: :filesystem_readable, value: 0))
expect(metrics).to include(an_object_having_attributes(name: :filesystem_writable, value: 0))
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 3fb8edeb701..29baa70d5ae 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -147,6 +147,10 @@ deploy_keys:
- user
- deploy_keys_projects
- projects
+cluster:
+- project
+- user
+- service
services:
- project
- service_hook
@@ -177,6 +181,7 @@ project:
- tag_taggings
- tags
- chat_services
+- cluster
- creator
- group
- namespace
@@ -266,6 +271,10 @@ project:
- container_repositories
- uploads
- members_and_requesters
+- build_trace_section_names
+- root_of_fork_network
+- fork_network_member
+- fork_network
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/fork_spec.rb b/spec/lib/gitlab/import_export/fork_spec.rb
index c7fbc2bc92f..dd0ce0dae41 100644
--- a/spec/lib/gitlab/import_export/fork_spec.rb
+++ b/spec/lib/gitlab/import_export/fork_spec.rb
@@ -1,13 +1,15 @@
require 'spec_helper'
describe 'forked project import' do
+ include ProjectForksHelper
+
let(:user) { create(:user) }
let!(:project_with_repo) { create(:project, :repository, name: 'test-repo-restorer', path: 'test-repo-restorer') }
let!(:project) { create(:project, name: 'test-repo-restorer-no-repo', path: 'test-repo-restorer-no-repo') }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
let(:forked_from_project) { create(:project, :repository) }
- let(:fork_link) { create(:forked_project_link, forked_from_project: project_with_repo) }
+ let(:forked_project) { fork_project(project_with_repo, nil, repository: true) }
let(:repo_saver) { Gitlab::ImportExport::RepoSaver.new(project: project_with_repo, shared: shared) }
let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename) }
@@ -16,7 +18,7 @@ describe 'forked project import' do
end
let!(:merge_request) do
- create(:merge_request, source_project: fork_link.forked_to_project, target_project: project_with_repo)
+ create(:merge_request, source_project: forked_project, target_project: project_with_repo)
end
let(:saver) do
diff --git a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
index 4d87f27ce05..473ba40fae7 100644
--- a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
+++ b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
@@ -1,13 +1,14 @@
require 'spec_helper'
describe Gitlab::ImportExport::MergeRequestParser do
+ include ProjectForksHelper
+
let(:user) { create(:user) }
let!(:project) { create(:project, :repository, name: 'test-repo-restorer', path: 'test-repo-restorer') }
- let(:forked_from_project) { create(:project, :repository) }
- let(:fork_link) { create(:forked_project_link, forked_from_project: project) }
+ let(:forked_project) { fork_project(project) }
let!(:merge_request) do
- create(:merge_request, source_project: fork_link.forked_to_project, target_project: project)
+ create(:merge_request, source_project: forked_project, target_project: project)
end
let(:parsed_merge_request) do
diff --git a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
index 8e3554375e8..d9b86e1bf34 100644
--- a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
@@ -119,7 +119,7 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
it 'has no when YML attributes but only the DB column' do
allow_any_instance_of(Ci::Pipeline).to receive(:ci_yaml_file).and_return(File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')))
- expect_any_instance_of(Ci::GitlabCiYamlProcessor).not_to receive(:build_attributes)
+ expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
saved_project_json
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 96c70d1b675..6d4ee470b00 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -25,6 +25,7 @@ Issue:
- relative_position
- last_edited_at
- last_edited_by_id
+- discussion_locked
Event:
- id
- target_type
@@ -168,6 +169,7 @@ MergeRequest:
- last_edited_at
- last_edited_by_id
- head_pipeline_id
+- discussion_locked
MergeRequestDiff:
- id
- state
@@ -224,6 +226,7 @@ Ci::Pipeline:
- auto_canceled_by_id
- pipeline_schedule_id
- config_source
+- failure_reason
- protected
Ci::Stage:
- id
@@ -312,6 +315,32 @@ Ci::PipelineSchedule:
- deleted_at
- created_at
- updated_at
+Gcp::Cluster:
+- id
+- project_id
+- user_id
+- service_id
+- enabled
+- status
+- status_reason
+- project_namespace
+- endpoint
+- ca_cert
+- encrypted_kubernetes_token
+- encrypted_kubernetes_token_iv
+- username
+- encrypted_password
+- encrypted_password_iv
+- gcp_project_id
+- gcp_cluster_zone
+- gcp_cluster_name
+- gcp_cluster_size
+- gcp_machine_type
+- gcp_operation_id
+- encrypted_gcp_token
+- encrypted_gcp_token_iv
+- created_at
+- updated_at
DeployKey:
- id
- user_id
@@ -414,6 +443,8 @@ Project:
- last_repository_updated_at
- ci_config_path
- delete_error
+- merge_requests_ff_only_enabled
+- merge_requests_rebase_enabled
Author:
- name
ProjectFeature:
diff --git a/spec/lib/gitlab/ldap/auth_hash_spec.rb b/spec/lib/gitlab/ldap/auth_hash_spec.rb
index 8370adf9211..1785094af10 100644
--- a/spec/lib/gitlab/ldap/auth_hash_spec.rb
+++ b/spec/lib/gitlab/ldap/auth_hash_spec.rb
@@ -4,7 +4,7 @@ describe Gitlab::LDAP::AuthHash do
let(:auth_hash) do
described_class.new(
OmniAuth::AuthHash.new(
- uid: '123456',
+ uid: given_uid,
provider: 'ldapmain',
info: info,
extra: {
@@ -32,6 +32,8 @@ describe Gitlab::LDAP::AuthHash do
end
context "without overridden attributes" do
+ let(:given_uid) { 'uid=John Smith,ou=People,dc=example,dc=com' }
+
it "has the correct username" do
expect(auth_hash.username).to eq("123456")
end
@@ -42,6 +44,8 @@ describe Gitlab::LDAP::AuthHash do
end
context "with overridden attributes" do
+ let(:given_uid) { 'uid=John Smith,ou=People,dc=example,dc=com' }
+
let(:attributes) do
{
'username' => %w(mail email),
@@ -61,4 +65,22 @@ describe Gitlab::LDAP::AuthHash do
expect(auth_hash.name).to eq("John Smith")
end
end
+
+ describe '#uid' do
+ context 'when there is extraneous (but valid) whitespace' do
+ let(:given_uid) { 'uid =john smith , ou = people, dc= example,dc =com' }
+
+ it 'removes the extraneous whitespace' do
+ expect(auth_hash.uid).to eq('uid=john smith,ou=people,dc=example,dc=com')
+ end
+ end
+
+ context 'when there are upper case characters' do
+ let(:given_uid) { 'UID=John Smith,ou=People,dc=example,dc=com' }
+
+ it 'downcases' do
+ expect(auth_hash.uid).to eq('uid=john smith,ou=people,dc=example,dc=com')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ldap/dn_spec.rb b/spec/lib/gitlab/ldap/dn_spec.rb
new file mode 100644
index 00000000000..8e21ecdf9ab
--- /dev/null
+++ b/spec/lib/gitlab/ldap/dn_spec.rb
@@ -0,0 +1,224 @@
+require 'spec_helper'
+
+describe Gitlab::LDAP::DN do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#normalize_value' do
+ subject { described_class.normalize_value(given) }
+
+ it_behaves_like 'normalizes a DN attribute value'
+
+ context 'when the given DN is malformed' do
+ context 'when ending with a comma' do
+ let(:given) { 'John Smith,' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ end
+ end
+
+ context 'when given a BER encoded attribute value with a space in it' do
+ let(:given) { '#aa aa' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the end of an attribute value, but got \"a\"")
+ end
+ end
+
+ context 'when given a BER encoded attribute value with a non-hex character in it' do
+ let(:given) { '#aaXaaa' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the first character of a hex pair, but got \"X\"")
+ end
+ end
+
+ context 'when given a BER encoded attribute value with a non-hex character in it' do
+ let(:given) { '#aaaYaa' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the second character of a hex pair, but got \"Y\"")
+ end
+ end
+
+ context 'when given a hex pair with a non-hex character in it, inside double quotes' do
+ let(:given) { '"Sebasti\\cX\\a1n"' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the second character of a hex pair inside a double quoted value, but got \"X\"")
+ end
+ end
+
+ context 'with an open (as opposed to closed) double quote' do
+ let(:given) { '"James' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ end
+ end
+
+ context 'with an invalid escaped hex code' do
+ let(:given) { 'J\ames' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'Invalid escaped hex code "\am"')
+ end
+ end
+
+ context 'with a value ending with the escape character' do
+ let(:given) { 'foo\\' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ end
+ end
+ end
+ end
+
+ describe '#to_normalized_s' do
+ subject { described_class.new(given).to_normalized_s }
+
+ it_behaves_like 'normalizes a DN'
+
+ context 'when we do not support the given DN format' do
+ context 'multivalued RDNs' do
+ context 'without extraneous whitespace' do
+ let(:given) { 'uid=john smith+telephonenumber=+1 555-555-5555,ou=people,dc=example,dc=com' }
+
+ it 'raises UnsupportedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::UnsupportedError)
+ end
+ end
+
+ context 'with extraneous whitespace' do
+ context 'around the phone number plus sign' do
+ let(:given) { 'uid = John Smith + telephoneNumber = + 1 555-555-5555 , ou = People,dc=example,dc=com' }
+
+ it 'raises UnsupportedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::UnsupportedError)
+ end
+ end
+
+ context 'not around the phone number plus sign' do
+ let(:given) { 'uid = John Smith + telephoneNumber = +1 555-555-5555 , ou = People,dc=example,dc=com' }
+
+ it 'raises UnsupportedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::UnsupportedError)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when the given DN is malformed' do
+ context 'when ending with a comma' do
+ let(:given) { 'uid=John Smith,' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ end
+ end
+
+ context 'when given a BER encoded attribute value with a space in it' do
+ let(:given) { '0.9.2342.19200300.100.1.25=#aa aa' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the end of an attribute value, but got \"a\"")
+ end
+ end
+
+ context 'when given a BER encoded attribute value with a non-hex character in it' do
+ let(:given) { '0.9.2342.19200300.100.1.25=#aaXaaa' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the first character of a hex pair, but got \"X\"")
+ end
+ end
+
+ context 'when given a BER encoded attribute value with a non-hex character in it' do
+ let(:given) { '0.9.2342.19200300.100.1.25=#aaaYaa' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the second character of a hex pair, but got \"Y\"")
+ end
+ end
+
+ context 'when given a hex pair with a non-hex character in it, inside double quotes' do
+ let(:given) { 'uid="Sebasti\\cX\\a1n"' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, "Expected the second character of a hex pair inside a double quoted value, but got \"X\"")
+ end
+ end
+
+ context 'without a name value pair' do
+ let(:given) { 'John' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ end
+ end
+
+ context 'with an open (as opposed to closed) double quote' do
+ let(:given) { 'cn="James' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ end
+ end
+
+ context 'with an invalid escaped hex code' do
+ let(:given) { 'cn=J\ames' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'Invalid escaped hex code "\am"')
+ end
+ end
+
+ context 'with a value ending with the escape character' do
+ let(:given) { 'cn=\\' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'DN string ended unexpectedly')
+ end
+ end
+
+ context 'with an invalid OID attribute type name' do
+ let(:given) { '1.2.d=Value' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'Unrecognized RDN OID attribute type name character "d"')
+ end
+ end
+
+ context 'with a period in a non-OID attribute type name' do
+ let(:given) { 'd1.2=Value' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'Unrecognized RDN attribute type name character "."')
+ end
+ end
+
+ context 'when starting with non-space, non-alphanumeric character' do
+ let(:given) { ' -uid=John Smith' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'Unrecognized first character of an RDN attribute type name "-"')
+ end
+ end
+
+ context 'when given a UID with an escaped equal sign' do
+ let(:given) { 'uid\\=john' }
+
+ it 'raises MalformedError' do
+ expect { subject }.to raise_error(Gitlab::LDAP::DN::MalformedError, 'Unrecognized RDN attribute type name character "\\"')
+ end
+ end
+ end
+ end
+
+ def assert_generic_test(test_description, got, expected)
+ test_failure_message = "Failed test description: '#{test_description}'\n\n expected: \"#{expected}\"\n got: \"#{got}\""
+ expect(got).to eq(expected), test_failure_message
+ end
+end
diff --git a/spec/lib/gitlab/ldap/person_spec.rb b/spec/lib/gitlab/ldap/person_spec.rb
index 087c4d8c92c..d204050ef66 100644
--- a/spec/lib/gitlab/ldap/person_spec.rb
+++ b/spec/lib/gitlab/ldap/person_spec.rb
@@ -16,6 +16,34 @@ describe Gitlab::LDAP::Person do
)
end
+ describe '.normalize_dn' do
+ subject { described_class.normalize_dn(given) }
+
+ it_behaves_like 'normalizes a DN'
+
+ context 'with an exception during normalization' do
+ let(:given) { 'John "Smith,' } # just something that will cause an exception
+
+ it 'returns the given DN unmodified' do
+ expect(subject).to eq(given)
+ end
+ end
+ end
+
+ describe '.normalize_uid' do
+ subject { described_class.normalize_uid(given) }
+
+ it_behaves_like 'normalizes a DN attribute value'
+
+ context 'with an exception during normalization' do
+ let(:given) { 'John "Smith,' } # just something that will cause an exception
+
+ it 'returns the given UID unmodified' do
+ expect(subject).to eq(given)
+ end
+ end
+ end
+
describe '#name' do
it 'uses the configured name attribute and handles values as an array' do
name = 'John Doe'
@@ -43,4 +71,9 @@ describe Gitlab::LDAP::Person do
expect(person.email).to eq([user_principal_name])
end
end
+
+ def assert_generic_test(test_description, got, expected)
+ test_failure_message = "Failed test description: '#{test_description}'\n\n expected: #{expected}\n got: #{got}"
+ expect(got).to eq(expected), test_failure_message
+ end
end
diff --git a/spec/lib/gitlab/ldap/user_spec.rb b/spec/lib/gitlab/ldap/user_spec.rb
index 6a6e465cea2..9a4705d1cee 100644
--- a/spec/lib/gitlab/ldap/user_spec.rb
+++ b/spec/lib/gitlab/ldap/user_spec.rb
@@ -11,7 +11,7 @@ describe Gitlab::LDAP::User do
}
end
let(:auth_hash) do
- OmniAuth::AuthHash.new(uid: 'my-uid', provider: 'ldapmain', info: info)
+ OmniAuth::AuthHash.new(uid: 'uid=John Smith,ou=People,dc=example,dc=com', provider: 'ldapmain', info: info)
end
let(:ldap_user_upper_case) { described_class.new(auth_hash_upper_case) }
let(:info_upper_case) do
@@ -22,12 +22,12 @@ describe Gitlab::LDAP::User do
}
end
let(:auth_hash_upper_case) do
- OmniAuth::AuthHash.new(uid: 'my-uid', provider: 'ldapmain', info: info_upper_case)
+ OmniAuth::AuthHash.new(uid: 'uid=John Smith,ou=People,dc=example,dc=com', provider: 'ldapmain', info: info_upper_case)
end
describe '#changed?' do
it "marks existing ldap user as changed" do
- create(:omniauth_user, extern_uid: 'my-uid', provider: 'ldapmain')
+ create(:omniauth_user, extern_uid: 'uid=John Smith,ou=People,dc=example,dc=com', provider: 'ldapmain')
expect(ldap_user.changed?).to be_truthy
end
@@ -37,7 +37,7 @@ describe Gitlab::LDAP::User do
end
it "does not mark existing ldap user as changed" do
- create(:omniauth_user, email: 'john@example.com', extern_uid: 'my-uid', provider: 'ldapmain')
+ create(:omniauth_user, email: 'john@example.com', extern_uid: 'uid=john smith,ou=people,dc=example,dc=com', provider: 'ldapmain')
ldap_user.gl_user.user_synced_attributes_metadata(provider: 'ldapmain', email: true)
expect(ldap_user.changed?).to be_falsey
end
@@ -60,7 +60,7 @@ describe Gitlab::LDAP::User do
describe 'find or create' do
it "finds the user if already existing" do
- create(:omniauth_user, extern_uid: 'my-uid', provider: 'ldapmain')
+ create(:omniauth_user, extern_uid: 'uid=John Smith,ou=People,dc=example,dc=com', provider: 'ldapmain')
expect { ldap_user.save }.not_to change { User.count }
end
@@ -70,7 +70,7 @@ describe Gitlab::LDAP::User do
expect { ldap_user.save }.not_to change { User.count }
existing_user.reload
- expect(existing_user.ldap_identity.extern_uid).to eql 'my-uid'
+ expect(existing_user.ldap_identity.extern_uid).to eql 'uid=john smith,ou=people,dc=example,dc=com'
expect(existing_user.ldap_identity.provider).to eql 'ldapmain'
end
@@ -79,7 +79,7 @@ describe Gitlab::LDAP::User do
expect { ldap_user.save }.not_to change { User.count }
existing_user.reload
- expect(existing_user.ldap_identity.extern_uid).to eql 'my-uid'
+ expect(existing_user.ldap_identity.extern_uid).to eql 'uid=john smith,ou=people,dc=example,dc=com'
expect(existing_user.ldap_identity.provider).to eql 'ldapmain'
expect(existing_user.id).to eql ldap_user.gl_user.id
end
@@ -89,7 +89,7 @@ describe Gitlab::LDAP::User do
expect { ldap_user_upper_case.save }.not_to change { User.count }
existing_user.reload
- expect(existing_user.ldap_identity.extern_uid).to eql 'my-uid'
+ expect(existing_user.ldap_identity.extern_uid).to eql 'uid=john smith,ou=people,dc=example,dc=com'
expect(existing_user.ldap_identity.provider).to eql 'ldapmain'
expect(existing_user.id).to eql ldap_user.gl_user.id
end
diff --git a/spec/lib/gitlab/middleware/go_spec.rb b/spec/lib/gitlab/middleware/go_spec.rb
index 6af1564da19..cab662819ac 100644
--- a/spec/lib/gitlab/middleware/go_spec.rb
+++ b/spec/lib/gitlab/middleware/go_spec.rb
@@ -79,12 +79,28 @@ describe Gitlab::Middleware::Go do
it_behaves_like 'a nested project'
end
+ context 'with a subpackage that is not a valid project path' do
+ let(:path) { "#{project.full_path}/---subpackage" }
+
+ it_behaves_like 'a nested project'
+ end
+
context 'without subpackages' do
let(:path) { project.full_path }
it_behaves_like 'a nested project'
end
end
+
+ context 'with a bogus path' do
+ let(:path) { "http:;url=http:&sol;&sol;www.example.com'http-equiv='refresh'x='?go-get=1" }
+
+ it 'skips go-import generation' do
+ expect(app).to receive(:call).and_return('no-go')
+
+ go
+ end
+ end
end
def go
@@ -100,7 +116,7 @@ describe Gitlab::Middleware::Go do
def expect_response_with_path(response, path)
expect(response[0]).to eq(200)
expect(response[1]['Content-Type']).to eq('text/html')
- expected_body = "<!DOCTYPE html><html><head><meta content='#{Gitlab.config.gitlab.host}/#{path} git http://#{Gitlab.config.gitlab.host}/#{path}.git' name='go-import'></head></html>\n"
+ expected_body = %{<html><head><meta name="go-import" content="#{Gitlab.config.gitlab.host}/#{path} git http://#{Gitlab.config.gitlab.host}/#{path}.git" /></head></html>}
expect(response[2].body).to eq([expected_body])
end
end
diff --git a/spec/lib/gitlab/middleware/read_only_spec.rb b/spec/lib/gitlab/middleware/read_only_spec.rb
new file mode 100644
index 00000000000..742a792a1af
--- /dev/null
+++ b/spec/lib/gitlab/middleware/read_only_spec.rb
@@ -0,0 +1,142 @@
+require 'spec_helper'
+
+describe Gitlab::Middleware::ReadOnly do
+ include Rack::Test::Methods
+
+ RSpec::Matchers.define :be_a_redirect do
+ match do |response|
+ response.status == 301
+ end
+ end
+
+ RSpec::Matchers.define :disallow_request do
+ match do |middleware|
+ flash = middleware.send(:rack_flash)
+ flash['alert'] && flash['alert'].include?('You cannot do writing operations')
+ end
+ end
+
+ RSpec::Matchers.define :disallow_request_in_json do
+ match do |response|
+ json_response = JSON.parse(response.body)
+ response.body.include?('You cannot do writing operations') && json_response.key?('message')
+ end
+ end
+
+ let(:rack_stack) do
+ rack = Rack::Builder.new do
+ use ActionDispatch::Session::CacheStore
+ use ActionDispatch::Flash
+ use ActionDispatch::ParamsParser
+ end
+
+ rack.run(subject)
+ rack.to_app
+ end
+
+ subject { described_class.new(fake_app) }
+
+ let(:request) { Rack::MockRequest.new(rack_stack) }
+
+ context 'normal requests to a read-only Gitlab instance' do
+ let(:fake_app) { lambda { |env| [200, { 'Content-Type' => 'text/plain' }, ['OK']] } }
+
+ before do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+ end
+
+ it 'expects PATCH requests to be disallowed' do
+ response = request.patch('/test_request')
+
+ expect(response).to be_a_redirect
+ expect(subject).to disallow_request
+ end
+
+ it 'expects PUT requests to be disallowed' do
+ response = request.put('/test_request')
+
+ expect(response).to be_a_redirect
+ expect(subject).to disallow_request
+ end
+
+ it 'expects POST requests to be disallowed' do
+ response = request.post('/test_request')
+
+ expect(response).to be_a_redirect
+ expect(subject).to disallow_request
+ end
+
+    it 'expects an internal POST request to be allowed after a disallowed request' do
+ response = request.post('/test_request')
+
+ expect(response).to be_a_redirect
+
+ response = request.post("/api/#{API::API.version}/internal")
+
+ expect(response).not_to be_a_redirect
+ end
+
+ it 'expects DELETE requests to be disallowed' do
+ response = request.delete('/test_request')
+
+ expect(response).to be_a_redirect
+ expect(subject).to disallow_request
+ end
+
+ context 'whitelisted requests' do
+ it 'expects DELETE request to logout to be allowed' do
+ response = request.delete('/users/sign_out')
+
+ expect(response).not_to be_a_redirect
+ expect(subject).not_to disallow_request
+ end
+
+ it 'expects a POST internal request to be allowed' do
+ response = request.post("/api/#{API::API.version}/internal")
+
+ expect(response).not_to be_a_redirect
+ expect(subject).not_to disallow_request
+ end
+
+ it 'expects a POST LFS request to batch URL to be allowed' do
+ response = request.post('/root/rouge.git/info/lfs/objects/batch')
+
+ expect(response).not_to be_a_redirect
+ expect(subject).not_to disallow_request
+ end
+ end
+ end
+
+ context 'json requests to a read-only GitLab instance' do
+ let(:fake_app) { lambda { |env| [200, { 'Content-Type' => 'application/json' }, ['OK']] } }
+ let(:content_json) { { 'CONTENT_TYPE' => 'application/json' } }
+
+ before do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+ end
+
+ it 'expects PATCH requests to be disallowed' do
+ response = request.patch('/test_request', content_json)
+
+ expect(response).to disallow_request_in_json
+ end
+
+ it 'expects PUT requests to be disallowed' do
+ response = request.put('/test_request', content_json)
+
+ expect(response).to disallow_request_in_json
+ end
+
+ it 'expects POST requests to be disallowed' do
+ response = request.post('/test_request', content_json)
+
+ expect(response).to disallow_request_in_json
+ end
+
+ it 'expects DELETE requests to be disallowed' do
+ response = request.delete('/test_request', content_json)
+
+ expect(response).to disallow_request_in_json
+ end
+ end
+end
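
Note: these examples drive a middleware that rejects write requests while the database is read-only, except for a small whitelist (sign-out, the internal API, the LFS batch endpoint). A reduced sketch of just that decision, with hypothetical names; the real Gitlab::Middleware::ReadOnly also sets a flash alert, renders JSON errors and redirects.

# Sketch of the allow/deny decision exercised above.
module ReadOnlyCheckSketch
  WRITE_METHODS = %w(POST PUT PATCH DELETE).freeze
  WHITELISTED_ROUTES = [
    %r{\A/users/sign_out\z},
    %r{\A/api/v\d+/internal},
    %r{/info/lfs/objects/batch\z}
  ].freeze

  module_function

  # request is a Rack::Request; read_only mirrors Gitlab::Database.read_only?
  def disallowed?(request, read_only:)
    return false unless read_only
    return false unless WRITE_METHODS.include?(request.request_method)

    WHITELISTED_ROUTES.none? { |route| route.match?(request.path_info) }
  end
end
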
diff --git a/spec/lib/gitlab/o_auth/auth_hash_spec.rb b/spec/lib/gitlab/o_auth/auth_hash_spec.rb
index d5f4da3ce36..dbcc200b90b 100644
--- a/spec/lib/gitlab/o_auth/auth_hash_spec.rb
+++ b/spec/lib/gitlab/o_auth/auth_hash_spec.rb
@@ -1,10 +1,11 @@
require 'spec_helper'
describe Gitlab::OAuth::AuthHash do
+ let(:provider) { 'ldap'.freeze }
let(:auth_hash) do
described_class.new(
OmniAuth::AuthHash.new(
- provider: provider_ascii,
+ provider: provider,
uid: uid_ascii,
info: info_hash
)
@@ -20,7 +21,6 @@ describe Gitlab::OAuth::AuthHash do
let(:last_name_raw) { "K\xC3\xBC\xC3\xA7\xC3\xBCk" }
let(:name_raw) { "Onur K\xC3\xBC\xC3\xA7\xC3\xBCk" }
- let(:provider_ascii) { 'ldap'.force_encoding(Encoding::ASCII_8BIT) }
let(:uid_ascii) { uid_raw.force_encoding(Encoding::ASCII_8BIT) }
let(:email_ascii) { email_raw.force_encoding(Encoding::ASCII_8BIT) }
let(:nickname_ascii) { nickname_raw.force_encoding(Encoding::ASCII_8BIT) }
@@ -28,7 +28,6 @@ describe Gitlab::OAuth::AuthHash do
let(:last_name_ascii) { last_name_raw.force_encoding(Encoding::ASCII_8BIT) }
let(:name_ascii) { name_raw.force_encoding(Encoding::ASCII_8BIT) }
- let(:provider_utf8) { provider_ascii.force_encoding(Encoding::UTF_8) }
let(:uid_utf8) { uid_ascii.force_encoding(Encoding::UTF_8) }
let(:email_utf8) { email_ascii.force_encoding(Encoding::UTF_8) }
let(:nickname_utf8) { nickname_ascii.force_encoding(Encoding::UTF_8) }
@@ -46,7 +45,7 @@ describe Gitlab::OAuth::AuthHash do
end
context 'defaults' do
- it { expect(auth_hash.provider).to eql provider_utf8 }
+ it { expect(auth_hash.provider).to eq provider }
it { expect(auth_hash.uid).to eql uid_utf8 }
it { expect(auth_hash.email).to eql email_utf8 }
it { expect(auth_hash.username).to eql nickname_utf8 }
diff --git a/spec/lib/gitlab/o_auth/user_spec.rb b/spec/lib/gitlab/o_auth/user_spec.rb
index 8aaf320cbf5..db26e16e3b2 100644
--- a/spec/lib/gitlab/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/o_auth/user_spec.rb
@@ -4,6 +4,7 @@ describe Gitlab::OAuth::User do
let(:oauth_user) { described_class.new(auth_hash) }
let(:gl_user) { oauth_user.gl_user }
let(:uid) { 'my-uid' }
+ let(:dn) { 'uid=user1,ou=People,dc=example' }
let(:provider) { 'my-provider' }
let(:auth_hash) { OmniAuth::AuthHash.new(uid: uid, provider: provider, info: info_hash) }
let(:info_hash) do
@@ -197,7 +198,7 @@ describe Gitlab::OAuth::User do
allow(ldap_user).to receive(:uid) { uid }
allow(ldap_user).to receive(:username) { uid }
allow(ldap_user).to receive(:email) { ['johndoe@example.com', 'john2@example.com'] }
- allow(ldap_user).to receive(:dn) { 'uid=user1,ou=People,dc=example' }
+ allow(ldap_user).to receive(:dn) { dn }
end
context "and no account for the LDAP user" do
@@ -213,7 +214,7 @@ describe Gitlab::OAuth::User do
identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
expect(identities_as_hash).to match_array(
[
- { provider: 'ldapmain', extern_uid: 'uid=user1,ou=People,dc=example' },
+ { provider: 'ldapmain', extern_uid: dn },
{ provider: 'twitter', extern_uid: uid }
]
)
@@ -221,7 +222,7 @@ describe Gitlab::OAuth::User do
end
context "and LDAP user has an account already" do
- let!(:existing_user) { create(:omniauth_user, email: 'john@example.com', extern_uid: 'uid=user1,ou=People,dc=example', provider: 'ldapmain', username: 'john') }
+ let!(:existing_user) { create(:omniauth_user, email: 'john@example.com', extern_uid: dn, provider: 'ldapmain', username: 'john') }
it "adds the omniauth identity to the LDAP account" do
allow(Gitlab::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user)
@@ -234,7 +235,7 @@ describe Gitlab::OAuth::User do
identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
expect(identities_as_hash).to match_array(
[
- { provider: 'ldapmain', extern_uid: 'uid=user1,ou=People,dc=example' },
+ { provider: 'ldapmain', extern_uid: dn },
{ provider: 'twitter', extern_uid: uid }
]
)
@@ -252,7 +253,7 @@ describe Gitlab::OAuth::User do
expect(identities_as_hash)
.to match_array(
[
- { provider: 'ldapmain', extern_uid: 'uid=user1,ou=People,dc=example' },
+ { provider: 'ldapmain', extern_uid: dn },
{ provider: 'twitter', extern_uid: uid }
]
)
@@ -310,8 +311,8 @@ describe Gitlab::OAuth::User do
allow(ldap_user).to receive(:uid) { uid }
allow(ldap_user).to receive(:username) { uid }
allow(ldap_user).to receive(:email) { ['johndoe@example.com', 'john2@example.com'] }
- allow(ldap_user).to receive(:dn) { 'uid=user1,ou=People,dc=example' }
- allow(oauth_user).to receive(:ldap_person).and_return(ldap_user)
+ allow(ldap_user).to receive(:dn) { dn }
+ allow(Gitlab::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user)
end
context "and no account for the LDAP user" do
@@ -341,7 +342,7 @@ describe Gitlab::OAuth::User do
end
context 'and LDAP user has an account already' do
- let!(:existing_user) { create(:omniauth_user, email: 'john@example.com', extern_uid: 'uid=user1,ou=People,dc=example', provider: 'ldapmain', username: 'john') }
+ let!(:existing_user) { create(:omniauth_user, email: 'john@example.com', extern_uid: dn, provider: 'ldapmain', username: 'john') }
context 'dont block on create (LDAP)' do
before do
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index 2f989397f7e..1f1c48ee9b5 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -84,9 +84,9 @@ describe Gitlab::PathRegex do
let(:top_level_words) do
words = routes_not_starting_in_wildcard.map do |route|
route.split('/')[1]
- end.compact.uniq
+ end.compact
- words + ee_top_level_words + files_in_public + Array(API::API.prefix.to_s)
+ (words + ee_top_level_words + files_in_public + Array(API::API.prefix.to_s)).uniq
end
let(:ee_top_level_words) do
@@ -95,10 +95,11 @@ describe Gitlab::PathRegex do
let(:files_in_public) do
git = Gitlab.config.git.bin_path
- `cd #{Rails.root} && #{git} ls-files public`
+ tracked = `cd #{Rails.root} && #{git} ls-files public`
.split("\n")
.map { |entry| entry.gsub('public/', '') }
.uniq
+ tracked + %w(assets uploads)
end
# All routes that start with a namespaced path, that have 1 or more
diff --git a/spec/lib/gitlab/popen_spec.rb b/spec/lib/gitlab/popen_spec.rb
index 4567f220c11..b145ca36f26 100644
--- a/spec/lib/gitlab/popen_spec.rb
+++ b/spec/lib/gitlab/popen_spec.rb
@@ -14,7 +14,7 @@ describe 'Gitlab::Popen' do
end
it { expect(@status).to be_zero }
- it { expect(@output).to include('cache') }
+ it { expect(@output).to include('tests') }
end
context 'non-zero status' do
diff --git a/spec/lib/gitlab/saml/user_spec.rb b/spec/lib/gitlab/saml/user_spec.rb
index 19710029224..59923bfb14d 100644
--- a/spec/lib/gitlab/saml/user_spec.rb
+++ b/spec/lib/gitlab/saml/user_spec.rb
@@ -1,9 +1,12 @@
require 'spec_helper'
describe Gitlab::Saml::User do
+ include LdapHelpers
+
let(:saml_user) { described_class.new(auth_hash) }
let(:gl_user) { saml_user.gl_user }
let(:uid) { 'my-uid' }
+ let(:dn) { 'uid=user1,ou=People,dc=example' }
let(:provider) { 'saml' }
let(:auth_hash) { OmniAuth::AuthHash.new(uid: uid, provider: provider, info: info_hash, extra: { raw_info: OneLogin::RubySaml::Attributes.new({ 'groups' => %w(Developers Freelancers Designers) }) }) }
let(:info_hash) do
@@ -163,13 +166,17 @@ describe Gitlab::Saml::User do
end
context 'and a corresponding LDAP person' do
+ let(:adapter) { ldap_adapter('ldapmain') }
+
before do
allow(ldap_user).to receive(:uid) { uid }
allow(ldap_user).to receive(:username) { uid }
allow(ldap_user).to receive(:email) { %w(john@mail.com john2@example.com) }
- allow(ldap_user).to receive(:dn) { 'uid=user1,ou=People,dc=example' }
- allow(Gitlab::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user)
- allow(Gitlab::LDAP::Person).to receive(:find_by_dn).and_return(ldap_user)
+ allow(ldap_user).to receive(:dn) { dn }
+ allow(Gitlab::LDAP::Adapter).to receive(:new).and_return(adapter)
+ allow(Gitlab::LDAP::Person).to receive(:find_by_uid).with(uid, adapter).and_return(ldap_user)
+ allow(Gitlab::LDAP::Person).to receive(:find_by_dn).with(dn, adapter).and_return(ldap_user)
+ allow(Gitlab::LDAP::Person).to receive(:find_by_email).with('john@mail.com', adapter).and_return(ldap_user)
end
context 'and no account for the LDAP user' do
@@ -181,20 +188,86 @@ describe Gitlab::Saml::User do
expect(gl_user.email).to eql 'john@mail.com'
expect(gl_user.identities.length).to be 2
identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
- expect(identities_as_hash).to match_array([{ provider: 'ldapmain', extern_uid: 'uid=user1,ou=People,dc=example' },
+ expect(identities_as_hash).to match_array([{ provider: 'ldapmain', extern_uid: dn },
{ provider: 'saml', extern_uid: uid }])
end
end
context 'and LDAP user has an account already' do
+ let(:auth_hash_base_attributes) do
+ {
+ uid: uid,
+ provider: provider,
+ info: info_hash,
+ extra: {
+ raw_info: OneLogin::RubySaml::Attributes.new(
+ { 'groups' => %w(Developers Freelancers Designers) }
+ )
+ }
+ }
+ end
+ let(:auth_hash) { OmniAuth::AuthHash.new(auth_hash_base_attributes) }
+ let(:uid_types) { %w(uid dn email) }
+
before do
create(:omniauth_user,
email: 'john@mail.com',
- extern_uid: 'uid=user1,ou=People,dc=example',
+ extern_uid: dn,
provider: 'ldapmain',
username: 'john')
end
+ shared_examples 'find LDAP person' do |uid_type, uid|
+ let(:auth_hash) { OmniAuth::AuthHash.new(auth_hash_base_attributes.merge(uid: extern_uid)) }
+
+ before do
+ nil_types = uid_types - [uid_type]
+
+ nil_types.each do |type|
+ allow(Gitlab::LDAP::Person).to receive(:"find_by_#{type}").and_return(nil)
+ end
+
+ allow(Gitlab::LDAP::Person).to receive(:"find_by_#{uid_type}").and_return(ldap_user)
+ end
+
+ it 'adds the omniauth identity to the LDAP account' do
+ identities = [
+ { provider: 'ldapmain', extern_uid: dn },
+ { provider: 'saml', extern_uid: extern_uid }
+ ]
+
+ identities_as_hash = gl_user.identities.map do |id|
+ { provider: id.provider, extern_uid: id.extern_uid }
+ end
+
+ saml_user.save
+
+ expect(gl_user).to be_valid
+ expect(gl_user.username).to eql 'john'
+ expect(gl_user.email).to eql 'john@mail.com'
+ expect(gl_user.identities.length).to be 2
+ expect(identities_as_hash).to match_array(identities)
+ end
+ end
+
+      context 'when uid is a uid' do
+ it_behaves_like 'find LDAP person', 'uid' do
+ let(:extern_uid) { uid }
+ end
+ end
+
+ context 'when uid is a dn' do
+ it_behaves_like 'find LDAP person', 'dn' do
+ let(:extern_uid) { dn }
+ end
+ end
+
+ context 'when uid is an email' do
+ it_behaves_like 'find LDAP person', 'email' do
+ let(:extern_uid) { 'john@mail.com' }
+ end
+ end
+
it 'adds the omniauth identity to the LDAP account' do
saml_user.save
@@ -203,7 +276,7 @@ describe Gitlab::Saml::User do
expect(gl_user.email).to eql 'john@mail.com'
expect(gl_user.identities.length).to be 2
identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
- expect(identities_as_hash).to match_array([{ provider: 'ldapmain', extern_uid: 'uid=user1,ou=People,dc=example' },
+ expect(identities_as_hash).to match_array([{ provider: 'ldapmain', extern_uid: dn },
{ provider: 'saml', extern_uid: uid }])
end
@@ -219,17 +292,21 @@ describe Gitlab::Saml::User do
context 'user has SAML user, and wants to add their LDAP identity' do
it 'adds the LDAP identity to the existing SAML user' do
- create(:omniauth_user, email: 'john@mail.com', extern_uid: 'uid=user1,ou=People,dc=example', provider: 'saml', username: 'john')
- local_hash = OmniAuth::AuthHash.new(uid: 'uid=user1,ou=People,dc=example', provider: provider, info: info_hash)
+ create(:omniauth_user, email: 'john@mail.com', extern_uid: dn, provider: 'saml', username: 'john')
+
+ allow(Gitlab::LDAP::Person).to receive(:find_by_uid).with(dn, adapter).and_return(ldap_user)
+
+ local_hash = OmniAuth::AuthHash.new(uid: dn, provider: provider, info: info_hash)
local_saml_user = described_class.new(local_hash)
+
local_saml_user.save
local_gl_user = local_saml_user.gl_user
expect(local_gl_user).to be_valid
expect(local_gl_user.identities.length).to be 2
identities_as_hash = local_gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
- expect(identities_as_hash).to match_array([{ provider: 'ldapmain', extern_uid: 'uid=user1,ou=People,dc=example' },
- { provider: 'saml', extern_uid: 'uid=user1,ou=People,dc=example' }])
+ expect(identities_as_hash).to match_array([{ provider: 'ldapmain', extern_uid: dn },
+ { provider: 'saml', extern_uid: dn }])
end
end
end
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index 4c5efbde69a..e44a7c23452 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Gitlab::SearchResults do
+ include ProjectForksHelper
+
let(:user) { create(:user) }
let!(:project) { create(:project, name: 'foo') }
let!(:issue) { create(:issue, project: project, title: 'foo') }
@@ -42,7 +44,7 @@ describe Gitlab::SearchResults do
end
it 'includes merge requests from source and target projects' do
- forked_project = create(:project, forked_from_project: project)
+ forked_project = fork_project(project, user)
merge_request_2 = create(:merge_request, target_project: project, source_project: forked_project, title: 'foo')
results = described_class.new(user, Project.where(id: forked_project.id), 'foo')
diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb
index c7930378240..2158b2837e2 100644
--- a/spec/lib/gitlab/shell_spec.rb
+++ b/spec/lib/gitlab/shell_spec.rb
@@ -15,10 +15,6 @@ describe Gitlab::Shell do
it { is_expected.to respond_to :add_repository }
it { is_expected.to respond_to :remove_repository }
it { is_expected.to respond_to :fork_repository }
- it { is_expected.to respond_to :add_namespace }
- it { is_expected.to respond_to :rm_namespace }
- it { is_expected.to respond_to :mv_namespace }
- it { is_expected.to respond_to :exists? }
it { expect(gitlab_shell.url_to_repo('diaspora')).to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + "diaspora.git") }
@@ -48,14 +44,35 @@ describe Gitlab::Shell do
end
end
- describe '#add_key' do
- it 'removes trailing garbage' do
- allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path)
- expect(gitlab_shell).to receive(:gitlab_shell_fast_execute).with(
- [:gitlab_shell_keys_path, 'add-key', 'key-123', 'ssh-rsa foobar']
- )
+ describe 'projects commands' do
+ let(:gitlab_shell_path) { File.expand_path('tmp/tests/gitlab-shell') }
+ let(:projects_path) { File.join(gitlab_shell_path, 'bin/gitlab-projects') }
+ let(:gitlab_shell_hooks_path) { File.join(gitlab_shell_path, 'hooks') }
+
+ before do
+ allow(Gitlab.config.gitlab_shell).to receive(:path).and_return(gitlab_shell_path)
+ allow(Gitlab.config.gitlab_shell).to receive(:hooks_path).and_return(gitlab_shell_hooks_path)
+ allow(Gitlab.config.gitlab_shell).to receive(:git_timeout).and_return(800)
+ end
+
+ describe '#mv_repository' do
+ it 'executes the command' do
+ expect(gitlab_shell).to receive(:gitlab_shell_fast_execute).with(
+ [projects_path, 'mv-project', 'storage/path', 'project/path.git', 'new/path.git']
+ )
+ gitlab_shell.mv_repository('storage/path', 'project/path', 'new/path')
+ end
+ end
+
+ describe '#add_key' do
+ it 'removes trailing garbage' do
+ allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path)
+ expect(gitlab_shell).to receive(:gitlab_shell_fast_execute).with(
+ [:gitlab_shell_keys_path, 'add-key', 'key-123', 'ssh-rsa foobar']
+ )
- gitlab_shell.add_key('key-123', 'ssh-rsa foobar trailing garbage')
+ gitlab_shell.add_key('key-123', 'ssh-rsa foobar trailing garbage')
+ end
end
end
@@ -105,30 +122,42 @@ describe Gitlab::Shell do
end
describe '#add_repository' do
- it 'creates a repository' do
- created_path = File.join(TestEnv.repos_path, 'project', 'path.git')
- hooks_path = File.join(created_path, 'hooks')
-
- begin
- result = gitlab_shell.add_repository(TestEnv.repos_path, 'project/path')
+ shared_examples '#add_repository' do
+ let(:repository_storage) { 'default' }
+ let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] }
+ let(:repo_name) { 'project/path' }
+ let(:created_path) { File.join(repository_storage_path, repo_name + '.git') }
- repo_stat = File.stat(created_path) rescue nil
- hooks_stat = File.lstat(hooks_path) rescue nil
- hooks_dir = File.realpath(hooks_path)
- ensure
+ after do
FileUtils.rm_rf(created_path)
end
- expect(result).to be_truthy
- expect(repo_stat.mode & 0o777).to eq(0o770)
- expect(hooks_stat.symlink?).to be_truthy
- expect(hooks_dir).to eq(gitlab_shell_hooks_path)
+ it 'creates a repository' do
+ expect(gitlab_shell.add_repository(repository_storage, repo_name)).to be_truthy
+
+ expect(File.stat(created_path).mode & 0o777).to eq(0o770)
+
+ hooks_path = File.join(created_path, 'hooks')
+ expect(File.lstat(hooks_path)).to be_symlink
+ expect(File.realpath(hooks_path)).to eq(gitlab_shell_hooks_path)
+ end
+
+ it 'returns false when the command fails' do
+ FileUtils.mkdir_p(File.dirname(created_path))
+ # This file will block the creation of the repo's .git directory. That
+ # should cause #add_repository to fail.
+ FileUtils.touch(created_path)
+
+ expect(gitlab_shell.add_repository(repository_storage, repo_name)).to be_falsy
+ end
end
- it 'returns false when the command fails' do
- expect(FileUtils).to receive(:mkdir_p).and_raise(Errno::EEXIST)
+ context 'with gitaly' do
+ it_behaves_like '#add_repository'
+ end
- expect(gitlab_shell.add_repository('current/storage', 'project/path')).to be_falsy
+ context 'without gitaly', :skip_gitaly_mock do
+ it_behaves_like '#add_repository'
end
end
@@ -136,7 +165,7 @@ describe Gitlab::Shell do
it 'returns true when the command succeeds' do
expect(Gitlab::Popen).to receive(:popen)
.with([projects_path, 'rm-project', 'current/storage', 'project/path.git'],
- nil, popen_vars).and_return([nil, 0])
+ nil, popen_vars).and_return([nil, 0])
expect(gitlab_shell.remove_repository('current/storage', 'project/path')).to be true
end
@@ -144,7 +173,7 @@ describe Gitlab::Shell do
it 'returns false when the command fails' do
expect(Gitlab::Popen).to receive(:popen)
.with([projects_path, 'rm-project', 'current/storage', 'project/path.git'],
- nil, popen_vars).and_return(["error", 1])
+ nil, popen_vars).and_return(["error", 1])
expect(gitlab_shell.remove_repository('current/storage', 'project/path')).to be false
end
@@ -304,7 +333,7 @@ describe Gitlab::Shell do
end
end
- describe '#fetch_remote local', skip_gitaly_mock: true do
+ describe '#fetch_remote local', :skip_gitaly_mock do
it_should_behave_like 'fetch_remote', false
end
@@ -330,4 +359,52 @@ describe Gitlab::Shell do
end
end
end
+
+ describe 'namespace actions' do
+ subject { described_class.new }
+ let(:storage_path) { Gitlab.config.repositories.storages.default.path }
+
+ describe '#add_namespace' do
+ it 'creates a namespace' do
+ subject.add_namespace(storage_path, "mepmep")
+
+ expect(subject.exists?(storage_path, "mepmep")).to be(true)
+ end
+ end
+
+ describe '#exists?' do
+ context 'when the namespace does not exist' do
+ it 'returns false' do
+ expect(subject.exists?(storage_path, "non-existing")).to be(false)
+ end
+ end
+
+ context 'when the namespace exists' do
+ it 'returns true' do
+ subject.add_namespace(storage_path, "mepmep")
+
+ expect(subject.exists?(storage_path, "mepmep")).to be(true)
+ end
+ end
+ end
+
+ describe '#remove' do
+ it 'removes the namespace' do
+ subject.add_namespace(storage_path, "mepmep")
+ subject.rm_namespace(storage_path, "mepmep")
+
+ expect(subject.exists?(storage_path, "mepmep")).to be(false)
+ end
+ end
+
+ describe '#mv_namespace' do
+ it 'renames the namespace' do
+ subject.add_namespace(storage_path, "mepmep")
+ subject.mv_namespace(storage_path, "mepmep", "2mep")
+
+ expect(subject.exists?(storage_path, "mepmep")).to be(false)
+ expect(subject.exists?(storage_path, "2mep")).to be(true)
+ end
+ end
+ end
end
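
Note: the namespace examples above only assert observable behaviour: after add_namespace the directory exists, after rm_namespace it does not, and mv_namespace renames it. A filesystem-only sketch of that behaviour follows (hypothetical module; the real Gitlab::Shell delegates these operations to gitlab-shell).

require 'fileutils'

# Illustration of the behaviour the namespace specs assert, using plain
# directories under a storage path. Not the real implementation.
module NamespaceActionsSketch
  module_function

  def add_namespace(storage_path, name)
    FileUtils.mkdir_p(File.join(storage_path, name), mode: 0o770)
  end

  def exists?(storage_path, name)
    File.directory?(File.join(storage_path, name))
  end

  def rm_namespace(storage_path, name)
    FileUtils.rm_rf(File.join(storage_path, name))
  end

  def mv_namespace(storage_path, old_name, new_name)
    return false if exists?(storage_path, new_name) || !exists?(storage_path, old_name)

    FileUtils.mv(File.join(storage_path, old_name), File.join(storage_path, new_name))
    true
  end
end
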
diff --git a/spec/lib/gitlab/sql/union_spec.rb b/spec/lib/gitlab/sql/union_spec.rb
index baf8f6644bf..8026fba9f0a 100644
--- a/spec/lib/gitlab/sql/union_spec.rb
+++ b/spec/lib/gitlab/sql/union_spec.rb
@@ -22,5 +22,12 @@ describe Gitlab::SQL::Union do
expect {User.where("users.id IN (#{union.to_sql})").to_a}.not_to raise_error
expect(union.to_sql).to eq("#{to_sql(relation_1)}\nUNION\n#{to_sql(relation_2)}")
end
+
+ it 'uses UNION ALL when removing duplicates is disabled' do
+ union = described_class
+ .new([relation_1, relation_2], remove_duplicates: false)
+
+ expect(union.to_sql).to include('UNION ALL')
+ end
end
end
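
Note: the new example only checks that remove_duplicates: false switches the generated SQL from UNION to UNION ALL. A stripped-down sketch of how such a flag can pick the keyword (hypothetical class working on raw SQL strings; the real Gitlab::SQL::Union builds its fragments from ActiveRecord relations):

# Sketch: choose UNION or UNION ALL based on a remove_duplicates flag.
class UnionSketch
  def initialize(fragments, remove_duplicates: true)
    @fragments = fragments
    @remove_duplicates = remove_duplicates
  end

  def to_sql
    keyword = @remove_duplicates ? 'UNION' : 'UNION ALL'
    @fragments.join("\n#{keyword}\n")
  end
end

UnionSketch.new(['SELECT 1', 'SELECT 2'], remove_duplicates: false).to_sql
# => "SELECT 1\nUNION ALL\nSELECT 2"
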
diff --git a/spec/lib/gitlab/url_sanitizer_spec.rb b/spec/lib/gitlab/url_sanitizer_spec.rb
index fdc3990132a..fc8991fd31f 100644
--- a/spec/lib/gitlab/url_sanitizer_spec.rb
+++ b/spec/lib/gitlab/url_sanitizer_spec.rb
@@ -39,7 +39,8 @@ describe Gitlab::UrlSanitizer do
false | nil
false | ''
false | '123://invalid:url'
- true | 'valid@project:url.git'
+ false | 'valid@project:url.git'
+ false | 'valid:pass@project:url.git'
true | 'ssh://example.com'
true | 'ssh://:@example.com'
true | 'ssh://foo@example.com'
@@ -81,24 +82,6 @@ describe Gitlab::UrlSanitizer do
describe '#credentials' do
context 'credentials in hash' do
- where(:input, :output) do
- { user: 'foo', password: 'bar' } | { user: 'foo', password: 'bar' }
- { user: 'foo', password: '' } | { user: 'foo', password: nil }
- { user: 'foo', password: nil } | { user: 'foo', password: nil }
- { user: '', password: 'bar' } | { user: nil, password: 'bar' }
- { user: '', password: '' } | { user: nil, password: nil }
- { user: '', password: nil } | { user: nil, password: nil }
- { user: nil, password: 'bar' } | { user: nil, password: 'bar' }
- { user: nil, password: '' } | { user: nil, password: nil }
- { user: nil, password: nil } | { user: nil, password: nil }
- end
-
- with_them do
- subject { described_class.new('user@example.com:path.git', credentials: input).credentials }
-
- it { is_expected.to eq(output) }
- end
-
it 'overrides URL-provided credentials' do
sanitizer = described_class.new('http://a:b@example.com', credentials: { user: 'c', password: 'd' })
@@ -116,10 +99,6 @@ describe Gitlab::UrlSanitizer do
'http://@example.com' | { user: nil, password: nil }
'http://example.com' | { user: nil, password: nil }
- # Credentials from SCP-style URLs are not supported at present
- 'foo@example.com:path' | { user: nil, password: nil }
- 'foo:bar@example.com:path' | { user: nil, password: nil }
-
# Other invalid URLs
nil | { user: nil, password: nil }
'' | { user: nil, password: nil }
@@ -174,4 +153,13 @@ describe Gitlab::UrlSanitizer do
end
end
end
+
+ context 'when credentials contains special chars' do
+    it 'parses the URL without errors' do
+ url_sanitizer = described_class.new("https://foo:b?r@github.com/me/project.git")
+
+ expect(url_sanitizer.sanitized_url).to eq("https://github.com/me/project.git")
+ expect(url_sanitizer.full_url).to eq("https://foo:b?r@github.com/me/project.git")
+ end
+ end
end
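
Note: the added context expects credentials to be stripped from sanitized_url while full_url keeps them, even when the password contains characters such as '?'. A small sketch of credential stripping with Ruby's URI (hypothetical helper; the real Gitlab::UrlSanitizer also copes with SCP-style URLs, a credentials: override, and passwords that would not survive URI.parse as-is):

require 'uri'

# Strip userinfo from an already URI-parseable URL. The 'b?r' password in the
# spec above is not URI-legal on its own, which is one reason the real
# sanitizer does more work than this sketch.
def sanitized_url_sketch(url)
  uri = URI.parse(url)
  uri.userinfo = ''
  uri.to_s
end

sanitized_url_sketch('https://foo:bar@github.com/me/project.git')
# => "https://github.com/me/project.git"
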
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 68429d792f2..777e9c8e21d 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -26,6 +26,16 @@ describe Gitlab::UsageData do
version
uuid
hostname
+ signup
+ ldap
+ gravatar
+ omniauth
+ reply_by_email
+ container_registry
+ gitlab_pages
+ gitlab_shared_runners
+ git
+ database
))
end
@@ -40,12 +50,17 @@ describe Gitlab::UsageData do
ci_builds
ci_internal_pipelines
ci_external_pipelines
+ ci_pipeline_config_auto_devops
+ ci_pipeline_config_repository
ci_runners
ci_triggers
ci_pipeline_schedules
+ auto_devops_enabled
+ auto_devops_disabled
deploy_keys
deployments
environments
+ gcp_clusters
in_review_folder
groups
issues
@@ -82,6 +97,32 @@ describe Gitlab::UsageData do
end
end
+ describe '#features_usage_data_ce' do
+ subject { described_class.features_usage_data_ce }
+
+ it 'gathers feature usage data' do
+ expect(subject[:signup]).to eq(current_application_settings.signup_enabled?)
+ expect(subject[:ldap]).to eq(Gitlab.config.ldap.enabled)
+ expect(subject[:gravatar]).to eq(current_application_settings.gravatar_enabled?)
+ expect(subject[:omniauth]).to eq(Gitlab.config.omniauth.enabled)
+ expect(subject[:reply_by_email]).to eq(Gitlab::IncomingEmail.enabled?)
+ expect(subject[:container_registry]).to eq(Gitlab.config.registry.enabled)
+ expect(subject[:gitlab_shared_runners]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled)
+ end
+ end
+
+ describe '#components_usage_data' do
+ subject { described_class.components_usage_data }
+
+ it 'gathers components usage data' do
+ expect(subject[:gitlab_pages][:enabled]).to eq(Gitlab.config.pages.enabled)
+ expect(subject[:gitlab_pages][:version]).to eq(Gitlab::Pages::VERSION)
+ expect(subject[:git][:version]).to eq(Gitlab::Git.version)
+ expect(subject[:database][:adapter]).to eq(Gitlab::Database.adapter_name)
+ expect(subject[:database][:version]).to eq(Gitlab::Database.version)
+ end
+ end
+
describe '#license_usage_data' do
subject { described_class.license_usage_data }
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index 699184ad9fe..9230d58012f 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -13,13 +13,51 @@ describe Gitlab::Workhorse do
end
describe ".send_git_archive" do
+ let(:ref) { 'master' }
+ let(:format) { 'zip' }
+ let(:storage_path) { Gitlab.config.gitlab.repository_downloads_path }
+ let(:base_params) { repository.archive_metadata(ref, storage_path, format) }
+ let(:gitaly_params) do
+ base_params.merge(
+ 'GitalyServer' => {
+ 'address' => Gitlab::GitalyClient.address(project.repository_storage),
+ 'token' => Gitlab::GitalyClient.token(project.repository_storage)
+ },
+ 'GitalyRepository' => repository.gitaly_repository.to_h.deep_stringify_keys
+ )
+ end
+
+ subject do
+ described_class.send_git_archive(repository, ref: ref, format: format)
+ end
+
+ context 'when Gitaly workhorse_archive feature is enabled' do
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(subject)
+
+ expect(key).to eq('Gitlab-Workhorse-Send-Data')
+ expect(command).to eq('git-archive')
+ expect(params).to include(gitaly_params)
+ end
+ end
+
+ context 'when Gitaly workhorse_archive feature is disabled', :skip_gitaly_mock do
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(subject)
+
+ expect(key).to eq('Gitlab-Workhorse-Send-Data')
+ expect(command).to eq('git-archive')
+ expect(params).to eq(base_params)
+ end
+ end
+
context "when the repository doesn't have an archive file path" do
before do
allow(project.repository).to receive(:archive_metadata).and_return(Hash.new)
end
it "raises an error" do
- expect { described_class.send_git_archive(project.repository, ref: "master", format: "zip") }.to raise_error(RuntimeError)
+ expect { subject }.to raise_error(RuntimeError)
end
end
end
@@ -182,7 +220,12 @@ describe Gitlab::Workhorse do
let(:repo_path) { repository.path_to_repo }
let(:action) { 'info_refs' }
let(:params) do
- { GL_ID: "user-#{user.id}", GL_REPOSITORY: "project-#{project.id}", RepoPath: repo_path }
+ {
+ GL_ID: "user-#{user.id}",
+ GL_USERNAME: user.username,
+ GL_REPOSITORY: "project-#{project.id}",
+ RepoPath: repo_path
+ }
end
subject { described_class.git_http_ok(repository, false, user, action) }
@@ -191,7 +234,12 @@ describe Gitlab::Workhorse do
context 'when is_wiki' do
let(:params) do
- { GL_ID: "user-#{user.id}", GL_REPOSITORY: "wiki-#{project.id}", RepoPath: repo_path }
+ {
+ GL_ID: "user-#{user.id}",
+ GL_USERNAME: user.username,
+ GL_REPOSITORY: "wiki-#{project.id}",
+ RepoPath: repo_path
+ }
end
subject { described_class.git_http_ok(repository, true, user, action) }
@@ -214,14 +262,13 @@ describe Gitlab::Workhorse do
end
it 'includes a Repository param' do
- repo_param = { Repository: {
+ repo_param = {
storage_name: 'default',
relative_path: project.full_path + '.git',
- git_object_directory: '',
- git_alternate_object_directories: []
- } }
+ gl_repository: "project-#{project.id}"
+ }
- expect(subject).to include(repo_param)
+ expect(subject[:Repository]).to include(repo_param)
end
context "when git_upload_pack action is passed" do
@@ -336,7 +383,7 @@ describe Gitlab::Workhorse do
end
end
- context 'when Gitaly workhorse_raw_show feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly workhorse_raw_show feature is disabled', :skip_gitaly_mock do
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(subject)
diff --git a/spec/lib/google_api/auth_spec.rb b/spec/lib/google_api/auth_spec.rb
new file mode 100644
index 00000000000..87a3f43274f
--- /dev/null
+++ b/spec/lib/google_api/auth_spec.rb
@@ -0,0 +1,41 @@
+require 'spec_helper'
+
+describe GoogleApi::Auth do
+ let(:redirect_uri) { 'http://localhost:3000/google_api/authorizations/callback' }
+ let(:redirect_to) { 'http://localhost:3000/namaspace/project/clusters' }
+
+ let(:client) do
+ GoogleApi::CloudPlatform::Client
+ .new(nil, redirect_uri, state: redirect_to)
+ end
+
+ describe '#authorize_url' do
+ subject { client.authorize_url }
+
+ it 'returns authorize_url' do
+ is_expected.to start_with('https://accounts.google.com/o/oauth2')
+ is_expected.to include(URI.encode(redirect_uri, URI::PATTERN::RESERVED))
+ is_expected.to include(URI.encode(redirect_to, URI::PATTERN::RESERVED))
+ end
+ end
+
+ describe '#get_token' do
+ let(:token) do
+ double.tap do |dbl|
+ allow(dbl).to receive(:token).and_return('token')
+ allow(dbl).to receive(:expires_at).and_return('expires_at')
+ end
+ end
+
+ before do
+ allow_any_instance_of(OAuth2::Strategy::AuthCode)
+ .to receive(:get_token).and_return(token)
+ end
+
+ it 'returns token and expires_at' do
+ token, expires_at = client.get_token('xxx')
+ expect(token).to eq('token')
+ expect(expires_at).to eq('expires_at')
+ end
+ end
+end
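
Note: the spec stubs OAuth2::Strategy::AuthCode#get_token, so no network call happens. For orientation, a hedged sketch of the same two steps with the oauth2 gem; the client id, secret and endpoint paths below are placeholders, not values taken from GitLab's implementation.

require 'oauth2'

# Placeholder credentials and endpoints; illustration only.
client = OAuth2::Client.new(
  'GOOGLE_CLIENT_ID',
  'GOOGLE_CLIENT_SECRET',
  site: 'https://accounts.google.com',
  authorize_url: '/o/oauth2/auth',
  token_url: '/o/oauth2/token'
)

redirect_uri = 'http://localhost:3000/google_api/authorizations/callback'

# Step 1: build the URL the user is sent to (compare #authorize_url above).
url = client.auth_code.authorize_url(
  redirect_uri: redirect_uri,
  state: 'http://localhost:3000/namespace/project/clusters'
)

# Step 2: exchange the returned code for a token (compare #get_token above).
token = client.auth_code.get_token('returned-code', redirect_uri: redirect_uri)
token.token      # access token string
token.expires_at # expiry timestamp
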
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
new file mode 100644
index 00000000000..acc5bd1da35
--- /dev/null
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -0,0 +1,128 @@
+require 'spec_helper'
+
+describe GoogleApi::CloudPlatform::Client do
+ let(:token) { 'token' }
+ let(:client) { described_class.new(token, nil) }
+
+ describe '.session_key_for_redirect_uri' do
+ let(:state) { 'random_string' }
+
+ subject { described_class.session_key_for_redirect_uri(state) }
+
+ it 'creates a new session key' do
+ is_expected.to eq('cloud_platform_second_redirect_uri_random_string')
+ end
+ end
+
+ describe '.new_session_key_for_redirect_uri' do
+ it 'generates a new session key' do
+ expect { |b| described_class.new_session_key_for_redirect_uri(&b) }
+ .to yield_with_args(String)
+ end
+ end
+
+ describe '#validate_token' do
+ subject { client.validate_token(expires_at) }
+
+ let(:expires_at) { 1.hour.since.utc.strftime('%s') }
+
+ context 'when token is nil' do
+ let(:token) { nil }
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'when expires_at is nil' do
+ let(:expires_at) { nil }
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'when expires in 1 hour' do
+ it { is_expected.to be_truthy }
+ end
+
+    context 'when expires in 5 minutes' do
+ let(:expires_at) { 5.minutes.since.utc.strftime('%s') }
+
+ it { is_expected.to be_falsy }
+ end
+ end
+
+ describe '#projects_zones_clusters_get' do
+ subject { client.projects_zones_clusters_get(spy, spy, spy) }
+ let(:gke_cluster) { double }
+
+ before do
+ allow_any_instance_of(Google::Apis::ContainerV1::ContainerService)
+ .to receive(:get_zone_cluster).and_return(gke_cluster)
+ end
+
+ it { is_expected.to eq(gke_cluster) }
+ end
+
+ describe '#projects_zones_clusters_create' do
+ subject do
+ client.projects_zones_clusters_create(
+ spy, spy, cluster_name, cluster_size, machine_type: machine_type)
+ end
+
+ let(:cluster_name) { 'test-cluster' }
+ let(:cluster_size) { 1 }
+ let(:machine_type) { 'n1-standard-4' }
+ let(:operation) { double }
+
+ before do
+ allow_any_instance_of(Google::Apis::ContainerV1::ContainerService)
+ .to receive(:create_cluster).and_return(operation)
+ end
+
+ it { is_expected.to eq(operation) }
+
+    it 'sets the corresponding parameters' do
+ expect_any_instance_of(Google::Apis::ContainerV1::CreateClusterRequest)
+ .to receive(:initialize).with(
+ {
+ "cluster": {
+ "name": cluster_name,
+ "initial_node_count": cluster_size,
+ "node_config": {
+ "machine_type": machine_type
+ }
+ }
+ } )
+
+ subject
+ end
+ end
+
+ describe '#projects_zones_operations' do
+ subject { client.projects_zones_operations(spy, spy, spy) }
+ let(:operation) { double }
+
+ before do
+ allow_any_instance_of(Google::Apis::ContainerV1::ContainerService)
+ .to receive(:get_zone_operation).and_return(operation)
+ end
+
+ it { is_expected.to eq(operation) }
+ end
+
+ describe '#parse_operation_id' do
+ subject { client.parse_operation_id(self_link) }
+
+ context 'when expected url' do
+ let(:self_link) do
+ 'projects/gcp-project-12345/zones/us-central1-a/operations/ope-123'
+ end
+
+ it { is_expected.to eq('ope-123') }
+ end
+
+ context 'when unexpected url' do
+ let(:self_link) { '???' }
+
+ it { is_expected.to be_nil }
+ end
+ end
+end
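
Note: #parse_operation_id pulls the trailing operation name out of a GKE operation self_link and returns nil for anything unexpected, as the last two contexts show. A one-line sketch of such a parse (hypothetical helper; the real method may differ in detail):

# Extract the operation id from a self_link, or nil when it does not match.
def parse_operation_id_sketch(self_link)
  self_link[%r{operations/([^/]+)\z}, 1]
end

parse_operation_id_sketch('projects/gcp-project-12345/zones/us-central1-a/operations/ope-123')
# => "ope-123"
parse_operation_id_sketch('???')
# => nil
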
diff --git a/spec/lib/rspec_flaky/config_spec.rb b/spec/lib/rspec_flaky/config_spec.rb
new file mode 100644
index 00000000000..83556787e85
--- /dev/null
+++ b/spec/lib/rspec_flaky/config_spec.rb
@@ -0,0 +1,102 @@
+require 'spec_helper'
+
+describe RspecFlaky::Config, :aggregate_failures do
+ before do
+ # Stub these env variables otherwise specs don't behave the same on the CI
+ stub_env('FLAKY_RSPEC_GENERATE_REPORT', nil)
+ stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', nil)
+ stub_env('FLAKY_RSPEC_REPORT_PATH', nil)
+ stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', nil)
+ end
+
+ describe '.generate_report?' do
+ context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is not set" do
+ it 'returns false' do
+ expect(described_class).not_to be_generate_report
+ end
+ end
+
+ context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is set to 'false'" do
+ before do
+ stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'false')
+ end
+
+ it 'returns false' do
+ expect(described_class).not_to be_generate_report
+ end
+ end
+
+ context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is set to 'true'" do
+ before do
+ stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'true')
+ end
+
+ it 'returns true' do
+ expect(described_class).to be_generate_report
+ end
+ end
+ end
+
+ describe '.suite_flaky_examples_report_path' do
+ context "when ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] is not set" do
+ it 'returns the default path' do
+ expect(Rails.root).to receive(:join).with('rspec_flaky/suite-report.json')
+ .and_return('root/rspec_flaky/suite-report.json')
+
+ expect(described_class.suite_flaky_examples_report_path).to eq('root/rspec_flaky/suite-report.json')
+ end
+ end
+
+ context "when ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] is set" do
+ before do
+ stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', 'foo/suite-report.json')
+ end
+
+ it 'returns the value of the env variable' do
+ expect(described_class.suite_flaky_examples_report_path).to eq('foo/suite-report.json')
+ end
+ end
+ end
+
+ describe '.flaky_examples_report_path' do
+ context "when ENV['FLAKY_RSPEC_REPORT_PATH'] is not set" do
+ it 'returns the default path' do
+ expect(Rails.root).to receive(:join).with('rspec_flaky/report.json')
+ .and_return('root/rspec_flaky/report.json')
+
+ expect(described_class.flaky_examples_report_path).to eq('root/rspec_flaky/report.json')
+ end
+ end
+
+ context "when ENV['FLAKY_RSPEC_REPORT_PATH'] is set" do
+ before do
+ stub_env('FLAKY_RSPEC_REPORT_PATH', 'foo/report.json')
+ end
+
+ it 'returns the value of the env variable' do
+ expect(described_class.flaky_examples_report_path).to eq('foo/report.json')
+ end
+ end
+ end
+
+ describe '.new_flaky_examples_report_path' do
+ context "when ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] is not set" do
+ it 'returns the default path' do
+ expect(Rails.root).to receive(:join).with('rspec_flaky/new-report.json')
+ .and_return('root/rspec_flaky/new-report.json')
+
+ expect(described_class.new_flaky_examples_report_path).to eq('root/rspec_flaky/new-report.json')
+ end
+ end
+
+ context "when ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] is set" do
+ before do
+ stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', 'foo/new-report.json')
+ end
+
+ it 'returns the value of the env variable' do
+ expect(described_class.new_flaky_examples_report_path).to eq('foo/new-report.json')
+ end
+ end
+ end
+end
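
Note: each of these config readers follows the same pattern: an environment variable wins, otherwise a default under Rails.root is used. A condensed sketch of that pattern (hypothetical module; assumes a Rails context for Rails.root):

# ENV override with a Rails.root default, as exercised by the specs above.
module FlakyConfigSketch
  module_function

  def generate_report?
    ENV['FLAKY_RSPEC_GENERATE_REPORT'] == 'true'
  end

  def flaky_examples_report_path
    ENV['FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join('rspec_flaky/report.json')
  end
end
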
diff --git a/spec/lib/rspec_flaky/flaky_example_spec.rb b/spec/lib/rspec_flaky/flaky_example_spec.rb
index cbfc1e538ab..d19c34bebb3 100644
--- a/spec/lib/rspec_flaky/flaky_example_spec.rb
+++ b/spec/lib/rspec_flaky/flaky_example_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe RspecFlaky::FlakyExample do
+describe RspecFlaky::FlakyExample, :aggregate_failures do
let(:flaky_example_attrs) do
{
example_id: 'spec/foo/bar_spec.rb:2',
@@ -9,6 +9,7 @@ describe RspecFlaky::FlakyExample do
description: 'hello world',
first_flaky_at: 1234,
last_flaky_at: 2345,
+ last_flaky_job: 'https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/12',
last_attempts_count: 2,
flaky_reports: 1
}
@@ -27,57 +28,78 @@ describe RspecFlaky::FlakyExample do
end
let(:example) { double(example_attrs) }
+ before do
+ # Stub these env variables otherwise specs don't behave the same on the CI
+ stub_env('CI_PROJECT_URL', nil)
+ stub_env('CI_JOB_ID', nil)
+ end
+
describe '#initialize' do
shared_examples 'a valid FlakyExample instance' do
- it 'returns valid attributes' do
- flaky_example = described_class.new(args)
+ let(:flaky_example) { described_class.new(args) }
+ it 'returns valid attributes' do
expect(flaky_example.uid).to eq(flaky_example_attrs[:uid])
- expect(flaky_example.example_id).to eq(flaky_example_attrs[:example_id])
+ expect(flaky_example.file).to eq(flaky_example_attrs[:file])
+ expect(flaky_example.line).to eq(flaky_example_attrs[:line])
+ expect(flaky_example.description).to eq(flaky_example_attrs[:description])
+ expect(flaky_example.first_flaky_at).to eq(expected_first_flaky_at)
+ expect(flaky_example.last_flaky_at).to eq(expected_last_flaky_at)
+ expect(flaky_example.last_attempts_count).to eq(flaky_example_attrs[:last_attempts_count])
+ expect(flaky_example.flaky_reports).to eq(expected_flaky_reports)
end
end
context 'when given an Rspec::Example' do
- let(:args) { example }
-
- it_behaves_like 'a valid FlakyExample instance'
+ it_behaves_like 'a valid FlakyExample instance' do
+ let(:args) { example }
+ let(:expected_first_flaky_at) { nil }
+ let(:expected_last_flaky_at) { nil }
+ let(:expected_flaky_reports) { 0 }
+ end
end
context 'when given a hash' do
- let(:args) { flaky_example_attrs }
-
- it_behaves_like 'a valid FlakyExample instance'
+ it_behaves_like 'a valid FlakyExample instance' do
+ let(:args) { flaky_example_attrs }
+ let(:expected_flaky_reports) { flaky_example_attrs[:flaky_reports] }
+ let(:expected_first_flaky_at) { flaky_example_attrs[:first_flaky_at] }
+ let(:expected_last_flaky_at) { flaky_example_attrs[:last_flaky_at] }
+ end
end
end
- describe '#to_h' do
- before do
- # Stub these env variables otherwise specs don't behave the same on the CI
- stub_env('CI_PROJECT_URL', nil)
- stub_env('CI_JOB_ID', nil)
- end
+ describe '#update_flakiness!' do
+ shared_examples 'an up-to-date FlakyExample instance' do
+ let(:flaky_example) { described_class.new(args) }
- shared_examples 'a valid FlakyExample hash' do
- let(:additional_attrs) { {} }
+ it 'updates the first_flaky_at' do
+ now = Time.now
+ expected_first_flaky_at = flaky_example.first_flaky_at ? flaky_example.first_flaky_at : now
+ Timecop.freeze(now) { flaky_example.update_flakiness! }
- it 'returns a valid hash' do
- flaky_example = described_class.new(args)
- final_hash = flaky_example_attrs
- .merge(last_flaky_at: instance_of(Time), last_flaky_job: nil)
- .merge(additional_attrs)
+ expect(flaky_example.first_flaky_at).to eq(expected_first_flaky_at)
+ end
+
+ it 'updates the last_flaky_at' do
+ now = Time.now
+ Timecop.freeze(now) { flaky_example.update_flakiness! }
- expect(flaky_example.to_h).to match(hash_including(final_hash))
+ expect(flaky_example.last_flaky_at).to eq(now)
end
- end
- context 'when given an Rspec::Example' do
- let(:args) { example }
+ it 'updates the flaky_reports' do
+ expected_flaky_reports = flaky_example.first_flaky_at ? flaky_example.flaky_reports + 1 : 1
+
+ expect { flaky_example.update_flakiness! }.to change { flaky_example.flaky_reports }.by(1)
+ expect(flaky_example.flaky_reports).to eq(expected_flaky_reports)
+ end
+
+ context 'when passed a :last_attempts_count' do
+ it 'updates the last_attempts_count' do
+ flaky_example.update_flakiness!(last_attempts_count: 42)
- context 'when run locally' do
- it_behaves_like 'a valid FlakyExample hash' do
- let(:additional_attrs) do
- { first_flaky_at: instance_of(Time) }
- end
+ expect(flaky_example.last_attempts_count).to eq(42)
end
end
@@ -87,10 +109,45 @@ describe RspecFlaky::FlakyExample do
stub_env('CI_JOB_ID', 42)
end
- it_behaves_like 'a valid FlakyExample hash' do
- let(:additional_attrs) do
- { first_flaky_at: instance_of(Time), last_flaky_job: "https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/42" }
- end
+ it 'updates the last_flaky_job' do
+ flaky_example.update_flakiness!
+
+ expect(flaky_example.last_flaky_job).to eq('https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/42')
+ end
+ end
+ end
+
+ context 'when given an Rspec::Example' do
+ it_behaves_like 'an up-to-date FlakyExample instance' do
+ let(:args) { example }
+ end
+ end
+
+ context 'when given a hash' do
+ it_behaves_like 'an up-to-date FlakyExample instance' do
+ let(:args) { flaky_example_attrs }
+ end
+ end
+ end
+
+ describe '#to_h' do
+ shared_examples 'a valid FlakyExample hash' do
+ let(:additional_attrs) { {} }
+
+ it 'returns a valid hash' do
+ flaky_example = described_class.new(args)
+ final_hash = flaky_example_attrs.merge(additional_attrs)
+
+ expect(flaky_example.to_h).to eq(final_hash)
+ end
+ end
+
+ context 'when given an Rspec::Example' do
+ let(:args) { example }
+
+ it_behaves_like 'a valid FlakyExample hash' do
+ let(:additional_attrs) do
+ { first_flaky_at: nil, last_flaky_at: nil, last_flaky_job: nil, flaky_reports: 0 }
end
end
end
diff --git a/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb b/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb
new file mode 100644
index 00000000000..06a8ba0d02e
--- /dev/null
+++ b/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb
@@ -0,0 +1,79 @@
+require 'spec_helper'
+
+describe RspecFlaky::FlakyExamplesCollection, :aggregate_failures do
+ let(:collection_hash) do
+ {
+ a: { example_id: 'spec/foo/bar_spec.rb:2' },
+ b: { example_id: 'spec/foo/baz_spec.rb:3' }
+ }
+ end
+ let(:collection_report) do
+ {
+ a: {
+ example_id: 'spec/foo/bar_spec.rb:2',
+ first_flaky_at: nil,
+ last_flaky_at: nil,
+ last_flaky_job: nil
+ },
+ b: {
+ example_id: 'spec/foo/baz_spec.rb:3',
+ first_flaky_at: nil,
+ last_flaky_at: nil,
+ last_flaky_job: nil
+ }
+ }
+ end
+
+ describe '.from_json' do
+ it 'accepts a JSON' do
+ collection = described_class.from_json(JSON.pretty_generate(collection_hash))
+
+ expect(collection.to_report).to eq(described_class.new(collection_hash).to_report)
+ end
+ end
+
+ describe '#initialize' do
+ it 'accepts no argument' do
+ expect { described_class.new }.not_to raise_error
+ end
+
+ it 'accepts a hash' do
+ expect { described_class.new(collection_hash) }.not_to raise_error
+ end
+
+ it 'does not accept anything else' do
+ expect { described_class.new([1, 2, 3]) }.to raise_error(ArgumentError, "`collection` must be a Hash, Array given!")
+ end
+ end
+
+ describe '#to_report' do
+ it 'calls #to_h on the values' do
+ collection = described_class.new(collection_hash)
+
+ expect(collection.to_report).to eq(collection_report)
+ end
+ end
+
+ describe '#-' do
+ it 'returns only examples that are not present in the given collection' do
+ collection1 = described_class.new(collection_hash)
+ collection2 = described_class.new(
+ a: { example_id: 'spec/foo/bar_spec.rb:2' },
+ c: { example_id: 'spec/bar/baz_spec.rb:4' })
+
+ expect((collection2 - collection1).to_report).to eq(
+ c: {
+ example_id: 'spec/bar/baz_spec.rb:4',
+ first_flaky_at: nil,
+ last_flaky_at: nil,
+ last_flaky_job: nil
+ })
+ end
+
+ it 'fails if the given collection does not respond to `#key?`' do
+ collection = described_class.new(collection_hash)
+
+ expect { collection - [1, 2, 3] }.to raise_error(ArgumentError, "`other` must respond to `#key?`, Array does not!")
+ end
+ end
+end
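
Note: the #- operator keeps only examples whose keys are absent from the other collection and raises unless the operand responds to #key?. A simplified sketch of that difference on plain hashes (hypothetical helper, not the real class):

# Difference between two keyed collections, as the '#-' examples describe.
def collection_difference_sketch(collection, other)
  unless other.respond_to?(:key?)
    raise ArgumentError, "`other` must respond to `#key?`, #{other.class} does not!"
  end

  collection.reject { |uid, _example| other.key?(uid) }
end

left  = { a: 1, c: 3 }
right = { a: 1, b: 2 }
collection_difference_sketch(left, right)
# => { c: 3 }
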
diff --git a/spec/lib/rspec_flaky/listener_spec.rb b/spec/lib/rspec_flaky/listener_spec.rb
index 0e193bf408b..bfb7648b486 100644
--- a/spec/lib/rspec_flaky/listener_spec.rb
+++ b/spec/lib/rspec_flaky/listener_spec.rb
@@ -1,22 +1,35 @@
require 'spec_helper'
-describe RspecFlaky::Listener do
- let(:flaky_example_report) do
+describe RspecFlaky::Listener, :aggregate_failures do
+ let(:already_flaky_example_uid) { '6e869794f4cfd2badd93eb68719371d1' }
+ let(:suite_flaky_example_report) do
{
- 'abc123' => {
+ already_flaky_example_uid => {
example_id: 'spec/foo/bar_spec.rb:2',
file: 'spec/foo/bar_spec.rb',
line: 2,
description: 'hello world',
first_flaky_at: 1234,
- last_flaky_at: instance_of(Time),
- last_attempts_count: 2,
+ last_flaky_at: 4321,
+ last_attempts_count: 3,
flaky_reports: 1,
last_flaky_job: nil
}
}
end
- let(:example_attrs) do
+ let(:already_flaky_example_attrs) do
+ {
+ id: 'spec/foo/bar_spec.rb:2',
+ metadata: {
+ file_path: 'spec/foo/bar_spec.rb',
+ line_number: 2,
+ full_description: 'hello world'
+ },
+ execution_result: double(status: 'passed', exception: nil)
+ }
+ end
+ let(:already_flaky_example) { RspecFlaky::FlakyExample.new(suite_flaky_example_report[already_flaky_example_uid]) }
+ let(:new_example_attrs) do
{
id: 'spec/foo/baz_spec.rb:3',
metadata: {
@@ -32,18 +45,19 @@ describe RspecFlaky::Listener do
# Stub these env variables otherwise specs don't behave the same on the CI
stub_env('CI_PROJECT_URL', nil)
stub_env('CI_JOB_ID', nil)
+ stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', nil)
end
describe '#initialize' do
shared_examples 'a valid Listener instance' do
- let(:expected_all_flaky_examples) { {} }
+ let(:expected_suite_flaky_examples) { {} }
it 'returns a valid Listener instance' do
listener = described_class.new
- expect(listener.to_report(listener.all_flaky_examples))
- .to match(hash_including(expected_all_flaky_examples))
- expect(listener.new_flaky_examples).to eq({})
+ expect(listener.to_report(listener.suite_flaky_examples))
+ .to eq(expected_suite_flaky_examples)
+ expect(listener.flaky_examples).to eq({})
end
end
@@ -51,16 +65,16 @@ describe RspecFlaky::Listener do
it_behaves_like 'a valid Listener instance'
end
- context 'when a report file exists and set by ALL_FLAKY_RSPEC_REPORT_PATH' do
+ context 'when a report file exists and set by SUITE_FLAKY_RSPEC_REPORT_PATH' do
let(:report_file) do
Tempfile.new(%w[rspec_flaky_report .json]).tap do |f|
- f.write(JSON.pretty_generate(flaky_example_report))
+ f.write(JSON.pretty_generate(suite_flaky_example_report))
f.rewind
end
end
before do
- stub_env('ALL_FLAKY_RSPEC_REPORT_PATH', report_file.path)
+ stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', report_file.path)
end
after do
@@ -69,74 +83,122 @@ describe RspecFlaky::Listener do
end
it_behaves_like 'a valid Listener instance' do
- let(:expected_all_flaky_examples) { flaky_example_report }
+ let(:expected_suite_flaky_examples) { suite_flaky_example_report }
end
end
end
describe '#example_passed' do
- let(:rspec_example) { double(example_attrs) }
+ let(:rspec_example) { double(new_example_attrs) }
let(:notification) { double(example: rspec_example) }
+ let(:listener) { described_class.new(suite_flaky_example_report.to_json) }
shared_examples 'a non-flaky example' do
it 'does not change the flaky examples hash' do
- expect { subject.example_passed(notification) }
- .not_to change { subject.all_flaky_examples }
+ expect { listener.example_passed(notification) }
+ .not_to change { listener.flaky_examples }
end
end
- describe 'when the RSpec example does not respond to attempts' do
- it_behaves_like 'a non-flaky example'
- end
+ shared_examples 'an existing flaky example' do
+ let(:expected_flaky_example) do
+ {
+ example_id: 'spec/foo/bar_spec.rb:2',
+ file: 'spec/foo/bar_spec.rb',
+ line: 2,
+ description: 'hello world',
+ first_flaky_at: 1234,
+ last_attempts_count: 2,
+ flaky_reports: 2,
+ last_flaky_job: nil
+ }
+ end
- describe 'when the RSpec example has 1 attempt' do
- let(:rspec_example) { double(example_attrs.merge(attempts: 1)) }
+ it 'changes the flaky examples hash' do
+ new_example = RspecFlaky::Example.new(rspec_example)
- it_behaves_like 'a non-flaky example'
+ now = Time.now
+ Timecop.freeze(now) do
+ expect { listener.example_passed(notification) }
+ .to change { listener.flaky_examples[new_example.uid].to_h }
+ end
+
+ expect(listener.flaky_examples[new_example.uid].to_h)
+ .to eq(expected_flaky_example.merge(last_flaky_at: now))
+ end
end
- describe 'when the RSpec example has 2 attempts' do
- let(:rspec_example) { double(example_attrs.merge(attempts: 2)) }
- let(:expected_new_flaky_example) do
+ shared_examples 'a new flaky example' do
+ let(:expected_flaky_example) do
{
example_id: 'spec/foo/baz_spec.rb:3',
file: 'spec/foo/baz_spec.rb',
line: 3,
description: 'hello GitLab',
- first_flaky_at: instance_of(Time),
- last_flaky_at: instance_of(Time),
last_attempts_count: 2,
flaky_reports: 1,
last_flaky_job: nil
}
end
- it 'does not change the flaky examples hash' do
- expect { subject.example_passed(notification) }
- .to change { subject.all_flaky_examples }
-
+      it 'changes the flaky examples hash' do
new_example = RspecFlaky::Example.new(rspec_example)
- expect(subject.all_flaky_examples[new_example.uid].to_h)
- .to match(hash_including(expected_new_flaky_example))
+ now = Time.now
+ Timecop.freeze(now) do
+ expect { listener.example_passed(notification) }
+ .to change { listener.flaky_examples[new_example.uid].to_h }
+ end
+
+ expect(listener.flaky_examples[new_example.uid].to_h)
+ .to eq(expected_flaky_example.merge(first_flaky_at: now, last_flaky_at: now))
+ end
+ end
+
+ describe 'when the RSpec example does not respond to attempts' do
+ it_behaves_like 'a non-flaky example'
+ end
+
+ describe 'when the RSpec example has 1 attempt' do
+ let(:rspec_example) { double(new_example_attrs.merge(attempts: 1)) }
+
+ it_behaves_like 'a non-flaky example'
+ end
+
+ describe 'when the RSpec example has 2 attempts' do
+ let(:rspec_example) { double(new_example_attrs.merge(attempts: 2)) }
+
+ it_behaves_like 'a new flaky example'
+
+ context 'with an existing flaky example' do
+ let(:rspec_example) { double(already_flaky_example_attrs.merge(attempts: 2)) }
+
+ it_behaves_like 'an existing flaky example'
end
end
end
describe '#dump_summary' do
- let(:rspec_example) { double(example_attrs) }
- let(:notification) { double(example: rspec_example) }
+ let(:listener) { described_class.new(suite_flaky_example_report.to_json) }
+ let(:new_flaky_rspec_example) { double(new_example_attrs.merge(attempts: 2)) }
+ let(:already_flaky_rspec_example) { double(already_flaky_example_attrs.merge(attempts: 2)) }
+ let(:notification_new_flaky_rspec_example) { double(example: new_flaky_rspec_example) }
+ let(:notification_already_flaky_rspec_example) { double(example: already_flaky_rspec_example) }
- context 'when a report file path is set by ALL_FLAKY_RSPEC_REPORT_PATH' do
+ context 'when a report file path is set by FLAKY_RSPEC_REPORT_PATH' do
let(:report_file_path) { Rails.root.join('tmp', 'rspec_flaky_report.json') }
+ let(:new_report_file_path) { Rails.root.join('tmp', 'rspec_flaky_new_report.json') }
before do
- stub_env('ALL_FLAKY_RSPEC_REPORT_PATH', report_file_path)
+ stub_env('FLAKY_RSPEC_REPORT_PATH', report_file_path)
+ stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', new_report_file_path)
FileUtils.rm(report_file_path) if File.exist?(report_file_path)
+ FileUtils.rm(new_report_file_path) if File.exist?(new_report_file_path)
end
after do
FileUtils.rm(report_file_path) if File.exist?(report_file_path)
+ FileUtils.rm(new_report_file_path) if File.exist?(new_report_file_path)
end
context 'when FLAKY_RSPEC_GENERATE_REPORT == "false"' do
@@ -144,12 +206,13 @@ describe RspecFlaky::Listener do
stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'false')
end
- it 'does not write the report file' do
- subject.example_passed(notification)
+ it 'does not write any report file' do
+ listener.example_passed(notification_new_flaky_rspec_example)
- subject.dump_summary(nil)
+ listener.dump_summary(nil)
expect(File.exist?(report_file_path)).to be(false)
+ expect(File.exist?(new_report_file_path)).to be(false)
end
end
@@ -158,21 +221,39 @@ describe RspecFlaky::Listener do
stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'true')
end
- it 'writes the report file' do
- subject.example_passed(notification)
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+ it 'writes the report files' do
+ listener.example_passed(notification_new_flaky_rspec_example)
+ listener.example_passed(notification_already_flaky_rspec_example)
- subject.dump_summary(nil)
+ listener.dump_summary(nil)
expect(File.exist?(report_file_path)).to be(true)
+ expect(File.exist?(new_report_file_path)).to be(true)
+
+ expect(File.read(report_file_path))
+ .to eq(JSON.pretty_generate(listener.to_report(listener.flaky_examples)))
+
+ new_example = RspecFlaky::Example.new(notification_new_flaky_rspec_example)
+ new_flaky_example = RspecFlaky::FlakyExample.new(new_example)
+ new_flaky_example.update_flakiness!
+
+ expect(File.read(new_report_file_path))
+ .to eq(JSON.pretty_generate(listener.to_report(new_example.uid => new_flaky_example)))
end
end
end
end
describe '#to_report' do
+ let(:listener) { described_class.new(suite_flaky_example_report.to_json) }
+
it 'transforms the internal hash to a JSON-ready hash' do
- expect(subject.to_report('abc123' => RspecFlaky::FlakyExample.new(flaky_example_report['abc123'])))
- .to match(hash_including(flaky_example_report))
+ expect(listener.to_report(already_flaky_example_uid => already_flaky_example))
+ .to match(hash_including(suite_flaky_example_report))
end
end
end
diff --git a/spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb b/spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb
index 7125bfcab59..b4b83b70d1c 100644
--- a/spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb
+++ b/spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb
@@ -16,7 +16,12 @@ describe SystemCheck::App::GitUserDefaultSSHConfigCheck do
end
it 'only whitelists safe files' do
- expect(described_class::WHITELIST).to contain_exactly('authorized_keys', 'authorized_keys2', 'known_hosts')
+ expect(described_class::WHITELIST).to contain_exactly(
+ 'authorized_keys',
+ 'authorized_keys2',
+ 'authorized_keys.lock',
+ 'known_hosts'
+ )
end
describe '#skip?' do
@@ -34,6 +39,14 @@ describe SystemCheck::App::GitUserDefaultSSHConfigCheck do
it { is_expected.to eq(expected_result) }
end
+
+ it 'skips GitLab read-only instances' do
+ stub_user
+ stub_home_dir
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+
+ is_expected.to be_truthy
+ end
end
describe '#check?' do
diff --git a/spec/lib/system_check/base_check_spec.rb b/spec/lib/system_check/base_check_spec.rb
new file mode 100644
index 00000000000..faf8c99e772
--- /dev/null
+++ b/spec/lib/system_check/base_check_spec.rb
@@ -0,0 +1,17 @@
+require 'spec_helper'
+
+describe SystemCheck::BaseCheck do
+ context 'helpers on instance level' do
+ it 'responds to SystemCheck::Helpers methods' do
+ expect(subject).to respond_to :fix_and_rerun, :for_more_information, :see_installation_guide_section,
+ :finished_checking, :start_checking, :try_fixing_it, :sanitized_message, :should_sanitize?, :omnibus_gitlab?,
+ :sudo_gitlab
+ end
+
+ it 'responds to Gitlab::TaskHelpers methods' do
+ expect(subject).to respond_to :ask_to_continue, :os_name, :prompt, :run_and_match, :run_command,
+ :run_command!, :uid_for, :gid_for, :gitlab_user, :gitlab_user?, :warn_user_is_not_gitlab, :all_repos,
+ :repository_storage_paths_args, :user_home, :checkout_or_clone_version, :clone_repo, :checkout_version
+ end
+ end
+end
diff --git a/spec/lib/system_check/orphans/namespace_check_spec.rb b/spec/lib/system_check/orphans/namespace_check_spec.rb
new file mode 100644
index 00000000000..2a61ff3ad65
--- /dev/null
+++ b/spec/lib/system_check/orphans/namespace_check_spec.rb
@@ -0,0 +1,61 @@
+require 'spec_helper'
+require 'rake_helper'
+
+describe SystemCheck::Orphans::NamespaceCheck do
+ let(:storages) { Gitlab.config.repositories.storages.reject { |key, _| key.eql? 'broken' } }
+
+ before do
+ allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
+ allow(subject).to receive(:fetch_disk_namespaces).and_return(disk_namespaces)
+ silence_output
+ end
+
+ describe '#multi_check' do
+ context 'all orphans' do
+ let(:disk_namespaces) { %w(/repos/orphan1 /repos/orphan2 repos/@hashed) }
+
+ it 'prints list of all orphaned namespaces except @hashed' do
+ expect_list_of_orphans(%w(orphan1 orphan2))
+
+ subject.multi_check
+ end
+ end
+
+ context 'few orphans with existing namespace' do
+ let!(:first_level) { create(:group, path: 'my-namespace') }
+ let(:disk_namespaces) { %w(/repos/orphan1 /repos/orphan2 /repos/my-namespace /repos/@hashed) }
+
+ it 'prints list of orphaned namespaces' do
+ expect_list_of_orphans(%w(orphan1 orphan2))
+
+ subject.multi_check
+ end
+ end
+
+ context 'few orphans with existing namespace and parents with same name as orphans' do
+ let!(:first_level) { create(:group, path: 'my-namespace') }
+ let!(:second_level) { create(:group, path: 'second-level', parent: first_level) }
+ let(:disk_namespaces) { %w(/repos/orphan1 /repos/orphan2 /repos/my-namespace /repos/second-level /repos/@hashed) }
+
+ it 'prints list of orphaned namespaces ignoring parents with same namespace as orphans' do
+ expect_list_of_orphans(%w(orphan1 orphan2 second-level))
+
+ subject.multi_check
+ end
+ end
+
+ context 'no orphans' do
+ let(:disk_namespaces) { %w(@hashed) }
+
+ it 'prints an empty list ignoring @hashed' do
+ expect_list_of_orphans([])
+
+ subject.multi_check
+ end
+ end
+ end
+
+ def expect_list_of_orphans(orphans)
+ expect(subject).to receive(:print_orphans).with(orphans, 'default')
+ end
+end
diff --git a/spec/lib/system_check/orphans/repository_check_spec.rb b/spec/lib/system_check/orphans/repository_check_spec.rb
new file mode 100644
index 00000000000..b0c2267d177
--- /dev/null
+++ b/spec/lib/system_check/orphans/repository_check_spec.rb
@@ -0,0 +1,68 @@
+require 'spec_helper'
+require 'rake_helper'
+
+describe SystemCheck::Orphans::RepositoryCheck do
+ let(:storages) { Gitlab.config.repositories.storages.reject { |key, _| key.eql? 'broken' } }
+
+ before do
+ allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
+ allow(subject).to receive(:fetch_disk_namespaces).and_return(disk_namespaces)
+ allow(subject).to receive(:fetch_disk_repositories).and_return(disk_repositories)
+ silence_output
+ end
+
+ describe '#multi_check' do
+ context 'all orphans' do
+ let(:disk_namespaces) { %w(/repos/orphan1 /repos/orphan2 repos/@hashed) }
+ let(:disk_repositories) { %w(repo1.git repo2.git) }
+
+ it 'prints list of all orphaned namespaces except @hashed' do
+ expect_list_of_orphans(%w(orphan1/repo1.git orphan1/repo2.git orphan2/repo1.git orphan2/repo2.git))
+
+ subject.multi_check
+ end
+ end
+
+ context 'few orphans with existing namespace' do
+ let!(:first_level) { create(:group, path: 'my-namespace') }
+ let!(:project) { create(:project, path: 'repo', namespace: first_level) }
+ let(:disk_namespaces) { %w(/repos/orphan1 /repos/orphan2 /repos/my-namespace /repos/@hashed) }
+ let(:disk_repositories) { %w(repo.git) }
+
+ it 'prints list of orphaned namespaces' do
+ expect_list_of_orphans(%w(orphan1/repo.git orphan2/repo.git))
+
+ subject.multi_check
+ end
+ end
+
+ context 'few orphans with existing namespace and parents with same name as orphans' do
+ let!(:first_level) { create(:group, path: 'my-namespace') }
+ let!(:second_level) { create(:group, path: 'second-level', parent: first_level) }
+ let!(:project) { create(:project, path: 'repo', namespace: first_level) }
+ let(:disk_namespaces) { %w(/repos/orphan1 /repos/orphan2 /repos/my-namespace /repos/second-level /repos/@hashed) }
+ let(:disk_repositories) { %w(repo.git) }
+
+ it 'prints list of orphaned namespaces ignoring parents with same namespace as orphans' do
+ expect_list_of_orphans(%w(orphan1/repo.git orphan2/repo.git second-level/repo.git))
+
+ subject.multi_check
+ end
+ end
+
+ context 'no orphans' do
+ let(:disk_namespaces) { %w(@hashed) }
+ let(:disk_repositories) { %w(repo.git) }
+
+ it 'prints an empty list ignoring @hashed' do
+ expect_list_of_orphans([])
+
+ subject.multi_check
+ end
+ end
+ end
+
+ def expect_list_of_orphans(orphans)
+ expect(subject).to receive(:print_orphans).with(orphans, 'default')
+ end
+end
diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb
index 09e5094cf84..1f7be415e35 100644
--- a/spec/mailers/emails/profile_spec.rb
+++ b/spec/mailers/emails/profile_spec.rb
@@ -120,29 +120,4 @@ describe Emails::Profile do
it { expect { Notify.new_gpg_key_email('foo') }.not_to raise_error }
end
end
-
- describe 'user added email' do
- let(:email) { create(:email) }
-
- subject { Notify.new_email_email(email.id) }
-
- it_behaves_like 'it should not have Gmail Actions links'
- it_behaves_like 'a user cannot unsubscribe through footer link'
-
- it 'is sent to the new user' do
- is_expected.to deliver_to email.user.email
- end
-
- it 'has the correct subject' do
- is_expected.to have_subject /^Email was added to your account$/i
- end
-
- it 'contains the new email address' do
- is_expected.to have_body_text /#{email.email}/
- end
-
- it 'includes a link to emails page' do
- is_expected.to have_body_text /#{profile_emails_path}/
- end
- end
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 932e2fd8c95..c832cee965b 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -28,319 +28,334 @@ describe Notify do
end
def have_referable_subject(referable, reply: false)
- prefix = referable.project.name if referable.project
- prefix = "Re: #{prefix}" if reply
+ prefix = referable.project ? "#{referable.project.name} | " : ''
+ prefix.prepend('Re: ') if reply
suffix = "#{referable.title} (#{referable.to_reference})"
- have_subject [prefix, suffix].compact.join(' | ')
+ have_subject [prefix, suffix].compact.join
end
context 'for a project' do
- describe 'items that are assignable, the email' do
- let(:previous_assignee) { create(:user, name: 'Previous Assignee') }
+ shared_examples 'an assignee email' do
+ it 'is sent to the assignee as the author' do
+ sender = subject.header[:from].addrs.first
+
+ aggregate_failures do
+ expect(sender.display_name).to eq(current_user.name)
+ expect(sender.address).to eq(gitlab_sender)
+ expect(subject).to deliver_to(assignee.email)
+ end
+ end
+ end
- shared_examples 'an assignee email' do
- it 'is sent to the assignee as the author' do
- sender = subject.header[:from].addrs.first
+ context 'for issues' do
+ describe 'that are new' do
+ subject { described_class.new_issue_email(issue.assignees.first.id, issue.id) }
+ it_behaves_like 'an assignee email'
+ it_behaves_like 'an email starting a new thread with reply-by-email enabled' do
+ let(:model) { issue }
+ end
+ it_behaves_like 'it should show Gmail Actions View Issue link'
+ it_behaves_like 'an unsubscribeable thread'
+
+ it 'has the correct subject and body' do
aggregate_failures do
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
- expect(subject).to deliver_to(assignee.email)
+ is_expected.to have_referable_subject(issue)
+ is_expected.to have_body_text(project_issue_path(project, issue))
end
end
- end
- context 'for issues' do
- describe 'that are new' do
- subject { described_class.new_issue_email(issue.assignees.first.id, issue.id) }
+ it 'contains the description' do
+ is_expected.to have_html_escaped_body_text issue.description
+ end
- it_behaves_like 'an assignee email'
- it_behaves_like 'an email starting a new thread with reply-by-email enabled' do
- let(:model) { issue }
- end
- it_behaves_like 'it should show Gmail Actions View Issue link'
- it_behaves_like 'an unsubscribeable thread'
-
- it 'has the correct subject and body' do
- aggregate_failures do
- is_expected.to have_referable_subject(issue)
- is_expected.to have_body_text(project_issue_path(project, issue))
- end
+ context 'when enabled email_author_in_body' do
+ before do
+ stub_application_setting(email_author_in_body: true)
end
- it 'contains the description' do
- is_expected.to have_html_escaped_body_text issue.description
+ it 'contains a link to note author' do
+ is_expected.to have_html_escaped_body_text(issue.author_name)
+ is_expected.to have_body_text 'created an issue:'
end
+ end
+ end
- context 'when enabled email_author_in_body' do
- before do
- stub_application_setting(email_author_in_body: true)
- end
+ describe 'that are reassigned' do
+ let(:previous_assignee) { create(:user, name: 'Previous Assignee') }
+ subject { described_class.reassigned_issue_email(recipient.id, issue.id, [previous_assignee.id], current_user.id) }
- it 'contains a link to note author' do
- is_expected.to have_html_escaped_body_text(issue.author_name)
- is_expected.to have_body_text 'created an issue:'
- end
- end
+ it_behaves_like 'a multiple recipients email'
+ it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
+ let(:model) { issue }
end
+ it_behaves_like 'it should show Gmail Actions View Issue link'
+ it_behaves_like 'an unsubscribeable thread'
- describe 'that have been reassigned' do
- subject { described_class.reassigned_issue_email(recipient.id, issue.id, [previous_assignee.id], current_user.id) }
+ it 'is sent as the author' do
+ sender = subject.header[:from].addrs[0]
+ expect(sender.display_name).to eq(current_user.name)
+ expect(sender.address).to eq(gitlab_sender)
+ end
- it_behaves_like 'a multiple recipients email'
- it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
- let(:model) { issue }
+ it 'has the correct subject and body' do
+ aggregate_failures do
+ is_expected.to have_referable_subject(issue, reply: true)
+ is_expected.to have_html_escaped_body_text(previous_assignee.name)
+ is_expected.to have_html_escaped_body_text(assignee.name)
+ is_expected.to have_body_text(project_issue_path(project, issue))
end
- it_behaves_like 'it should show Gmail Actions View Issue link'
- it_behaves_like 'an unsubscribeable thread'
+ end
+ end
- it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
- end
+ describe 'that have been relabeled' do
+ subject { described_class.relabeled_issue_email(recipient.id, issue.id, %w[foo bar baz], current_user.id) }
- it 'has the correct subject and body' do
- aggregate_failures do
- is_expected.to have_referable_subject(issue, reply: true)
- is_expected.to have_html_escaped_body_text(previous_assignee.name)
- is_expected.to have_html_escaped_body_text(assignee.name)
- is_expected.to have_body_text(project_issue_path(project, issue))
- end
- end
+ it_behaves_like 'a multiple recipients email'
+ it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
+ let(:model) { issue }
end
+ it_behaves_like 'it should show Gmail Actions View Issue link'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'an email with a labels subscriptions link in its footer'
- describe 'that have been relabeled' do
- subject { described_class.relabeled_issue_email(recipient.id, issue.id, %w[foo bar baz], current_user.id) }
+ it 'is sent as the author' do
+ sender = subject.header[:from].addrs[0]
+ expect(sender.display_name).to eq(current_user.name)
+ expect(sender.address).to eq(gitlab_sender)
+ end
- it_behaves_like 'a multiple recipients email'
- it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
- let(:model) { issue }
+ it 'has the correct subject and body' do
+ aggregate_failures do
+ is_expected.to have_referable_subject(issue, reply: true)
+ is_expected.to have_body_text('foo, bar, and baz')
+ is_expected.to have_body_text(project_issue_path(project, issue))
end
- it_behaves_like 'it should show Gmail Actions View Issue link'
- it_behaves_like 'a user cannot unsubscribe through footer link'
- it_behaves_like 'an email with a labels subscriptions link in its footer'
+ end
- it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
+ context 'with a preferred language' do
+ before do
+ Gitlab::I18n.locale = :es
end
- it 'has the correct subject and body' do
- aggregate_failures do
- is_expected.to have_referable_subject(issue, reply: true)
- is_expected.to have_body_text('foo, bar, and baz')
- is_expected.to have_body_text(project_issue_path(project, issue))
- end
+ after do
+ Gitlab::I18n.use_default_locale
end
- context 'with a preferred language' do
- before do
- Gitlab::I18n.locale = :es
- end
-
- after do
- Gitlab::I18n.use_default_locale
- end
-
- it 'always generates the email using the default language' do
- is_expected.to have_body_text('foo, bar, and baz')
- end
+ it 'always generates the email using the default language' do
+ is_expected.to have_body_text('foo, bar, and baz')
end
end
+ end
- describe 'status changed' do
- let(:status) { 'closed' }
- subject { described_class.issue_status_changed_email(recipient.id, issue.id, status, current_user.id) }
+ describe 'status changed' do
+ let(:status) { 'closed' }
+ subject { described_class.issue_status_changed_email(recipient.id, issue.id, status, current_user.id) }
- it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
- let(:model) { issue }
- end
- it_behaves_like 'it should show Gmail Actions View Issue link'
- it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
+ let(:model) { issue }
+ end
+ it_behaves_like 'it should show Gmail Actions View Issue link'
+ it_behaves_like 'an unsubscribeable thread'
- it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
- end
+ it 'is sent as the author' do
+ sender = subject.header[:from].addrs[0]
+ expect(sender.display_name).to eq(current_user.name)
+ expect(sender.address).to eq(gitlab_sender)
+ end
- it 'has the correct subject and body' do
- aggregate_failures do
- is_expected.to have_referable_subject(issue, reply: true)
- is_expected.to have_body_text(status)
- is_expected.to have_html_escaped_body_text(current_user.name)
- is_expected.to have_body_text(project_issue_path project, issue)
- end
+ it 'has the correct subject and body' do
+ aggregate_failures do
+ is_expected.to have_referable_subject(issue, reply: true)
+ is_expected.to have_body_text(status)
+ is_expected.to have_html_escaped_body_text(current_user.name)
+ is_expected.to have_body_text(project_issue_path project, issue)
end
end
+ end
- describe 'moved to another project' do
- let(:new_issue) { create(:issue) }
- subject { described_class.issue_moved_email(recipient, issue, new_issue, current_user) }
+ describe 'moved to another project' do
+ let(:new_issue) { create(:issue) }
+ subject { described_class.issue_moved_email(recipient, issue, new_issue, current_user) }
- it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
- let(:model) { issue }
- end
- it_behaves_like 'it should show Gmail Actions View Issue link'
- it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
+ let(:model) { issue }
+ end
+ it_behaves_like 'it should show Gmail Actions View Issue link'
+ it_behaves_like 'an unsubscribeable thread'
- it 'contains description about action taken' do
- is_expected.to have_body_text 'Issue was moved to another project'
- end
+ it 'contains description about action taken' do
+ is_expected.to have_body_text 'Issue was moved to another project'
+ end
- it 'has the correct subject and body' do
- new_issue_url = project_issue_path(new_issue.project, new_issue)
+ it 'has the correct subject and body' do
+ new_issue_url = project_issue_path(new_issue.project, new_issue)
- aggregate_failures do
- is_expected.to have_referable_subject(issue, reply: true)
- is_expected.to have_body_text(new_issue_url)
- is_expected.to have_body_text(project_issue_path(project, issue))
- end
+ aggregate_failures do
+ is_expected.to have_referable_subject(issue, reply: true)
+ is_expected.to have_body_text(new_issue_url)
+ is_expected.to have_body_text(project_issue_path(project, issue))
end
end
end
+ end
- context 'for merge requests' do
- describe 'that are new' do
- subject { described_class.new_merge_request_email(merge_request.assignee_id, merge_request.id) }
+ context 'for merge requests' do
+ describe 'that are new' do
+ subject { described_class.new_merge_request_email(merge_request.assignee_id, merge_request.id) }
+
+ it_behaves_like 'an assignee email'
+ it_behaves_like 'an email starting a new thread with reply-by-email enabled' do
+ let(:model) { merge_request }
+ end
+ it_behaves_like 'it should show Gmail Actions View Merge request link'
+ it_behaves_like 'an unsubscribeable thread'
- it_behaves_like 'an assignee email'
- it_behaves_like 'an email starting a new thread with reply-by-email enabled' do
- let(:model) { merge_request }
+ it 'has the correct subject and body' do
+ aggregate_failures do
+ is_expected.to have_referable_subject(merge_request)
+ is_expected.to have_body_text(project_merge_request_path(project, merge_request))
+ is_expected.to have_body_text(merge_request.source_branch)
+ is_expected.to have_body_text(merge_request.target_branch)
end
- it_behaves_like 'it should show Gmail Actions View Merge request link'
- it_behaves_like 'an unsubscribeable thread'
-
- it 'has the correct subject and body' do
- aggregate_failures do
- is_expected.to have_referable_subject(merge_request)
- is_expected.to have_body_text(project_merge_request_path(project, merge_request))
- is_expected.to have_body_text(merge_request.source_branch)
- is_expected.to have_body_text(merge_request.target_branch)
- end
+ end
+
+ it 'contains the description' do
+ is_expected.to have_html_escaped_body_text merge_request.description
+ end
+
+ context 'when enabled email_author_in_body' do
+ before do
+ stub_application_setting(email_author_in_body: true)
end
- it 'contains the description' do
- is_expected.to have_html_escaped_body_text merge_request.description
+ it 'contains a link to note author' do
+ is_expected.to have_html_escaped_body_text merge_request.author_name
+ is_expected.to have_body_text 'created a merge request:'
end
+ end
+ end
- context 'when enabled email_author_in_body' do
- before do
- stub_application_setting(email_author_in_body: true)
- end
+ describe 'that are reassigned' do
+ let(:previous_assignee) { create(:user, name: 'Previous Assignee') }
+ subject { described_class.reassigned_merge_request_email(recipient.id, merge_request.id, previous_assignee.id, current_user.id) }
- it 'contains a link to note author' do
- is_expected.to have_html_escaped_body_text merge_request.author_name
- is_expected.to have_body_text 'created a merge request:'
- end
- end
+ it_behaves_like 'a multiple recipients email'
+ it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
+ let(:model) { merge_request }
end
+ it_behaves_like 'it should show Gmail Actions View Merge request link'
+ it_behaves_like "an unsubscribeable thread"
- describe 'that are reassigned' do
- subject { described_class.reassigned_merge_request_email(recipient.id, merge_request.id, previous_assignee.id, current_user.id) }
+ it 'is sent as the author' do
+ sender = subject.header[:from].addrs[0]
+ expect(sender.display_name).to eq(current_user.name)
+ expect(sender.address).to eq(gitlab_sender)
+ end
- it_behaves_like 'a multiple recipients email'
- it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
- let(:model) { merge_request }
+ it 'has the correct subject and body' do
+ aggregate_failures do
+ is_expected.to have_referable_subject(merge_request, reply: true)
+ is_expected.to have_html_escaped_body_text(previous_assignee.name)
+ is_expected.to have_body_text(project_merge_request_path(project, merge_request))
+ is_expected.to have_html_escaped_body_text(assignee.name)
end
- it_behaves_like 'it should show Gmail Actions View Merge request link'
- it_behaves_like "an unsubscribeable thread"
+ end
+ end
- it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
- end
+ describe 'that have been relabeled' do
+ subject { described_class.relabeled_merge_request_email(recipient.id, merge_request.id, %w[foo bar baz], current_user.id) }
- it 'has the correct subject and body' do
- aggregate_failures do
- is_expected.to have_referable_subject(merge_request, reply: true)
- is_expected.to have_html_escaped_body_text(previous_assignee.name)
- is_expected.to have_body_text(project_merge_request_path(project, merge_request))
- is_expected.to have_html_escaped_body_text(assignee.name)
- end
- end
+ it_behaves_like 'a multiple recipients email'
+ it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
+ let(:model) { merge_request }
end
+ it_behaves_like 'it should show Gmail Actions View Merge request link'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'an email with a labels subscriptions link in its footer'
- describe 'that have been relabeled' do
- subject { described_class.relabeled_merge_request_email(recipient.id, merge_request.id, %w[foo bar baz], current_user.id) }
+ it 'is sent as the author' do
+ sender = subject.header[:from].addrs[0]
+ expect(sender.display_name).to eq(current_user.name)
+ expect(sender.address).to eq(gitlab_sender)
+ end
- it_behaves_like 'a multiple recipients email'
- it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
- let(:model) { merge_request }
- end
- it_behaves_like 'it should show Gmail Actions View Merge request link'
- it_behaves_like 'a user cannot unsubscribe through footer link'
- it_behaves_like 'an email with a labels subscriptions link in its footer'
+ it 'has the correct subject and body' do
+ is_expected.to have_referable_subject(merge_request, reply: true)
+ is_expected.to have_body_text('foo, bar, and baz')
+ is_expected.to have_body_text(project_merge_request_path(project, merge_request))
+ end
+ end
- it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
- end
+ describe 'status changed' do
+ let(:status) { 'reopened' }
+ subject { described_class.merge_request_status_email(recipient.id, merge_request.id, status, current_user.id) }
- it 'has the correct subject and body' do
+ it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
+ let(:model) { merge_request }
+ end
+ it_behaves_like 'it should show Gmail Actions View Merge request link'
+ it_behaves_like 'an unsubscribeable thread'
+
+ it 'is sent as the author' do
+ sender = subject.header[:from].addrs[0]
+ expect(sender.display_name).to eq(current_user.name)
+ expect(sender.address).to eq(gitlab_sender)
+ end
+
+ it 'has the correct subject and body' do
+ aggregate_failures do
is_expected.to have_referable_subject(merge_request, reply: true)
- is_expected.to have_body_text('foo, bar, and baz')
+ is_expected.to have_body_text(status)
+ is_expected.to have_html_escaped_body_text(current_user.name)
is_expected.to have_body_text(project_merge_request_path(project, merge_request))
end
end
+ end
- describe 'status changed' do
- let(:status) { 'reopened' }
- subject { described_class.merge_request_status_email(recipient.id, merge_request.id, status, current_user.id) }
+ describe 'that are merged' do
+ let(:merge_author) { create(:user) }
+ subject { described_class.merged_merge_request_email(recipient.id, merge_request.id, merge_author.id) }
- it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
- let(:model) { merge_request }
- end
- it_behaves_like 'it should show Gmail Actions View Merge request link'
- it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'a multiple recipients email'
+ it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
+ let(:model) { merge_request }
+ end
+ it_behaves_like 'it should show Gmail Actions View Merge request link'
+ it_behaves_like 'an unsubscribeable thread'
- it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
- end
+ it 'is sent as the merge author' do
+ sender = subject.header[:from].addrs[0]
+ expect(sender.display_name).to eq(merge_author.name)
+ expect(sender.address).to eq(gitlab_sender)
+ end
- it 'has the correct subject and body' do
- aggregate_failures do
- is_expected.to have_referable_subject(merge_request, reply: true)
- is_expected.to have_body_text(status)
- is_expected.to have_html_escaped_body_text(current_user.name)
- is_expected.to have_body_text(project_merge_request_path(project, merge_request))
- end
+ it 'has the correct subject and body' do
+ aggregate_failures do
+ is_expected.to have_referable_subject(merge_request, reply: true)
+ is_expected.to have_body_text('merged')
+ is_expected.to have_body_text(project_merge_request_path(project, merge_request))
end
end
+ end
+ end
- describe 'that are merged' do
- let(:merge_author) { create(:user) }
- subject { described_class.merged_merge_request_email(recipient.id, merge_request.id, merge_author.id) }
+ context 'for snippet notes' do
+ let(:project_snippet) { create(:project_snippet, project: project) }
+ let(:project_snippet_note) { create(:note_on_project_snippet, project: project, noteable: project_snippet) }
- it_behaves_like 'a multiple recipients email'
- it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
- let(:model) { merge_request }
- end
- it_behaves_like 'it should show Gmail Actions View Merge request link'
- it_behaves_like 'an unsubscribeable thread'
+ subject { described_class.note_snippet_email(project_snippet_note.author_id, project_snippet_note.id) }
- it 'is sent as the merge author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(merge_author.name)
- expect(sender.address).to eq(gitlab_sender)
- end
+ it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
+ let(:model) { project_snippet }
+ end
+ it_behaves_like 'a user cannot unsubscribe through footer link'
- it 'has the correct subject and body' do
- aggregate_failures do
- is_expected.to have_referable_subject(merge_request, reply: true)
- is_expected.to have_body_text('merged')
- is_expected.to have_body_text(project_merge_request_path(project, merge_request))
- end
- end
- end
+ it 'has the correct subject and body' do
+ is_expected.to have_referable_subject(project_snippet, reply: true)
+ is_expected.to have_html_escaped_body_text project_snippet_note.note
end
end
@@ -1239,4 +1254,18 @@ describe Notify do
end
end
end
+
+ context 'for personal snippet notes' do
+ let(:personal_snippet) { create(:personal_snippet) }
+ let(:personal_snippet_note) { create(:note_on_personal_snippet, noteable: personal_snippet) }
+
+ subject { described_class.note_personal_snippet_email(personal_snippet_note.author_id, personal_snippet_note.id) }
+
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+
+ it 'has the correct subject and body' do
+ is_expected.to have_referable_subject(personal_snippet, reply: true)
+ is_expected.to have_html_escaped_body_text personal_snippet_note.note
+ end
+ end
end
diff --git a/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb b/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb
index 862907c5d01..84c2e9f7e52 100644
--- a/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb
+++ b/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb
@@ -2,11 +2,12 @@ require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170508170547_add_head_pipeline_for_each_merge_request.rb')
describe AddHeadPipelineForEachMergeRequest, :truncate do
+ include ProjectForksHelper
+
let(:migration) { described_class.new }
let!(:project) { create(:project) }
- let!(:forked_project_link) { create(:forked_project_link, forked_from_project: project) }
- let!(:other_project) { forked_project_link.forked_to_project }
+ let!(:other_project) { fork_project(project) }
let!(:pipeline_1) { create(:ci_pipeline, project: project, ref: "branch_1") }
let!(:pipeline_2) { create(:ci_pipeline, project: other_project, ref: "branch_1") }
diff --git a/spec/migrations/clean_stages_statuses_migration_spec.rb b/spec/migrations/clean_stages_statuses_migration_spec.rb
new file mode 100644
index 00000000000..38705f8eaae
--- /dev/null
+++ b/spec/migrations/clean_stages_statuses_migration_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20170912113435_clean_stages_statuses_migration.rb')
+
+describe CleanStagesStatusesMigration, :migration, :sidekiq, :redis do
+ let(:migration) { spy('migration') }
+
+ before do
+ allow(Gitlab::BackgroundMigration::MigrateStageStatus)
+ .to receive(:new).and_return(migration)
+ end
+
+ context 'when there are pending background migrations' do
+ it 'processes pending jobs synchronously' do
+ Sidekiq::Testing.disable! do
+ BackgroundMigrationWorker
+ .perform_in(2.minutes, 'MigrateStageStatus', [1, 1])
+ BackgroundMigrationWorker
+ .perform_async('MigrateStageStatus', [1, 1])
+
+ migrate!
+
+ expect(migration).to have_received(:perform).with(1, 1).twice
+ end
+ end
+ end
+
+ context 'when there are no background migrations pending' do
+ it 'does nothing' do
+ Sidekiq::Testing.disable! do
+ migrate!
+
+ expect(migration).not_to have_received(:perform)
+ end
+ end
+ end
+
+ context 'when there are still unmigrated stages afterwards' do
+ let(:stages) { table('ci_stages') }
+
+ before do
+ stages.create!(status: nil, name: 'build')
+ stages.create!(status: nil, name: 'test')
+ end
+
+ it 'migrates statuses sequentially in batches' do
+ migrate!
+
+ expect(migration).to have_received(:perform).once
+ end
+ end
+end
diff --git a/spec/migrations/delete_conflicting_redirect_routes_spec.rb b/spec/migrations/delete_conflicting_redirect_routes_spec.rb
new file mode 100644
index 00000000000..1df2477da51
--- /dev/null
+++ b/spec/migrations/delete_conflicting_redirect_routes_spec.rb
@@ -0,0 +1,58 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20170907170235_delete_conflicting_redirect_routes')
+
+describe DeleteConflictingRedirectRoutes, :migration, :sidekiq do
+ let!(:redirect_routes) { table(:redirect_routes) }
+ let!(:routes) { table(:routes) }
+
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+ before do
+ stub_const("DeleteConflictingRedirectRoutes::BATCH_SIZE", 2)
+ stub_const("Gitlab::Database::MigrationHelpers::BACKGROUND_MIGRATION_JOB_BUFFER_SIZE", 2)
+
+ routes.create!(id: 1, source_id: 1, source_type: 'Namespace', path: 'foo1')
+ routes.create!(id: 2, source_id: 2, source_type: 'Namespace', path: 'foo2')
+ routes.create!(id: 3, source_id: 3, source_type: 'Namespace', path: 'foo3')
+ routes.create!(id: 4, source_id: 4, source_type: 'Namespace', path: 'foo4')
+ routes.create!(id: 5, source_id: 5, source_type: 'Namespace', path: 'foo5')
+
+ # Valid redirects
+ redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar')
+ redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar2')
+ redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'bar3')
+
+ # Conflicting redirects
+ redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'foo1')
+ redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo2')
+ redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo3')
+ redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo4')
+ redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo5')
+ end
+
+ it 'correctly schedules background migrations' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([described_class::MIGRATION, [1, 2]])
+ expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(12.seconds.from_now.to_f)
+ expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([described_class::MIGRATION, [3, 4]])
+ expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(24.seconds.from_now.to_f)
+ expect(BackgroundMigrationWorker.jobs[2]['args']).to eq([described_class::MIGRATION, [5, 5]])
+ expect(BackgroundMigrationWorker.jobs[2]['at']).to eq(36.seconds.from_now.to_f)
+ expect(BackgroundMigrationWorker.jobs.size).to eq 3
+ end
+ end
+ end
+
+ it 'schedules background migrations' do
+ Sidekiq::Testing.inline! do
+ expect do
+ migrate!
+ end.to change { redirect_routes.count }.from(8).to(3)
+ end
+ end
+end
diff --git a/spec/migrations/normalize_ldap_extern_uids_spec.rb b/spec/migrations/normalize_ldap_extern_uids_spec.rb
new file mode 100644
index 00000000000..262d7742aaf
--- /dev/null
+++ b/spec/migrations/normalize_ldap_extern_uids_spec.rb
@@ -0,0 +1,56 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20170921101004_normalize_ldap_extern_uids')
+
+describe NormalizeLdapExternUids, :migration, :sidekiq do
+ let!(:identities) { table(:identities) }
+
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+ before do
+ stub_const("Gitlab::Database::MigrationHelpers::BACKGROUND_MIGRATION_BATCH_SIZE", 2)
+ stub_const("Gitlab::Database::MigrationHelpers::BACKGROUND_MIGRATION_JOB_BUFFER_SIZE", 2)
+
+ # LDAP identities
+ (1..4).each do |i|
+ identities.create!(id: i, provider: 'ldapmain', extern_uid: " uid = foo #{i}, ou = People, dc = example, dc = com ", user_id: i)
+ end
+
+ # Non-LDAP identity
+ identities.create!(id: 5, provider: 'foo', extern_uid: " uid = foo 5, ou = People, dc = example, dc = com ", user_id: 5)
+ end
+
+ it 'correctly schedules background migrations' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([described_class::MIGRATION, [1, 2]])
+ expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.seconds.from_now.to_f)
+ expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([described_class::MIGRATION, [3, 4]])
+ expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(20.seconds.from_now.to_f)
+ expect(BackgroundMigrationWorker.jobs[2]['args']).to eq([described_class::MIGRATION, [5, 5]])
+ expect(BackgroundMigrationWorker.jobs[2]['at']).to eq(30.seconds.from_now.to_f)
+ expect(BackgroundMigrationWorker.jobs.size).to eq 3
+ end
+ end
+ end
+
+ it 'migrates the LDAP identities' do
+ Sidekiq::Testing.inline! do
+ migrate!
+ identities.where(id: 1..4).each do |identity|
+ expect(identity.extern_uid).to eq("uid=foo #{identity.id},ou=people,dc=example,dc=com")
+ end
+ end
+ end
+
+ it 'does not modify non-LDAP identities' do
+ Sidekiq::Testing.inline! do
+ migrate!
+ identity = identities.last
+ expect(identity.extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ")
+ end
+ end
+end
diff --git a/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb b/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb
new file mode 100644
index 00000000000..0e884a7d910
--- /dev/null
+++ b/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys')
+
+describe ScheduleCreateGpgKeySubkeysFromGpgKeys, :migration, :sidekiq do
+ matcher :be_scheduled_migration do |*expected|
+ match do |migration|
+ BackgroundMigrationWorker.jobs.any? do |job|
+ job['args'] == [migration, expected]
+ end
+ end
+
+ failure_message do |migration|
+ "Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
+ end
+ end
+
+ before do
+ create(:gpg_key, id: 1, key: GpgHelpers::User1.public_key)
+ create(:gpg_key, id: 2, key: GpgHelpers::User3.public_key)
+ # Delete all subkeys so they can be recreated
+ GpgKeySubkey.destroy_all
+ end
+
+ it 'correctly schedules background migrations' do
+ Sidekiq::Testing.fake! do
+ migrate!
+
+ expect(described_class::MIGRATION).to be_scheduled_migration(1)
+ expect(described_class::MIGRATION).to be_scheduled_migration(2)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+
+ it 'schedules background migrations' do
+ Sidekiq::Testing.inline! do
+ expect(GpgKeySubkey.count).to eq(0)
+
+ migrate!
+
+ expect(GpgKeySubkey.count).to eq(3)
+ end
+ end
+end
diff --git a/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb b/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb
new file mode 100644
index 00000000000..4ab1bb67058
--- /dev/null
+++ b/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb
@@ -0,0 +1,59 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20170926150348_schedule_merge_request_diff_migrations_take_two')
+
+describe ScheduleMergeRequestDiffMigrationsTakeTwo, :migration, :sidekiq do
+ matcher :be_scheduled_migration do |time, *expected|
+ match do |migration|
+ BackgroundMigrationWorker.jobs.any? do |job|
+ job['args'] == [migration, expected] &&
+ job['at'].to_i == time.to_i
+ end
+ end
+
+ failure_message do |migration|
+ "Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
+ end
+ end
+
+ let(:merge_request_diffs) { table(:merge_request_diffs) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:projects) { table(:projects) }
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+
+ projects.create!(id: 1, name: 'gitlab', path: 'gitlab')
+
+ merge_requests.create!(id: 1, target_project_id: 1, source_project_id: 1, target_branch: 'feature', source_branch: 'master')
+
+ merge_request_diffs.create!(id: 1, merge_request_id: 1, st_commits: YAML.dump([]), st_diffs: nil)
+ merge_request_diffs.create!(id: 2, merge_request_id: 1, st_commits: nil, st_diffs: YAML.dump([]))
+ merge_request_diffs.create!(id: 3, merge_request_id: 1, st_commits: nil, st_diffs: nil)
+ merge_request_diffs.create!(id: 4, merge_request_id: 1, st_commits: YAML.dump([]), st_diffs: YAML.dump([]))
+ end
+
+ it 'correctly schedules background migrations' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes.from_now, 1, 1)
+ expect(described_class::MIGRATION).to be_scheduled_migration(20.minutes.from_now, 2, 2)
+ expect(described_class::MIGRATION).to be_scheduled_migration(30.minutes.from_now, 4, 4)
+ expect(BackgroundMigrationWorker.jobs.size).to eq 3
+ end
+ end
+ end
+
+ it 'migrates the data' do
+ Sidekiq::Testing.inline! do
+ non_empty = 'st_commits IS NOT NULL OR st_diffs IS NOT NULL'
+
+ expect(merge_request_diffs.where(non_empty).count).to eq 3
+
+ migrate!
+
+ expect(merge_request_diffs.where(non_empty).count).to eq 0
+ end
+ end
+end
diff --git a/spec/migrations/update_legacy_diff_notes_type_for_import_spec.rb b/spec/migrations/update_legacy_diff_notes_type_for_import_spec.rb
new file mode 100644
index 00000000000..d625b60ff50
--- /dev/null
+++ b/spec/migrations/update_legacy_diff_notes_type_for_import_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20170927112318_update_legacy_diff_notes_type_for_import.rb')
+
+describe UpdateLegacyDiffNotesTypeForImport, :migration do
+ let(:notes) { table(:notes) }
+
+ before do
+ notes.inheritance_column = nil
+
+ notes.create(type: 'Note')
+ notes.create(type: 'LegacyDiffNote')
+ notes.create(type: 'Github::Import::Note')
+ notes.create(type: 'Github::Import::LegacyDiffNote')
+ end
+
+ it 'updates the notes type' do
+ migrate!
+
+ expect(notes.pluck(:type))
+ .to contain_exactly('Note', 'Github::Import::Note', 'LegacyDiffNote', 'LegacyDiffNote')
+ end
+end
diff --git a/spec/migrations/update_notes_type_for_import_spec.rb b/spec/migrations/update_notes_type_for_import_spec.rb
new file mode 100644
index 00000000000..06195d970d8
--- /dev/null
+++ b/spec/migrations/update_notes_type_for_import_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20170927112319_update_notes_type_for_import.rb')
+
+describe UpdateNotesTypeForImport, :migration do
+ let(:notes) { table(:notes) }
+
+ before do
+ notes.inheritance_column = nil
+
+ notes.create(type: 'Note')
+ notes.create(type: 'LegacyDiffNote')
+ notes.create(type: 'Github::Import::Note')
+ notes.create(type: 'Github::Import::LegacyDiffNote')
+ end
+
+ it 'updates the notes type' do
+ migrate!
+
+ expect(notes.pluck(:type))
+ .to contain_exactly('Note', 'Note', 'LegacyDiffNote', 'Github::Import::LegacyDiffNote')
+ end
+end
diff --git a/spec/migrations/update_upload_paths_to_system_spec.rb b/spec/migrations/update_upload_paths_to_system_spec.rb
index 0a45c5ea32d..d4a1553fb0e 100644
--- a/spec/migrations/update_upload_paths_to_system_spec.rb
+++ b/spec/migrations/update_upload_paths_to_system_spec.rb
@@ -31,7 +31,7 @@ describe UpdateUploadPathsToSystem do
end
end
- describe "#up", truncate: true do
+ describe "#up", :truncate do
it "updates old upload records to the new path" do
old_upload = create(:upload, model: create(:project), path: "uploads/project/avatar.jpg")
@@ -41,7 +41,7 @@ describe UpdateUploadPathsToSystem do
end
end
- describe "#down", truncate: true do
+ describe "#down", :truncate do
it "updates the new system patsh to the old paths" do
new_upload = create(:upload, model: create(:project), path: "uploads/-/system/project/avatar.jpg")
diff --git a/spec/models/abuse_report_spec.rb b/spec/models/abuse_report_spec.rb
index d4da30b1641..f49a61062c1 100644
--- a/spec/models/abuse_report_spec.rb
+++ b/spec/models/abuse_report_spec.rb
@@ -1,8 +1,9 @@
require 'rails_helper'
-RSpec.describe AbuseReport do
- subject { create(:abuse_report) }
- let(:user) { create(:admin) }
+describe AbuseReport do
+ set(:report) { create(:abuse_report) }
+ set(:user) { create(:admin) }
+ subject { report }
it { expect(subject).to be_valid }
diff --git a/spec/models/appearance_spec.rb b/spec/models/appearance_spec.rb
index b5d5d58697b..49f44525b29 100644
--- a/spec/models/appearance_spec.rb
+++ b/spec/models/appearance_spec.rb
@@ -1,6 +1,6 @@
require 'rails_helper'
-RSpec.describe Appearance do
+describe Appearance do
subject { build(:appearance) }
it { is_expected.to be_valid }
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index c7a9eabdf06..78cacf9ff5d 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -167,19 +167,33 @@ describe ApplicationSetting do
context 'housekeeping settings' do
it { is_expected.not_to allow_value(0).for(:housekeeping_incremental_repack_period) }
- it 'wants the full repack period to be longer than the incremental repack period' do
+ it 'wants the full repack period to be at least the incremental repack period' do
subject.housekeeping_incremental_repack_period = 2
subject.housekeeping_full_repack_period = 1
expect(subject).not_to be_valid
end
- it 'wants the gc period to be longer than the full repack period' do
- subject.housekeeping_full_repack_period = 2
- subject.housekeeping_gc_period = 1
+ it 'wants the gc period to be at least the full repack period' do
+ subject.housekeeping_full_repack_period = 100
+ subject.housekeeping_gc_period = 90
expect(subject).not_to be_valid
end
+
+ it 'allows the same period for incremental repack and full repack, effectively skipping incremental repack' do
+ subject.housekeeping_incremental_repack_period = 2
+ subject.housekeeping_full_repack_period = 2
+
+ expect(subject).to be_valid
+ end
+
+ it 'allows the same period for full repack and gc, effectively skipping full repack' do
+ subject.housekeeping_full_repack_period = 100
+ subject.housekeeping_gc_period = 100
+
+ expect(subject).to be_valid
+ end
end
end
diff --git a/spec/models/chat_name_spec.rb b/spec/models/chat_name_spec.rb
index 8581bcbb08b..e89e534d914 100644
--- a/spec/models/chat_name_spec.rb
+++ b/spec/models/chat_name_spec.rb
@@ -1,7 +1,8 @@
require 'spec_helper'
describe ChatName do
- subject { create(:chat_name) }
+ set(:chat_name) { create(:chat_name) }
+ subject { chat_name }
it { is_expected.to belong_to(:service) }
it { is_expected.to belong_to(:user) }
diff --git a/spec/models/chat_team_spec.rb b/spec/models/chat_team_spec.rb
index e0e5f73e6fe..70a9a206faa 100644
--- a/spec/models/chat_team_spec.rb
+++ b/spec/models/chat_team_spec.rb
@@ -1,7 +1,8 @@
require 'spec_helper'
describe ChatTeam do
- subject { create(:chat_team) }
+ set(:chat_team) { create(:chat_team) }
+ subject { chat_team }
# Associations
it { is_expected.to belong_to(:namespace) }
diff --git a/spec/models/ci/artifact_blob_spec.rb b/spec/models/ci/artifact_blob_spec.rb
index a10a8af5303..d5ba088af53 100644
--- a/spec/models/ci/artifact_blob_spec.rb
+++ b/spec/models/ci/artifact_blob_spec.rb
@@ -1,7 +1,8 @@
require 'spec_helper'
describe Ci::ArtifactBlob do
- let(:build) { create(:ci_build, :artifacts) }
+ set(:project) { create(:project, :public) }
+ set(:build) { create(:ci_build, :artifacts, project: project) }
let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/another-subdirectory/banana_sample.gif') }
subject { described_class.new(entry) }
@@ -41,4 +42,51 @@ describe Ci::ArtifactBlob do
expect(subject.external_storage).to eq(:build_artifact)
end
end
+
+ describe '#external_url' do
+ before do
+ allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+ allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
+ end
+
+ context '.gif extension' do
+ it 'returns nil' do
+ expect(subject.external_url(build.project, build)).to be_nil
+ end
+ end
+
+ context 'txt extensions' do
+ let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/doc_sample.txt') }
+
+ it 'returns a URL' do
+ url = subject.external_url(build.project, build)
+
+ expect(url).not_to be_nil
+ expect(url).to start_with("http")
+ expect(url).to match Gitlab.config.pages.host
+ expect(url).to end_with(entry.path)
+ end
+ end
+ end
+
+ describe '#external_link?' do
+ before do
+ allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+ allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
+ end
+
+ context 'gif extensions' do
+ it 'returns false' do
+ expect(subject.external_link?(build)).to be false
+ end
+ end
+
+ context 'txt extensions' do
+ let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/doc_sample.txt') }
+
+ it 'returns true' do
+ expect(subject.external_link?(build)).to be true
+ end
+ end
+ end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index d67f578ab8a..1e3da4e1f1a 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -1,22 +1,24 @@
require 'spec_helper'
describe Ci::Build do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
- let(:build) { create(:ci_build, pipeline: pipeline) }
- let(:test_trace) { 'This is a test' }
+ set(:user) { create(:user) }
+ set(:group) { create(:group, :access_requestable) }
+ set(:project) { create(:project, :repository, group: group) }
- let(:pipeline) do
+ set(:pipeline) do
create(:ci_pipeline, project: project,
sha: project.commit.id,
ref: project.default_branch,
status: 'success')
end
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
it { is_expected.to belong_to(:runner) }
it { is_expected.to belong_to(:trigger_request) }
it { is_expected.to belong_to(:erased_by) }
it { is_expected.to have_many(:deployments) }
+ it { is_expected.to have_many(:trace_sections)}
it { is_expected.to validate_presence_of(:ref) }
it { is_expected.to respond_to(:has_trace?) }
it { is_expected.to respond_to(:trace) }
@@ -326,7 +328,7 @@ describe Ci::Build do
let(:project_regex) { '\(\d+\.\d+\) covered' }
before do
- project.build_coverage_regex = project_regex
+ project.update_column(:build_coverage_regex, project_regex)
end
context 'and coverage_regex attribute is not set' do
@@ -363,6 +365,17 @@ describe Ci::Build do
end
end
+ describe '#parse_trace_sections!' do
+ it 'calls ExtractSectionsFromBuildTraceService' do
+ expect(Ci::ExtractSectionsFromBuildTraceService)
+ .to receive(:new).with(project, build.user).once.and_call_original
+ expect_any_instance_of(Ci::ExtractSectionsFromBuildTraceService)
+ .to receive(:execute).with(build).once
+
+ build.parse_trace_sections!
+ end
+ end
+
describe '#trace' do
subject { build.trace }
@@ -1140,9 +1153,6 @@ describe Ci::Build do
end
describe '#repo_url' do
- let(:build) { create(:ci_build) }
- let(:project) { build.project }
-
subject { build.repo_url }
it { is_expected.to be_a(String) }
@@ -1243,6 +1253,8 @@ describe Ci::Build do
end
context 'use from gitlab-ci.yml' do
+ let(:pipeline) { create(:ci_pipeline) }
+
before do
stub_ci_pipeline_yaml_file(config)
end
@@ -1486,11 +1498,7 @@ describe Ci::Build do
{ key: 'SECRET_KEY', value: 'secret_value', public: false }
end
- let(:group) { create(:group, :access_requestable) }
-
before do
- build.project.update(group: group)
-
create(:ci_group_variable,
secret_variable.slice(:key, :value).merge(group: group))
end
@@ -1503,11 +1511,7 @@ describe Ci::Build do
{ key: 'PROTECTED_KEY', value: 'protected_value', public: false }
end
- let(:group) { create(:group, :access_requestable) }
-
before do
- build.project.update(group: group)
-
create(:ci_group_variable,
:protected,
protected_variable.slice(:key, :value).merge(group: group))
@@ -1530,6 +1534,10 @@ describe Ci::Build do
end
context 'when the ref is not protected' do
+ before do
+ build.update_column(:ref, 'some/feature')
+ end
+
it { is_expected.not_to include(protected_variable) }
end
end
@@ -1596,6 +1604,8 @@ describe Ci::Build do
end
context 'when yaml_variables are undefined' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
before do
build.yaml_variables = nil
end
@@ -1689,7 +1699,10 @@ describe Ci::Build do
before do
build.environment = 'production'
- allow(project).to receive(:deployment_variables).and_return([deployment_variable])
+
+ allow_any_instance_of(Project)
+ .to receive(:deployment_variables)
+ .and_return([deployment_variable])
end
it { is_expected.to include(deployment_variable) }
@@ -1713,14 +1726,19 @@ describe Ci::Build do
before do
allow(build).to receive(:predefined_variables) { [build_pre_var] }
- allow(project).to receive(:predefined_variables) { [project_pre_var] }
- allow(pipeline).to receive(:predefined_variables) { [pipeline_pre_var] }
allow(build).to receive(:yaml_variables) { [build_yaml_var] }
- allow(project).to receive(:secret_variables_for)
+ allow_any_instance_of(Project)
+ .to receive(:predefined_variables) { [project_pre_var] }
+
+ allow_any_instance_of(Project)
+ .to receive(:secret_variables_for)
.with(ref: 'master', environment: nil) do
[create(:ci_variable, key: 'secret', value: 'value')]
end
+
+ allow_any_instance_of(Ci::Pipeline)
+ .to receive(:predefined_variables) { [pipeline_pre_var] }
end
it do
diff --git a/spec/models/ci/build_trace_section_name_spec.rb b/spec/models/ci/build_trace_section_name_spec.rb
new file mode 100644
index 00000000000..386ee6880cb
--- /dev/null
+++ b/spec/models/ci/build_trace_section_name_spec.rb
@@ -0,0 +1,12 @@
+require 'spec_helper'
+
+describe Ci::BuildTraceSectionName, model: true do
+ subject { build(:ci_build_trace_section_name) }
+
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_many(:trace_sections)}
+
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_uniqueness_of(:name).scoped_to(:project_id) }
+end
diff --git a/spec/models/ci/build_trace_section_spec.rb b/spec/models/ci/build_trace_section_spec.rb
new file mode 100644
index 00000000000..541a9a36fb8
--- /dev/null
+++ b/spec/models/ci/build_trace_section_spec.rb
@@ -0,0 +1,11 @@
+require 'spec_helper'
+
+describe Ci::BuildTraceSection, model: true do
+ it { is_expected.to belong_to(:build)}
+ it { is_expected.to belong_to(:project)}
+ it { is_expected.to belong_to(:section_name)}
+
+ it { is_expected.to validate_presence_of(:section_name) }
+ it { is_expected.to validate_presence_of(:build) }
+ it { is_expected.to validate_presence_of(:project) }
+end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 95da97b7bc5..2c9e7013b77 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -26,6 +26,7 @@ describe Ci::Pipeline, :mailer do
it { is_expected.to respond_to :git_author_name }
it { is_expected.to respond_to :git_author_email }
it { is_expected.to respond_to :short_sha }
+ it { is_expected.to delegate_method(:full_path).to(:project).with_prefix }
describe '#source' do
context 'when creating new pipeline' do
@@ -237,7 +238,7 @@ describe Ci::Pipeline, :mailer do
describe '#stage_seeds' do
let(:pipeline) do
- create(:ci_pipeline, config: { rspec: { script: 'rake' } })
+ build(:ci_pipeline, config: { rspec: { script: 'rake' } })
end
it 'returns preseeded stage seeds object' do
@@ -246,6 +247,14 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '#seeds_size' do
+ let(:pipeline) { build(:ci_pipeline_with_one_job) }
+
+ it 'returns number of jobs in stage seeds' do
+ expect(pipeline.seeds_size).to eq 1
+ end
+ end
+
describe '#legacy_stages' do
subject { pipeline.legacy_stages }
@@ -1439,4 +1448,24 @@ describe Ci::Pipeline, :mailer do
it_behaves_like 'not sending any notification'
end
end
+
+ describe '#latest_builds_with_artifacts' do
+ let!(:pipeline) { create(:ci_pipeline, :success) }
+
+ let!(:build) do
+ create(:ci_build, :success, :artifacts, pipeline: pipeline)
+ end
+
+ it 'returns the latest builds' do
+ expect(pipeline.latest_builds_with_artifacts).to eq([build])
+ end
+
+ it 'memoizes the returned relation' do
+ query_count = ActiveRecord::QueryRecorder
+ .new { 2.times { pipeline.latest_builds_with_artifacts.to_a } }
+ .count
+
+ expect(query_count).to eq(1)
+ end
+ end
end
diff --git a/spec/models/ci/pipeline_variable_spec.rb b/spec/models/ci/pipeline_variable_spec.rb
index 2ce78e34b0c..889c243c8d8 100644
--- a/spec/models/ci/pipeline_variable_spec.rb
+++ b/spec/models/ci/pipeline_variable_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Ci::PipelineVariable, models: true do
+describe Ci::PipelineVariable do
subject { build(:ci_pipeline_variable) }
it { is_expected.to include_module(HasVariable) }
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 2e686e515c5..584dfe9a5c1 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -183,75 +183,42 @@ describe Ci::Runner do
end
end
- context 'when runner is locked' do
+ context 'when runner is shared' do
before do
- runner.locked = true
+ runner.is_shared = true
+ build.project.runners = []
end
- shared_examples 'locked build picker' do
- context 'when runner cannot pick untagged jobs' do
- before do
- runner.run_untagged = false
- end
+ it 'can handle builds' do
+ expect(runner.can_pick?(build)).to be_truthy
+ end
- it 'cannot handle builds without tags' do
- expect(runner.can_pick?(build)).to be_falsey
- end
+ context 'when runner is locked' do
+ before do
+ runner.locked = true
end
- context 'when having runner tags' do
- before do
- runner.tag_list = %w(bb cc)
- end
-
- it 'cannot handle it for builds without matching tags' do
- build.tag_list = ['aa']
-
- expect(runner.can_pick?(build)).to be_falsey
- end
+ it 'can handle builds' do
+ expect(runner.can_pick?(build)).to be_truthy
end
end
+ end
- context 'when serving the same project' do
- it 'can handle it' do
+ context 'when runner is not shared' do
+ context 'when runner is assigned to a project' do
+ it 'can handle builds' do
expect(runner.can_pick?(build)).to be_truthy
end
-
- it_behaves_like 'locked build picker'
-
- context 'when having runner tags' do
- before do
- runner.tag_list = %w(bb cc)
- build.tag_list = ['bb']
- end
-
- it 'can handle it for matching tags' do
- expect(runner.can_pick?(build)).to be_truthy
- end
- end
end
- context 'serving a different project' do
+ context 'when runner is not assigned to a project' do
before do
- runner.runner_projects.destroy_all
+ build.project.runners = []
end
- it 'cannot handle it' do
+ it 'cannot handle builds' do
expect(runner.can_pick?(build)).to be_falsey
end
-
- it_behaves_like 'locked build picker'
-
- context 'when having runner tags' do
- before do
- runner.tag_list = %w(bb cc)
- build.tag_list = ['bb']
- end
-
- it 'cannot handle it for matching tags' do
- expect(runner.can_pick?(build)).to be_falsey
- end
- end
end
end
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index 11e64a0f877..e3cfa149e3a 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -207,11 +207,6 @@ eos
context 'of a merge commit' do
let(:repository) { project.repository }
- let(:commit_options) do
- author = repository.user_to_committer(user)
- { message: 'Test message', committer: author, author: author }
- end
-
let(:merge_request) do
create(:merge_request,
source_branch: 'video',
@@ -224,7 +219,7 @@ eos
merge_commit_id = repository.merge(user,
merge_request.diff_head_sha,
merge_request,
- commit_options)
+ 'Test message')
repository.commit(merge_commit_id)
end
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index 40bbb10eaac..129dfa07f15 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -178,57 +178,59 @@ describe CacheMarkdownField do
end
end
- describe '#refresh_markdown_cache!' do
+ describe '#refresh_markdown_cache' do
before do
thing.foo = updated_markdown
end
- context 'do_update: false' do
- it 'fills all html fields' do
- thing.refresh_markdown_cache!
+ it 'fills all html fields' do
+ thing.refresh_markdown_cache
- expect(thing.foo_html).to eq(updated_html)
- expect(thing.foo_html_changed?).to be_truthy
- expect(thing.baz_html_changed?).to be_truthy
- end
+ expect(thing.foo_html).to eq(updated_html)
+ expect(thing.foo_html_changed?).to be_truthy
+ expect(thing.baz_html_changed?).to be_truthy
+ end
- it 'does not save the result' do
- expect(thing).not_to receive(:update_columns)
+ it 'does not save the result' do
+ expect(thing).not_to receive(:update_columns)
- thing.refresh_markdown_cache!
- end
+ thing.refresh_markdown_cache
+ end
- it 'updates the markdown cache version' do
- thing.cached_markdown_version = nil
- thing.refresh_markdown_cache!
+ it 'updates the markdown cache version' do
+ thing.cached_markdown_version = nil
+ thing.refresh_markdown_cache
- expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION)
- end
+ expect(thing.cached_markdown_version).to eq(CacheMarkdownField::CACHE_VERSION)
end
+ end
- context 'do_update: true' do
- it 'fills all html fields' do
- thing.refresh_markdown_cache!(do_update: true)
+ describe '#refresh_markdown_cache!' do
+ before do
+ thing.foo = updated_markdown
+ end
- expect(thing.foo_html).to eq(updated_html)
- expect(thing.foo_html_changed?).to be_truthy
- expect(thing.baz_html_changed?).to be_truthy
- end
+ it 'fills all html fields' do
+ thing.refresh_markdown_cache!
- it 'skips saving if not persisted' do
- expect(thing).to receive(:persisted?).and_return(false)
- expect(thing).not_to receive(:update_columns)
+ expect(thing.foo_html).to eq(updated_html)
+ expect(thing.foo_html_changed?).to be_truthy
+ expect(thing.baz_html_changed?).to be_truthy
+ end
- thing.refresh_markdown_cache!(do_update: true)
- end
+ it 'skips saving if not persisted' do
+ expect(thing).to receive(:persisted?).and_return(false)
+ expect(thing).not_to receive(:update_columns)
- it 'saves the changes using #update_columns' do
- expect(thing).to receive(:persisted?).and_return(true)
- expect(thing).to receive(:update_columns)
- .with("foo_html" => updated_html, "baz_html" => "", "cached_markdown_version" => CacheMarkdownField::CACHE_VERSION)
+ thing.refresh_markdown_cache!
+ end
- thing.refresh_markdown_cache!(do_update: true)
- end
+ it 'saves the changes using #update_columns' do
+ expect(thing).to receive(:persisted?).and_return(true)
+ expect(thing).to receive(:update_columns)
+ .with("foo_html" => updated_html, "baz_html" => "", "cached_markdown_version" => CacheMarkdownField::CACHE_VERSION)
+
+ thing.refresh_markdown_cache!
end
end
diff --git a/spec/models/concerns/has_status_spec.rb b/spec/models/concerns/has_status_spec.rb
index a38f2553eb1..6866b43432c 100644
--- a/spec/models/concerns/has_status_spec.rb
+++ b/spec/models/concerns/has_status_spec.rb
@@ -231,6 +231,18 @@ describe HasStatus do
end
end
+ describe '.alive' do
+ subject { CommitStatus.alive }
+
+ %i[running pending created].each do |status|
+ it_behaves_like 'containing the job', status
+ end
+
+ %i[failed success].each do |status|
+ it_behaves_like 'not containing the job', status
+ end
+ end
+
describe '.created_or_pending' do
subject { CommitStatus.created_or_pending }
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index b463d12e448..ab8773b7ede 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -12,6 +12,16 @@ describe Group, 'Routable' do
it { is_expected.to have_many(:redirect_routes).dependent(:destroy) }
end
+ describe 'GitLab read-only instance' do
+ it 'does not save route if route is not present' do
+ group.route.path = ''
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ expect(group).to receive(:update_route).and_call_original
+
+ expect { group.full_path }.to change { Route.count }.by(0)
+ end
+ end
+
describe 'Callbacks' do
it 'creates route record on create' do
expect(group.route.path).to eq(group.path)
diff --git a/spec/models/diff_note_spec.rb b/spec/models/diff_note_spec.rb
index 4aa9ec789a3..eb0a3e9e0d3 100644
--- a/spec/models/diff_note_spec.rb
+++ b/spec/models/diff_note_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe DiffNote do
include RepoHelpers
- let(:merge_request) { create(:merge_request) }
+ let!(:merge_request) { create(:merge_request) }
let(:project) { merge_request.project }
let(:commit) { project.commit(sample_commit.id) }
@@ -98,14 +98,14 @@ describe DiffNote do
diff_line = subject.diff_line
expect(diff_line.added?).to be true
- expect(diff_line.new_line).to eq(position.new_line)
+ expect(diff_line.new_line).to eq(position.formatter.new_line)
expect(diff_line.text).to eq("+ vars = {")
end
end
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(position.file_path, position.new_line, 15)
+ line_code = Gitlab::Diff::LineCode.generate(position.file_path, position.formatter.new_line, 15)
expect(subject.line_code).to eq(line_code)
end
@@ -255,4 +255,38 @@ describe DiffNote do
end
end
end
+
+ describe "image diff notes" do
+ let(:path) { "files/images/any_image.png" }
+
+ let!(:position) do
+ Gitlab::Diff::Position.new(
+ old_path: path,
+ new_path: path,
+ width: 10,
+ height: 10,
+ x: 1,
+ y: 1,
+ diff_refs: merge_request.diff_refs,
+ position_type: "image"
+ )
+ end
+
+ describe "validations" do
+ subject { build(:diff_note_on_merge_request, project: project, position: position, noteable: merge_request) }
+
+ it { is_expected.not_to validate_presence_of(:line_code) }
+
+ it "does not validate diff line" do
+ diff_line = subject.diff_line
+
+ expect(diff_line).to be nil
+ expect(subject).to be_valid
+ end
+ end
+
+ it "returns true for on_image?" do
+ expect(subject.on_image?).to be_truthy
+ end
+ end
end
diff --git a/spec/models/email_spec.rb b/spec/models/email_spec.rb
index 1d6fabe48b1..b32dd31ae6d 100644
--- a/spec/models/email_spec.rb
+++ b/spec/models/email_spec.rb
@@ -11,4 +11,33 @@ describe Email do
expect(described_class.new(email: ' inFO@exAMPLe.com ').email)
.to eq 'info@example.com'
end
+
+ describe '#update_invalid_gpg_signatures' do
+ let(:user) do
+ create(:user, email: 'tula.torphy@abshire.ca').tap do |user|
+ user.skip_reconfirmation!
+ end
+ end
+
+ it 'synchronizes the gpg keys when the email is updated' do
+ email = user.emails.create(email: 'new@email.com')
+
+ expect(user).to receive(:update_invalid_gpg_signatures)
+
+ email.confirm
+ end
+ end
+
+ describe 'scopes' do
+ let(:user) { create(:user) }
+
+ it 'scopes confirmed emails' do
+ create(:email, :confirmed, user: user)
+ create(:email, user: user)
+
+ expect(user.emails.count).to eq 2
+ expect(user.emails.confirmed.count).to eq 1
+ end
+ end
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index ea8512a5eae..25e5d155894 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -54,6 +54,28 @@ describe Environment do
end
end
+ describe '#folder_name' do
+ context 'when it is inside a folder' do
+ subject(:environment) do
+ create(:environment, name: 'staging/review-1')
+ end
+
+ it 'returns a top-level folder name' do
+ expect(environment.folder_name).to eq 'staging'
+ end
+ end
+
+ context 'when the environment is a top-level item itself' do
+ subject(:environment) do
+ create(:environment, name: 'production')
+ end
+
+ it 'returns an environment name' do
+ expect(environment.folder_name).to eq 'production'
+ end
+ end
+ end
+
describe '#nullify_external_url' do
it 'replaces a blank url with nil' do
env = build(:environment, external_url: "")
diff --git a/spec/models/fork_network_member_spec.rb b/spec/models/fork_network_member_spec.rb
new file mode 100644
index 00000000000..532ca1fca8c
--- /dev/null
+++ b/spec/models/fork_network_member_spec.rb
@@ -0,0 +1,8 @@
+require 'spec_helper'
+
+describe ForkNetworkMember do
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:fork_network) }
+ end
+end
diff --git a/spec/models/fork_network_spec.rb b/spec/models/fork_network_spec.rb
new file mode 100644
index 00000000000..605ccd6db06
--- /dev/null
+++ b/spec/models/fork_network_spec.rb
@@ -0,0 +1,54 @@
+require 'spec_helper'
+
+describe ForkNetwork do
+ include ProjectForksHelper
+
+ describe '#add_root_as_member' do
+ it 'adds the root project as a member when creating a new root network' do
+ project = create(:project)
+ fork_network = described_class.create(root_project: project)
+
+ expect(fork_network.projects).to include(project)
+ end
+ end
+
+ describe '#find_fork_in' do
+ it 'finds all forks of the current network in a collection' do
+ network = create(:fork_network)
+ root_project = network.root_project
+ another_project = fork_project(root_project)
+ create(:project)
+
+ expect(network.find_forks_in(Project.all))
+ .to contain_exactly(another_project, root_project)
+ end
+ end
+
+ context 'for a deleted project' do
+ it 'keeps the fork network' do
+ project = create(:project, :public)
+ forked = fork_project(project)
+ project.destroy!
+
+ fork_network = forked.reload.fork_network
+
+ expect(fork_network.projects).to contain_exactly(forked)
+ expect(fork_network.root_project).to be_nil
+ end
+
+ it 'allows multiple fork networks where the root project is deleted' do
+ first_project = create(:project)
+ second_project = create(:project)
+ first_fork = fork_project(first_project)
+ second_fork = fork_project(second_project)
+
+ first_project.destroy
+ second_project.destroy
+
+ expect(first_fork.fork_network).not_to be_nil
+ expect(first_fork.fork_network.root_project).to be_nil
+ expect(second_fork.fork_network).not_to be_nil
+ expect(second_fork.fork_network.root_project).to be_nil
+ end
+ end
+end
diff --git a/spec/models/forked_project_link_spec.rb b/spec/models/forked_project_link_spec.rb
index 7dbeb4d2e74..32e33e8f42f 100644
--- a/spec/models/forked_project_link_spec.rb
+++ b/spec/models/forked_project_link_spec.rb
@@ -1,10 +1,11 @@
require 'spec_helper'
describe ForkedProjectLink, "add link on fork" do
+ include ProjectForksHelper
+
let(:project_from) { create(:project, :repository) }
let(:project_to) { fork_project(project_from, user) }
let(:user) { create(:user) }
- let(:namespace) { user.namespace }
before do
project_from.add_reporter(user)
@@ -64,13 +65,4 @@ describe ForkedProjectLink, "add link on fork" do
expect(ForkedProjectLink.exists?(id: forked_project_link.id)).to eq(false)
end
end
-
- def fork_project(from_project, user)
- service = Projects::ForkService.new(from_project, user)
- shell = double('gitlab_shell', fork_repository: true)
-
- allow(service).to receive(:gitlab_shell).and_return(shell)
-
- service.execute
- end
end
diff --git a/spec/models/gcp/cluster_spec.rb b/spec/models/gcp/cluster_spec.rb
new file mode 100644
index 00000000000..350fbc257d9
--- /dev/null
+++ b/spec/models/gcp/cluster_spec.rb
@@ -0,0 +1,240 @@
+require 'spec_helper'
+
+describe Gcp::Cluster do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:user) }
+ it { is_expected.to belong_to(:service) }
+
+ it { is_expected.to validate_presence_of(:gcp_cluster_zone) }
+
+ describe '#default_value_for' do
+ let(:cluster) { described_class.new }
+
+ it { expect(cluster.gcp_cluster_zone).to eq('us-central1-a') }
+ it { expect(cluster.gcp_cluster_size).to eq(3) }
+ it { expect(cluster.gcp_machine_type).to eq('n1-standard-4') }
+ end
+
+ describe '#validates' do
+ subject { cluster.valid? }
+
+ context 'when validates gcp_project_id' do
+ let(:cluster) { build(:gcp_cluster, gcp_project_id: gcp_project_id) }
+
+ context 'when valid' do
+ let(:gcp_project_id) { 'gcp-project-12345' }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when empty' do
+ let(:gcp_project_id) { '' }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when too long' do
+ let(:gcp_project_id) { 'A' * 64 }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when includes abnormal character' do
+ let(:gcp_project_id) { '!!!!!!' }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when validates gcp_cluster_name' do
+ let(:cluster) { build(:gcp_cluster, gcp_cluster_name: gcp_cluster_name) }
+
+ context 'when valid' do
+ let(:gcp_cluster_name) { 'test-cluster' }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when empty' do
+ let(:gcp_cluster_name) { '' }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when too long' do
+ let(:gcp_cluster_name) { 'A' * 64 }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when includes abnormal character' do
+ let(:gcp_cluster_name) { '!!!!!!' }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when validates gcp_cluster_size' do
+ let(:cluster) { build(:gcp_cluster, gcp_cluster_size: gcp_cluster_size) }
+
+ context 'when valid' do
+ let(:gcp_cluster_size) { 1 }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when zero' do
+ let(:gcp_cluster_size) { 0 }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when validates project_namespace' do
+ let(:cluster) { build(:gcp_cluster, project_namespace: project_namespace) }
+
+ context 'when valid' do
+ let(:project_namespace) { 'default-namespace' }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when empty' do
+ let(:project_namespace) { '' }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when too long' do
+ let(:project_namespace) { 'A' * 64 }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when includes abnormal character' do
+ let(:project_namespace) { '!!!!!!' }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when validates restrict_modification' do
+ let(:cluster) { create(:gcp_cluster) }
+
+ before do
+ cluster.make_creating!
+ end
+
+ context 'when created' do
+ before do
+ cluster.make_created!
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when creating' do
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+
+ describe '#state_machine' do
+ let(:cluster) { build(:gcp_cluster) }
+
+ context 'when transits to created state' do
+ before do
+ cluster.gcp_token = 'tmp'
+ cluster.gcp_operation_id = 'tmp'
+ cluster.make_created!
+ end
+
+ it 'nullifies gcp_token and gcp_operation_id' do
+ expect(cluster.gcp_token).to be_nil
+ expect(cluster.gcp_operation_id).to be_nil
+ expect(cluster).to be_created
+ end
+ end
+
+ context 'when transits to errored state' do
+ let(:reason) { 'something wrong' }
+
+ before do
+ cluster.make_errored!(reason)
+ end
+
+ it 'sets status_reason' do
+ expect(cluster.status_reason).to eq(reason)
+ expect(cluster).to be_errored
+ end
+ end
+ end
+
+ describe '#project_namespace_placeholder' do
+ subject { cluster.project_namespace_placeholder }
+
+ let(:cluster) { create(:gcp_cluster) }
+
+ it 'returns a placeholder' do
+ is_expected.to eq("#{cluster.project.path}-#{cluster.project.id}")
+ end
+ end
+
+ describe '#on_creation?' do
+ subject { cluster.on_creation? }
+
+ let(:cluster) { create(:gcp_cluster) }
+
+ context 'when status is creating' do
+ before do
+ cluster.make_creating!
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when status is created' do
+ before do
+ cluster.make_created!
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#api_url' do
+ subject { cluster.api_url }
+
+ let(:cluster) { create(:gcp_cluster, :created_on_gke) }
+ let(:api_url) { 'https://' + cluster.endpoint }
+
+ it { is_expected.to eq(api_url) }
+ end
+
+ describe '#restrict_modification' do
+ subject { cluster.restrict_modification }
+
+ let(:cluster) { create(:gcp_cluster) }
+
+ context 'when status is created' do
+ before do
+ cluster.make_created!
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when status is creating' do
+ before do
+ cluster.make_creating!
+ end
+
+ it { is_expected.to be_falsey }
+
+ it 'sets an error' do
+ is_expected.to be_falsey
+ expect(cluster.errors).not_to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/models/gpg_key_spec.rb b/spec/models/gpg_key_spec.rb
index 9c99c3e5c08..33e6f1de3d1 100644
--- a/spec/models/gpg_key_spec.rb
+++ b/spec/models/gpg_key_spec.rb
@@ -3,6 +3,7 @@ require 'rails_helper'
describe GpgKey do
describe "associations" do
it { is_expected.to belong_to(:user) }
+ it { is_expected.to have_many(:subkeys) }
end
describe "validation" do
@@ -38,6 +39,14 @@ describe GpgKey do
expect(gpg_key.primary_keyid).to eq GpgHelpers::User1.primary_keyid
end
end
+
+ describe 'generate_subkeys' do
+ it 'extracts the subkeys from the gpg key' do
+ gpg_key = create(:gpg_key, key: GpgHelpers::User1.public_key_with_extra_signing_key)
+
+ expect(gpg_key.subkeys.count).to eq(2)
+ end
+ end
end
describe '#key=' do
@@ -90,11 +99,20 @@ describe GpgKey do
it 'email is verified if the user has the matching email' do
user = create :user, email: 'bette.cartwright@example.com'
gpg_key = create :gpg_key, key: GpgHelpers::User2.public_key, user: user
+ create :email, user: user
+ user.reload
expect(gpg_key.emails_with_verified_status).to eq(
'bette.cartwright@example.com' => true,
'bette.cartwright@example.net' => false
)
+
+ create :email, :confirmed, user: user, email: 'bette.cartwright@example.net'
+ user.reload
+ expect(gpg_key.emails_with_verified_status).to eq(
+ 'bette.cartwright@example.com' => true,
+ 'bette.cartwright@example.net' => true
+ )
end
end
@@ -138,17 +156,13 @@ describe GpgKey do
expect(gpg_key.verified?).to be_truthy
expect(gpg_key.verified_and_belongs_to_email?('bette.cartwright@example.com')).to be_truthy
end
- end
- describe 'notification', :mailer do
- let(:user) { create(:user) }
-
- it 'sends a notification' do
- perform_enqueued_jobs do
- create(:gpg_key, user: user)
- end
+ it 'returns true if one of the email addresses in the key belongs to the user and case-insensitively matches the provided email' do
+ user = create :user, email: 'bette.cartwright@example.com'
+ gpg_key = create :gpg_key, key: GpgHelpers::User2.public_key, user: user
- should_email(user)
+ expect(gpg_key.verified?).to be_truthy
+ expect(gpg_key.verified_and_belongs_to_email?('Bette.Cartwright@example.com')).to be_truthy
end
end
@@ -177,5 +191,29 @@ describe GpgKey do
expect(unrelated_gpg_key.destroyed?).to be false
end
+
+ it 'deletes all the associated subkeys' do
+ gpg_key = create :gpg_key, key: GpgHelpers::User3.public_key
+
+ expect(gpg_key.subkeys).to be_present
+
+ gpg_key.revoke
+
+ expect(gpg_key.subkeys(true)).to be_blank
+ end
+
+ it 'invalidates all signatures associated to the subkeys' do
+ gpg_key = create :gpg_key, key: GpgHelpers::User3.public_key
+ gpg_key_subkey = gpg_key.subkeys.last
+ gpg_signature = create :gpg_signature, verification_status: :verified, gpg_key: gpg_key_subkey
+
+ gpg_key.revoke
+
+ expect(gpg_signature.reload).to have_attributes(
+ verification_status: 'unknown_key',
+ gpg_key: nil,
+ gpg_key_subkey: nil
+ )
+ end
end
end
diff --git a/spec/models/gpg_key_subkey_spec.rb b/spec/models/gpg_key_subkey_spec.rb
new file mode 100644
index 00000000000..3c86837f47f
--- /dev/null
+++ b/spec/models/gpg_key_subkey_spec.rb
@@ -0,0 +1,15 @@
+require 'rails_helper'
+
+describe GpgKeySubkey do
+ subject { build(:gpg_key_subkey) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:gpg_key) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:gpg_key_id) }
+ it { is_expected.to validate_presence_of(:fingerprint) }
+ it { is_expected.to validate_presence_of(:keyid) }
+ end
+end
diff --git a/spec/models/gpg_signature_spec.rb b/spec/models/gpg_signature_spec.rb
index c58fd46762a..0136bb61c07 100644
--- a/spec/models/gpg_signature_spec.rb
+++ b/spec/models/gpg_signature_spec.rb
@@ -1,9 +1,17 @@
require 'rails_helper'
RSpec.describe GpgSignature do
+ let(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
+ let!(:project) { create(:project, :repository, path: 'sample-project') }
+ let!(:commit) { create(:commit, project: project, sha: commit_sha) }
+ let(:gpg_signature) { create(:gpg_signature, commit_sha: commit_sha) }
+ let(:gpg_key) { create(:gpg_key) }
+ let(:gpg_key_subkey) { create(:gpg_key_subkey) }
+
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:gpg_key) }
+ it { is_expected.to belong_to(:gpg_key_subkey) }
end
describe 'validation' do
@@ -15,14 +23,48 @@ RSpec.describe GpgSignature do
describe '#commit' do
it 'fetches the commit through the project' do
- commit_sha = '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
- project = create :project, :repository
- commit = create :commit, project: project
- gpg_signature = create :gpg_signature, commit_sha: commit_sha
-
expect_any_instance_of(Project).to receive(:commit).with(commit_sha).and_return(commit)
gpg_signature.commit
end
end
+
+ describe '#gpg_key=' do
+ it 'supports the assignment of a GpgKey' do
+ gpg_signature = create(:gpg_signature, gpg_key: gpg_key)
+
+ expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKey)
+ end
+
+ it 'supports the assignment of a GpgKeySubkey' do
+ gpg_signature = create(:gpg_signature, gpg_key: gpg_key_subkey)
+
+ expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKeySubkey)
+ end
+
+ it 'clears gpg_key and gpg_key_subkey_id when passing nil' do
+ gpg_signature.update_attribute(:gpg_key, nil)
+
+ expect(gpg_signature.gpg_key_id).to be_nil
+ expect(gpg_signature.gpg_key_subkey_id).to be_nil
+ end
+ end
+
+ describe '#gpg_commit' do
+ context 'when commit does not exist' do
+ it 'returns nil' do
+ allow(gpg_signature).to receive(:commit).and_return(nil)
+
+ expect(gpg_signature.gpg_commit).to be_nil
+ end
+ end
+
+ context 'when commit exists' do
+ it 'returns an instance of Gitlab::Gpg::Commit' do
+ allow(gpg_signature).to receive(:commit).and_return(commit)
+
+ expect(gpg_signature.gpg_commit).to be_an_instance_of(Gitlab::Gpg::Commit)
+ end
+ end
+ end
end
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index 96baeaff0a4..81c2057e175 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -37,30 +37,17 @@ describe Key, :mailer do
end
describe "#update_last_used_at" do
- let(:key) { create(:key) }
-
- context 'when key was not updated during the last day' do
- before do
- allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain)
- .and_return('000000')
- end
-
- it 'enqueues a UseKeyWorker job' do
- expect(UseKeyWorker).to receive(:perform_async).with(key.id)
- key.update_last_used_at
- end
- end
+ it 'updates the last used timestamp' do
+ key = build(:key)
+ service = double(:service)
+
+ expect(Keys::LastUsedService).to receive(:new)
+ .with(key)
+ .and_return(service)
- context 'when key was updated during the last day' do
- before do
- allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain)
- .and_return(false)
- end
+ expect(service).to receive(:execute)
- it 'does not enqueue a UseKeyWorker job' do
- expect(UseKeyWorker).not_to receive(:perform_async)
- key.update_last_used_at
- end
+ key.update_last_used_at
end
end
end
@@ -168,17 +155,15 @@ describe Key, :mailer do
it 'strips white spaces' do
expect(described_class.new(key: " #{valid_key} ").key).to eq(valid_key)
end
- end
- describe 'notification' do
- let(:user) { create(:user) }
+ it 'invalidates the public_key attribute' do
+ key = build(:key)
- it 'sends a notification' do
- perform_enqueued_jobs do
- create(:key, user: user)
- end
+ original = key.public_key
+ key.key = valid_key
- should_email(user)
+ expect(original.key_text).not_to be_nil
+ expect(key.public_key.key_text).to eq(valid_key)
end
end
end
diff --git a/spec/models/lfs_objects_project_spec.rb b/spec/models/lfs_objects_project_spec.rb
index d24d4cf7695..0a3180f43e8 100644
--- a/spec/models/lfs_objects_project_spec.rb
+++ b/spec/models/lfs_objects_project_spec.rb
@@ -1,8 +1,11 @@
require 'spec_helper'
describe LfsObjectsProject do
- subject { create(:lfs_objects_project, project: project) }
- let(:project) { create(:project) }
+ set(:project) { create(:project) }
+
+ subject do
+ create(:lfs_objects_project, project: project)
+ end
describe 'associations' do
it { is_expected.to belong_to(:project) }
@@ -11,9 +14,13 @@ describe LfsObjectsProject do
describe 'validation' do
it { is_expected.to validate_presence_of(:lfs_object_id) }
- it { is_expected.to validate_uniqueness_of(:lfs_object_id).scoped_to(:project_id).with_message("already exists in project") }
-
it { is_expected.to validate_presence_of(:project_id) }
+
+ it 'validates object id' do
+ is_expected.to validate_uniqueness_of(:lfs_object_id)
+ .scoped_to(:project_id)
+ .with_message("already exists in project")
+ end
end
describe '#update_project_statistics' do
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index a07ce05a865..0a017c068ad 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -488,7 +488,7 @@ describe Member do
member.accept_invite!(user)
end
- it "refreshes user's authorized projects", truncate: true do
+ it "refreshes user's authorized projects", :truncate do
project = member.source
expect(user.authorized_projects).not_to include(project)
@@ -523,7 +523,7 @@ describe Member do
end
end
- describe "destroying a record", truncate: true do
+ describe "destroying a record", :truncate do
it "refreshes user's authorized projects" do
project = create(:project, :private)
user = create(:user)
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index d80d5657c42..950af653c80 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -2,6 +2,7 @@ require 'spec_helper'
describe MergeRequest do
include RepoHelpers
+ include ProjectForksHelper
subject { create(:merge_request) }
@@ -49,6 +50,19 @@ describe MergeRequest do
expect(subject).to be_valid
end
end
+
+ context 'for forks' do
+ let(:project) { create(:project) }
+ let(:fork1) { fork_project(project) }
+ let(:fork2) { fork_project(project) }
+
+ it 'allows merge requests for sibling-forks' do
+ subject.source_project = fork1
+ subject.target_project = fork2
+
+ expect(subject).to be_valid
+ end
+ end
end
describe 'respond to' do
@@ -672,7 +686,7 @@ describe MergeRequest do
describe '#diverged_commits_count' do
let(:project) { create(:project, :repository) }
- let(:fork_project) { create(:project, :repository, forked_from_project: project) }
+ let(:forked_project) { fork_project(project, nil, repository: true) }
context 'when the target branch does not exist anymore' do
subject { create(:merge_request, source_project: project, target_project: project) }
@@ -700,7 +714,7 @@ describe MergeRequest do
end
context 'diverged on fork' do
- subject(:merge_request_fork_with_divergence) { create(:merge_request, :diverged, source_project: fork_project, target_project: project) }
+ subject(:merge_request_fork_with_divergence) { create(:merge_request, :diverged, source_project: forked_project, target_project: project) }
it 'counts commits that are on target branch but not on source branch' do
expect(subject.diverged_commits_count).to eq(29)
@@ -708,7 +722,7 @@ describe MergeRequest do
end
context 'rebased on fork' do
- subject(:merge_request_rebased) { create(:merge_request, :rebased, source_project: fork_project, target_project: project) }
+ subject(:merge_request_rebased) { create(:merge_request, :rebased, source_project: forked_project, target_project: project) }
it 'counts commits that are on target branch but not on source branch' do
expect(subject.diverged_commits_count).to eq(0)
@@ -791,6 +805,49 @@ describe MergeRequest do
end
end
+ describe '#has_ci?' do
+ let(:merge_request) { build_stubbed(:merge_request) }
+
+ context 'has ci' do
+ it 'returns true if MR has head_pipeline_id and commits' do
+ allow(merge_request).to receive_message_chain(:source_project, :ci_service) { nil }
+ allow(merge_request).to receive(:head_pipeline_id) { double }
+ allow(merge_request).to receive(:has_no_commits?) { false }
+
+ expect(merge_request.has_ci?).to be(true)
+ end
+
+ it 'returns true if MR has any pipeline and commits' do
+ allow(merge_request).to receive_message_chain(:source_project, :ci_service) { nil }
+ allow(merge_request).to receive(:head_pipeline_id) { nil }
+ allow(merge_request).to receive(:has_no_commits?) { false }
+ allow(merge_request).to receive(:all_pipelines) { [double] }
+
+ expect(merge_request.has_ci?).to be(true)
+ end
+
+ it 'returns true if MR has CI service and commits' do
+ allow(merge_request).to receive_message_chain(:source_project, :ci_service) { double }
+ allow(merge_request).to receive(:head_pipeline_id) { nil }
+ allow(merge_request).to receive(:has_no_commits?) { false }
+ allow(merge_request).to receive(:all_pipelines) { [] }
+
+ expect(merge_request.has_ci?).to be(true)
+ end
+ end
+
+ context 'has no ci' do
+ it 'returns false if MR has no CI service nor pipeline, and no commits' do
+ allow(merge_request).to receive_message_chain(:source_project, :ci_service) { nil }
+ allow(merge_request).to receive(:head_pipeline_id) { nil }
+ allow(merge_request).to receive(:all_pipelines) { [] }
+ allow(merge_request).to receive(:has_no_commits?) { true }
+
+ expect(merge_request.has_ci?).to be(false)
+ end
+ end
+ end
+
describe '#all_pipelines' do
shared_examples 'returning pipelines with proper ordering' do
let!(:all_pipelines) do
@@ -1214,11 +1271,7 @@ describe MergeRequest do
end
context 'with environments on source project' do
- let(:source_project) do
- create(:project, :repository) do |fork_project|
- fork_project.create_forked_project_link(forked_to_project_id: fork_project.id, forked_from_project_id: project.id)
- end
- end
+ let(:source_project) { fork_project(project, nil, repository: true) }
let(:merge_request) do
create(:merge_request,
@@ -1382,14 +1435,14 @@ describe MergeRequest do
describe "#source_project_missing?" do
let(:project) { create(:project) }
- let(:fork_project) { create(:project, forked_from_project: project) }
+ let(:forked_project) { fork_project(project) }
let(:user) { create(:user) }
- let(:unlink_project) { Projects::UnlinkForkService.new(fork_project, user) }
+ let(:unlink_project) { Projects::UnlinkForkService.new(forked_project, user) }
context "when the fork exists" do
let(:merge_request) do
create(:merge_request,
- source_project: fork_project,
+ source_project: forked_project,
target_project: project)
end
@@ -1403,9 +1456,9 @@ describe MergeRequest do
end
context "when the fork does not exist" do
- let(:merge_request) do
+ let!(:merge_request) do
create(:merge_request,
- source_project: fork_project,
+ source_project: forked_project,
target_project: project)
end
@@ -1428,14 +1481,14 @@ describe MergeRequest do
describe "#closed_without_fork?" do
let(:project) { create(:project) }
- let(:fork_project) { create(:project, forked_from_project: project) }
+ let(:forked_project) { fork_project(project) }
let(:user) { create(:user) }
- let(:unlink_project) { Projects::UnlinkForkService.new(fork_project, user) }
+ let(:unlink_project) { Projects::UnlinkForkService.new(forked_project, user) }
context "when the merge request is closed" do
let(:closed_merge_request) do
create(:closed_merge_request,
- source_project: fork_project,
+ source_project: forked_project,
target_project: project)
end
@@ -1454,7 +1507,7 @@ describe MergeRequest do
context "when the merge request is open" do
let(:open_merge_request) do
create(:merge_request,
- source_project: fork_project,
+ source_project: forked_project,
target_project: project)
end
@@ -1473,24 +1526,24 @@ describe MergeRequest do
end
context 'forked project' do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :public) }
let(:user) { create(:user) }
- let(:fork_project) { create(:project, forked_from_project: project, namespace: user.namespace) }
+ let(:forked_project) { fork_project(project, user) }
let!(:merge_request) do
create(:closed_merge_request,
- source_project: fork_project,
+ source_project: forked_project,
target_project: project)
end
it 'returns false if unforked' do
- Projects::UnlinkForkService.new(fork_project, user).execute
+ Projects::UnlinkForkService.new(forked_project, user).execute
expect(merge_request.reload.reopenable?).to be_falsey
end
it 'returns false if the source project is deleted' do
- Projects::DestroyService.new(fork_project, user).execute
+ Projects::DestroyService.new(forked_project, user).execute
expect(merge_request.reload.reopenable?).to be_falsey
end
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index d3da0107d5c..13e37fffa4e 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -238,7 +238,7 @@ describe Milestone do
let(:milestone) { build_stubbed(:milestone, iid: 1, project: project, name: 'milestone') }
it 'returns a String reference to the object' do
- expect(milestone.to_reference).to eq '%1'
+ expect(milestone.to_reference).to eq '%"milestone"'
end
it 'returns a reference by name when the format is set to :name' do
@@ -246,24 +246,29 @@ describe Milestone do
end
it 'supports a cross-project reference' do
- expect(milestone.to_reference(another_project)).to eq 'sample-project%1'
+ expect(milestone.to_reference(another_project)).to eq 'sample-project%"milestone"'
end
end
context 'for a group milestone' do
let(:milestone) { build_stubbed(:milestone, iid: 1, group: group, name: 'milestone') }
- it 'returns nil with the default format' do
- expect(milestone.to_reference).to be_nil
+ it 'returns a group milestone reference with a default format' do
+ expect(milestone.to_reference).to eq '%"milestone"'
end
it 'returns a reference by name when the format is set to :name' do
expect(milestone.to_reference(format: :name)).to eq '%"milestone"'
end
- it 'does not supports cross-project references' do
+ it 'supports cross-project references within a group' do
expect(milestone.to_reference(another_project, format: :name)).to eq '%"milestone"'
end
+
+ it 'raises an error when using iid format' do
+ expect { milestone.to_reference(format: :iid) }
+ .to raise_error(ArgumentError, 'Cannot refer to a group milestone by an internal id!')
+ end
end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 81d5ab7a6d3..2ebf6acd42a 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Namespace do
+ include ProjectForksHelper
+
let!(:namespace) { create(:namespace) }
describe 'associations' do
@@ -152,22 +154,24 @@ describe Namespace do
end
describe '#move_dir' do
+ let(:namespace) { create(:namespace) }
+ let!(:project) { create(:project_empty_repo, namespace: namespace) }
+
before do
- @namespace = create :namespace
- @project = create(:project_empty_repo, namespace: @namespace)
- allow(@namespace).to receive(:path_changed?).and_return(true)
+ allow(namespace).to receive(:path_changed?).and_return(true)
end
it "raises error when directory exists" do
- expect { @namespace.move_dir }.to raise_error("namespace directory cannot be moved")
+ expect { namespace.move_dir }.to raise_error("namespace directory cannot be moved")
end
it "moves dir if path changed" do
- new_path = @namespace.full_path + "_new"
- allow(@namespace).to receive(:full_path_was).and_return(@namespace.full_path)
- allow(@namespace).to receive(:full_path).and_return(new_path)
- expect(@namespace).to receive(:remove_exports!)
- expect(@namespace.move_dir).to be_truthy
+ new_path = namespace.full_path + "_new"
+
+ allow(namespace).to receive(:full_path_was).and_return(namespace.full_path)
+ allow(namespace).to receive(:full_path).and_return(new_path)
+ expect(namespace).to receive(:remove_exports!)
+ expect(namespace.move_dir).to be_truthy
end
context "when any project has container images" do
@@ -177,14 +181,14 @@ describe Namespace do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: ['tag'])
- create(:project, namespace: @namespace, container_repositories: [container_repository])
+ create(:project, namespace: namespace, container_repositories: [container_repository])
- allow(@namespace).to receive(:path_was).and_return(@namespace.path)
- allow(@namespace).to receive(:path).and_return('new_path')
+ allow(namespace).to receive(:path_was).and_return(namespace.path)
+ allow(namespace).to receive(:path).and_return('new_path')
end
it 'raises an error about not movable project' do
- expect { @namespace.move_dir }.to raise_error(/Namespace cannot be moved/)
+ expect { namespace.move_dir }.to raise_error(/Namespace cannot be moved/)
end
end
@@ -518,4 +522,25 @@ describe Namespace do
end
end
end
+
+ describe '#has_forks_of?' do
+ let(:project) { create(:project, :public) }
+ let!(:forked_project) { fork_project(project, namespace.owner, namespace: namespace) }
+
+ before do
+ # Reset the fork network relation
+ project.reload
+ end
+
+ it 'knows if there is a direct fork in the namespace' do
+ expect(namespace.find_fork_of(project)).to eq(forked_project)
+ end
+
+ it 'knows when there is a fork-of-fork in the namespace' do
+ other_namespace = create(:namespace)
+ other_fork = fork_project(forked_project, other_namespace.owner, namespace: other_namespace)
+
+ expect(other_namespace.find_fork_of(project)).to eq(other_fork)
+ end
+ end
end
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index b214074fdce..1ecb50586c7 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -314,6 +314,56 @@ describe Note do
expect(subject[active_diff_note1.line_code].first.id).to eq(active_diff_note1.discussion_id)
expect(subject[active_diff_note3.line_code].first.id).to eq(active_diff_note3.discussion_id)
end
+
+ context 'with image discussions' do
+ let(:merge_request2) { create(:merge_request_with_diffs, :with_image_diffs, source_project: project, title: "Added images and changes") }
+ let(:image_path) { "files/images/ee_repo_logo.png" }
+ let(:text_path) { "bar/branch-test.txt" }
+ let!(:image_note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request2, position: image_position) }
+ let!(:text_note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request2, position: text_position) }
+
+ let(:image_position) do
+ Gitlab::Diff::Position.new(
+ old_path: image_path,
+ new_path: image_path,
+ width: 100,
+ height: 100,
+ x: 1,
+ y: 1,
+ position_type: "image",
+ diff_refs: merge_request2.diff_refs
+ )
+ end
+
+ let(:text_position) do
+ Gitlab::Diff::Position.new(
+ old_path: text_path,
+ new_path: text_path,
+ old_line: nil,
+ new_line: 2,
+ position_type: "text",
+ diff_refs: merge_request2.diff_refs
+ )
+ end
+
+ it "groups image discussions by file identifier" do
+ diff_discussion = DiffDiscussion.new([image_note])
+
+ discussions = merge_request2.notes.grouped_diff_discussions
+
+ expect(discussions.size).to eq(2)
+ expect(discussions[image_note.diff_file.new_path]).to include(diff_discussion)
+ end
+
+ it "groups text discussions by line code" do
+ diff_discussion = DiffDiscussion.new([text_note])
+
+ discussions = merge_request2.notes.grouped_diff_discussions
+
+ expect(discussions.size).to eq(2)
+ expect(discussions[text_note.line_code]).to include(diff_discussion)
+ end
+ end
end
context 'diff discussions for older diff refs' do
diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb
index b2f2a3ce914..01440b15674 100644
--- a/spec/models/personal_access_token_spec.rb
+++ b/spec/models/personal_access_token_spec.rb
@@ -41,7 +41,7 @@ describe PersonalAccessToken do
it 'revokes the token' do
active_personal_access_token.revoke!
- expect(active_personal_access_token.revoked?).to be true
+ expect(active_personal_access_token).to be_revoked
end
end
@@ -61,10 +61,37 @@ describe PersonalAccessToken do
expect(personal_access_token).to be_valid
end
- it "allows creating a token with read_registry scope" do
- personal_access_token.scopes = [:read_registry]
+ context 'when registry is disabled' do
+ before do
+ stub_container_registry_config(enabled: false)
+ end
- expect(personal_access_token).to be_valid
+ it "rejects creating a token with read_registry scope" do
+ personal_access_token.scopes = [:read_registry]
+
+ expect(personal_access_token).not_to be_valid
+ expect(personal_access_token.errors[:scopes].first).to eq "can only contain available scopes"
+ end
+
+ it "allows revoking a token with read_registry scope" do
+ personal_access_token.scopes = [:read_registry]
+
+ personal_access_token.revoke!
+
+ expect(personal_access_token).to be_revoked
+ end
+ end
+
+ context 'when registry is enabled' do
+ before do
+ stub_container_registry_config(enabled: true)
+ end
+
+ it "allows creating a token with read_registry scope" do
+ personal_access_token.scopes = [:read_registry]
+
+ expect(personal_access_token).to be_valid
+ end
end
it "rejects creating a token with unavailable scopes" do
diff --git a/spec/models/project_auto_devops_spec.rb b/spec/models/project_auto_devops_spec.rb
index ca13af4d73e..12069575866 100644
--- a/spec/models/project_auto_devops_spec.rb
+++ b/spec/models/project_auto_devops_spec.rb
@@ -8,7 +8,21 @@ describe ProjectAutoDevops do
it { is_expected.to respond_to(:created_at) }
it { is_expected.to respond_to(:updated_at) }
- describe 'variables' do
+ describe '#has_domain?' do
+ context 'when domain is defined' do
+ let(:auto_devops) { build_stubbed(:project_auto_devops, project: project, domain: 'domain.com') }
+
+ it { expect(auto_devops).to have_domain }
+ end
+
+ context 'when domain is empty' do
+ let(:auto_devops) { build_stubbed(:project_auto_devops, project: project, domain: '') }
+
+ it { expect(auto_devops).not_to have_domain }
+ end
+ end
+
+ describe '#variables' do
let(:auto_devops) { build_stubbed(:project_auto_devops, project: project, domain: domain) }
context 'when domain is defined' do
diff --git a/spec/models/project_group_link_spec.rb b/spec/models/project_group_link_spec.rb
index b3513c80150..41e2ab20d69 100644
--- a/spec/models/project_group_link_spec.rb
+++ b/spec/models/project_group_link_spec.rb
@@ -30,7 +30,7 @@ describe ProjectGroupLink do
end
end
- describe "destroying a record", truncate: true do
+ describe "destroying a record", :truncate do
it "refreshes group users' authorized projects" do
project = create(:project, :private)
group = create(:group)
diff --git a/spec/models/project_services/chat_message/issue_message_spec.rb b/spec/models/project_services/chat_message/issue_message_spec.rb
index 4bb1db684e6..d37726dc3f1 100644
--- a/spec/models/project_services/chat_message/issue_message_spec.rb
+++ b/spec/models/project_services/chat_message/issue_message_spec.rb
@@ -42,7 +42,7 @@ describe ChatMessage::IssueMessage do
context 'open' do
it 'returns a message regarding opening of issues' do
expect(subject.pretext).to eq(
- '[<http://somewhere.com|project_name>] Issue opened by test.user')
+ '[<http://somewhere.com|project_name>] Issue opened by Test User (test.user)')
expect(subject.attachments).to eq([
{
title: "#100 Issue title",
@@ -62,7 +62,7 @@ describe ChatMessage::IssueMessage do
it 'returns a message regarding closing of issues' do
expect(subject.pretext). to eq(
- '[<http://somewhere.com|project_name>] Issue <http://url.com|#100 Issue title> closed by test.user')
+ '[<http://somewhere.com|project_name>] Issue <http://url.com|#100 Issue title> closed by Test User (test.user)')
expect(subject.attachments).to be_empty
end
end
@@ -76,10 +76,10 @@ describe ChatMessage::IssueMessage do
context 'open' do
it 'returns a message regarding opening of issues' do
expect(subject.pretext).to eq(
- '[[project_name](http://somewhere.com)] Issue opened by test.user')
+ '[[project_name](http://somewhere.com)] Issue opened by Test User (test.user)')
expect(subject.attachments).to eq('issue description')
expect(subject.activity).to eq({
- title: 'Issue opened by test.user',
+ title: 'Issue opened by Test User (test.user)',
subtitle: 'in [project_name](http://somewhere.com)',
text: '[#100 Issue title](http://url.com)',
image: 'http://someavatar.com'
@@ -95,10 +95,10 @@ describe ChatMessage::IssueMessage do
it 'returns a message regarding closing of issues' do
expect(subject.pretext). to eq(
- '[[project_name](http://somewhere.com)] Issue [#100 Issue title](http://url.com) closed by test.user')
+ '[[project_name](http://somewhere.com)] Issue [#100 Issue title](http://url.com) closed by Test User (test.user)')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq({
- title: 'Issue closed by test.user',
+ title: 'Issue closed by Test User (test.user)',
subtitle: 'in [project_name](http://somewhere.com)',
text: '[#100 Issue title](http://url.com)',
image: 'http://someavatar.com'
diff --git a/spec/models/project_services/chat_message/merge_message_spec.rb b/spec/models/project_services/chat_message/merge_message_spec.rb
index b600a36f578..184a07ae0f9 100644
--- a/spec/models/project_services/chat_message/merge_message_spec.rb
+++ b/spec/models/project_services/chat_message/merge_message_spec.rb
@@ -33,7 +33,7 @@ describe ChatMessage::MergeMessage do
context 'open' do
it 'returns a message regarding opening of merge requests' do
expect(subject.pretext).to eq(
- 'test.user opened <http://somewhere.com/merge_requests/100|!100 *Merge Request title*> in <http://somewhere.com|project_name>: *Merge Request title*')
+ 'Test User (test.user) opened <http://somewhere.com/merge_requests/100|!100 *Merge Request title*> in <http://somewhere.com|project_name>: *Merge Request title*')
expect(subject.attachments).to be_empty
end
end
@@ -44,7 +44,7 @@ describe ChatMessage::MergeMessage do
end
it 'returns a message regarding closing of merge requests' do
expect(subject.pretext).to eq(
- 'test.user closed <http://somewhere.com/merge_requests/100|!100 *Merge Request title*> in <http://somewhere.com|project_name>: *Merge Request title*')
+ 'Test User (test.user) closed <http://somewhere.com/merge_requests/100|!100 *Merge Request title*> in <http://somewhere.com|project_name>: *Merge Request title*')
expect(subject.attachments).to be_empty
end
end
@@ -58,10 +58,10 @@ describe ChatMessage::MergeMessage do
context 'open' do
it 'returns a message regarding opening of merge requests' do
expect(subject.pretext).to eq(
- 'test.user opened [!100 *Merge Request title*](http://somewhere.com/merge_requests/100) in [project_name](http://somewhere.com): *Merge Request title*')
+ 'Test User (test.user) opened [!100 *Merge Request title*](http://somewhere.com/merge_requests/100) in [project_name](http://somewhere.com): *Merge Request title*')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq({
- title: 'Merge Request opened by test.user',
+ title: 'Merge Request opened by Test User (test.user)',
subtitle: 'in [project_name](http://somewhere.com)',
text: '[!100 *Merge Request title*](http://somewhere.com/merge_requests/100)',
image: 'http://someavatar.com'
@@ -76,10 +76,10 @@ describe ChatMessage::MergeMessage do
it 'returns a message regarding closing of merge requests' do
expect(subject.pretext).to eq(
- 'test.user closed [!100 *Merge Request title*](http://somewhere.com/merge_requests/100) in [project_name](http://somewhere.com): *Merge Request title*')
+ 'Test User (test.user) closed [!100 *Merge Request title*](http://somewhere.com/merge_requests/100) in [project_name](http://somewhere.com): *Merge Request title*')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq({
- title: 'Merge Request closed by test.user',
+ title: 'Merge Request closed by Test User (test.user)',
subtitle: 'in [project_name](http://somewhere.com)',
text: '[!100 *Merge Request title*](http://somewhere.com/merge_requests/100)',
image: 'http://someavatar.com'
diff --git a/spec/models/project_services/chat_message/note_message_spec.rb b/spec/models/project_services/chat_message/note_message_spec.rb
index a09c2f9935c..5abbd7bec18 100644
--- a/spec/models/project_services/chat_message/note_message_spec.rb
+++ b/spec/models/project_services/chat_message/note_message_spec.rb
@@ -38,7 +38,7 @@ describe ChatMessage::NoteMessage do
context 'without markdown' do
it 'returns a message regarding notes on commits' do
- expect(subject.pretext).to eq("test.user <http://url.com|commented on " \
+ expect(subject.pretext).to eq("Test User (test.user) <http://url.com|commented on " \
"commit 5f163b2b> in <http://somewhere.com|project_name>: " \
"*Added a commit message*")
expect(subject.attachments).to eq([{
@@ -55,11 +55,11 @@ describe ChatMessage::NoteMessage do
it 'returns a message regarding notes on commits' do
expect(subject.pretext).to eq(
- 'test.user [commented on commit 5f163b2b](http://url.com) in [project_name](http://somewhere.com): *Added a commit message*'
+ 'Test User (test.user) [commented on commit 5f163b2b](http://url.com) in [project_name](http://somewhere.com): *Added a commit message*'
)
expect(subject.attachments).to eq('comment on a commit')
expect(subject.activity).to eq({
- title: 'test.user [commented on commit 5f163b2b](http://url.com)',
+ title: 'Test User (test.user) [commented on commit 5f163b2b](http://url.com)',
subtitle: 'in [project_name](http://somewhere.com)',
text: 'Added a commit message',
image: 'http://fakeavatar'
@@ -81,7 +81,7 @@ describe ChatMessage::NoteMessage do
context 'without markdown' do
it 'returns a message regarding notes on a merge request' do
- expect(subject.pretext).to eq("test.user <http://url.com|commented on " \
+ expect(subject.pretext).to eq("Test User (test.user) <http://url.com|commented on " \
"merge request !30> in <http://somewhere.com|project_name>: " \
"*merge request title*")
expect(subject.attachments).to eq([{
@@ -98,10 +98,10 @@ describe ChatMessage::NoteMessage do
it 'returns a message regarding notes on a merge request' do
expect(subject.pretext).to eq(
- 'test.user [commented on merge request !30](http://url.com) in [project_name](http://somewhere.com): *merge request title*')
+ 'Test User (test.user) [commented on merge request !30](http://url.com) in [project_name](http://somewhere.com): *merge request title*')
expect(subject.attachments).to eq('comment on a merge request')
expect(subject.activity).to eq({
- title: 'test.user [commented on merge request !30](http://url.com)',
+ title: 'Test User (test.user) [commented on merge request !30](http://url.com)',
subtitle: 'in [project_name](http://somewhere.com)',
text: 'merge request title',
image: 'http://fakeavatar'
@@ -124,7 +124,7 @@ describe ChatMessage::NoteMessage do
context 'without markdown' do
it 'returns a message regarding notes on an issue' do
expect(subject.pretext).to eq(
- "test.user <http://url.com|commented on " \
+ "Test User (test.user) <http://url.com|commented on " \
"issue #20> in <http://somewhere.com|project_name>: " \
"*issue title*")
expect(subject.attachments).to eq([{
@@ -141,10 +141,10 @@ describe ChatMessage::NoteMessage do
it 'returns a message regarding notes on an issue' do
expect(subject.pretext).to eq(
- 'test.user [commented on issue #20](http://url.com) in [project_name](http://somewhere.com): *issue title*')
+ 'Test User (test.user) [commented on issue #20](http://url.com) in [project_name](http://somewhere.com): *issue title*')
expect(subject.attachments).to eq('comment on an issue')
expect(subject.activity).to eq({
- title: 'test.user [commented on issue #20](http://url.com)',
+ title: 'Test User (test.user) [commented on issue #20](http://url.com)',
subtitle: 'in [project_name](http://somewhere.com)',
text: 'issue title',
image: 'http://fakeavatar'
@@ -165,7 +165,7 @@ describe ChatMessage::NoteMessage do
context 'without markdown' do
it 'returns a message regarding notes on a project snippet' do
- expect(subject.pretext).to eq("test.user <http://url.com|commented on " \
+ expect(subject.pretext).to eq("Test User (test.user) <http://url.com|commented on " \
"snippet $5> in <http://somewhere.com|project_name>: " \
"*snippet title*")
expect(subject.attachments).to eq([{
@@ -182,7 +182,7 @@ describe ChatMessage::NoteMessage do
it 'returns a message regarding notes on a project snippet' do
expect(subject.pretext).to eq(
- 'test.user [commented on snippet $5](http://url.com) in [project_name](http://somewhere.com): *snippet title*')
+ 'Test User (test.user) [commented on snippet $5](http://url.com) in [project_name](http://somewhere.com): *snippet title*')
expect(subject.attachments).to eq('comment on a snippet')
end
end
diff --git a/spec/models/project_services/chat_message/pipeline_message_spec.rb b/spec/models/project_services/chat_message/pipeline_message_spec.rb
index 43b02568cb9..0ff20400999 100644
--- a/spec/models/project_services/chat_message/pipeline_message_spec.rb
+++ b/spec/models/project_services/chat_message/pipeline_message_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe ChatMessage::PipelineMessage do
subject { described_class.new(args) }
- let(:user) { { name: 'hacker' } }
+ let(:user) { { name: "The Hacker", username: 'hacker' } }
let(:duration) { 7210 }
let(:args) do
{
@@ -22,12 +22,13 @@ describe ChatMessage::PipelineMessage do
user: user
}
end
+ let(:combined_name) { "The Hacker (hacker)" }
context 'without markdown' do
context 'pipeline succeeded' do
let(:status) { 'success' }
let(:color) { 'good' }
- let(:message) { build_message('passed') }
+ let(:message) { build_message('passed', combined_name) }
it 'returns a message with information about succeeded build' do
expect(subject.pretext).to be_empty
@@ -39,7 +40,7 @@ describe ChatMessage::PipelineMessage do
context 'pipeline failed' do
let(:status) { 'failed' }
let(:color) { 'danger' }
- let(:message) { build_message }
+ let(:message) { build_message(status, combined_name) }
it 'returns a message with information about failed build' do
expect(subject.pretext).to be_empty
@@ -75,13 +76,13 @@ describe ChatMessage::PipelineMessage do
context 'pipeline succeeded' do
let(:status) { 'success' }
let(:color) { 'good' }
- let(:message) { build_markdown_message('passed') }
+ let(:message) { build_markdown_message('passed', combined_name) }
it 'returns a message with information about succeeded build' do
expect(subject.pretext).to be_empty
expect(subject.attachments).to eq(message)
expect(subject.activity).to eq({
- title: 'Pipeline [#123](http://example.gitlab.com/pipelines/123) of branch [develop](http://example.gitlab.com/commits/develop) by hacker passed',
+ title: 'Pipeline [#123](http://example.gitlab.com/pipelines/123) of branch [develop](http://example.gitlab.com/commits/develop) by The Hacker (hacker) passed',
subtitle: 'in [project_name](http://example.gitlab.com)',
text: 'in 02:00:10',
image: ''
@@ -92,13 +93,13 @@ describe ChatMessage::PipelineMessage do
context 'pipeline failed' do
let(:status) { 'failed' }
let(:color) { 'danger' }
- let(:message) { build_markdown_message }
+ let(:message) { build_markdown_message(status, combined_name) }
it 'returns a message with information about failed build' do
expect(subject.pretext).to be_empty
expect(subject.attachments).to eq(message)
expect(subject.activity).to eq({
- title: 'Pipeline [#123](http://example.gitlab.com/pipelines/123) of branch [develop](http://example.gitlab.com/commits/develop) by hacker failed',
+ title: 'Pipeline [#123](http://example.gitlab.com/pipelines/123) of branch [develop](http://example.gitlab.com/commits/develop) by The Hacker (hacker) failed',
subtitle: 'in [project_name](http://example.gitlab.com)',
text: 'in 02:00:10',
image: ''
diff --git a/spec/models/project_services/chat_message/wiki_page_message_spec.rb b/spec/models/project_services/chat_message/wiki_page_message_spec.rb
index c4adee4f489..7efcba9bcfd 100644
--- a/spec/models/project_services/chat_message/wiki_page_message_spec.rb
+++ b/spec/models/project_services/chat_message/wiki_page_message_spec.rb
@@ -29,7 +29,7 @@ describe ChatMessage::WikiPageMessage do
it 'returns a message that a new wiki page was created' do
expect(subject.pretext).to eq(
- 'test.user created <http://url.com|wiki page> in <http://somewhere.com|project_name>: '\
+ 'Test User (test.user) created <http://url.com|wiki page> in <http://somewhere.com|project_name>: '\
'*Wiki page title*')
end
end
@@ -41,7 +41,7 @@ describe ChatMessage::WikiPageMessage do
it 'returns a message that a wiki page was updated' do
expect(subject.pretext).to eq(
- 'test.user edited <http://url.com|wiki page> in <http://somewhere.com|project_name>: '\
+ 'Test User (test.user) edited <http://url.com|wiki page> in <http://somewhere.com|project_name>: '\
'*Wiki page title*')
end
end
@@ -95,7 +95,7 @@ describe ChatMessage::WikiPageMessage do
it 'returns a message that a new wiki page was created' do
expect(subject.pretext).to eq(
- 'test.user created [wiki page](http://url.com) in [project_name](http://somewhere.com): *Wiki page title*')
+ 'Test User (test.user) created [wiki page](http://url.com) in [project_name](http://somewhere.com): *Wiki page title*')
end
end
@@ -106,7 +106,7 @@ describe ChatMessage::WikiPageMessage do
it 'returns a message that a wiki page was updated' do
expect(subject.pretext).to eq(
- 'test.user edited [wiki page](http://url.com) in [project_name](http://somewhere.com): *Wiki page title*')
+ 'Test User (test.user) edited [wiki page](http://url.com) in [project_name](http://somewhere.com): *Wiki page title*')
end
end
end
@@ -141,7 +141,7 @@ describe ChatMessage::WikiPageMessage do
it 'returns the attachment for a new wiki page' do
expect(subject.activity).to eq({
- title: 'test.user created [wiki page](http://url.com)',
+ title: 'Test User (test.user) created [wiki page](http://url.com)',
subtitle: 'in [project_name](http://somewhere.com)',
text: 'Wiki page title',
image: 'http://someavatar.com'
@@ -156,7 +156,7 @@ describe ChatMessage::WikiPageMessage do
it 'returns the attachment for an updated wiki page' do
expect(subject.activity).to eq({
- title: 'test.user edited [wiki page](http://url.com)',
+ title: 'Test User (test.user) edited [wiki page](http://url.com)',
subtitle: 'in [project_name](http://somewhere.com)',
text: 'Wiki page title',
image: 'http://someavatar.com'
diff --git a/spec/models/project_services/kubernetes_service_spec.rb b/spec/models/project_services/kubernetes_service_spec.rb
index 537cdadd528..2298dcab55f 100644
--- a/spec/models/project_services/kubernetes_service_spec.rb
+++ b/spec/models/project_services/kubernetes_service_spec.rb
@@ -208,7 +208,7 @@ describe KubernetesService, :use_clean_rails_memory_store_caching do
config.dig('users', 0, 'user')['token'] = 'token'
config.dig('contexts', 0, 'context')['namespace'] = namespace
config.dig('clusters', 0, 'cluster')['certificate-authority-data'] =
- Base64.encode64('CA PEM DATA')
+ Base64.strict_encode64('CA PEM DATA')
YAML.dump(config)
end
diff --git a/spec/models/project_services/pipelines_email_service_spec.rb b/spec/models/project_services/pipelines_email_service_spec.rb
index 5faab9ba38b..be07ca2d945 100644
--- a/spec/models/project_services/pipelines_email_service_spec.rb
+++ b/spec/models/project_services/pipelines_email_service_spec.rb
@@ -6,7 +6,8 @@ describe PipelinesEmailService, :mailer do
end
let(:project) { create(:project, :repository) }
- let(:recipient) { 'test@gitlab.com' }
+ let(:recipients) { 'test@gitlab.com' }
+ let(:receivers) { [recipients] }
let(:data) do
Gitlab::DataBuilder::Pipeline.build(pipeline)
@@ -48,18 +49,24 @@ describe PipelinesEmailService, :mailer do
shared_examples 'sending email' do
before do
+ subject.recipients = recipients
+
perform_enqueued_jobs do
run
end
end
it 'sends email' do
- should_only_email(double(notification_email: recipient), kind: :bcc)
+ emails = receivers.map { |r| double(notification_email: r) }
+
+ should_only_email(*emails, kind: :bcc)
end
end
shared_examples 'not sending email' do
before do
+ subject.recipients = recipients
+
perform_enqueued_jobs do
run
end
@@ -75,10 +82,6 @@ describe PipelinesEmailService, :mailer do
subject.test(data)
end
- before do
- subject.recipients = recipient
- end
-
context 'when pipeline is failed' do
before do
data[:object_attributes][:status] = 'failed'
@@ -104,10 +107,6 @@ describe PipelinesEmailService, :mailer do
end
context 'with recipients' do
- before do
- subject.recipients = recipient
- end
-
context 'with failed pipeline' do
before do
data[:object_attributes][:status] = 'failed'
@@ -152,9 +151,7 @@ describe PipelinesEmailService, :mailer do
end
context 'with empty recipients list' do
- before do
- subject.recipients = ' ,, '
- end
+ let(:recipients) { ' ,, ' }
context 'with failed pipeline' do
before do
@@ -165,5 +162,19 @@ describe PipelinesEmailService, :mailer do
it_behaves_like 'not sending email'
end
end
+
+ context 'with recipients list separated by newlines' do
+ let(:recipients) { "\ntest@gitlab.com, \r\nexample@gitlab.com" }
+ let(:receivers) { %w[test@gitlab.com example@gitlab.com] }
+
+ context 'with failed pipeline' do
+ before do
+ data[:object_attributes][:status] = 'failed'
+ pipeline.update(status: 'failed')
+ end
+
+ it_behaves_like 'sending email'
+ end
+ end
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 48fc77423ff..a26c71e5155 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -57,6 +57,7 @@ describe Project do
it { is_expected.to have_many(:commit_statuses) }
it { is_expected.to have_many(:pipelines) }
it { is_expected.to have_many(:builds) }
+ it { is_expected.to have_many(:build_trace_section_names) }
it { is_expected.to have_many(:runner_projects) }
it { is_expected.to have_many(:runners) }
it { is_expected.to have_many(:active_runners) }
@@ -76,6 +77,7 @@ describe Project do
it { is_expected.to have_many(:uploads).dependent(:destroy) }
it { is_expected.to have_many(:pipeline_schedules) }
it { is_expected.to have_many(:members_and_requesters) }
+ it { is_expected.to have_one(:cluster) }
context 'after initialized' do
it "has a project_feature" do
@@ -408,21 +410,23 @@ describe Project do
end
end
- describe '#repository_storage_path' do
- let(:project) { create(:project, repository_storage: 'custom') }
-
- before do
- FileUtils.mkdir('tmp/tests/custom_repositories')
- storages = { 'custom' => { 'path' => 'tmp/tests/custom_repositories' } }
- allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
+ describe '#merge_method' do
+ it 'returns "ff" merge_method when ff is enabled' do
+ project = build(:project, merge_requests_ff_only_enabled: true)
+ expect(project.merge_method).to be :ff
end
- after do
- FileUtils.rm_rf('tmp/tests/custom_repositories')
+ it 'returns "merge" merge_method when ff is disabled' do
+ project = build(:project, merge_requests_ff_only_enabled: false)
+ expect(project.merge_method).to be :merge
end
+ end
+
+ describe '#repository_storage_path' do
+ let(:project) { create(:project) }
it 'returns the repository storage path' do
- expect(project.repository_storage_path).to eq('tmp/tests/custom_repositories')
+ expect(Dir.exist?(project.repository_storage_path)).to be(true)
end
end
@@ -689,6 +693,44 @@ describe Project do
project.cache_has_external_issue_tracker
end.to change { project.has_external_issue_tracker}.to(false)
end
+
+ it 'does not cache data when in a read-only GitLab instance' do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+
+ expect do
+ project.cache_has_external_issue_tracker
+ end.not_to change { project.has_external_issue_tracker }
+ end
+ end
+
+ describe '#cache_has_external_wiki' do
+ let(:project) { create(:project, has_external_wiki: nil) }
+
+ it 'stores true if there are any external_wikis' do
+ services = double(:service, external_wikis: [ExternalWikiService.new])
+ expect(project).to receive(:services).and_return(services)
+
+ expect do
+ project.cache_has_external_wiki
+ end.to change { project.has_external_wiki}.to(true)
+ end
+
+ it 'stores false if there are no external_wikis' do
+ services = double(:service, external_wikis: [])
+ expect(project).to receive(:services).and_return(services)
+
+ expect do
+ project.cache_has_external_wiki
+ end.to change { project.has_external_wiki}.to(false)
+ end
+
+ it 'does not cache data when in a read-only GitLab instance' do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+
+ expect do
+ project.cache_has_external_wiki
+ end.not_to change { project.has_external_wiki }
+ end
end
describe '#has_wiki?' do
@@ -1303,7 +1345,7 @@ describe Project do
context 'using a regular repository' do
it 'creates the repository' do
expect(shell).to receive(:add_repository)
- .with(project.repository_storage_path, project.disk_path)
+ .with(project.repository_storage, project.disk_path)
.and_return(true)
expect(project.repository).to receive(:after_create)
@@ -1313,7 +1355,7 @@ describe Project do
it 'adds an error if the repository could not be created' do
expect(shell).to receive(:add_repository)
- .with(project.repository_storage_path, project.disk_path)
+ .with(project.repository_storage, project.disk_path)
.and_return(false)
expect(project.repository).not_to receive(:after_create)
@@ -1370,7 +1412,7 @@ describe Project do
.and_return(false)
expect(shell).to receive(:add_repository)
- .with(project.repository_storage_path, project.disk_path)
+ .with(project.repository_storage, project.disk_path)
.and_return(true)
project.ensure_repository
@@ -1814,6 +1856,59 @@ describe Project do
end
end
+ context 'forks' do
+ include ProjectForksHelper
+
+ let(:project) { create(:project, :public) }
+ let!(:forked_project) { fork_project(project) }
+
+ describe '#fork_network' do
+ it 'includes a fork of the project' do
+ expect(project.fork_network.projects).to include(forked_project)
+ end
+
+ it 'includes a fork of a fork' do
+ other_fork = fork_project(forked_project)
+
+ expect(project.fork_network.projects).to include(other_fork)
+ end
+
+ it 'includes sibling forks' do
+ other_fork = fork_project(project)
+
+ expect(forked_project.fork_network.projects).to include(other_fork)
+ end
+
+ it 'includes the base project' do
+ expect(forked_project.fork_network.projects).to include(project.reload)
+ end
+ end
+
+ describe '#in_fork_network_of?' do
+ it 'is true for a real fork' do
+ expect(forked_project.in_fork_network_of?(project)).to be_truthy
+ end
+
+ it 'is true for a fork of a fork', :postgresql do
+ other_fork = fork_project(forked_project)
+
+ expect(other_fork.in_fork_network_of?(project)).to be_truthy
+ end
+
+ it 'is true for sibling forks' do
+ sibling = fork_project(project)
+
+ expect(sibling.in_fork_network_of?(forked_project)).to be_truthy
+ end
+
+ it 'is false when another project is given' do
+ other_project = build_stubbed(:project)
+
+ expect(forked_project.in_fork_network_of?(other_project)).to be_falsy
+ end
+ end
+ end
+
describe '#pushes_since_gc' do
let(:project) { create(:project) }
@@ -2363,12 +2458,24 @@ describe Project do
describe '#legacy_storage?' do
it 'returns true when storage_version is nil' do
- project = build(:project)
+ project = build(:project, storage_version: nil)
+
+ expect(project.legacy_storage?).to be_truthy
+ end
+
+ it 'returns true when the storage_version is 0' do
+ project = build(:project, storage_version: 0)
expect(project.legacy_storage?).to be_truthy
end
end
+ describe '#hashed_storage?' do
+ it 'returns false' do
+ expect(project.hashed_storage?).to be_falsey
+ end
+ end
+
describe '#rename_repo' do
before do
# Project#gitlab_shell returns a new instance of Gitlab::Shell on every
@@ -2425,6 +2532,38 @@ describe Project do
expect(project.pages_path).to eq(File.join(Settings.pages.path, project.namespace.full_path, project.path))
end
end
+
+ describe '#migrate_to_hashed_storage!' do
+ it 'returns true' do
+ expect(project.migrate_to_hashed_storage!).to be_truthy
+ end
+
+ it 'flags as read-only' do
+ expect { project.migrate_to_hashed_storage! }.to change { project.repository_read_only }.to(true)
+ end
+
+ it 'schedules ProjectMigrateHashedStorageWorker with delayed start when the project repo is in use' do
+ Gitlab::ReferenceCounter.new(project.gl_repository(is_wiki: false)).increase
+
+ expect(ProjectMigrateHashedStorageWorker).to receive(:perform_in)
+
+ project.migrate_to_hashed_storage!
+ end
+
+ it 'schedules ProjectMigrateHashedStorageWorker with delayed start when the wiki repo is in use' do
+ Gitlab::ReferenceCounter.new(project.gl_repository(is_wiki: true)).increase
+
+ expect(ProjectMigrateHashedStorageWorker).to receive(:perform_in)
+
+ project.migrate_to_hashed_storage!
+ end
+
+ it 'schedules ProjectMigrateHashedStorageWorker' do
+ expect(ProjectMigrateHashedStorageWorker).to receive(:perform_async).with(project.id)
+
+ project.migrate_to_hashed_storage!
+ end
+ end
end
context 'hashed storage' do
@@ -2438,6 +2577,18 @@ describe Project do
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
end
+ describe '#legacy_storage?' do
+ it 'returns false' do
+ expect(project.legacy_storage?).to be_falsey
+ end
+ end
+
+ describe '#hashed_storage?' do
+ it 'returns true' do
+ expect(project.hashed_storage?).to be_truthy
+ end
+ end
+
describe '#base_dir' do
it 'returns base_dir based on hash of project id' do
expect(project.base_dir).to eq('@hashed/6b/86')
@@ -2508,6 +2659,26 @@ describe Project do
expect(project.pages_path).to eq(File.join(Settings.pages.path, project.namespace.full_path, project.path))
end
end
+
+ describe '#migrate_to_hashed_storage!' do
+ it 'returns nil' do
+ expect(project.migrate_to_hashed_storage!).to be_nil
+ end
+
+ it 'does not flag as read-only' do
+ expect { project.migrate_to_hashed_storage! }.not_to change { project.repository_read_only }
+ end
+ end
+ end
+
+ describe '#gl_repository' do
+ let(:project) { create(:project) }
+
+ it 'delegates to Gitlab::GlRepository.gl_repository' do
+ expect(Gitlab::GlRepository).to receive(:gl_repository).with(project, true)
+
+ project.gl_repository(is_wiki: true)
+ end
end
describe '#has_ci?' do
@@ -2682,4 +2853,71 @@ describe Project do
end
end
end
+
+ describe '#latest_successful_pipeline_for' do
+ let(:project) { build(:project) }
+
+ before do
+ allow(project).to receive(:default_branch).and_return('master')
+ end
+
+ context 'without a ref' do
+ it 'returns a pipeline for the default branch' do
+ expect(project)
+ .to receive(:latest_successful_pipeline_for_default_branch)
+
+ project.latest_successful_pipeline_for
+ end
+ end
+
+ context 'with the ref set to the default branch' do
+ it 'returns a pipeline for the default branch' do
+ expect(project)
+ .to receive(:latest_successful_pipeline_for_default_branch)
+
+ project.latest_successful_pipeline_for(project.default_branch)
+ end
+ end
+
+ context 'with a ref that is not the default branch' do
+ it 'returns the latest successful pipeline for the given ref' do
+ expect(project.pipelines).to receive(:latest_successful_for).with('foo')
+
+ project.latest_successful_pipeline_for('foo')
+ end
+ end
+ end
+
+ describe '#check_repository_path_availability' do
+ let(:project) { build(:project) }
+
+ it 'skips gitlab-shell exists?' do
+ project.skip_disk_validation = true
+
+ expect(project.gitlab_shell).not_to receive(:exists?)
+ expect(project.check_repository_path_availability).to be_truthy
+ end
+ end
+
+ describe '#latest_successful_pipeline_for_default_branch' do
+ let(:project) { build(:project) }
+
+ before do
+ allow(project).to receive(:default_branch).and_return('master')
+ end
+
+ it 'memoizes and returns the latest successful pipeline for the default branch' do
+ pipeline = double(:pipeline)
+
+ expect(project.pipelines).to receive(:latest_successful_for)
+ .with(project.default_branch)
+ .and_return(pipeline)
+ .once
+
+ 2.times do
+ expect(project.latest_successful_pipeline_for_default_branch)
+ .to eq(pipeline)
+ end
+ end
+ end
end
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb
index 953df7746eb..78fb2df884a 100644
--- a/spec/models/project_wiki_spec.rb
+++ b/spec/models/project_wiki_spec.rb
@@ -6,13 +6,10 @@ describe ProjectWiki do
let(:user) { project.owner }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project_wiki) { described_class.new(project, user) }
+ let(:raw_repository) { Gitlab::Git::Repository.new(project.repository_storage, subject.disk_path + '.git', 'foo') }
subject { project_wiki }
- before do
- project_wiki.wiki
- end
-
describe "#path_with_namespace" do
it "returns the project path with namespace with the .wiki extension" do
expect(subject.path_with_namespace).to eq(project.full_path + '.wiki')
@@ -61,8 +58,8 @@ describe ProjectWiki do
end
describe "#wiki" do
- it "contains a Gollum::Wiki instance" do
- expect(subject.wiki).to be_a Gollum::Wiki
+ it "contains a Gitlab::Git::Wiki instance" do
+ expect(subject.wiki).to be_a Gitlab::Git::Wiki
end
it "creates a new wiki repo if one does not yet exist" do
@@ -70,20 +67,18 @@ describe ProjectWiki do
end
it "raises CouldNotCreateWikiError if it can't create the wiki repository" do
- allow(project_wiki).to receive(:init_repo).and_return(false)
- expect { project_wiki.send(:create_repo!) }.to raise_exception(ProjectWiki::CouldNotCreateWikiError)
+ # Create a fresh project which will not have a wiki
+ project_wiki = described_class.new(create(:project), user)
+ gitlab_shell = double(:gitlab_shell)
+ allow(gitlab_shell).to receive(:add_repository)
+ allow(project_wiki).to receive(:gitlab_shell).and_return(gitlab_shell)
+
+ expect { project_wiki.send(:wiki) }.to raise_exception(ProjectWiki::CouldNotCreateWikiError)
end
end
describe "#empty?" do
context "when the wiki repository is empty" do
- before do
- allow_any_instance_of(Gitlab::Shell).to receive(:add_repository) do
- create_temp_repo("#{Rails.root}/tmp/test-git-base-path/non-existant.wiki.git")
- end
- allow(project).to receive(:full_path).and_return("non-existant")
- end
-
describe '#empty?' do
subject { super().empty? }
it { is_expected.to be_truthy }
@@ -154,13 +149,13 @@ describe ProjectWiki do
before do
file = Gollum::File.new(subject.wiki)
allow_any_instance_of(Gollum::Wiki)
- .to receive(:file).with('image.jpg', 'master', true)
+ .to receive(:file).with('image.jpg', 'master')
.and_return(file)
allow_any_instance_of(Gollum::File)
.to receive(:mime_type)
.and_return('image/jpeg')
allow_any_instance_of(Gollum::Wiki)
- .to receive(:file).with('non-existant', 'master', true)
+ .to receive(:file).with('non-existant', 'master')
.and_return(nil)
end
@@ -178,9 +173,9 @@ describe ProjectWiki do
expect(subject.find_file('non-existant')).to eq(nil)
end
- it 'returns a Gollum::File instance' do
+ it 'returns a Gitlab::Git::WikiFile instance' do
file = subject.find_file('image.jpg')
- expect(file).to be_a Gollum::File
+ expect(file).to be_a Gitlab::Git::WikiFile
end
end
@@ -222,9 +217,9 @@ describe ProjectWiki do
describe "#update_page" do
before do
create_page("update-page", "some content")
- @gollum_page = subject.wiki.paged("update-page")
+ @gitlab_git_wiki_page = subject.wiki.page(title: "update-page")
subject.update_page(
- @gollum_page,
+ @gitlab_git_wiki_page,
content: "some other content",
format: :markdown,
message: "updated page"
@@ -246,7 +241,7 @@ describe ProjectWiki do
it 'updates project activity' do
subject.update_page(
- @gollum_page,
+ @gitlab_git_wiki_page,
content: 'Yet more content',
format: :markdown,
message: 'Updated page again'
@@ -262,7 +257,7 @@ describe ProjectWiki do
describe "#delete_page" do
before do
create_page("index", "some content")
- @page = subject.wiki.paged("index")
+ @page = subject.wiki.page(title: "index")
end
it "deletes the page" do
@@ -282,27 +277,28 @@ describe ProjectWiki do
describe '#create_repo!' do
it 'creates a repository' do
- expect(subject).to receive(:init_repo)
- .with(subject.full_path)
- .and_return(true)
-
+ expect(raw_repository.exists?).to eq(false)
expect(subject.repository).to receive(:after_create)
- expect(subject.create_repo!).to be_an_instance_of(Gollum::Wiki)
+ subject.send(:create_repo!, raw_repository)
+
+ expect(raw_repository.exists?).to eq(true)
end
end
describe '#ensure_repository' do
it 'creates the repository if it does not exist' do
- allow(subject).to receive(:repository_exists?).and_return(false)
-
- expect(subject).to receive(:create_repo!)
+ expect(raw_repository.exists?).to eq(false)
+ expect(subject).to receive(:create_repo!).and_call_original
subject.ensure_repository
+
+ expect(raw_repository.exists?).to eq(true)
end
it 'does not create the repository if it exists' do
- allow(subject).to receive(:repository_exists?).and_return(true)
+ subject.wiki
+ expect(raw_repository.exists?).to eq(true)
expect(subject).not_to receive(:create_repo!)
@@ -329,7 +325,7 @@ describe ProjectWiki do
end
def commit_details
- { name: user.name, email: user.email, message: "test commit" }
+ Gitlab::Git::Wiki::CommitDetails.new(user.name, user.email, "test commit")
end
def create_page(name, content)
@@ -337,6 +333,6 @@ describe ProjectWiki do
end
def destroy_page(page)
- subject.wiki.delete_page(page, commit_details)
+ subject.delete_page(page, commit_details)
end
end
diff --git a/spec/models/push_event_spec.rb b/spec/models/push_event_spec.rb
index 532fb024261..ad3c3a406d9 100644
--- a/spec/models/push_event_spec.rb
+++ b/spec/models/push_event_spec.rb
@@ -11,6 +11,94 @@ describe PushEvent do
event
end
+ describe '.created_or_pushed' do
+ let(:event1) { create(:push_event) }
+ let(:event2) { create(:push_event) }
+ let(:event3) { create(:push_event) }
+
+ before do
+ create(:push_event_payload, event: event1, action: :pushed)
+ create(:push_event_payload, event: event2, action: :created)
+ create(:push_event_payload, event: event3, action: :removed)
+ end
+
+ let(:relation) { described_class.created_or_pushed }
+
+ it 'includes events for pushing to existing refs' do
+ expect(relation).to include(event1)
+ end
+
+ it 'includes events for creating new refs' do
+ expect(relation).to include(event2)
+ end
+
+ it 'does not include events for removing refs' do
+ expect(relation).not_to include(event3)
+ end
+ end
+
+ describe '.branch_events' do
+ let(:event1) { create(:push_event) }
+ let(:event2) { create(:push_event) }
+
+ before do
+ create(:push_event_payload, event: event1, ref_type: :branch)
+ create(:push_event_payload, event: event2, ref_type: :tag)
+ end
+
+ let(:relation) { described_class.branch_events }
+
+ it 'includes events for branches' do
+ expect(relation).to include(event1)
+ end
+
+ it 'does not include events for tags' do
+ expect(relation).not_to include(event2)
+ end
+ end
+
+ describe '.without_existing_merge_requests' do
+ let(:project) { create(:project, :repository) }
+ let(:event1) { create(:push_event, project: project) }
+ let(:event2) { create(:push_event, project: project) }
+ let(:event3) { create(:push_event, project: project) }
+ let(:event4) { create(:push_event, project: project) }
+
+ before do
+ create(:push_event_payload, event: event1, ref: 'foo', action: :created)
+ create(:push_event_payload, event: event2, ref: 'bar', action: :created)
+ create(:push_event_payload, event: event3, ref: 'baz', action: :removed)
+ create(:push_event_payload, event: event4, ref: 'baz', ref_type: :tag)
+
+ project.repository.create_branch('bar', 'master')
+
+ create(
+ :merge_request,
+ source_project: project,
+ target_project: project,
+ source_branch: 'bar'
+ )
+ end
+
+ let(:relation) { described_class.without_existing_merge_requests }
+
+ it 'includes events that do not have a corresponding merge request' do
+ expect(relation).to include(event1)
+ end
+
+ it 'does not include events that have a corresponding merge request' do
+ expect(relation).not_to include(event2)
+ end
+
+ it 'does not include events for removed refs' do
+ expect(relation).not_to include(event3)
+ end
+
+ it 'does not include events for pushing to tags' do
+ expect(relation).not_to include(event4)
+ end
+ end
+
describe '.sti_name' do
it 'returns Event::PUSHED' do
expect(described_class.sti_name).to eq(Event::PUSHED)
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 7065d467ec0..5d78aed5b4f 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Repository, models: true do
+describe Repository do
include RepoHelpers
TestBlob = Struct.new(:path)
@@ -8,12 +8,9 @@ describe Repository, models: true do
let(:repository) { project.repository }
let(:broken_repository) { create(:project, :broken_storage).repository }
let(:user) { create(:user) }
- let(:committer) { Gitlab::Git::Committer.from_user(user) }
+ let(:git_user) { Gitlab::Git::User.from_gitlab(user) }
- let(:commit_options) do
- author = repository.user_to_committer(user)
- { message: 'Test message', committer: author, author: author }
- end
+ let(:message) { 'Test message' }
let(:merge_commit) do
merge_request = create(:merge_request, source_branch: 'feature', target_branch: 'master', source_project: project)
@@ -21,7 +18,7 @@ describe Repository, models: true do
merge_commit_id = repository.merge(user,
merge_request.diff_head_sha,
merge_request,
- commit_options)
+ message)
repository.commit(merge_commit_id)
end
@@ -43,7 +40,7 @@ describe Repository, models: true do
it { is_expected.not_to include('feature') }
it { is_expected.not_to include('fix') }
- describe 'when storage is broken', broken_storage: true do
+ describe 'when storage is broken', :broken_storage do
it 'should raise a storage error' do
expect_to_raise_storage_error do
broken_repository.branch_names_contains(sample_commit.id)
@@ -161,7 +158,7 @@ describe Repository, models: true do
it { is_expected.to eq('c1acaa58bbcbc3eafe538cb8274ba387047b69f8') }
- describe 'when storage is broken', broken_storage: true do
+ describe 'when storage is broken', :broken_storage do
it 'should raise a storage error' do
expect_to_raise_storage_error do
broken_repository.last_commit_id_for_path(sample_commit.id, '.gitignore')
@@ -174,7 +171,7 @@ describe Repository, models: true do
it_behaves_like 'getting last commit for path'
end
- context 'when Gitaly feature last_commit_for_path is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly feature last_commit_for_path is disabled', :skip_gitaly_mock do
it_behaves_like 'getting last commit for path'
end
end
@@ -195,7 +192,7 @@ describe Repository, models: true do
is_expected.to eq('c1acaa5')
end
- describe 'when storage is broken', broken_storage: true do
+ describe 'when storage is broken', :broken_storage do
it 'should raise a storage error' do
expect_to_raise_storage_error do
broken_repository.last_commit_for_path(sample_commit.id, '.gitignore').id
@@ -208,7 +205,7 @@ describe Repository, models: true do
it_behaves_like 'getting last commit ID for path'
end
- context 'when Gitaly feature last_commit_for_path is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly feature last_commit_for_path is disabled', :skip_gitaly_mock do
it_behaves_like 'getting last commit ID for path'
end
end
@@ -258,11 +255,11 @@ describe Repository, models: true do
it_behaves_like 'finding commits by message'
end
- context 'when Gitaly commits_by_message feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly commits_by_message feature is disabled', :skip_gitaly_mock do
it_behaves_like 'finding commits by message'
end
- describe 'when storage is broken', broken_storage: true do
+ describe 'when storage is broken', :broken_storage do
it 'should raise a storage error' do
expect_to_raise_storage_error { broken_repository.find_commits_by_message('s') }
end
@@ -592,7 +589,7 @@ describe Repository, models: true do
expect(results).to match_array([])
end
- describe 'when storage is broken', broken_storage: true do
+ describe 'when storage is broken', :broken_storage do
it 'should raise a storage error' do
expect_to_raise_storage_error do
broken_repository.search_files_by_content('feature', 'master')
@@ -629,7 +626,7 @@ describe Repository, models: true do
expect(results).to match_array([])
end
- describe 'when storage is broken', broken_storage: true do
+ describe 'when storage is broken', :broken_storage do
it 'should raise a storage error' do
expect_to_raise_storage_error { broken_repository.search_files_by_name('files', 'master') }
end
@@ -637,20 +634,24 @@ describe Repository, models: true do
end
describe '#fetch_ref' do
- describe 'when storage is broken', broken_storage: true do
- it 'should raise a storage error' do
- path = broken_repository.path_to_repo
+ # Setting the var here sidesteps the stub that makes gitaly raise an error
+ # before the actual test call
+ set(:broken_repository) { create(:project, :broken_storage).repository }
- expect_to_raise_storage_error { broken_repository.fetch_ref(path, '1', '2') }
+ describe 'when storage is broken', :broken_storage do
+ it 'should raise a storage error' do
+ expect_to_raise_storage_error do
+ broken_repository.fetch_ref(broken_repository, source_ref: '1', target_ref: '2')
+ end
end
end
end
describe '#create_ref' do
- it 'redirects the call to fetch_ref' do
+ it 'redirects the call to write_ref' do
ref, ref_path = '1', '2'
- expect(repository).to receive(:fetch_ref).with(repository.path_to_repo, ref, ref_path)
+ expect(repository.raw_repository).to receive(:write_ref).with(ref_path, ref)
repository.create_ref(ref, ref_path)
end
@@ -818,45 +819,70 @@ describe Repository, models: true do
end
describe '#add_branch' do
- context 'when pre hooks were successful' do
- it 'runs without errors' do
- hook = double(trigger: [true, nil])
- expect(Gitlab::Git::Hook).to receive(:new).exactly(3).times.and_return(hook)
+ let(:branch_name) { 'new_feature' }
+ let(:target) { 'master' }
- expect { repository.add_branch(user, 'new_feature', 'master') }.not_to raise_error
- end
+ subject { repository.add_branch(user, branch_name, target) }
- it 'creates the branch' do
- allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil])
+ context 'with Gitaly enabled' do
+ it "calls Gitaly's OperationService" do
+ expect_any_instance_of(Gitlab::GitalyClient::OperationService)
+ .to receive(:user_create_branch).with(branch_name, user, target)
+ .and_return(nil)
- branch = repository.add_branch(user, 'new_feature', 'master')
+ subject
+ end
- expect(branch.name).to eq('new_feature')
+ it 'creates the branch' do
+ expect(subject.name).to eq(branch_name)
+ expect(repository.find_branch(branch_name)).not_to be_nil
end
- it 'calls the after_create_branch hook' do
- expect(repository).to receive(:after_create_branch)
+ context 'with a non-existing target' do
+ let(:target) { 'fake-target' }
- repository.add_branch(user, 'new_feature', 'master')
+ it "returns false and doesn't create the branch" do
+ expect(subject).to be(false)
+ expect(repository.find_branch(branch_name)).to be_nil
+ end
end
end
- context 'when pre hooks failed' do
- it 'gets an error' do
- allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
+ context 'with Gitaly disabled', :skip_gitaly_mock do
+ context 'when pre hooks were successful' do
+ it 'runs without errors' do
+ hook = double(trigger: [true, nil])
+ expect(Gitlab::Git::Hook).to receive(:new).exactly(3).times.and_return(hook)
- expect do
- repository.add_branch(user, 'new_feature', 'master')
- end.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
+ expect { subject }.not_to raise_error
+ end
+
+ it 'creates the branch' do
+ allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil])
+
+ expect(subject.name).to eq(branch_name)
+ end
+
+ it 'calls the after_create_branch hook' do
+ expect(repository).to receive(:after_create_branch)
+
+ subject
+ end
end
- it 'does not create the branch' do
- allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
+ context 'when pre hooks failed' do
+ it 'gets an error' do
+ allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
- expect do
- repository.add_branch(user, 'new_feature', 'master')
- end.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
- expect(repository.find_branch('new_feature')).to be_nil
+ expect { subject }.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
+ end
+
+ it 'does not create the branch' do
+ allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
+
+ expect { subject }.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
+ expect(repository.find_branch(branch_name)).to be_nil
+ end
end
end
end
@@ -879,47 +905,6 @@ describe Repository, models: true do
end
end
- describe '#rm_branch' do
- let(:old_rev) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' } # git rev-parse feature
- let(:blank_sha) { '0000000000000000000000000000000000000000' }
-
- context 'when pre hooks were successful' do
- it 'runs without errors' do
- expect_any_instance_of(Gitlab::Git::HooksService).to receive(:execute)
- .with(committer, repository.raw_repository, old_rev, blank_sha, 'refs/heads/feature')
-
- expect { repository.rm_branch(user, 'feature') }.not_to raise_error
- end
-
- it 'deletes the branch' do
- allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil])
-
- expect { repository.rm_branch(user, 'feature') }.not_to raise_error
-
- expect(repository.find_branch('feature')).to be_nil
- end
- end
-
- context 'when pre hooks failed' do
- it 'gets an error' do
- allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
-
- expect do
- repository.rm_branch(user, 'feature')
- end.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
- end
-
- it 'does not delete the branch' do
- allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
-
- expect do
- repository.rm_branch(user, 'feature')
- end.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
- expect(repository.find_branch('feature')).not_to be_nil
- end
- end
- end
-
describe '#update_branch_with_hooks' do
let(:old_rev) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' } # git rev-parse feature
let(:new_rev) { 'a74ae73c1ccde9b974a70e82b901588071dc142a' } # commit whose parent is old_rev
@@ -932,20 +917,20 @@ describe Repository, models: true do
service = Gitlab::Git::HooksService.new
expect(Gitlab::Git::HooksService).to receive(:new).and_return(service)
expect(service).to receive(:execute)
- .with(committer, target_repository.raw_repository, old_rev, new_rev, updating_ref)
+ .with(git_user, target_repository.raw_repository, old_rev, new_rev, updating_ref)
.and_yield(service).and_return(true)
end
it 'runs without errors' do
expect do
- Gitlab::Git::OperationService.new(committer, repository.raw_repository).with_branch('feature') do
+ Gitlab::Git::OperationService.new(git_user, repository.raw_repository).with_branch('feature') do
new_rev
end
end.not_to raise_error
end
it 'ensures the autocrlf Git option is set to :input' do
- service = Gitlab::Git::OperationService.new(committer, repository.raw_repository)
+ service = Gitlab::Git::OperationService.new(git_user, repository.raw_repository)
expect(service).to receive(:update_autocrlf_option)
@@ -956,7 +941,7 @@ describe Repository, models: true do
it 'updates the head' do
expect(repository.find_branch('feature').dereferenced_target.id).to eq(old_rev)
- Gitlab::Git::OperationService.new(committer, repository.raw_repository).with_branch('feature') do
+ Gitlab::Git::OperationService.new(git_user, repository.raw_repository).with_branch('feature') do
new_rev
end
@@ -974,7 +959,7 @@ describe Repository, models: true do
expect(target_project.repository.raw_repository).to receive(:fetch_ref)
.and_call_original
- Gitlab::Git::OperationService.new(committer, target_repository.raw_repository)
+ Gitlab::Git::OperationService.new(git_user, target_repository.raw_repository)
.with_branch(
'master',
start_repository: project.repository.raw_repository,
@@ -990,7 +975,7 @@ describe Repository, models: true do
it 'does not fetch_ref and just pass the commit' do
expect(target_repository).not_to receive(:fetch_ref)
- Gitlab::Git::OperationService.new(committer, target_repository.raw_repository)
+ Gitlab::Git::OperationService.new(git_user, target_repository.raw_repository)
.with_branch('feature', start_repository: project.repository.raw_repository) { new_rev }
end
end
@@ -1009,7 +994,7 @@ describe Repository, models: true do
end
expect do
- Gitlab::Git::OperationService.new(committer, target_project.repository.raw_repository)
+ Gitlab::Git::OperationService.new(git_user, target_project.repository.raw_repository)
.with_branch('feature',
start_repository: project.repository.raw_repository,
&:itself)
@@ -1031,7 +1016,7 @@ describe Repository, models: true do
repository.add_branch(user, branch, old_rev)
expect do
- Gitlab::Git::OperationService.new(committer, repository.raw_repository).with_branch(branch) do
+ Gitlab::Git::OperationService.new(git_user, repository.raw_repository).with_branch(branch) do
new_rev
end
end.not_to raise_error
@@ -1049,7 +1034,7 @@ describe Repository, models: true do
# Updating 'master' to new_rev would lose the commits on 'master' that
# are not contained in new_rev. This should not be allowed.
expect do
- Gitlab::Git::OperationService.new(committer, repository.raw_repository).with_branch(branch) do
+ Gitlab::Git::OperationService.new(git_user, repository.raw_repository).with_branch(branch) do
new_rev
end
end.to raise_error(Gitlab::Git::CommitError)
@@ -1061,7 +1046,7 @@ describe Repository, models: true do
allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
expect do
- Gitlab::Git::OperationService.new(committer, repository.raw_repository).with_branch('feature') do
+ Gitlab::Git::OperationService.new(git_user, repository.raw_repository).with_branch('feature') do
new_rev
end
end.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
@@ -1116,7 +1101,7 @@ describe Repository, models: true do
expect(repository.exists?).to eq(false)
end
- context 'with broken storage', broken_storage: true do
+ context 'with broken storage', :broken_storage do
it 'should raise a storage error' do
expect_to_raise_storage_error { broken_repository.exists? }
end
@@ -1128,27 +1113,37 @@ describe Repository, models: true do
it_behaves_like 'repo exists check'
end
- context 'when repository_exists is enabled', skip_gitaly_mock: true do
+ context 'when repository_exists is enabled', :skip_gitaly_mock do
it_behaves_like 'repo exists check'
end
end
describe '#has_visible_content?' do
- subject { repository.has_visible_content? }
+ before do
+ # If raw_repository.has_visible_content? gets called more than once then
+ # caching is broken. We don't want that.
+ expect(repository.raw_repository).to receive(:has_visible_content?)
+ .once
+ .and_return(result)
+ end
- describe 'when there are no branches' do
- before do
- allow(repository.raw_repository).to receive(:branch_count).and_return(0)
- end
+ context 'when true' do
+ let(:result) { true }
- it { is_expected.to eq(false) }
+ it 'returns true and caches it' do
+ expect(repository.has_visible_content?).to eq(true)
+ # Second call hits the cache
+ expect(repository.has_visible_content?).to eq(true)
+ end
end
- describe 'when there are branches' do
- it 'returns true' do
- expect(repository.raw_repository).to receive(:branch_count).and_return(3)
+ context 'when false' do
+ let(:result) { false }
- expect(subject).to eq(true)
+ it 'returns false and caches it' do
+ expect(repository.has_visible_content?).to eq(false)
+ # Second call hits the cache
+ expect(repository.has_visible_content?).to eq(false)
end
end
end
@@ -1265,6 +1260,7 @@ describe Repository, models: true do
allow(repository).to receive(:empty?).and_return(true)
expect(cache).to receive(:expire).with(:empty?)
+ expect(cache).to receive(:expire).with(:has_visible_content?)
repository.expire_emptiness_caches
end
@@ -1273,6 +1269,7 @@ describe Repository, models: true do
allow(repository).to receive(:empty?).and_return(false)
expect(cache).not_to receive(:expire).with(:empty?)
+ expect(cache).not_to receive(:expire).with(:has_visible_content?)
repository.expire_emptiness_caches
end
@@ -1287,10 +1284,7 @@ describe Repository, models: true do
describe '#merge' do
let(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master', source_project: project) }
- let(:commit_options) do
- author = repository.user_to_committer(user)
- { message: 'Test \r\n\r\n message', committer: author, author: author }
- end
+ let(:message) { 'Test \r\n\r\n message' }
it 'merges the code and returns the commit id' do
expect(merge_commit).to be_present
@@ -1298,43 +1292,72 @@ describe Repository, models: true do
end
it 'sets the `in_progress_merge_commit_sha` flag for the given merge request' do
- merge_commit_id = merge(repository, user, merge_request, commit_options)
+ merge_commit_id = merge(repository, user, merge_request, message)
expect(merge_request.in_progress_merge_commit_sha).to eq(merge_commit_id)
end
it 'removes carriage returns from commit message' do
- merge_commit_id = merge(repository, user, merge_request, commit_options)
+ merge_commit_id = merge(repository, user, merge_request, message)
+
+ expect(repository.commit(merge_commit_id).message).to eq(message.delete("\r"))
+ end
- expect(repository.commit(merge_commit_id).message).to eq(commit_options[:message].delete("\r"))
+ def merge(repository, user, merge_request, message)
+ repository.merge(user, merge_request.diff_head_sha, merge_request, message)
+ end
+ end
+
+ describe '#ff_merge' do
+ before do
+ repository.add_branch(user, 'ff-target', 'feature~5')
end
- def merge(repository, user, merge_request, options = {})
- repository.merge(user, merge_request.diff_head_sha, merge_request, options)
+ it 'merges the code and returns the commit id' do
+ merge_request = create(:merge_request, source_branch: 'feature', target_branch: 'ff-target', source_project: project)
+ merge_commit_id = repository.ff_merge(user,
+ merge_request.diff_head_sha,
+ merge_request.target_branch,
+ merge_request: merge_request)
+ merge_commit = repository.commit(merge_commit_id)
+
+ expect(merge_commit).to be_present
+ expect(repository.blob_at(merge_commit.id, 'files/ruby/feature.rb')).to be_present
+ end
+
+ it 'sets the `in_progress_merge_commit_sha` flag for the given merge request' do
+ merge_request = create(:merge_request, source_branch: 'feature', target_branch: 'ff-target', source_project: project)
+ merge_commit_id = repository.ff_merge(user,
+ merge_request.diff_head_sha,
+ merge_request.target_branch,
+ merge_request: merge_request)
+
+ expect(merge_request.in_progress_merge_commit_sha).to eq(merge_commit_id)
end
end
describe '#revert' do
let(:new_image_commit) { repository.commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9') }
let(:update_image_commit) { repository.commit('2f63565e7aac07bcdadb654e253078b727143ec4') }
+ let(:message) { 'revert message' }
context 'when there is a conflict' do
it 'raises an error' do
- expect { repository.revert(user, new_image_commit, 'master') }.to raise_error(/Failed to/)
+ expect { repository.revert(user, new_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit was already reverted' do
it 'raises an error' do
- repository.revert(user, update_image_commit, 'master')
+ repository.revert(user, update_image_commit, 'master', message)
- expect { repository.revert(user, update_image_commit, 'master') }.to raise_error(/Failed to/)
+ expect { repository.revert(user, update_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit can be reverted' do
it 'reverts the changes' do
- expect(repository.revert(user, update_image_commit, 'master')).to be_truthy
+ expect(repository.revert(user, update_image_commit, 'master', message)).to be_truthy
end
end
@@ -1343,7 +1366,7 @@ describe Repository, models: true do
merge_commit
expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).to be_present
- repository.revert(user, merge_commit, 'master')
+ repository.revert(user, merge_commit, 'master', message)
expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).not_to be_present
end
end
@@ -1353,24 +1376,25 @@ describe Repository, models: true do
let(:conflict_commit) { repository.commit('c642fe9b8b9f28f9225d7ea953fe14e74748d53b') }
let(:pickable_commit) { repository.commit('7d3b0f7cff5f37573aea97cebfd5692ea1689924') }
let(:pickable_merge) { repository.commit('e56497bb5f03a90a51293fc6d516788730953899') }
+ let(:message) { 'cherry-pick message' }
context 'when there is a conflict' do
it 'raises an error' do
- expect { repository.cherry_pick(user, conflict_commit, 'master') }.to raise_error(/Failed to/)
+ expect { repository.cherry_pick(user, conflict_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit was already cherry-picked' do
it 'raises an error' do
- repository.cherry_pick(user, pickable_commit, 'master')
+ repository.cherry_pick(user, pickable_commit, 'master', message)
- expect { repository.cherry_pick(user, pickable_commit, 'master') }.to raise_error(/Failed to/)
+ expect { repository.cherry_pick(user, pickable_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit can be cherry-picked' do
it 'cherry-picks the changes' do
- expect(repository.cherry_pick(user, pickable_commit, 'master')).to be_truthy
+ expect(repository.cherry_pick(user, pickable_commit, 'master', message)).to be_truthy
end
end
@@ -1378,11 +1402,11 @@ describe Repository, models: true do
it 'cherry-picks the changes' do
expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).to be_nil
- cherry_pick_commit_sha = repository.cherry_pick(user, pickable_merge, 'improve/awesome')
+ cherry_pick_commit_sha = repository.cherry_pick(user, pickable_merge, 'improve/awesome', message)
cherry_pick_commit_message = project.commit(cherry_pick_commit_sha).message
expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).not_to be_nil
- expect(cherry_pick_commit_message).to include('cherry picked from')
+ expect(cherry_pick_commit_message).to eq(message)
end
end
end
@@ -1603,7 +1627,7 @@ describe Repository, models: true do
describe '#expire_branches_cache' do
it 'expires the cache' do
expect(repository).to receive(:expire_method_caches)
- .with(%i(branch_names branch_count))
+ .with(%i(branch_names branch_count has_visible_content?))
.and_call_original
repository.expire_branches_cache
@@ -1621,27 +1645,41 @@ describe Repository, models: true do
end
describe '#add_tag' do
- context 'with a valid target' do
- let(:user) { build_stubbed(:user) }
+ let(:user) { build_stubbed(:user) }
- it 'creates the tag using rugged' do
- expect(repository.rugged.tags).to receive(:create)
- .with('8.5', repository.commit('master').id,
- hash_including(message: 'foo',
- tagger: hash_including(name: user.name, email: user.email)))
- .and_call_original
+ shared_examples 'adding tag' do
+ context 'with a valid target' do
+ it 'creates the tag' do
+ repository.add_tag(user, '8.5', 'master', 'foo')
- repository.add_tag(user, '8.5', 'master', 'foo')
- end
+ tag = repository.find_tag('8.5')
+ expect(tag).to be_present
+ expect(tag.message).to eq('foo')
+ expect(tag.dereferenced_target.id).to eq(repository.commit('master').id)
+ end
- it 'returns a Gitlab::Git::Tag object' do
- tag = repository.add_tag(user, '8.5', 'master', 'foo')
+ it 'returns a Gitlab::Git::Tag object' do
+ tag = repository.add_tag(user, '8.5', 'master', 'foo')
+
+ expect(tag).to be_a(Gitlab::Git::Tag)
+ end
+ end
- expect(tag).to be_a(Gitlab::Git::Tag)
+ context 'with an invalid target' do
+ it 'returns false' do
+ expect(repository.add_tag(user, '8.5', 'bar', 'foo')).to be false
+ end
end
+ end
- it 'passes commit SHA to pre-receive and update hooks,\
- and tag SHA to post-receive hook' do
+ context 'when Gitaly operation_user_add_tag feature is enabled' do
+ it_behaves_like 'adding tag'
+ end
+
+ context 'when Gitaly operation_user_add_tag feature is disabled', :skip_gitaly_mock do
+ it_behaves_like 'adding tag'
+
+ it 'passes commit SHA to pre-receive and update hooks and tag SHA to post-receive hook' do
pre_receive_hook = Gitlab::Git::Hook.new('pre-receive', project)
update_hook = Gitlab::Git::Hook.new('update', project)
post_receive_hook = Gitlab::Git::Hook.new('post-receive', project)
@@ -1659,39 +1697,105 @@ describe Repository, models: true do
tag_sha = tag.target
expect(pre_receive_hook).to have_received(:trigger)
- .with(anything, anything, commit_sha, anything)
+ .with(anything, anything, anything, commit_sha, anything)
expect(update_hook).to have_received(:trigger)
- .with(anything, anything, commit_sha, anything)
+ .with(anything, anything, anything, commit_sha, anything)
expect(post_receive_hook).to have_received(:trigger)
- .with(anything, anything, tag_sha, anything)
+ .with(anything, anything, anything, tag_sha, anything)
end
end
+ end
- context 'with an invalid target' do
- it 'returns false' do
- expect(repository.add_tag(user, '8.5', 'bar', 'foo')).to be false
+ describe '#rm_branch' do
+ shared_examples "user deleting a branch" do
+ it 'removes a branch' do
+ expect(repository).to receive(:before_remove_branch)
+ expect(repository).to receive(:after_remove_branch)
+
+ repository.rm_branch(user, 'feature')
end
end
- end
- describe '#rm_branch' do
- let(:user) { create(:user) }
+ context 'with gitaly enabled' do
+ it_behaves_like "user deleting a branch"
+
+ context 'when pre hooks failed' do
+ before do
+ allow_any_instance_of(Gitlab::GitalyClient::OperationService)
+ .to receive(:user_delete_branch).and_raise(Gitlab::Git::HooksService::PreReceiveError)
+ end
+
+ it 'gets an error and does not delete the branch' do
+ expect do
+ repository.rm_branch(user, 'feature')
+ end.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
+
+ expect(repository.find_branch('feature')).not_to be_nil
+ end
+ end
+ end
+
+ context 'with gitaly disabled', :skip_gitaly_mock do
+ it_behaves_like "user deleting a branch"
+
+ let(:old_rev) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' } # git rev-parse feature
+ let(:blank_sha) { '0000000000000000000000000000000000000000' }
+
+ context 'when pre hooks were successful' do
+ it 'runs without errors' do
+ expect_any_instance_of(Gitlab::Git::HooksService).to receive(:execute)
+ .with(git_user, repository.raw_repository, old_rev, blank_sha, 'refs/heads/feature')
+
+ expect { repository.rm_branch(user, 'feature') }.not_to raise_error
+ end
+
+ it 'deletes the branch' do
+ allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil])
+
+ expect { repository.rm_branch(user, 'feature') }.not_to raise_error
- it 'removes a branch' do
- expect(repository).to receive(:before_remove_branch)
- expect(repository).to receive(:after_remove_branch)
+ expect(repository.find_branch('feature')).to be_nil
+ end
+ end
+
+ context 'when pre hooks failed' do
+ it 'gets an error' do
+ allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
+
+ expect do
+ repository.rm_branch(user, 'feature')
+ end.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
+ end
+
+ it 'does not delete the branch' do
+ allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
- repository.rm_branch(user, 'feature')
+ expect do
+ repository.rm_branch(user, 'feature')
+ end.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
+ expect(repository.find_branch('feature')).not_to be_nil
+ end
+ end
end
end
describe '#rm_tag' do
- it 'removes a tag' do
- expect(repository).to receive(:before_remove_tag)
+ shared_examples 'removing tag' do
+ it 'removes a tag' do
+ expect(repository).to receive(:before_remove_tag)
- repository.rm_tag(create(:user), 'v1.1.0')
+ repository.rm_tag(build_stubbed(:user), 'v1.1.0')
- expect(repository.find_tag('v1.1.0')).to be_nil
+ expect(repository.find_tag('v1.1.0')).to be_nil
+ end
+ end
+
+ context 'when Gitaly operation_user_delete_tag feature is enabled' do
+ it_behaves_like 'removing tag'
+ end
+
+ context 'when Gitaly operation_user_delete_tag feature is disabled', :skip_gitaly_mock do
+ it_behaves_like 'removing tag'
end
end
@@ -1864,6 +1968,15 @@ describe Repository, models: true do
repository.expire_all_method_caches
end
+
+ it 'all cache_method definitions are in the lists of method caches' do
+ methods = repository.methods.map do |method|
+ match = /^_uncached_(.*)/.match(method)
+ match[1].to_sym if match
+ end.compact
+
+ expect(methods).to match_array(Repository::CACHED_METHODS + Repository::MEMOIZED_CACHED_METHODS)
+ end
end
describe '#file_on_head' do
diff --git a/spec/models/sent_notification_spec.rb b/spec/models/sent_notification_spec.rb
index 8f05deb8b15..5ec04b99957 100644
--- a/spec/models/sent_notification_spec.rb
+++ b/spec/models/sent_notification_spec.rb
@@ -1,6 +1,9 @@
require 'spec_helper'
describe SentNotification do
+ set(:user) { create(:user) }
+ set(:project) { create(:project) }
+
describe 'validation' do
describe 'note validity' do
context "when the project doesn't match the noteable's project" do
@@ -34,7 +37,6 @@ describe SentNotification do
end
describe '.record' do
- let(:user) { create(:user) }
let(:issue) { create(:issue) }
it 'creates a new SentNotification' do
@@ -43,7 +45,6 @@ describe SentNotification do
end
describe '.record_note' do
- let(:user) { create(:user) }
let(:note) { create(:diff_note_on_merge_request) }
it 'creates a new SentNotification' do
@@ -51,6 +52,123 @@ describe SentNotification do
end
end
+ describe '#unsubscribable?' do
+ shared_examples 'an unsubscribable notification' do |noteable_type|
+ subject { described_class.record(noteable, user.id) }
+
+ context "for #{noteable_type}" do
+ it { expect(subject).to be_unsubscribable }
+ end
+ end
+
+ shared_examples 'a non-unsubscribable notification' do |noteable_type|
+ subject { described_class.record(noteable, user.id) }
+
+ context "for a #{noteable_type}" do
+ it { expect(subject).not_to be_unsubscribable }
+ end
+ end
+
+ it_behaves_like 'an unsubscribable notification', 'issue' do
+ let(:noteable) { create(:issue, project: project) }
+ end
+
+ it_behaves_like 'an unsubscribable notification', 'merge request' do
+ let(:noteable) { create(:merge_request, source_project: project) }
+ end
+
+ it_behaves_like 'a non-unsubscribable notification', 'commit' do
+ let(:project) { create(:project, :repository) }
+ let(:noteable) { project.commit }
+ end
+
+ it_behaves_like 'a non-unsubscribable notification', 'personal snippet' do
+ let(:noteable) { create(:personal_snippet, project: project) }
+ end
+
+ it_behaves_like 'a non-unsubscribable notification', 'project snippet' do
+ let(:noteable) { create(:project_snippet, project: project) }
+ end
+ end
+
+ describe '#for_commit?' do
+ shared_examples 'a commit notification' do |noteable_type|
+ subject { described_class.record(noteable, user.id) }
+
+ context "for #{noteable_type}" do
+ it { expect(subject).to be_for_commit }
+ end
+ end
+
+ shared_examples 'a non-commit notification' do |noteable_type|
+ subject { described_class.record(noteable, user.id) }
+
+ context "for a #{noteable_type}" do
+ it { expect(subject).not_to be_for_commit }
+ end
+ end
+
+ it_behaves_like 'a non-commit notification', 'issue' do
+ let(:noteable) { create(:issue, project: project) }
+ end
+
+ it_behaves_like 'a non-commit notification', 'merge request' do
+ let(:noteable) { create(:merge_request, source_project: project) }
+ end
+
+ it_behaves_like 'a commit notification', 'commit' do
+ let(:project) { create(:project, :repository) }
+ let(:noteable) { project.commit }
+ end
+
+ it_behaves_like 'a non-commit notification', 'personal snippet' do
+ let(:noteable) { create(:personal_snippet, project: project) }
+ end
+
+ it_behaves_like 'a non-commit notification', 'project snippet' do
+ let(:noteable) { create(:project_snippet, project: project) }
+ end
+ end
+
+ describe '#for_snippet?' do
+ shared_examples 'a snippet notification' do |noteable_type|
+ subject { described_class.record(noteable, user.id) }
+
+ context "for #{noteable_type}" do
+ it { expect(subject).to be_for_snippet }
+ end
+ end
+
+ shared_examples 'a non-snippet notification' do |noteable_type|
+ subject { described_class.record(noteable, user.id) }
+
+ context "for a #{noteable_type}" do
+ it { expect(subject).not_to be_for_snippet }
+ end
+ end
+
+ it_behaves_like 'a non-snippet notification', 'issue' do
+ let(:noteable) { create(:issue, project: project) }
+ end
+
+ it_behaves_like 'a non-snippet notification', 'merge request' do
+ let(:noteable) { create(:merge_request, source_project: project) }
+ end
+
+ it_behaves_like 'a non-snippet notification', 'commit' do
+ let(:project) { create(:project, :repository) }
+ let(:noteable) { project.commit }
+ end
+
+ it_behaves_like 'a snippet notification', 'personal snippet' do
+ let(:noteable) { create(:personal_snippet, project: project) }
+ end
+
+ it_behaves_like 'a snippet notification', 'project snippet' do
+ let(:noteable) { create(:project_snippet, project: project) }
+ end
+ end
+
describe '#create_reply' do
context 'for issue' do
let(:issue) { create(:issue) }
diff --git a/spec/models/user_custom_attribute_spec.rb b/spec/models/user_custom_attribute_spec.rb
new file mode 100644
index 00000000000..37fc3cb64f0
--- /dev/null
+++ b/spec/models/user_custom_attribute_spec.rb
@@ -0,0 +1,16 @@
+require 'spec_helper'
+
+describe UserCustomAttribute do
+ describe 'associations' do
+ it { is_expected.to belong_to(:user) }
+ end
+
+ describe 'validations' do
+ subject { build :user_custom_attribute }
+
+ it { is_expected.to validate_presence_of(:user_id) }
+ it { is_expected.to validate_presence_of(:key) }
+ it { is_expected.to validate_presence_of(:value) }
+ it { is_expected.to validate_uniqueness_of(:key).scoped_to(:user_id) }
+ end
+end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 73a1e47149c..1c3c9068f12 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -2,6 +2,7 @@ require 'spec_helper'
describe User do
include Gitlab::CurrentSettings
+ include ProjectForksHelper
describe 'modules' do
subject { described_class }
@@ -39,6 +40,7 @@ describe User do
it { is_expected.to have_many(:chat_names).dependent(:destroy) }
it { is_expected.to have_many(:uploads).dependent(:destroy) }
it { is_expected.to have_many(:reported_abuse_reports).dependent(:destroy).class_name('AbuseReport') }
+ it { is_expected.to have_many(:custom_attributes).class_name('UserCustomAttribute') }
describe "#abuse_report" do
let(:current_user) { create(:user) }
@@ -359,9 +361,22 @@ describe User do
expect(external_user.projects_limit).to be 0
end
end
+
+ describe '#check_for_verified_email' do
+ let(:user) { create(:user) }
+ let(:secondary) { create(:email, :confirmed, email: 'secondary@example.com', user: user) }
+
+ it 'allows a verified secondary email to be used as the primary without needing reconfirmation' do
+ user.update_attributes!(email: secondary.email)
+ user.reload
+ expect(user.email).to eq secondary.email
+ expect(user.unconfirmed_email).to eq nil
+ expect(user.confirmed?).to be_truthy
+ end
+ end
end
- describe 'after update hook' do
+ describe 'after commit hook' do
describe '.update_invalid_gpg_signatures' do
let(:user) do
create(:user, email: 'tula.torphy@abshire.ca').tap do |user|
@@ -375,10 +390,50 @@ describe User do
end
it 'synchronizes the gpg keys when the email is updated' do
- expect(user).to receive(:update_invalid_gpg_signatures)
+ expect(user).to receive(:update_invalid_gpg_signatures).at_most(:twice)
user.update_attributes!(email: 'shawnee.ritchie@denesik.com')
end
end
+
+ describe '#update_emails_with_primary_email' do
+ before do
+ @user = create(:user, email: 'primary@example.com').tap do |user|
+ user.skip_reconfirmation!
+ end
+ @secondary = create :email, email: 'secondary@example.com', user: @user
+ @user.reload
+ end
+
+ it 'gets called when the email is updated' do
+ expect(@user).to receive(:update_emails_with_primary_email)
+
+ @user.update_attributes!(email: 'new_primary@example.com')
+ end
+
+ it 'adds old primary to secondary emails when secondary is a new email' do
+ @user.update_attributes!(email: 'new_primary@example.com')
+ @user.reload
+
+ expect(@user.emails.count).to eq 2
+ expect(@user.emails.pluck(:email)).to match_array([@secondary.email, 'primary@example.com'])
+ end
+
+ it 'adds old primary to secondary emails if secondary is becoming a primary' do
+ @user.update_attributes!(email: @secondary.email)
+ @user.reload
+
+ expect(@user.emails.count).to eq 1
+ expect(@user.emails.first.email).to eq 'primary@example.com'
+ end
+
+ it 'transfers old confirmation values into new secondary' do
+ @user.update_attributes!(email: @secondary.email)
+ @user.reload
+
+ expect(@user.emails.count).to eq 1
+ expect(@user.emails.first.confirmed_at).not_to eq nil
+ end
+ end
end
describe '#update_tracked_fields!', :clean_gitlab_redis_shared_state do
@@ -466,6 +521,7 @@ describe User do
describe '#generate_password' do
it "does not generate password by default" do
user = create(:user, password: 'abcdefghe')
+
expect(user.password).to eq('abcdefghe')
end
end
@@ -473,6 +529,7 @@ describe User do
describe 'authentication token' do
it "has authentication token" do
user = create(:user)
+
expect(user.authentication_token).not_to be_blank
end
end
@@ -480,6 +537,7 @@ describe User do
describe 'ensure incoming email token' do
it 'has incoming email token' do
user = create(:user)
+
expect(user.incoming_email_token).not_to be_blank
end
end
@@ -522,6 +580,7 @@ describe User do
it 'ensures an rss token on read' do
user = create(:user, rss_token: nil)
rss_token = user.rss_token
+
expect(rss_token).not_to be_blank
expect(user.reload.rss_token).to eq rss_token
end
@@ -632,6 +691,7 @@ describe User do
it "blocks user" do
user.block
+
expect(user.blocked?).to be_truthy
end
end
@@ -965,6 +1025,7 @@ describe User do
it 'is case-insensitive' do
user = create(:user, username: 'JohnDoe')
+
expect(described_class.find_by_username('JOHNDOE')).to eq user
end
end
@@ -977,6 +1038,7 @@ describe User do
it 'is case-insensitive' do
user = create(:user, username: 'JohnDoe')
+
expect(described_class.find_by_username!('JOHNDOE')).to eq user
end
end
@@ -1066,11 +1128,13 @@ describe User do
it 'is true if avatar is image' do
user.update_attribute(:avatar, 'uploads/avatar.png')
+
expect(user.avatar_type).to be_truthy
end
it 'is false if avatar is html page' do
user.update_attribute(:avatar, 'uploads/avatar.html')
+
expect(user.avatar_type).to eq(['only images allowed'])
end
end
@@ -1093,6 +1157,50 @@ describe User do
end
end
+ describe '#all_emails' do
+ let(:user) { create(:user) }
+
+ it 'returns all emails' do
+ email_confirmed = create :email, user: user, confirmed_at: Time.now
+ email_unconfirmed = create :email, user: user
+ user.reload
+
+ expect(user.all_emails).to match_array([user.email, email_unconfirmed.email, email_confirmed.email])
+ end
+ end
+
+ describe '#verified_emails' do
+ let(:user) { create(:user) }
+
+ it 'returns only confirmed emails' do
+ email_confirmed = create :email, user: user, confirmed_at: Time.now
+ create :email, user: user
+ user.reload
+
+ expect(user.verified_emails).to match_array([user.email, email_confirmed.email])
+ end
+ end
+
+ describe '#verified_email?' do
+ let(:user) { create(:user) }
+
+ it 'returns true when the email is verified/confirmed' do
+ email_confirmed = create :email, user: user, confirmed_at: Time.now
+ create :email, user: user
+ user.reload
+
+ expect(user.verified_email?(user.email)).to be_truthy
+ expect(user.verified_email?(email_confirmed.email.titlecase)).to be_truthy
+ end
+
+ it 'returns false when the email is not verified/confirmed' do
+ email_unconfirmed = create :email, user: user
+ user.reload
+
+ expect(user.verified_email?(email_unconfirmed.email)).to be_falsy
+ end
+ end
+
describe '#requires_ldap_check?' do
let(:user) { described_class.new }
@@ -1100,6 +1208,7 @@ describe User do
# Create a condition which would otherwise cause 'true' to be returned
allow(user).to receive(:ldap_user?).and_return(true)
user.last_credential_check_at = nil
+
expect(user.requires_ldap_check?).to be_falsey
end
@@ -1110,6 +1219,7 @@ describe User do
it 'is false for non-LDAP users' do
allow(user).to receive(:ldap_user?).and_return(false)
+
expect(user.requires_ldap_check?).to be_falsey
end
@@ -1120,11 +1230,13 @@ describe User do
it 'is true when the user has never had an LDAP check before' do
user.last_credential_check_at = nil
+
expect(user.requires_ldap_check?).to be_truthy
end
it 'is true when the last LDAP check happened over 1 hour ago' do
user.last_credential_check_at = 2.hours.ago
+
expect(user.requires_ldap_check?).to be_truthy
end
end
@@ -1135,16 +1247,19 @@ describe User do
describe '#ldap_user?' do
it 'is true if provider name starts with ldap' do
user = create(:omniauth_user, provider: 'ldapmain')
+
expect(user.ldap_user?).to be_truthy
end
it 'is false for other providers' do
user = create(:omniauth_user, provider: 'other-provider')
+
expect(user.ldap_user?).to be_falsey
end
it 'is false if no extern_uid is provided' do
user = create(:omniauth_user, extern_uid: nil)
+
expect(user.ldap_user?).to be_falsey
end
end
@@ -1152,6 +1267,7 @@ describe User do
describe '#ldap_identity' do
it 'returns ldap identity' do
user = create :omniauth_user
+
expect(user.ldap_identity.provider).not_to be_empty
end
end
@@ -1161,6 +1277,7 @@ describe User do
it 'blocks user flagging the action coming from ldap' do
user.ldap_block
+
expect(user.blocked?).to be_truthy
expect(user.ldap_blocked?).to be_truthy
end
@@ -1233,18 +1350,22 @@ describe User do
expect(user.starred?(project2)).to be_falsey
star1 = UsersStarProject.create!(project: project1, user: user)
+
expect(user.starred?(project1)).to be_truthy
expect(user.starred?(project2)).to be_falsey
star2 = UsersStarProject.create!(project: project2, user: user)
+
expect(user.starred?(project1)).to be_truthy
expect(user.starred?(project2)).to be_truthy
star1.destroy
+
expect(user.starred?(project1)).to be_falsey
expect(user.starred?(project2)).to be_truthy
star2.destroy
+
expect(user.starred?(project1)).to be_falsey
expect(user.starred?(project2)).to be_falsey
end
@@ -1256,9 +1377,13 @@ describe User do
project = create(:project, :public)
expect(user.starred?(project)).to be_falsey
+
user.toggle_star(project)
+
expect(user.starred?(project)).to be_truthy
+
user.toggle_star(project)
+
expect(user.starred?(project)).to be_falsey
end
end
@@ -1307,7 +1432,7 @@ describe User do
describe "#contributed_projects" do
subject { create(:user) }
let!(:project1) { create(:project) }
- let!(:project2) { create(:project, forked_from_project: project3) }
+ let!(:project2) { fork_project(project3) }
let!(:project3) { create(:project) }
let!(:merge_request) { create(:merge_request, source_project: project2, target_project: project3, author: subject) }
let!(:push_event) { create(:push_event, project: project1, author: subject) }
@@ -1331,6 +1456,23 @@ describe User do
end
end
+ describe '#fork_of' do
+ let(:user) { create(:user) }
+
+ it "returns a user's fork of a project" do
+ project = create(:project, :public)
+ user_fork = fork_project(project, user, namespace: user.namespace)
+
+ expect(user.fork_of(project)).to eq(user_fork)
+ end
+
+ it 'returns nil if the project does not have a fork network' do
+ project = create(:project)
+
+ expect(user.fork_of(project)).to be_nil
+ end
+ end
+
describe '#can_be_removed?' do
subject { create(:user) }
@@ -1349,56 +1491,24 @@ describe User do
end
describe "#recent_push" do
- subject { create(:user) }
- let!(:project1) { create(:project, :repository) }
- let!(:project2) { create(:project, :repository, forked_from_project: project1) }
-
- let!(:push_event) do
- event = create(:push_event, project: project2, author: subject)
-
- create(:push_event_payload,
- event: event,
- commit_to: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
- commit_count: 0,
- ref: 'master')
-
- event
- end
-
- before do
- project1.team << [subject, :master]
- project2.team << [subject, :master]
- end
-
- it "includes push event" do
- expect(subject.recent_push).to eq(push_event)
- end
+ let(:user) { build(:user) }
+ let(:project) { build(:project) }
+ let(:event) { build(:push_event) }
- it "excludes push event if branch has been deleted" do
- allow_any_instance_of(Repository).to receive(:branch_exists?).with('master').and_return(false)
+ it 'returns the last push event for the user' do
+ expect_any_instance_of(Users::LastPushEventService)
+ .to receive(:last_event_for_user)
+ .and_return(event)
- expect(subject.recent_push).to eq(nil)
+ expect(user.recent_push).to eq(event)
end
- it "excludes push event if MR is opened for it" do
- create(:merge_request, source_project: project2, target_project: project1, source_branch: project2.default_branch, target_branch: 'fix', author: subject)
+ it 'returns the last push event for a project when one is given' do
+ expect_any_instance_of(Users::LastPushEventService)
+ .to receive(:last_event_for_project)
+ .and_return(event)
- expect(subject.recent_push).to eq(nil)
- end
-
- it "includes push events on any of the provided projects" do
- expect(subject.recent_push(project1)).to eq(nil)
- expect(subject.recent_push(project2)).to eq(push_event)
-
- push_event1 = create(:push_event, project: project1, author: subject)
-
- create(:push_event_payload,
- event: push_event1,
- commit_to: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
- commit_count: 0,
- ref: 'master')
-
- expect(subject.recent_push([project1, project2])).to eq(push_event1) # Newest
+ expect(user.recent_push(project)).to eq(event)
end
end
@@ -1415,7 +1525,7 @@ describe User do
it { is_expected.to eq([private_group]) }
end
- describe '#authorized_projects', truncate: true do
+ describe '#authorized_projects', :truncate do
context 'with a minimum access level' do
it 'includes projects for which the user is an owner' do
user = create(:user)
@@ -1469,9 +1579,11 @@ describe User do
user = create(:user)
member = group.add_developer(user)
+
expect(user.authorized_projects).to include(project)
member.destroy
+
expect(user.authorized_projects).not_to include(project)
end
@@ -1492,9 +1604,11 @@ describe User do
project = create(:project, :private, namespace: user1.namespace)
project.team << [user2, Gitlab::Access::DEVELOPER]
+
expect(user2.authorized_projects).to include(project)
project.destroy
+
expect(user2.authorized_projects).not_to include(project)
end
@@ -1504,9 +1618,11 @@ describe User do
user = create(:user)
group.add_developer(user)
+
expect(user.authorized_projects).to include(project)
group.destroy
+
expect(user.authorized_projects).not_to include(project)
end
end
@@ -1761,7 +1877,7 @@ describe User do
end
end
- describe '#refresh_authorized_projects', clean_gitlab_redis_shared_state: true do
+ describe '#refresh_authorized_projects', :clean_gitlab_redis_shared_state do
let(:project1) { create(:project) }
let(:project2) { create(:project) }
let(:user) { create(:user) }
@@ -2050,7 +2166,9 @@ describe User do
it 'creates the namespace' do
expect(user.namespace).to be_nil
+
user.save!
+
expect(user.namespace).not_to be_nil
end
end
@@ -2071,11 +2189,13 @@ describe User do
it 'updates the namespace name' do
user.update_attributes!(username: new_username)
+
expect(user.namespace.name).to eq(new_username)
end
it 'updates the namespace path' do
user.update_attributes!(username: new_username)
+
expect(user.namespace.path).to eq(new_username)
end
@@ -2089,6 +2209,7 @@ describe User do
it 'adds the namespace errors to the user' do
user.update_attributes(username: new_username)
+
expect(user.errors.full_messages.first).to eq('Namespace name has already been taken')
end
end
@@ -2105,56 +2226,49 @@ describe User do
end
end
- describe '#verified_email?' do
- it 'returns true when the email is the primary email' do
- user = build :user, email: 'email@example.com'
-
- expect(user.verified_email?('email@example.com')).to be true
- end
-
- it 'returns false when the email is not the primary email' do
- user = build :user, email: 'email@example.com'
-
- expect(user.verified_email?('other_email@example.com')).to be false
- end
- end
-
describe '#sync_attribute?' do
let(:user) { described_class.new }
context 'oauth user' do
it 'returns true if name can be synced' do
stub_omniauth_setting(sync_profile_attributes: %w(name location))
+
expect(user.sync_attribute?(:name)).to be_truthy
end
it 'returns true if email can be synced' do
stub_omniauth_setting(sync_profile_attributes: %w(name email))
+
expect(user.sync_attribute?(:email)).to be_truthy
end
it 'returns true if location can be synced' do
stub_omniauth_setting(sync_profile_attributes: %w(location email))
+
expect(user.sync_attribute?(:email)).to be_truthy
end
it 'returns false if name can not be synced' do
stub_omniauth_setting(sync_profile_attributes: %w(location email))
+
expect(user.sync_attribute?(:name)).to be_falsey
end
it 'returns false if email can not be synced' do
stub_omniauth_setting(sync_profile_attributes: %w(location email))
+
expect(user.sync_attribute?(:name)).to be_falsey
end
it 'returns false if location can not be synced' do
stub_omniauth_setting(sync_profile_attributes: %w(location email))
+
expect(user.sync_attribute?(:name)).to be_falsey
end
it 'returns true for all syncable attributes if all syncable attributes can be synced' do
stub_omniauth_setting(sync_profile_attributes: true)
+
expect(user.sync_attribute?(:name)).to be_truthy
expect(user.sync_attribute?(:email)).to be_truthy
expect(user.sync_attribute?(:location)).to be_truthy
@@ -2170,6 +2284,7 @@ describe User do
context 'ldap user' do
it 'returns true for email if ldap user' do
allow(user).to receive(:ldap_user?).and_return(true)
+
expect(user.sync_attribute?(:name)).to be_falsey
expect(user.sync_attribute?(:email)).to be_truthy
expect(user.sync_attribute?(:location)).to be_falsey
@@ -2178,10 +2293,56 @@ describe User do
it 'returns true for email and location if ldap user and location declared as syncable' do
allow(user).to receive(:ldap_user?).and_return(true)
stub_omniauth_setting(sync_profile_attributes: %w(location))
+
expect(user.sync_attribute?(:name)).to be_falsey
expect(user.sync_attribute?(:email)).to be_truthy
expect(user.sync_attribute?(:location)).to be_truthy
end
end
end
+
+ describe '#confirm_deletion_with_password?' do
+ where(
+ password_automatically_set: [true, false],
+ ldap_user: [true, false],
+ password_authentication_disabled: [true, false]
+ )
+
+ with_them do
+ let!(:user) { create(:user, password_automatically_set: password_automatically_set) }
+ let!(:identity) { create(:identity, user: user) if ldap_user }
+
+ # Only confirm deletion with password if all inputs are false
+ let(:expected) { !(password_automatically_set || ldap_user || password_authentication_disabled) }
+
+ before do
+ stub_application_setting(password_authentication_enabled: !password_authentication_disabled)
+ end
+
+ it 'returns false unless all inputs are false' do
+ expect(user.confirm_deletion_with_password?).to eq(expected)
+ end
+ end
+ end
+
+ describe '#delete_async' do
+ let(:user) { create(:user) }
+ let(:deleted_by) { create(:user) }
+
+ it 'blocks the user then schedules them for deletion if a hard delete is specified' do
+ expect(DeleteUserWorker).to receive(:perform_async).with(deleted_by.id, user.id, hard_delete: true)
+
+ user.delete_async(deleted_by: deleted_by, params: { hard_delete: true })
+
+ expect(user).to be_blocked
+ end
+
+ it 'schedules user for deletion without blocking them' do
+ expect(DeleteUserWorker).to receive(:perform_async).with(deleted_by.id, user.id, {})
+
+ user.delete_async(deleted_by: deleted_by)
+
+ expect(user).not_to be_blocked
+ end
+ end
end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index 9ef8d117123..1f14d06997e 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -80,7 +80,7 @@ describe WikiPage do
context "when initialized with an existing gollum page" do
before do
create_page("test page", "test content")
- @page = wiki.wiki.paged("test page")
+ @page = wiki.wiki.page(title: "test page")
@wiki_page = described_class.new(wiki, @page, true)
end
@@ -105,7 +105,7 @@ describe WikiPage do
end
it "sets the version attribute" do
- expect(@wiki_page.version).to be_a Gollum::Git::Commit
+ expect(@wiki_page.version).to be_a Gitlab::Git::WikiPageVersion
end
end
end
@@ -321,14 +321,14 @@ describe WikiPage do
end
it 'returns true when requesting an old version' do
- old_version = @page.versions.last.to_s
+ old_version = @page.versions.last.id
old_page = wiki.find_page('Update', old_version)
expect(old_page.historical?).to eq true
end
it 'returns false when requesting latest version' do
- latest_version = @page.versions.first.to_s
+ latest_version = @page.versions.first.id
latest_page = wiki.find_page('Update', latest_version)
expect(latest_page.historical?).to eq false
@@ -393,7 +393,7 @@ describe WikiPage do
end
def commit_details
- { name: user.name, email: user.email, message: "test commit" }
+ Gitlab::Git::Wiki::CommitDetails.new(user.name, user.email, "test commit")
end
def create_page(name, content)
@@ -401,8 +401,8 @@ describe WikiPage do
end
def destroy_page(title)
- page = wiki.wiki.paged(title)
- wiki.wiki.delete_page(page, commit_details)
+ page = wiki.wiki.page(title: title)
+ wiki.delete_page(page, commit_details)
end
def get_slugs(page_or_dir)
diff --git a/spec/policies/gcp/cluster_policy_spec.rb b/spec/policies/gcp/cluster_policy_spec.rb
new file mode 100644
index 00000000000..e213aa3d557
--- /dev/null
+++ b/spec/policies/gcp/cluster_policy_spec.rb
@@ -0,0 +1,28 @@
+require 'spec_helper'
+
+describe Gcp::ClusterPolicy, :models do
+ set(:project) { create(:project) }
+ set(:cluster) { create(:gcp_cluster, project: project) }
+ let(:user) { create(:user) }
+ let(:policy) { described_class.new(user, cluster) }
+
+ describe 'rules' do
+ context 'when developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { expect(policy).to be_disallowed :update_cluster }
+ it { expect(policy).to be_disallowed :admin_cluster }
+ end
+
+ context 'when master' do
+ before do
+ project.add_master(user)
+ end
+
+ it { expect(policy).to be_allowed :update_cluster }
+ it { expect(policy).to be_allowed :admin_cluster }
+ end
+ end
+end
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index a6bf70c1e09..5b8cf2e6ab5 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -51,4 +51,41 @@ describe GlobalPolicy do
end
end
end
+
+ describe "create fork" do
+ context "when user has not exceeded project limit" do
+ it { is_expected.to be_allowed(:create_fork) }
+ end
+
+ context "when user has exceeded project limit" do
+ let(:current_user) { create(:user, projects_limit: 0) }
+
+ it { is_expected.not_to be_allowed(:create_fork) }
+ end
+
+ context "when user is a master in a group" do
+ let(:group) { create(:group) }
+ let(:current_user) { create(:user, projects_limit: 0) }
+
+ before do
+ group.add_master(current_user)
+ end
+
+ it { is_expected.to be_allowed(:create_fork) }
+ end
+ end
+
+ describe 'custom attributes' do
+ context 'regular user' do
+ it { is_expected.not_to be_allowed(:read_custom_attribute) }
+ it { is_expected.not_to be_allowed(:update_custom_attribute) }
+ end
+
+ context 'admin' do
+ let(:current_user) { create(:user, :admin) }
+
+ it { is_expected.to be_allowed(:read_custom_attribute) }
+ it { is_expected.to be_allowed(:update_custom_attribute) }
+ end
+ end
end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index b186a78e44a..17dc3bb4f48 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -11,10 +11,11 @@ describe GroupPolicy do
let(:reporter_permissions) { [:admin_label] }
+ let(:developer_permissions) { [:admin_milestones] }
+
let(:master_permissions) do
[
- :create_projects,
- :admin_milestones
+ :create_projects
]
end
@@ -52,6 +53,7 @@ describe GroupPolicy do
it do
expect_allowed(:read_group)
expect_disallowed(*reporter_permissions)
+ expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
expect_disallowed(*owner_permissions)
end
@@ -63,6 +65,7 @@ describe GroupPolicy do
it do
expect_allowed(:read_group)
expect_disallowed(*reporter_permissions)
+ expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
expect_disallowed(*owner_permissions)
end
@@ -74,6 +77,7 @@ describe GroupPolicy do
it do
expect_allowed(:read_group)
expect_allowed(*reporter_permissions)
+ expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
expect_disallowed(*owner_permissions)
end
@@ -85,6 +89,7 @@ describe GroupPolicy do
it do
expect_allowed(:read_group)
expect_allowed(*reporter_permissions)
+ expect_allowed(*developer_permissions)
expect_disallowed(*master_permissions)
expect_disallowed(*owner_permissions)
end
@@ -96,6 +101,7 @@ describe GroupPolicy do
it do
expect_allowed(:read_group)
expect_allowed(*reporter_permissions)
+ expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
expect_disallowed(*owner_permissions)
end
@@ -109,6 +115,7 @@ describe GroupPolicy do
expect_allowed(:read_group)
expect_allowed(*reporter_permissions)
+ expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
expect_allowed(*owner_permissions)
end
@@ -122,6 +129,7 @@ describe GroupPolicy do
expect_allowed(:read_group)
expect_allowed(*reporter_permissions)
+ expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
expect_allowed(*owner_permissions)
end
@@ -180,6 +188,7 @@ describe GroupPolicy do
it do
expect_disallowed(:read_group)
expect_disallowed(*reporter_permissions)
+ expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
expect_disallowed(*owner_permissions)
end
@@ -191,6 +200,7 @@ describe GroupPolicy do
it do
expect_allowed(:read_group)
expect_disallowed(*reporter_permissions)
+ expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
expect_disallowed(*owner_permissions)
end
@@ -202,6 +212,7 @@ describe GroupPolicy do
it do
expect_allowed(:read_group)
expect_allowed(*reporter_permissions)
+ expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
expect_disallowed(*owner_permissions)
end
@@ -213,6 +224,7 @@ describe GroupPolicy do
it do
expect_allowed(:read_group)
expect_allowed(*reporter_permissions)
+ expect_allowed(*developer_permissions)
expect_disallowed(*master_permissions)
expect_disallowed(*owner_permissions)
end
@@ -224,6 +236,7 @@ describe GroupPolicy do
it do
expect_allowed(:read_group)
expect_allowed(*reporter_permissions)
+ expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
expect_disallowed(*owner_permissions)
end
@@ -237,6 +250,7 @@ describe GroupPolicy do
expect_allowed(:read_group)
expect_allowed(*reporter_permissions)
+ expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
expect_allowed(*owner_permissions)
end
diff --git a/spec/policies/issuable_policy_spec.rb b/spec/policies/issuable_policy_spec.rb
new file mode 100644
index 00000000000..2cf669e8191
--- /dev/null
+++ b/spec/policies/issuable_policy_spec.rb
@@ -0,0 +1,28 @@
+require 'spec_helper'
+
+describe IssuablePolicy, models: true do
+ describe '#rules' do
+ context 'when discussion is locked for the issuable' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public) }
+ let(:issue) { create(:issue, project: project, discussion_locked: true) }
+ let(:policies) { described_class.new(user, issue) }
+
+ context 'when the user is not a project member' do
+ it 'can not create a note' do
+ expect(policies).to be_disallowed(:create_note)
+ end
+ end
+
+ context 'when the user is a project member' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'can create a note' do
+ expect(policies).to be_allowed(:create_note)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/policies/namespace_policy_spec.rb b/spec/policies/namespace_policy_spec.rb
new file mode 100644
index 00000000000..e52ff02e5f0
--- /dev/null
+++ b/spec/policies/namespace_policy_spec.rb
@@ -0,0 +1,20 @@
+require 'spec_helper'
+
+describe NamespacePolicy do
+ let(:current_user) { create(:user) }
+ let(:namespace) { current_user.namespace }
+
+ subject { described_class.new(current_user, namespace) }
+
+ context "create projects" do
+ context "user namespace" do
+ it { is_expected.to be_allowed(:create_projects) }
+ end
+
+ context "user who has exceeded project limit" do
+ let(:current_user) { create(:user, projects_limit: 0) }
+
+ it { is_expected.not_to be_allowed(:create_projects) }
+ end
+ end
+end
diff --git a/spec/policies/note_policy_spec.rb b/spec/policies/note_policy_spec.rb
new file mode 100644
index 00000000000..58d36a2c84e
--- /dev/null
+++ b/spec/policies/note_policy_spec.rb
@@ -0,0 +1,71 @@
+require 'spec_helper'
+
+describe NotePolicy, models: true do
+ describe '#rules' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public) }
+ let(:issue) { create(:issue, project: project) }
+
+ def policies(noteable = nil)
+ return @policies if @policies
+
+ noteable ||= issue
+ note = create(:note, noteable: noteable, author: user, project: project)
+
+ @policies = described_class.new(user, note)
+ end
+
+ context 'when the project is public' do
+ context 'when the note author is not a project member' do
+ it 'can edit a note' do
+ expect(policies).to be_allowed(:update_note)
+ expect(policies).to be_allowed(:admin_note)
+ expect(policies).to be_allowed(:resolve_note)
+ expect(policies).to be_allowed(:read_note)
+ end
+ end
+
+ context 'when the noteable is a snippet' do
+ it 'can edit note' do
+ policies = policies(create(:project_snippet, project: project))
+
+ expect(policies).to be_allowed(:update_note)
+ expect(policies).to be_allowed(:admin_note)
+ expect(policies).to be_allowed(:resolve_note)
+ expect(policies).to be_allowed(:read_note)
+ end
+ end
+
+ context 'when a discussion is locked' do
+ before do
+ issue.update_attribute(:discussion_locked, true)
+ end
+
+ context 'when the note author is a project member' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'can edit a note' do
+ expect(policies).to be_allowed(:update_note)
+ expect(policies).to be_allowed(:admin_note)
+ expect(policies).to be_allowed(:resolve_note)
+ expect(policies).to be_allowed(:read_note)
+ end
+ end
+
+ context 'when the note author is not a project member' do
+ it 'can not edit a note' do
+ expect(policies).to be_disallowed(:update_note)
+ expect(policies).to be_disallowed(:admin_note)
+ expect(policies).to be_disallowed(:resolve_note)
+ end
+
+ it 'can read a note' do
+ expect(policies).to be_allowed(:read_note)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 4dbaf7fb025..f2593a1a75c 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -1,15 +1,15 @@
require 'spec_helper'
describe ProjectPolicy do
- let(:guest) { create(:user) }
- let(:reporter) { create(:user) }
- let(:dev) { create(:user) }
- let(:master) { create(:user) }
- let(:owner) { create(:user) }
- let(:admin) { create(:admin) }
+ set(:guest) { create(:user) }
+ set(:reporter) { create(:user) }
+ set(:developer) { create(:user) }
+ set(:master) { create(:user) }
+ set(:owner) { create(:user) }
+ set(:admin) { create(:admin) }
let(:project) { create(:project, :public, namespace: owner.namespace) }
- let(:guest_permissions) do
+ let(:base_guest_permissions) do
%i[
read_project read_board read_list read_wiki read_issue read_label
read_milestone read_project_snippet read_project_member
@@ -18,7 +18,7 @@ describe ProjectPolicy do
]
end
- let(:reporter_permissions) do
+ let(:base_reporter_permissions) do
%i[
download_code fork_project create_project_snippet update_issue
admin_issue admin_label admin_list read_commit_status read_build
@@ -33,7 +33,7 @@ describe ProjectPolicy do
let(:developer_permissions) do
%i[
- admin_merge_request update_merge_request create_commit_status
+ admin_milestone admin_merge_request update_merge_request create_commit_status
update_commit_status create_build update_build create_pipeline
update_pipeline create_merge_request create_wiki push_code
resolve_note create_container_image update_container_image
@@ -41,10 +41,10 @@ describe ProjectPolicy do
]
end
- let(:master_permissions) do
+ let(:base_master_permissions) do
%i[
delete_protected_branch update_project_snippet update_environment
- update_deployment admin_milestone admin_project_snippet
+ update_deployment admin_project_snippet
admin_project_member admin_note admin_wiki admin_project
admin_commit_status admin_build admin_container_image
admin_pipeline admin_environment admin_deployment
@@ -66,11 +66,20 @@ describe ProjectPolicy do
]
end
+ # Used in EE specs
+ let(:additional_guest_permissions) { [] }
+ let(:additional_reporter_permissions) { [] }
+ let(:additional_master_permissions) { [] }
+
+ let(:guest_permissions) { base_guest_permissions + additional_guest_permissions }
+ let(:reporter_permissions) { base_reporter_permissions + additional_reporter_permissions }
+ let(:master_permissions) { base_master_permissions + additional_master_permissions }
+
before do
- project.team << [guest, :guest]
- project.team << [master, :master]
- project.team << [dev, :developer]
- project.team << [reporter, :reporter]
+ project.add_guest(guest)
+ project.add_master(master)
+ project.add_developer(developer)
+ project.add_reporter(reporter)
end
def expect_allowed(*permissions)
@@ -127,38 +136,41 @@ describe ProjectPolicy do
end
end
- context 'when a project has pending invites, and the current user is anonymous' do
- let(:group) { create(:group, :public) }
- let(:project) { create(:project, :public, namespace: group) }
- let(:user_permissions) { [:create_project, :create_issue, :create_note, :upload_file] }
- let(:anonymous_permissions) { guest_permissions - user_permissions }
+ shared_examples 'project policies as anonymous' do
+ context 'abilities for public projects' do
+ context 'when a project has pending invites' do
+ let(:group) { create(:group, :public) }
+ let(:project) { create(:project, :public, namespace: group) }
+ let(:user_permissions) { [:create_project, :create_issue, :create_note, :upload_file] }
+ let(:anonymous_permissions) { guest_permissions - user_permissions }
- subject { described_class.new(nil, project) }
+ subject { described_class.new(nil, project) }
- before do
- create(:group_member, :invited, group: group)
- end
+ before do
+ create(:group_member, :invited, group: group)
+ end
- it 'does not grant owner access' do
- expect_allowed(*anonymous_permissions)
- expect_disallowed(*user_permissions)
+ it 'does not grant owner access' do
+ expect_allowed(*anonymous_permissions)
+ expect_disallowed(*user_permissions)
+ end
+ end
end
- end
- context 'abilities for non-public projects' do
- let(:project) { create(:project, namespace: owner.namespace) }
+ context 'abilities for non-public projects' do
+ let(:project) { create(:project, namespace: owner.namespace) }
- subject { described_class.new(current_user, project) }
-
- context 'with no user' do
- let(:current_user) { nil }
+ subject { described_class.new(nil, project) }
it { is_expected.to be_banned }
end
+ end
- context 'guests' do
- let(:current_user) { guest }
+ shared_examples 'project policies as guest' do
+ subject { described_class.new(guest, project) }
+ context 'abilities for non-public projects' do
+ let(:project) { create(:project, namespace: owner.namespace) }
let(:reporter_public_build_permissions) do
reporter_permissions - [:read_build, :read_pipeline]
end
@@ -179,7 +191,7 @@ describe ProjectPolicy do
end
end
- context 'public builds disabled' do
+ context 'when public builds disabled' do
before do
project.update(public_builds: false)
end
@@ -192,8 +204,7 @@ describe ProjectPolicy do
context 'when builds are disabled' do
before do
- project.project_feature.update(
- builds_access_level: ProjectFeature::DISABLED)
+ project.project_feature.update(builds_access_level: ProjectFeature::DISABLED)
end
it do
@@ -202,9 +213,13 @@ describe ProjectPolicy do
end
end
end
+ end
+
+ shared_examples 'project policies as reporter' do
+ context 'abilities for non-public projects' do
+ let(:project) { create(:project, namespace: owner.namespace) }
- context 'reporter' do
- let(:current_user) { reporter }
+ subject { described_class.new(reporter, project) }
it do
expect_allowed(*guest_permissions)
@@ -216,9 +231,13 @@ describe ProjectPolicy do
expect_disallowed(*owner_permissions)
end
end
+ end
- context 'developer' do
- let(:current_user) { dev }
+ shared_examples 'project policies as developer' do
+ context 'abilities for non-public projects' do
+ let(:project) { create(:project, namespace: owner.namespace) }
+
+ subject { described_class.new(developer, project) }
it do
expect_allowed(*guest_permissions)
@@ -229,9 +248,13 @@ describe ProjectPolicy do
expect_disallowed(*owner_permissions)
end
end
+ end
+
+ shared_examples 'project policies as master' do
+ context 'abilities for non-public projects' do
+ let(:project) { create(:project, namespace: owner.namespace) }
- context 'master' do
- let(:current_user) { master }
+ subject { described_class.new(master, project) }
it do
expect_allowed(*guest_permissions)
@@ -242,9 +265,13 @@ describe ProjectPolicy do
expect_disallowed(*owner_permissions)
end
end
+ end
+
+ shared_examples 'project policies as owner' do
+ context 'abilities for non-public projects' do
+ let(:project) { create(:project, namespace: owner.namespace) }
- context 'owner' do
- let(:current_user) { owner }
+ subject { described_class.new(owner, project) }
it do
expect_allowed(*guest_permissions)
@@ -255,9 +282,13 @@ describe ProjectPolicy do
expect_allowed(*owner_permissions)
end
end
+ end
- context 'admin' do
- let(:current_user) { admin }
+ shared_examples 'project policies as admin' do
+ context 'abilities for non-public projects' do
+ let(:project) { create(:project, namespace: owner.namespace) }
+
+ subject { described_class.new(admin, project) }
it do
expect_allowed(*guest_permissions)
@@ -269,4 +300,12 @@ describe ProjectPolicy do
end
end
end
+
+ it_behaves_like 'project policies as anonymous'
+ it_behaves_like 'project policies as guest'
+ it_behaves_like 'project policies as reporter'
+ it_behaves_like 'project policies as developer'
+ it_behaves_like 'project policies as master'
+ it_behaves_like 'project policies as owner'
+ it_behaves_like 'project policies as admin'
end
diff --git a/spec/presenters/ci/pipeline_presenter_spec.rb b/spec/presenters/ci/pipeline_presenter_spec.rb
index e4886a8f019..f7ceaf844be 100644
--- a/spec/presenters/ci/pipeline_presenter_spec.rb
+++ b/spec/presenters/ci/pipeline_presenter_spec.rb
@@ -51,4 +51,21 @@ describe Ci::PipelinePresenter do
end
end
end
+
+ context '#failure_reason' do
+ context 'when pipeline has failure reason' do
+ it 'represents a failure reason sentence' do
+ pipeline.failure_reason = :config_error
+
+ expect(presenter.failure_reason)
+ .to eq 'CI/CD YAML configuration error!'
+ end
+ end
+
+ context 'when pipeline does not have failure reason' do
+ it 'returns nil' do
+ expect(presenter.failure_reason).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/presenters/gcp/cluster_presenter_spec.rb b/spec/presenters/gcp/cluster_presenter_spec.rb
new file mode 100644
index 00000000000..8d86dc31582
--- /dev/null
+++ b/spec/presenters/gcp/cluster_presenter_spec.rb
@@ -0,0 +1,35 @@
+require 'spec_helper'
+
+describe Gcp::ClusterPresenter do
+ let(:project) { create(:project) }
+ let(:cluster) { create(:gcp_cluster, project: project) }
+
+ subject(:presenter) do
+ described_class.new(cluster)
+ end
+
+ it 'inherits from Gitlab::View::Presenter::Delegated' do
+ expect(described_class.superclass).to eq(Gitlab::View::Presenter::Delegated)
+ end
+
+ describe '#initialize' do
+ it 'takes a cluster and optional params' do
+ expect { presenter }.not_to raise_error
+ end
+
+ it 'exposes cluster' do
+ expect(presenter.cluster).to eq(cluster)
+ end
+
+ it 'forwards missing methods to cluster' do
+ expect(presenter.gcp_cluster_zone).to eq(cluster.gcp_cluster_zone)
+ end
+ end
+
+ describe '#gke_cluster_url' do
+ subject { described_class.new(cluster).gke_cluster_url }
+
+ it { is_expected.to include(cluster.gcp_cluster_zone) }
+ it { is_expected.to include(cluster.gcp_cluster_name) }
+ end
+end
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index 2187be0190d..5e114434a67 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -300,6 +300,10 @@ describe MergeRequestPresenter do
described_class.new(resource, current_user: user).remove_wip_path
end
+ before do
+ allow(resource).to receive(:work_in_progress?).and_return(true)
+ end
+
context 'when merge request enabled and has permission' do
it 'has remove_wip_path' do
allow(project).to receive(:merge_requests_enabled?) { true }
diff --git a/spec/requests/api/access_requests_spec.rb b/spec/requests/api/access_requests_spec.rb
index 6bd17697c33..50d0f72f6bc 100644
--- a/spec/requests/api/access_requests_spec.rb
+++ b/spec/requests/api/access_requests_spec.rb
@@ -1,12 +1,12 @@
require 'spec_helper'
describe API::AccessRequests do
- let(:master) { create(:user) }
- let(:developer) { create(:user) }
- let(:access_requester) { create(:user) }
- let(:stranger) { create(:user) }
+ set(:master) { create(:user) }
+ set(:developer) { create(:user) }
+ set(:access_requester) { create(:user) }
+ set(:stranger) { create(:user) }
- let(:project) do
+ set(:project) do
create(:project, :public, :access_requestable, creator_id: master.id, namespace: master.namespace) do |project|
project.team << [developer, :developer]
project.team << [master, :master]
@@ -14,7 +14,7 @@ describe API::AccessRequests do
end
end
- let(:group) do
+ set(:group) do
create(:group, :public, :access_requestable) do |group|
group.add_developer(developer)
group.add_owner(master)
diff --git a/spec/requests/api/award_emoji_spec.rb b/spec/requests/api/award_emoji_spec.rb
index 593068b8cd7..7a0765c1fae 100644
--- a/spec/requests/api/award_emoji_spec.rb
+++ b/spec/requests/api/award_emoji_spec.rb
@@ -1,13 +1,13 @@
require 'spec_helper'
describe API::AwardEmoji do
- let(:user) { create(:user) }
- let!(:project) { create(:project) }
- let(:issue) { create(:issue, project: project) }
- let!(:award_emoji) { create(:award_emoji, awardable: issue, user: user) }
- let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
- let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request, user: user) }
- let!(:note) { create(:note, project: project, noteable: issue) }
+ set(:user) { create(:user) }
+ set(:project) { create(:project) }
+ set(:issue) { create(:issue, project: project) }
+ set(:award_emoji) { create(:award_emoji, awardable: issue, user: user) }
+ let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request, user: user) }
+ set(:note) { create(:note, project: project, noteable: issue) }
before do
project.team << [user, :master]
diff --git a/spec/requests/api/boards_spec.rb b/spec/requests/api/boards_spec.rb
index f698d5dddb3..fcfa4ddfbfe 100644
--- a/spec/requests/api/boards_spec.rb
+++ b/spec/requests/api/boards_spec.rb
@@ -1,34 +1,34 @@
require 'spec_helper'
describe API::Boards do
- let(:user) { create(:user) }
- let(:user2) { create(:user) }
- let(:non_member) { create(:user) }
- let(:guest) { create(:user) }
- let(:admin) { create(:user, :admin) }
- let!(:project) { create(:project, :public, creator_id: user.id, namespace: user.namespace ) }
-
- let!(:dev_label) do
+ set(:user) { create(:user) }
+ set(:user2) { create(:user) }
+ set(:non_member) { create(:user) }
+ set(:guest) { create(:user) }
+ set(:admin) { create(:user, :admin) }
+ set(:project) { create(:project, :public, creator_id: user.id, namespace: user.namespace ) }
+
+ set(:dev_label) do
create(:label, title: 'Development', color: '#FFAABB', project: project)
end
- let!(:test_label) do
+ set(:test_label) do
create(:label, title: 'Testing', color: '#FFAACC', project: project)
end
- let!(:ux_label) do
+ set(:ux_label) do
create(:label, title: 'UX', color: '#FF0000', project: project)
end
- let!(:dev_list) do
+ set(:dev_list) do
create(:list, label: dev_label, position: 1)
end
- let!(:test_list) do
+ set(:test_list) do
create(:list, label: test_label, position: 2)
end
- let!(:board) do
+ set(:board) do
create(:board, project: project, lists: [dev_list, test_list])
end
@@ -187,8 +187,11 @@ describe API::Boards do
end
context "when the user is project owner" do
- let(:owner) { create(:user) }
- let(:project) { create(:project, namespace: owner.namespace) }
+ set(:owner) { create(:user) }
+
+ before do
+ project.update(namespace: owner.namespace)
+ end
it "deletes the list if an admin requests it" do
delete api("#{base_url}/#{dev_list.id}", owner)
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index cc794fad3a7..16b12446ed4 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -1,9 +1,9 @@
require 'spec_helper'
describe API::Branches do
- let(:user) { create(:user) }
- let(:guest) { create(:user).tap { |u| project.add_guest(u) } }
+ set(:user) { create(:user) }
let(:project) { create(:project, :repository, creator: user, path: 'my.project') }
+ let(:guest) { create(:user).tap { |u| project.add_guest(u) } }
let(:branch_name) { 'feature' }
let(:branch_sha) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' }
let(:branch_with_dot) { project.repository.find_branch('ends-with.json') }
@@ -40,7 +40,9 @@ describe API::Branches do
end
context 'when unauthenticated', 'and project is public' do
- let(:project) { create(:project, :public, :repository) }
+ before do
+ project.update(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
it_behaves_like 'repository branches'
end
@@ -118,7 +120,9 @@ describe API::Branches do
end
context 'when unauthenticated', 'and project is public' do
- let(:project) { create(:project, :public, :repository) }
+ before do
+ project.update(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
it_behaves_like 'repository branch'
end
diff --git a/spec/requests/api/broadcast_messages_spec.rb b/spec/requests/api/broadcast_messages_spec.rb
index b043a333d33..eacc575d97f 100644
--- a/spec/requests/api/broadcast_messages_spec.rb
+++ b/spec/requests/api/broadcast_messages_spec.rb
@@ -1,8 +1,9 @@
require 'spec_helper'
describe API::BroadcastMessages do
- let(:user) { create(:user) }
- let(:admin) { create(:admin) }
+ set(:user) { create(:user) }
+ set(:admin) { create(:admin) }
+ set(:message) { create(:broadcast_message) }
describe 'GET /broadcast_messages' do
it 'returns a 401 for anonymous users' do
@@ -31,8 +32,6 @@ describe API::BroadcastMessages do
end
describe 'GET /broadcast_messages/:id' do
- let!(:message) { create(:broadcast_message) }
-
it 'returns a 401 for anonymous users' do
get api("/broadcast_messages/#{message.id}")
@@ -103,8 +102,6 @@ describe API::BroadcastMessages do
end
describe 'PUT /broadcast_messages/:id' do
- let!(:message) { create(:broadcast_message) }
-
it 'returns a 401 for anonymous users' do
put api("/broadcast_messages/#{message.id}"),
attributes_for(:broadcast_message)
@@ -155,8 +152,6 @@ describe API::BroadcastMessages do
end
describe 'DELETE /broadcast_messages/:id' do
- let!(:message) { create(:broadcast_message) }
-
it 'returns a 401 for anonymous users' do
delete api("/broadcast_messages/#{message.id}"),
attributes_for(:broadcast_message)
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index f663719d28c..94462b4572d 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -491,6 +491,7 @@ describe API::Commits do
expect(json_response['stats']['deletions']).to eq(commit.stats.deletions)
expect(json_response['stats']['total']).to eq(commit.stats.total)
expect(json_response['status']).to be_nil
+ expect(json_response['last_pipeline']).to be_nil
end
context 'when ref does not exist' do
@@ -573,6 +574,10 @@ describe API::Commits do
expect(response).to have_http_status(200)
expect(response).to match_response_schema('public_api/v4/commit/detail')
expect(json_response['status']).to eq('created')
+ expect(json_response['last_pipeline']['id']).to eq(pipeline.id)
+ expect(json_response['last_pipeline']['ref']).to eq(pipeline.ref)
+ expect(json_response['last_pipeline']['sha']).to eq(pipeline.sha)
+ expect(json_response['last_pipeline']['status']).to eq(pipeline.status)
end
context 'when pipeline succeeds' do
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index 2361809e0e1..f8cd529a06c 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -20,6 +20,7 @@ describe API::Environments do
path path_with_namespace
star_count forks_count
created_at last_activity_at
+ avatar_url
)
get api("/projects/#{project.id}/environments", user)
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 42f0079e173..1671a046fdf 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -159,11 +159,14 @@ describe API::Groups do
context 'when using owned in the request' do
it 'returns an array of groups the user owns' do
+ group1.add_master(user2)
+
get api('/groups', user2), owned: true
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
expect(json_response.first['name']).to eq(group2.name)
end
end
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index d4006fe71a2..862920ad7c3 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -1,4 +1,6 @@
require 'spec_helper'
+require 'raven/transports/dummy'
+require_relative '../../../config/initializers/sentry'
describe API::Helpers do
include API::APIGuard::HelperMethods
@@ -159,18 +161,25 @@ describe API::Helpers do
end
describe "when authenticating using a user's private token" do
- it "returns nil for an invalid token" do
+ it "returns a 401 response for an invalid token" do
env[API::APIGuard::PRIVATE_TOKEN_HEADER] = 'invalid token'
allow_any_instance_of(self.class).to receive(:doorkeeper_guard) { false }
- expect(current_user).to be_nil
+ expect { current_user }.to raise_error /401/
end
- it "returns nil for a user without access" do
+ it "returns a 401 response for a user without access" do
env[API::APIGuard::PRIVATE_TOKEN_HEADER] = user.private_token
allow_any_instance_of(Gitlab::UserAccess).to receive(:allowed?).and_return(false)
- expect(current_user).to be_nil
+ expect { current_user }.to raise_error /401/
+ end
+
+ it 'returns a 401 response for a user who is blocked' do
+ user.block!
+ env[API::APIGuard::PRIVATE_TOKEN_HEADER] = user.private_token
+
+ expect { current_user }.to raise_error /401/
end
it "leaves user as is when sudo not specified" do
@@ -193,24 +202,31 @@ describe API::Helpers do
allow_any_instance_of(self.class).to receive(:doorkeeper_guard) { false }
end
- it "returns nil for an invalid token" do
+ it "returns a 401 response for an invalid token" do
env[API::APIGuard::PRIVATE_TOKEN_HEADER] = 'invalid token'
- expect(current_user).to be_nil
+ expect { current_user }.to raise_error /401/
end
- it "returns nil for a user without access" do
+ it "returns a 401 response for a user without access" do
env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token
allow_any_instance_of(Gitlab::UserAccess).to receive(:allowed?).and_return(false)
- expect(current_user).to be_nil
+ expect { current_user }.to raise_error /401/
end
- it "returns nil for a token without the appropriate scope" do
+ it 'returns a 401 response for a user who is blocked' do
+ user.block!
+ env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token
+
+ expect { current_user }.to raise_error /401/
+ end
+
+ it "returns a 401 response for a token without the appropriate scope" do
personal_access_token = create(:personal_access_token, user: user, scopes: ['read_user'])
env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token
- expect(current_user).to be_nil
+ expect { current_user }.to raise_error /401/
end
it "leaves user as is when sudo not specified" do
@@ -226,14 +242,14 @@ describe API::Helpers do
personal_access_token.revoke!
env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token
- expect(current_user).to be_nil
+ expect { current_user }.to raise_error /401/
end
it 'does not allow expired tokens' do
personal_access_token.update_attributes!(expires_at: 1.day.ago)
env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token
- expect(current_user).to be_nil
+ expect { current_user }.to raise_error /401/
end
end
@@ -351,6 +367,18 @@ describe API::Helpers do
end
end
end
+
+ context 'when user is blocked' do
+ before do
+ user.block!
+ end
+
+ it 'changes current_user to sudo' do
+ set_env(admin, user.id)
+
+ expect(current_user).to eq(user)
+ end
+ end
end
context 'with regular user' do
@@ -450,10 +478,55 @@ describe API::Helpers do
allow(exception).to receive(:backtrace).and_return(caller)
expect_any_instance_of(self.class).to receive(:sentry_context)
- expect(Raven).to receive(:capture_exception).with(exception)
+ expect(Raven).to receive(:capture_exception).with(exception, extra: {})
handle_api_exception(exception)
end
+
+ context 'with a personal access token given' do
+ let(:token) { create(:personal_access_token, scopes: ['api'], user: user) }
+
+ # Regression test for https://gitlab.com/gitlab-org/gitlab-ce/issues/38571
+ it 'does not raise an additional exception because of missing `request`' do
+ # We need to stub at a lower level than #sentry_enabled? otherwise
+ # Sentry is not enabled when the request below is made, and the test
+ # would pass even without the fix
+ expect(Gitlab::Sentry).to receive(:enabled?).twice.and_return(true)
+ expect(ProjectsFinder).to receive(:new).and_raise('Runtime Error!')
+
+ get api('/projects', personal_access_token: token)
+
+ # The 500 status is expected as we're testing a case where an exception
+ # is raised, but Grape shouldn't raise an additional exception
+ expect(response).to have_gitlab_http_status(500)
+ expect(json_response['message']).not_to include("undefined local variable or method `request'")
+ expect(json_response['message']).to start_with("\nRuntimeError (Runtime Error!):")
+ end
+ end
+
+ context 'extra information' do
+ # Sentry events are an array of the form [auth_header, data, options]
+ let(:event_data) { Raven.client.transport.events.first[1] }
+
+ before do
+ stub_application_setting(
+ sentry_enabled: true,
+ sentry_dsn: "dummy://12345:67890@sentry.localdomain/sentry/42"
+ )
+ configure_sentry
+ Raven.client.configuration.encoding = 'json'
+ end
+
+ it 'sends the params, excluding confidential values' do
+ expect(Gitlab::Sentry).to receive(:enabled?).twice.and_return(true)
+ expect(ProjectsFinder).to receive(:new).and_raise('Runtime Error!')
+
+ get api('/projects', user), password: 'dont_send_this', other_param: 'send_this'
+
+ expect(event_data).to include('other_param=send_this')
+ expect(event_data).to include('password=********')
+ end
+ end
end
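The 'extra information' context above verifies that request parameters reach Sentry with confidential values masked (`password=********`). A minimal sketch, assuming the sentry-raven client options (`sanitize_fields`, `encoding`) behave as named here, of the kind of configuration that produces that masking; the field list is illustrative only and not taken from this diff:

    # Illustrative only: scrub sensitive request parameters before an event is sent.
    require 'raven'

    Raven.configure do |config|
      config.dsn             = 'dummy://12345:67890@sentry.localdomain/sentry/42'
      config.encoding        = 'json'
      config.sanitize_fields = %w[password]  # assumed option; values appear as ********
    end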
describe '.authenticate_non_get!' do
@@ -490,11 +563,10 @@ describe API::Helpers do
context 'current_user is nil' do
before do
expect_any_instance_of(self.class).to receive(:current_user).and_return(nil)
- allow_any_instance_of(self.class).to receive(:initial_current_user).and_return(nil)
end
it 'returns a 401 response' do
- expect { authenticate! }.to raise_error '401 - {"message"=>"401 Unauthorized"}'
+ expect { authenticate! }.to raise_error /401/
end
end
@@ -502,35 +574,12 @@ describe API::Helpers do
let(:user) { build(:user) }
before do
- expect_any_instance_of(self.class).to receive(:current_user).at_least(:once).and_return(user)
- expect_any_instance_of(self.class).to receive(:initial_current_user).and_return(user)
+ expect_any_instance_of(self.class).to receive(:current_user).and_return(user)
end
it 'does not raise an error' do
expect { authenticate! }.not_to raise_error
end
end
-
- context 'current_user is blocked' do
- let(:user) { build(:user, :blocked) }
-
- before do
- expect_any_instance_of(self.class).to receive(:current_user).at_least(:once).and_return(user)
- end
-
- it 'raises an error' do
- expect_any_instance_of(self.class).to receive(:initial_current_user).and_return(user)
-
- expect { authenticate! }.to raise_error '401 - {"message"=>"401 Unauthorized"}'
- end
-
- it "doesn't raise an error if an admin user is impersonating a blocked user (via sudo)" do
- admin_user = build(:user, :admin)
-
- expect_any_instance_of(self.class).to receive(:initial_current_user).and_return(admin_user)
-
- expect { authenticate! }.not_to raise_error
- end
- end
end
end
diff --git a/spec/requests/api/issues_spec.rb b/spec/requests/api/issues_spec.rb
index 1583d1c2435..972e57bc373 100644
--- a/spec/requests/api/issues_spec.rb
+++ b/spec/requests/api/issues_spec.rb
@@ -22,7 +22,8 @@ describe API::Issues, :mailer do
state: :closed,
milestone: milestone,
created_at: generate(:past_time),
- updated_at: 3.hours.ago
+ updated_at: 3.hours.ago,
+ closed_at: 1.hour.ago
end
let!(:confidential_issue) do
create :issue,
@@ -738,6 +739,7 @@ describe API::Issues, :mailer do
expect(json_response['title']).to eq(issue.title)
expect(json_response['description']).to eq(issue.description)
expect(json_response['state']).to eq(issue.state)
+ expect(json_response['closed_at']).to be_falsy
expect(json_response['created_at']).to be_present
expect(json_response['updated_at']).to be_present
expect(json_response['labels']).to eq(issue.label_names)
@@ -748,6 +750,13 @@ describe API::Issues, :mailer do
expect(json_response['confidential']).to be_falsy
end
+ it "exposes the 'closed_at' attribute" do
+ get api("/projects/#{project.id}/issues/#{closed_issue.iid}", user)
+
+ expect(response).to have_http_status(200)
+ expect(json_response['closed_at']).to be_present
+ end
+
context 'links exposure' do
it 'exposes related resources full URIs' do
get api("/projects/#{project.id}/issues/#{issue.iid}", user)
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 21d2c9644fb..5e66e1607ba 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1,6 +1,8 @@
require "spec_helper"
describe API::MergeRequests do
+ include ProjectForksHelper
+
let(:base_time) { Time.now }
let(:user) { create(:user) }
let(:admin) { create(:user, :admin) }
@@ -28,10 +30,29 @@ describe API::MergeRequests do
describe 'GET /merge_requests' do
context 'when unauthenticated' do
- it 'returns authentication error' do
- get api('/merge_requests')
+ it 'returns an array of all merge requests' do
+ get api('/merge_requests', user), scope: 'all'
+
+ expect(response).to have_http_status(200)
+ expect(json_response).to be_an Array
+ end
+
+ it "returns authentication error without any scope" do
+ get api("/merge_requests")
+
+ expect(response).to have_http_status(401)
+ end
+
+ it "returns authentication error when scope is assigned-to-me" do
+ get api("/merge_requests"), scope: 'assigned-to-me'
+
+ expect(response).to have_http_status(401)
+ end
+
+ it "returns authentication error when scope is created-by-me" do
+ get api("/merge_requests"), scope: 'created-by-me'
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_http_status(401)
end
end
@@ -134,10 +155,18 @@ describe API::MergeRequests do
describe "GET /projects/:id/merge_requests" do
context "when unauthenticated" do
- it "returns authentication error" do
+ it 'returns merge requests for public projects' do
get api("/projects/#{project.id}/merge_requests")
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to have_http_status(200)
+ expect(json_response).to be_an Array
+ end
+
+ it "returns 404 for non public projects" do
+ project = create(:project, :private)
+ get api("/projects/#{project.id}/merge_requests")
+
+ expect(response).to have_http_status(404)
end
end
@@ -589,17 +618,17 @@ describe API::MergeRequests do
context 'forked projects' do
let!(:user2) { create(:user) }
- let!(:fork_project) { create(:project, forked_from_project: project, namespace: user2.namespace, creator_id: user2.id) }
+ let!(:forked_project) { fork_project(project, user2) }
let!(:unrelated_project) { create(:project, namespace: create(:user).namespace, creator_id: user2.id) }
before do
- fork_project.add_reporter(user2)
+ forked_project.add_reporter(user2)
allow_any_instance_of(MergeRequest).to receive(:write_ref)
end
it "returns merge_request" do
- post api("/projects/#{fork_project.id}/merge_requests", user2),
+ post api("/projects/#{forked_project.id}/merge_requests", user2),
title: 'Test merge_request', source_branch: "feature_conflict", target_branch: "master",
author: user2, target_project_id: project.id, description: 'Test description for Test merge_request'
expect(response).to have_gitlab_http_status(201)
@@ -608,10 +637,10 @@ describe API::MergeRequests do
end
it "does not return 422 when source_branch equals target_branch" do
- expect(project.id).not_to eq(fork_project.id)
- expect(fork_project.forked?).to be_truthy
- expect(fork_project.forked_from_project).to eq(project)
- post api("/projects/#{fork_project.id}/merge_requests", user2),
+ expect(project.id).not_to eq(forked_project.id)
+ expect(forked_project.forked?).to be_truthy
+ expect(forked_project.forked_from_project).to eq(project)
+ post api("/projects/#{forked_project.id}/merge_requests", user2),
title: 'Test merge_request', source_branch: "master", target_branch: "master", author: user2, target_project_id: project.id
expect(response).to have_gitlab_http_status(201)
expect(json_response['title']).to eq('Test merge_request')
@@ -620,7 +649,7 @@ describe API::MergeRequests do
it 'returns 422 when target project has disabled merge requests' do
project.project_feature.update(merge_requests_access_level: 0)
- post api("/projects/#{fork_project.id}/merge_requests", user2),
+ post api("/projects/#{forked_project.id}/merge_requests", user2),
title: 'Test',
target_branch: 'master',
source_branch: 'markdown',
@@ -631,36 +660,26 @@ describe API::MergeRequests do
end
it "returns 400 when source_branch is missing" do
- post api("/projects/#{fork_project.id}/merge_requests", user2),
+ post api("/projects/#{forked_project.id}/merge_requests", user2),
title: 'Test merge_request', target_branch: "master", author: user2, target_project_id: project.id
expect(response).to have_gitlab_http_status(400)
end
it "returns 400 when target_branch is missing" do
- post api("/projects/#{fork_project.id}/merge_requests", user2),
+ post api("/projects/#{forked_project.id}/merge_requests", user2),
title: 'Test merge_request', target_branch: "master", author: user2, target_project_id: project.id
expect(response).to have_gitlab_http_status(400)
end
it "returns 400 when title is missing" do
- post api("/projects/#{fork_project.id}/merge_requests", user2),
+ post api("/projects/#{forked_project.id}/merge_requests", user2),
target_branch: 'master', source_branch: 'markdown', author: user2, target_project_id: project.id
expect(response).to have_gitlab_http_status(400)
end
context 'when target_branch is specified' do
- it 'returns 422 if not a forked project' do
- post api("/projects/#{project.id}/merge_requests", user),
- title: 'Test merge_request',
- target_branch: 'master',
- source_branch: 'markdown',
- author: user,
- target_project_id: fork_project.id
- expect(response).to have_gitlab_http_status(422)
- end
-
it 'returns 422 if targeting a different fork' do
- post api("/projects/#{fork_project.id}/merge_requests", user2),
+ post api("/projects/#{forked_project.id}/merge_requests", user2),
title: 'Test merge_request',
target_branch: 'master',
source_branch: 'markdown',
@@ -671,8 +690,8 @@ describe API::MergeRequests do
end
it "returns 201 when target_branch is specified and for the same project" do
- post api("/projects/#{fork_project.id}/merge_requests", user2),
- title: 'Test merge_request', target_branch: 'master', source_branch: 'markdown', author: user2, target_project_id: fork_project.id
+ post api("/projects/#{forked_project.id}/merge_requests", user2),
+ title: 'Test merge_request', target_branch: 'master', source_branch: 'markdown', author: user2, target_project_id: forked_project.id
expect(response).to have_gitlab_http_status(201)
end
end
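The merge request hunks above swap factory-built forks (`create(:project, forked_from_project: ...)`) for the `ProjectForksHelper#fork_project` helper included at the top of the spec. A minimal sketch of the pattern, assuming the helper is loaded from spec/support as in the specs above:

    require 'spec_helper'

    describe 'creating merge requests from a fork' do
      include ProjectForksHelper

      let(:project) { create(:project, :repository) }
      let(:user)    { create(:user) }
      # fork_project sets up a real fork relationship instead of faking
      # forked_from_project through the factory.
      let(:forked_project) { fork_project(project, user, repository: true) }

      it 'records the fork relationship' do
        expect(forked_project.forked_from_project).to eq(project)
      end
    end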
diff --git a/spec/requests/api/notes_spec.rb b/spec/requests/api/notes_spec.rb
index f5882c0c74a..fb440fa551c 100644
--- a/spec/requests/api/notes_spec.rb
+++ b/spec/requests/api/notes_spec.rb
@@ -302,6 +302,40 @@ describe API::Notes do
expect(private_issue.notes.reload).to be_empty
end
end
+
+ context 'when the merge request discussion is locked' do
+ before do
+ merge_request.update_attribute(:discussion_locked, true)
+ end
+
+ context 'when a user is a team member' do
+ subject { post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/notes", user), body: 'Hi!' }
+
+ it 'returns 200 status' do
+ subject
+
+ expect(response).to have_http_status(201)
+ end
+
+ it 'creates a new note' do
+ expect { subject }.to change { Note.count }.by(1)
+ end
+ end
+
+ context 'when a user is not a team member' do
+ subject { post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/notes", private_user), body: 'Hi!' }
+
+ it 'returns 403 status' do
+ subject
+
+ expect(response).to have_http_status(403)
+ end
+
+ it 'does not create a new note' do
+ expect { subject }.not_to change { Note.count }
+ end
+ end
+ end
end
describe "POST /projects/:id/noteable/:noteable_id/notes to test observer on create" do
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 9602584f546..5964244f8c5 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -54,9 +54,9 @@ describe API::Projects do
shared_examples_for 'projects response without N + 1 queries' do
it 'avoids N + 1 queries' do
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
get api('/projects', current_user)
- end.count
+ end
if defined?(additional_project)
additional_project
@@ -64,9 +64,12 @@ describe API::Projects do
create(:project, :public)
end
+ # TODO: We're currently querying to detect if a project is a fork
+ # in 2 ways. Lower this back to 8 when `ForkedProjectLink` relation is
+ # removed
expect do
get api('/projects', current_user)
- end.not_to exceed_query_limit(control_count + 8)
+ end.not_to exceed_query_limit(control).with_threshold(9)
end
end
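The hunk above stops comparing raw query counts and instead passes the whole recorder to `exceed_query_limit` with an explicit threshold. A minimal sketch of the idiom, assuming GitLab's `ActiveRecord::QueryRecorder` and the `exceed_query_limit` matcher from spec/support:

    # Record the queries issued for a baseline request, then assert that a
    # later request does not exceed the baseline by more than the threshold.
    control = ActiveRecord::QueryRecorder.new do
      get api('/projects', current_user)
    end

    create(:project, :public)

    expect do
      get api('/projects', current_user)
    end.not_to exceed_query_limit(control).with_threshold(9)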
@@ -193,6 +196,7 @@ describe API::Projects do
path path_with_namespace
star_count forks_count
created_at last_activity_at
+ avatar_url
)
get api('/projects?simple=true', user)
@@ -1181,6 +1185,59 @@ describe API::Projects do
end
end
end
+
+ describe 'GET /projects/:id/forks' do
+ let(:private_fork) { create(:project, :private, :empty_repo) }
+ let(:member) { create(:user) }
+ let(:non_member) { create(:user) }
+
+ before do
+ private_fork.add_developer(member)
+ end
+
+ context 'for a forked project' do
+ before do
+ post api("/projects/#{private_fork.id}/fork/#{project_fork_source.id}", admin)
+ private_fork.reload
+ expect(private_fork.forked_from_project).not_to be_nil
+ expect(private_fork.forked?).to be_truthy
+ project_fork_source.reload
+ expect(project_fork_source.forks.length).to eq(1)
+ expect(project_fork_source.forks).to include(private_fork)
+ end
+
+ context 'for a user that can access the forks' do
+ it 'returns the forks' do
+ get api("/projects/#{project_fork_source.id}/forks", member)
+
+ expect(response).to have_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(1)
+ expect(json_response[0]['name']).to eq(private_fork.name)
+ end
+ end
+
+ context 'for a user that cannot access the forks' do
+ it 'returns an empty array' do
+ get api("/projects/#{project_fork_source.id}/forks", non_member)
+
+ expect(response).to have_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(0)
+ end
+ end
+ end
+
+ context 'for a non-forked project' do
+ it 'returns an empty array' do
+ get api("/projects/#{project_fork_source.id}/forks")
+
+ expect(response).to have_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(0)
+ end
+ end
+ end
end
describe "POST /projects/:id/share" do
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 25924559952..8247900408b 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -361,6 +361,8 @@ describe API::Runner do
'policy' => 'pull-push' }]
end
+ let(:expected_features) { { 'trace_sections' => true } }
+
it 'picks a job' do
request_job info: { platform: :darwin }
@@ -380,6 +382,7 @@ describe API::Runner do
expect(json_response['artifacts']).to eq(expected_artifacts)
expect(json_response['cache']).to eq(expected_cache)
expect(json_response['variables']).to include(*expected_variables)
+ expect(json_response['features']).to eq(expected_features)
end
context 'when job is made for tag' do
diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb
index 48d99841385..7e174903918 100644
--- a/spec/requests/api/services_spec.rb
+++ b/spec/requests/api/services_spec.rb
@@ -1,10 +1,13 @@
require "spec_helper"
describe API::Services do
- let(:user) { create(:user) }
- let(:admin) { create(:admin) }
- let(:user2) { create(:user) }
- let(:project) { create(:project, creator_id: user.id, namespace: user.namespace) }
+ set(:user) { create(:user) }
+ set(:admin) { create(:admin) }
+ set(:user2) { create(:user) }
+
+ set(:project) do
+ create(:project, creator_id: user.id, namespace: user.namespace)
+ end
Service.available_services_names.each do |service|
describe "PUT /projects/:id/services/#{service.dasherize}" do
@@ -98,8 +101,6 @@ describe API::Services do
end
describe 'POST /projects/:id/services/:slug/trigger' do
- let!(:project) { create(:project) }
-
describe 'Mattermost Service' do
let(:service_name) { 'mattermost_slash_commands' }
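This spec also converts `let`/`let!` definitions to `set`, a pattern repeated in several of the v3 request specs below. A minimal sketch of the difference, assuming `set` is GitLab's spec-support helper that creates the record once per example group rather than before every example:

    describe 'set versus let in request specs' do
      # Assumption: set(...) inserts the record a single time for the whole
      # group and reuses it, while let!(...) rebuilds it before each example.
      set(:user)     { create(:user) }
      let!(:project) { create(:project, creator_id: user.id, namespace: user.namespace) }

      it 'reuses the same persisted user across examples' do
        expect(user).to be_persisted
      end
    end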
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 37cb95a16e3..69c8aa4482a 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -23,8 +23,7 @@ describe API::Users do
it "returns the user when a valid `username` parameter is passed" do
get api("/users"), username: user.username
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
+ expect(response).to match_response_schema('public_api/v4/user/basics')
expect(json_response.size).to eq(1)
expect(json_response[0]['id']).to eq(user.id)
expect(json_response[0]['username']).to eq(user.username)
@@ -68,7 +67,7 @@ describe API::Users do
it "renders 200" do
get api("/users", user)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/user/basics')
end
end
@@ -76,7 +75,7 @@ describe API::Users do
it "renders 200" do
get api("/users", admin)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/user/basics')
end
end
end
@@ -84,9 +83,8 @@ describe API::Users do
it "returns an array of users" do
get api("/users", user)
- expect(response).to have_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/user/basics')
expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
username = user.username
expect(json_response.detect do |user|
user['username'] == username
@@ -99,18 +97,16 @@ describe API::Users do
get api("/users?blocked=true", user)
- expect(response).to have_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/user/basics')
expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
expect(json_response).to all(include('state' => /(blocked|ldap_blocked)/))
end
it "returns one user" do
get api("/users?username=#{omniauth_user.username}", user)
- expect(response).to have_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/user/basics')
expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
expect(json_response.first['username']).to eq(omniauth_user.username)
end
@@ -123,25 +119,26 @@ describe API::Users do
it 'does not reveal the `is_admin` flag of the user' do
get api('/users', user)
+ expect(response).to match_response_schema('public_api/v4/user/basics')
expect(json_response.first.keys).not_to include 'is_admin'
end
end
context "when admin" do
+ context 'when sudo is defined' do
+ it 'does not return 500' do
+ admin_personal_access_token = create(:personal_access_token, user: admin).token
+ get api("/users?private_token=#{admin_personal_access_token}&sudo=#{user.id}", admin)
+
+ expect(response).to have_http_status(:success)
+ end
+ end
+
it "returns an array of users" do
get api("/users", admin)
- expect(response).to have_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/user/admins')
expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.first.keys).to include 'email'
- expect(json_response.first.keys).to include 'organization'
- expect(json_response.first.keys).to include 'identities'
- expect(json_response.first.keys).to include 'can_create_project'
- expect(json_response.first.keys).to include 'two_factor_enabled'
- expect(json_response.first.keys).to include 'last_sign_in_at'
- expect(json_response.first.keys).to include 'confirmed_at'
- expect(json_response.first.keys).to include 'is_admin'
end
it "returns an array of external users" do
@@ -149,17 +146,15 @@ describe API::Users do
get api("/users?external=true", admin)
- expect(response).to have_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/user/admins')
expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
expect(json_response).to all(include('external' => true))
end
it "returns one user by external UID" do
get api("/users?extern_uid=#{omniauth_user.identities.first.extern_uid}&provider=#{omniauth_user.identities.first.provider}", admin)
- expect(response).to have_http_status(200)
- expect(json_response).to be_an Array
+ expect(response).to match_response_schema('public_api/v4/user/admins')
expect(json_response.size).to eq(1)
expect(json_response.first['username']).to eq(omniauth_user.username)
end
@@ -181,7 +176,7 @@ describe API::Users do
get api("/users?created_before=2000-01-02T00:00:00.060Z", admin)
- expect(response).to have_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/user/admins')
expect(json_response.size).to eq(1)
expect(json_response.first['username']).to eq(user.username)
end
@@ -191,7 +186,7 @@ describe API::Users do
get api("/users?created_before=2000-01-02T00:00:00.060Z", admin)
- expect(response).to have_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/user/admins')
expect(json_response.size).to eq(0)
end
@@ -200,7 +195,7 @@ describe API::Users do
get api("/users?created_before=2001-01-02T00:00:00.060Z&created_after=1999-01-02T00:00:00.060", admin)
- expect(response).to have_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/user/admins')
expect(json_response.size).to eq(1)
expect(json_response.first['username']).to eq(user.username)
end
@@ -211,22 +206,22 @@ describe API::Users do
it "returns a user by id" do
get api("/users/#{user.id}", user)
- expect(response).to have_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/user/basic')
expect(json_response['username']).to eq(user.username)
end
it "does not return the user's `is_admin` flag" do
get api("/users/#{user.id}", user)
- expect(response).to have_http_status(200)
- expect(json_response['is_admin']).to be_nil
+ expect(response).to match_response_schema('public_api/v4/user/basic')
+ expect(json_response.keys).not_to include 'is_admin'
end
context 'when authenticated as admin' do
it 'includes the `is_admin` field' do
get api("/users/#{user.id}", admin)
- expect(response).to have_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/user/admin')
expect(json_response['is_admin']).to be(false)
end
end
@@ -235,7 +230,7 @@ describe API::Users do
it "returns a user by id" do
get api("/users/#{user.id}")
- expect(response).to have_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/user/basic')
expect(json_response['username']).to eq(user.username)
end
@@ -251,6 +246,7 @@ describe API::Users do
it "returns a 404 error if user id not found" do
get api("/users/9999", user)
+
expect(response).to have_http_status(404)
expect(json_response['message']).to eq('404 User Not Found')
end
@@ -1909,4 +1905,8 @@ describe API::Users do
expect(impersonation_token.reload.revoked).to be_truthy
end
end
+
+ include_examples 'custom attributes endpoints', 'users' do
+ let(:attributable) { user }
+ end
end
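The users_spec hunks above replace paired `have_http_status(200)` / `be_an Array` assertions with JSON schema matching. A minimal sketch of the pattern; the schema path is an assumption based on the matcher name (schemas conventionally live under spec/fixtures/api/schemas):

    it 'returns users matching the public basic schema' do
      get api('/users', user)

      # Assumption: the matcher validates the JSON body against
      # spec/fixtures/api/schemas/public_api/v4/user/basics.json, so an
      # error payload (401/404) would fail the expectation as well.
      expect(response).to match_response_schema('public_api/v4/user/basics')
    end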
diff --git a/spec/requests/api/v3/award_emoji_spec.rb b/spec/requests/api/v3/award_emoji_spec.rb
index 681e8e04295..36d793f505d 100644
--- a/spec/requests/api/v3/award_emoji_spec.rb
+++ b/spec/requests/api/v3/award_emoji_spec.rb
@@ -1,13 +1,13 @@
require 'spec_helper'
describe API::V3::AwardEmoji do
- let(:user) { create(:user) }
- let!(:project) { create(:project) }
- let(:issue) { create(:issue, project: project) }
- let!(:award_emoji) { create(:award_emoji, awardable: issue, user: user) }
- let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
- let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request, user: user) }
- let!(:note) { create(:note, project: project, noteable: issue) }
+ set(:user) { create(:user) }
+ set(:project) { create(:project) }
+ set(:issue) { create(:issue, project: project) }
+ set(:award_emoji) { create(:award_emoji, awardable: issue, user: user) }
+ let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request, user: user) }
+ set(:note) { create(:note, project: project, noteable: issue) }
before { project.team << [user, :master] }
diff --git a/spec/requests/api/v3/boards_spec.rb b/spec/requests/api/v3/boards_spec.rb
index b86aab2ec70..ea2627142bf 100644
--- a/spec/requests/api/v3/boards_spec.rb
+++ b/spec/requests/api/v3/boards_spec.rb
@@ -1,28 +1,28 @@
require 'spec_helper'
describe API::V3::Boards do
- let(:user) { create(:user) }
- let(:guest) { create(:user) }
- let(:non_member) { create(:user) }
- let!(:project) { create(:project, :public, creator_id: user.id, namespace: user.namespace ) }
+ set(:user) { create(:user) }
+ set(:guest) { create(:user) }
+ set(:non_member) { create(:user) }
+ set(:project) { create(:project, :public, creator_id: user.id, namespace: user.namespace ) }
- let!(:dev_label) do
+ set(:dev_label) do
create(:label, title: 'Development', color: '#FFAABB', project: project)
end
- let!(:test_label) do
+ set(:test_label) do
create(:label, title: 'Testing', color: '#FFAACC', project: project)
end
- let!(:dev_list) do
+ set(:dev_list) do
create(:list, label: dev_label, position: 1)
end
- let!(:test_list) do
+ set(:test_list) do
create(:list, label: test_label, position: 2)
end
- let!(:board) do
+ set(:board) do
create(:board, project: project, lists: [dev_list, test_list])
end
@@ -98,8 +98,11 @@ describe API::V3::Boards do
end
context "when the user is project owner" do
- let(:owner) { create(:user) }
- let(:project) { create(:project, namespace: owner.namespace) }
+ set(:owner) { create(:user) }
+
+ before do
+ project.update(namespace: owner.namespace)
+ end
it "deletes the list if an admin requests it" do
delete v3_api("#{base_url}/#{dev_list.id}", owner)
diff --git a/spec/requests/api/v3/branches_spec.rb b/spec/requests/api/v3/branches_spec.rb
index c88f7788697..9cd11a67712 100644
--- a/spec/requests/api/v3/branches_spec.rb
+++ b/spec/requests/api/v3/branches_spec.rb
@@ -2,11 +2,11 @@ require 'spec_helper'
require 'mime/types'
describe API::V3::Branches do
- let(:user) { create(:user) }
- let(:user2) { create(:user) }
- let!(:project) { create(:project, :repository, creator: user) }
- let!(:master) { create(:project_member, :master, user: user, project: project) }
- let!(:guest) { create(:project_member, :guest, user: user2, project: project) }
+ set(:user) { create(:user) }
+ set(:user2) { create(:user) }
+ set(:project) { create(:project, :repository, creator: user) }
+ set(:master) { create(:project_member, :master, user: user, project: project) }
+ set(:guest) { create(:project_member, :guest, user: user2, project: project) }
let!(:branch_name) { 'feature' }
let!(:branch_sha) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' }
let!(:branch_with_dot) { CreateBranchService.new(project, user).execute("with.1.2.3", "master") }
diff --git a/spec/requests/api/v3/broadcast_messages_spec.rb b/spec/requests/api/v3/broadcast_messages_spec.rb
index 948cd78c177..d04b1c72004 100644
--- a/spec/requests/api/v3/broadcast_messages_spec.rb
+++ b/spec/requests/api/v3/broadcast_messages_spec.rb
@@ -1,11 +1,11 @@
require 'spec_helper'
describe API::V3::BroadcastMessages do
- let(:user) { create(:user) }
- let(:admin) { create(:admin) }
+ set(:user) { create(:user) }
+ set(:admin) { create(:admin) }
describe 'DELETE /broadcast_messages/:id' do
- let!(:message) { create(:broadcast_message) }
+ set(:message) { create(:broadcast_message) }
it 'returns a 401 for anonymous users' do
delete v3_api("/broadcast_messages/#{message.id}"),
diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb
index 37710aedbb1..3546b5b85c1 100644
--- a/spec/requests/api/v3/builds_spec.rb
+++ b/spec/requests/api/v3/builds_spec.rb
@@ -1,14 +1,14 @@
require 'spec_helper'
describe API::V3::Builds do
- let(:user) { create(:user) }
+ set(:user) { create(:user) }
let(:api_user) { user }
- let!(:project) { create(:project, :repository, creator: user, public_builds: false) }
- let!(:developer) { create(:project_member, :developer, user: user, project: project) }
- let(:reporter) { create(:project_member, :reporter, project: project) }
- let(:guest) { create(:project_member, :guest, project: project) }
- let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
- let(:build) { create(:ci_build, pipeline: pipeline) }
+ set(:project) { create(:project, :repository, creator: user, public_builds: false) }
+ set(:developer) { create(:project_member, :developer, user: user, project: project) }
+ set(:reporter) { create(:project_member, :reporter, project: project) }
+ set(:guest) { create(:project_member, :guest, project: project) }
+ set(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
+ let!(:build) { create(:ci_build, pipeline: pipeline) }
describe 'GET /projects/:id/builds ' do
let(:query) { '' }
diff --git a/spec/requests/api/v3/issues_spec.rb b/spec/requests/api/v3/issues_spec.rb
index 9a0e6647ebf..86768d7397a 100644
--- a/spec/requests/api/v3/issues_spec.rb
+++ b/spec/requests/api/v3/issues_spec.rb
@@ -1,13 +1,13 @@
require 'spec_helper'
describe API::V3::Issues, :mailer do
- let(:user) { create(:user) }
- let(:user2) { create(:user) }
- let(:non_member) { create(:user) }
- let(:guest) { create(:user) }
- let(:author) { create(:author) }
- let(:assignee) { create(:assignee) }
- let(:admin) { create(:user, :admin) }
+ set(:user) { create(:user) }
+ set(:user2) { create(:user) }
+ set(:non_member) { create(:user) }
+ set(:guest) { create(:user) }
+ set(:author) { create(:author) }
+ set(:assignee) { create(:assignee) }
+ set(:admin) { create(:user, :admin) }
let!(:project) { create(:project, :public, creator_id: user.id, namespace: user.namespace ) }
let!(:closed_issue) do
create :closed_issue,
@@ -822,7 +822,8 @@ describe API::V3::Issues, :mailer do
end
context 'resolving issues in a merge request' do
- let(:discussion) { create(:diff_note_on_merge_request).to_discussion }
+ set(:diff_note_on_merge_request) { create(:diff_note_on_merge_request) }
+ let(:discussion) { diff_note_on_merge_request.to_discussion }
let(:merge_request) { discussion.noteable }
let(:project) { merge_request.source_project }
before do
@@ -1169,7 +1170,7 @@ describe API::V3::Issues, :mailer do
end
context "when the user is project owner" do
- let(:owner) { create(:user) }
+ set(:owner) { create(:user) }
let(:project) { create(:project, namespace: owner.namespace) }
it "deletes the issue if an admin requests it" do
diff --git a/spec/requests/api/v3/merge_requests_spec.rb b/spec/requests/api/v3/merge_requests_spec.rb
index 86f38dd4ec1..df73c731c96 100644
--- a/spec/requests/api/v3/merge_requests_spec.rb
+++ b/spec/requests/api/v3/merge_requests_spec.rb
@@ -1,6 +1,8 @@
require "spec_helper"
describe API::MergeRequests do
+ include ProjectForksHelper
+
let(:base_time) { Time.now }
let(:user) { create(:user) }
let(:admin) { create(:user, :admin) }
@@ -312,17 +314,17 @@ describe API::MergeRequests do
context 'forked projects' do
let!(:user2) { create(:user) }
- let!(:fork_project) { create(:project, forked_from_project: project, namespace: user2.namespace, creator_id: user2.id) }
+ let!(:forked_project) { fork_project(project, user2) }
let!(:unrelated_project) { create(:project, namespace: create(:user).namespace, creator_id: user2.id) }
before do
- fork_project.add_reporter(user2)
+ forked_project.add_reporter(user2)
allow_any_instance_of(MergeRequest).to receive(:write_ref)
end
it "returns merge_request" do
- post v3_api("/projects/#{fork_project.id}/merge_requests", user2),
+ post v3_api("/projects/#{forked_project.id}/merge_requests", user2),
title: 'Test merge_request', source_branch: "feature_conflict", target_branch: "master",
author: user2, target_project_id: project.id, description: 'Test description for Test merge_request'
expect(response).to have_gitlab_http_status(201)
@@ -331,10 +333,10 @@ describe API::MergeRequests do
end
it "does not return 422 when source_branch equals target_branch" do
- expect(project.id).not_to eq(fork_project.id)
- expect(fork_project.forked?).to be_truthy
- expect(fork_project.forked_from_project).to eq(project)
- post v3_api("/projects/#{fork_project.id}/merge_requests", user2),
+ expect(project.id).not_to eq(forked_project.id)
+ expect(forked_project.forked?).to be_truthy
+ expect(forked_project.forked_from_project).to eq(project)
+ post v3_api("/projects/#{forked_project.id}/merge_requests", user2),
title: 'Test merge_request', source_branch: "master", target_branch: "master", author: user2, target_project_id: project.id
expect(response).to have_gitlab_http_status(201)
expect(json_response['title']).to eq('Test merge_request')
@@ -343,7 +345,7 @@ describe API::MergeRequests do
it "returns 422 when target project has disabled merge requests" do
project.project_feature.update(merge_requests_access_level: 0)
- post v3_api("/projects/#{fork_project.id}/merge_requests", user2),
+ post v3_api("/projects/#{forked_project.id}/merge_requests", user2),
title: 'Test',
target_branch: "master",
source_branch: 'markdown',
@@ -354,36 +356,26 @@ describe API::MergeRequests do
end
it "returns 400 when source_branch is missing" do
- post v3_api("/projects/#{fork_project.id}/merge_requests", user2),
+ post v3_api("/projects/#{forked_project.id}/merge_requests", user2),
title: 'Test merge_request', target_branch: "master", author: user2, target_project_id: project.id
expect(response).to have_gitlab_http_status(400)
end
it "returns 400 when target_branch is missing" do
- post v3_api("/projects/#{fork_project.id}/merge_requests", user2),
+ post v3_api("/projects/#{forked_project.id}/merge_requests", user2),
title: 'Test merge_request', target_branch: "master", author: user2, target_project_id: project.id
expect(response).to have_gitlab_http_status(400)
end
it "returns 400 when title is missing" do
- post v3_api("/projects/#{fork_project.id}/merge_requests", user2),
+ post v3_api("/projects/#{forked_project.id}/merge_requests", user2),
target_branch: 'master', source_branch: 'markdown', author: user2, target_project_id: project.id
expect(response).to have_gitlab_http_status(400)
end
context 'when target_branch is specified' do
- it 'returns 422 if not a forked project' do
- post v3_api("/projects/#{project.id}/merge_requests", user),
- title: 'Test merge_request',
- target_branch: 'master',
- source_branch: 'markdown',
- author: user,
- target_project_id: fork_project.id
- expect(response).to have_gitlab_http_status(422)
- end
-
it 'returns 422 if targeting a different fork' do
- post v3_api("/projects/#{fork_project.id}/merge_requests", user2),
+ post v3_api("/projects/#{forked_project.id}/merge_requests", user2),
title: 'Test merge_request',
target_branch: 'master',
source_branch: 'markdown',
@@ -394,8 +386,8 @@ describe API::MergeRequests do
end
it "returns 201 when target_branch is specified and for the same project" do
- post v3_api("/projects/#{fork_project.id}/merge_requests", user2),
- title: 'Test merge_request', target_branch: 'master', source_branch: 'markdown', author: user2, target_project_id: fork_project.id
+ post v3_api("/projects/#{forked_project.id}/merge_requests", user2),
+ title: 'Test merge_request', target_branch: 'master', source_branch: 'markdown', author: user2, target_project_id: forked_project.id
expect(response).to have_gitlab_http_status(201)
end
end
diff --git a/spec/requests/api/v3/projects_spec.rb b/spec/requests/api/v3/projects_spec.rb
index cae2c3118da..e5282c3311f 100644
--- a/spec/requests/api/v3/projects_spec.rb
+++ b/spec/requests/api/v3/projects_spec.rb
@@ -89,6 +89,7 @@ describe API::V3::Projects do
path path_with_namespace
star_count forks_count
created_at last_activity_at
+ avatar_url
)
get v3_api('/projects?simple=true', user)
diff --git a/spec/requests/jwt_controller_spec.rb b/spec/requests/jwt_controller_spec.rb
index 8d79ea3dd40..41bf43a9bce 100644
--- a/spec/requests/jwt_controller_spec.rb
+++ b/spec/requests/jwt_controller_spec.rb
@@ -49,6 +49,10 @@ describe JwtController do
let(:pat) { create(:personal_access_token, user: user, scopes: ['read_registry']) }
let(:headers) { { authorization: credentials('personal_access_token', pat.token) } }
+ before do
+ stub_container_registry_config(enabled: true)
+ end
+
subject! { get '/jwt/auth', parameters, headers }
it 'authenticates correctly' do
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 00f45e5f702..980e9a38b3c 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -2,6 +2,7 @@ require 'spec_helper'
describe 'Git LFS API and storage' do
include WorkhorseHelpers
+ include ProjectForksHelper
let(:user) { create(:user) }
let!(:lfs_object) { create(:lfs_object, :with_file) }
@@ -841,6 +842,34 @@ describe 'Git LFS API and storage' do
end
end
+ describe 'when handling lfs batch request on a read-only GitLab instance' do
+ let(:authorization) { authorize_user }
+ let(:project) { create(:project) }
+ let(:path) { "#{project.http_url_to_repo}/info/lfs/objects/batch" }
+ let(:body) do
+ { 'objects' => [{ 'oid' => sample_oid, 'size' => sample_size }] }
+ end
+
+ before do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+ project.team << [user, :master]
+ enable_lfs
+ end
+
+ it 'responds with a 200 message on download' do
+ post_lfs_json path, body.merge('operation' => 'download'), headers
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'responds with a 403 message on upload' do
+ post_lfs_json path, body.merge('operation' => 'upload'), headers
+
+ expect(response).to have_gitlab_http_status(403)
+ expect(json_response).to include('message' => 'You cannot write to this read-only GitLab instance.')
+ end
+ end
+
describe 'when pushing a lfs object' do
before do
enable_lfs
@@ -1239,11 +1268,6 @@ describe 'Git LFS API and storage' do
ActionController::HttpAuthentication::Basic.encode_credentials(user.username, Gitlab::LfsToken.new(user).token)
end
- def fork_project(project, user, object = nil)
- allow(RepositoryForkWorker).to receive(:perform_async).and_return(true)
- Projects::ForkService.new(project, user, {}).execute
- end
-
def post_lfs_json(url, body = nil, headers = nil)
post(url, body.try(:to_json), (headers || {}).merge('Content-Type' => 'application/vnd.git-lfs+json'))
end
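The new LFS batch context above simulates a read-only GitLab instance by stubbing `Gitlab::Database.read_only?` with the block form `{ true }`, expecting downloads to succeed and uploads to be refused. The same stub in the more common `and_return` form, for reference:

    before do
      # Pretend this node is a read-only replica for the whole group; the
      # block form used above is equivalent to and_return(true).
      allow(Gitlab::Database).to receive(:read_only?).and_return(true)
    end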
diff --git a/spec/rubocop/cop/rspec/verbose_include_metadata_spec.rb b/spec/rubocop/cop/rspec/verbose_include_metadata_spec.rb
new file mode 100644
index 00000000000..278662d32ea
--- /dev/null
+++ b/spec/rubocop/cop/rspec/verbose_include_metadata_spec.rb
@@ -0,0 +1,53 @@
+require 'spec_helper'
+require 'rubocop'
+require 'rubocop/rspec/support'
+require_relative '../../../../rubocop/cop/rspec/verbose_include_metadata'
+
+describe RuboCop::Cop::RSpec::VerboseIncludeMetadata do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ let(:source_file) { 'foo_spec.rb' }
+
+ # Override `CopHelper#inspect_source` to always appear to be in a spec file,
+ # so that our RSpec-only cop actually runs
+ def inspect_source(*args)
+ super(*args, source_file)
+ end
+
+ shared_examples 'examples with include syntax' do |title|
+ it "flags violation for #{title} examples that uses verbose include syntax" do
+ inspect_source(cop, "#{title} 'Test', js: true do; end")
+
+ expect(cop.offenses.size).to eq(1)
+ offense = cop.offenses.first
+
+ expect(offense.line).to eq(1)
+ expect(cop.highlights).to eq(["#{title} 'Test', js: true"])
+ expect(offense.message).to eq('Use `:js` instead of `js: true`.')
+ end
+
+ it "doesn't flag violation for #{title} examples that uses compact include syntax", :aggregate_failures do
+ inspect_source(cop, "#{title} 'Test', :js do; end")
+
+ expect(cop.offenses).to be_empty
+ end
+
+ it "doesn't flag violation for #{title} examples that uses flag: symbol" do
+ inspect_source(cop, "#{title} 'Test', flag: :symbol do; end")
+
+ expect(cop.offenses).to be_empty
+ end
+
+ it "autocorrects #{title} examples that uses verbose syntax into compact syntax" do
+ autocorrected = autocorrect_source(cop, "#{title} 'Test', js: true do; end", source_file)
+
+ expect(autocorrected).to eql("#{title} 'Test', :js do; end")
+ end
+ end
+
+ %w(describe context feature example_group it specify example scenario its).each do |example|
+ it_behaves_like 'examples with include syntax', example
+ end
+end
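The new cop spec drives `RuboCop::Cop::RSpec::VerboseIncludeMetadata` through CopHelper. For reference, the style it enforces, as exercised by the examples above (offense followed by its autocorrection):

    # Flagged: verbose include metadata
    describe 'Test', js: true do
    end

    # After autocorrection: compact metadata
    describe 'Test', :js do
    end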
diff --git a/spec/serializers/build_details_entity_spec.rb b/spec/serializers/build_details_entity_spec.rb
index 5b7822d5d8e..f6bd6e9ede4 100644
--- a/spec/serializers/build_details_entity_spec.rb
+++ b/spec/serializers/build_details_entity_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe BuildDetailsEntity do
+ include ProjectForksHelper
+
set(:user) { create(:admin) }
it 'inherits from JobEntity' do
@@ -56,18 +58,16 @@ describe BuildDetailsEntity do
end
context 'when merge request is from a fork' do
- let(:fork_project) do
- create(:project, forked_from_project: project)
- end
+ let(:forked_project) { fork_project(project) }
- let(:pipeline) { create(:ci_pipeline, project: fork_project) }
+ let(:pipeline) { create(:ci_pipeline, project: forked_project) }
before do
allow(build).to receive(:merge_request).and_return(merge_request)
end
let(:merge_request) do
- create(:merge_request, source_project: fork_project,
+ create(:merge_request, source_project: forked_project,
target_project: project,
source_branch: build.ref)
end
diff --git a/spec/serializers/build_serializer_spec.rb b/spec/serializers/build_serializer_spec.rb
index 01e2cfed6f8..9673b11c2a2 100644
--- a/spec/serializers/build_serializer_spec.rb
+++ b/spec/serializers/build_serializer_spec.rb
@@ -38,7 +38,7 @@ describe BuildSerializer do
expect(subject[:text]).to eq(status.text)
expect(subject[:label]).to eq(status.label)
expect(subject[:icon]).to eq(status.icon)
- expect(subject[:favicon]).to eq("/assets/ci_favicons/#{status.favicon}.ico")
+ expect(subject[:favicon]).to match_asset_path("/assets/ci_favicons/#{status.favicon}.ico")
end
end
end
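The serializer hunks replace exact favicon path comparisons with the `match_asset_path` matcher, presumably so the expectations hold when asset digests are appended to compiled file names. A minimal sketch of the pattern, with the concrete favicon taken from the status entity spec below:

    it 'exposes a favicon resolving to the CI success favicon asset' do
      # Assumption: match_asset_path tolerates digested names such as
      # favicon_status_success-<hash>.ico in compiled environments.
      expect(subject[:favicon])
        .to match_asset_path('/assets/ci_favicons/favicon_status_success.ico')
    end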
diff --git a/spec/serializers/cluster_entity_spec.rb b/spec/serializers/cluster_entity_spec.rb
new file mode 100644
index 00000000000..2c7f49974f1
--- /dev/null
+++ b/spec/serializers/cluster_entity_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+describe ClusterEntity do
+ set(:cluster) { create(:gcp_cluster, :errored) }
+ let(:request) { double('request') }
+
+ let(:entity) do
+ described_class.new(cluster)
+ end
+
+ describe '#as_json' do
+ subject { entity.as_json }
+
+ it 'contains status' do
+ expect(subject[:status]).to eq(:errored)
+ end
+
+ it 'contains status reason' do
+ expect(subject[:status_reason]).to eq('general error')
+ end
+ end
+end
diff --git a/spec/serializers/cluster_serializer_spec.rb b/spec/serializers/cluster_serializer_spec.rb
new file mode 100644
index 00000000000..1ac6784d28f
--- /dev/null
+++ b/spec/serializers/cluster_serializer_spec.rb
@@ -0,0 +1,19 @@
+require 'spec_helper'
+
+describe ClusterSerializer do
+ let(:serializer) do
+ described_class.new
+ end
+
+ describe '#represent_status' do
+ subject { serializer.represent_status(resource) }
+
+ context 'when represents only status' do
+ let(:resource) { create(:gcp_cluster, :errored) }
+
+ it 'serializes only status' do
+ expect(subject.keys).to contain_exactly(:status, :status_reason)
+ end
+ end
+ end
+end
diff --git a/spec/serializers/container_repository_entity_spec.rb b/spec/serializers/container_repository_entity_spec.rb
new file mode 100644
index 00000000000..c589cd18f77
--- /dev/null
+++ b/spec/serializers/container_repository_entity_spec.rb
@@ -0,0 +1,41 @@
+require 'spec_helper'
+
+describe ContainerRepositoryEntity do
+ let(:entity) do
+ described_class.new(repository, request: request)
+ end
+
+ set(:project) { create(:project) }
+ set(:user) { create(:user) }
+ set(:repository) { create(:container_repository, project: project) }
+
+ let(:request) { double('request') }
+
+ subject { entity.as_json }
+
+ before do
+ stub_container_registry_config(enabled: true)
+ allow(request).to receive(:project).and_return(project)
+ allow(request).to receive(:current_user).and_return(user)
+ end
+
+ it 'exposes required information' do
+ expect(subject).to include(:id, :path, :location, :tags_path)
+ end
+
+ context 'when user can manage repositories' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'exposes destroy_path' do
+ expect(subject).to include(:destroy_path)
+ end
+ end
+
+ context 'when user cannot manage repositories' do
+ it 'does not expose destroy_path' do
+ expect(subject).not_to include(:destroy_path)
+ end
+ end
+end
diff --git a/spec/serializers/container_tag_entity_spec.rb b/spec/serializers/container_tag_entity_spec.rb
new file mode 100644
index 00000000000..6dcc5204516
--- /dev/null
+++ b/spec/serializers/container_tag_entity_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+
+describe ContainerTagEntity do
+ let(:entity) do
+ described_class.new(tag, request: request)
+ end
+
+ set(:project) { create(:project) }
+ set(:user) { create(:user) }
+ set(:repository) { create(:container_repository, name: 'image', project: project) }
+
+ let(:request) { double('request') }
+ let(:tag) { repository.tag('test') }
+
+ subject { entity.as_json }
+
+ before do
+ stub_container_registry_config(enabled: true)
+ stub_container_registry_tags(repository: /image/, tags: %w[test])
+ allow(request).to receive(:project).and_return(project)
+ allow(request).to receive(:current_user).and_return(user)
+ end
+
+ it 'exposes required information' do
+ expect(subject).to include(:name, :location, :revision, :total_size, :created_at)
+ end
+
+ context 'when user can manage repositories' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'exposes destroy_path' do
+ expect(subject).to include(:destroy_path)
+ end
+ end
+
+ context 'when user cannot manage repositories' do
+ it 'does not expose destroy_path' do
+ expect(subject).not_to include(:destroy_path)
+ end
+ end
+end
diff --git a/spec/serializers/environment_entity_spec.rb b/spec/serializers/environment_entity_spec.rb
index 979d9921941..8f32c5639a1 100644
--- a/spec/serializers/environment_entity_spec.rb
+++ b/spec/serializers/environment_entity_spec.rb
@@ -16,6 +16,10 @@ describe EnvironmentEntity do
expect(subject).to include(:id, :name, :state, :environment_path)
end
+ it 'exposes folder path' do
+ expect(subject).to include(:folder_path)
+ end
+
context 'metrics disabled' do
before do
allow(environment).to receive(:has_metrics?).and_return(false)
diff --git a/spec/serializers/merge_request_entity_spec.rb b/spec/serializers/merge_request_entity_spec.rb
index a2fd5b7daae..4aeb593da44 100644
--- a/spec/serializers/merge_request_entity_spec.rb
+++ b/spec/serializers/merge_request_entity_spec.rb
@@ -11,16 +11,6 @@ describe MergeRequestEntity do
described_class.new(resource, request: request).as_json
end
- it 'includes author' do
- req = double('request')
-
- author_payload = UserEntity
- .represent(resource.author, request: req)
- .as_json
-
- expect(subject[:author]).to eq(author_payload)
- end
-
it 'includes pipeline' do
req = double('request', current_user: user)
pipeline = build_stubbed(:ci_pipeline)
@@ -47,7 +37,8 @@ describe MergeRequestEntity do
:cancel_merge_when_pipeline_succeeds_path,
:create_issue_to_resolve_discussions_path,
:source_branch_path, :target_branch_commits_path,
- :target_branch_tree_path, :commits_count, :merge_ongoing)
+ :target_branch_tree_path, :commits_count, :merge_ongoing,
+ :ff_only_enabled)
end
it 'has email_patches_path' do
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
index f8df461bc81..248552d1858 100644
--- a/spec/serializers/pipeline_entity_spec.rb
+++ b/spec/serializers/pipeline_entity_spec.rb
@@ -108,5 +108,18 @@ describe PipelineEntity do
expect(subject[:ref][:path]).to be_nil
end
end
+
+ context 'when pipeline has a failure reason set' do
+ let(:pipeline) { create(:ci_empty_pipeline) }
+
+ before do
+ pipeline.drop!(:config_error)
+ end
+
+ it 'has a correct failure reason' do
+ expect(subject[:failure_reason])
+ .to eq 'CI/CD YAML configuration error!'
+ end
+ end
end
end
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index 2de8daba6b5..8fc1ceedc34 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -103,9 +103,15 @@ describe PipelineSerializer do
let(:project) { create(:project) }
before do
- Ci::Pipeline::AVAILABLE_STATUSES.each do |status|
- create_pipeline(status)
+ # Since RequestStore.active? is true we have to allow the
+ # gitaly calls in this block
+ # Issue: https://gitlab.com/gitlab-org/gitlab-ce/issues/37772
+ Gitlab::GitalyClient.allow_n_plus_1_calls do
+ Ci::Pipeline::AVAILABLE_STATUSES.each do |status|
+ create_pipeline(status)
+ end
end
+ Gitlab::GitalyClient.reset_counts
end
shared_examples 'no N+1 queries' do
@@ -162,7 +168,7 @@ describe PipelineSerializer do
expect(subject[:text]).to eq(status.text)
expect(subject[:label]).to eq(status.label)
expect(subject[:icon]).to eq(status.icon)
- expect(subject[:favicon]).to eq("/assets/ci_favicons/#{status.favicon}.ico")
+ expect(subject[:favicon]).to match_asset_path("/assets/ci_favicons/#{status.favicon}.ico")
end
end
end
diff --git a/spec/serializers/status_entity_spec.rb b/spec/serializers/status_entity_spec.rb
index 3964b998084..16431ed4188 100644
--- a/spec/serializers/status_entity_spec.rb
+++ b/spec/serializers/status_entity_spec.rb
@@ -18,12 +18,12 @@ describe StatusEntity do
it 'contains status details' do
expect(subject).to include :text, :icon, :favicon, :label, :group
expect(subject).to include :has_details, :details_path
- expect(subject[:favicon]).to eq('/assets/ci_favicons/favicon_status_success.ico')
+ expect(subject[:favicon]).to match_asset_path('/assets/ci_favicons/favicon_status_success.ico')
end
it 'contains a dev namespaced favicon if dev env' do
allow(Rails.env).to receive(:development?) { true }
- expect(entity.as_json[:favicon]).to eq('/assets/ci_favicons/dev/favicon_status_success.ico')
+ expect(entity.as_json[:favicon]).to match_asset_path('/assets/ci_favicons/dev/favicon_status_success.ico')
end
end
end
diff --git a/spec/services/ci/create_cluster_service_spec.rb b/spec/services/ci/create_cluster_service_spec.rb
new file mode 100644
index 00000000000..6e7398fbffa
--- /dev/null
+++ b/spec/services/ci/create_cluster_service_spec.rb
@@ -0,0 +1,47 @@
+require 'spec_helper'
+
+describe Ci::CreateClusterService do
+ describe '#execute' do
+ let(:access_token) { 'xxx' }
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:result) { described_class.new(project, user, params).execute(access_token) }
+
+ context 'when correct params' do
+ let(:params) do
+ {
+ gcp_project_id: 'gcp-project',
+ gcp_cluster_name: 'test-cluster',
+ gcp_cluster_zone: 'us-central1-a',
+ gcp_cluster_size: 1
+ }
+ end
+
+ it 'creates a cluster object' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+ expect { result }.to change { Gcp::Cluster.count }.by(1)
+ expect(result.gcp_project_id).to eq('gcp-project')
+ expect(result.gcp_cluster_name).to eq('test-cluster')
+ expect(result.gcp_cluster_zone).to eq('us-central1-a')
+ expect(result.gcp_cluster_size).to eq(1)
+ expect(result.gcp_token).to eq(access_token)
+ end
+ end
+
+ context 'when invalid params' do
+ let(:params) do
+ {
+ gcp_project_id: 'gcp-project',
+ gcp_cluster_name: 'test-cluster',
+ gcp_cluster_zone: 'us-central1-a',
+ gcp_cluster_size: 'ABC'
+ }
+ end
+
+ it 'returns an error' do
+ expect(ClusterProvisionWorker).not_to receive(:perform_async)
+ expect { result }.to change { Gcp::Cluster.count }.by(0)
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 4c2ff08039c..08847183bf4 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Ci::CreatePipelineService do
+ include ProjectForksHelper
+
set(:project) { create(:project, :repository) }
let(:user) { create(:admin) }
let(:ref_name) { 'refs/heads/master' }
@@ -82,13 +84,9 @@ describe Ci::CreatePipelineService do
end
context 'when merge request target project is different from source project' do
+ let!(:project) { fork_project(target_project, nil, repository: true) }
let!(:target_project) { create(:project, :repository) }
- let!(:forked_project_link) do
- create(:forked_project_link, forked_to_project: project,
- forked_from_project: target_project)
- end
-
it 'updates head pipeline for merge request' do
merge_request = create(:merge_request, source_branch: 'master',
target_branch: "branch_1",
@@ -133,6 +131,26 @@ describe Ci::CreatePipelineService do
expect(merge_request.reload.head_pipeline).to eq head_pipeline
end
end
+
+ context 'when pipeline has been skipped' do
+ before do
+ allow_any_instance_of(Ci::Pipeline)
+ .to receive(:git_commit_message)
+ .and_return('some commit [ci skip]')
+ end
+
+ it 'updates merge request head pipeline' do
+ merge_request = create(:merge_request, source_branch: 'master',
+ target_branch: 'feature',
+ source_project: project)
+
+ head_pipeline = execute_service
+
+ expect(head_pipeline).to be_skipped
+ expect(head_pipeline).to be_persisted
+ expect(merge_request.reload.head_pipeline).to eq head_pipeline
+ end
+ end
end
context 'auto-cancel enabled' do
@@ -481,104 +499,4 @@ describe Ci::CreatePipelineService do
end
end
end
-
- describe '#allowed_to_create?' do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
- let(:ref) { 'master' }
-
- subject do
- described_class.new(project, user, ref: ref)
- .send(:allowed_to_create?)
- end
-
- context 'when user is a developer' do
- before do
- project.add_developer(user)
- end
-
- it { is_expected.to be_truthy }
-
- context 'when the branch is protected' do
- let!(:protected_branch) do
- create(:protected_branch, project: project, name: ref)
- end
-
- it { is_expected.to be_falsey }
-
- context 'when developers are allowed to merge' do
- let!(:protected_branch) do
- create(:protected_branch,
- :developers_can_merge,
- project: project,
- name: ref)
- end
-
- it { is_expected.to be_truthy }
- end
- end
-
- context 'when the tag is protected' do
- let(:ref) { 'v1.0.0' }
-
- let!(:protected_tag) do
- create(:protected_tag, project: project, name: ref)
- end
-
- it { is_expected.to be_falsey }
-
- context 'when developers are allowed to create the tag' do
- let!(:protected_tag) do
- create(:protected_tag,
- :developers_can_create,
- project: project,
- name: ref)
- end
-
- it { is_expected.to be_truthy }
- end
- end
- end
-
- context 'when user is a master' do
- before do
- project.add_master(user)
- end
-
- it { is_expected.to be_truthy }
-
- context 'when the branch is protected' do
- let!(:protected_branch) do
- create(:protected_branch, project: project, name: ref)
- end
-
- it { is_expected.to be_truthy }
- end
-
- context 'when the tag is protected' do
- let(:ref) { 'v1.0.0' }
-
- let!(:protected_tag) do
- create(:protected_tag, project: project, name: ref)
- end
-
- it { is_expected.to be_truthy }
-
- context 'when no one can create the tag' do
- let!(:protected_tag) do
- create(:protected_tag,
- :no_one_can_create,
- project: project,
- name: ref)
- end
-
- it { is_expected.to be_falsey }
- end
- end
- end
-
- context 'when owner cannot create pipeline' do
- it { is_expected.to be_falsey }
- end
- end
end
diff --git a/spec/services/ci/extract_sections_from_build_trace_service_spec.rb b/spec/services/ci/extract_sections_from_build_trace_service_spec.rb
new file mode 100644
index 00000000000..28f2fa7903a
--- /dev/null
+++ b/spec/services/ci/extract_sections_from_build_trace_service_spec.rb
@@ -0,0 +1,55 @@
+require 'spec_helper'
+
+describe Ci::ExtractSectionsFromBuildTraceService, '#execute' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:build) { create(:ci_build, project: project) }
+
+ subject { described_class.new(project, user) }
+
+ shared_examples 'build trace has sections markers' do
+ before do
+ build.trace.set(File.read(expand_fixture_path('trace/trace_with_sections')))
+ end
+
+ it 'saves the correct extracted sections' do
+ expect(build.trace_sections).to be_empty
+ expect(subject.execute(build)).to be(true)
+ expect(build.trace_sections).not_to be_empty
+ end
+
+ it "fails if trace_sections isn't empty" do
+ expect(subject.execute(build)).to be(true)
+ expect(build.trace_sections).not_to be_empty
+
+ expect(subject.execute(build)).to be(false)
+ expect(build.trace_sections).not_to be_empty
+ end
+ end
+
+ shared_examples 'build trace has no sections markers' do
+ before do
+ build.trace.set('no markers')
+ end
+
+ it 'extracts no sections' do
+ expect(build.trace_sections).to be_empty
+ expect(subject.execute(build)).to be(true)
+ expect(build.trace_sections).to be_empty
+ end
+ end
+
+ context 'when the build has no user' do
+ it_behaves_like 'build trace has sections markers'
+ it_behaves_like 'build trace has no sections markers'
+ end
+
+ context 'when the build has a valid user' do
+ before do
+ build.user = user
+ end
+
+ it_behaves_like 'build trace has sections markers'
+ it_behaves_like 'build trace has no sections markers'
+ end
+end
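The trace sections spec relies on the `trace/trace_with_sections` fixture. As a rough, hedged illustration (the exact byte format is defined by the runner and the fixture, not reproduced here), section markers in a trace look approximately like this:

    # Approximate shape only; timestamps and section names are placeholders.
    trace = "section_start:1506000000:build_script\r\e[0Krunning the build...\n" \
            "section_end:1506001000:build_script\r\e[0K\n"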
diff --git a/spec/services/ci/fetch_gcp_operation_service_spec.rb b/spec/services/ci/fetch_gcp_operation_service_spec.rb
new file mode 100644
index 00000000000..7792979c5cb
--- /dev/null
+++ b/spec/services/ci/fetch_gcp_operation_service_spec.rb
@@ -0,0 +1,36 @@
+require 'spec_helper'
+require 'google/apis'
+
+describe Ci::FetchGcpOperationService do
+ describe '#execute' do
+ let(:cluster) { create(:gcp_cluster) }
+ let(:operation) { double }
+
+ context 'when succeeded' do
+ before do
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:projects_zones_operations).and_return(operation)
+ end
+
+ it 'fetches the gcp operation' do
+ expect { |b| described_class.new.execute(cluster, &b) }
+ .to yield_with_args(operation)
+ end
+ end
+
+ context 'when an error is raised' do
+ let(:error) { Google::Apis::ServerError.new('a') }
+
+ before do
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:projects_zones_operations).and_raise(error)
+ end
+
+ it 'sets an error on the cluster object' do
+ expect { |b| described_class.new.execute(cluster, &b) }
+ .not_to yield_with_args
+ expect(cluster.reload).to be_errored
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/fetch_kubernetes_token_service_spec.rb b/spec/services/ci/fetch_kubernetes_token_service_spec.rb
new file mode 100644
index 00000000000..1d05c9671a9
--- /dev/null
+++ b/spec/services/ci/fetch_kubernetes_token_service_spec.rb
@@ -0,0 +1,64 @@
+require 'spec_helper'
+
+describe Ci::FetchKubernetesTokenService do
+ describe '#execute' do
+ subject { described_class.new(api_url, ca_pem, username, password).execute }
+
+ let(:api_url) { 'http://111.111.111.111' }
+ let(:ca_pem) { '' }
+ let(:username) { 'admin' }
+ let(:password) { 'xxx' }
+
+ context 'when params are correct' do
+ let(:token) { 'xxx.token.xxx' }
+
+ let(:secrets_json) do
+ [
+ {
+ 'metadata': {
+ name: metadata_name
+ },
+ 'data': {
+ 'token': Base64.encode64(token)
+ }
+ }
+ ]
+ end
+
+ before do
+ allow_any_instance_of(Kubeclient::Client)
+ .to receive(:get_secrets).and_return(secrets_json)
+ end
+
+ context 'when default-token exists' do
+ let(:metadata_name) { 'default-token-123' }
+
+ it { is_expected.to eq(token) }
+ end
+
+ context 'when default-token does not exist' do
+ let(:metadata_name) { 'another-token-123' }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ context 'when api_url is nil' do
+ let(:api_url) { nil }
+
+ it { expect { subject }.to raise_error("Incomplete settings") }
+ end
+
+ context 'when username is nil' do
+ let(:username) { nil }
+
+ it { expect { subject }.to raise_error("Incomplete settings") }
+ end
+
+ context 'when password is nil' do
+ let(:password) { nil }
+
+ it { expect { subject }.to raise_error("Incomplete settings") }
+ end
+ end
+end
diff --git a/spec/services/ci/finalize_cluster_creation_service_spec.rb b/spec/services/ci/finalize_cluster_creation_service_spec.rb
new file mode 100644
index 00000000000..def3709fdb4
--- /dev/null
+++ b/spec/services/ci/finalize_cluster_creation_service_spec.rb
@@ -0,0 +1,61 @@
+require 'spec_helper'
+
+describe Ci::FinalizeClusterCreationService do
+ describe '#execute' do
+ let(:cluster) { create(:gcp_cluster) }
+ let(:result) { described_class.new.execute(cluster) }
+
+ context 'when succeeded to get cluster from api' do
+ let(:gke_cluster) { double }
+
+ before do
+ allow(gke_cluster).to receive(:endpoint).and_return('111.111.111.111')
+ allow(gke_cluster).to receive(:master_auth).and_return(spy)
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:projects_zones_clusters_get).and_return(gke_cluster)
+ end
+
+ context 'when succeeded to get kubernetes token' do
+ let(:kubernetes_token) { 'abc' }
+
+ before do
+ allow_any_instance_of(Ci::FetchKubernetesTokenService)
+ .to receive(:execute).and_return(kubernetes_token)
+ end
+
+ it 'executes integration cluster' do
+ expect_any_instance_of(Ci::IntegrateClusterService).to receive(:execute)
+ described_class.new.execute(cluster)
+ end
+ end
+
+ context 'when failed to get kubernetes token' do
+ before do
+ allow_any_instance_of(Ci::FetchKubernetesTokenService)
+ .to receive(:execute).and_return(nil)
+ end
+
+ it 'sets an error on the cluster object' do
+ described_class.new.execute(cluster)
+
+ expect(cluster.reload).to be_errored
+ end
+ end
+ end
+
+ context 'when failed to get cluster from api' do
+ let(:error) { Google::Apis::ServerError.new('a') }
+
+ before do
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:projects_zones_clusters_get).and_raise(error)
+ end
+
+ it 'sets an error on the cluster object' do
+ described_class.new.execute(cluster)
+
+ expect(cluster.reload).to be_errored
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/integrate_cluster_service_spec.rb b/spec/services/ci/integrate_cluster_service_spec.rb
new file mode 100644
index 00000000000..3a79c205bd1
--- /dev/null
+++ b/spec/services/ci/integrate_cluster_service_spec.rb
@@ -0,0 +1,42 @@
+require 'spec_helper'
+
+describe Ci::IntegrateClusterService do
+ describe '#execute' do
+ let(:cluster) { create(:gcp_cluster, :custom_project_namespace) }
+ let(:endpoint) { '123.123.123.123' }
+ let(:ca_cert) { 'ca_cert_xxx' }
+ let(:token) { 'token_xxx' }
+ let(:username) { 'username_xxx' }
+ let(:password) { 'password_xxx' }
+
+ before do
+ described_class
+ .new.execute(cluster, endpoint, ca_cert, token, username, password)
+
+ cluster.reload
+ end
+
+ context 'when correct params' do
+ it 'creates a cluster object' do
+ expect(cluster.endpoint).to eq(endpoint)
+ expect(cluster.ca_cert).to eq(ca_cert)
+ expect(cluster.kubernetes_token).to eq(token)
+ expect(cluster.username).to eq(username)
+ expect(cluster.password).to eq(password)
+ expect(cluster.service.active).to be_truthy
+ expect(cluster.service.api_url).to eq(cluster.api_url)
+ expect(cluster.service.ca_pem).to eq(ca_cert)
+ expect(cluster.service.namespace).to eq(cluster.project_namespace)
+ expect(cluster.service.token).to eq(token)
+ end
+ end
+
+ context 'when invalid params' do
+ let(:endpoint) { nil }
+
+ it 'sets an error on the cluster object' do
+ expect(cluster).to be_errored
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_trigger_service_spec.rb b/spec/services/ci/pipeline_trigger_service_spec.rb
index 9a6875e448c..f4ff818c479 100644
--- a/spec/services/ci/pipeline_trigger_service_spec.rb
+++ b/spec/services/ci/pipeline_trigger_service_spec.rb
@@ -34,6 +34,8 @@ describe Ci::PipelineTriggerService do
expect(result[:pipeline].ref).to eq('master')
expect(result[:pipeline].project).to eq(project)
expect(result[:pipeline].user).to eq(trigger.owner)
+ expect(result[:pipeline].trigger_requests.to_a)
+ .to eq(result[:pipeline].builds.map(&:trigger_request).uniq)
expect(result[:status]).to eq(:success)
end
diff --git a/spec/services/ci/provision_cluster_service_spec.rb b/spec/services/ci/provision_cluster_service_spec.rb
new file mode 100644
index 00000000000..5ce5c788314
--- /dev/null
+++ b/spec/services/ci/provision_cluster_service_spec.rb
@@ -0,0 +1,85 @@
+require 'spec_helper'
+
+describe Ci::ProvisionClusterService do
+ describe '#execute' do
+ let(:cluster) { create(:gcp_cluster) }
+ let(:operation) { spy }
+
+ shared_examples 'error' do
+ it 'sets an error on the cluster object' do
+ described_class.new.execute(cluster)
+
+ expect(cluster.reload).to be_errored
+ end
+ end
+
+ context 'when succeeded to request provision' do
+ before do
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:projects_zones_clusters_create).and_return(operation)
+ end
+
+ context 'when operation status is RUNNING' do
+ before do
+ allow(operation).to receive(:status).and_return('RUNNING')
+ end
+
+ context 'when succeeded to parse gcp operation id' do
+ before do
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:parse_operation_id).and_return('operation-123')
+ end
+
+ context 'when cluster status is scheduled' do
+ before do
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:parse_operation_id).and_return('operation-123')
+ end
+
+ it 'schedules a worker for status monitoring' do
+ expect(WaitForClusterCreationWorker).to receive(:perform_in)
+
+ described_class.new.execute(cluster)
+ end
+ end
+
+ context 'when cluster status is creating' do
+ before do
+ cluster.make_creating!
+ end
+
+ it_behaves_like 'error'
+ end
+ end
+
+ context 'when failed to parse gcp operation id' do
+ before do
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:parse_operation_id).and_return(nil)
+ end
+
+ it_behaves_like 'error'
+ end
+ end
+
+ context 'when operation status is others' do
+ before do
+ allow(operation).to receive(:status).and_return('others')
+ end
+
+ it_behaves_like 'error'
+ end
+ end
+
+ context 'when failed to request provision' do
+ let(:error) { Google::Apis::ServerError.new('a') }
+
+ before do
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:projects_zones_clusters_create).and_raise(error)
+ end
+
+ it_behaves_like 'error'
+ end
+ end
+end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 7f29fac9eef..1005156d18e 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -1,9 +1,10 @@
require 'spec_helper'
describe Ci::RetryBuildService do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
+ set(:user) { create(:user) }
+ set(:project) { create(:project) }
+ set(:pipeline) { create(:ci_pipeline, project: project) }
+
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:service) do
@@ -19,7 +20,7 @@ describe Ci::RetryBuildService do
erased_at auto_canceled_by].freeze
IGNORE_ACCESSORS =
- %i[type lock_version target_url base_tags
+ %i[type lock_version target_url base_tags trace_sections
commit_id deployments erased_by_id last_deployment project_id
runner_id tag_taggings taggings tags trigger_request_id
user_id auto_canceled_by_id retried failure_reason
@@ -38,7 +39,7 @@ describe Ci::RetryBuildService do
:queued, :coverage, :tags, :allowed_to_fail, :on_tag,
:triggered, :trace, :teardown_environment,
description: 'my-job', stage: 'test', pipeline: pipeline,
- auto_canceled_by: create(:ci_empty_pipeline)) do |build|
+ auto_canceled_by: create(:ci_empty_pipeline, project: project)) do |build|
##
# TODO, workaround for FactoryGirl limitation when having both
# stage (text) and stage_id (integer) columns in the table.
diff --git a/spec/services/ci/update_cluster_service_spec.rb b/spec/services/ci/update_cluster_service_spec.rb
new file mode 100644
index 00000000000..a289385b88f
--- /dev/null
+++ b/spec/services/ci/update_cluster_service_spec.rb
@@ -0,0 +1,37 @@
+require 'spec_helper'
+
+describe Ci::UpdateClusterService do
+ describe '#execute' do
+ let(:cluster) { create(:gcp_cluster, :created_on_gke, :with_kubernetes_service) }
+
+ before do
+ described_class.new(cluster.project, cluster.user, params).execute(cluster)
+
+ cluster.reload
+ end
+
+ context 'when correct params' do
+ context 'when enabled is true' do
+ let(:params) { { 'enabled' => 'true' } }
+
+ it 'enables cluster and overwrites kubernetes service' do
+ expect(cluster.enabled).to be_truthy
+ expect(cluster.service.active).to be_truthy
+ expect(cluster.service.api_url).to eq(cluster.api_url)
+ expect(cluster.service.ca_pem).to eq(cluster.ca_cert)
+ expect(cluster.service.namespace).to eq(cluster.project_namespace)
+ expect(cluster.service.token).to eq(cluster.kubernetes_token)
+ end
+ end
+
+ context 'when enabled is false' do
+ let(:params) { { 'enabled' => 'false' } }
+
+ it 'disables cluster and kubernetes service' do
+ expect(cluster.enabled).to be_falsy
+ expect(cluster.service.active).to be_falsy
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/delete_merged_branches_service_spec.rb b/spec/services/delete_merged_branches_service_spec.rb
index 03c682ae0d7..5a9eb359ee1 100644
--- a/spec/services/delete_merged_branches_service_spec.rb
+++ b/spec/services/delete_merged_branches_service_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe DeleteMergedBranchesService do
+ include ProjectForksHelper
+
subject(:service) { described_class.new(project, project.owner) }
let(:project) { create(:project, :repository) }
@@ -50,9 +52,9 @@ describe DeleteMergedBranchesService do
context 'open merge requests' do
it 'does not delete branches from open merge requests' do
- fork_link = create(:forked_project_link, forked_from_project: project)
+ forked_project = fork_project(project)
create(:merge_request, :opened, source_project: project, target_project: project, source_branch: 'branch-merged', target_branch: 'master')
- create(:merge_request, :opened, source_project: fork_link.forked_to_project, target_project: project, target_branch: 'improve/awesome', source_branch: 'master')
+ create(:merge_request, :opened, source_project: forked_project, target_project: project, target_branch: 'improve/awesome', source_branch: 'master')
service.execute
diff --git a/spec/services/deploy_keys/create_service_spec.rb b/spec/services/deploy_keys/create_service_spec.rb
new file mode 100644
index 00000000000..7a604c0cadd
--- /dev/null
+++ b/spec/services/deploy_keys/create_service_spec.rb
@@ -0,0 +1,12 @@
+require 'spec_helper'
+
+describe DeployKeys::CreateService do
+ let(:user) { create(:user) }
+ let(:params) { attributes_for(:deploy_key) }
+
+ subject { described_class.new(user, params) }
+
+ it "creates a deploy key" do
+ expect { subject.execute }.to change { DeployKey.where(params.merge(user: user)).count }.by(1)
+ end
+end
diff --git a/spec/services/discussions/update_diff_position_service_spec.rb b/spec/services/discussions/update_diff_position_service_spec.rb
index 82b156f5ebe..2b84206318f 100644
--- a/spec/services/discussions/update_diff_position_service_spec.rb
+++ b/spec/services/discussions/update_diff_position_service_spec.rb
@@ -164,8 +164,8 @@ describe Discussions::UpdateDiffPositionService do
change_position = discussion.change_position
expect(change_position.start_sha).to eq(old_diff_refs.head_sha)
expect(change_position.head_sha).to eq(new_diff_refs.head_sha)
- expect(change_position.old_line).to eq(9)
- expect(change_position.new_line).to be_nil
+ expect(change_position.formatter.old_line).to eq(9)
+ expect(change_position.formatter.new_line).to be_nil
end
it 'creates a system discussion' do
@@ -184,7 +184,7 @@ describe Discussions::UpdateDiffPositionService do
expect(discussion.original_position).to eq(old_position)
expect(discussion.position).not_to eq(old_position)
- expect(discussion.position.new_line).to eq(22)
+ expect(discussion.position.formatter.new_line).to eq(22)
end
context 'when the resolve_outdated_diff_discussions setting is set' do
diff --git a/spec/services/emails/confirm_service_spec.rb b/spec/services/emails/confirm_service_spec.rb
new file mode 100644
index 00000000000..2b2c31e2521
--- /dev/null
+++ b/spec/services/emails/confirm_service_spec.rb
@@ -0,0 +1,15 @@
+require 'spec_helper'
+
+describe Emails::ConfirmService do
+ let(:user) { create(:user) }
+
+ subject(:service) { described_class.new(user) }
+
+ describe '#execute' do
+ it 'sends a confirmation email again' do
+ email = user.emails.create(email: 'new@email.com')
+ mail = service.execute(email)
+ expect(mail.subject).to eq('Confirmation instructions')
+ end
+ end
+end
diff --git a/spec/services/emails/create_service_spec.rb b/spec/services/emails/create_service_spec.rb
index 641d5538de8..54692c88623 100644
--- a/spec/services/emails/create_service_spec.rb
+++ b/spec/services/emails/create_service_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe Emails::CreateService do
let(:user) { create(:user) }
- let(:opts) { { email: 'new@email.com' } }
+ let(:opts) { { email: 'new@email.com', user: user } }
subject(:service) { described_class.new(user, opts) }
@@ -12,6 +12,11 @@ describe Emails::CreateService do
expect(Email.where(opts)).not_to be_empty
end
+ it 'creates an email with additional attributes' do
+ expect { service.execute(confirmation_token: 'abc') }.to change { Email.count }.by(1)
+ expect(Email.where(opts).first.confirmation_token).to eq 'abc'
+ end
+
it 'has the right user association' do
service.execute
diff --git a/spec/services/emails/destroy_service_spec.rb b/spec/services/emails/destroy_service_spec.rb
index 1f4294dd905..c3204fac3df 100644
--- a/spec/services/emails/destroy_service_spec.rb
+++ b/spec/services/emails/destroy_service_spec.rb
@@ -4,11 +4,11 @@ describe Emails::DestroyService do
let!(:user) { create(:user) }
let!(:email) { create(:email, user: user) }
- subject(:service) { described_class.new(user, email: email.email) }
+ subject(:service) { described_class.new(user, user: user) }
describe '#execute' do
it 'removes an email' do
- expect { service.execute }.to change { user.emails.count }.by(-1)
+ expect { service.execute(email) }.to change { user.emails.count }.by(-1)
end
end
end
diff --git a/spec/services/event_create_service_spec.rb b/spec/services/event_create_service_spec.rb
index 02d7ddeb86b..13395a7cac3 100644
--- a/spec/services/event_create_service_spec.rb
+++ b/spec/services/event_create_service_spec.rb
@@ -149,6 +149,14 @@ describe EventCreateService do
.to change { user_activity(user) }
end
+ it 'caches the last push event for the user' do
+ expect_any_instance_of(Users::LastPushEventService)
+ .to receive(:cache_last_push_event)
+ .with(an_instance_of(PushEvent))
+
+ service.push(project, user, push_data)
+ end
+
it 'does not create any event data when an error is raised' do
payload_service = double(:service)
diff --git a/spec/services/gpg_keys/create_service_spec.rb b/spec/services/gpg_keys/create_service_spec.rb
new file mode 100644
index 00000000000..1cd2625531e
--- /dev/null
+++ b/spec/services/gpg_keys/create_service_spec.rb
@@ -0,0 +1,31 @@
+require 'spec_helper'
+
+describe GpgKeys::CreateService do
+ let(:user) { create(:user) }
+ let(:params) { attributes_for(:gpg_key) }
+
+ subject { described_class.new(user, params) }
+
+ context 'notification', :mailer do
+ it 'sends a notification' do
+ perform_enqueued_jobs do
+ subject.execute
+ end
+ should_email(user)
+ end
+ end
+
+ it 'creates a gpg key' do
+ expect { subject.execute }.to change { user.gpg_keys.where(params).count }.by(1)
+ end
+
+ context 'when the public key contains subkeys' do
+ let(:params) { attributes_for(:gpg_key_with_subkeys) }
+
+ it 'generates the gpg subkeys' do
+ gpg_key = subject.execute
+
+ expect(gpg_key.subkeys.count).to eq(2)
+ end
+ end
+end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 171f70c32a8..5c27e8fd561 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -42,7 +42,7 @@ describe Issues::CloseService do
service.execute(issue)
end
- it 'refreshes the number of open issues' do
+ it 'refreshes the number of open issues', :use_clean_rails_memory_store_caching do
expect { service.execute(issue) }
.to change { project.open_issues_count }.from(1).to(0)
end
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index cc3d648c340..d86da244520 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -35,7 +35,7 @@ describe Issues::CreateService do
expect(issue.due_date).to eq Date.tomorrow
end
- it 'refreshes the number of open issues' do
+ it 'refreshes the number of open issues', :use_clean_rails_memory_store_caching do
expect { issue }.to change { project.open_issues_count }.from(0).to(1)
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 15a50b85f19..f07b81e842a 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -48,7 +48,8 @@ describe Issues::UpdateService, :mailer do
assignee_ids: [user2.id],
state_event: 'close',
label_ids: [label.id],
- due_date: Date.tomorrow
+ due_date: Date.tomorrow,
+ discussion_locked: true
}
end
@@ -62,6 +63,14 @@ describe Issues::UpdateService, :mailer do
expect(issue).to be_closed
expect(issue.labels).to match_array [label]
expect(issue.due_date).to eq Date.tomorrow
+ expect(issue.discussion_locked).to be_truthy
+ end
+
+ it 'refreshes the number of open issues when the issue is made confidential', :use_clean_rails_memory_store_caching do
+ issue # make sure the issue is created first so our counts are correct.
+
+ expect { update_issue(confidential: true) }
+ .to change { project.open_issues_count }.from(1).to(0)
end
it 'updates open issue counter for assignees when issue is reassigned' do
@@ -103,6 +112,7 @@ describe Issues::UpdateService, :mailer do
expect(issue.labels).to be_empty
expect(issue.milestone).to be_nil
expect(issue.due_date).to be_nil
+ expect(issue.discussion_locked).to be_falsey
end
end
@@ -141,6 +151,13 @@ describe Issues::UpdateService, :mailer do
expect(note).not_to be_nil
expect(note.note).to eq 'changed title from **{-Old-} title** to **{+New+} title**'
end
+
+ it 'creates system note about discussion lock' do
+ note = find_note('locked this issue')
+
+ expect(note).not_to be_nil
+ expect(note.note).to eq 'locked this issue'
+ end
end
end
@@ -250,6 +267,30 @@ describe Issues::UpdateService, :mailer do
end
end
+ context 'when a new assignee is added' do
+ subject { update_issue(assignees: issue.assignees + [user2]) }
+
+ it 'creates only 1 new todo' do
+ expect { subject }.to change { Todo.count }.by(1)
+ end
+
+ it 'creates a todo for new assignee' do
+ subject
+
+ attributes = {
+ project: project,
+ author: user,
+ user: user2,
+ target_id: issue.id,
+ target_type: issue.class.name,
+ action: Todo::ASSIGNED,
+ state: :pending
+ }
+
+ expect(Todo.where(attributes).count).to eq(1)
+ end
+ end
+
context 'when the milestone changes' do
it 'marks todos as done' do
update_issue(milestone: create(:milestone))
diff --git a/spec/services/keys/create_service_spec.rb b/spec/services/keys/create_service_spec.rb
new file mode 100644
index 00000000000..bcb436c1e46
--- /dev/null
+++ b/spec/services/keys/create_service_spec.rb
@@ -0,0 +1,21 @@
+require 'spec_helper'
+
+describe Keys::CreateService do
+ let(:user) { create(:user) }
+ let(:params) { attributes_for(:key) }
+
+ subject { described_class.new(user, params) }
+
+ context 'notification', :mailer do
+ it 'sends a notification' do
+ perform_enqueued_jobs do
+ subject.execute
+ end
+ should_email(user)
+ end
+ end
+
+ it 'creates a key' do
+ expect { subject.execute }.to change { user.keys.where(params).count }.by(1)
+ end
+end
diff --git a/spec/services/keys/last_used_service_spec.rb b/spec/services/keys/last_used_service_spec.rb
new file mode 100644
index 00000000000..bb0fb6acf39
--- /dev/null
+++ b/spec/services/keys/last_used_service_spec.rb
@@ -0,0 +1,63 @@
+require 'spec_helper'
+
+describe Keys::LastUsedService do
+ describe '#execute', :clean_gitlab_redis_shared_state do
+ it 'updates the key when it has not been used recently' do
+ key = create(:key, last_used_at: 1.year.ago)
+ time = Time.zone.now
+
+ Timecop.freeze(time) { described_class.new(key).execute }
+
+ expect(key.last_used_at).to eq(time)
+ end
+
+ it 'does not update the key when it has been used recently' do
+ time = 1.minute.ago
+ key = create(:key, last_used_at: time)
+
+ described_class.new(key).execute
+
+ expect(key.last_used_at).to eq(time)
+ end
+
+ it 'does not update the updated_at field' do
+ # Since a lot of these updates could happen in parallel for different keys
+ # we want these updates to be as lightweight as possible, hence we want to
+ # make sure we _only_ update last_used_at and not always updated_at.
+ key = create(:key, last_used_at: 1.year.ago)
+
+ expect { described_class.new(key).execute }.not_to change { key.updated_at }
+ end
+ end
+
+ describe '#update?', :clean_gitlab_redis_shared_state do
+ it 'returns true when no last used timestamp is present' do
+ key = build(:key, last_used_at: nil)
+ service = described_class.new(key)
+
+ expect(service.update?).to eq(true)
+ end
+
+ it 'returns true when the key needs to be updated' do
+ key = build(:key, last_used_at: 1.year.ago)
+ service = described_class.new(key)
+
+ expect(service.update?).to eq(true)
+ end
+
+ it 'returns false when a lease has already been obtained' do
+ key = build(:key, last_used_at: 1.year.ago)
+ service = described_class.new(key)
+
+ expect(service.update?).to eq(true)
+ expect(service.update?).to eq(false)
+ end
+
+ it 'returns false when the key does not yet need to be updated' do
+ key = build(:key, last_used_at: 1.minute.ago)
+ service = described_class.new(key)
+
+ expect(service.update?).to eq(false)
+ end
+ end
+end
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index 7e65369762c..b3886987316 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -52,7 +52,7 @@ describe MergeRequests::CloseService do
end
end
- it 'refreshes the number of open merge requests for a valid MR' do
+ it 'refreshes the number of open merge requests for a valid MR', :use_clean_rails_memory_store_caching do
service = described_class.new(project, user, {})
expect { service.execute(merge_request) }
diff --git a/spec/services/merge_requests/conflicts/resolve_service_spec.rb b/spec/services/merge_requests/conflicts/resolve_service_spec.rb
index 6f49a65d795..9c9b0c4c4a1 100644
--- a/spec/services/merge_requests/conflicts/resolve_service_spec.rb
+++ b/spec/services/merge_requests/conflicts/resolve_service_spec.rb
@@ -1,14 +1,16 @@
require 'spec_helper'
describe MergeRequests::Conflicts::ResolveService do
+ include ProjectForksHelper
let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
-
- let(:fork_project) do
- create(:forked_project_with_submodules) do |fork_project|
- fork_project.build_forked_project_link(forked_to_project_id: fork_project.id, forked_from_project_id: project.id)
- fork_project.save
- end
+ let(:project) { create(:project, :public, :repository) }
+
+ let(:forked_project) do
+ forked_project = fork_project(project, user)
+ TestEnv.copy_repo(forked_project,
+ bare_repo: TestEnv.forked_repo_path_bare,
+ refs: TestEnv::FORKED_BRANCH_SHA)
+ forked_project
end
let(:merge_request) do
@@ -19,7 +21,7 @@ describe MergeRequests::Conflicts::ResolveService do
let(:merge_request_from_fork) do
create(:merge_request,
- source_branch: 'conflict-resolvable-fork', source_project: fork_project,
+ source_branch: 'conflict-resolvable-fork', source_project: forked_project,
target_branch: 'conflict-start', target_project: project)
end
@@ -114,7 +116,7 @@ describe MergeRequests::Conflicts::ResolveService do
end
it 'gets conflicts from the source project' do
- expect(fork_project.repository.rugged).to receive(:merge_commits).and_call_original
+ expect(forked_project.repository.rugged).to receive(:merge_commits).and_call_original
expect(project.repository.rugged).not_to receive(:merge_commits)
resolve_conflicts
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index d6409c0d625..a047f891ab2 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -37,7 +37,7 @@ describe MergeRequests::CreateService do
expect(service).to have_received(:execute_hooks).with(merge_request)
end
- it 'refreshes the number of open merge requests' do
+ it 'refreshes the number of open merge requests', :use_clean_rails_memory_store_caching do
expect { service.execute }
.to change { project.open_merge_requests_count }.from(0).to(1)
end
diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb
new file mode 100644
index 00000000000..aaabf3ed2b0
--- /dev/null
+++ b/spec/services/merge_requests/ff_merge_service_spec.rb
@@ -0,0 +1,84 @@
+require 'spec_helper'
+
+describe MergeRequests::FfMergeService do
+ let(:user) { create(:user) }
+ let(:user2) { create(:user) }
+ let(:merge_request) do
+ create(:merge_request,
+ source_branch: 'flatten-dir',
+ target_branch: 'improve/awesome',
+ assignee: user2)
+ end
+ let(:project) { merge_request.project }
+
+ before do
+ project.team << [user, :master]
+ project.team << [user2, :developer]
+ end
+
+ describe '#execute' do
+ context 'valid params' do
+ let(:service) { described_class.new(project, user, {}) }
+
+ before do
+ allow(service).to receive(:execute_hooks)
+
+ perform_enqueued_jobs do
+ service.execute(merge_request)
+ end
+ end
+
+ it "does not create merge commit" do
+ source_branch_sha = merge_request.source_project.repository.commit(merge_request.source_branch).sha
+ target_branch_sha = merge_request.target_project.repository.commit(merge_request.target_branch).sha
+ expect(source_branch_sha).to eq(target_branch_sha)
+ end
+
+ it { expect(merge_request).to be_valid }
+ it { expect(merge_request).to be_merged }
+
+ it 'sends email to user2 about merge of new merge_request' do
+ email = ActionMailer::Base.deliveries.last
+ expect(email.to.first).to eq(user2.email)
+ expect(email.subject).to include(merge_request.title)
+ end
+
+ it 'creates system note about merge_request merge' do
+ note = merge_request.notes.last
+ expect(note.note).to include 'merged'
+ end
+ end
+
+ context "error handling" do
+ let(:service) { described_class.new(project, user, commit_message: 'Awesome message') }
+
+ before do
+ allow(Rails.logger).to receive(:error)
+ end
+
+ it 'logs and saves error if there is an exception' do
+ error_message = 'error message'
+
+ allow(service).to receive(:repository).and_raise("error message")
+ allow(service).to receive(:execute_hooks)
+
+ service.execute(merge_request)
+
+ expect(merge_request.merge_error).to include(error_message)
+ expect(Rails.logger).to have_received(:error).with(a_string_matching(error_message))
+ end
+
+ it 'logs and saves error if there is a PreReceiveError exception' do
+ error_message = 'error message'
+
+ allow(service).to receive(:repository).and_raise(Gitlab::Git::HooksService::PreReceiveError, error_message)
+ allow(service).to receive(:execute_hooks)
+
+ service.execute(merge_request)
+
+ expect(merge_request.merge_error).to include(error_message)
+ expect(Rails.logger).to have_received(:error).with(a_string_matching(error_message))
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/get_urls_service_spec.rb b/spec/services/merge_requests/get_urls_service_spec.rb
index 25599dea19f..274624aa8bb 100644
--- a/spec/services/merge_requests/get_urls_service_spec.rb
+++ b/spec/services/merge_requests/get_urls_service_spec.rb
@@ -1,6 +1,8 @@
require "spec_helper"
describe MergeRequests::GetUrlsService do
+ include ProjectForksHelper
+
let(:project) { create(:project, :public, :repository) }
let(:service) { described_class.new(project) }
let(:source_branch) { "merge-test" }
@@ -85,7 +87,7 @@ describe MergeRequests::GetUrlsService do
context 'pushing to existing branch from forked project' do
let(:user) { create(:user) }
- let!(:forked_project) { Projects::ForkService.new(project, user).execute }
+ let!(:forked_project) { fork_project(project, user, repository: true) }
let!(:merge_request) { create(:merge_request, source_project: forked_project, target_project: project, source_branch: source_branch) }
let(:changes) { existing_branch_changes }
# Source project is now the forked one
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index b60136064b7..80213d093f1 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -13,20 +13,21 @@ describe MergeRequests::MergeService do
describe '#execute' do
context 'MergeRequest#merge_jid' do
+ let(:service) do
+ described_class.new(project, user, commit_message: 'Awesome message')
+ end
+
before do
merge_request.update_column(:merge_jid, 'hash-123')
end
it 'is cleaned when no error is raised' do
- service = described_class.new(project, user, commit_message: 'Awesome message')
-
service.execute(merge_request)
expect(merge_request.reload.merge_jid).to be_nil
end
it 'is cleaned when expected error is raised' do
- service = described_class.new(project, user, commit_message: 'Awesome message')
allow(service).to receive(:commit).and_raise(described_class::MergeError)
service.execute(merge_request)
@@ -34,6 +35,22 @@ describe MergeRequests::MergeService do
expect(merge_request.reload.merge_jid).to be_nil
end
+ it 'is cleaned when merge request is not mergeable' do
+ allow(merge_request).to receive(:mergeable?).and_return(false)
+
+ service.execute(merge_request)
+
+ expect(merge_request.reload.merge_jid).to be_nil
+ end
+
+ it 'is cleaned when no source is found' do
+ allow(merge_request).to receive(:diff_head_sha).and_return(nil)
+
+ service.execute(merge_request)
+
+ expect(merge_request.reload.merge_jid).to be_nil
+ end
+
it 'is not cleaned when unexpected error is raised' do
service = described_class.new(project, user, commit_message: 'Awesome message')
allow(service).to receive(:commit).and_raise(StandardError)
diff --git a/spec/services/merge_requests/post_merge_service_spec.rb b/spec/services/merge_requests/post_merge_service_spec.rb
index a37cdab8928..d2bd05d921f 100644
--- a/spec/services/merge_requests/post_merge_service_spec.rb
+++ b/spec/services/merge_requests/post_merge_service_spec.rb
@@ -11,5 +11,16 @@ describe MergeRequests::PostMergeService do
describe '#execute' do
it_behaves_like 'cache counters invalidator'
+
+ it 'refreshes the number of open merge requests for a valid MR', :use_clean_rails_memory_store_caching do
+ # Cache the counter before the MR changed state.
+ project.open_merge_requests_count
+ merge_request.update!(state: 'merged')
+
+ service = described_class.new(project, user, {})
+
+ expect { service.execute(merge_request) }
+ .to change { project.open_merge_requests_count }.from(1).to(0)
+ end
end
end
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 2af2485eeed..62dbe362ec8 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe MergeRequests::RefreshService do
+ include ProjectForksHelper
+
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:service) { described_class }
@@ -12,7 +14,8 @@ describe MergeRequests::RefreshService do
group.add_owner(@user)
@project = create(:project, :repository, namespace: group)
- @fork_project = Projects::ForkService.new(@project, @user).execute
+ @fork_project = fork_project(@project, @user, repository: true)
+
@merge_request = create(:merge_request,
source_project: @project,
source_branch: 'master',
@@ -150,9 +153,7 @@ describe MergeRequests::RefreshService do
context 'manual merge of source branch' do
before do
# Merge master -> feature branch
- author = { email: 'test@gitlab.com', time: Time.now, name: "Me" }
- commit_options = { message: 'Test message', committer: author, author: author }
- @project.repository.merge(@user, @merge_request.diff_head_sha, @merge_request, commit_options)
+ @project.repository.merge(@user, @merge_request.diff_head_sha, @merge_request, 'Test message')
commit = @project.repository.commit('feature')
service.new(@project, @user).execute(@oldrev, commit.id, 'refs/heads/feature')
reload_mrs
@@ -313,8 +314,7 @@ describe MergeRequests::RefreshService do
context 'when the merge request is sourced from a different project' do
it 'creates a `MergeRequestsClosingIssues` record for each issue closed by a commit' do
- forked_project = create(:project, :repository)
- create(:forked_project_link, forked_to_project: forked_project, forked_from_project: @project)
+ forked_project = fork_project(@project, @user, repository: true)
merge_request = create(:merge_request,
target_branch: 'master',
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 681feee61d1..b11a1b31f32 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -49,7 +49,8 @@ describe MergeRequests::UpdateService, :mailer do
state_event: 'close',
label_ids: [label.id],
target_branch: 'target',
- force_remove_source_branch: '1'
+ force_remove_source_branch: '1',
+ discussion_locked: true
}
end
@@ -73,6 +74,7 @@ describe MergeRequests::UpdateService, :mailer do
expect(@merge_request.labels.first.title).to eq(label.name)
expect(@merge_request.target_branch).to eq('target')
expect(@merge_request.merge_params['force_remove_source_branch']).to eq('1')
+ expect(@merge_request.discussion_locked).to be_truthy
end
it 'executes hooks with update action' do
@@ -123,6 +125,13 @@ describe MergeRequests::UpdateService, :mailer do
expect(note.note).to eq 'changed target branch from `master` to `target`'
end
+ it 'creates system note about discussion lock' do
+ note = find_note('locked this issue')
+
+ expect(note).not_to be_nil
+ expect(note.note).to eq 'locked this issue'
+ end
+
context 'when not including source branch removal options' do
before do
opts.delete(:force_remove_source_branch)
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 3e493148b32..b64ca5be8fc 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -84,7 +84,6 @@ describe NotificationService, :mailer do
let!(:key) { create(:personal_key, key_options) }
it { expect(notification.new_key(key)).to be_truthy }
- it { should_email(key.user) }
describe 'never emails the ghost user' do
let(:key_options) { { user: User.ghost } }
@@ -106,18 +105,6 @@ describe NotificationService, :mailer do
end
end
- describe 'Email' do
- describe '#new_email' do
- let!(:email) { create(:email) }
-
- it { expect(notification.new_email(email)).to be_truthy }
-
- it 'sends email to email owner' do
- expect { notification.new_email(email) }.to change { ActionMailer::Base.deliveries.size }.by(1)
- end
- end
- end
-
describe 'Notes' do
context 'issue note' do
let(:project) { create(:project, :private) }
diff --git a/spec/services/projects/count_service_spec.rb b/spec/services/projects/count_service_spec.rb
index 79b01e7620e..cc496501bad 100644
--- a/spec/services/projects/count_service_spec.rb
+++ b/spec/services/projects/count_service_spec.rb
@@ -66,8 +66,8 @@ describe Projects::CountService do
describe '#cache_key' do
it 'returns the cache key as an Array' do
- allow(service).to receive(:cache_key_name).and_return('count_service')
- expect(service.cache_key).to eq(['projects', 1, 'count_service'])
+ allow(service).to receive(:cache_key_name).and_return('foo')
+ expect(service.cache_key).to eq(['projects', 'count_service', described_class::VERSION, 1, 'foo'])
end
end
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 5da634e2fb1..dc89fdebce7 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -76,9 +76,8 @@ describe Projects::CreateService, '#execute' do
context 'wiki_enabled true creates wiki repository directory' do
it do
project = create_project(user, opts)
- path = ProjectWiki.new(project, user).send(:path_to_repo)
- expect(File.exist?(path)).to be_truthy
+ expect(wiki_repo(project).exists?).to be_truthy
end
end
@@ -86,11 +85,15 @@ describe Projects::CreateService, '#execute' do
it do
opts[:wiki_enabled] = false
project = create_project(user, opts)
- path = ProjectWiki.new(project, user).send(:path_to_repo)
- expect(File.exist?(path)).to be_falsey
+ expect(wiki_repo(project).exists?).to be_falsey
end
end
+
+ def wiki_repo(project)
+ relative_path = ProjectWiki.new(project).disk_path + '.git'
+ Gitlab::Git::Repository.new(project.repository_storage, relative_path, 'foobar')
+ end
end
context 'builds_enabled global setting' do
@@ -149,6 +152,9 @@ describe Projects::CreateService, '#execute' do
end
context 'when another repository already exists on disk' do
+ let(:repository_storage) { 'default' }
+ let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] }
+
let(:opts) do
{
name: 'Existing',
@@ -156,30 +162,59 @@ describe Projects::CreateService, '#execute' do
}
end
- let(:repository_storage_path) { Gitlab.config.repositories.storages['default']['path'] }
+ context 'with legacy storage' do
+ before do
+ gitlab_shell.add_repository(repository_storage, "#{user.namespace.full_path}/existing")
+ end
- before do
- gitlab_shell.add_repository(repository_storage_path, "#{user.namespace.full_path}/existing")
- end
+ after do
+ gitlab_shell.remove_repository(repository_storage_path, "#{user.namespace.full_path}/existing")
+ end
- after do
- gitlab_shell.remove_repository(repository_storage_path, "#{user.namespace.full_path}/existing")
- end
+ it 'does not allow to create a project when path matches existing repository on disk' do
+ project = create_project(user, opts)
- it 'does not allow to create project with same path' do
- project = create_project(user, opts)
+ expect(project).not_to be_persisted
+ expect(project).to respond_to(:errors)
+ expect(project.errors.messages).to have_key(:base)
+ expect(project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ end
- expect(project).to respond_to(:errors)
- expect(project.errors.messages).to have_key(:base)
- expect(project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ it 'does not allow to import project when path matches existing repository on disk' do
+ project = create_project(user, opts.merge({ import_url: 'https://gitlab.com/gitlab-org/gitlab-test.git' }))
+
+ expect(project).not_to be_persisted
+ expect(project).to respond_to(:errors)
+ expect(project.errors.messages).to have_key(:base)
+ expect(project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ end
end
- it 'does not allow to import a project with the same path' do
- project = create_project(user, opts.merge({ import_url: 'https://gitlab.com/gitlab-org/gitlab-test.git' }))
+ context 'with hashed storage' do
+ let(:hash) { '6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b' }
+ let(:hashed_path) { '@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b' }
+
+ before do
+ stub_application_setting(hashed_storage_enabled: true)
+ allow(Digest::SHA2).to receive(:hexdigest) { hash }
+ end
- expect(project).to respond_to(:errors)
- expect(project.errors.messages).to have_key(:base)
- expect(project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ before do
+ gitlab_shell.add_repository(repository_storage, hashed_path)
+ end
+
+ after do
+ gitlab_shell.remove_repository(repository_storage_path, hashed_path)
+ end
+
+ it 'does not allow to create a project when path matches existing repository on disk' do
+ project = create_project(user, opts)
+
+ expect(project).not_to be_persisted
+ expect(project).to respond_to(:errors)
+ expect(project.errors.messages).to have_key(:base)
+ expect(project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ end
end
end
end
@@ -208,6 +243,15 @@ describe Projects::CreateService, '#execute' do
end
end
+ context 'when skip_disk_validation is used' do
+ it 'sets the project attribute' do
+ opts[:skip_disk_validation] = true
+ project = create_project(user, opts)
+
+ expect(project.skip_disk_validation).to be_truthy
+ end
+ end
+
def create_project(user, opts)
Projects::CreateService.new(user, opts).execute
end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index c867139d1de..c90bad46295 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -212,6 +212,19 @@ describe Projects::DestroyService do
end
end
+ context 'as the root of a fork network' do
+ let!(:fork_network) { create(:fork_network, root_project: project) }
+
+ it 'updates the fork network with the project name' do
+ destroy_project(project, user)
+
+ fork_network.reload
+
+ expect(fork_network.deleted_root_project_name).to eq(project.full_name)
+ expect(fork_network.root_project).to be_nil
+ end
+ end
+
def destroy_project(project, user, params = {})
if async
Projects::DestroyService.new(project, user, params).async_execute
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index a6e0364d44c..53862283a27 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -1,6 +1,7 @@
require 'spec_helper'
describe Projects::ForkService do
+ include ProjectForksHelper
let(:gitlab_shell) { Gitlab::Shell.new }
describe 'fork by user' do
@@ -33,7 +34,7 @@ describe Projects::ForkService do
end
describe "successfully creates project in the user namespace" do
- let(:to_project) { fork_project(@from_project, @to_user) }
+ let(:to_project) { fork_project(@from_project, @to_user, namespace: @to_user.namespace) }
it { expect(to_project).to be_persisted }
it { expect(to_project.errors).to be_empty }
@@ -60,13 +61,40 @@ describe Projects::ForkService do
expect(@from_project.forks_count).to eq(1)
end
+
+ it 'creates a fork network with the new project and the root project set' do
+ to_project
+ fork_network = @from_project.reload.fork_network
+
+ expect(fork_network).not_to be_nil
+ expect(fork_network.root_project).to eq(@from_project)
+ expect(fork_network.projects).to contain_exactly(@from_project, to_project)
+ end
+ end
+
+ context 'creating a fork of a fork' do
+ let(:from_forked_project) { fork_project(@from_project, @to_user) }
+ let(:other_namespace) do
+ group = create(:group)
+ group.add_owner(@to_user)
+ group
+ end
+ let(:to_project) { fork_project(from_forked_project, @to_user, namespace: other_namespace) }
+
+ it 'sets the root of the network to the root project' do
+ expect(to_project.fork_network.root_project).to eq(@from_project)
+ end
+
+ it 'sets the forked_from_project on the membership' do
+ expect(to_project.fork_network_member.forked_from_project).to eq(from_forked_project)
+ end
end
end
context 'project already exists' do
it "fails due to validation, not transaction failure" do
@existing_project = create(:project, :repository, creator_id: @to_user.id, name: @from_project.name, namespace: @to_namespace)
- @to_project = fork_project(@from_project, @to_user)
+ @to_project = fork_project(@from_project, @to_user, namespace: @to_namespace)
expect(@existing_project).to be_persisted
expect(@to_project).not_to be_persisted
@@ -76,10 +104,11 @@ describe Projects::ForkService do
end
context 'repository already exists' do
- let(:repository_storage_path) { Gitlab.config.repositories.storages['default']['path'] }
+ let(:repository_storage) { 'default' }
+ let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] }
before do
- gitlab_shell.add_repository(repository_storage_path, "#{@to_user.namespace.full_path}/#{@from_project.path}")
+ gitlab_shell.add_repository(repository_storage, "#{@to_user.namespace.full_path}/#{@from_project.path}")
end
after do
@@ -87,7 +116,7 @@ describe Projects::ForkService do
end
it 'does not allow creation' do
- to_project = fork_project(@from_project, @to_user)
+ to_project = fork_project(@from_project, @to_user, namespace: @to_user.namespace)
expect(to_project).not_to be_persisted
expect(to_project.errors.messages).to have_key(:base)
@@ -181,9 +210,4 @@ describe Projects::ForkService do
end
end
end
-
- def fork_project(from_project, user, params = {})
- allow(RepositoryForkWorker).to receive(:perform_async).and_return(true)
- Projects::ForkService.new(from_project, user, params).execute
- end
end
diff --git a/spec/services/projects/hashed_storage_migration_service_spec.rb b/spec/services/projects/hashed_storage_migration_service_spec.rb
new file mode 100644
index 00000000000..aa1988d29d6
--- /dev/null
+++ b/spec/services/projects/hashed_storage_migration_service_spec.rb
@@ -0,0 +1,74 @@
+require 'spec_helper'
+
+describe Projects::HashedStorageMigrationService do
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:project) { create(:project, :empty_repo, :wiki_repo) }
+ let(:service) { described_class.new(project) }
+ let(:legacy_storage) { Storage::LegacyProject.new(project) }
+ let(:hashed_storage) { Storage::HashedProject.new(project) }
+
+ describe '#execute' do
+ before do
+ allow(service).to receive(:gitlab_shell) { gitlab_shell }
+ end
+
+ context 'when succeeds' do
+ it 'renames project and wiki repositories' do
+ service.execute
+
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_truthy
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_truthy
+ end
+
+ it 'updates project to be hashed and not read-only' do
+ service.execute
+
+ expect(project.hashed_storage?).to be_truthy
+ expect(project.repository_read_only).to be_falsey
+ end
+
+ it 'calls the move operation for both repositories' do
+ expect_move_repository(project.disk_path, hashed_storage.disk_path)
+ expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki")
+
+ service.execute
+ end
+ end
+
+ context 'when one move fails' do
+ it 'rolls back repositories to original name' do
+ from_name = project.disk_path
+ to_name = hashed_storage.disk_path
+ allow(service).to receive(:move_repository).and_call_original
+ allow(service).to receive(:move_repository).with(from_name, to_name).once { false } # will disable first move only
+
+ expect(service).to receive(:rollback_folder_move).and_call_original
+
+ service.execute
+
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_falsey
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_falsey
+ end
+
+ context 'when rollback fails' do
+ before do
+ from_name = legacy_storage.disk_path
+ to_name = hashed_storage.disk_path
+
+ hashed_storage.ensure_storage_path_exists
+ gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
+ end
+
+ it 'does not try to move nil repository over hashed' do
+ expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki")
+
+ service.execute
+ end
+ end
+ end
+
+ def expect_move_repository(from_name, to_name)
+ expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name).and_call_original
+ end
+ end
+end
diff --git a/spec/services/projects/housekeeping_service_spec.rb b/spec/services/projects/housekeeping_service_spec.rb
index 437c009e7fa..b7b5de07380 100644
--- a/spec/services/projects/housekeeping_service_spec.rb
+++ b/spec/services/projects/housekeeping_service_spec.rb
@@ -75,7 +75,7 @@ describe Projects::HousekeepingService do
end
end
- it 'uses all three kinds of housekeeping we offer' do
+ it 'goes through all three housekeeping tasks, executing only the highest task when there is overlap' do
allow(subject).to receive(:try_obtain_lease).and_return(:the_uuid)
allow(subject).to receive(:lease_key).and_return(:the_lease_key)
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index a14ed526f68..2459f371a91 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -121,11 +121,14 @@ describe Projects::TransferService do
end
context 'namespace which contains orphan repository with same projects path name' do
- let(:repository_storage_path) { Gitlab.config.repositories.storages['default']['path'] }
+ let(:repository_storage) { 'default' }
+ let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] }
before do
group.add_owner(user)
- gitlab_shell.add_repository(repository_storage_path, "#{group.full_path}/#{project.path}")
+ unless gitlab_shell.add_repository(repository_storage, "#{group.full_path}/#{project.path}")
+ raise 'failed to add repository'
+ end
@result = transfer_project(project, user, group)
end
diff --git a/spec/services/projects/unlink_fork_service_spec.rb b/spec/services/projects/unlink_fork_service_spec.rb
index 4f1ab697460..50d3a4ec982 100644
--- a/spec/services/projects/unlink_fork_service_spec.rb
+++ b/spec/services/projects/unlink_fork_service_spec.rb
@@ -1,19 +1,22 @@
require 'spec_helper'
describe Projects::UnlinkForkService do
- subject { described_class.new(fork_project, user) }
+ include ProjectForksHelper
- let(:fork_link) { create(:forked_project_link) }
- let(:fork_project) { fork_link.forked_to_project }
+ subject { described_class.new(forked_project, user) }
+
+ let(:fork_link) { forked_project.forked_project_link }
+ let(:project) { create(:project, :public) }
+ let(:forked_project) { fork_project(project, user) }
let(:user) { create(:user) }
context 'with opened merge request on the source project' do
- let(:merge_request) { create(:merge_request, source_project: fork_project, target_project: fork_link.forked_from_project) }
- let(:mr_close_service) { MergeRequests::CloseService.new(fork_project, user) }
+ let(:merge_request) { create(:merge_request, source_project: forked_project, target_project: fork_link.forked_from_project) }
+ let(:mr_close_service) { MergeRequests::CloseService.new(forked_project, user) }
before do
allow(MergeRequests::CloseService).to receive(:new)
- .with(fork_project, user)
+ .with(forked_project, user)
.and_return(mr_close_service)
end
@@ -25,13 +28,24 @@ describe Projects::UnlinkForkService do
end
it 'remove fork relation' do
- expect(fork_project.forked_project_link).to receive(:destroy)
+ expect(forked_project.forked_project_link).to receive(:destroy)
+
+ subject.execute
+ end
+
+ it 'removes the link to the fork network' do
+ expect(forked_project.fork_network_member).to be_present
+ expect(forked_project.fork_network).to be_present
subject.execute
+ forked_project.reload
+
+ expect(forked_project.fork_network_member).to be_nil
+ expect(forked_project.reload.fork_network).to be_nil
end
it 'refreshes the forks count cache of the source project' do
- source = fork_project.forked_from_project
+ source = forked_project.forked_from_project
expect(source.forks_count).to eq(1)
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 92cc9a37795..3da222e2ed8 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Projects::UpdateService, '#execute' do
+ include ProjectForksHelper
+
let(:gitlab_shell) { Gitlab::Shell.new }
let(:user) { create(:user) }
let(:admin) { create(:admin) }
@@ -57,17 +59,26 @@ describe Projects::UpdateService, '#execute' do
end
end
end
+
+ context 'when project visibility is higher than parent group' do
+ let(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::INTERNAL) }
+
+ before do
+ project.update(namespace: group, visibility_level: group.visibility_level)
+ end
+
+ it 'does not update project visibility level' do
+ result = update_project(project, admin, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
+ expect(result).to eq({ status: :error, message: 'Visibility level public is not allowed in a internal group.' })
+ expect(project.reload).to be_internal
+ end
+ end
end
describe 'when updating project that has forks' do
let(:project) { create(:project, :internal) }
- let(:forked_project) { create(:forked_project_with_submodules, :internal) }
-
- before do
- forked_project.build_forked_project_link(forked_to_project_id: forked_project.id,
- forked_from_project_id: project.id)
- forked_project.save
- end
+ let(:forked_project) { fork_project(project) }
it 'updates forks visibility level when parent set to more restrictive' do
opts = { visibility_level: Gitlab::VisibilityLevel::PRIVATE }
@@ -134,24 +145,43 @@ describe Projects::UpdateService, '#execute' do
end
context 'when renaming a project' do
- let(:repository_storage_path) { Gitlab.config.repositories.storages['default']['path'] }
+ let(:repository_storage) { 'default' }
+ let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] }
- before do
- gitlab_shell.add_repository(repository_storage_path, "#{user.namespace.full_path}/existing")
- end
+ context 'with legacy storage' do
+ before do
+ gitlab_shell.add_repository(repository_storage, "#{user.namespace.full_path}/existing")
+ end
+
+ after do
+ gitlab_shell.remove_repository(repository_storage_path, "#{user.namespace.full_path}/existing")
+ end
- after do
- gitlab_shell.remove_repository(repository_storage_path, "#{user.namespace.full_path}/existing")
+ it 'does not allow renaming when new path matches existing repository on disk' do
+ result = update_project(project, admin, path: 'existing')
+
+ expect(result).to include(status: :error)
+ expect(result[:message]).to match('There is already a repository with that name on disk')
+ expect(project).not_to be_valid
+ expect(project.errors.messages).to have_key(:base)
+ expect(project.errors.messages[:base]).to include('There is already a repository with that name on disk')
+ end
end
- it 'does not allow renaming when new path matches existing repository on disk' do
- result = update_project(project, admin, path: 'existing')
+ context 'with hashed storage' do
+ let(:project) { create(:project, :repository, creator: user, namespace: user.namespace) }
- expect(result).to include(status: :error)
- expect(result[:message]).to match('Project could not be updated!')
- expect(project).not_to be_valid
- expect(project.errors.messages).to have_key(:base)
- expect(project.errors.messages[:base]).to include('There is already a repository with that name on disk')
+ before do
+ stub_application_setting(hashed_storage_enabled: true)
+ end
+
+ it 'does not check if new path matches existing repository on disk' do
+ expect(project).not_to receive(:repository_with_same_path_already_exists?)
+
+ result = update_project(project, admin, path: 'existing')
+
+ expect(result).to include(status: :success)
+ end
end
end
@@ -159,8 +189,10 @@ describe Projects::UpdateService, '#execute' do
it 'returns an error result when record cannot be updated' do
result = update_project(project, admin, { name: 'foo&bar' })
- expect(result).to eq({ status: :error,
- message: 'Project could not be updated!' })
+ expect(result).to eq({
+ status: :error,
+ message: "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'."
+ })
end
end
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index c2d6d7781b9..b1241cd8d0b 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -232,7 +232,9 @@ describe SystemNoteService do
context 'when milestone added' do
it 'sets the note text' do
- expect(subject.note).to eq "changed milestone to #{milestone.to_reference}"
+ reference = milestone.to_reference(format: :iid)
+
+ expect(subject.note).to eq "changed milestone to #{reference}"
end
end
diff --git a/spec/services/tags/create_service_spec.rb b/spec/services/tags/create_service_spec.rb
index 57013b54560..e7e9080b6b0 100644
--- a/spec/services/tags/create_service_spec.rb
+++ b/spec/services/tags/create_service_spec.rb
@@ -28,7 +28,7 @@ describe Tags::CreateService do
it 'returns an error' do
expect(repository).to receive(:add_tag)
.with(user, 'v1.1.0', 'master', 'Foo')
- .and_raise(Rugged::TagError)
+ .and_raise(Gitlab::Git::Repository::TagExistsError)
response = service.execute('v1.1.0', 'master', 'Foo')
diff --git a/spec/services/users/activity_service_spec.rb b/spec/services/users/activity_service_spec.rb
index fef4da0c76e..17eabad73be 100644
--- a/spec/services/users/activity_service_spec.rb
+++ b/spec/services/users/activity_service_spec.rb
@@ -38,6 +38,18 @@ describe Users::ActivityService do
end
end
end
+
+ context 'when in GitLab read-only instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ end
+
+ it 'does not update last_activity_at' do
+ service.execute
+
+ expect(last_hour_user_ids).to eq([])
+ end
+ end
end
def last_hour_user_ids
diff --git a/spec/services/users/last_push_event_service_spec.rb b/spec/services/users/last_push_event_service_spec.rb
new file mode 100644
index 00000000000..956358738fe
--- /dev/null
+++ b/spec/services/users/last_push_event_service_spec.rb
@@ -0,0 +1,112 @@
+require 'spec_helper'
+
+describe Users::LastPushEventService do
+ let(:user) { build(:user, id: 1) }
+ let(:project) { build(:project, id: 2) }
+ let(:event) { build(:push_event, id: 3, author: user, project: project) }
+ let(:service) { described_class.new(user) }
+
+ describe '#cache_last_push_event' do
+ it "caches the event for the event's project and current user" do
+ expect(service).to receive(:set_key)
+ .ordered
+ .with('last-push-event/1/2', 3)
+
+ expect(service).to receive(:set_key)
+ .ordered
+ .with('last-push-event/1', 3)
+
+ service.cache_last_push_event(event)
+ end
+
+ it 'caches the event for the origin project when pushing to a fork' do
+ source = build(:project, id: 5)
+
+ allow(project).to receive(:forked?).and_return(true)
+ allow(project).to receive(:forked_from_project).and_return(source)
+
+ expect(service).to receive(:set_key)
+ .ordered
+ .with('last-push-event/1/2', 3)
+
+ expect(service).to receive(:set_key)
+ .ordered
+ .with('last-push-event/1', 3)
+
+ expect(service).to receive(:set_key)
+ .ordered
+ .with('last-push-event/1/5', 3)
+
+ service.cache_last_push_event(event)
+ end
+ end
+
+ describe '#last_event_for_user' do
+ it 'returns the last push event for the current user' do
+ expect(service).to receive(:find_cached_event)
+ .with('last-push-event/1')
+ .and_return(event)
+
+ expect(service.last_event_for_user).to eq(event)
+ end
+
+ it 'returns nil when no push event could be found' do
+ expect(service).to receive(:find_cached_event)
+ .with('last-push-event/1')
+ .and_return(nil)
+
+ expect(service.last_event_for_user).to be_nil
+ end
+ end
+
+ describe '#last_event_for_project' do
+ it 'returns the last push event for the given project' do
+ expect(service).to receive(:find_cached_event)
+ .with('last-push-event/1/2')
+ .and_return(event)
+
+ expect(service.last_event_for_project(project)).to eq(event)
+ end
+
+ it 'returns nil when no push event could be found' do
+ expect(service).to receive(:find_cached_event)
+ .with('last-push-event/1/2')
+ .and_return(nil)
+
+ expect(service.last_event_for_project(project)).to be_nil
+ end
+ end
+
+ describe '#find_cached_event', :use_clean_rails_memory_store_caching do
+ context 'with a non-existing cache key' do
+ it 'returns nil' do
+ expect(service.find_cached_event('bla')).to be_nil
+ end
+ end
+
+ context 'with an existing cache key' do
+ before do
+ service.cache_last_push_event(event)
+ end
+
+ it 'returns a PushEvent when no merge requests exist for the event' do
+ allow(service).to receive(:find_event_in_database)
+ .with(event.id)
+ .and_return(event)
+
+ expect(service.find_cached_event('last-push-event/1')).to eq(event)
+ end
+
+ it 'removes the cache key when no event could be found and returns nil' do
+ allow(PushEvent).to receive(:without_existing_merge_requests)
+ .and_return(PushEvent.none)
+
+ expect(Rails.cache).to receive(:delete)
+ .with('last-push-event/1')
+ .and_call_original
+
+ expect(service.find_cached_event('last-push-event/1')).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/services/users/update_service_spec.rb b/spec/services/users/update_service_spec.rb
index 6ee35a33b2d..f8d4a47b212 100644
--- a/spec/services/users/update_service_spec.rb
+++ b/spec/services/users/update_service_spec.rb
@@ -31,13 +31,13 @@ describe Users::UpdateService do
end
def update_user(user, opts)
- described_class.new(user, opts).execute
+ described_class.new(user, opts.merge(user: user)).execute
end
end
describe '#execute!' do
it 'updates the name' do
- service = described_class.new(user, name: 'New Name')
+ service = described_class.new(user, user: user, name: 'New Name')
expect(service).not_to receive(:notify_new_user)
result = service.execute!
@@ -55,7 +55,7 @@ describe Users::UpdateService do
it 'fires system hooks when a new user is saved' do
system_hook_service = spy(:system_hook_service)
user = build(:user)
- service = described_class.new(user, name: 'John Doe')
+ service = described_class.new(user, user: user, name: 'John Doe')
expect(service).to receive(:notify_new_user).and_call_original
expect(service).to receive(:system_hook_service).and_return(system_hook_service)
@@ -65,7 +65,7 @@ describe Users::UpdateService do
end
def update_user(user, opts)
- described_class.new(user, opts).execute!
+ described_class.new(user, opts.merge(user: user)).execute!
end
end
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index ff1754fbe7e..48cacba6a8a 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -1,7 +1,7 @@
require './spec/simplecov_env'
SimpleCovEnv.start!
-ENV["RAILS_ENV"] ||= 'test'
+ENV["RAILS_ENV"] = 'test'
ENV["IN_MEMORY_APPLICATION_SETTINGS"] = 'true'
require File.expand_path("../../config/environment", __FILE__)
@@ -64,8 +64,16 @@ RSpec.configure do |config|
config.infer_spec_type_from_file_location!
- config.define_derived_metadata(file_path: %r{/spec/requests/(ci/)?api/}) do |metadata|
- metadata[:api] = true
+ config.define_derived_metadata(file_path: %r{/spec/}) do |metadata|
+ location = metadata[:location]
+
+ metadata[:api] = true if location =~ %r{/spec/requests/api/}
+
+ # do not overwrite type if it's already set
+ next if metadata.key?(:type)
+
+ match = location.match(%r{/spec/([^/]+)/})
+ metadata[:type] = match[1].singularize.to_sym if match
end
config.raise_errors_for_deprecations!
@@ -73,7 +81,10 @@ RSpec.configure do |config|
if ENV['CI']
# This includes the first try, i.e. tests will be run 4 times before failing.
config.default_retry_count = 4
- config.reporter.register_listener(RspecFlaky::Listener.new, :example_passed, :dump_summary)
+ config.reporter.register_listener(
+ RspecFlaky::Listener.new,
+ :example_passed,
+ :dump_summary)
end
config.before(:suite) do
@@ -161,6 +172,24 @@ RSpec.configure do |config|
end
end
+# Add a simpler way to match asset paths containing digest strings
+RSpec::Matchers.define :match_asset_path do |expected|
+ match do |actual|
+ path = Regexp.escape(expected)
+ extname = Regexp.escape(File.extname(expected))
+ digest_regex = Regexp.new(path.sub(extname, "(?:-\\h+)?#{extname}") << '$')
+ digest_regex =~ actual
+ end
+
+ failure_message do |actual|
+ "expected that #{actual} would include an asset path for #{expected}"
+ end
+
+ failure_message_when_negated do |actual|
+ "expected that #{actual} would not include an asset path for #{expected}"
+ end
+end
+
FactoryGirl::SyntaxRunner.class_eval do
include RSpec::Mocks::ExampleMethods
end
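A minimal usage sketch for the match_asset_path matcher defined above in spec_helper.rb; the asset paths and the digest suffix are illustrative stand-ins for whatever the asset pipeline actually emits.

require 'spec_helper'

describe 'match_asset_path matcher' do
  it 'accepts plain and digest-suffixed asset paths' do
    # The digest portion is optional, so both forms satisfy the same expectation.
    expect('/assets/icons.svg').to match_asset_path('/assets/icons.svg')
    expect('/assets/icons-7f2a6b3c9d.svg').to match_asset_path('/assets/icons.svg')
  end
end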
diff --git a/spec/support/api/scopes/read_user_shared_examples.rb b/spec/support/api/scopes/read_user_shared_examples.rb
index 3bd589d64b9..57e28e040d7 100644
--- a/spec/support/api/scopes/read_user_shared_examples.rb
+++ b/spec/support/api/scopes/read_user_shared_examples.rb
@@ -23,6 +23,10 @@ shared_examples_for 'allows the "read_user" scope' do
context 'when the requesting token does not have any required scope' do
let(:token) { create(:personal_access_token, scopes: ['read_registry'], user: user) }
+ before do
+ stub_container_registry_config(enabled: true)
+ end
+
it 'returns a "401" response' do
get api_call.call(path, user, personal_access_token: token)
diff --git a/spec/support/features/discussion_comments_shared_example.rb b/spec/support/features/discussion_comments_shared_example.rb
index 81cb94ab8c4..9f05cabf7ae 100644
--- a/spec/support/features/discussion_comments_shared_example.rb
+++ b/spec/support/features/discussion_comments_shared_example.rb
@@ -71,7 +71,7 @@ shared_examples 'discussion comments' do |resource_name|
expect(page).not_to have_selector menu_selector
find(toggle_selector).click
- find('body').click
+ find('body').trigger 'click'
expect(page).not_to have_selector menu_selector
end
diff --git a/spec/support/features/issuable_slash_commands_shared_examples.rb b/spec/support/features/issuable_slash_commands_shared_examples.rb
index 8282ba7e536..061e0d35590 100644
--- a/spec/support/features/issuable_slash_commands_shared_examples.rb
+++ b/spec/support/features/issuable_slash_commands_shared_examples.rb
@@ -29,7 +29,7 @@ shared_examples 'issuable record that supports quick actions in its description
wait_for_requests
end
- describe "new #{issuable_type}", js: true do
+ describe "new #{issuable_type}", :js do
context 'with commands in the description' do
it "creates the #{issuable_type} and interpret commands accordingly" do
case issuable_type
@@ -53,7 +53,7 @@ shared_examples 'issuable record that supports quick actions in its description
end
end
- describe "note on #{issuable_type}", js: true do
+ describe "note on #{issuable_type}", :js do
before do
visit public_send("namespace_project_#{issuable_type}_path", project.namespace, project, issuable)
end
@@ -290,7 +290,7 @@ shared_examples 'issuable record that supports quick actions in its description
end
end
- describe "preview of note on #{issuable_type}", js: true do
+ describe "preview of note on #{issuable_type}", :js do
it 'removes quick actions from note and explains them' do
create(:user, username: 'bob')
diff --git a/spec/support/gpg_helpers.rb b/spec/support/gpg_helpers.rb
index 65b38626a51..3f7279a50e0 100644
--- a/spec/support/gpg_helpers.rb
+++ b/spec/support/gpg_helpers.rb
@@ -92,6 +92,46 @@ module GpgHelpers
KEY
end
+ def public_key_with_extra_signing_key
+ <<~KEY.strip
+ -----BEGIN PGP PUBLIC KEY BLOCK-----
+ Version: GnuPG v1
+
+ mI0EWK7VJwEEANSFayuVYenl7sBKUjmIxwDRc3jd+K+FWUZgknLgiLcevaLh/mxV
+ 98dLxDKGDHHNKc/B7Y4qdlZYv1wfNQVuIbd8dqUQFOOkH7ukbgcGBTxH+2IM67y+
+ QBH618luS5Gz1d4bd0YoFf/xZGEh9G5xicz7TiXYzLKjnMjHu2EmbFePABEBAAG0
+ LU5hbm5pZSBCZXJuaGFyZCA8bmFubmllLmJlcm5oYXJkQGV4YW1wbGUuY29tPoi4
+ BBMBAgAiBQJYrtUnAhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgAAKCRDM++Gf
+ AKyLHaeSA/99oUWpb02PlfkALcx5RncboMHkgczYEU9wOFIgvXIReswThCMOvPZa
+ piui+ItyJfV3ijJfO8IvbbFcvU7jjGA073Bb7tbzAEOQLA16mWgBLQlGaRWbHDW4
+ uwFxvkJKA0GzEsadEXeniESaZPc4rOXKPO3+/MSQWS2bmvvGsBTEuriNBFiu1ScB
+ BADIXkITf+kKCkD+n8tMsdTLInefu8KrJ8p7YRYCCabEXnWRsDb5zxUAG2VXCVUh
+ Yl6QXQybkNiBaduS+uxilz7gtYZUMFJvQ09+fV7D2N9B7u/1bGdIYz+cDFJnEJit
+ LY4w/nju2Sno5CL5Ead8sZuslKetSXPYHR/kbW462EOw5wARAQABiJ8EGAECAAkF
+ Aliu1ScCGwwACgkQzPvhnwCsix2WRQQAtOXpBS60myrBUXhlcqabDQgSTw+Spbgb
+ 61hEMckyzpk7SfMNLz0EbYMvj9SU6znBG8RGeUljPTVMxPGr9yIpoFMSPKAUi/0K
+ AgRmH3tVpxlMipwXjST1Jukk2eHckt/3jGw3E1ElMSFtULe6u5p4gu578hHukEwT
+ IKzj0ZyC7DK5AQ0EWcx23AEIANwpAq85bT10JCBuNhOMyF2jKVt5wHbI9wBtjWYG
+ fgJFBkRvm6IsbmR0Y5DSBvF/of0UX1iGMfx6mvCDJkb1okquhCUef6MONWRpzXYE
+ CIZDm1TXu6yv0D35tkLfPo+/sY9UHHp1zGRcPAU46e8ztRwoD+zEJwy7lobLHGOL
+ 9OdWtCGjsutLOTqKRK4jsifr8n3rePU09rejhDkRONNs7ufn9GRcWMN7RWiFDtpU
+ gNe84AJ38qaXPU8GHNTrDtDtRRPmn68ezMmE1qTNsxQxD4Isexe5Wsfc4+ElaP9s
+ zaHgij7npX1HS9RpmhnOa2h1ESroM9cqDh3IJVhf+eP6/uMAEQEAAYkBxAQYAQIA
+ DwUCWcx23AIbAgUJAeEzgAEpCRDM++GfAKyLHcBdIAQZAQIABgUCWcx23AAKCRDk
+ garE0uOuES7DCAC2Kgl6zO+NqIBIS6McgcEN0sGyvOvZ8Ps4hBiMwCyDAnsIRAUi
+ v4KZMtQMAyl9njJ3YjPWBsdieuTz45O06DDnrzJpZO5rUGJjAcEue4zvRRWIyu3H
+ qHC8MsvkslsNCygJHoWlknm+HucroskTNtxHQ+FdKZ6Tey+twl1u+PhV8PQVyFkl
+ 4G1chO90EP4dvYrye26CC+ik2JkvC7Vy5M+U0PJikme8pFMjcdNks25BnAKcdqKU
+ AU8RTkSjoYvb8qSmZyldJjYjQRkTPRX1ZdaOID1EdiWl+s5cn0Oypo3z7BChcEMx
+ IWB/gmXQJQXVr5qNQnJObyMO/RczXYi9qNnyGMED/2EJJERLR0nefjHQalwDKQVP
+ s5lX1OKAcf2CrV6ZarckqaQgtqjZbuV2C2mrOHUs5uojlXaopj5gA4yJSGDcYhj1
+ Rg9jdHWBtkHBj3eL32ZqrHDs3ap8ErZMmPE8A+mn9TTnQS+FY2QF7vBjJKM3qPT7
+ DMVGWrg4m1NF8N6yMPMP
+ =RB1y
+ -----END PGP PUBLIC KEY BLOCK-----
+ KEY
+ end
+
def primary_keyid
fingerprint[-16..-1]
end
@@ -201,4 +241,277 @@ module GpgHelpers
['bette.cartwright@example.com', 'bette.cartwright@example.net']
end
end
+
+ # GPG Key with extra signing key
+ module User3
+ extend self
+
+ def signed_commit_signature
+ <<~SIGNATURE
+ -----BEGIN PGP SIGNATURE-----
+
+ iQEzBAABCAAdFiEEBSLdKbmPFnzYQhdS44/8r3Wr2SoFAlnNlT8ACgkQ44/8r3Wr
+ 2SqP1wf9FC4J2S8LIHs/fpxgkYzsyCp5lCbS7JuoD4pqmI2KWyBx+vi9/3mZPCsm
+ Fj9f0vFEtNOb39GNGZbaA8DdGw30/WAS6kI6yco0WSK53KHrLw9Kqd+3e/NAVSsl
+ 991Gq4n8X1U5izSH+gZOMtEEUBGqIlZKgRrEh7lhNcz0G7JTF2VCE4NNtZdq7GDA
+ N6jOQxDGUwi9wQBYORQzIBc3NihfhGloII1hXf0XzrgUY3zNYHTT7QipCxKAmH54
+ skwW+wi8RpBedar4saf7fs5xZbP/0yyVz98MJMdHBL68++Xt1AIHoqrb7eWszqnd
+ PCo/fnz1iHKCig602KLj0/zhADcNkg==
+ =LsTi
+ -----END PGP SIGNATURE-----
+ SIGNATURE
+ end
+
+ def signed_commit_base_data
+ <<~SIGNEDDATA
+ tree 86ec18bfe87ad42a782fdabd8310f9b7ac750f51
+ parent 2d1096e3a0ecf1d2baf6dee036cc80775d4940ba
+ author John Doe <john.doe@example.com> 1506645311 -0500
+ committer John Doe <john.doe@example.com> 1506645311 -0500
+
+ Commit signed with subkey by John Doe
+ SIGNEDDATA
+ end
+
+ def public_key
+ <<~KEY.strip
+ -----BEGIN PGP PUBLIC KEY BLOCK-----
+
+ mQENBFnNgbIBCAC9/WblcR4s/pFTwh9cm2iS59YRhtGfbrnfNE6QMIFIRFaK0u6J
+ LDy+scipXLoGg7X0XNFLW6Y457UUpkaPDVLPuVMONhMXdVqndGVvk9jq3D4oDtRa
+ ukucuXr9F84lHnjL3EosmAUiK3xBmHOGHm8+bvKbgPOdvre48YxoJ1POTen+chfo
+ YtLUfnC9EEGY/bn00aaXm3NV+zZK2zio5bFX9YLWSNh/JaXxuJsLoHR/lVrU7CLt
+ FCaGcPQ9SU46LHPshEYWO7ZsjEYJsYYOIOEzfcfR47T2EDTa6mVc++gC5TCoY3Ql
+ jccgm+EM0LvyEHwupEpxzCg2VsT0yoedhUhtABEBAAG0H0pvaG4gRG9lIDxqb2hu
+ LmRvZUBleGFtcGxlLmNvbT6JAVQEEwEIAD4WIQTqP4uIlyqP1HSHLy8RWzrxqtPt
+ ugUCWc2BsgIbAwUJA8JnAAULCQgHAgYVCAkKCwIEFgIDAQIeAQIXgAAKCRARWzrx
+ qtPturMDCACc1Pi1sLJFcCnJEc9sCInCO4LH8fntNjRLN3MTPU5YCYmFM3fAl5ly
+ vXPZ4jNWZxKbQVeFnkDOg5Ti8bzmFEMc8KbZuguktVFizxnLdFCTTRO89i3HDVSN
+ bIosrs5HJwRKOzul6i2whn3dsr8/P8WJczYjZGiw29hGwH3md4Thn/aAGbzjoCEF
+ IfIb1kccyHMJkaj79S8B2agsbEJLuTSfsXC3kGZIJyKG1DNmDUHW/ZE6ro/Kkhik
+ 3w6Jw14cMsKUIOBkOgsD/gXgX9xxWjYHmKrbCXucTKUevNlaCy5tzwrC0Am3prl9
+ OJj3macOA8hNaTVDflEHNCwHOwqnVQYyuQENBFnNgbIBCAC59MmKc0cqPRPTpCZ5
+ arKRoj23SNKWMDWaxSELdU91Wd/NJk4wF25urk9BtBuwjzaBMqb/xPD88yJC3ucs
+ 2LwCyAb5C/dHcPOpys8Pd+KrdHDR3zAMjcASsizlW/qFI9MtjhcU9Yd6iTUejAZG
+ NEC76WALJ3SLYzCaDkHFjWpH3Xq6ck3/9jpL3csn/5GLCsZQUDYGrZSXvHAIigwW
+ Xo6tMs5LCCO9CZg2qGDpvqlzcmy6CRkf0h/UFYJzGqfbJtxeCIxa93WIPE8eGwao
+ aneDlNtIoYiP6krC3OLsaPWT58QltNKaQuZSpjwtQBHa4JIt55vx+FcvRb7Kflgf
+ fT8bABEBAAGJATwEGAEIACYWIQTqP4uIlyqP1HSHLy8RWzrxqtPtugUCWc2BsgIb
+ DAUJA8JnAAAKCRARWzrxqtPtuqJjCACj+Z4qtgMpJXx3u58wCzkVLl5IylD/tEPA
+ cNIrj8QS8ec+woTJaMGVCh96VC2FPl8KR4Hjhy0yaupyPbTI6VWib63S/NcDfG7r
+ tviRFG2Gf8yduERebyC0cpgnmjVgFfJs7N3K3ncz6myOr9idNI05OC9poL73sDUv
+ jRXhm7uy938bT/R4MQdpYuxucgU3MiwvfG5ht+oJ4Yp+/IrR2PTqRGqMCsodnroa
+ TBKq2kW565TtCvrFkNub/ytorDbIVN9VrIMcuTiOv8sLoQRDJO9HvWUhYAqMY6Uh
+ dy12jR9FrquZnGsDKKs9V0Y6J4Wi8vnmdgWVZUc40pfXq3APAB6suQENBFnNgeAB
+ CADFqQRxLHxLIQ7B72diTMI2tPk9d5c67k+Gzkrg1QYoxBLdRCmhM4ydYqZzvIz4
+ 1npkK20w4somOCwvyAOjO46IGb3f/Wk8mY8o5HMpI1miAfze0YTZKzRo2DmrvwbV
+ /h8jvZNCISwtrOgaaszWSVSuEQQCA1jf4qixfCb3ReETvZc3MTZXhw8IUbszXh5d
+ a6CYqq/fr5Zw4Dc19ZSoHSTh0Wn03mEm/kaYtia/wt1I+xVvTSaC2Pf/kUtr7UEf
+ 3NMc0YF0s4KgeW8KwjHa7Sz9wzydLPRH5kJ26SDUGUhrFf1jNLIegtDsoISV/86G
+ ztXcVq5GY6lXMwmsggRe++7xABEBAAGJAmwEGAEIACAWIQTqP4uIlyqP1HSHLy8R
+ WzrxqtPtugUCWc2B4AIbAgFACRARWzrxqtPtusB0IAQZAQgAHRYhBAUi3Sm5jxZ8
+ 2EIXUuOP/K91q9kqBQJZzYHgAAoJEOOP/K91q9kqlHcH+wbvD14ITYDMfgIfy67O
+ 4Qcmgf1qzGXhpsABz/i/EPgRD990eNBI0YGuvoKRJfetEGn7LerrrCB8Z+ICFPHF
+ rzXoe10zm+QTREck0OB8nPFRycJ+Fbl6JX+cnkEx27Mmg1aVb7+H5LMDaWO1KjLs
+ g2wIDo/jrDfW7NoZzy4XTd7jFCOt4fftL/dhiujQmhLzugZXCxRISOVdmgilDJQo
+ Tz1sEm34ida98JFjdzSgkUvJ/pFTZ21ThCNxlUf01Hr2Pdcg1e2/97sZocMFTY2J
+ KwmiW2LG3B05/VrRtdvsCVj8G49coxgPPKty+m71ovAXdS/CvVqE7TefCplsYJ1d
+ V3abwwf/Xl2SxzbAKbrYMgZfdGzpPg2u6982WvfGIVfjFJh9veHZAbfkPcjdAD2X
+ e67Y4BeKG2OQQqeOY2y+81A7PaehgHzbFHJG/4RjqB66efrZAg4DgIdbr4oyMoUJ
+ VVsl0wfYSIvnd4kvWXYICVwk53HLA3wIowZAsJ1LT2byAKbUzayLzTekrTzGcwQk
+ g2XT798ev2QuR5Ki5x8MULBFX4Lhd03+uGOxjhNPQD6DAAHCQLaXQhaGuyMgt5hD
+ t0nF3yuw3Eg4Ygcbtm24rZXOHJc9bDKeRiWZz1dIyYYVJmHSQwOVXkAwJlF1sIgy
+ /jQYeOgFDKq20x86WulkvsUtBoaZJg==
+ =Q5Z7
+ -----END PGP PUBLIC KEY BLOCK-----
+ KEY
+ end
+
+ def secret_key
+ <<~SECRET
+ -----BEGIN PGP PRIVATE KEY BLOCK-----
+
+ lQPGBFnNgbIBCAC9/WblcR4s/pFTwh9cm2iS59YRhtGfbrnfNE6QMIFIRFaK0u6J
+ LDy+scipXLoGg7X0XNFLW6Y457UUpkaPDVLPuVMONhMXdVqndGVvk9jq3D4oDtRa
+ ukucuXr9F84lHnjL3EosmAUiK3xBmHOGHm8+bvKbgPOdvre48YxoJ1POTen+chfo
+ YtLUfnC9EEGY/bn00aaXm3NV+zZK2zio5bFX9YLWSNh/JaXxuJsLoHR/lVrU7CLt
+ FCaGcPQ9SU46LHPshEYWO7ZsjEYJsYYOIOEzfcfR47T2EDTa6mVc++gC5TCoY3Ql
+ jccgm+EM0LvyEHwupEpxzCg2VsT0yoedhUhtABEBAAH+BwMCOqjIWtWBMo3mjIP1
+ OnIbZ+YJxSUZ/B8wU2loMv4XiKmeXLbjD6h3jojxYlnreSHA9QvoY8uNaWElL/n2
+ jv6bxluivk8tA9FWJVv4HaSlMDd2J2YmUW17r8z9Kvm7b7pFVSrEoYV93Wdj5FJ7
+ ciKrFhYNSD7tH1sHwkrFAbiv6aHyk9h48YmR3kx2wBvz+pWk7M2srCJx2b6DXkj/
+ fsj1c/vnzUUGooOJgOvYAWrpg/rJUNxSsFypAHf8Xtk+xt8S1aZ9jaCmYk6I1B2L
+ m00HP43cXUpKcmETW1zXvfMLKjjoUEAJhSJhbCwiEzGL4ojQTarl8qbb+MisakEJ
+ DkPYtrhiiuVzUIFfqE86yO0UKidtzBmJAW3c6zeiUATvACzU09aGyUY1cJi93oXD
+ w4PCyVZ+nMvGD1wx+gyYdDINwpX4y6od9RDr06DGCzwu+S2vxsu1T8LdSv52fhBr
+ U0FY3Z3VN1ytay4SHi/8Y9VBYQFBh7R7Ch0gEMxLVKXVNqOXHUdGrKWV/WmyLKuZ
+ W9DEnWU4Mpz/di5jU8EDW7EB9DZZhVk3mQw3nuAZrBGD4azmmD5mgSgLeBGmKZ1e
+ q/9IWO44mRBBUtNv+rAkmmYF3MCNHuc7EMj+c/IgBUC7d5qBzGWA3UJ0vKX4xcIQ
+ X/PnU+nGxNvBrdqQaMLczeg28SerojxuX79prOsoySctLAbajd9HshW5SfOZ0rvb
+ BNHPqolQDijYEHGxANh4BbamRMGi60Rop7vJsZOLAemz17x/mvCtAHISOJT77/IM
+ oWC+IksJ5XsA/klJGe/tkx11aRQDDmKvIJXmMuRdvnIR23UBbzRQlWWq0l6CdoF6
+ 6SQ9BJBFq0WY32No9WZAPnDO3buUzWc1Y3uwn/+h7TVYVyTlEqzpYJ9FoJwBHbor
+ 0663eoyz6+AUtB9Kb2huIERvZSA8am9obi5kb2VAZXhhbXBsZS5jb20+iQFUBBMB
+ CAA+FiEE6j+LiJcqj9R0hy8vEVs68arT7boFAlnNgbICGwMFCQPCZwAFCwkIBwIG
+ FQgJCgsCBBYCAwECHgECF4AACgkQEVs68arT7bqzAwgAnNT4tbCyRXApyRHPbAiJ
+ wjuCx/H57TY0SzdzEz1OWAmJhTN3wJeZcr1z2eIzVmcSm0FXhZ5AzoOU4vG85hRD
+ HPCm2boLpLVRYs8Zy3RQk00TvPYtxw1UjWyKLK7ORycESjs7peotsIZ93bK/Pz/F
+ iXM2I2RosNvYRsB95neE4Z/2gBm846AhBSHyG9ZHHMhzCZGo+/UvAdmoLGxCS7k0
+ n7Fwt5BmSCcihtQzZg1B1v2ROq6PypIYpN8OicNeHDLClCDgZDoLA/4F4F/ccVo2
+ B5iq2wl7nEylHrzZWgsubc8KwtAJt6a5fTiY95mnDgPITWk1Q35RBzQsBzsKp1UG
+ Mp0DxgRZzYGyAQgAufTJinNHKj0T06QmeWqykaI9t0jSljA1msUhC3VPdVnfzSZO
+ MBdubq5PQbQbsI82gTKm/8Tw/PMiQt7nLNi8AsgG+Qv3R3DzqcrPD3fiq3Rw0d8w
+ DI3AErIs5Vv6hSPTLY4XFPWHeok1HowGRjRAu+lgCyd0i2Mwmg5BxY1qR916unJN
+ //Y6S93LJ/+RiwrGUFA2Bq2Ul7xwCIoMFl6OrTLOSwgjvQmYNqhg6b6pc3JsugkZ
+ H9If1BWCcxqn2ybcXgiMWvd1iDxPHhsGqGp3g5TbSKGIj+pKwtzi7Gj1k+fEJbTS
+ mkLmUqY8LUAR2uCSLeeb8fhXL0W+yn5YH30/GwARAQAB/gcDAuYn/gmAA3OC5p5Q
+ Pat5kE7MtmSvSPmdjVA2o+6RtqZf81JqtAgtDVDwj7SPFsH6ue5P+iAn9938YYek
+ WQU2+0GXeUbSJt+u4VAchgwA5mYsEnEr1/E5KEfWPWO3jJol1rJG99adrjkMxvug
+ QJmwieqhu0368w1FU0tKstxYbr3Tz3nPCPDJoigMEUkXiFklDCUgeNk0g+zd5ytE
+ lXuuLYcGZX7njxL5jD+cMIKqua5zv8WbvNf/BhM1nCarxp4qzKWim8J8jY+iR+/E
+ qOar4aliGRez0j+qh/r8ywgPwfOO89zrKrMfaclL7dN9yuecmBHKWZvfrP5JKMHj
+ yTU3nRMhUGbfVUaaZI2Ccz2rNOU4oF9wuzpzQi8qOysZixRmH61Nw3ULIKoQgiWp
+ 0p5A3L94OaEfZEq3plVaIXI2YWYFSEAlIAc2dq4GxynousLdhNACi9bHhXrfFUhK
+ ckw1QlbhguO/j63/x8ygsmLZVwHG0fJZtMhT3+EGam9cuMBibIYyu3ufJRy7kMKt
+ kmyuk02X+hYJ7w8Pu6b8zYHBXbsEKamipMgd4oKtc8WnXILZo4lwDSArgs7ZVCBa
+ vGBbpTOsr54WjsyuCdX/wv0F2l31J87UxVtTKXx/+nfMfCE02zd+NsTgqvgqmkaA
+ Sy3qvv326kJNx7p+5hRwDzlAZ7vGJjj5TwCbGYDvctIf6MFrGDRNYwrGwNkPc3TG
+ rturfeL/ioua0Smj8LIbOv9Ir93gUIseNpxv8tXV/lffdIplcw802b3aXIKyv4fq
+ b9y3Oq/pDHFukKuBe9WTXJvjT0+ME+a0C8KIb/sts95pmjZsgN1kPmvuT0ReQwUR
+ eGrqz387bnVUzo4RgM3IERs/0EYzPzE8A2vc1e4/87b5J+1Xnov8Phd29vW8Td5l
+ ApiFrFO2r+/Np4kBPAQYAQgAJhYhBOo/i4iXKo/UdIcvLxFbOvGq0+26BQJZzYGy
+ AhsMBQkDwmcAAAoJEBFbOvGq0+26omMIAKP5niq2AyklfHe7nzALORUuXkjKUP+0
+ Q8Bw0iuPxBLx5z7ChMlowZUKH3pULYU+XwpHgeOHLTJq6nI9tMjpVaJvrdL81wN8
+ buu2+JEUbYZ/zJ24RF5vILRymCeaNWAV8mzs3credzPqbI6v2J00jTk4L2mgvvew
+ NS+NFeGbu7L3fxtP9HgxB2li7G5yBTcyLC98bmG36gnhin78itHY9OpEaowKyh2e
+ uhpMEqraRbnrlO0K+sWQ25v/K2isNshU31Wsgxy5OI6/ywuhBEMk70e9ZSFgCoxj
+ pSF3LXaNH0Wuq5mcawMoqz1XRjonhaLy+eZ2BZVlRzjSl9ercA8AHqydA8YEWc2B
+ 4AEIAMWpBHEsfEshDsHvZ2JMwja0+T13lzruT4bOSuDVBijEEt1EKaEzjJ1ipnO8
+ jPjWemQrbTDiyiY4LC/IA6M7jogZvd/9aTyZjyjkcykjWaIB/N7RhNkrNGjYOau/
+ BtX+HyO9k0IhLC2s6BpqzNZJVK4RBAIDWN/iqLF8JvdF4RO9lzcxNleHDwhRuzNe
+ Hl1roJiqr9+vlnDgNzX1lKgdJOHRafTeYSb+Rpi2Jr/C3Uj7FW9NJoLY9/+RS2vt
+ QR/c0xzRgXSzgqB5bwrCMdrtLP3DPJ0s9EfmQnbpINQZSGsV/WM0sh6C0OyghJX/
+ zobO1dxWrkZjqVczCayCBF777vEAEQEAAf4HAwKESvCIDq5QNeadnSvpkZemItPO
+ lDf+7Wiue2gt776D5xkVyT7WkgTQv+IGWGtqz7pCCO2rMp/F9u1BghdjY46jtrK6
+ MMFKta4YENUhRliH6M2YmRjq5p7xZgH6UOnDlqsafbfyUx30t59tbQj+07aMnH5J
+ LMm37nVkDvo3wpPQPuo7L6qizYsrHrQKeJZ8636u41UjC99lVH7vXzqXw68FJImi
+ XdMZbEVBIprYfCDem+fD6gJBA4JBqWJMxuFMfhWp+1WtYoeNojDm4KxBzc2fvYV/
+ HOIUfLFBvACD/UwU5ovllHN39/O8SMgyLm9ymx2/qXcdIkUz4l7fhOCY1OW12DMu
+ 5OFrrTteGK/Sj4Z8pYRdMdaKyjIlxuVzEQGWsU5+J2ALao5atEHguqwlD3cKh3G8
+ 1sA/l5eTFDt84erYv1MVStV0BhZaCE4mNL4WpnQGDdW05yoGq9jIyLcurb/k/atU
+ TUkAF1csgNlJlR3IP+7U9xfHkjMO5+SV82xoNf9nBjz06TRdnvOSKsMNKp0RxC/L
+ Hbiee9o7Rxqdiyv0ly6bCCymwfvlsEIqo3YKssBfe3XI5yQI2hF9QZaH1ywzmgaH
+ o+rbME/XxddRJueS79eipT7K05Z3ulSHTXzpDw+jZcdUV0Ac72Q9FTDPMl3xc6NW
+ DrYwWw/3+kyZ4SkP56l7KlGczTyNPvU9iou4Cj/cAZk/pHx68Chq8ZZNznFm/bIF
+ gWt3fqE/n+y78B6MI8qTjGJOR0jycxrLH82Z2F+FpMShI2C5NnOa/Ilkv3e2Q5U6
+ MOAwaCIz6RHhcI99O/yta2vLelWZqn2g86rLzTG0HlIABTCPYotwetHh0hsrkSv9
+ Kh6rOzGB4i8lRqcBVY+alMSiRBlzkwpL4YUcO6f3vEDncQ9evE1AQCpD4jUJjB1H
+ JSSJAmwEGAEIACAWIQTqP4uIlyqP1HSHLy8RWzrxqtPtugUCWc2B4AIbAgFACRAR
+ WzrxqtPtusB0IAQZAQgAHRYhBAUi3Sm5jxZ82EIXUuOP/K91q9kqBQJZzYHgAAoJ
+ EOOP/K91q9kqlHcH+wbvD14ITYDMfgIfy67O4Qcmgf1qzGXhpsABz/i/EPgRD990
+ eNBI0YGuvoKRJfetEGn7LerrrCB8Z+ICFPHFrzXoe10zm+QTREck0OB8nPFRycJ+
+ Fbl6JX+cnkEx27Mmg1aVb7+H5LMDaWO1KjLsg2wIDo/jrDfW7NoZzy4XTd7jFCOt
+ 4fftL/dhiujQmhLzugZXCxRISOVdmgilDJQoTz1sEm34ida98JFjdzSgkUvJ/pFT
+ Z21ThCNxlUf01Hr2Pdcg1e2/97sZocMFTY2JKwmiW2LG3B05/VrRtdvsCVj8G49c
+ oxgPPKty+m71ovAXdS/CvVqE7TefCplsYJ1dV3abwwf/Xl2SxzbAKbrYMgZfdGzp
+ Pg2u6982WvfGIVfjFJh9veHZAbfkPcjdAD2Xe67Y4BeKG2OQQqeOY2y+81A7Paeh
+ gHzbFHJG/4RjqB66efrZAg4DgIdbr4oyMoUJVVsl0wfYSIvnd4kvWXYICVwk53HL
+ A3wIowZAsJ1LT2byAKbUzayLzTekrTzGcwQkg2XT798ev2QuR5Ki5x8MULBFX4Lh
+ d03+uGOxjhNPQD6DAAHCQLaXQhaGuyMgt5hDt0nF3yuw3Eg4Ygcbtm24rZXOHJc9
+ bDKeRiWZz1dIyYYVJmHSQwOVXkAwJlF1sIgy/jQYeOgFDKq20x86WulkvsUtBoaZ
+ Jg==
+ =TKlF
+ -----END PGP PRIVATE KEY BLOCK-----
+ SECRET
+ end
+
+ def fingerprint
+ 'EA3F8B88972A8FD474872F2F115B3AF1AAD3EDBA'
+ end
+
+ def subkey_fingerprints
+ %w(159AD5DDF199591D67D2B87AA3CEC5C0A7C270EC 0522DD29B98F167CD8421752E38FFCAF75ABD92A)
+ end
+
+ def names
+ ['John Doe']
+ end
+
+ def emails
+ ['john.doe@example.com']
+ end
+ end
+
+ # GPG Key containing just the main key
+ module User4
+ extend self
+
+ def public_key
+ <<~KEY.strip
+ -----BEGIN PGP PUBLIC KEY BLOCK-----
+
+ mQENBFnWcesBCAC6Y8FXl9ZJ9HPa6dIYcgQrvjIQcwoQCUEsaXNRpc+206RPCIXK
+ aIYr0nTD8GeovMuUONXTj+DdueQU2GAAqHHOqvDDVXqRrW3xfWnSwix7sTuhG1Ew
+ PLHYmjLENqaTsdyliEo3N8VWy2k0QRbC3R6xvop4Ooa87D5vcATIl0gYFtSiHIL+
+ TervYvTG9Eq1qSLZHbe2x4IzeqX2luikPKokL7j8FTZaCmC5MezIUur1ulfyYY/j
+ SkST/1aUFc5QXJJSZA0MYJWZX6x7Y3l7yl0dkHqmK8OTuo8RPWd3ybEiuvRsOL8K
+ GAv/PmVJRGDAf7GGbwXXsE9MiZ5GzVPxHnexABEBAAG0G0pvaG4gRG9lIDxqb2hu
+ QGV4YW1wbGUuY29tPokBTgQTAQgAOBYhBAh0izYM0lwuzJnVlAcBbPnhOj+bBQJZ
+ 1nHrAhsDBQsJCAcCBhUICQoLAgQWAgMBAh4BAheAAAoJEAcBbPnhOj+bkywH/i4w
+ OwpDxoTjUQlPlqGAGuzvWaPzSJndawgmMTr68oRsD+wlQmQQTR5eqxCpUIyV4aYb
+ D697RYzoqbT4mlU49ymzfKSAxFe88r1XQWdm81DcofHVPmw2GBrIqaX3Du4Z7xkI
+ Q9/S43orwknh5FoVwU8Nau7qBuv9vbw2apSkuA1oBj3spQ8hqwLavACyQ+fQloAT
+ hSDNqPiCZj6L0dwM1HYiqVoN3Q7qjgzzeBzlXzljJoWblhxllvMK20bVoa7H+uR2
+ lczFHfsX8VTIMjyTGP7R3oHN91DEahlQybVVNLmNSDKZM2P/0d28BRUmWxQJ4Ws3
+ J4hOWDKnLMed3VOIWzM=
+ =xVuW
+ -----END PGP PUBLIC KEY BLOCK-----
+ KEY
+ end
+
+ def secret_key
+ <<~KEY.strip
+ -----BEGIN PGP PRIVATE KEY BLOCK-----
+
+ lQPGBFnWcesBCAC6Y8FXl9ZJ9HPa6dIYcgQrvjIQcwoQCUEsaXNRpc+206RPCIXK
+ aIYr0nTD8GeovMuUONXTj+DdueQU2GAAqHHOqvDDVXqRrW3xfWnSwix7sTuhG1Ew
+ PLHYmjLENqaTsdyliEo3N8VWy2k0QRbC3R6xvop4Ooa87D5vcATIl0gYFtSiHIL+
+ TervYvTG9Eq1qSLZHbe2x4IzeqX2luikPKokL7j8FTZaCmC5MezIUur1ulfyYY/j
+ SkST/1aUFc5QXJJSZA0MYJWZX6x7Y3l7yl0dkHqmK8OTuo8RPWd3ybEiuvRsOL8K
+ GAv/PmVJRGDAf7GGbwXXsE9MiZ5GzVPxHnexABEBAAH+BwMC4UwgHgH5Cp7meY39
+ G5Q3GV2xtwADoaAvlOvPOLPK2fQqxQfb4WN4eZECp2wQuMRBMj52c4i9yphab1mQ
+ vOzoPIRGvkcJoxG++OxQ0kRk0C0gX6wM6SGVdb1nQnfZnoJCCU3IwCaSGktkLDs1
+ jwdI+VmXJbSugUbd25bakHQcE2BaNHuRBlQWQfFbhGBy0+uMfNDBZ6FRipBu47hO
+ f/wm/xXuV8N8BSgvNR/qtAqSQI34CdsnWAhMYm9rqmTNyt0nq4dveX+E0YzVn4lH
+ lOEa7cpYeuBwIL8L3EvSPNCICiJlF3gVqiYzyqRElnCkv1OGc0x3W5onY/agHgGZ
+ KYyi/ubOdqqDgBR+eMt0JKSGH2EPxUAGFPY5F37u4erdxH86GzIinAExLSmADiVR
+ KtxluZP6S2KLbETN5uVbrfa+HVcMbbUZaBHHtL+YbY8PqaFUIvIUR1HM2SK7IrFw
+ KuQ8ibRgooyP7VgMNiPzlFpY4NXUv+FXIrNJ6ELuIaENi0izJ7aIbVBM8SijDz6u
+ 5EEmodnDvmU2hmQNZJ17TxggE7oeT0rKdDGHM5zBvqZ3deqE9sgKx/aTKcj61ID3
+ M80ZkHPDFazUCohLpYgFN20bYYSmxU4LeNFy8YEiuic8QQKaAFxSf9Lf87UFQwyF
+ dduI1RWEbjMsbEJXwlmGM02ssQHsgoVKwZxijq5A5R1Ul6LowazQ8obPiwRS4NZ4
+ Z+QKDon79MMXiFEeh1jeG/MKKWPxFg3pdtCWhC7WdH4hfkBsCVKf+T58yB2Gzziy
+ fOHvAl7v3PtdZgf1xikF8spGYGCWo4B2lxC79xIflKAb2U6myb5I4dpUYxzxoMxT
+ zxHwxEie3NxzZGUyXSt3LqYe2r4CxWnOCXWjIxxRlLue1BE5Za1ycnDRjgUO24+Z
+ uDQne6KLkhAotBtKb2huIERvZSA8am9obkBleGFtcGxlLmNvbT6JAU4EEwEIADgW
+ IQQIdIs2DNJcLsyZ1ZQHAWz54To/mwUCWdZx6wIbAwULCQgHAgYVCAkKCwIEFgID
+ AQIeAQIXgAAKCRAHAWz54To/m5MsB/4uMDsKQ8aE41EJT5ahgBrs71mj80iZ3WsI
+ JjE6+vKEbA/sJUJkEE0eXqsQqVCMleGmGw+ve0WM6Km0+JpVOPcps3ykgMRXvPK9
+ V0FnZvNQ3KHx1T5sNhgayKml9w7uGe8ZCEPf0uN6K8JJ4eRaFcFPDWru6gbr/b28
+ NmqUpLgNaAY97KUPIasC2rwAskPn0JaAE4Ugzaj4gmY+i9HcDNR2IqlaDd0O6o4M
+ 83gc5V85YyaFm5YcZZbzCttG1aGux/rkdpXMxR37F/FUyDI8kxj+0d6BzfdQxGoZ
+ UMm1VTS5jUgymTNj/9HdvAUVJlsUCeFrNyeITlgypyzHnd1TiFsz
+ =/37z
+ -----END PGP PRIVATE KEY BLOCK-----
+ KEY
+ end
+
+ def primary_keyid
+ fingerprint[-16..-1]
+ end
+
+ def fingerprint
+ '08748B360CD25C2ECC99D59407016CF9E13A3F9B'
+ end
+ end
end
diff --git a/spec/support/helpers/merge_request_diff_helpers.rb b/spec/support/helpers/merge_request_diff_helpers.rb
new file mode 100644
index 00000000000..fd22e384b1b
--- /dev/null
+++ b/spec/support/helpers/merge_request_diff_helpers.rb
@@ -0,0 +1,28 @@
+module MergeRequestDiffHelpers
+ def click_diff_line(line_holder, diff_side = nil)
+ line = get_line_components(line_holder, diff_side)
+ line[:content].hover
+ line[:num].find('.add-diff-note').trigger('click')
+ end
+
+ def get_line_components(line_holder, diff_side = nil)
+ if diff_side.nil?
+ get_inline_line_components(line_holder)
+ else
+ get_parallel_line_components(line_holder, diff_side)
+ end
+ end
+
+ def get_inline_line_components(line_holder)
+ { content: line_holder.find('.line_content', match: :first), num: line_holder.find('.diff-line-num', match: :first) }
+ end
+
+ def get_parallel_line_components(line_holder, diff_side = nil)
+ side_index = diff_side == 'left' ? 0 : 1
+ # Wait for `.line_content`
+ line_holder.find('.line_content', match: :first)
+ # Wait for `.diff-line-num`
+ line_holder.find('.diff-line-num', match: :first)
+ { content: line_holder.all('.line_content')[side_index], num: line_holder.all('.diff-line-num')[side_index] }
+ end
+end
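A hypothetical feature spec sketch using the helper above; the merge_request and sample_commit values are assumed to be provided by the surrounding suite, mirroring the shared example elsewhere in this diff that calls click_diff_line the same way.

require 'spec_helper'

describe 'adding a note on a diff line', :js do
  include MergeRequestDiffHelpers

  it 'opens the inline note form' do
    # `merge_request` and `sample_commit` are assumed lets/helpers from the suite.
    visit(merge_request_path(merge_request))

    click_diff_line(find("[id='#{sample_commit.line_code}']"))

    expect(page).to have_selector('.js-discussion-note-form')
  end
end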
diff --git a/spec/support/ldap_shared_examples.rb b/spec/support/ldap_shared_examples.rb
new file mode 100644
index 00000000000..52c34e78965
--- /dev/null
+++ b/spec/support/ldap_shared_examples.rb
@@ -0,0 +1,69 @@
+shared_examples_for 'normalizes a DN' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:test_description, :given, :expected) do
+ 'strips extraneous whitespace' | 'uid =John Smith , ou = People, dc= example,dc =com' | 'uid=john smith,ou=people,dc=example,dc=com'
+ 'strips extraneous whitespace for a DN with a single RDN' | 'uid = John Smith' | 'uid=john smith'
+ 'unescapes non-reserved, non-special Unicode characters' | 'uid = Sebasti\\c3\\a1n\\ C.\\20Smith, ou=People (aka. \\22humans\\") ,dc=example, dc=com' | 'uid=sebastián c. smith,ou=people (aka. \\"humans\\"),dc=example,dc=com'
+ 'downcases the whole string' | 'UID=John Smith,ou=People,dc=example,dc=com' | 'uid=john smith,ou=people,dc=example,dc=com'
+ 'for a null DN (empty string), returns empty string and does not error' | '' | ''
+ 'does not strip an escaped leading space in an attribute value' | 'uid=\\ John Smith,ou=People,dc=example,dc=com' | 'uid=\\ john smith,ou=people,dc=example,dc=com'
+ 'does not strip an escaped leading space in the last attribute value' | 'uid=\\ John Smith' | 'uid=\\ john smith'
+ 'does not strip an escaped trailing space in an attribute value' | 'uid=John Smith\\ ,ou=People,dc=example,dc=com' | 'uid=john smith\\ ,ou=people,dc=example,dc=com'
+ 'strips extraneous spaces after an escaped trailing space' | 'uid=John Smith\\ ,ou=People,dc=example,dc=com' | 'uid=john smith\\ ,ou=people,dc=example,dc=com'
+ 'strips extraneous spaces after an escaped trailing space at the end of the DN' | 'uid=John Smith,ou=People,dc=example,dc=com\\ ' | 'uid=john smith,ou=people,dc=example,dc=com\\ '
+ 'properly preserves escaped trailing space after unescaped trailing spaces' | 'uid=John Smith \\ ,ou=People,dc=example,dc=com' | 'uid=john smith \\ ,ou=people,dc=example,dc=com'
+ 'preserves multiple inner spaces in an attribute value' | 'uid=John Smith,ou=People,dc=example,dc=com' | 'uid=john smith,ou=people,dc=example,dc=com'
+ 'preserves inner spaces after an escaped space' | 'uid=John\\ Smith,ou=People,dc=example,dc=com' | 'uid=john smith,ou=people,dc=example,dc=com'
+ 'hex-escapes an escaped leading newline in an attribute value' | "uid=\\\nJohn Smith,ou=People,dc=example,dc=com" | "uid=\\0ajohn smith,ou=people,dc=example,dc=com"
+ 'hex-escapes and does not strip an escaped trailing newline in an attribute value' | "uid=John Smith\\\n,ou=People,dc=example,dc=com" | "uid=john smith\\0a,ou=people,dc=example,dc=com"
+ 'hex-escapes an unescaped leading newline (actually an invalid DN?)' | "uid=\nJohn Smith,ou=People,dc=example,dc=com" | "uid=\\0ajohn smith,ou=people,dc=example,dc=com"
+ 'strips an unescaped trailing newline (actually an invalid DN?)' | "uid=John Smith\n,ou=People,dc=example,dc=com" | "uid=john smith,ou=people,dc=example,dc=com"
+ 'does not strip if no extraneous whitespace' | 'uid=John Smith,ou=People,dc=example,dc=com' | 'uid=john smith,ou=people,dc=example,dc=com'
+ 'does not modify an escaped equal sign in an attribute value' | 'uid= foo \\= bar' | 'uid=foo \\= bar'
+ 'converts an escaped hex equal sign to an escaped equal sign in an attribute value' | 'uid= foo \\3D bar' | 'uid=foo \\= bar'
+ 'does not modify an escaped comma in an attribute value' | 'uid= John C. Smith, ou=San Francisco\\, CA' | 'uid=john c. smith,ou=san francisco\\, ca'
+ 'converts an escaped hex comma to an escaped comma in an attribute value' | 'uid= John C. Smith, ou=San Francisco\\2C CA' | 'uid=john c. smith,ou=san francisco\\, ca'
+ 'does not modify an escaped hex carriage return character in an attribute value' | 'uid= John C. Smith, ou=San Francisco\\,\\0DCA' | 'uid=john c. smith,ou=san francisco\\,\\0dca'
+ 'does not modify an escaped hex line feed character in an attribute value' | 'uid= John C. Smith, ou=San Francisco\\,\\0ACA' | 'uid=john c. smith,ou=san francisco\\,\\0aca'
+ 'does not modify an escaped hex CRLF in an attribute value' | 'uid= John C. Smith, ou=San Francisco\\,\\0D\\0ACA' | 'uid=john c. smith,ou=san francisco\\,\\0d\\0aca'
+ 'allows attribute type name OIDs' | '0.9.2342.19200300.100.1.25=Example,0.9.2342.19200300.100.1.25=Com' | '0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com'
+ 'strips extraneous whitespace from attribute type name OIDs' | '0.9.2342.19200300.100.1.25 = Example, 0.9.2342.19200300.100.1.25 = Com' | '0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com'
+ end
+
+ with_them do
+ it 'normalizes the DN' do
+ assert_generic_test(test_description, subject, expected)
+ end
+ end
+end
+
+shared_examples_for 'normalizes a DN attribute value' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:test_description, :given, :expected) do
+ 'strips extraneous whitespace' | ' John Smith ' | 'john smith'
+ 'unescapes non-reserved, non-special Unicode characters' | 'Sebasti\\c3\\a1n\\ C.\\20Smith' | 'sebastián c. smith'
+ 'downcases the whole string' | 'JoHn C. Smith' | 'john c. smith'
+ 'does not strip an escaped leading space in an attribute value' | '\\ John Smith' | '\\ john smith'
+ 'does not strip an escaped trailing space in an attribute value' | 'John Smith\\ ' | 'john smith\\ '
+ 'hex-escapes an escaped leading newline in an attribute value' | "\\\nJohn Smith" | "\\0ajohn smith"
+ 'hex-escapes and does not strip an escaped trailing newline in an attribute value' | "John Smith\\\n" | "john smith\\0a"
+ 'hex-escapes an unescaped leading newline (actually an invalid DN value?)' | "\nJohn Smith" | "\\0ajohn smith"
+ 'strips an unescaped trailing newline (actually an invalid DN value?)' | "John Smith\n" | "john smith"
+ 'does not strip if no extraneous whitespace' | 'John Smith' | 'john smith'
+ 'does not modify an escaped equal sign in an attribute value' | ' foo \\= bar' | 'foo \\= bar'
+ 'converts an escaped hex equal sign to an escaped equal sign in an attribute value' | ' foo \\3D bar' | 'foo \\= bar'
+ 'does not modify an escaped comma in an attribute value' | 'San Francisco\\, CA' | 'san francisco\\, ca'
+ 'converts an escaped hex comma to an escaped comma in an attribute value' | 'San Francisco\\2C CA' | 'san francisco\\, ca'
+ 'does not modify an escaped hex carriage return character in an attribute value' | 'San Francisco\\,\\0DCA' | 'san francisco\\,\\0dca'
+ 'does not modify an escaped hex line feed character in an attribute value' | 'San Francisco\\,\\0ACA' | 'san francisco\\,\\0aca'
+ 'does not modify an escaped hex CRLF in an attribute value' | 'San Francisco\\,\\0D\\0ACA' | 'san francisco\\,\\0d\\0aca'
+ end
+
+ with_them do
+ it 'normalizes the DN attribute value' do
+ assert_generic_test(test_description, subject, expected)
+ end
+ end
+end
diff --git a/spec/support/matchers/navigation_matcher.rb b/spec/support/matchers/navigation_matcher.rb
new file mode 100644
index 00000000000..63f59b9654c
--- /dev/null
+++ b/spec/support/matchers/navigation_matcher.rb
@@ -0,0 +1,12 @@
+RSpec::Matchers.define :have_active_navigation do |expected|
+ match do |page|
+ expect(page).to have_selector('.sidebar-top-level-items > li.active', count: 1)
+ expect(page.find('.sidebar-top-level-items > li.active')).to have_content(expected)
+ end
+end
+
+RSpec::Matchers.define :have_active_sub_navigation do |expected|
+ match do |page|
+ expect(page.find('.sidebar-sub-level-items > li.active:not(.fly-out-top-item)')).to have_content(expected)
+ end
+end
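A minimal sketch of the navigation matchers above in a feature spec; the visited page and the 'Overview'/'Details' labels are assumptions about the rendered sidebar and must match its actual text.

require 'spec_helper'

describe 'project sidebar navigation' do
  let(:project) { create(:project, :public) }

  it 'highlights exactly one top-level item and its sub item' do
    visit project_path(project)

    # Labels are illustrative; they need to match the sidebar contents.
    expect(page).to have_active_navigation('Overview')
    expect(page).to have_active_sub_navigation('Details')
  end
end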
diff --git a/spec/support/migrations_helpers.rb b/spec/support/migrations_helpers.rb
index 4ca019c1b05..6522d74ba89 100644
--- a/spec/support/migrations_helpers.rb
+++ b/spec/support/migrations_helpers.rb
@@ -16,7 +16,9 @@ module MigrationsHelpers
end
def reset_column_in_migration_models
- ActiveRecord::Base.clear_cache!
+ ActiveRecord::Base.connection_pool.connections.each do |conn|
+ conn.schema_cache.clear!
+ end
described_class.constants.sort.each do |name|
const = described_class.const_get(name)
diff --git a/spec/support/project_forks_helper.rb b/spec/support/project_forks_helper.rb
new file mode 100644
index 00000000000..0d1c6792d13
--- /dev/null
+++ b/spec/support/project_forks_helper.rb
@@ -0,0 +1,58 @@
+module ProjectForksHelper
+ def fork_project(project, user = nil, params = {})
+ # Load the `fork_network` of the project being forked, as there might be one
+ # that wasn't loaded yet.
+ project.reload unless project.fork_network
+
+ unless user
+ user = create(:user)
+ project.add_developer(user)
+ end
+
+ unless params[:namespace] || params[:namespace_id]
+ params[:namespace] = create(:group)
+ params[:namespace].add_owner(user)
+ end
+
+ service = Projects::ForkService.new(project, user, params)
+
+ create_repository = params.delete(:repository)
+ # Avoid creating a repository
+ unless create_repository
+ allow(RepositoryForkWorker).to receive(:perform_async).and_return(true)
+ shell = double('gitlab_shell', fork_repository: true)
+ allow(service).to receive(:gitlab_shell).and_return(shell)
+ end
+
+ forked_project = service.execute
+
+ # Reload both projects so they know about their newly created fork_network
+ if forked_project.persisted?
+ project.reload
+ forked_project.reload
+ end
+
+ if create_repository
+ # The call to project.repository.after_import in RepositoryForkWorker does
+ # not reset the @exists variable of this forked_project.repository,
+ # so we have to explicitly call this method to clear the @exists variable
+ # of the instance we're returning here.
+ forked_project.repository.after_import
+
+ # We can't leave the hooks in place after a fork, as those would fail in tests
+ # because the "internal" API is not available.
+ FileUtils.rm_rf("#{forked_project.repository.path}/hooks")
+ end
+
+ forked_project
+ end
+
+ def fork_project_with_submodules(project, user = nil, params = {})
+ forked_project = fork_project(project, user, params)
+ TestEnv.copy_repo(forked_project,
+ bare_repo: TestEnv.forked_repo_path_bare,
+ refs: TestEnv::FORKED_BRANCH_SHA)
+
+ forked_project
+ end
+end
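A minimal sketch showing how the helper above is typically used in a spec; by default no repository is created on disk, which keeps the example fast. The factory traits used here are illustrative.

require 'spec_helper'

describe 'fork_project helper' do
  include ProjectForksHelper

  let(:project) { create(:project, :public) }

  it 'creates a fork that shares the fork network of its source' do
    forked = fork_project(project, create(:user))

    expect(forked.forked_from_project).to eq(project)
    expect(forked.fork_network).to eq(project.reload.fork_network)
  end
end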
diff --git a/spec/support/query_recorder.rb b/spec/support/query_recorder.rb
index 55b531b4cf7..ba0b805caad 100644
--- a/spec/support/query_recorder.rb
+++ b/spec/support/query_recorder.rb
@@ -34,15 +34,47 @@ RSpec::Matchers.define :exceed_query_limit do |expected|
supports_block_expectations
match do |block|
- query_count(&block) > expected
+ query_count(&block) > expected_count + threshold
end
failure_message_when_negated do |actual|
- "Expected a maximum of #{expected} queries, got #{@recorder.count}:\n\n#{@recorder.log_message}"
+ threshold_message = threshold > 0 ? " (+#{@threshold})" : ''
+ counts = "#{expected_count}#{threshold_message}"
+ "Expected a maximum of #{counts} queries, got #{actual_count}:\n\n#{log_message}"
+ end
+
+ def with_threshold(threshold)
+ @threshold = threshold
+ self
+ end
+
+ def threshold
+ @threshold.to_i
+ end
+
+ def expected_count
+ if expected.is_a?(ActiveRecord::QueryRecorder)
+ expected.count
+ else
+ expected
+ end
+ end
+
+ def actual_count
+ @recorder.count
end
def query_count(&block)
@recorder = ActiveRecord::QueryRecorder.new(&block)
@recorder.count
end
+
+ def log_message
+ if expected.is_a?(ActiveRecord::QueryRecorder)
+ extra_queries = (expected.log - @recorder.log).join("\n\n")
+ "Extra queries: \n\n #{extra_queries}"
+ else
+ @recorder.log_message
+ end
+ end
end
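A minimal sketch of the extended exceed_query_limit matcher; both forms shown — a plain integer limit and a recorded control run — follow directly from the definitions above, and the query counts are purely illustrative.

require 'spec_helper'

describe 'query budgets' do
  it 'allows a small drift above a recorded control' do
    control = ActiveRecord::QueryRecorder.new { Project.count }

    # Fails only if the block issues more than control.count + 2 queries.
    expect { Project.count }.not_to exceed_query_limit(control).with_threshold(2)
  end

  it 'also accepts a plain integer limit' do
    expect { Project.count }.not_to exceed_query_limit(5)
  end
end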
diff --git a/spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb b/spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb
new file mode 100644
index 00000000000..221926aaf7e
--- /dev/null
+++ b/spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb
@@ -0,0 +1,28 @@
+shared_examples 'comment on merge request file' do
+ it 'adds a comment' do
+ click_diff_line(find("[id='#{sample_commit.line_code}']"))
+
+ page.within('.js-discussion-note-form') do
+ fill_in(:note_note, with: 'Line is wrong')
+ click_button('Comment')
+ end
+
+ wait_for_requests
+
+ page.within('.notes_holder') do
+ expect(page).to have_content('Line is wrong')
+ end
+
+ visit(merge_request_path(merge_request))
+
+ page.within('.notes .discussion') do
+ expect(page).to have_content("#{user.name} #{user.to_reference} started a discussion")
+ expect(page).to have_content(sample_commit.line_code_path)
+ expect(page).to have_content('Line is wrong')
+ end
+
+ page.within('.notes-tab .badge') do
+ expect(page).to have_content('1')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb b/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb
new file mode 100644
index 00000000000..c92c7f603d6
--- /dev/null
+++ b/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb
@@ -0,0 +1,21 @@
+shared_examples 'issuable user dropdown behaviors' do
+ include FilteredSearchHelpers
+
+ before do
+ issuable # ensure we have at least one issuable
+ sign_in(user_in_dropdown)
+ end
+
+ %w[author assignee].each do |dropdown|
+ describe "#{dropdown} dropdown", :js do
+ it 'only includes members of the project/group' do
+ visit issuables_path
+
+ filtered_search.set("#{dropdown}:")
+
+ expect(find("#js-dropdown-#{dropdown} .filter-dropdown")).to have_content(user_in_dropdown.name)
+ expect(find("#js-dropdown-#{dropdown} .filter-dropdown")).not_to have_content(user_not_in_dropdown.name)
+ end
+ end
+ end
+end
diff --git a/spec/support/project_features_apply_to_issuables_shared_examples.rb b/spec/support/shared_examples/features/project_features_apply_to_issuables_shared_examples.rb
index 639b0924197..639b0924197 100644
--- a/spec/support/project_features_apply_to_issuables_shared_examples.rb
+++ b/spec/support/shared_examples/features/project_features_apply_to_issuables_shared_examples.rb
diff --git a/spec/support/shared_examples/features/search_shared_examples.rb b/spec/support/shared_examples/features/search_shared_examples.rb
new file mode 100644
index 00000000000..25ebbf011d5
--- /dev/null
+++ b/spec/support/shared_examples/features/search_shared_examples.rb
@@ -0,0 +1,5 @@
+shared_examples 'top right search form' do
+ it 'does not show top right search form' do
+ expect(page).not_to have_selector('.search')
+ end
+end
diff --git a/spec/support/shared_examples/position_formatters.rb b/spec/support/shared_examples/position_formatters.rb
new file mode 100644
index 00000000000..ffc9456dbc7
--- /dev/null
+++ b/spec/support/shared_examples/position_formatters.rb
@@ -0,0 +1,43 @@
+shared_examples_for "position formatter" do
+ let(:formatter) { described_class.new(attrs) }
+
+ describe '#key' do
+ let(:key) { [123, 456, 789, Digest::SHA1.hexdigest(formatter.old_path), Digest::SHA1.hexdigest(formatter.new_path), 1, 2] }
+
+ subject { formatter.key }
+
+ it { is_expected.to eq(key) }
+ end
+
+ describe '#complete?' do
+ subject { formatter.complete? }
+
+ context 'when there are missing key attributes' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when old_line and new_line are nil' do
+ let(:attrs) { base_attrs }
+
+ it { is_expected.to be_falsy }
+ end
+ end
+
+ describe '#to_h' do
+ let(:formatter_hash) do
+ attrs.merge(position_type: base_attrs[:position_type] || 'text')
+ end
+
+ subject { formatter.to_h }
+
+ it { is_expected.to eq(formatter_hash) }
+ end
+
+ describe '#==' do
+ subject { formatter }
+
+ let(:other_formatter) { described_class.new(attrs) }
+
+ it { is_expected.to eq(other_formatter) }
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb b/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb
new file mode 100644
index 00000000000..c9302f7b750
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb
@@ -0,0 +1,103 @@
+shared_examples 'custom attributes endpoints' do |attributable_name|
+ let!(:custom_attribute1) { attributable.custom_attributes.create key: 'foo', value: 'foo' }
+ let!(:custom_attribute2) { attributable.custom_attributes.create key: 'bar', value: 'bar' }
+
+ describe "GET /#{attributable_name} with custom attributes filter" do
+ let!(:other_attributable) { create attributable.class.name.underscore }
+
+ context 'with an unauthorized user' do
+ it 'does not filter by custom attributes' do
+ get api("/#{attributable_name}", user), custom_attributes: { foo: 'foo', bar: 'bar' }
+
+ expect(response).to have_http_status(200)
+ expect(json_response.size).to be 2
+ end
+ end
+
+ it 'filters by custom attributes' do
+ get api("/#{attributable_name}", admin), custom_attributes: { foo: 'foo', bar: 'bar' }
+
+ expect(response).to have_http_status(200)
+ expect(json_response.size).to be 1
+ expect(json_response.first['id']).to eq attributable.id
+ end
+ end
+
+ describe "GET /#{attributable_name}/:id/custom_attributes" do
+ context 'with an unauthorized user' do
+ subject { get api("/#{attributable_name}/#{attributable.id}/custom_attributes", user) }
+
+ it_behaves_like 'an unauthorized API user'
+ end
+
+ it 'returns all custom attributes' do
+ get api("/#{attributable_name}/#{attributable.id}/custom_attributes", admin)
+
+ expect(response).to have_http_status(200)
+ expect(json_response).to contain_exactly(
+ { 'key' => 'foo', 'value' => 'foo' },
+ { 'key' => 'bar', 'value' => 'bar' }
+ )
+ end
+ end
+
+ describe "GET /#{attributable_name}/:id/custom_attributes/:key" do
+ context 'with an unauthorized user' do
+ subject { get api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", user) }
+
+ it_behaves_like 'an unauthorized API user'
+ end
+
+ it 'returns a single custom attribute' do
+ get api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
+
+ expect(response).to have_http_status(200)
+ expect(json_response).to eq({ 'key' => 'foo', 'value' => 'foo' })
+ end
+ end
+
+ describe "PUT /#{attributable_name}/:id/custom_attributes/:key" do
+ context 'with an unauthorized user' do
+ subject { put api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", user), value: 'new' }
+
+ it_behaves_like 'an unauthorized API user'
+ end
+
+ it 'creates a new custom attribute' do
+ expect do
+ put api("/#{attributable_name}/#{attributable.id}/custom_attributes/new", admin), value: 'new'
+ end.to change { attributable.custom_attributes.count }.by(1)
+
+ expect(response).to have_http_status(200)
+ expect(json_response).to eq({ 'key' => 'new', 'value' => 'new' })
+ expect(attributable.custom_attributes.find_by(key: 'new').value).to eq 'new'
+ end
+
+ it 'updates an existing custom attribute' do
+ expect do
+ put api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin), value: 'new'
+ end.not_to change { attributable.custom_attributes.count }
+
+ expect(response).to have_http_status(200)
+ expect(json_response).to eq({ 'key' => 'foo', 'value' => 'new' })
+ expect(custom_attribute1.reload.value).to eq 'new'
+ end
+ end
+
+ describe "DELETE /#{attributable_name}/:id/custom_attributes/:key" do
+ context 'with an unauthorized user' do
+ subject { delete api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", user) }
+
+ it_behaves_like 'an unauthorized API user'
+ end
+
+ it 'deletes an existing custom attribute' do
+ expect do
+ delete api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
+ end.to change { attributable.custom_attributes.count }.by(-1)
+
+ expect(response).to have_http_status(204)
+ expect(attributable.custom_attributes.find_by(key: 'foo')).to be_nil
+ end
+ end
+end
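A hypothetical request spec sketch wiring the shared examples above into an endpoint; the attributable, user, and admin definitions the examples rely on are supplied via let, and the 'an unauthorized API user' shared example is assumed to be defined elsewhere in the suite.

require 'spec_helper'

describe API::Users do
  let(:user)  { create(:user) }
  let(:admin) { create(:admin) }

  context 'custom attributes endpoints' do
    let(:attributable) { create(:user) }

    it_behaves_like 'custom attributes endpoints', 'users'
  end
end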
diff --git a/spec/support/stub_configuration.rb b/spec/support/stub_configuration.rb
index 45c10e78789..2dfb4d4a07f 100644
--- a/spec/support/stub_configuration.rb
+++ b/spec/support/stub_configuration.rb
@@ -42,7 +42,7 @@ module StubConfiguration
# Default storage is always required
messages['default'] ||= Gitlab.config.repositories.storages.default
messages.each do |storage_name, storage_settings|
- storage_settings['path'] ||= TestEnv.repos_path
+ storage_settings['path'] = TestEnv.repos_path unless storage_settings.key?('path')
storage_settings['failure_count_threshold'] ||= 10
storage_settings['failure_wait_time'] ||= 30
storage_settings['failure_reset_time'] ||= 1800
diff --git a/spec/support/stub_gitlab_calls.rb b/spec/support/stub_gitlab_calls.rb
index 78a2ff73746..5f22d886910 100644
--- a/spec/support/stub_gitlab_calls.rb
+++ b/spec/support/stub_gitlab_calls.rb
@@ -39,11 +39,11 @@ module StubGitlabCalls
.and_return({ 'tags' => tags })
allow_any_instance_of(ContainerRegistry::Client)
- .to receive(:repository_manifest).with(repository)
+ .to receive(:repository_manifest).with(repository, anything)
.and_return(stub_container_registry_tag_manifest)
allow_any_instance_of(ContainerRegistry::Client)
- .to receive(:blob).with(repository)
+ .to receive(:blob).with(repository, anything, 'application/octet-stream')
.and_return(stub_container_registry_blob)
end
diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb
index 71b9deeabc3..a27bfdee3d2 100644
--- a/spec/support/test_env.rb
+++ b/spec/support/test_env.rb
@@ -3,6 +3,8 @@ require 'rspec/mocks'
module TestEnv
extend self
+ ComponentFailedToInstallError = Class.new(StandardError)
+
# When developing the seed repository, comment out the branch you will modify.
BRANCH_SHA = {
'signed-commits' => '2d1096e',
@@ -15,6 +17,7 @@ module TestEnv
'feature_conflict' => 'bb5206f',
'fix' => '48f0be4',
'improve/awesome' => '5937ac0',
+ 'merged-target' => '21751bf',
'markdown' => '0ed8c6c',
'lfs' => 'be93687',
'master' => 'b83d6e3',
@@ -43,7 +46,8 @@ module TestEnv
'v1.1.0' => 'b83d6e3',
'add-ipython-files' => '93ee732',
'add-pdf-file' => 'e774ebd',
- 'add-pdf-text-binary' => '79faa7b'
+ 'add-pdf-text-binary' => '79faa7b',
+ 'add_images_and_changes' => '010d106'
}.freeze
# gitlab-test-fork is a fork of gitlab-fork, but we don't necessarily
@@ -63,6 +67,11 @@ module TestEnv
# See gitlab.yml.example test section for paths
#
def init(opts = {})
+ unless Rails.env.test?
+ puts "\nTestEnv.init can only be run if `RAILS_ENV` is set to 'test' not '#{Rails.env}'!\n"
+ exit 1
+ end
+
# Disable mailer for spinach tests
disable_mailer if opts[:mailer] == false
@@ -122,50 +131,23 @@ module TestEnv
end
def setup_gitlab_shell
- puts "\n==> Setting up Gitlab Shell..."
- start = Time.now
- gitlab_shell_dir = Gitlab.config.gitlab_shell.path
- shell_needs_update = component_needs_update?(gitlab_shell_dir,
- Gitlab::Shell.version_required)
-
- unless !shell_needs_update || system('rake', 'gitlab:shell:install')
- puts "\nGitLab Shell failed to install, cleaning up #{gitlab_shell_dir}!\n"
- FileUtils.rm_rf(gitlab_shell_dir)
- exit 1
- end
-
- puts " GitLab Shell setup in #{Time.now - start} seconds...\n"
+ component_timed_setup('GitLab Shell',
+ install_dir: Gitlab.config.gitlab_shell.path,
+ version: Gitlab::Shell.version_required,
+ task: 'gitlab:shell:install')
end
def setup_gitaly
- puts "\n==> Setting up Gitaly..."
- start = Time.now
socket_path = Gitlab::GitalyClient.address('default').sub(/\Aunix:/, '')
gitaly_dir = File.dirname(socket_path)
- if gitaly_dir_stale?(gitaly_dir)
- puts " Gitaly is outdated, cleaning up #{gitaly_dir}!"
- FileUtils.rm_rf(gitaly_dir)
- end
-
- gitaly_needs_update = component_needs_update?(gitaly_dir,
- Gitlab::GitalyClient.expected_server_version)
+ component_timed_setup('Gitaly',
+ install_dir: gitaly_dir,
+ version: Gitlab::GitalyClient.expected_server_version,
+ task: "gitlab:gitaly:install[#{gitaly_dir}]") do
- unless !gitaly_needs_update || system('rake', "gitlab:gitaly:install[#{gitaly_dir}]")
- puts "\nGitaly failed to install, cleaning up #{gitaly_dir}!\n"
- FileUtils.rm_rf(gitaly_dir)
- exit 1
+ start_gitaly(gitaly_dir)
end
-
- start_gitaly(gitaly_dir)
- puts " Gitaly setup in #{Time.now - start} seconds...\n"
- end
-
- def gitaly_dir_stale?(dir)
- gitaly_executable = File.join(dir, 'gitaly')
- return false unless File.exist?(gitaly_executable)
-
- File.mtime(gitaly_executable) < File.mtime(Rails.root.join('GITALY_SERVER_VERSION'))
end
def start_gitaly(gitaly_dir)
@@ -320,6 +302,43 @@ module TestEnv
end
end
+ def component_timed_setup(component, install_dir:, version:, task:)
+ puts "\n==> Setting up #{component}..."
+ start = Time.now
+
+ ensure_component_dir_name_is_correct!(component, install_dir)
+
+ # On CI, once installed, components never need to be updated
+ return if File.exist?(install_dir) && ENV['CI']
+
+ if component_needs_update?(install_dir, version)
+ # Clean up the component entirely to ensure we start fresh
+ FileUtils.rm_rf(install_dir)
+ unless system('rake', task)
+ raise ComponentFailedToInstallError
+ end
+ end
+
+ yield if block_given?
+
+ rescue ComponentFailedToInstallError
+ puts "\n#{component} failed to install, cleaning up #{install_dir}!\n"
+ FileUtils.rm_rf(install_dir)
+ exit 1
+ ensure
+ puts " #{component} setup in #{Time.now - start} seconds...\n"
+ end
+
+ def ensure_component_dir_name_is_correct!(component, path)
+ actual_component_dir_name = File.basename(path)
+ expected_component_dir_name = component.parameterize
+
+ unless actual_component_dir_name == expected_component_dir_name
+ puts " #{component} install dir should be named '#{expected_component_dir_name}', not '#{actual_component_dir_name}' (full install path given was '#{path}')!\n"
+ exit 1
+ end
+ end
+
def component_needs_update?(component_folder, expected_version)
version = File.read(File.join(component_folder, 'VERSION')).strip
diff --git a/spec/support/update_invalid_issuable.rb b/spec/support/update_invalid_issuable.rb
index 1490287681b..50a1d4a56e2 100644
--- a/spec/support/update_invalid_issuable.rb
+++ b/spec/support/update_invalid_issuable.rb
@@ -25,11 +25,13 @@ shared_examples 'update invalid issuable' do |klass|
.and_raise(ActiveRecord::StaleObjectError.new(issuable, :save))
end
- it 'renders edit when format is html' do
- put :update, params
+ if klass == MergeRequest
+ it 'renders edit when format is html' do
+ put :update, params
- expect(response).to render_template(:edit)
- expect(assigns[:conflict]).to be_truthy
+ expect(response).to render_template(:edit)
+ expect(assigns[:conflict]).to be_truthy
+ end
end
it 'renders json error message when format is json' do
@@ -42,16 +44,17 @@ shared_examples 'update invalid issuable' do |klass|
end
end
- context 'when updating an invalid issuable' do
- before do
- key = klass == Issue ? :issue : :merge_request
- params[key][:title] = ""
- end
+ if klass == MergeRequest
+ context 'when updating an invalid issuable' do
+ before do
+ params[:merge_request][:title] = ""
+ end
- it 'renders edit when merge request is invalid' do
- put :update, params
+ it 'renders edit when merge request is invalid' do
+ put :update, params
- expect(response).to render_template(:edit)
+ expect(response).to render_template(:edit)
+ end
end
end
end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index 0c8c8a2ab05..886052d7848 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -224,17 +224,20 @@ describe 'gitlab:app namespace rake task' do
end
context 'multiple repository storages' do
+ let(:gitaly_address) { Gitlab.config.repositories.storages.default.gitaly_address }
+ let(:storages) do
+ {
+ 'default' => { 'path' => Settings.absolute('tmp/tests/default_storage'), 'gitaly_address' => gitaly_address },
+ 'test_second_storage' => { 'path' => Settings.absolute('tmp/tests/custom_storage'), 'gitaly_address' => gitaly_address }
+ }
+ end
+
let(:project_a) { create(:project, :repository, repository_storage: 'default') }
- let(:project_b) { create(:project, :repository, repository_storage: 'custom') }
+ let(:project_b) { create(:project, :repository, repository_storage: 'test_second_storage') }
before do
FileUtils.mkdir('tmp/tests/default_storage')
FileUtils.mkdir('tmp/tests/custom_storage')
- gitaly_address = Gitlab.config.repositories.storages.default.gitaly_address
- storages = {
- 'default' => { 'path' => Settings.absolute('tmp/tests/default_storage'), 'gitaly_address' => gitaly_address },
- 'custom' => { 'path' => Settings.absolute('tmp/tests/custom_storage'), 'gitaly_address' => gitaly_address }
- }
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
# Create the projects now, after mocking the settings but before doing the backup
@@ -253,7 +256,7 @@ describe 'gitlab:app namespace rake task' do
after do
FileUtils.rm_rf('tmp/tests/default_storage')
FileUtils.rm_rf('tmp/tests/custom_storage')
- FileUtils.rm(@backup_tar)
+ FileUtils.rm(@backup_tar) if @backup_tar
end
it 'includes repositories in all repository storages' do
diff --git a/spec/tasks/gitlab/storage_rake_spec.rb b/spec/tasks/gitlab/storage_rake_spec.rb
new file mode 100644
index 00000000000..f59792c3d36
--- /dev/null
+++ b/spec/tasks/gitlab/storage_rake_spec.rb
@@ -0,0 +1,52 @@
+require 'rake_helper'
+
+describe 'gitlab:storage rake tasks' do
+ before do
+ Rake.application.rake_require 'tasks/gitlab/storage'
+
+ stub_warn_user_is_not_gitlab
+ end
+
+ describe 'migrate_to_hashed rake task' do
+ context '0 legacy projects' do
+ it 'does nothing' do
+ expect(StorageMigratorWorker).not_to receive(:perform_async)
+
+ run_rake_task('gitlab:storage:migrate_to_hashed')
+ end
+ end
+
+ context '5 legacy projects' do
+ let(:projects) { create_list(:project, 5, storage_version: 0) }
+
+ context 'in batches of 1' do
+ before do
+ stub_env('BATCH' => 1)
+ end
+
+ it 'enqueues one StorageMigratorWorker per project' do
+ projects.each do |project|
+ expect(StorageMigratorWorker).to receive(:perform_async).with(project.id, project.id)
+ end
+
+ run_rake_task('gitlab:storage:migrate_to_hashed')
+ end
+ end
+
+ context 'in batches of 2' do
+ before do
+ stub_env('BATCH' => 2)
+ end
+
+ it 'enqueues one StorageMigratorWorker per batch of 2 projects' do
+ projects.map(&:id).sort.each_slice(2) do |first, last|
+ last ||= first
+ expect(StorageMigratorWorker).to receive(:perform_async).with(first, last)
+ end
+
+ run_rake_task('gitlab:storage:migrate_to_hashed')
+ end
+ end
+ end
+ end
+end
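For context, a rough sketch of the batching behaviour these examples exercise, assuming the rake task walks legacy projects in BATCH-sized ID ranges and hands each range to StorageMigratorWorker; the legacy-project scope and the batching calls are guesses, not the actual task body.

# Sketch only: enqueue one StorageMigratorWorker per batch of legacy project IDs.
batch_size = ENV.fetch('BATCH', 100).to_i

Project.where(storage_version: [nil, 0]).order(:id).pluck(:id).each_slice(batch_size) do |ids|
  StorageMigratorWorker.perform_async(ids.min, ids.max)
end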
diff --git a/spec/tasks/gitlab/task_helpers_spec.rb b/spec/tasks/gitlab/task_helpers_spec.rb
index d34617be474..fae5ec35c47 100644
--- a/spec/tasks/gitlab/task_helpers_spec.rb
+++ b/spec/tasks/gitlab/task_helpers_spec.rb
@@ -75,4 +75,24 @@ describe Gitlab::TaskHelpers do
subject.checkout_version(tag, clone_path)
end
end
+
+ describe '#run_command' do
+ it 'runs the command and returns its output' do
+ expect(subject.run_command(%w(echo it works!))).to eq("it works!\n")
+ end
+
+ it 'returns an empty string when the command does not exist' do
+ expect(subject.run_command(%w(nonexistentcommand with arguments))).to eq('')
+ end
+ end
+
+ describe '#run_command!' do
+ it 'runs the command and returns its output' do
+ expect(subject.run_command!(%w(echo it works!))).to eq("it works!\n")
+ end
+
+ it 'raises an exception when the command exits with a non-zero code' do
+ expect { subject.run_command!(['bash', '-c', 'exit 1']) }.to raise_error Gitlab::TaskFailedError
+ end
+ end
end
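A hedged sketch of what run_command and run_command! might look like, consistent with these expectations (an empty string for a missing command, Gitlab::TaskFailedError on a non-zero exit). It is written against Ruby's Open3 here, which is not necessarily the real implementation.

require 'open3'

def run_command(command)
  # Capture stdout and stderr together; a missing binary raises Errno::ENOENT.
  Open3.capture2e(*command).first
rescue Errno::ENOENT
  ''
end

def run_command!(command)
  output, status = Open3.capture2e(*command)
  raise Gitlab::TaskFailedError unless status.success?

  output
end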
diff --git a/spec/views/ci/lints/show.html.haml_spec.rb b/spec/views/ci/lints/show.html.haml_spec.rb
index f2c19c7642a..7724d54c569 100644
--- a/spec/views/ci/lints/show.html.haml_spec.rb
+++ b/spec/views/ci/lints/show.html.haml_spec.rb
@@ -4,7 +4,7 @@ describe 'ci/lints/show' do
include Devise::Test::ControllerHelpers
describe 'XSS protection' do
- let(:config_processor) { Ci::GitlabCiYamlProcessor.new(YAML.dump(content)) }
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) }
before do
assign(:status, true)
assign(:builds, config_processor.builds)
@@ -59,7 +59,7 @@ describe 'ci/lints/show' do
}
end
- let(:config_processor) { Ci::GitlabCiYamlProcessor.new(YAML.dump(content)) }
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) }
context 'when the content is valid' do
before do
diff --git a/spec/views/dashboard/projects/_nav.html.haml.rb b/spec/views/dashboard/projects/_nav.html.haml.rb
new file mode 100644
index 00000000000..f6a8ca13040
--- /dev/null
+++ b/spec/views/dashboard/projects/_nav.html.haml.rb
@@ -0,0 +1,17 @@
+require 'spec_helper'
+
+describe 'dashboard/projects/_nav.html.haml' do
+ it 'highlights All tab by default' do
+ render
+
+ expect(rendered).to have_css('li.active a', text: 'All')
+ end
+
+ it 'highlights the Personal tab when the personal param is present' do
+ controller.params[:personal] = true
+
+ render
+
+ expect(rendered).to have_css('li.active a', text: 'Personal')
+ end
+end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index b17bc6692f3..c5f455b8948 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -1,16 +1,28 @@
require 'spec_helper'
describe 'layouts/nav/sidebar/_project' do
+ let(:project) { create(:project, :repository) }
+
+ before do
+ assign(:project, project)
+ assign(:repository, project.repository)
+ allow(view).to receive(:current_ref).and_return('master')
+
+ allow(view).to receive(:can?).and_return(true)
+ end
+
+ describe 'issue boards' do
+ it 'has a boards tab when multiple issue boards are available' do
+ render
+
+ expect(rendered).to have_css('a[title="Board"]')
+ end
+ end
+
describe 'container registry tab' do
before do
- project = create(:project, :repository)
stub_container_registry_config(enabled: true)
- assign(:project, project)
- assign(:repository, project.repository)
- allow(view).to receive(:current_ref).and_return('master')
-
- allow(view).to receive(:can?).and_return(true)
allow(controller).to receive(:controller_name)
.and_return('repositories')
allow(controller).to receive(:controller_path)
diff --git a/spec/views/projects/merge_requests/_commits.html.haml_spec.rb b/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
index 98c7de9b709..efed2e02a1b 100644
--- a/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
+++ b/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
@@ -2,10 +2,11 @@ require 'spec_helper'
describe 'projects/merge_requests/_commits.html.haml' do
include Devise::Test::ControllerHelpers
+ include ProjectForksHelper
let(:user) { create(:user) }
- let(:target_project) { create(:project, :repository) }
- let(:source_project) { create(:project, :repository, forked_from_project: target_project) }
+ let(:target_project) { create(:project, :public, :repository) }
+ let(:source_project) { fork_project(target_project, user, repository: true) }
let(:merge_request) do
create(:merge_request, :simple,
diff --git a/spec/views/projects/merge_requests/edit.html.haml_spec.rb b/spec/views/projects/merge_requests/edit.html.haml_spec.rb
index 69c7d0cbf28..9b74a7e1946 100644
--- a/spec/views/projects/merge_requests/edit.html.haml_spec.rb
+++ b/spec/views/projects/merge_requests/edit.html.haml_spec.rb
@@ -2,16 +2,19 @@ require 'spec_helper'
describe 'projects/merge_requests/edit.html.haml' do
include Devise::Test::ControllerHelpers
+ include ProjectForksHelper
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
- let(:fork_project) { create(:project, :repository, forked_from_project: project) }
- let(:unlink_project) { Projects::UnlinkForkService.new(fork_project, user) }
+ let(:forked_project) { fork_project(project, user, repository: true) }
+ let(:unlink_project) { Projects::UnlinkForkService.new(forked_project, user) }
let(:milestone) { create(:milestone, project: project) }
let(:closed_merge_request) do
+ project.add_developer(user)
+
create(:closed_merge_request,
- source_project: fork_project,
+ source_project: forked_project,
target_project: project,
author: user,
assignee: user,
diff --git a/spec/views/projects/merge_requests/show.html.haml_spec.rb b/spec/views/projects/merge_requests/show.html.haml_spec.rb
index 6f29d12373a..28d54c2fb77 100644
--- a/spec/views/projects/merge_requests/show.html.haml_spec.rb
+++ b/spec/views/projects/merge_requests/show.html.haml_spec.rb
@@ -2,16 +2,17 @@ require 'spec_helper'
describe 'projects/merge_requests/show.html.haml' do
include Devise::Test::ControllerHelpers
+ include ProjectForksHelper
let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
- let(:fork_project) { create(:project, :repository, forked_from_project: project) }
- let(:unlink_project) { Projects::UnlinkForkService.new(fork_project, user) }
+ let(:project) { create(:project, :public, :repository) }
+ let(:forked_project) { fork_project(project, user, repository: true) }
+ let(:unlink_project) { Projects::UnlinkForkService.new(forked_project, user) }
let(:note) { create(:note_on_merge_request, project: project, noteable: closed_merge_request) }
let(:closed_merge_request) do
create(:closed_merge_request,
- source_project: fork_project,
+ source_project: forked_project,
target_project: project,
author: user)
end
@@ -52,7 +53,7 @@ describe 'projects/merge_requests/show.html.haml' do
context 'when the merge request is open' do
it 'closes the merge request if the source project does not exist' do
closed_merge_request.update_attributes(state: 'open')
- fork_project.destroy
+ forked_project.destroy
render
diff --git a/spec/views/projects/pipelines_settings/_show.html.haml_spec.rb b/spec/views/projects/pipelines_settings/_show.html.haml_spec.rb
new file mode 100644
index 00000000000..c757ccf02d3
--- /dev/null
+++ b/spec/views/projects/pipelines_settings/_show.html.haml_spec.rb
@@ -0,0 +1,62 @@
+require 'spec_helper'
+
+describe 'projects/pipelines_settings/_show' do
+ let(:project) { create(:project, :repository) }
+
+ before do
+ assign :project, project
+ end
+
+ context 'when kubernetes is not active' do
+ context 'when auto devops domain is not defined' do
+ it 'shows warning message' do
+ render
+
+ expect(rendered).to have_css('.settings-message')
+ expect(rendered).to have_text('Auto Review Apps and Auto Deploy need a domain name and the')
+ expect(rendered).to have_link('Kubernetes service')
+ end
+ end
+
+ context 'when auto devops domain is defined' do
+ before do
+ project.build_auto_devops(domain: 'example.com')
+ end
+
+ it 'shows warning message' do
+ render
+
+ expect(rendered).to have_css('.settings-message')
+ expect(rendered).to have_text('Auto Review Apps and Auto Deploy need the')
+ expect(rendered).to have_link('Kubernetes service')
+ end
+ end
+ end
+
+ context 'when kubernetes is active' do
+ before do
+ project.build_kubernetes_service(active: true)
+ end
+
+ context 'when auto devops domain is not defined' do
+ it 'shows warning message' do
+ render
+
+ expect(rendered).to have_css('.settings-message')
+ expect(rendered).to have_text('Auto Review Apps and Auto Deploy need a domain name to work correctly.')
+ end
+ end
+
+ context 'when auto devops domain is defined' do
+ before do
+ project.build_auto_devops(domain: 'example.com')
+ end
+
+ it 'does not show warning message' do
+ render
+
+ expect(rendered).not_to have_css('.settings-message')
+ end
+ end
+ end
+end
diff --git a/spec/views/projects/registry/repositories/index.html.haml_spec.rb b/spec/views/projects/registry/repositories/index.html.haml_spec.rb
deleted file mode 100644
index cf0aa44a4a2..00000000000
--- a/spec/views/projects/registry/repositories/index.html.haml_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-require 'spec_helper'
-
-describe 'projects/registry/repositories/index' do
- let(:group) { create(:group, path: 'group') }
- let(:project) { create(:project, group: group, path: 'test') }
-
- let(:repository) do
- create(:container_repository, project: project, name: 'image')
- end
-
- before do
- stub_container_registry_config(enabled: true,
- host_port: 'registry.gitlab',
- api_url: 'http://registry.gitlab')
-
- stub_container_registry_tags(repository: :any, tags: [:latest])
-
- assign(:project, project)
- assign(:images, [repository])
-
- allow(view).to receive(:can?).and_return(true)
- end
-
- it 'contains container repository path' do
- render
-
- expect(rendered).to have_content 'group/test/image'
- end
-
- it 'contains attribute for copying tag location into clipboard' do
- render
-
- expect(rendered).to have_css 'button[data-clipboard-text="docker pull ' \
- 'registry.gitlab/group/test/image:latest"]'
- end
-end
diff --git a/spec/views/shared/issuable/_participants.html.haml.rb b/spec/views/shared/issuable/_participants.html.haml.rb
new file mode 100644
index 00000000000..51059d4c0d7
--- /dev/null
+++ b/spec/views/shared/issuable/_participants.html.haml.rb
@@ -0,0 +1,26 @@
+require 'spec_helper'
+require 'nokogiri'
+
+describe 'shared/issuable/_participants.html.haml' do
+ let(:project) { create(:project) }
+ let(:participants) { create_list(:user, 100) }
+
+ before do
+ allow(view).to receive_messages(project: project,
+ participants: participants)
+ end
+
+ it 'renders lazy-loaded avatars' do
+ render 'shared/issuable/participants'
+
+ html = Nokogiri::HTML(rendered)
+
+ avatars = html.css('.participants-author img')
+
+ avatars.each do |avatar|
+ expect(avatar[:class]).to include('lazy')
+ expect(avatar[:src]).to eql(LazyImageTagHelper.placeholder_image)
+ expect(avatar[:"data-src"]).to match('http://www.gravatar.com/avatar/')
+ end
+ end
+end
diff --git a/spec/views/shared/milestones/_issuable.html.haml.rb b/spec/views/shared/milestones/_issuable.html.haml.rb
new file mode 100644
index 00000000000..0a3f877cae0
--- /dev/null
+++ b/spec/views/shared/milestones/_issuable.html.haml.rb
@@ -0,0 +1,19 @@
+require 'spec_helper'
+
+describe 'shared/milestones/_issuable.html.haml' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:milestone) { create(:milestone, project: project) }
+ let(:issuable) { create(:issue, project: project, assignees: [user]) }
+
+ before do
+ assign(:project, project)
+ assign(:milestone, milestone)
+ end
+
+ it 'links the avatar to the issues page' do
+ render 'shared/milestones/issuable', issuable: issuable, show_project_name: true
+
+ expect(rendered).to have_css("a[href='#{project_issues_path(project, milestone_title: milestone.title, assignee_id: user.id, state: 'all')}']")
+ end
+end
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
index 8cc3f37ebe8..1a7ffd5cdbf 100644
--- a/spec/workers/build_finished_worker_spec.rb
+++ b/spec/workers/build_finished_worker_spec.rb
@@ -11,6 +11,8 @@ describe BuildFinishedWorker do
expect(BuildHooksWorker)
.to receive(:new).ordered.and_call_original
+ expect(BuildTraceSectionsWorker)
+ .to receive(:perform_async)
expect_any_instance_of(BuildCoverageWorker)
.to receive(:perform)
expect_any_instance_of(BuildHooksWorker)
diff --git a/spec/workers/build_trace_sections_worker_spec.rb b/spec/workers/build_trace_sections_worker_spec.rb
new file mode 100644
index 00000000000..45243f45547
--- /dev/null
+++ b/spec/workers/build_trace_sections_worker_spec.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+describe BuildTraceSectionsWorker do
+ describe '#perform' do
+ context 'when build exists' do
+ let!(:build) { create(:ci_build) }
+
+ it 'updates trace sections' do
+ expect_any_instance_of(Ci::Build)
+ .to receive(:parse_trace_sections!)
+
+ described_class.new.perform(build.id)
+ end
+ end
+
+ context 'when build does not exist' do
+ it 'does not raise an exception' do
+ expect { described_class.new.perform(123) }
+ .not_to raise_error
+ end
+ end
+ end
+end
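A minimal sketch of the worker shape these examples imply: look the build up and delegate to parse_trace_sections!, doing nothing when the build is gone. The worker's queue concern is omitted here because the spec does not pin it down.

class BuildTraceSectionsWorker
  include Sidekiq::Worker

  def perform(build_id)
    # Silently skip builds that no longer exist, per the second example above.
    Ci::Build.find_by(id: build_id)&.parse_trace_sections!
  end
end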
diff --git a/spec/workers/cluster_provision_worker_spec.rb b/spec/workers/cluster_provision_worker_spec.rb
new file mode 100644
index 00000000000..11f208289db
--- /dev/null
+++ b/spec/workers/cluster_provision_worker_spec.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+describe ClusterProvisionWorker do
+ describe '#perform' do
+ context 'when cluster exists' do
+ let(:cluster) { create(:gcp_cluster) }
+
+ it 'provisions a cluster' do
+ expect_any_instance_of(Ci::ProvisionClusterService).to receive(:execute)
+
+ described_class.new.perform(cluster.id)
+ end
+ end
+
+ context 'when cluster does not exist' do
+ it 'does not provision a cluster' do
+ expect_any_instance_of(Ci::ProvisionClusterService).not_to receive(:execute)
+
+ described_class.new.perform(123)
+ end
+ end
+ end
+end
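A comparable sketch for ClusterProvisionWorker, assuming the Gcp::Cluster model behind the :gcp_cluster factory, the ClusterQueue concern, and a Ci::ProvisionClusterService#execute(cluster) signature; all three are inferred from the examples rather than verified.

class ClusterProvisionWorker
  include Sidekiq::Worker
  include ClusterQueue

  def perform(cluster_id)
    cluster = Gcp::Cluster.find_by(id: cluster_id)
    return unless cluster # missing clusters are ignored, as tested above

    Ci::ProvisionClusterService.new.execute(cluster)
  end
end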
diff --git a/spec/workers/concerns/cluster_queue_spec.rb b/spec/workers/concerns/cluster_queue_spec.rb
new file mode 100644
index 00000000000..1050651fa51
--- /dev/null
+++ b/spec/workers/concerns/cluster_queue_spec.rb
@@ -0,0 +1,15 @@
+require 'spec_helper'
+
+describe ClusterQueue do
+ let(:worker) do
+ Class.new do
+ include Sidekiq::Worker
+ include ClusterQueue
+ end
+ end
+
+ it 'sets the queue to gcp_cluster by default' do
+ expect(worker.sidekiq_options['queue'])
+ .to eq :gcp_cluster
+ end
+end
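Given the expectation on sidekiq_options['queue'], ClusterQueue is most likely a thin concern that pins the queue name; a plausible sketch, not the verified source:

module ClusterQueue
  extend ActiveSupport::Concern

  included do
    sidekiq_options queue: :gcp_cluster
  end
end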
diff --git a/spec/workers/git_garbage_collect_worker_spec.rb b/spec/workers/git_garbage_collect_worker_spec.rb
index 6f9ddb6c63c..47297de738b 100644
--- a/spec/workers/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/git_garbage_collect_worker_spec.rb
@@ -31,7 +31,7 @@ describe GitGarbageCollectWorker do
expect_any_instance_of(Repository).to receive(:after_create_branch).and_call_original
expect_any_instance_of(Repository).to receive(:branch_names).and_call_original
- expect_any_instance_of(Gitlab::Git::Repository).to receive(:branch_count).and_call_original
+ expect_any_instance_of(Repository).to receive(:has_visible_content?).and_call_original
expect_any_instance_of(Gitlab::Git::Repository).to receive(:has_visible_content?).and_call_original
subject.perform(project.id, :gc, lease_key, lease_uuid)
@@ -47,7 +47,6 @@ describe GitGarbageCollectWorker do
expect(subject).not_to receive(:command)
expect_any_instance_of(Repository).not_to receive(:after_create_branch).and_call_original
expect_any_instance_of(Repository).not_to receive(:branch_names).and_call_original
- expect_any_instance_of(Repository).not_to receive(:branch_count).and_call_original
expect_any_instance_of(Repository).not_to receive(:has_visible_content?).and_call_original
subject.perform(project.id, :gc, lease_key, lease_uuid)
@@ -77,7 +76,7 @@ describe GitGarbageCollectWorker do
expect_any_instance_of(Repository).to receive(:after_create_branch).and_call_original
expect_any_instance_of(Repository).to receive(:branch_names).and_call_original
- expect_any_instance_of(Gitlab::Git::Repository).to receive(:branch_count).and_call_original
+ expect_any_instance_of(Repository).to receive(:has_visible_content?).and_call_original
expect_any_instance_of(Gitlab::Git::Repository).to receive(:has_visible_content?).and_call_original
subject.perform(project.id)
@@ -93,7 +92,6 @@ describe GitGarbageCollectWorker do
expect(subject).not_to receive(:command)
expect_any_instance_of(Repository).not_to receive(:after_create_branch).and_call_original
expect_any_instance_of(Repository).not_to receive(:branch_names).and_call_original
- expect_any_instance_of(Repository).not_to receive(:branch_count).and_call_original
expect_any_instance_of(Repository).not_to receive(:has_visible_content?).and_call_original
subject.perform(project.id)
@@ -106,7 +104,7 @@ describe GitGarbageCollectWorker do
it_should_behave_like 'flushing ref caches', true
end
- context "with Gitaly turned off", skip_gitaly_mock: true do
+ context "with Gitaly turned off", :skip_gitaly_mock do
it_should_behave_like 'flushing ref caches', false
end
diff --git a/spec/workers/namespaceless_project_destroy_worker_spec.rb b/spec/workers/namespaceless_project_destroy_worker_spec.rb
index 20cf580af8a..ed8cedc0079 100644
--- a/spec/workers/namespaceless_project_destroy_worker_spec.rb
+++ b/spec/workers/namespaceless_project_destroy_worker_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe NamespacelessProjectDestroyWorker do
+ include ProjectForksHelper
+
subject { described_class.new }
before do
@@ -55,9 +57,11 @@ describe NamespacelessProjectDestroyWorker do
context 'project forked from another' do
let!(:parent_project) { create(:project) }
-
- before do
- create(:forked_project_link, forked_to_project: project, forked_from_project: parent_project)
+ let(:project) do
+ namespaceless_project = fork_project(parent_project)
+ namespaceless_project.namespace_id = nil
+ namespaceless_project.save(validate: false)
+ namespaceless_project
end
it 'closes open merge requests' do
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index d3707a3cc11..05eecf5f0bb 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -70,12 +70,15 @@ describe PostReceive do
context "creates a Ci::Pipeline for every change" do
before do
- allow_any_instance_of(Ci::CreatePipelineService).to receive(:commit) do
- OpenStruct.new(id: '123456')
- end
- allow_any_instance_of(Ci::CreatePipelineService).to receive(:branch?).and_return(true)
- allow_any_instance_of(Repository).to receive(:ref_exists?).and_return(true)
stub_ci_pipeline_to_return_yaml_file
+
+ # TODO: don't stub private methods
+ #
+ allow_any_instance_of(Ci::CreatePipelineService)
+ .to receive(:commit).and_return(OpenStruct.new(id: '123456'))
+
+ allow_any_instance_of(Repository)
+ .to receive(:branch_exists?).and_return(true)
end
it { expect { subject }.to change { Ci::Pipeline.count }.by(2) }
diff --git a/spec/workers/project_migrate_hashed_storage_worker_spec.rb b/spec/workers/project_migrate_hashed_storage_worker_spec.rb
new file mode 100644
index 00000000000..f5226dee0ad
--- /dev/null
+++ b/spec/workers/project_migrate_hashed_storage_worker_spec.rb
@@ -0,0 +1,29 @@
+require 'spec_helper'
+
+describe ProjectMigrateHashedStorageWorker do
+ describe '#perform' do
+ let(:project) { create(:project, :empty_repo) }
+ let(:pending_delete_project) { create(:project, :empty_repo, pending_delete: true) }
+
+ it 'skips when project no longer exists' do
+ nonexistent_id = 999999999999
+
+ expect(::Projects::HashedStorageMigrationService).not_to receive(:new)
+ subject.perform(nonexistent_id)
+ end
+
+ it 'skips when project is pending delete' do
+ expect(::Projects::HashedStorageMigrationService).not_to receive(:new)
+
+ subject.perform(pending_delete_project.id)
+ end
+
+ it 'delegates the migration to the service class' do
+ service = double('service')
+ expect(::Projects::HashedStorageMigrationService).to receive(:new).with(project, subject.logger).and_return(service)
+ expect(service).to receive(:execute)
+
+ subject.perform(project.id)
+ end
+ end
+end
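A hedged sketch of the worker behaviour these examples pin down: skip missing or pending-delete projects, otherwise hand off to the migration service with the worker's logger.

class ProjectMigrateHashedStorageWorker
  include Sidekiq::Worker

  def perform(project_id)
    project = Project.find_by(id: project_id)
    return if project.nil? || project.pending_delete?

    # `logger` is assumed to come from the worker's base module, as subject.logger in the spec suggests.
    ::Projects::HashedStorageMigrationService.new(project, logger).execute
  end
end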
diff --git a/spec/workers/repository_check/single_repository_worker_spec.rb b/spec/workers/repository_check/single_repository_worker_spec.rb
index d2609d21546..1d9bbf2ca62 100644
--- a/spec/workers/repository_check/single_repository_worker_spec.rb
+++ b/spec/workers/repository_check/single_repository_worker_spec.rb
@@ -69,7 +69,12 @@ describe RepositoryCheck::SingleRepositoryWorker do
end
def break_wiki(project)
- FileUtils.rm_rf(wiki_path(project) + '/objects')
+ objects_dir = wiki_path(project) + '/objects'
+
+ # Replace the /objects directory with a file so that the repo is
+ # invalid, _and_ 'git init' cannot fix it.
+ FileUtils.rm_rf(objects_dir)
+ FileUtils.touch(objects_dir) if File.directory?(wiki_path(project))
end
def wiki_path(project)
diff --git a/spec/workers/storage_migrator_worker_spec.rb b/spec/workers/storage_migrator_worker_spec.rb
new file mode 100644
index 00000000000..8619ff2f7da
--- /dev/null
+++ b/spec/workers/storage_migrator_worker_spec.rb
@@ -0,0 +1,30 @@
+require 'spec_helper'
+
+describe StorageMigratorWorker do
+ subject(:worker) { described_class.new }
+ let(:projects) { create_list(:project, 2) }
+
+ describe '#perform' do
+ let(:ids) { projects.map(&:id) }
+
+ it 'enqueues jobs for ProjectMigrateHashedStorageWorker' do
+ expect(ProjectMigrateHashedStorageWorker).to receive(:perform_async).twice
+
+ worker.perform(ids.min, ids.max)
+ end
+
+ it 'sets projects as read only' do
+ allow(ProjectMigrateHashedStorageWorker).to receive(:perform_async).twice
+ worker.perform(ids.min, ids.max)
+
+ projects.each do |project|
+ expect(project.reload.repository_read_only?).to be_truthy
+ end
+ end
+
+ it 'rescues and logs exceptions' do
+ allow_any_instance_of(Project).to receive(:migrate_to_hashed_storage!).and_raise(StandardError)
+ expect { worker.perform(ids.min, ids.max) }.not_to raise_error
+ end
+ end
+end
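A sketch of the range-based perform these examples assume, where migrate_to_hashed_storage! is expected to flag the project read-only and enqueue ProjectMigrateHashedStorageWorker itself; the error handling mirrors the last example above.

class StorageMigratorWorker
  include Sidekiq::Worker

  def perform(start_id, end_id)
    Project.where(id: start_id..end_id).find_each do |project|
      begin
        project.migrate_to_hashed_storage!
      rescue StandardError => e
        Rails.logger.error("Could not migrate project #{project.id}: #{e.message}")
      end
    end
  end
end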
diff --git a/spec/workers/use_key_worker_spec.rb b/spec/workers/use_key_worker_spec.rb
deleted file mode 100644
index e50c788b82a..00000000000
--- a/spec/workers/use_key_worker_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-require 'spec_helper'
-
-describe UseKeyWorker do
- describe "#perform" do
- it "updates the key's last_used_at attribute to the current time when it exists" do
- worker = described_class.new
- key = create(:key)
- current_time = Time.zone.now
-
- Timecop.freeze(current_time) do
- expect { worker.perform(key.id) }
- .to change { key.reload.last_used_at }.from(nil).to be_like_time(current_time)
- end
- end
-
- it "returns false and skips the job when the key doesn't exist" do
- worker = described_class.new
- key = create(:key)
-
- expect(worker.perform(key.id + 1)).to eq false
- end
- end
-end
diff --git a/spec/workers/wait_for_cluster_creation_worker_spec.rb b/spec/workers/wait_for_cluster_creation_worker_spec.rb
new file mode 100644
index 00000000000..dcd4a3b9aec
--- /dev/null
+++ b/spec/workers/wait_for_cluster_creation_worker_spec.rb
@@ -0,0 +1,67 @@
+require 'spec_helper'
+
+describe WaitForClusterCreationWorker do
+ describe '#perform' do
+ context 'when cluster exists' do
+ let(:cluster) { create(:gcp_cluster) }
+ let(:operation) { double }
+
+ before do
+ allow(operation).to receive(:status).and_return(status)
+ allow(operation).to receive(:start_time).and_return(1.minute.ago)
+ allow(operation).to receive(:status_message).and_return('error')
+ allow_any_instance_of(Ci::FetchGcpOperationService).to receive(:execute).and_yield(operation)
+ end
+
+ context 'when operation status is RUNNING' do
+ let(:status) { 'RUNNING' }
+
+ it 'reschedules worker' do
+ expect(described_class).to receive(:perform_in)
+
+ described_class.new.perform(cluster.id)
+ end
+
+ context 'when the operation times out' do
+ before do
+ allow(operation).to receive(:start_time).and_return(30.minutes.ago.utc)
+ end
+
+ it 'sets an error message on cluster' do
+ described_class.new.perform(cluster.id)
+
+ expect(cluster.reload).to be_errored
+ end
+ end
+ end
+
+ context 'when operation status is DONE' do
+ let(:status) { 'DONE' }
+
+ it 'finalizes cluster creation' do
+ expect_any_instance_of(Ci::FinalizeClusterCreationService).to receive(:execute)
+
+ described_class.new.perform(cluster.id)
+ end
+ end
+
+ context 'when the operation status is something else' do
+ let(:status) { 'others' }
+
+ it 'sets an error message on cluster' do
+ described_class.new.perform(cluster.id)
+
+ expect(cluster.reload).to be_errored
+ end
+ end
+ end
+
+ context 'when cluster does not exist' do
+ it 'does not provision a cluster' do
+ expect_any_instance_of(Ci::FetchGcpOperationService).not_to receive(:execute)
+
+ described_class.new.perform(1234)
+ end
+ end
+ end
+end
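Finally, a hedged sketch of the polling loop WaitForClusterCreationWorker appears to implement, derived from the statuses exercised above; the timeout constant, the error-setting method (make_errored! here), and the reschedule interval are guesses rather than the actual implementation.

class WaitForClusterCreationWorker
  include Sidekiq::Worker
  include ClusterQueue

  POLL_INTERVAL = 2.minutes
  TIMEOUT = 20.minutes

  def perform(cluster_id)
    cluster = Gcp::Cluster.find_by(id: cluster_id)
    return unless cluster # missing clusters are ignored

    Ci::FetchGcpOperationService.new.execute(cluster) do |operation|
      case operation.status
      when 'RUNNING'
        if Time.now.utc - operation.start_time.to_time.utc > TIMEOUT
          cluster.make_errored!("Cluster creation timed out: #{operation.status_message}")
        else
          self.class.perform_in(POLL_INTERVAL, cluster.id)
        end
      when 'DONE'
        Ci::FinalizeClusterCreationService.new.execute(cluster)
      else
        cluster.make_errored!("Unexpected operation status: #{operation.status_message}")
      end
    end
  end
end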