gitlab.com/gitlab-org/gitlab-foss.git
author    Kamil Trzciński <ayufan@ayufan.eu>  2018-02-28 22:36:55 +0300
committer Kamil Trzciński <ayufan@ayufan.eu>  2018-02-28 22:36:55 +0300
commit    965dc28691e2d70b7040e28d90ccbc3721a9e416 (patch)
tree      84258f35b72f2e7ce6a7198db66032df4ad5aadb /spec
parent    e3fafa7632e038927085cf8c8228c93be44b36bd (diff)
parent    7fabc892f251740dbd9a4755baede662e6854870 (diff)
Merge commit '7fabc892f251740dbd9a4755baede662e6854870' into object-storage-ee-to-ce-backport
Diffstat (limited to 'spec')
-rw-r--r--  spec/bin/storage_check_spec.rb | 13
-rw-r--r--  spec/controllers/admin/health_check_controller_spec.rb | 4
-rw-r--r--  spec/controllers/application_controller_spec.rb | 13
-rw-r--r--  spec/controllers/concerns/issuable_collections_spec.rb | 55
-rw-r--r--  spec/controllers/dashboard/todos_controller_spec.rb | 4
-rw-r--r--  spec/controllers/groups/children_controller_spec.rb | 11
-rw-r--r--  spec/controllers/groups/uploads_controller_spec.rb | 10
-rw-r--r--  spec/controllers/health_controller_spec.rb | 42
-rw-r--r--  spec/controllers/import/github_controller_spec.rb | 4
-rw-r--r--  spec/controllers/passwords_controller_spec.rb | 12
-rw-r--r--  spec/controllers/projects/boards_controller_spec.rb | 12
-rw-r--r--  spec/controllers/projects/branches_controller_spec.rb | 40
-rw-r--r--  spec/controllers/projects/clusters/gcp_controller_spec.rb | 185
-rw-r--r--  spec/controllers/projects/clusters/user_controller_spec.rb | 89
-rw-r--r--  spec/controllers/projects/clusters_controller_spec.rb | 405
-rw-r--r--  spec/controllers/projects/commit_controller_spec.rb | 24
-rw-r--r--  spec/controllers/projects/jobs_controller_spec.rb | 25
-rw-r--r--  spec/controllers/projects/merge_requests/diffs_controller_spec.rb | 3
-rw-r--r--  spec/controllers/projects/merge_requests_controller_spec.rb | 20
-rw-r--r--  spec/controllers/projects/notes_controller_spec.rb | 23
-rw-r--r--  spec/controllers/projects/pipelines_settings_controller_spec.rb | 29
-rw-r--r--  spec/controllers/projects/uploads_controller_spec.rb | 247
-rw-r--r--  spec/controllers/projects_controller_spec.rb | 58
-rw-r--r--  spec/controllers/registrations_controller_spec.rb | 3
-rw-r--r--  spec/factories/appearances.rb | 1
-rw-r--r--  spec/factories/ci/builds.rb | 42
-rw-r--r--  spec/factories/ci/job_artifacts.rb | 30
-rw-r--r--  spec/factories/clusters/clusters.rb (renamed from spec/factories/clusters/cluster.rb) | 17
-rw-r--r--  spec/factories/fork_network_members.rb | 8
-rw-r--r--  spec/factories/notes.rb | 11
-rw-r--r--  spec/factories/uploads.rb | 16
-rw-r--r--  spec/features/admin/admin_appearance_spec.rb | 33
-rw-r--r--  spec/features/admin/admin_health_check_spec.rb | 12
-rw-r--r--  spec/features/admin/admin_users_spec.rb | 23
-rw-r--r--  spec/features/auto_deploy_spec.rb | 88
-rw-r--r--  spec/features/boards/sidebar_spec.rb | 22
-rw-r--r--  spec/features/commits_spec.rb | 35
-rw-r--r--  spec/features/groups/members/manage_members.rb | 6
-rw-r--r--  spec/features/groups/milestones_sorting_spec.rb | 51
-rw-r--r--  spec/features/issuables/discussion_lock_spec.rb | 2
-rw-r--r--  spec/features/issuables/shortcuts_issuable_spec.rb | 46
-rw-r--r--  spec/features/issues/create_branch_merge_request_spec.rb | 106
-rw-r--r--  spec/features/issues/gfm_autocomplete_spec.rb | 122
-rw-r--r--  spec/features/issues/issue_detail_spec.rb | 7
-rw-r--r--  spec/features/issues/user_creates_branch_and_merge_request_spec.rb | 248
-rw-r--r--  spec/features/issues_spec.rb | 14
-rw-r--r--  spec/features/logout_spec.rb | 22
-rw-r--r--  spec/features/markdown_spec.rb | 9
-rw-r--r--  spec/features/merge_requests/create_new_mr_spec.rb | 22
-rw-r--r--  spec/features/merge_requests/filter_by_labels_spec.rb | 16
-rw-r--r--  spec/features/merge_requests/mini_pipeline_graph_spec.rb | 4
-rw-r--r--  spec/features/merge_requests/pipelines_spec.rb | 15
-rw-r--r--  spec/features/merge_requests/versions_spec.rb | 105
-rw-r--r--  spec/features/merge_requests/widget_spec.rb | 12
-rw-r--r--  spec/features/milestone_spec.rb | 29
-rw-r--r--  spec/features/profiles/password_spec.rb | 7
-rw-r--r--  spec/features/projects/blobs/blob_show_spec.rb | 12
-rw-r--r--  spec/features/projects/clusters/applications_spec.rb | 107
-rw-r--r--  spec/features/projects/clusters/gcp_spec.rb | 138
-rw-r--r--  spec/features/projects/clusters/interchangeability_spec.rb | 16
-rw-r--r--  spec/features/projects/clusters/user_spec.rb | 102
-rw-r--r--  spec/features/projects/clusters_spec.rb | 217
-rw-r--r--  spec/features/projects/environments/environment_spec.rb | 51
-rw-r--r--  spec/features/projects/environments/environments_spec.rb | 68
-rw-r--r--  spec/features/projects/features_visibility_spec.rb | 6
-rw-r--r--  spec/features/projects/fork_spec.rb | 17
-rw-r--r--  spec/features/projects/import_export/test_project_export.tar.gz | bin 688161 -> 343232 bytes
-rw-r--r--  spec/features/projects/issuable_templates_spec.rb | 4
-rw-r--r--  spec/features/projects/jobs_spec.rb | 8
-rw-r--r--  spec/features/projects/members/list_spec.rb | 16
-rw-r--r--  spec/features/projects/no_password_spec.rb | 2
-rw-r--r--  spec/features/projects/pipelines/pipeline_spec.rb | 30
-rw-r--r--  spec/features/projects/pipelines/pipelines_spec.rb | 40
-rw-r--r--  spec/features/projects/project_settings_spec.rb | 6
-rw-r--r--  spec/features/projects/settings/pipelines_settings_spec.rb | 25
-rw-r--r--  spec/features/projects/snippets_spec.rb | 5
-rw-r--r--  spec/features/projects/tree/create_directory_spec.rb | 4
-rw-r--r--  spec/features/projects/tree/create_file_spec.rb | 4
-rw-r--r--  spec/features/projects/tree/upload_file_spec.rb | 4
-rw-r--r--  spec/features/projects/user_creates_project_spec.rb | 31
-rw-r--r--  spec/features/projects/user_transfers_a_project_spec.rb | 49
-rw-r--r--  spec/finders/admin/projects_finder_spec.rb | 2
-rw-r--r--  spec/finders/autocomplete_users_finder_spec.rb | 15
-rw-r--r--  spec/finders/clusters_finder_spec.rb | 31
-rw-r--r--  spec/finders/runner_jobs_finder_spec.rb | 39
-rw-r--r--  spec/fixtures/api/schemas/issue.json | 2
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json | 18
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json | 20
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/pages_domain_basics.json | 4
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/pages_domains.json | 21
-rw-r--r--  spec/fixtures/emails/valid_new_merge_request.eml | 18
-rw-r--r--  spec/fixtures/emails/valid_new_merge_request_no_subject.eml | 18
-rw-r--r--  spec/fixtures/markdown.md.erb | 34
-rw-r--r--  spec/helpers/application_helper_spec.rb | 73
-rw-r--r--  spec/helpers/boards_helper_spec.rb | 21
-rw-r--r--  spec/helpers/button_helper_spec.rb | 85
-rw-r--r--  spec/helpers/groups_helper_spec.rb | 32
-rw-r--r--  spec/helpers/icons_helper_spec.rb | 28
-rw-r--r--  spec/helpers/issuables_helper_spec.rb | 1
-rw-r--r--  spec/helpers/markup_helper_spec.rb | 2
-rw-r--r--  spec/helpers/merge_requests_helper_spec.rb | 17
-rw-r--r--  spec/helpers/namespaces_helper_spec.rb | 25
-rw-r--r--  spec/helpers/projects_helper_spec.rb | 31
-rw-r--r--  spec/helpers/search_helper_spec.rb | 4
-rw-r--r--  spec/helpers/tree_helper_spec.rb | 32
-rw-r--r--  spec/javascripts/behaviors/autosize_spec.js | 31
-rw-r--r--  spec/javascripts/behaviors/gl_emoji/unicode_support_map_spec.js | 19
-rw-r--r--  spec/javascripts/behaviors/requires_input_spec.js | 76
-rw-r--r--  spec/javascripts/boards/board_card_spec.js | 29
-rw-r--r--  spec/javascripts/boards/issue_spec.js | 19
-rw-r--r--  spec/javascripts/clusters/clusters_bundle_spec.js | 18
-rw-r--r--  spec/javascripts/clusters/clusters_index_spec.js | 58
-rw-r--r--  spec/javascripts/datetime_utility_spec.js | 22
-rw-r--r--  spec/javascripts/deploy_keys/components/action_btn_spec.js | 2
-rw-r--r--  spec/javascripts/deploy_keys/components/app_spec.js | 14
-rw-r--r--  spec/javascripts/droplab/drop_down_spec.js | 8
-rw-r--r--  spec/javascripts/droplab/hook_spec.js | 2
-rw-r--r--  spec/javascripts/emoji_spec.js | 19
-rw-r--r--  spec/javascripts/environments/emtpy_state_spec.js | 57
-rw-r--r--  spec/javascripts/environments/environment_table_spec.js | 31
-rw-r--r--  spec/javascripts/environments/environments_app_spec.js (renamed from spec/javascripts/environments/environment_spec.js) | 132
-rw-r--r--  spec/javascripts/environments/folder/environments_folder_view_spec.js | 157
-rw-r--r--  spec/javascripts/filtered_search/filtered_search_manager_spec.js | 19
-rw-r--r--  spec/javascripts/fixtures/clusters.rb | 15
-rw-r--r--  spec/javascripts/fixtures/environments/element.html.haml | 1
-rw-r--r--  spec/javascripts/fixtures/environments/environments.html.haml | 9
-rw-r--r--  spec/javascripts/fixtures/environments/environments_folder_view.html.haml | 7
-rw-r--r--  spec/javascripts/fixtures/pipelines.html.haml | 8
-rw-r--r--  spec/javascripts/flash_spec.js | 2
-rw-r--r--  spec/javascripts/gfm_auto_complete_spec.js | 22
-rw-r--r--  spec/javascripts/image_diff/helpers/utils_helper_spec.js | 10
-rw-r--r--  spec/javascripts/issuable_spec.js | 2
-rw-r--r--  spec/javascripts/issue_show/components/app_spec.js | 56
-rw-r--r--  spec/javascripts/issue_show/components/description_spec.js | 99
-rw-r--r--  spec/javascripts/issue_show/components/edit_actions_spec.js | 9
-rw-r--r--  spec/javascripts/issue_show/components/form_spec.js | 1
-rw-r--r--  spec/javascripts/issue_show/components/title_spec.js | 8
-rw-r--r--  spec/javascripts/job_spec.js | 2
-rw-r--r--  spec/javascripts/jobs/job_details_mediator_spec.js | 20
-rw-r--r--  spec/javascripts/lib/utils/common_utils_spec.js | 47
-rw-r--r--  spec/javascripts/lib/utils/datefix_spec.js | 2
-rw-r--r--  spec/javascripts/lib/utils/number_utility_spec.js | 27
-rw-r--r--  spec/javascripts/lib/utils/poll_spec.js | 6
-rw-r--r--  spec/javascripts/lib/utils/text_markdown_spec.js | 62
-rw-r--r--  spec/javascripts/lib/utils/text_utility_spec.js | 120
-rw-r--r--  spec/javascripts/merge_request_spec.js | 4
-rw-r--r--  spec/javascripts/monitoring/dashboard_spec.js | 12
-rw-r--r--  spec/javascripts/monitoring/graph_path_spec.js | 17
-rw-r--r--  spec/javascripts/monitoring/mock_data.js | 15
-rw-r--r--  spec/javascripts/new_branch_spec.js | 3
-rw-r--r--  spec/javascripts/notes/components/issue_comment_form_spec.js | 27
-rw-r--r--  spec/javascripts/notes/components/issue_discussion_spec.js | 4
-rw-r--r--  spec/javascripts/notes/components/issue_note_app_spec.js | 6
-rw-r--r--  spec/javascripts/notes/components/issue_note_body_spec.js | 4
-rw-r--r--  spec/javascripts/notes/components/issue_note_form_spec.js | 4
-rw-r--r--  spec/javascripts/notes/components/issue_note_spec.js | 4
-rw-r--r--  spec/javascripts/notes/components/note_actions_spec.js (renamed from spec/javascripts/notes/components/issue_note_actions_spec.js) | 4
-rw-r--r--  spec/javascripts/notes/components/note_attachment_spec.js (renamed from spec/javascripts/notes/components/issue_note_attachment_spec.js) | 4
-rw-r--r--  spec/javascripts/notes/components/note_awards_list_spec.js (renamed from spec/javascripts/notes/components/issue_note_awards_list_spec.js) | 8
-rw-r--r--  spec/javascripts/notes/components/note_edited_text_spec.js (renamed from spec/javascripts/notes/components/issue_note_edited_text_spec.js) | 6
-rw-r--r--  spec/javascripts/notes/components/note_header_spec.js (renamed from spec/javascripts/notes/components/issue_note_header_spec.js) | 6
-rw-r--r--  spec/javascripts/notes/components/note_signed_out_widget_spec.js (renamed from spec/javascripts/notes/components/issue_note_signed_out_widget_spec.js) | 6
-rw-r--r--  spec/javascripts/notes/mock_data.js | 4
-rw-r--r--  spec/javascripts/notes/stores/actions_spec.js | 8
-rw-r--r--  spec/javascripts/notes/stores/getters_spec.js | 10
-rw-r--r--  spec/javascripts/notes/stores/mutation_spec.js | 10
-rw-r--r--  spec/javascripts/notes_spec.js | 1
-rw-r--r--  spec/javascripts/pipelines/graph/job_component_spec.js | 65
-rw-r--r--  spec/javascripts/pipelines/navigation_tabs_spec.js | 127
-rw-r--r--  spec/javascripts/pipelines/pipelines_spec.js | 137
-rw-r--r--  spec/javascripts/projects_dropdown/components/projects_list_item_spec.js | 12
-rw-r--r--  spec/javascripts/repo/components/commit_sidebar/list_collapsed_spec.js | 33
-rw-r--r--  spec/javascripts/repo/components/commit_sidebar/list_item_spec.js | 53
-rw-r--r--  spec/javascripts/repo/components/commit_sidebar/list_spec.js | 72
-rw-r--r--  spec/javascripts/repo/components/repo_commit_section_spec.js | 22
-rw-r--r--  spec/javascripts/repo/components/repo_editor_spec.js | 10
-rw-r--r--  spec/javascripts/repo/components/repo_sidebar_spec.js | 13
-rw-r--r--  spec/javascripts/repo/components/repo_tab_spec.js | 12
-rw-r--r--  spec/javascripts/repo/components/repo_tabs_spec.js | 5
-rw-r--r--  spec/javascripts/repo/helpers.js | 5
-rw-r--r--  spec/javascripts/repo/lib/common/disposable_spec.js | 44
-rw-r--r--  spec/javascripts/repo/lib/common/model_manager_spec.js | 81
-rw-r--r--  spec/javascripts/repo/lib/common/model_spec.js | 84
-rw-r--r--  spec/javascripts/repo/lib/decorations/controller_spec.js | 120
-rw-r--r--  spec/javascripts/repo/lib/diff/controller_spec.js | 176
-rw-r--r--  spec/javascripts/repo/lib/diff/diff_spec.js | 80
-rw-r--r--  spec/javascripts/repo/lib/editor_options_spec.js | 7
-rw-r--r--  spec/javascripts/repo/lib/editor_spec.js | 128
-rw-r--r--  spec/javascripts/repo/stores/actions/branch_spec.js | 38
-rw-r--r--  spec/javascripts/repo/stores/actions/file_spec.js | 417
-rw-r--r--  spec/javascripts/repo/stores/actions/tree_spec.js | 469
-rw-r--r--  spec/javascripts/repo/stores/actions_spec.js | 419
-rw-r--r--  spec/javascripts/repo/stores/getters_spec.js | 146
-rw-r--r--  spec/javascripts/repo/stores/mutations/branch_spec.js | 18
-rw-r--r--  spec/javascripts/repo/stores/mutations/file_spec.js | 131
-rw-r--r--  spec/javascripts/repo/stores/mutations/tree_spec.js | 71
-rw-r--r--  spec/javascripts/repo/stores/mutations_spec.js | 117
-rw-r--r--  spec/javascripts/repo/stores/utils_spec.js | 102
-rw-r--r--  spec/javascripts/search_autocomplete_spec.js | 2
-rw-r--r--  spec/javascripts/sidebar/mock_data.js | 82
-rw-r--r--  spec/javascripts/sidebar/sidebar_mediator_spec.js | 27
-rw-r--r--  spec/javascripts/sidebar/sidebar_service_spec.js | 66
-rw-r--r--  spec/javascripts/sidebar/sidebar_store_spec.js | 6
-rw-r--r--  spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js | 171
-rw-r--r--  spec/javascripts/vue_mr_widget/mock_data.js | 1
-rw-r--r--  spec/javascripts/vue_mr_widget/mr_widget_options_spec.js | 19
-rw-r--r--  spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js | 47
-rw-r--r--  spec/javascripts/vue_shared/components/icon_spec.js | 12
-rw-r--r--  spec/javascripts/vue_shared/components/issue/issue_warning_spec.js | 6
-rw-r--r--  spec/javascripts/vue_shared/components/loading_button_spec.js | 18
-rw-r--r--  spec/javascripts/vue_shared/components/markdown/toolbar_spec.js | 37
-rw-r--r--  spec/javascripts/vue_shared/components/navigation_tabs_spec.js | 61
-rw-r--r--  spec/javascripts/vue_shared/components/pikaday_spec.js | 29
-rw-r--r--  spec/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js | 35
-rw-r--r--  spec/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js | 91
-rw-r--r--  spec/javascripts/vue_shared/components/sidebar/date_picker_spec.js | 117
-rw-r--r--  spec/javascripts/vue_shared/components/sidebar/toggle_sidebar_spec.js | 32
-rw-r--r--  spec/javascripts/vue_shared/components/toggle_button_spec.js | 91
-rw-r--r--  spec/javascripts/zen_mode_spec.js | 146
-rw-r--r--  spec/lib/api/helpers/pagination_spec.rb | 21
-rw-r--r--  spec/lib/api/helpers_spec.rb | 109
-rw-r--r--  spec/lib/backup/manager_spec.rb (renamed from spec/lib/gitlab/backup/manager_spec.rb) | 0
-rw-r--r--  spec/lib/backup/repository_spec.rb | 69
-rw-r--r--  spec/lib/banzai/cross_project_reference_spec.rb | 8
-rw-r--r--  spec/lib/banzai/filter/abstract_reference_filter_spec.rb | 38
-rw-r--r--  spec/lib/banzai/filter/commit_reference_filter_spec.rb | 12
-rw-r--r--  spec/lib/banzai/filter/issue_reference_filter_spec.rb | 63
-rw-r--r--  spec/lib/banzai/filter/mermaid_filter_spec.rb | 12
-rw-r--r--  spec/lib/banzai/filter/syntax_highlight_filter_spec.rb | 2
-rw-r--r--  spec/lib/banzai/filter/upload_link_filter_spec.rb | 30
-rw-r--r--  spec/lib/banzai/reference_parser/issue_parser_spec.rb | 4
-rw-r--r--  spec/lib/container_registry/path_spec.rb | 18
-rw-r--r--  spec/lib/feature_spec.rb | 41
-rw-r--r--  spec/lib/github/client_spec.rb | 34
-rw-r--r--  spec/lib/github/import/legacy_diff_note_spec.rb | 9
-rw-r--r--  spec/lib/github/import/note_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/auth/request_authenticator_spec.rb | 67
-rw-r--r--  spec/lib/gitlab/auth/user_auth_finders_spec.rb | 194
-rw-r--r--  spec/lib/gitlab/auth_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb (renamed from spec/migrations/populate_merge_requests_latest_merge_request_diff_id_spec.rb) | 13
-rw-r--r--  spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb | 510
-rw-r--r--  spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb | 242
-rw-r--r--  spec/lib/gitlab/backup/repository_spec.rb | 117
-rw-r--r--  spec/lib/gitlab/bitbucket_import/importer_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/checks/change_access_spec.rb | 43
-rw-r--r--  spec/lib/gitlab/checks/lfs_integrity_spec.rb | 74
-rw-r--r--  spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/build_spec.rb | 51
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb | 28
-rw-r--r--  spec/lib/gitlab/conflict/file_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/database_spec.rb | 58
-rw-r--r--  spec/lib/gitlab/diff/file_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/diff/inline_diff_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/diff/position_tracer_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb | 84
-rw-r--r--  spec/lib/gitlab/email/handler_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/encoding_helper_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/fake_application_settings_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/git/commit_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/git/diff_collection_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/git/remote_repository_spec.rb | 99
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 170
-rw-r--r--  spec/lib/gitlab/git/storage/checker_spec.rb | 132
-rw-r--r--  spec/lib/gitlab/git/storage/circuit_breaker_spec.rb | 144
-rw-r--r--  spec/lib/gitlab/git/storage/failure_info_spec.rb | 70
-rw-r--r--  spec/lib/gitlab/git/storage/health_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/git/user_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/git_spec.rb | 25
-rw-r--r--  spec/lib/gitlab/gitaly_client/ref_service_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/gitaly_client/wiki_service_spec.rb | 88
-rw-r--r--  spec/lib/gitlab/gitaly_client_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/github_import/bulk_importing_spec.rb | 62
-rw-r--r--  spec/lib/gitlab/github_import/caching_spec.rb | 117
-rw-r--r--  spec/lib/gitlab/github_import/client_spec.rb | 389
-rw-r--r--  spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb | 152
-rw-r--r--  spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb | 119
-rw-r--r--  spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/github_import/importer/issue_importer_spec.rb | 201
-rw-r--r--  spec/lib/gitlab/github_import/importer/issues_importer_spec.rb | 111
-rw-r--r--  spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb | 82
-rw-r--r--  spec/lib/gitlab/github_import/importer/labels_importer_spec.rb | 107
-rw-r--r--  spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb | 120
-rw-r--r--  spec/lib/gitlab/github_import/importer/note_importer_spec.rb | 151
-rw-r--r--  spec/lib/gitlab/github_import/importer/notes_importer_spec.rb | 116
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb | 221
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb | 272
-rw-r--r--  spec/lib/gitlab/github_import/importer/releases_importer_spec.rb | 125
-rw-r--r--  spec/lib/gitlab/github_import/importer/repository_importer_spec.rb | 227
-rw-r--r--  spec/lib/gitlab/github_import/issuable_finder_spec.rb | 38
-rw-r--r--  spec/lib/gitlab/github_import/label_finder_spec.rb | 61
-rw-r--r--  spec/lib/gitlab/github_import/markdown_text_spec.rb | 28
-rw-r--r--  spec/lib/gitlab/github_import/milestone_finder_spec.rb | 57
-rw-r--r--  spec/lib/gitlab/github_import/page_counter_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/github_import/parallel_importer_spec.rb | 40
-rw-r--r--  spec/lib/gitlab/github_import/parallel_scheduling_spec.rb | 296
-rw-r--r--  spec/lib/gitlab/github_import/representation/diff_note_spec.rb | 164
-rw-r--r--  spec/lib/gitlab/github_import/representation/expose_attribute_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/github_import/representation/issue_spec.rb | 182
-rw-r--r--  spec/lib/gitlab/github_import/representation/note_spec.rb | 107
-rw-r--r--  spec/lib/gitlab/github_import/representation/pull_request_spec.rb | 288
-rw-r--r--  spec/lib/gitlab/github_import/representation/to_hash_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/github_import/representation/user_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/github_import/representation_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/github_import/sequential_importer_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/github_import/user_finder_spec.rb | 333
-rw-r--r--  spec/lib/gitlab/github_import_spec.rb | 79
-rw-r--r--  spec/lib/gitlab/hook_data/issuable_builder_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/hook_data/issue_builder_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 1
-rw-r--r--  spec/lib/gitlab/import_export/project.json | 735
-rw-r--r--  spec/lib/gitlab/import_export/project_tree_restorer_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/import_export/project_tree_saver_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 1
-rw-r--r--  spec/lib/gitlab/import_export/uploads_restorer_spec.rb | 55
-rw-r--r--  spec/lib/gitlab/import_export/uploads_saver_spec.rb | 61
-rw-r--r--  spec/lib/gitlab/import_sources_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/issuable_metadata_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/legacy_github_import/branch_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/branch_formatter_spec.rb) | 2
-rw-r--r--  spec/lib/gitlab/legacy_github_import/client_spec.rb | 97
-rw-r--r--  spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/comment_formatter_spec.rb) | 2
-rw-r--r--  spec/lib/gitlab/legacy_github_import/importer_spec.rb (renamed from spec/lib/gitlab/github_import/importer_spec.rb) | 28
-rw-r--r--  spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/issuable_formatter_spec.rb) | 2
-rw-r--r--  spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/issue_formatter_spec.rb) | 14
-rw-r--r--  spec/lib/gitlab/legacy_github_import/label_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/label_formatter_spec.rb) | 2
-rw-r--r--  spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/milestone_formatter_spec.rb) | 8
-rw-r--r--  spec/lib/gitlab/legacy_github_import/project_creator_spec.rb (renamed from spec/lib/gitlab/github_import/project_creator_spec.rb) | 2
-rw-r--r--  spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/pull_request_formatter_spec.rb) | 26
-rw-r--r--  spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/release_formatter_spec.rb) | 2
-rw-r--r--  spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/user_formatter_spec.rb) | 2
-rw-r--r--  spec/lib/gitlab/legacy_github_import/wiki_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/wiki_formatter_spec.rb) | 2
-rw-r--r--  spec/lib/gitlab/metrics/method_call_spec.rb | 58
-rw-r--r--  spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb | 40
-rw-r--r--  spec/lib/gitlab/middleware/go_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/middleware/read_only_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/o_auth/user_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/project_search_results_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/reference_extractor_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/shell_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/sidekiq_config_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/sql/pattern_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/storage_check/cli_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/storage_check/gitlab_caller_spec.rb | 46
-rw-r--r--  spec/lib/gitlab/storage_check/option_parser_spec.rb | 31
-rw-r--r--  spec/lib/gitlab/storage_check/response_spec.rb | 54
-rw-r--r--  spec/lib/gitlab/url_blocker_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/utils/strong_memoize_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/utils_spec.rb | 10
-rw-r--r--  spec/lib/google_api/cloud_platform/client_spec.rb | 24
-rw-r--r--  spec/lib/milestone_array_spec.rb | 34
-rw-r--r--  spec/mailers/notify_spec.rb | 24
-rw-r--r--  spec/migrations/migrate_gcp_clusters_to_new_clusters_architectures_spec.rb | 4
-rw-r--r--  spec/migrations/migrate_old_artifacts_spec.rb | 39
-rw-r--r--  spec/migrations/remove_empty_fork_networks_spec.rb | 24
-rw-r--r--  spec/migrations/schedule_merge_request_diff_migrations_spec.rb | 19
-rw-r--r--  spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb | 19
-rw-r--r--  spec/migrations/schedule_merge_request_latest_merge_request_diff_id_migrations_spec.rb | 64
-rw-r--r--  spec/migrations/track_untracked_uploads_spec.rb | 27
-rw-r--r--  spec/models/appearance_spec.rb | 3
-rw-r--r--  spec/models/application_setting_spec.rb | 92
-rw-r--r--  spec/models/blob_spec.rb | 17
-rw-r--r--  spec/models/ci/build_spec.rb | 428
-rw-r--r--  spec/models/ci/job_artifact_spec.rb | 74
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 228
-rw-r--r--  spec/models/ci/runner_spec.rb | 2
-rw-r--r--  spec/models/clusters/cluster_spec.rb | 23
-rw-r--r--  spec/models/clusters/platforms/kubernetes_spec.rb | 192
-rw-r--r--  spec/models/commit_collection_spec.rb | 59
-rw-r--r--  spec/models/commit_spec.rb | 11
-rw-r--r--  spec/models/concerns/avatarable_spec.rb | 44
-rw-r--r--  spec/models/concerns/has_variable_spec.rb | 18
-rw-r--r--  spec/models/concerns/issuable_spec.rb | 53
-rw-r--r--  spec/models/concerns/manual_inverse_association_spec.rb | 51
-rw-r--r--  spec/models/concerns/milestoneish_spec.rb | 17
-rw-r--r--  spec/models/diff_note_spec.rb | 35
-rw-r--r--  spec/models/diff_viewer/base_spec.rb | 22
-rw-r--r--  spec/models/diff_viewer/server_side_spec.rb | 9
-rw-r--r--  spec/models/environment_spec.rb | 51
-rw-r--r--  spec/models/fork_network_member_spec.rb | 18
-rw-r--r--  spec/models/group_spec.rb | 10
-rw-r--r--  spec/models/identity_spec.rb | 10
-rw-r--r--  spec/models/issue_spec.rb | 18
-rw-r--r--  spec/models/key_spec.rb | 23
-rw-r--r--  spec/models/merge_request_diff_spec.rb | 80
-rw-r--r--  spec/models/merge_request_spec.rb | 121
-rw-r--r--  spec/models/milestone_spec.rb | 2
-rw-r--r--  spec/models/namespace_spec.rb | 10
-rw-r--r--  spec/models/note_spec.rb | 33
-rw-r--r--  spec/models/personal_access_token_spec.rb | 25
-rw-r--r--  spec/models/project_services/flowdock_service_spec.rb | 1
-rw-r--r--  spec/models/project_services/kubernetes_service_spec.rb | 4
-rw-r--r--  spec/models/project_spec.rb | 211
-rw-r--r--  spec/models/project_statistics_spec.rb | 26
-rw-r--r--  spec/models/project_wiki_spec.rb | 4
-rw-r--r--  spec/models/repository_spec.rb | 179
-rw-r--r--  spec/models/snippet_spec.rb | 2
-rw-r--r--  spec/models/user_spec.rb | 268
-rw-r--r--  spec/models/wiki_page_spec.rb | 2
-rw-r--r--  spec/policies/ci/build_policy_spec.rb | 77
-rw-r--r--  spec/policies/group_policy_spec.rb | 28
-rw-r--r--  spec/policies/namespace_policy_spec.rb | 38
-rw-r--r--  spec/presenters/clusters/cluster_presenter_spec.rb | 40
-rw-r--r--  spec/presenters/merge_request_presenter_spec.rb | 2
-rw-r--r--  spec/requests/api/circuit_breakers_spec.rb | 2
-rw-r--r--  spec/requests/api/groups_spec.rb | 150
-rw-r--r--  spec/requests/api/helpers_spec.rb | 46
-rw-r--r--  spec/requests/api/internal_spec.rb | 54
-rw-r--r--  spec/requests/api/jobs_spec.rb | 21
-rw-r--r--  spec/requests/api/merge_requests_spec.rb | 8
-rw-r--r--  spec/requests/api/namespaces_spec.rb | 123
-rw-r--r--  spec/requests/api/notes_spec.rb | 124
-rw-r--r--  spec/requests/api/pages_domains_spec.rb | 47
-rw-r--r--  spec/requests/api/projects_spec.rb | 8
-rw-r--r--  spec/requests/api/protected_branches_spec.rb | 36
-rw-r--r--  spec/requests/api/runner_spec.rb | 15
-rw-r--r--  spec/requests/api/runners_spec.rb | 134
-rw-r--r--  spec/requests/api/services_spec.rb | 21
-rw-r--r--  spec/requests/api/settings_spec.rb | 10
-rw-r--r--  spec/requests/api/users_spec.rb | 8
-rw-r--r--  spec/requests/api/v3/builds_spec.rb | 2
-rw-r--r--  spec/requests/api/v3/merge_requests_spec.rb | 2
-rw-r--r--  spec/requests/api/v3/projects_spec.rb | 2
-rw-r--r--  spec/requests/api/v3/settings_spec.rb | 4
-rw-r--r--  spec/requests/git_http_spec.rb | 2
-rw-r--r--  spec/requests/jwt_controller_spec.rb | 2
-rw-r--r--  spec/requests/lfs_http_spec.rb | 47
-rw-r--r--  spec/requests/openid_connect_spec.rb | 13
-rw-r--r--  spec/requests/rack_attack_global_spec.rb | 362
-rw-r--r--  spec/routing/group_routing_spec.rb | 28
-rw-r--r--  spec/routing/routing_spec.rb | 6
-rw-r--r--  spec/rubocop/cop/line_break_after_guard_clauses_spec.rb | 160
-rw-r--r--  spec/rubocop/cop/migration/add_column_with_default_to_large_table_spec.rb | 44
-rw-r--r--  spec/rubocop/cop/migration/update_large_table_spec.rb | 69
-rw-r--r--  spec/serializers/merge_request_entity_spec.rb | 30
-rw-r--r--  spec/serializers/pipeline_serializer_spec.rb | 4
-rw-r--r--  spec/services/base_count_service_spec.rb | 86
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb | 46
-rw-r--r--  spec/services/ci/process_pipeline_service_spec.rb | 24
-rw-r--r--  spec/services/ci/register_job_service_spec.rb | 106
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb | 4
-rw-r--r--  spec/services/clusters/applications/schedule_installation_service_spec.rb | 2
-rw-r--r--  spec/services/clusters/create_service_spec.rb | 75
-rw-r--r--  spec/services/issuable/common_system_notes_service_spec.rb | 43
-rw-r--r--  spec/services/issuable/destroy_service_spec.rb | 38
-rw-r--r--  spec/services/merge_requests/build_service_spec.rb | 33
-rw-r--r--  spec/services/merge_requests/create_from_issue_service_spec.rb | 24
-rw-r--r--  spec/services/merge_requests/merge_service_spec.rb | 22
-rw-r--r--  spec/services/merge_requests/refresh_service_spec.rb | 14
-rw-r--r--  spec/services/merge_requests/update_service_spec.rb | 16
-rw-r--r--  spec/services/milestones/destroy_service_spec.rb | 4
-rw-r--r--  spec/services/milestones/promote_service_spec.rb | 36
-rw-r--r--  spec/services/notification_service_spec.rb | 30
-rw-r--r--  spec/services/projects/count_service_spec.rb | 12
-rw-r--r--  spec/services/projects/fork_service_spec.rb | 353
-rw-r--r--  spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb | 63
-rw-r--r--  spec/services/projects/hashed_storage/migrate_repository_service_spec.rb | 76
-rw-r--r--  spec/services/projects/hashed_storage_migration_service_spec.rb | 66
-rw-r--r--  spec/services/projects/import_service_spec.rb | 83
-rw-r--r--  spec/services/projects/transfer_service_spec.rb | 32
-rw-r--r--  spec/services/projects/update_pages_service_spec.rb | 116
-rw-r--r--  spec/services/projects/update_service_spec.rb | 289
-rw-r--r--  spec/services/search/global_service_spec.rb | 4
-rw-r--r--  spec/services/system_note_service_spec.rb | 40
-rw-r--r--  spec/services/users/keys_count_service_spec.rb | 66
-rw-r--r--  spec/services/web_hook_service_spec.rb | 2
-rw-r--r--  spec/spec_helper.rb | 16
-rw-r--r--  spec/support/capybara.rb | 36
-rw-r--r--  spec/support/controllers/githubish_import_controller_shared_examples.rb | 36
-rw-r--r--  spec/support/fixture_helpers.rb | 1
-rwxr-xr-x  spec/support/generate-seed-repo-rb | 1
-rw-r--r--  spec/support/gitaly.rb | 9
-rw-r--r--  spec/support/google_api/cloud_platform_helpers.rb | 2
-rw-r--r--  spec/support/matchers/be_a_binary_string.rb | 9
-rw-r--r--  spec/support/matchers/have_gitlab_http_status.rb | 6
-rw-r--r--  spec/support/matchers/security_header_matcher.rb | 5
-rw-r--r--  spec/support/prometheus/additional_metrics_shared_examples.rb | 28
-rw-r--r--  spec/support/protected_tags/access_control_ce_shared_examples.rb | 2
-rw-r--r--  spec/support/query_recorder.rb | 37
-rw-r--r--  spec/support/selection_helper.rb | 6
-rw-r--r--  spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb | 240
-rw-r--r--  spec/support/shared_examples/features/protected_branches_access_control_ce.rb | 4
-rw-r--r--  spec/support/shared_examples/throttled_touch.rb | 20
-rw-r--r--  spec/support/stored_repositories.rb | 21
-rw-r--r--  spec/support/stub_configuration.rb | 2
-rw-r--r--  spec/support/stub_gitlab_calls.rb | 6
-rw-r--r--  spec/support/test_env.rb | 5
-rw-r--r--  spec/support/track_untracked_uploads_helpers.rb | 20
-rw-r--r--  spec/tasks/gitlab/backup_rake_spec.rb | 2
-rw-r--r--  spec/tasks/gitlab/cleanup_rake_spec.rb | 67
-rw-r--r--  spec/tasks/gitlab/gitaly_rake_spec.rb | 17
-rw-r--r--  spec/unicorn/unicorn_spec.rb | 22
-rw-r--r--  spec/uploaders/file_uploader_spec.rb | 50
-rw-r--r--  spec/uploaders/job_artifact_uploader_spec.rb | 51
-rw-r--r--  spec/uploaders/legacy_artifact_uploader_spec.rb (renamed from spec/uploaders/artifact_uploader_spec.rb) | 41
-rw-r--r--  spec/uploaders/namespace_file_uploader_spec.rb | 21
-rw-r--r--  spec/views/dashboard/projects/_blank_state_admin_welcome.haml.rb | 15
-rw-r--r--  spec/views/projects/commit/show.html.haml_spec.rb | 22
-rw-r--r--  spec/views/projects/jobs/show.html.haml_spec.rb | 25
-rw-r--r--  spec/views/projects/merge_requests/_commits.html.haml_spec.rb | 4
-rw-r--r--  spec/views/projects/merge_requests/diffs/_diffs.html.haml_spec.rb | 36
-rw-r--r--  spec/views/projects/pipelines_settings/_show.html.haml_spec.rb | 2
-rw-r--r--  spec/workers/authorized_projects_worker_spec.rb | 1
-rw-r--r--  spec/workers/background_migration_worker_spec.rb | 31
-rw-r--r--  spec/workers/concerns/application_worker_spec.rb | 58
-rw-r--r--  spec/workers/concerns/cluster_queue_spec.rb | 6
-rw-r--r--  spec/workers/concerns/cronjob_queue_spec.rb | 6
-rw-r--r--  spec/workers/concerns/dedicated_sidekiq_queue_spec.rb | 20
-rw-r--r--  spec/workers/concerns/gitlab/github_import/notify_upon_death_spec.rb | 49
-rw-r--r--  spec/workers/concerns/gitlab/github_import/object_importer_spec.rb | 74
-rw-r--r--  spec/workers/concerns/gitlab/github_import/queue_spec.rb | 16
-rw-r--r--  spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb | 110
-rw-r--r--  spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb | 77
-rw-r--r--  spec/workers/concerns/pipeline_queue_spec.rb | 6
-rw-r--r--  spec/workers/concerns/repository_check_queue_spec.rb | 6
-rw-r--r--  spec/workers/create_pipeline_worker_spec.rb | 36
-rw-r--r--  spec/workers/every_sidekiq_worker_spec.rb | 35
-rw-r--r--  spec/workers/expire_build_instance_artifacts_worker_spec.rb | 22
-rw-r--r--  spec/workers/gitlab/github_import/advance_stage_worker_spec.rb | 115
-rw-r--r--  spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb | 42
-rw-r--r--  spec/workers/gitlab/github_import/import_issue_worker_spec.rb | 45
-rw-r--r--  spec/workers/gitlab/github_import/import_note_worker_spec.rb | 40
-rw-r--r--  spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb | 51
-rw-r--r--  spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb | 95
-rw-r--r--  spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb | 32
-rw-r--r--  spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb | 30
-rw-r--r--  spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb | 32
-rw-r--r--  spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb | 29
-rw-r--r--  spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb | 32
-rw-r--r--  spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb | 49
-rw-r--r--  spec/workers/pipeline_schedule_worker_spec.rb | 31
-rw-r--r--  spec/workers/post_receive_spec.rb | 12
-rw-r--r--  spec/workers/project_migrate_hashed_storage_worker_spec.rb | 52
-rw-r--r--  spec/workers/reactive_caching_worker_spec.rb | 25
-rw-r--r--  spec/workers/repository_fork_worker_spec.rb | 77
-rw-r--r--  spec/workers/repository_import_worker_spec.rb | 23
-rw-r--r--  spec/workers/stuck_ci_jobs_worker_spec.rb | 10
-rw-r--r--  spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb | 38
-rw-r--r--  spec/workers/update_merge_requests_worker_spec.rb | 12
544 files changed, 22627 insertions, 4800 deletions
diff --git a/spec/bin/storage_check_spec.rb b/spec/bin/storage_check_spec.rb
new file mode 100644
index 00000000000..02f6fcb6e3a
--- /dev/null
+++ b/spec/bin/storage_check_spec.rb
@@ -0,0 +1,13 @@
+require 'spec_helper'
+
+describe 'bin/storage_check' do
+ it 'is executable' do
+ command = %w[bin/storage_check -t unix://the/path/to/a/unix-socket.sock -i 10 -d]
+ expected_output = 'Checking unix://the/path/to/a/unix-socket.sock every 10 seconds'
+
+ output, status = Gitlab::Popen.popen(command, Rails.root.to_s)
+
+ expect(status).to eq(0)
+ expect(output).to include(expected_output)
+ end
+end
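
For readers skimming the diff: the new spec's pattern is to spawn the script under test as a child process and assert on its exit status and combined output. A minimal standalone sketch of that pattern in plain Ruby, using the standard library's Open3 in place of GitLab's internal Gitlab::Popen helper (the command below is a placeholder, not the real bin/storage_check invocation; note GitLab's helper returns the exit code as an Integer, while Open3 returns a Process::Status):

    require 'open3'

    # Spawn a child process and capture stdout+stderr plus its exit status,
    # mirroring the [output, status] pair the spec above destructures.
    command = %w[ruby -e puts(:ok)]
    output, status = Open3.capture2e(*command)

    raise 'script failed'     unless status.success?
    raise 'unexpected output' unless output.include?('ok')
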
diff --git a/spec/controllers/admin/health_check_controller_spec.rb b/spec/controllers/admin/health_check_controller_spec.rb
index 0b8e0c8a065..d15ee0021d9 100644
--- a/spec/controllers/admin/health_check_controller_spec.rb
+++ b/spec/controllers/admin/health_check_controller_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Admin::HealthCheckController, broken_storage: true do
+describe Admin::HealthCheckController do
let(:admin) { create(:admin) }
before do
@@ -17,7 +17,7 @@ describe Admin::HealthCheckController, broken_storage: true do
describe 'POST reset_storage_health' do
it 'resets all storage health information' do
- expect(Gitlab::Git::Storage::CircuitBreaker).to receive(:reset_all!)
+ expect(Gitlab::Git::Storage::FailureInfo).to receive(:reset_all!)
post :reset_storage_health
end
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index b73ca0c2346..fe95d1ef9cd 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -6,6 +6,10 @@ describe ApplicationController do
describe '#check_password_expiration' do
let(:controller) { described_class.new }
+ before do
+ allow(controller).to receive(:session).and_return({})
+ end
+
it 'redirects if the user is over their password expiry' do
user.password_expires_at = Time.new(2002)
@@ -37,14 +41,13 @@ describe ApplicationController do
controller.send(:check_password_expiration)
end
- it 'redirects if the user is over their password expiry and sign-in is disabled' do
- stub_application_setting(password_authentication_enabled: false)
+ it 'does not redirect if the user is over their password expiry but password authentication is disabled for the web interface' do
+ stub_application_setting(password_authentication_enabled_for_web: false)
+ stub_application_setting(password_authentication_enabled_for_git: false)
user.password_expires_at = Time.new(2002)
- expect(user.ldap_user?).to be_falsey
allow(controller).to receive(:current_user).and_return(user)
- expect(controller).to receive(:redirect_to)
- expect(controller).to receive(:new_profile_password_path)
+ expect(controller).not_to receive(:redirect_to)
controller.send(:check_password_expiration)
end
diff --git a/spec/controllers/concerns/issuable_collections_spec.rb b/spec/controllers/concerns/issuable_collections_spec.rb
index cd3bf785d34..d7825364ed5 100644
--- a/spec/controllers/concerns/issuable_collections_spec.rb
+++ b/spec/controllers/concerns/issuable_collections_spec.rb
@@ -12,12 +12,14 @@ describe IssuableCollections do
controller = klass.new
- allow(controller).to receive(:params).and_return(state: 'opened')
+ allow(controller).to receive(:params).and_return(ActionController::Parameters.new(params))
controller
end
describe '#page_count_for_relation' do
+ let(:params) { { state: 'opened' } }
+
it 'returns the number of pages' do
relation = double(:relation, limit_value: 20)
pages = controller.send(:page_count_for_relation, relation, 28)
@@ -25,4 +27,55 @@ describe IssuableCollections do
expect(pages).to eq(2)
end
end
+
+ describe '#filter_params' do
+ let(:params) do
+ {
+ assignee_id: '1',
+ assignee_username: 'user1',
+ author_id: '2',
+ author_username: 'user2',
+ authorized_only: 'true',
+ due_date: '2017-01-01',
+ group_id: '3',
+ iids: '4',
+ label_name: 'foo',
+ milestone_title: 'bar',
+ my_reaction_emoji: 'thumbsup',
+ non_archived: 'true',
+ project_id: '5',
+ scope: 'all',
+ search: 'baz',
+ sort: 'priority',
+ state: 'opened',
+ invalid_param: 'invalid_param'
+ }
+ end
+
+ it 'filters params' do
+ allow(controller).to receive(:cookies).and_return({})
+
+ filtered_params = controller.send(:filter_params)
+
+ expect(filtered_params).to eq({
+ 'assignee_id' => '1',
+ 'assignee_username' => 'user1',
+ 'author_id' => '2',
+ 'author_username' => 'user2',
+ 'authorized_only' => 'true',
+ 'due_date' => '2017-01-01',
+ 'group_id' => '3',
+ 'iids' => '4',
+ 'label_name' => 'foo',
+ 'milestone_title' => 'bar',
+ 'my_reaction_emoji' => 'thumbsup',
+ 'non_archived' => 'true',
+ 'project_id' => '5',
+ 'scope' => 'all',
+ 'search' => 'baz',
+ 'sort' => 'priority',
+ 'state' => 'opened'
+ })
+ end
+ end
end
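
The string-keyed expectation above follows from how Rails strong parameters behave: ActionController::Parameters stores keys as strings, and #permit keeps only the whitelisted ones, which is why invalid_param is absent from the filtered result. A small sketch of that behavior, assuming the actionpack gem is available:

    require 'action_controller'

    # Keys go in as symbols but are stored as strings; #permit drops
    # anything not named in the whitelist (here, :invalid_param).
    params   = ActionController::Parameters.new(state: 'opened', invalid_param: 'x')
    filtered = params.permit(:state)

    puts filtered.to_h  # => {"state"=>"opened"}
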
diff --git a/spec/controllers/dashboard/todos_controller_spec.rb b/spec/controllers/dashboard/todos_controller_spec.rb
index d862e1447e3..f9faa4fa59a 100644
--- a/spec/controllers/dashboard/todos_controller_spec.rb
+++ b/spec/controllers/dashboard/todos_controller_spec.rb
@@ -44,11 +44,11 @@ describe Dashboard::TodosController do
context 'when using pagination' do
let(:last_page) { user.todos.page.total_pages }
- let!(:issues) { create_list(:issue, 2, project: project, assignees: [user]) }
+ let!(:issues) { create_list(:issue, 3, project: project, assignees: [user]) }
before do
issues.each { |issue| todo_service.new_issue(issue, user) }
- allow(Kaminari.config).to receive(:default_per_page).and_return(1)
+ allow(Kaminari.config).to receive(:default_per_page).and_return(2)
end
it 'redirects to last_page if page number is larger than number of pages' do
diff --git a/spec/controllers/groups/children_controller_spec.rb b/spec/controllers/groups/children_controller_spec.rb
index 4262d474e59..cb1b460fc0e 100644
--- a/spec/controllers/groups/children_controller_spec.rb
+++ b/spec/controllers/groups/children_controller_spec.rb
@@ -280,6 +280,17 @@ describe Groups::ChildrenController do
expect(assigns(:children)).to contain_exactly(other_subgroup, *next_page_projects.take(per_page - 1))
end
+
+ context 'with a mixed first page' do
+ let!(:first_page_subgroups) { [create(:group, :public, parent: group)] }
+ let!(:first_page_projects) { create_list(:project, per_page, :public, namespace: group) }
+
+ it 'correctly calculates the counts' do
+ get :index, group_id: group.to_param, sort: 'id_asc', page: 2, format: :json
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
end
end
end
diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb
new file mode 100644
index 00000000000..67a11e56e94
--- /dev/null
+++ b/spec/controllers/groups/uploads_controller_spec.rb
@@ -0,0 +1,10 @@
+require 'spec_helper'
+
+describe Groups::UploadsController do
+ let(:model) { create(:group, :public) }
+ let(:params) do
+ { group_id: model }
+ end
+
+ it_behaves_like 'handle uploads'
+end
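
This one-line spec body works because 'handle uploads' is an RSpec shared-example group (defined in spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb, per the diffstat) that reads model and params from the including context. A self-contained sketch of the mechanism, with a made-up shared group standing in for the real one:

    require 'rspec/autorun'

    # The shared group declares examples once; each including spec
    # supplies the `let` definitions those examples depend on.
    RSpec.shared_examples 'handle uploads' do
      it 'sees the params defined by the host group' do
        expect(params).to include(:group_id)
      end
    end

    RSpec.describe 'Groups::UploadsController (illustration)' do
      let(:params) { { group_id: 42 } }

      it_behaves_like 'handle uploads'
    end
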
diff --git a/spec/controllers/health_controller_spec.rb b/spec/controllers/health_controller_spec.rb
index 9e9cf4f2c1f..95946def5f9 100644
--- a/spec/controllers/health_controller_spec.rb
+++ b/spec/controllers/health_controller_spec.rb
@@ -14,6 +14,48 @@ describe HealthController do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end
+ describe '#storage_check' do
+ before do
+ allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
+ end
+
+ subject { post :storage_check }
+
+ it 'checks all the configured storages' do
+ expect(Gitlab::Git::Storage::Checker).to receive(:check_all).and_call_original
+
+ subject
+ end
+
+ it 'returns the check interval' do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'true')
+ stub_application_setting(circuitbreaker_check_interval: 10)
+
+ subject
+
+ expect(json_response['check_interval']).to eq(10)
+ end
+
+ context 'with failing storages', :broken_storage do
+ before do
+ stub_storage_settings(
+ broken: { path: 'tmp/tests/non-existent-repositories' }
+ )
+ end
+
+ it 'includes the failure information' do
+ subject
+
+ expected_results = [
+ { 'storage' => 'broken', 'success' => false },
+ { 'storage' => 'default', 'success' => true }
+ ]
+
+ expect(json_response['results']).to eq(expected_results)
+ end
+ end
+ end
+
describe '#readiness' do
shared_context 'endpoint responding with readiness data' do
let(:request_params) { {} }
diff --git a/spec/controllers/import/github_controller_spec.rb b/spec/controllers/import/github_controller_spec.rb
index 45c3fa075ef..9bbd97ec305 100644
--- a/spec/controllers/import/github_controller_spec.rb
+++ b/spec/controllers/import/github_controller_spec.rb
@@ -21,9 +21,9 @@ describe Import::GithubController do
describe "GET callback" do
it "updates access token" do
token = "asdasd12345"
- allow_any_instance_of(Gitlab::GithubImport::Client)
+ allow_any_instance_of(Gitlab::LegacyGithubImport::Client)
.to receive(:get_token).and_return(token)
- allow_any_instance_of(Gitlab::GithubImport::Client)
+ allow_any_instance_of(Gitlab::LegacyGithubImport::Client)
.to receive(:github_options).and_return({})
stub_omniauth_provider('github')
diff --git a/spec/controllers/passwords_controller_spec.rb b/spec/controllers/passwords_controller_spec.rb
index 8778bff1190..4d31cfedbd2 100644
--- a/spec/controllers/passwords_controller_spec.rb
+++ b/spec/controllers/passwords_controller_spec.rb
@@ -1,18 +1,20 @@
require 'spec_helper'
describe PasswordsController do
- describe '#prevent_ldap_reset' do
+ describe '#check_password_authentication_available' do
before do
@request.env["devise.mapping"] = Devise.mappings[:user]
end
- context 'when password authentication is disabled' do
- it 'allows password reset' do
- stub_application_setting(password_authentication_enabled: false)
+ context 'when password authentication is disabled for the web interface and Git' do
+ it 'prevents a password reset' do
+ stub_application_setting(password_authentication_enabled_for_web: false)
+ stub_application_setting(password_authentication_enabled_for_git: false)
post :create
expect(response).to have_gitlab_http_status(302)
+ expect(flash[:alert]).to eq 'Password authentication is unavailable.'
end
end
@@ -22,7 +24,7 @@ describe PasswordsController do
it 'prevents a password reset' do
post :create, user: { email: user.email }
- expect(flash[:alert]).to eq('Cannot reset password for LDAP user.')
+ expect(flash[:alert]).to eq 'Password authentication is unavailable.'
end
end
end
diff --git a/spec/controllers/projects/boards_controller_spec.rb b/spec/controllers/projects/boards_controller_spec.rb
index 84cde33d944..d6ccb92c54b 100644
--- a/spec/controllers/projects/boards_controller_spec.rb
+++ b/spec/controllers/projects/boards_controller_spec.rb
@@ -14,6 +14,12 @@ describe Projects::BoardsController do
expect { list_boards }.to change(project.boards, :count).by(1)
end
+ it 'sets boards_endpoint instance variable to a boards path' do
+ list_boards
+
+ expect(assigns(:boards_endpoint)).to eq project_boards_path(project)
+ end
+
context 'when format is HTML' do
it 'renders template' do
list_boards
@@ -59,6 +65,12 @@ describe Projects::BoardsController do
describe 'GET show' do
let!(:board) { create(:board, project: project) }
+ it 'sets boards_endpoint instance variable to a boards path' do
+ read_board board: board
+
+ expect(assigns(:boards_endpoint)).to eq project_boards_path(project)
+ end
+
context 'when format is HTML' do
it 'renders template' do
read_board board: board
diff --git a/spec/controllers/projects/branches_controller_spec.rb b/spec/controllers/projects/branches_controller_spec.rb
index 973d6fed288..d731200f70f 100644
--- a/spec/controllers/projects/branches_controller_spec.rb
+++ b/spec/controllers/projects/branches_controller_spec.rb
@@ -113,22 +113,38 @@ describe Projects::BranchesController do
expect(response).to redirect_to project_tree_path(project, branch)
end
- it 'redirects to autodeploy setup page' do
- result = { status: :success, branch: double(name: branch) }
+ shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
+ it 'redirects to autodeploy setup page' do
+ result = { status: :success, branch: double(name: branch) }
+
+ expect_any_instance_of(CreateBranchService).to receive(:execute).and_return(result)
+ expect(SystemNoteService).to receive(:new_issue_branch).and_return(true)
+
+ post :create,
+ namespace_id: project.namespace.to_param,
+ project_id: project.to_param,
+ branch_name: branch,
+ issue_iid: issue.iid
+
+ expect(response.location).to include(project_new_blob_path(project, branch))
+ expect(response).to have_gitlab_http_status(302)
+ end
+ end
- project.services << build(:kubernetes_service)
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ before do
+ project.services << build(:kubernetes_service)
+ end
- expect_any_instance_of(CreateBranchService).to receive(:execute).and_return(result)
- expect(SystemNoteService).to receive(:new_issue_branch).and_return(true)
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
+ end
- post :create,
- namespace_id: project.namespace.to_param,
- project_id: project.to_param,
- branch_name: branch,
- issue_iid: issue.iid
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ before do
+ create(:cluster, :provided_by_gcp, projects: [project])
+ end
- expect(response.location).to include(project_new_blob_path(project, branch))
- expect(response).to have_gitlab_http_status(302)
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
end
diff --git a/spec/controllers/projects/clusters/gcp_controller_spec.rb b/spec/controllers/projects/clusters/gcp_controller_spec.rb
new file mode 100644
index 00000000000..ee7928beb7e
--- /dev/null
+++ b/spec/controllers/projects/clusters/gcp_controller_spec.rb
@@ -0,0 +1,185 @@
+require 'spec_helper'
+
+describe Projects::Clusters::GcpController do
+ include AccessMatchersForController
+ include GoogleApi::CloudPlatformHelpers
+
+ set(:project) { create(:project) }
+
+ describe 'GET login' do
+ describe 'functionality' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ context 'when omniauth has been configured' do
+ let(:key) { 'secret-key' }
+
+ let(:session_key_for_redirect_uri) do
+ GoogleApi::CloudPlatform::Client.session_key_for_redirect_uri(key)
+ end
+
+ before do
+ allow(SecureRandom).to receive(:hex).and_return(key)
+ end
+
+ it 'has authorize_url' do
+ go
+
+ expect(assigns(:authorize_url)).to include(key)
+ expect(session[session_key_for_redirect_uri]).to eq(gcp_new_project_clusters_path(project))
+ end
+ end
+
+ context 'when omniauth has not configured' do
+ before do
+ stub_omniauth_setting(providers: [])
+ end
+
+ it 'does not have authorize_url' do
+ go
+
+ expect(assigns(:authorize_url)).to be_nil
+ end
+ end
+ end
+
+ describe 'security' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(project) }
+ it { expect { go }.to be_allowed_for(:master).of(project) }
+ it { expect { go }.to be_denied_for(:developer).of(project) }
+ it { expect { go }.to be_denied_for(:reporter).of(project) }
+ it { expect { go }.to be_denied_for(:guest).of(project) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+
+ def go
+ get :login, namespace_id: project.namespace, project_id: project
+ end
+ end
+
+ describe 'GET new' do
+ describe 'functionality' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ context 'when access token is valid' do
+ before do
+ stub_google_api_validate_token
+ end
+
+ it 'has new object' do
+ go
+
+ expect(assigns(:cluster)).to be_an_instance_of(Clusters::Cluster)
+ end
+ end
+
+ context 'when access token is expired' do
+ before do
+ stub_google_api_expired_token
+ end
+
+ it { expect(go).to redirect_to(gcp_login_project_clusters_path(project)) }
+ end
+
+ context 'when access token is not stored in session' do
+ it { expect(go).to redirect_to(gcp_login_project_clusters_path(project)) }
+ end
+ end
+
+ describe 'security' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(project) }
+ it { expect { go }.to be_allowed_for(:master).of(project) }
+ it { expect { go }.to be_denied_for(:developer).of(project) }
+ it { expect { go }.to be_denied_for(:reporter).of(project) }
+ it { expect { go }.to be_denied_for(:guest).of(project) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+
+ def go
+ get :new, namespace_id: project.namespace, project_id: project
+ end
+ end
+
+ describe 'POST create' do
+ let(:params) do
+ {
+ cluster: {
+ name: 'new-cluster',
+ provider_gcp_attributes: {
+ gcp_project_id: '111'
+ }
+ }
+ }
+ end
+
+ describe 'functionality' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ context 'when access token is valid' do
+ before do
+ stub_google_api_validate_token
+ end
+
+ context 'when creates a cluster on gke' do
+ it 'creates a new cluster' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+ expect { go }.to change { Clusters::Cluster.count }
+ .and change { Clusters::Providers::Gcp.count }
+ expect(response).to redirect_to(project_cluster_path(project, project.clusters.first))
+ expect(project.clusters.first).to be_gcp
+ expect(project.clusters.first).to be_kubernetes
+ end
+ end
+ end
+
+ context 'when access token is expired' do
+ before do
+ stub_google_api_expired_token
+ end
+
+ it 'redirects to login page' do
+ expect(go).to redirect_to(gcp_login_project_clusters_path(project))
+ end
+ end
+
+ context 'when access token is not stored in session' do
+ it 'redirects to login page' do
+ expect(go).to redirect_to(gcp_login_project_clusters_path(project))
+ end
+ end
+ end
+
+ describe 'security' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(project) }
+ it { expect { go }.to be_allowed_for(:master).of(project) }
+ it { expect { go }.to be_denied_for(:developer).of(project) }
+ it { expect { go }.to be_denied_for(:reporter).of(project) }
+ it { expect { go }.to be_denied_for(:guest).of(project) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+
+ def go
+ post :create, params.merge(namespace_id: project.namespace, project_id: project)
+ end
+ end
+end
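
The repeated security blocks rely on block expectations (expect { go }.to be_allowed_for(...)), supplied by GitLab's internal AccessMatchersForController helper. A deliberately simplified, self-contained sketch of how such a block-capable matcher can be built with the RSpec matcher DSL; unlike the real helper, it does not create or sign in users, and the block here just returns an HTTP status code:

    require 'rspec/autorun'

    RSpec::Matchers.define :be_allowed_for do |role|
      # Required so the matcher accepts `expect { ... }` instead of a value.
      supports_block_expectations

      match do |request|
        code = request.call
        ![401, 403, 404].include?(code)
      end

      failure_message do
        "expected the request to be allowed for #{role}"
      end
    end

    RSpec.describe 'access matcher sketch' do
      it 'treats a 200 response as allowed' do
        expect { 200 }.to be_allowed_for(:admin)
      end
    end
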
diff --git a/spec/controllers/projects/clusters/user_controller_spec.rb b/spec/controllers/projects/clusters/user_controller_spec.rb
new file mode 100644
index 00000000000..913976d187f
--- /dev/null
+++ b/spec/controllers/projects/clusters/user_controller_spec.rb
@@ -0,0 +1,89 @@
+require 'spec_helper'
+
+describe Projects::Clusters::UserController do
+ include AccessMatchersForController
+
+ set(:project) { create(:project) }
+
+ describe 'GET new' do
+ describe 'functionality' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ it 'has new object' do
+ go
+
+ expect(assigns(:cluster)).to be_an_instance_of(Clusters::Cluster)
+ end
+ end
+
+ describe 'security' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(project) }
+ it { expect { go }.to be_allowed_for(:master).of(project) }
+ it { expect { go }.to be_denied_for(:developer).of(project) }
+ it { expect { go }.to be_denied_for(:reporter).of(project) }
+ it { expect { go }.to be_denied_for(:guest).of(project) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+
+ def go
+ get :new, namespace_id: project.namespace, project_id: project
+ end
+ end
+
+ describe 'POST create' do
+ let(:params) do
+ {
+ cluster: {
+ name: 'new-cluster',
+ platform_kubernetes_attributes: {
+ api_url: 'http://my-url',
+ token: 'test',
+ namespace: 'aaa'
+ }
+ }
+ }
+ end
+
+ describe 'functionality' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ context 'when creates a cluster' do
+ it 'creates a new cluster' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+ expect { go }.to change { Clusters::Cluster.count }
+ .and change { Clusters::Platforms::Kubernetes.count }
+ expect(response).to redirect_to(project_cluster_path(project, project.clusters.first))
+ expect(project.clusters.first).to be_user
+ expect(project.clusters.first).to be_kubernetes
+ end
+ end
+ end
+
+ describe 'security' do
+ it { expect { go }.to be_allowed_for(:admin) }
+ it { expect { go }.to be_allowed_for(:owner).of(project) }
+ it { expect { go }.to be_allowed_for(:master).of(project) }
+ it { expect { go }.to be_denied_for(:developer).of(project) }
+ it { expect { go }.to be_denied_for(:reporter).of(project) }
+ it { expect { go }.to be_denied_for(:guest).of(project) }
+ it { expect { go }.to be_denied_for(:user) }
+ it { expect { go }.to be_denied_for(:external) }
+ end
+
+ def go
+ post :create, params.merge(namespace_id: project.namespace, project_id: project)
+ end
+ end
+end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index ca2bcb2b5ae..280b7e4d8b9 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -4,6 +4,8 @@ describe Projects::ClustersController do
include AccessMatchersForController
include GoogleApi::CloudPlatformHelpers
+ set(:project) { create(:project) }
+
describe 'GET index' do
describe 'functionality' do
let(:user) { create(:user) }
@@ -13,213 +15,78 @@ describe Projects::ClustersController do
sign_in(user)
end
- context 'when project has a cluster' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:project) { cluster.project }
-
- it { expect(go).to redirect_to(project_cluster_path(project, project.cluster)) }
- end
-
- context 'when project does not have a cluster' do
+ context 'when project has one or more clusters' do
let(:project) { create(:project) }
-
- it { expect(go).to redirect_to(new_project_cluster_path(project)) }
- end
- end
-
- describe 'security' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:project) { cluster.project }
-
- it { expect { go }.to be_allowed_for(:admin) }
- it { expect { go }.to be_allowed_for(:owner).of(project) }
- it { expect { go }.to be_allowed_for(:master).of(project) }
- it { expect { go }.to be_denied_for(:developer).of(project) }
- it { expect { go }.to be_denied_for(:reporter).of(project) }
- it { expect { go }.to be_denied_for(:guest).of(project) }
- it { expect { go }.to be_denied_for(:user) }
- it { expect { go }.to be_denied_for(:external) }
- end
-
- def go
- get :index, namespace_id: project.namespace.to_param, project_id: project
- end
- end
-
- describe 'GET login' do
- let(:project) { create(:project) }
-
- describe 'functionality' do
- let(:user) { create(:user) }
-
- before do
- project.add_master(user)
- sign_in(user)
- end
-
- context 'when omniauth has been configured' do
- let(:key) { 'secere-key' }
-
- let(:session_key_for_redirect_uri) do
- GoogleApi::CloudPlatform::Client.session_key_for_redirect_uri(key)
- end
-
- before do
- allow(SecureRandom).to receive(:hex).and_return(key)
- end
-
- it 'has authorize_url' do
+ let!(:enabled_cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
+ let!(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, projects: [project]) }
+ it 'lists available clusters' do
go
- expect(assigns(:authorize_url)).to include(key)
- expect(session[session_key_for_redirect_uri]).to eq(providers_gcp_new_project_clusters_url(project))
- end
- end
-
- context 'when omniauth has not configured' do
- before do
- stub_omniauth_setting(providers: [])
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index)
+ expect(assigns(:clusters)).to match_array([enabled_cluster, disabled_cluster])
end
- it 'does not have authorize_url' do
+ it 'assigns counters to correct values' do
go
- expect(assigns(:authorize_url)).to be_nil
+ expect(assigns(:active_count)).to eq(1)
+ expect(assigns(:inactive_count)).to eq(1)
end
- end
- end
-
- describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
- it { expect { go }.to be_allowed_for(:owner).of(project) }
- it { expect { go }.to be_allowed_for(:master).of(project) }
- it { expect { go }.to be_denied_for(:developer).of(project) }
- it { expect { go }.to be_denied_for(:reporter).of(project) }
- it { expect { go }.to be_denied_for(:guest).of(project) }
- it { expect { go }.to be_denied_for(:user) }
- it { expect { go }.to be_denied_for(:external) }
- end
-
- def go
- get :login, namespace_id: project.namespace, project_id: project
- end
- end
-
- shared_examples 'requires to login' do
- it 'redirects to create a cluster' do
- subject
-
- expect(response).to redirect_to(login_project_clusters_path(project))
- end
- end
-
- describe 'GET new_gcp' do
- let(:project) { create(:project) }
- describe 'functionality' do
- let(:user) { create(:user) }
+ context 'when page is specified' do
+ let(:last_page) { project.clusters.page.total_pages }
- before do
- project.add_master(user)
- sign_in(user)
- end
+ before do
+ allow(Clusters::Cluster).to receive(:paginates_per).and_return(1)
+ create_list(:cluster, 2, :provided_by_gcp, projects: [project])
+ get :index, namespace_id: project.namespace, project_id: project, page: last_page
+ end
- context 'when access token is valid' do
- before do
- stub_google_api_validate_token
+ it 'redirects to the page' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(assigns(:clusters).current_page).to eq(last_page)
+ end
end
- it 'has new object' do
- go
-
- expect(assigns(:cluster)).to be_an_instance_of(Clusters::Cluster)
+ context 'when only enabled clusters are requested' do
+ it 'returns only enabled clusters' do
+ get :index, namespace_id: project.namespace, project_id: project, scope: 'active'
+ expect(assigns(:clusters)).to all(have_attributes(enabled: true))
+ end
end
- end
- context 'when access token is expired' do
- before do
- stub_google_api_expired_token
+ context 'when only disabled clusters are requested' do
+ it 'returns only disabled clusters' do
+ get :index, namespace_id: project.namespace, project_id: project, scope: 'inactive'
+ expect(assigns(:clusters)).to all(have_attributes(enabled: false))
+ end
end
-
- it { expect(go).to redirect_to(login_project_clusters_path(project)) }
- end
-
- context 'when access token is not stored in session' do
- it { expect(go).to redirect_to(login_project_clusters_path(project)) }
- end
- end
-
- describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
- it { expect { go }.to be_allowed_for(:owner).of(project) }
- it { expect { go }.to be_allowed_for(:master).of(project) }
- it { expect { go }.to be_denied_for(:developer).of(project) }
- it { expect { go }.to be_denied_for(:reporter).of(project) }
- it { expect { go }.to be_denied_for(:guest).of(project) }
- it { expect { go }.to be_denied_for(:user) }
- it { expect { go }.to be_denied_for(:external) }
- end
-
- def go
- get :new_gcp, namespace_id: project.namespace, project_id: project
- end
- end
-
- describe 'POST create' do
- let(:project) { create(:project) }
-
- let(:params) do
- {
- cluster: {
- name: 'new-cluster',
- provider_type: :gcp,
- provider_gcp_attributes: {
- gcp_project_id: '111'
- }
- }
- }
- end
-
- describe 'functionality' do
- let(:user) { create(:user) }
-
- before do
- project.add_master(user)
- sign_in(user)
end
- context 'when access token is valid' do
- before do
- stub_google_api_validate_token
- end
+ context 'when project does not have a cluster' do
+ let(:project) { create(:project) }
- context 'when creates a cluster on gke' do
- it 'creates a new cluster' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
- expect { go }.to change { Clusters::Cluster.count }
- expect(response).to redirect_to(project_cluster_path(project, project.cluster))
- end
- end
- end
+ it 'returns an empty state page' do
+ go
- context 'when access token is expired' do
- before do
- stub_google_api_expired_token
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index, partial: :empty_state)
+ expect(assigns(:clusters)).to eq([])
end
- it 'redirects to login page' do
- expect(go).to redirect_to(login_project_clusters_path(project))
- end
- end
+ it 'assigns counters to zero' do
+ go
- context 'when access token is not stored in session' do
- it 'redirects to login page' do
- expect(go).to redirect_to(login_project_clusters_path(project))
+ expect(assigns(:active_count)).to eq(0)
+ expect(assigns(:inactive_count)).to eq(0)
end
end
end
describe 'security' do
+ let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
+
it { expect { go }.to be_allowed_for(:admin) }
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:master).of(project) }
@@ -231,13 +98,12 @@ describe Projects::ClustersController do
end
def go
- post :create, params.merge(namespace_id: project.namespace, project_id: project)
+ get :index, namespace_id: project.namespace.to_param, project_id: project
end
end
describe 'GET status' do
- let(:cluster) { create(:cluster, :project, :providing_by_gcp) }
- let(:project) { cluster.project }
+ let(:cluster) { create(:cluster, :providing_by_gcp, projects: [project]) }
describe 'functionality' do
let(:user) { create(:user) }
@@ -275,8 +141,7 @@ describe Projects::ClustersController do
end
describe 'GET show' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:project) { cluster.project }
+ let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
describe 'functionality' do
let(:user) { create(:user) }
@@ -313,10 +178,8 @@ describe Projects::ClustersController do
end
describe 'PUT update' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:project) { cluster.project }
-
- describe 'functionality' do
+ context 'when cluster is provided by GCP' do
+ let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
let(:user) { create(:user) }
before do
@@ -324,10 +187,16 @@ describe Projects::ClustersController do
sign_in(user)
end
- context 'when update enabled' do
+ context 'when changing parameters' do
let(:params) do
{
- cluster: { enabled: false }
+ cluster: {
+ enabled: false,
+ name: 'my-new-cluster-name',
+ platform_kubernetes_attributes: {
+ namespace: 'my-namespace'
+ }
+ }
}
end
@@ -335,13 +204,19 @@ describe Projects::ClustersController do
go
cluster.reload
- expect(response).to redirect_to(project_cluster_path(project, project.cluster))
+ expect(response).to redirect_to(project_cluster_path(project, cluster))
expect(flash[:notice]).to eq('Cluster was successfully updated.')
expect(cluster.enabled).to be_falsey
end
+ it "does not change cluster name" do
+ go
+
+ expect(cluster.name).to eq('test-cluster')
+ end
+
context 'when cluster is being created' do
- let(:cluster) { create(:cluster, :project, :providing_by_gcp) }
+ let(:cluster) { create(:cluster, :providing_by_gcp, projects: [project]) }
it "rejects changes" do
go
@@ -354,11 +229,95 @@ describe Projects::ClustersController do
end
end
+ context 'when cluster is provided by user' do
+ let(:cluster) { create(:cluster, :provided_by_user, projects: [project]) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ context 'when format is json' do
+ context 'when changing parameters' do
+ context 'when valid parameters are used' do
+ let(:params) do
+ {
+ cluster: {
+ enabled: false,
+ name: 'my-new-cluster-name',
+ platform_kubernetes_attributes: {
+ namespace: 'my-namespace'
+ }
+ }
+ }
+ end
+
+ it "updates and redirects back to show page" do
+ go_json
+
+ cluster.reload
+ expect(response).to have_http_status(:no_content)
+ expect(cluster.enabled).to be_falsey
+ expect(cluster.name).to eq('my-new-cluster-name')
+ expect(cluster.platform_kubernetes.namespace).to eq('my-namespace')
+ end
+ end
+
+ context 'when invalid parameters are used' do
+ let(:params) do
+ {
+ cluster: {
+ enabled: false,
+ platform_kubernetes_attributes: {
+ namespace: 'my invalid namespace #@'
+ }
+ }
+ }
+ end
+
+ it "rejects changes" do
+ go_json
+
+ expect(response).to have_http_status(:bad_request)
+ end
+ end
+ end
+ end
+
+ context 'when format is html' do
+ context 'when update enabled' do
+ let(:params) do
+ {
+ cluster: {
+ enabled: false,
+ name: 'my-new-cluster-name',
+ platform_kubernetes_attributes: {
+ namespace: 'my-namespace'
+ }
+ }
+ }
+ end
+
+ it "updates and redirects back to show page" do
+ go
+
+ cluster.reload
+ expect(response).to redirect_to(project_cluster_path(project, cluster))
+ expect(flash[:notice]).to eq('Cluster was successfully updated.')
+ expect(cluster.enabled).to be_falsey
+ expect(cluster.name).to eq('my-new-cluster-name')
+ expect(cluster.platform_kubernetes.namespace).to eq('my-namespace')
+ end
+ end
+ end
+ end
+
describe 'security' do
+ set(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
+
let(:params) do
- {
- cluster: { enabled: false }
- }
+ { cluster: { enabled: false } }
end
it { expect { go }.to be_allowed_for(:admin) }
@@ -376,12 +335,16 @@ describe Projects::ClustersController do
project_id: project,
id: cluster)
end
- end
- describe 'delete update' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:project) { cluster.project }
+ def go_json
+ put :update, params.merge(namespace_id: project.namespace,
+ project_id: project,
+ id: cluster,
+ format: :json)
+ end
+ end
+ describe 'DELETE destroy' do
describe 'functionality' do
let(:user) { create(:user) }
@@ -390,31 +353,37 @@ describe Projects::ClustersController do
sign_in(user)
end
- it "destroys and redirects back to clusters list" do
- expect { go }
- .to change { Clusters::Cluster.count }.by(-1)
- .and change { Clusters::Platforms::Kubernetes.count }.by(-1)
- .and change { Clusters::Providers::Gcp.count }.by(-1)
+ context 'when cluster is provided by GCP' do
+ context 'when cluster is created' do
+ let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
- expect(response).to redirect_to(project_clusters_path(project))
- expect(flash[:notice]).to eq('Cluster integration was successfully removed.')
- end
+ it "destroys and redirects back to clusters list" do
+ expect { go }
+ .to change { Clusters::Cluster.count }.by(-1)
+ .and change { Clusters::Platforms::Kubernetes.count }.by(-1)
+ .and change { Clusters::Providers::Gcp.count }.by(-1)
- context 'when cluster is being created' do
- let(:cluster) { create(:cluster, :project, :providing_by_gcp) }
+ expect(response).to redirect_to(project_clusters_path(project))
+ expect(flash[:notice]).to eq('Cluster integration was successfully removed.')
+ end
+ end
- it "destroys and redirects back to clusters list" do
- expect { go }
- .to change { Clusters::Cluster.count }.by(-1)
- .and change { Clusters::Providers::Gcp.count }.by(-1)
+ context 'when cluster is being created' do
+ let!(:cluster) { create(:cluster, :providing_by_gcp, projects: [project]) }
- expect(response).to redirect_to(project_clusters_path(project))
- expect(flash[:notice]).to eq('Cluster integration was successfully removed.')
+ it "destroys and redirects back to clusters list" do
+ expect { go }
+ .to change { Clusters::Cluster.count }.by(-1)
+ .and change { Clusters::Providers::Gcp.count }.by(-1)
+
+ expect(response).to redirect_to(project_clusters_path(project))
+ expect(flash[:notice]).to eq('Cluster integration was successfully removed.')
+ end
end
end
- context 'when provider is user' do
- let(:cluster) { create(:cluster, :project, :provided_by_user) }
+ context 'when cluster is provided by user' do
+ let!(:cluster) { create(:cluster, :provided_by_user, projects: [project]) }
it "destroys and redirects back to clusters list" do
expect { go }
@@ -429,6 +398,8 @@ describe Projects::ClustersController do
end
describe 'security' do
+ set(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
+
it { expect { go }.to be_allowed_for(:admin) }
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:master).of(project) }
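
Editor's note: the rewritten index specs exercise pagination, a `scope` parameter ('active'/'inactive'), and the `@active_count`/`@inactive_count` assigns. The controller itself is not part of this diff; a hypothetical sketch of the action those expectations imply, assuming Clusters::Cluster exposes `enabled`/`disabled` scopes and Kaminari pagination (as the `paginates_per` stub above suggests):

    # Hypothetical controller action consistent with the specs above.
    def index
      clusters = project.clusters
      @active_count = clusters.enabled.count
      @inactive_count = clusters.disabled.count

      @clusters = case params[:scope]
                  when 'active'   then clusters.enabled
                  when 'inactive' then clusters.disabled
                  else clusters
                  end.page(params[:page])
    end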
diff --git a/spec/controllers/projects/commit_controller_spec.rb b/spec/controllers/projects/commit_controller_spec.rb
index 5dc27e2bbba..694c64ae1ad 100644
--- a/spec/controllers/projects/commit_controller_spec.rb
+++ b/spec/controllers/projects/commit_controller_spec.rb
@@ -1,15 +1,15 @@
require 'spec_helper'
describe Projects::CommitController do
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
+ set(:project) { create(:project, :repository) }
+ set(:user) { create(:user) }
let(:commit) { project.commit("master") }
let(:master_pickable_sha) { '7d3b0f7cff5f37573aea97cebfd5692ea1689924' }
let(:master_pickable_commit) { project.commit(master_pickable_sha) }
before do
sign_in(user)
- project.team << [user, :master]
+ project.add_master(user)
end
describe 'GET show' do
@@ -132,6 +132,22 @@ describe Projects::CommitController do
expect(response).to be_success
end
end
+
+ context 'in the context of a merge_request' do
+ let(:merge_request) { create(:merge_request, source_project: project) }
+ let(:commit) { merge_request.commits.first }
+
+ it 'prepares diff notes in the context of the merge request' do
+ go(id: commit.id, merge_request_iid: merge_request.iid)
+
+ expect(assigns(:new_diff_note_attrs)).to eq({
+ noteable_type: 'MergeRequest',
+ noteable_id: merge_request.id,
+ commit_id: commit.id
+ })
+ expect(response).to be_ok
+ end
+ end
end
describe 'GET branches' do
@@ -323,7 +339,7 @@ describe Projects::CommitController do
context 'when the commit does not exist' do
before do
- diff_for_path(id: commit.id.succ, old_path: existing_path, new_path: existing_path)
+ diff_for_path(id: commit.id.reverse, old_path: existing_path, new_path: existing_path)
end
it 'returns a 404' do
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index f9688949a19..7490f8fefce 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -371,8 +371,10 @@ describe Projects::JobsController do
end
describe 'POST erase' do
+ let(:role) { :master }
+
before do
- project.add_developer(user)
+ project.team << [user, role]
sign_in(user)
post_erase
@@ -404,6 +406,27 @@ describe Projects::JobsController do
end
end
+ context 'when user is developer' do
+ let(:role) { :developer }
+ let(:job) { create(:ci_build, :erasable, :trace, pipeline: pipeline, user: triggered_by) }
+
+ context 'when triggered by same user' do
+ let(:triggered_by) { user }
+
+ it 'has successful status' do
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ end
+
+ context 'when triggered by different user' do
+ let(:triggered_by) { create(:user) }
+
+ it 'does not have successful status' do
+ expect(response).not_to have_gitlab_http_status(:found)
+ end
+ end
+ end
+
def post_erase
post :erase, namespace_id: project.namespace,
project_id: project,
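
Editor's note: the new contexts pin down an asymmetric rule — a master can erase any job, a developer only jobs they triggered themselves. Reduced to a plain-Ruby predicate (names are illustrative; the real authorization lives in GitLab's policy layer, which this diff does not touch):

    # Illustrative predicate for the erase rule exercised above.
    def can_erase_job?(role, current_user, job_owner)
      return true if role == :master
      role == :developer && current_user == job_owner
    end

    can_erase_job?(:developer, 'alice', 'alice') # => true
    can_erase_job?(:developer, 'alice', 'bob')   # => false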
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index 18a70bec103..ba97ccfbbd4 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -100,7 +100,8 @@ describe Projects::MergeRequests::DiffsController do
expect(assigns(:diff_notes_disabled)).to be_falsey
expect(assigns(:new_diff_note_attrs)).to eq(noteable_type: 'MergeRequest',
- noteable_id: merge_request.id)
+ noteable_id: merge_request.id,
+ commit_id: nil)
end
it 'only renders the diffs for the path given' do
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index bfdad85c082..51d5d6a52b3 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -324,12 +324,12 @@ describe Projects::MergeRequestsController do
end
context 'when the pipeline succeeds is passed' do
- def merge_when_pipeline_succeeds
- post :merge, base_params.merge(sha: merge_request.diff_head_sha, merge_when_pipeline_succeeds: '1')
+ let!(:head_pipeline) do
+ create(:ci_empty_pipeline, project: project, sha: merge_request.diff_head_sha, ref: merge_request.source_branch, head_pipeline_of: merge_request)
end
- before do
- create(:ci_empty_pipeline, project: project, sha: merge_request.diff_head_sha, ref: merge_request.source_branch, head_pipeline_of: merge_request)
+ def merge_when_pipeline_succeeds
+ post :merge, base_params.merge(sha: merge_request.diff_head_sha, merge_when_pipeline_succeeds: '1')
end
it 'returns :merge_when_pipeline_succeeds' do
@@ -354,6 +354,18 @@ describe Projects::MergeRequestsController do
project.update_column(:only_allow_merge_if_pipeline_succeeds, true)
end
+ context 'and head pipeline is not the current one' do
+ before do
+ head_pipeline.update(sha: 'not_current_sha')
+ end
+
+ it 'returns :failed' do
+ merge_when_pipeline_succeeds
+
+ expect(json_response).to eq('status' => 'failed')
+ end
+ end
+
it 'returns :merge_when_pipeline_succeeds' do
merge_when_pipeline_succeeds
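
Editor's note: the 'head pipeline is not the current one' context asserts that merge-when-pipeline-succeeds fails once the head pipeline's SHA drifts from the submitted one. A one-method sketch of the guard this implies (the method name is hypothetical):

    # Hypothetical guard: the merge may only be scheduled when the MR's
    # head pipeline is the pipeline for the SHA the client submitted.
    def head_pipeline_current?(merge_request, submitted_sha)
      pipeline = merge_request.head_pipeline
      pipeline.present? && pipeline.sha == submitted_sha
    end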
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 5f5a789d5cc..37e9f863fc4 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -336,6 +336,29 @@ describe Projects::NotesController do
end
end
+ describe 'PUT update' do
+ let(:request_params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ id: note,
+ format: :json,
+ note: {
+ note: "New comment"
+ }
+ }
+ end
+
+ before do
+ sign_in(note.author)
+ project.team << [note.author, :developer]
+ end
+
+ it "updates the note" do
+ expect { put :update, request_params }.to change { note.reload.note }
+ end
+ end
+
describe 'DELETE destroy' do
let(:request_params) do
{
diff --git a/spec/controllers/projects/pipelines_settings_controller_spec.rb b/spec/controllers/projects/pipelines_settings_controller_spec.rb
index 21b6a6d45f5..1cc488bef32 100644
--- a/spec/controllers/projects/pipelines_settings_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_settings_controller_spec.rb
@@ -12,7 +12,7 @@ describe Projects::PipelinesSettingsController do
end
describe 'PATCH update' do
- before do
+ subject do
patch :update,
namespace_id: project.namespace.to_param,
project_id: project,
@@ -25,6 +25,8 @@ describe Projects::PipelinesSettingsController do
let(:params) { { enabled: '', domain: 'mepmep.md' } }
it 'redirects to the settings page' do
+ subject
+
expect(response).to have_gitlab_http_status(302)
expect(flash[:notice]).to eq("Pipelines settings for '#{project.name}' were successfully updated.")
end
@@ -33,11 +35,36 @@ describe Projects::PipelinesSettingsController do
let(:params) { { enabled: '' } }
it 'allows enabled to be set to nil' do
+ subject
project_auto_devops.reload
expect(project_auto_devops.enabled).to be_nil
end
end
+
+ context 'when run_auto_devops_pipeline is true' do
+ before do
+ expect_any_instance_of(Projects::UpdateService).to receive(:run_auto_devops_pipeline?).and_return(true)
+ end
+
+ it 'queues a CreatePipelineWorker' do
+ expect(CreatePipelineWorker).to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
+
+ subject
+ end
+ end
+
+ context 'when run_auto_devops_pipeline is not true' do
+ before do
+ expect_any_instance_of(Projects::UpdateService).to receive(:run_auto_devops_pipeline?).and_return(false)
+ end
+
+ it 'does not queue a CreatePipelineWorker' do
+ expect(CreatePipelineWorker).not_to receive(:perform_async).with(project.id, user.id, :web, any_args)
+
+ subject
+ end
+ end
end
end
end
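
Editor's note: the two run_auto_devops_pipeline contexts stub the predicate on Projects::UpdateService and assert whether CreatePipelineWorker is enqueued. A hedged sketch of the branch in the service that these expectations describe, assuming the worker arguments matched by the spec above:

    # Sketch of the conditional enqueue implied by the specs above;
    # run_auto_devops_pipeline? is the predicate being stubbed there.
    def queue_auto_devops_pipeline(project, user)
      return unless run_auto_devops_pipeline?

      CreatePipelineWorker.perform_async(
        project.id, user.id, project.default_branch, :web)
    end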
diff --git a/spec/controllers/projects/uploads_controller_spec.rb b/spec/controllers/projects/uploads_controller_spec.rb
index c2550b1efa7..d572085661d 100644
--- a/spec/controllers/projects/uploads_controller_spec.rb
+++ b/spec/controllers/projects/uploads_controller_spec.rb
@@ -1,247 +1,10 @@
-require('spec_helper')
+require 'spec_helper'
describe Projects::UploadsController do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
- let(:jpg) { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') }
- let(:txt) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') }
-
- describe "POST #create" do
- before do
- sign_in(user)
- project.team << [user, :developer]
- end
-
- context "without params['file']" do
- it "returns an error" do
- post :create,
- namespace_id: project.namespace.to_param,
- project_id: project,
- format: :json
- expect(response).to have_gitlab_http_status(422)
- end
- end
-
- context 'with valid image' do
- before do
- post :create,
- namespace_id: project.namespace.to_param,
- project_id: project,
- file: jpg,
- format: :json
- end
-
- it 'returns a content with original filename, new link, and correct type.' do
- expect(response.body).to match '\"alt\":\"rails_sample\"'
- expect(response.body).to match "\"url\":\"/uploads"
- end
-
- # NOTE: This is as close as we're getting to an Integration test for this
- # behavior. We're avoiding a proper Feature test because those should be
- # testing things entirely user-facing, which the Upload model is very much
- # not.
- it 'creates a corresponding Upload record' do
- upload = Upload.last
-
- aggregate_failures do
- expect(upload).to exist
- expect(upload.model).to eq project
- end
- end
- end
-
- context 'with valid non-image file' do
- before do
- post :create,
- namespace_id: project.namespace.to_param,
- project_id: project,
- file: txt,
- format: :json
- end
-
- it 'returns a content with original filename, new link, and correct type.' do
- expect(response.body).to match '\"alt\":\"doc_sample.txt\"'
- expect(response.body).to match "\"url\":\"/uploads"
- end
- end
+ let(:model) { create(:project, :public) }
+ let(:params) do
+ { namespace_id: model.namespace.to_param, project_id: model }
end
- describe "GET #show" do
- let(:go) do
- get :show,
- namespace_id: project.namespace.to_param,
- project_id: project,
- secret: "123456",
- filename: "image.jpg"
- end
-
- context "when the project is public" do
- before do
- project.update_attribute(:visibility_level, Project::PUBLIC)
- end
-
- context "when not signed in" do
- context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
- it "responds with status 200" do
- go
-
- expect(response).to have_gitlab_http_status(200)
- end
- end
-
- context "when the file doesn't exist" do
- it "responds with status 404" do
- go
-
- expect(response).to have_gitlab_http_status(404)
- end
- end
- end
-
- context "when signed in" do
- before do
- sign_in(user)
- end
-
- context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
- it "responds with status 200" do
- go
-
- expect(response).to have_gitlab_http_status(200)
- end
- end
-
- context "when the file doesn't exist" do
- it "responds with status 404" do
- go
-
- expect(response).to have_gitlab_http_status(404)
- end
- end
- end
- end
-
- context "when the project is private" do
- before do
- project.update_attribute(:visibility_level, Project::PRIVATE)
- end
-
- context "when not signed in" do
- context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
- context "when the file is an image" do
- before do
- allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
- end
-
- it "responds with status 200" do
- go
-
- expect(response).to have_gitlab_http_status(200)
- end
- end
-
- context "when the file is not an image" do
- it "redirects to the sign in page" do
- go
-
- expect(response).to redirect_to(new_user_session_path)
- end
- end
- end
-
- context "when the file doesn't exist" do
- it "redirects to the sign in page" do
- go
-
- expect(response).to redirect_to(new_user_session_path)
- end
- end
- end
-
- context "when signed in" do
- before do
- sign_in(user)
- end
-
- context "when the user has access to the project" do
- before do
- project.team << [user, :master]
- end
-
- context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
- it "responds with status 200" do
- go
-
- expect(response).to have_gitlab_http_status(200)
- end
- end
-
- context "when the file doesn't exist" do
- it "responds with status 404" do
- go
-
- expect(response).to have_gitlab_http_status(404)
- end
- end
- end
-
- context "when the user doesn't have access to the project" do
- context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
- context "when the file is an image" do
- before do
- allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
- end
-
- it "responds with status 200" do
- go
-
- expect(response).to have_gitlab_http_status(200)
- end
- end
-
- context "when the file is not an image" do
- it "responds with status 404" do
- go
-
- expect(response).to have_gitlab_http_status(404)
- end
- end
- end
-
- context "when the file doesn't exist" do
- it "responds with status 404" do
- go
-
- expect(response).to have_gitlab_http_status(404)
- end
- end
- end
- end
- end
- end
+ it_behaves_like 'handle uploads'
end
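
Editor's note: the 247-line spec body is replaced by a single it_behaves_like, parameterized through the `model` and `params` lets. The shared file itself lives elsewhere in the tree; roughly, such a group consumes those lets like this (a sketch, not the actual 'handle uploads' implementation):

    shared_examples 'handle uploads' do
      describe 'POST #create' do
        it 'returns an error without params[:file]' do
          # `params` comes from the including spec's let block.
          post :create, params.merge(format: :json)

          expect(response).to have_gitlab_http_status(422)
        end
      end
    end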
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index e7ab714c550..e61187fb518 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -261,6 +261,27 @@ describe ProjectsController do
expect(response).to redirect_to(namespace_project_path)
end
end
+
+ context 'when the project is forked and has a repository', :request_store do
+ let(:public_project) { create(:project, :public, :repository) }
+ let(:other_user) { create(:user) }
+
+ render_views
+
+ before do
+ # View the project as a user that does not have any rights
+ sign_in(other_user)
+
+ fork_project(public_project)
+ end
+
+ it 'does not increase the number of queries when the project is forked' do
+ expected_query = /#{public_project.fork_network.find_forks_in(other_user.namespace).to_sql}/
+
+ expect { get(:show, namespace_id: public_project.namespace, id: public_project) }
+ .not_to exceed_query_limit(1).for_query(expected_query)
+ end
+ end
end
describe "#update" do
@@ -405,11 +426,12 @@ describe ProjectsController do
end
end
- describe 'PUT #new_issue_address' do
+ describe 'PUT #new_issuable_address for issue' do
subject do
- put :new_issue_address,
+ put :new_issuable_address,
namespace_id: project.namespace,
- id: project
+ id: project,
+ issuable_type: 'issue'
user.reload
end
@@ -428,7 +450,35 @@ describe ProjectsController do
end
it 'changes projects new issue address' do
- expect { subject }.to change { project.new_issue_address(user) }
+ expect { subject }.to change { project.new_issuable_address(user, 'issue') }
+ end
+ end
+
+ describe 'PUT #new_issuable_address for merge request' do
+ subject do
+ put :new_issuable_address,
+ namespace_id: project.namespace,
+ id: project,
+ issuable_type: 'merge_request'
+ user.reload
+ end
+
+ before do
+ sign_in(user)
+ project.team << [user, :developer]
+ allow(Gitlab.config.incoming_email).to receive(:enabled).and_return(true)
+ end
+
+ it 'has http status 200' do
+ expect(response).to have_http_status(200)
+ end
+
+ it 'changes the user incoming email token' do
+ expect { subject }.to change { user.incoming_email_token }
+ end
+
+ it 'changes projects new merge request address' do
+ expect { subject }.to change { project.new_issuable_address(user, 'merge_request') }
end
end
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 1d3ddfbd220..346944fd5b0 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -118,7 +118,8 @@ describe RegistrationsController do
context 'user does not require password confirmation' do
before do
- stub_application_setting(password_authentication_enabled: false)
+ stub_application_setting(password_authentication_enabled_for_web: false)
+ stub_application_setting(password_authentication_enabled_for_git: false)
end
it 'fails if username confirmation is not provided' do
diff --git a/spec/factories/appearances.rb b/spec/factories/appearances.rb
index cf2a2b76bcb..860973024c9 100644
--- a/spec/factories/appearances.rb
+++ b/spec/factories/appearances.rb
@@ -4,5 +4,6 @@ FactoryGirl.define do
factory :appearance do
title "MepMep"
description "This is my Community Edition instance"
+ new_project_guidelines "Custom project guidelines"
end
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 4c485461dc7..c868525cbc0 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -154,39 +154,27 @@ FactoryGirl.define do
runner factory: :ci_runner
end
- trait :artifacts do
+ trait :legacy_artifacts do
after(:create) do |build, _|
- build.artifacts_file =
- fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'),
- 'application/zip')
-
- build.artifacts_metadata =
- fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'),
- 'application/x-gzip')
-
- build.save!
+ build.update!(
+ legacy_artifacts_file: fixture_file_upload(
+ Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip'),
+ legacy_artifacts_metadata: fixture_file_upload(
+ Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
+ )
end
end
- trait :remote_store do
- artifacts_file_store ArtifactUploader::REMOTE_STORE
- artifacts_metadata_store ArtifactUploader::REMOTE_STORE
+ trait :artifacts do
+ after(:create) do |build|
+ create(:ci_job_artifact, :archive, job: build)
+ create(:ci_job_artifact, :metadata, job: build)
+ build.reload
+ end
end
- trait :artifacts_expired do
- after(:create) do |build, _|
- build.artifacts_file =
- fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'),
- 'application/zip')
-
- build.artifacts_metadata =
- fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'),
- 'application/x-gzip')
-
- build.artifacts_expire_at = 1.minute.ago
-
- build.save!
- end
+ trait :expired do
+ artifacts_expire_at 1.minute.ago
end
trait :with_commit do
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
new file mode 100644
index 00000000000..538dc422832
--- /dev/null
+++ b/spec/factories/ci/job_artifacts.rb
@@ -0,0 +1,30 @@
+include ActionDispatch::TestProcess
+
+FactoryGirl.define do
+ factory :ci_job_artifact, class: Ci::JobArtifact do
+ job factory: :ci_build
+ file_type :archive
+
+ after :build do |artifact|
+ artifact.project ||= artifact.job.project
+ end
+
+ trait :archive do
+ file_type :archive
+
+ after(:build) do |artifact, _|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
+ end
+ end
+
+ trait :metadata do
+ file_type :metadata
+
+ after(:build) do |artifact, _|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
+ end
+ end
+ end
+end
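
Editor's note: together with the builds-factory change above, this moves artifacts from uploader columns on the build to dedicated Ci::JobArtifact rows. Call sites keep the same trait; a usage sketch, assuming the Ci::Build#job_artifacts association that accompanies Ci::JobArtifact:

    # New style: two Ci::JobArtifact records (archive + metadata).
    build = create(:ci_build, :artifacts)
    build.job_artifacts.count # => 2

    # The old uploader-based columns remain reachable via:
    legacy_build = create(:ci_build, :legacy_artifacts)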
diff --git a/spec/factories/clusters/cluster.rb b/spec/factories/clusters/clusters.rb
index c4261178f2d..9e73a19e856 100644
--- a/spec/factories/clusters/cluster.rb
+++ b/spec/factories/clusters/clusters.rb
@@ -13,27 +13,24 @@ FactoryGirl.define do
provider_type :user
platform_type :kubernetes
- platform_kubernetes do
- create(:cluster_platform_kubernetes, :configured)
- end
+ platform_kubernetes factory: [:cluster_platform_kubernetes, :configured]
end
trait :provided_by_gcp do
provider_type :gcp
platform_type :kubernetes
- before(:create) do |cluster, evaluator|
- cluster.platform_kubernetes = build(:cluster_platform_kubernetes, :configured)
- cluster.provider_gcp = build(:cluster_provider_gcp, :created)
- end
+ provider_gcp factory: [:cluster_provider_gcp, :created]
+ platform_kubernetes factory: [:cluster_platform_kubernetes, :configured]
end
trait :providing_by_gcp do
provider_type :gcp
+ provider_gcp factory: [:cluster_provider_gcp, :creating]
+ end
- provider_gcp do
- create(:cluster_provider_gcp, :creating)
- end
+ trait :disabled do
+ enabled false
end
end
end
diff --git a/spec/factories/fork_network_members.rb b/spec/factories/fork_network_members.rb
new file mode 100644
index 00000000000..509c4e1fa1c
--- /dev/null
+++ b/spec/factories/fork_network_members.rb
@@ -0,0 +1,8 @@
+FactoryGirl.define do
+ factory :fork_network_member do
+ association :project
+ association :fork_network
+
+ forked_from_project { fork_network.root_project }
+ end
+end
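
Editor's note: a small FactoryGirl idiom in this new factory — forked_from_project is computed from the associated fork_network at build time rather than passed in. In use (a sketch):

    member = create(:fork_network_member)
    member.forked_from_project == member.fork_network.root_project # => true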
diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb
index f0d05504b7e..471bfb3213a 100644
--- a/spec/factories/notes.rb
+++ b/spec/factories/notes.rb
@@ -63,13 +63,19 @@ FactoryGirl.define do
factory :diff_note_on_commit, traits: [:on_commit], class: DiffNote do
association :project, :repository
+
+ transient do
+ line_number 14
+ diff_refs { project.commit(commit_id).try(:diff_refs) }
+ end
+
position do
Gitlab::Diff::Position.new(
old_path: "files/ruby/popen.rb",
new_path: "files/ruby/popen.rb",
old_line: nil,
- new_line: 14,
- diff_refs: project.commit(commit_id).try(:diff_refs)
+ new_line: line_number,
+ diff_refs: diff_refs
)
end
end
@@ -130,6 +136,7 @@ FactoryGirl.define do
before(:create) do |note, evaluator|
discussion = evaluator.in_reply_to
next unless discussion
+
discussion = discussion.to_discussion if discussion.is_a?(Note)
next unless discussion
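
Editor's note: the transient block above lets callers reposition a diff note without rebuilding the whole Gitlab::Diff::Position by hand. A usage sketch:

    # Overrides the transient default (line 14) for a single note.
    note = create(:diff_note_on_commit, project: project, line_number: 20)
    note.position.new_line # => 20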
diff --git a/spec/factories/uploads.rb b/spec/factories/uploads.rb
index 3222c41c3d8..e18f1a6bd4a 100644
--- a/spec/factories/uploads.rb
+++ b/spec/factories/uploads.rb
@@ -4,5 +4,21 @@ FactoryGirl.define do
path { "uploads/-/system/project/avatar/avatar.jpg" }
size 100.kilobytes
uploader "AvatarUploader"
+
+ trait :personal_snippet do
+ model { build(:personal_snippet) }
+ uploader "PersonalFileUploader"
+ end
+
+ trait :issuable_upload do
+ path { "#{SecureRandom.hex}/myfile.jpg" }
+ uploader "FileUploader"
+ end
+
+ trait :namespace_upload do
+ path { "#{SecureRandom.hex}/myfile.jpg" }
+ model { build(:group) }
+ uploader "NamespaceFileUploader"
+ end
end
end
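
Editor's note: each new trait swaps the model and uploader pair together, so specs can ask for a ready-made upload of a given kind. For example (a sketch):

    snippet_upload = create(:upload, :personal_snippet)
    snippet_upload.uploader # => "PersonalFileUploader"

    group_upload = create(:upload, :namespace_upload)
    group_upload.model      # => a Group built by the trait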
diff --git a/spec/features/admin/admin_appearance_spec.rb b/spec/features/admin/admin_appearance_spec.rb
index 5f3a37c1dcc..d91dcf76191 100644
--- a/spec/features/admin/admin_appearance_spec.rb
+++ b/spec/features/admin/admin_appearance_spec.rb
@@ -9,6 +9,7 @@ feature 'Admin Appearance' do
fill_in 'appearance_title', with: 'MyCompany'
fill_in 'appearance_description', with: 'dev server'
+ fill_in 'appearance_new_project_guidelines', with: 'Custom project guidelines'
click_button 'Save'
expect(current_path).to eq admin_appearances_path
@@ -16,21 +17,39 @@ feature 'Admin Appearance' do
expect(page).to have_field('appearance_title', with: 'MyCompany')
expect(page).to have_field('appearance_description', with: 'dev server')
+ expect(page).to have_field('appearance_new_project_guidelines', with: 'Custom project guidelines')
expect(page).to have_content 'Last edit'
end
- scenario 'Preview appearance' do
+ scenario 'Preview sign-in page appearance' do
sign_in(create(:admin))
visit admin_appearances_path
- click_link "Preview"
+ click_link "Sign-in page"
- expect_page_has_custom_appearance(appearance)
+ expect_custom_sign_in_appearance(appearance)
+ end
+
+ scenario 'Preview new project page appearance' do
+ sign_in(create(:admin))
+
+ visit admin_appearances_path
+ click_link "New project page"
+
+ expect_custom_new_project_appearance(appearance)
end
scenario 'Custom sign-in page' do
visit new_user_session_path
- expect_page_has_custom_appearance(appearance)
+
+ expect_custom_sign_in_appearance(appearance)
+ end
+
+ scenario 'Custom new project page' do
+ sign_in create(:user)
+ visit new_project_path
+
+ expect_custom_new_project_appearance(appearance)
end
scenario 'Appearance logo' do
@@ -57,11 +76,15 @@ feature 'Admin Appearance' do
expect(page).not_to have_css(header_logo_selector)
end
- def expect_page_has_custom_appearance(appearance)
+ def expect_custom_sign_in_appearance(appearance)
expect(page).to have_content appearance.title
expect(page).to have_content appearance.description
end
+ def expect_custom_new_project_appearance(appearance)
+ expect(page).to have_content appearance.new_project_guidelines
+ end
+
def logo_selector
'//img[data-src^="/uploads/-/system/appearance/logo"]'
end
diff --git a/spec/features/admin/admin_health_check_spec.rb b/spec/features/admin/admin_health_check_spec.rb
index 4430fc15501..ac3392b49f9 100644
--- a/spec/features/admin/admin_health_check_spec.rb
+++ b/spec/features/admin/admin_health_check_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature "Admin Health Check", :feature, :broken_storage do
+feature "Admin Health Check", :feature do
include StubENV
before do
@@ -36,6 +36,7 @@ feature "Admin Health Check", :feature, :broken_storage do
context 'when services are up' do
before do
+ stub_storage_settings({}) # Hide the broken storage
visit admin_health_check_path
end
@@ -56,10 +57,8 @@ feature "Admin Health Check", :feature, :broken_storage do
end
end
- context 'with repository storage failures' do
+ context 'with repository storage failures', :broken_storage do
before do
- # Track a failure
- Gitlab::Git::Storage::CircuitBreaker.for_storage('broken').perform { nil } rescue nil
visit admin_health_check_path
end
@@ -67,9 +66,10 @@ feature "Admin Health Check", :feature, :broken_storage do
hostname = Gitlab::Environment.hostname
maximum_failures = Gitlab::CurrentSettings.current_application_settings
.circuitbreaker_failure_count_threshold
+ number_of_failures = maximum_failures + 1
- expect(page).to have_content('broken: failed storage access attempt on host:')
- expect(page).to have_content("#{hostname}: 1 of #{maximum_failures} failures.")
+ expect(page).to have_content("broken: #{number_of_failures} failed storage access attempts:")
+ expect(page).to have_content("#{hostname}: #{number_of_failures} of #{maximum_failures} failures.")
end
it 'allows resetting storage failures' do
diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb
index b47f9055d29..a69b428d117 100644
--- a/spec/features/admin/admin_users_spec.rb
+++ b/spec/features/admin/admin_users_spec.rb
@@ -167,19 +167,36 @@ describe "Admin::Users" do
it 'sees impersonation log out icon' do
icon = first('.fa.fa-user-secret')
- expect(icon).not_to eql nil
+ expect(icon).not_to be nil
end
it 'logs out of impersonated user back to original user' do
find(:css, 'li.impersonation a').click
- expect(page.find(:css, '.header-user .profile-link')['data-user']).to eql(current_user.username)
+ expect(page.find(:css, '.header-user .profile-link')['data-user']).to eq(current_user.username)
end
it 'is redirected back to the impersonated users page in the admin after stopping' do
find(:css, 'li.impersonation a').click
- expect(current_path).to eql "/admin/users/#{another_user.username}"
+ expect(current_path).to eq("/admin/users/#{another_user.username}")
+ end
+ end
+
+ context 'when impersonating a user with an expired password' do
+ before do
+ another_user.update(password_expires_at: Time.now - 5.minutes)
+ click_link 'Impersonate'
+ end
+
+ it 'does not redirect to password change page' do
+ expect(current_path).to eq('/')
+ end
+
+ it 'is redirected back to the impersonated users page in the admin after stopping' do
+ find(:css, 'li.impersonation a').click
+
+ expect(current_path).to eq("/admin/users/#{another_user.username}")
end
end
end
diff --git a/spec/features/auto_deploy_spec.rb b/spec/features/auto_deploy_spec.rb
index 4a7c3e4f1ab..7a395f62511 100644
--- a/spec/features/auto_deploy_spec.rb
+++ b/spec/features/auto_deploy_spec.rb
@@ -4,52 +4,74 @@ describe 'Auto deploy' do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
- before do
- create :kubernetes_service, project: project
- project.team << [user, :master]
- sign_in user
- end
+ shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
+ context 'when no deployment service is active' do
+ before do
+ turn_off
+ end
- context 'when no deployment service is active' do
- before do
- project.kubernetes_service.update!(active: false)
+ it 'does not show a button to set up auto deploy' do
+ visit project_path(project)
+ expect(page).to have_no_content('Set up auto deploy')
+ end
end
- it 'does not show a button to set up auto deploy' do
- visit project_path(project)
- expect(page).to have_no_content('Set up auto deploy')
+ context 'when a deployment service is active' do
+ before do
+ turn_on
+ visit project_path(project)
+ end
+
+ it 'shows a button to set up auto deploy' do
+ expect(page).to have_link('Set up auto deploy')
+ end
+
+ it 'includes OpenShift as an available template', :js do
+ click_link 'Set up auto deploy'
+ click_button 'Apply a GitLab CI Yaml template'
+
+ within '.gitlab-ci-yml-selector' do
+ expect(page).to have_content('OpenShift')
+ end
+ end
+
+ it 'creates a merge request using "auto-deploy" branch', :js do
+ click_link 'Set up auto deploy'
+ click_button 'Apply a GitLab CI Yaml template'
+ within '.gitlab-ci-yml-selector' do
+ click_on 'OpenShift'
+ end
+ wait_for_requests
+ click_button 'Commit changes'
+
+ expect(page).to have_content('New Merge Request From auto-deploy into master')
+ end
end
end
- context 'when a deployment service is active' do
+ context 'when user configured kubernetes from Integration > Kubernetes' do
before do
- project.kubernetes_service.update!(active: true)
- visit project_path(project)
+ create :kubernetes_service, project: project
+ project.team << [user, :master]
+ sign_in user
end
- it 'shows a button to set up auto deploy' do
- expect(page).to have_link('Set up auto deploy')
- end
+ let(:turn_on) { project.deployment_platform.update!(active: true) }
+ let(:turn_off) { project.deployment_platform.update!(active: false) }
- it 'includes OpenShift as an available template', :js do
- click_link 'Set up auto deploy'
- click_button 'Apply a GitLab CI Yaml template'
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
+ end
- within '.gitlab-ci-yml-selector' do
- expect(page).to have_content('OpenShift')
- end
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ before do
+ create(:cluster, :provided_by_gcp, projects: [project])
+ project.team << [user, :master]
+ sign_in user
end
- it 'creates a merge request using "auto-deploy" branch', :js do
- click_link 'Set up auto deploy'
- click_button 'Apply a GitLab CI Yaml template'
- within '.gitlab-ci-yml-selector' do
- click_on 'OpenShift'
- end
- wait_for_requests
- click_button 'Commit changes'
+ let(:turn_on) { project.deployment_platform.cluster.update!(enabled: true) }
+ let(:turn_off) { project.deployment_platform.cluster.update!(enabled: false) }
- expect(page).to have_content('New Merge Request From auto-deploy into master')
- end
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
end
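
Editor's note: the restructuring above is a standard RSpec pattern — the shared group calls turn_on/turn_off helpers it never defines, and each including context supplies them via `let`. Stripped of the GitLab specifics, the mechanism is just:

    # Self-contained illustration of let-parameterized shared examples.
    shared_examples 'a switchable integration' do
      it 'reports disabled after switching off' do
        turn_off
        expect(enabled?).to be(false)
      end
    end

    describe 'integration' do
      let(:state)    { { enabled: true } }
      let(:turn_off) { state[:enabled] = false }

      def enabled?
        state[:enabled]
      end

      it_behaves_like 'a switchable integration'
    end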
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index 9137ab82ff4..205900615c4 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -331,11 +331,29 @@ describe 'Issue Boards', :js do
context 'subscription' do
it 'changes issue subscription' do
click_card(card)
+ wait_for_requests
- page.within('.subscription') do
+ page.within('.subscriptions') do
click_button 'Subscribe'
wait_for_requests
- expect(page).to have_content("Unsubscribe")
+
+ expect(page).to have_content('Unsubscribe')
+ end
+ end
+
+ it 'has "Unsubscribe" button when already subscribed' do
+ create(:subscription, user: user, project: project, subscribable: issue2, subscribed: true)
+ visit project_board_path(project, board)
+ wait_for_requests
+
+ click_card(card)
+ wait_for_requests
+
+ page.within('.subscriptions') do
+ click_button 'Unsubscribe'
+ wait_for_requests
+
+ expect(page).to have_content('Subscribe')
end
end
end
diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb
index 479fb713297..c870910c8ea 100644
--- a/spec/features/commits_spec.rb
+++ b/spec/features/commits_spec.rb
@@ -1,8 +1,6 @@
require 'spec_helper'
describe 'Commits' do
- include CiStatusHelper
-
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
@@ -33,7 +31,7 @@ describe 'Commits' do
describe 'Commit builds' do
before do
- visit ci_status_path(pipeline)
+ visit pipeline_path(pipeline)
end
it { expect(page).to have_content pipeline.sha[0..7] }
@@ -79,7 +77,7 @@ describe 'Commits' do
describe 'Commit builds', :js do
before do
- visit ci_status_path(pipeline)
+ visit pipeline_path(pipeline)
end
it 'shows pipeline`s data' do
@@ -91,11 +89,11 @@ describe 'Commits' do
context 'Download artifacts' do
before do
- build.update_attributes(artifacts_file: artifacts_file)
+ build.update_attributes(legacy_artifacts_file: artifacts_file)
end
it do
- visit ci_status_path(pipeline)
+ visit pipeline_path(pipeline)
click_on 'Download artifacts'
expect(page.response_headers['Content-Type']).to eq(artifacts_file.content_type)
end
@@ -103,7 +101,7 @@ describe 'Commits' do
describe 'Cancel all builds' do
it 'cancels commit', :js do
- visit ci_status_path(pipeline)
+ visit pipeline_path(pipeline)
click_on 'Cancel running'
expect(page).to have_content 'canceled'
end
@@ -111,7 +109,7 @@ describe 'Commits' do
describe 'Cancel build' do
it 'cancels build', :js do
- visit ci_status_path(pipeline)
+ visit pipeline_path(pipeline)
find('.js-btn-cancel-pipeline').click
expect(page).to have_content 'canceled'
end
@@ -120,13 +118,13 @@ describe 'Commits' do
describe '.gitlab-ci.yml not found warning' do
context 'ci builds enabled' do
it "does not show warning" do
- visit ci_status_path(pipeline)
+ visit pipeline_path(pipeline)
expect(page).not_to have_content '.gitlab-ci.yml not found in this commit'
end
it 'shows warning' do
stub_ci_pipeline_yaml_file(nil)
- visit ci_status_path(pipeline)
+ visit pipeline_path(pipeline)
expect(page).to have_content '.gitlab-ci.yml not found in this commit'
end
end
@@ -135,7 +133,7 @@ describe 'Commits' do
before do
stub_ci_builds_disabled
stub_ci_pipeline_yaml_file(nil)
- visit ci_status_path(pipeline)
+ visit pipeline_path(pipeline)
end
it 'does not show warning' do
@@ -148,8 +146,8 @@ describe 'Commits' do
context "when logged as reporter" do
before do
project.team << [user, :reporter]
- build.update_attributes(artifacts_file: artifacts_file)
- visit ci_status_path(pipeline)
+ build.update_attributes(legacy_artifacts_file: artifacts_file)
+ visit pipeline_path(pipeline)
end
it 'Renders header', :js do
@@ -170,8 +168,8 @@ describe 'Commits' do
project.update(
visibility_level: Gitlab::VisibilityLevel::INTERNAL,
public_builds: false)
- build.update_attributes(artifacts_file: artifacts_file)
- visit ci_status_path(pipeline)
+ build.update_attributes(legacy_artifacts_file: artifacts_file)
+ visit pipeline_path(pipeline)
end
it do
@@ -202,5 +200,12 @@ describe 'Commits' do
expect(page).to have_content("committed #{commit.committed_date.strftime("%b %d, %Y")}")
end
end
+
+ it 'shows the ref switcher with the multi-file editor enabled', :js do
+ set_cookie('new_repo', 'true')
+ visit project_commits_path(project, branch_name)
+
+ expect(find('.js-project-refs-dropdown')).to have_content branch_name
+ end
end
end
diff --git a/spec/features/groups/members/manage_members.rb b/spec/features/groups/members/manage_members.rb
index 9039b283393..da1e17225db 100644
--- a/spec/features/groups/members/manage_members.rb
+++ b/spec/features/groups/members/manage_members.rb
@@ -44,7 +44,11 @@ feature 'Groups > Members > Manage members' do
visit group_group_members_path(group)
- find(:css, '.project-members-page li', text: user2.name).find(:css, 'a.btn-remove').click
+ accept_confirm do
+ find(:css, '.project-members-page li', text: user2.name).find(:css, 'a.btn-remove').click
+ end
+
+ wait_for_requests
expect(page).not_to have_content(user2.name)
expect(group.users).not_to include(user2)
diff --git a/spec/features/groups/milestones_sorting_spec.rb b/spec/features/groups/milestones_sorting_spec.rb
new file mode 100644
index 00000000000..a0fe40cf1d3
--- /dev/null
+++ b/spec/features/groups/milestones_sorting_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+feature 'Milestones sorting', :js do
+ let(:group) { create(:group) }
+ let!(:project) { create(:project_empty_repo, group: group) }
+ let!(:other_project) { create(:project_empty_repo, group: group) }
+ let!(:project_milestone1) { create(:milestone, project: project, title: 'v1.0', due_date: 10.days.from_now) }
+ let!(:other_project_milestone1) { create(:milestone, project: other_project, title: 'v1.0', due_date: 10.days.from_now) }
+ let!(:project_milestone2) { create(:milestone, project: project, title: 'v2.0', due_date: 5.days.from_now) }
+ let!(:other_project_milestone2) { create(:milestone, project: other_project, title: 'v2.0', due_date: 5.days.from_now) }
+ let!(:group_milestone) { create(:milestone, group: group, title: 'v3.0', due_date: 7.days.from_now) }
+ let(:user) { create(:group_member, :master, user: create(:user), group: group ).user }
+
+ before do
+ sign_in(user)
+ end
+
+ scenario 'visit group milestones and sort by due_date_asc' do
+ visit group_milestones_path(group)
+
+ expect(page).to have_button('Due soon')
+
+ # assert default sorting
+ within '.milestones' do
+ expect(page.all('ul.content-list > li').first.text).to include('v2.0')
+ expect(page.all('ul.content-list > li')[1].text).to include('v3.0')
+ expect(page.all('ul.content-list > li').last.text).to include('v1.0')
+ end
+
+ click_button 'Due soon'
+
+ sort_options = find('ul.dropdown-menu-sort li').all('a').collect(&:text)
+
+ expect(sort_options[0]).to eq('Due soon')
+ expect(sort_options[1]).to eq('Due later')
+ expect(sort_options[2]).to eq('Start soon')
+ expect(sort_options[3]).to eq('Start later')
+ expect(sort_options[4]).to eq('Name, ascending')
+ expect(sort_options[5]).to eq('Name, descending')
+
+ click_link 'Due later'
+
+ expect(page).to have_button('Due later')
+
+ within '.milestones' do
+ expect(page.all('ul.content-list > li').first.text).to include('v1.0')
+ expect(page.all('ul.content-list > li')[1].text).to include('v3.0')
+ expect(page.all('ul.content-list > li').last.text).to include('v2.0')
+ end
+ end
+end
diff --git a/spec/features/issuables/discussion_lock_spec.rb b/spec/features/issuables/discussion_lock_spec.rb
index 7ea29ff252b..ecbe51a7bc2 100644
--- a/spec/features/issuables/discussion_lock_spec.rb
+++ b/spec/features/issuables/discussion_lock_spec.rb
@@ -14,7 +14,7 @@ describe 'Discussion Lock', :js do
project.add_developer(user)
end
- context 'when the discussion is unlocked' do
+ context 'when the discussion is unlocked' do
it 'the user can lock the issue' do
visit project_issue_path(project, issue)
diff --git a/spec/features/issuables/shortcuts_issuable_spec.rb b/spec/features/issuables/shortcuts_issuable_spec.rb
new file mode 100644
index 00000000000..e25fd1a6249
--- /dev/null
+++ b/spec/features/issuables/shortcuts_issuable_spec.rb
@@ -0,0 +1,46 @@
+require 'spec_helper'
+
+feature 'Blob shortcuts', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+ let(:issue) { create(:issue, project: project, author: user) }
+ let(:merge_request) { create(:merge_request, source_project: project) }
+ let(:note_text) { 'I got this!' }
+
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+ describe 'pressing "r"' do
+ describe 'On an Issue' do
+ before do
+ create(:note, noteable: issue, project: project, note: note_text)
+ visit project_issue_path(project, issue)
+ wait_for_requests
+ end
+
+ it 'quotes the selected text' do
+ select_element('.note-text')
+ find('body').native.send_key('r')
+
+ expect(find('.js-main-target-form .js-vue-comment-form').value).to include(note_text)
+ end
+ end
+
+ describe 'On a Merge Request' do
+ before do
+ create(:note, noteable: merge_request, project: project, note: note_text)
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+ end
+
+ it 'quotes the selected text' do
+ select_element('.note-text')
+ find('body').native.send_key('r')
+
+ expect(find('.js-main-target-form #note_note').value).to include(note_text)
+ end
+ end
+ end
+end
diff --git a/spec/features/issues/create_branch_merge_request_spec.rb b/spec/features/issues/create_branch_merge_request_spec.rb
deleted file mode 100644
index edea95c6699..00000000000
--- a/spec/features/issues/create_branch_merge_request_spec.rb
+++ /dev/null
@@ -1,106 +0,0 @@
-require 'rails_helper'
-
-feature 'Create Branch/Merge Request Dropdown on issue page', :feature, :js do
- let(:user) { create(:user) }
- let!(:project) { create(:project, :repository) }
- let(:issue) { create(:issue, project: project, title: 'Cherry-Coloured Funk') }
-
- context 'for team members' do
- before do
- project.team << [user, :developer]
- sign_in(user)
- end
-
- it 'allows creating a merge request from the issue page' do
- visit project_issue_path(project, issue)
-
- perform_enqueued_jobs do
- select_dropdown_option('create-mr')
-
- expect(page).to have_content('WIP: Resolve "Cherry-Coloured Funk"')
- expect(current_path).to eq(project_merge_request_path(project, MergeRequest.first))
-
- wait_for_requests
- end
-
- visit project_issue_path(project, issue)
-
- expect(page).to have_content("created branch 1-cherry-coloured-funk")
- expect(page).to have_content("mentioned in merge request !1")
- end
-
- it 'allows creating a branch from the issue page' do
- visit project_issue_path(project, issue)
-
- select_dropdown_option('create-branch')
-
- wait_for_requests
-
- expect(page).to have_selector('.dropdown-toggle-text ', text: '1-cherry-coloured-funk')
- expect(current_path).to eq project_tree_path(project, '1-cherry-coloured-funk')
- end
-
- context "when there is a referenced merge request" do
- let!(:note) do
- create(:note, :on_issue, :system, project: project, noteable: issue,
- note: "mentioned in #{referenced_mr.to_reference}")
- end
-
- let(:referenced_mr) do
- create(:merge_request, :simple, source_project: project, target_project: project,
- description: "Fixes #{issue.to_reference}", author: user)
- end
-
- before do
- referenced_mr.cache_merge_request_closes_issues!(user)
-
- visit project_issue_path(project, issue)
- end
-
- it 'disables the create branch button' do
- expect(page).to have_css('.create-mr-dropdown-wrap .unavailable:not(.hide)')
- expect(page).to have_css('.create-mr-dropdown-wrap .available.hide', visible: false)
- expect(page).to have_content /1 Related Merge Request/
- end
- end
-
- context 'when merge requests are disabled' do
- before do
- project.project_feature.update(merge_requests_access_level: 0)
-
- visit project_issue_path(project, issue)
- end
-
- it 'shows only create branch button' do
- expect(page).not_to have_button('Create a merge request')
- expect(page).to have_button('Create a branch')
- end
- end
-
- context 'when issue is confidential' do
- it 'disables the create branch button' do
- issue = create(:issue, :confidential, project: project)
-
- visit project_issue_path(project, issue)
-
- expect(page).not_to have_css('.create-mr-dropdown-wrap')
- end
- end
- end
-
- context 'for visitors' do
- before do
- visit project_issue_path(project, issue)
- end
-
- it 'shows no buttons' do
- expect(page).not_to have_selector('.create-mr-dropdown-wrap')
- end
- end
-
- def select_dropdown_option(option)
- find('.create-mr-dropdown-wrap .dropdown-toggle').click
- find("li[data-value='#{option}']").click
- find('.js-create-merge-request').click
- end
-end
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index b8a66245153..c31b636d67f 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -218,18 +218,124 @@ feature 'GFM autocomplete', :js do
user_item = find('.atwho-view li', text: user.username)
expect(user_item).to have_content(user.username)
end
+ end
+
+  # This context has just one example in each sub-context in order to improve spec performance.
+ context 'labels' do
+ let!(:backend) { create(:label, project: project, title: 'backend') }
+ let!(:bug) { create(:label, project: project, title: 'bug') }
+ let!(:feature_proposal) { create(:label, project: project, title: 'feature proposal') }
+
+ context 'when no labels are assigned' do
+ it 'shows labels' do
+ note = find('#note-body')
+
+ # It should show all the labels on "~".
+ type(note, '~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show all the labels on "/label ~".
+ type(note, '/label ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show all the labels on "/relabel ~".
+ type(note, '/relabel ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show no labels on "/unlabel ~".
+ type(note, '/unlabel ~')
+ expect_labels(not_shown: [backend, bug, feature_proposal])
+ end
+ end
+
+ context 'when some labels are assigned' do
+ before do
+ issue.labels << [backend]
+ end
+
+ it 'shows labels' do
+ note = find('#note-body')
+
+ # It should show all the labels on "~".
+ type(note, '~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show only unset labels on "/label ~".
+ type(note, '/label ~')
+ expect_labels(shown: [bug, feature_proposal], not_shown: [backend])
+
+ # It should show all the labels on "/relabel ~".
+ type(note, '/relabel ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show only set labels on "/unlabel ~".
+ type(note, '/unlabel ~')
+ expect_labels(shown: [backend], not_shown: [bug, feature_proposal])
+ end
+ end
+
+ context 'when all labels are assigned' do
+ before do
+ issue.labels << [backend, bug, feature_proposal]
+ end
+
+ it 'shows labels' do
+ note = find('#note-body')
+
+ # It should show all the labels on "~".
+ type(note, '~')
+ expect_labels(shown: [backend, bug, feature_proposal])
- def expect_to_wrap(should_wrap, item, note, value)
- expect(item).to have_content(value)
- expect(item).not_to have_content("\"#{value}\"")
+ # It should show no labels on "/label ~".
+ type(note, '/label ~')
+ expect_labels(not_shown: [backend, bug, feature_proposal])
- item.click
+ # It should show all the labels on "/relabel ~".
+ type(note, '/relabel ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
- if should_wrap
- expect(note.value).to include("\"#{value}\"")
- else
- expect(note.value).not_to include("\"#{value}\"")
+ # It should show all the labels on "/unlabel ~".
+ type(note, '/unlabel ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
end
end
end
+
+ private
+
+ def expect_to_wrap(should_wrap, item, note, value)
+ expect(item).to have_content(value)
+ expect(item).not_to have_content("\"#{value}\"")
+
+ item.click
+
+ if should_wrap
+ expect(note.value).to include("\"#{value}\"")
+ else
+ expect(note.value).not_to include("\"#{value}\"")
+ end
+ end
+
+ def expect_labels(shown: nil, not_shown: nil)
+ page.within('.atwho-container') do
+ if shown
+ expect(page).to have_selector('.atwho-view li', count: shown.size)
+ shown.each { |label| expect(page).to have_content(label.title) }
+ end
+
+ if not_shown
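+        # The autocomplete dropdown is only rendered when something matches,
+        # so assert its absence only when no labels should be shown at all.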
+ expect(page).not_to have_selector('.atwho-view li') unless shown
+ not_shown.each { |label| expect(page).not_to have_content(label.title) }
+ end
+ end
+ end
+
+ # `note` is a textarea where the given text should be typed.
+ # We don't want to find it each time this function gets called.
+ def type(note, text)
+ page.within('.timeline-content-form') do
+ note.set('')
+ note.native.send_keys(text)
+ end
+ end
end
diff --git a/spec/features/issues/issue_detail_spec.rb b/spec/features/issues/issue_detail_spec.rb
index 6fbee0ebcb5..4224a8fe5d4 100644
--- a/spec/features/issues/issue_detail_spec.rb
+++ b/spec/features/issues/issue_detail_spec.rb
@@ -1,9 +1,9 @@
require 'rails_helper'
feature 'Issue Detail', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project, :public) }
- let(:issue) { create(:issue, project: project, author: user) }
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public) }
+ let(:issue) { create(:issue, project: project, author: user) }
context 'when user displays the issue' do
before do
@@ -27,6 +27,7 @@ feature 'Issue Detail', :js do
click_link 'Edit'
fill_in 'issuable-title', with: 'issue title'
click_button 'Save'
+ wait_for_requests
Users::DestroyService.new(user).execute(user)
diff --git a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
new file mode 100644
index 00000000000..539d7e9ff01
--- /dev/null
+++ b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
@@ -0,0 +1,248 @@
+require 'rails_helper'
+
+describe 'User creates branch and merge request on issue page', :js do
+ let(:user) { create(:user) }
+ let!(:project) { create(:project, :repository) }
+ let(:issue) { create(:issue, project: project, title: 'Cherry-Coloured Funk') }
+
+ context 'when signed out' do
+ before do
+ visit project_issue_path(project, issue)
+ end
+
+ it "doesn't show 'Create merge request' button" do
+ expect(page).not_to have_selector('.create-mr-dropdown-wrap')
+ end
+ end
+
+ context 'when signed in' do
+ before do
+ project.add_developer(user)
+
+ sign_in(user)
+ end
+
+ context 'when interacting with the dropdown' do
+ before do
+ visit project_issue_path(project, issue)
+ end
+
+      # In order to improve test performance, all UI checks are placed in this single example.
+ it 'shows elements' do
+ button_create_merge_request = find('.js-create-merge-request')
+ button_toggle_dropdown = find('.create-mr-dropdown-wrap .dropdown-toggle')
+
+ button_toggle_dropdown.click
+
+ dropdown = find('.create-merge-request-dropdown-menu')
+
+ page.within(dropdown) do
+ button_create_target = find('.js-create-target')
+ input_branch_name = find('.js-branch-name')
+ input_source = find('.js-ref')
+ li_create_branch = find("li[data-value='create-branch']")
+ li_create_merge_request = find("li[data-value='create-mr']")
+
+          # Check that all elements are present.
+ expect(page).to have_content('Create merge request and branch')
+ expect(page).to have_content('Create branch')
+ expect(page).to have_content('Branch name')
+ expect(page).to have_content('Source (branch or tag)')
+ expect(page).to have_button('Create merge request')
+ expect(page).to have_selector('.js-branch-name:focus')
+
+ test_selection_mark(li_create_branch, li_create_merge_request, button_create_target, button_create_merge_request)
+ test_branch_name_checking(input_branch_name)
+ test_source_checking(input_source)
+
+          # The button inside the dropdown should be disabled if any errors occurred.
+ expect(page).to have_button('Create branch', disabled: true)
+ end
+
+      # The top-level button should be disabled if any errors occurred.
+ expect(page).to have_button('Create branch', disabled: true)
+ end
+
+ context 'when branch name is auto-generated' do
+ it 'creates a merge request' do
+ perform_enqueued_jobs do
+ select_dropdown_option('create-mr')
+
+ expect(page).to have_content('WIP: Resolve "Cherry-Coloured Funk"')
+ expect(current_path).to eq(project_merge_request_path(project, MergeRequest.first))
+
+ wait_for_requests
+ end
+
+ visit project_issue_path(project, issue)
+
+ expect(page).to have_content('created branch 1-cherry-coloured-funk')
+ expect(page).to have_content('mentioned in merge request !1')
+ end
+
+ it 'creates a branch' do
+ select_dropdown_option('create-branch')
+
+ wait_for_requests
+
+      expect(page).to have_selector('.dropdown-toggle-text', text: '1-cherry-coloured-funk')
+ expect(current_path).to eq project_tree_path(project, '1-cherry-coloured-funk')
+ end
+ end
+
+ context 'when branch name is custom' do
+ let(:branch_name) { 'custom-branch-name' }
+
+ it 'creates a merge request' do
+ perform_enqueued_jobs do
+ select_dropdown_option('create-mr', branch_name)
+
+ expect(page).to have_content('WIP: Resolve "Cherry-Coloured Funk"')
+ expect(page).to have_content('Request to merge custom-branch-name into')
+ expect(current_path).to eq(project_merge_request_path(project, MergeRequest.first))
+
+ wait_for_requests
+ end
+
+ visit project_issue_path(project, issue)
+
+ expect(page).to have_content('created branch custom-branch-name')
+ expect(page).to have_content('mentioned in merge request !1')
+ end
+
+ it 'creates a branch' do
+ select_dropdown_option('create-branch', branch_name)
+
+ wait_for_requests
+
+      expect(page).to have_selector('.dropdown-toggle-text', text: branch_name)
+ expect(current_path).to eq project_tree_path(project, branch_name)
+ end
+ end
+ end
+
+ context "when there is a referenced merge request" do
+ let!(:note) do
+ create(:note, :on_issue, :system, project: project, noteable: issue,
+ note: "mentioned in #{referenced_mr.to_reference}")
+ end
+
+ let(:referenced_mr) do
+ create(:merge_request, :simple, source_project: project, target_project: project,
+ description: "Fixes #{issue.to_reference}", author: user)
+ end
+
+ before do
+ referenced_mr.cache_merge_request_closes_issues!(user)
+
+ visit project_issue_path(project, issue)
+ end
+
+ it 'disables the create branch button' do
+ expect(page).to have_css('.create-mr-dropdown-wrap .unavailable:not(.hide)')
+ expect(page).to have_css('.create-mr-dropdown-wrap .available.hide', visible: false)
+      expect(page).to have_content(/1 Related Merge Request/)
+ end
+ end
+
+ context 'when merge requests are disabled' do
+ before do
+ project.project_feature.update(merge_requests_access_level: 0)
+
+ visit project_issue_path(project, issue)
+ end
+
+ it 'shows only create branch button' do
+ expect(page).not_to have_button('Create merge request')
+ expect(page).to have_button('Create branch')
+ end
+ end
+
+ context 'when issue is confidential' do
+ let(:issue) { create(:issue, :confidential, project: project) }
+
+ it 'disables the create branch button' do
+ visit project_issue_path(project, issue)
+
+ expect(page).not_to have_css('.create-mr-dropdown-wrap')
+ end
+ end
+ end
+
+ private
+
+ def select_dropdown_option(option, branch_name = nil)
+ find('.create-mr-dropdown-wrap .dropdown-toggle').click
+ find("li[data-value='#{option}']").click
+
+ if branch_name
+ find('.js-branch-name').set(branch_name)
+
+      # JavaScript debounces the AJAX calls, so we have to wait
+      # until the requests have actually been started.
+      # Details are in app/assets/javascripts/create_merge_request_dropdown.js:
+      # this.refDebounce = _.debounce(...)
+ sleep 0.5
+
+ wait_for_requests
+ end
+
+ find('.js-create-merge-request').click
+ end
+
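+  # Branch names are validated asynchronously: a fresh name is reported as
+  # available, while the default branch is reported as already taken.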
+ def test_branch_name_checking(input_branch_name)
+ expect(input_branch_name.value).to eq(issue.to_branch_name)
+
+ input_branch_name.set('new-branch-name')
+ branch_name_message = find('.js-branch-message')
+
+ expect(branch_name_message).to have_text('Checking branch name availability…')
+
+ wait_for_requests
+
+ expect(branch_name_message).to have_text('Branch name is available')
+
+ input_branch_name.set(project.default_branch)
+
+ expect(branch_name_message).to have_text('Checking branch name availability…')
+
+ wait_for_requests
+
+ expect(branch_name_message).to have_text('Branch is already taken')
+ end
+
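+  # The selected dropdown entry is marked with a check icon, and both the
+  # in-dropdown button and the top-level button mirror its label.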
+ def test_selection_mark(li_create_branch, li_create_merge_request, button_create_target, button_create_merge_request)
+ page.within(li_create_merge_request) do
+ expect(page).to have_css('i.fa.fa-check')
+ expect(button_create_target).to have_text('Create merge request')
+ expect(button_create_merge_request).to have_text('Create merge request')
+ end
+
+ li_create_branch.click
+
+ page.within(li_create_branch) do
+ expect(page).to have_css('i.fa.fa-check')
+ expect(button_create_target).to have_text('Create branch')
+ expect(button_create_merge_request).to have_text('Create branch')
+ end
+ end
+
+ def test_source_checking(input_source)
+ expect(input_source.value).to eq(project.default_branch)
+
+    input_source.set('mas') # Intentionally enter only the first 3 letters of `master` to exercise autocomplete below.
+ source_message = find('.js-ref-message')
+
+ expect(source_message).to have_text('Checking source availability…')
+
+ wait_for_requests
+
+ expect(source_message).to have_text('Source is not available')
+
+    # JavaScript fetches the refs starting with `mas` (entered above) and inserts the first match.
+    # The user sees `mas` in black (the part they entered) and `ter` in gray (a hint).
+    # Since hinting is implemented via text selection and rspec/capybara has no matchers for it,
+    # we just check the whole source value.
+ expect(input_source.value).to eq(project.default_branch)
+ end
+end
diff --git a/spec/features/issues_spec.rb b/spec/features/issues_spec.rb
index b9af77f918a..852d9e368aa 100644
--- a/spec/features/issues_spec.rb
+++ b/spec/features/issues_spec.rb
@@ -365,16 +365,16 @@ describe 'Issues' do
end
it 'changes incoming email address token', :js do
- find('.issue-email-modal-btn').click
- previous_token = find('input#issue_email').value
+ find('.issuable-email-modal-btn').click
+ previous_token = find('input#issuable_email').value
find('.incoming-email-token-reset').click
wait_for_requests
- expect(page).to have_no_field('issue_email', with: previous_token)
- new_token = project1.new_issue_address(user.reload)
+ expect(page).to have_no_field('issuable_email', with: previous_token)
+ new_token = project1.new_issuable_address(user.reload, 'issue')
expect(page).to have_field(
- 'issue_email',
+ 'issuable_email',
with: new_token
)
end
@@ -630,8 +630,8 @@ describe 'Issues' do
end
it 'click the button to show modal for the new email' do
- page.within '#issue-email-modal' do
- email = project.new_issue_address(user)
+ page.within '#issuable-email-modal' do
+ email = project.new_issuable_address(user, 'issue')
expect(page).to have_selector("input[value='#{email}']")
end
diff --git a/spec/features/logout_spec.rb b/spec/features/logout_spec.rb
new file mode 100644
index 00000000000..635729efa53
--- /dev/null
+++ b/spec/features/logout_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+describe 'Logout/Sign out', :js do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ visit root_path
+ end
+
+ it 'sign out redirects to sign in page' do
+ gitlab_sign_out
+
+ expect(current_path).to eq new_user_session_path
+ end
+
+ it 'sign out does not show signed out flash notice' do
+ gitlab_sign_out
+
+ expect(page).not_to have_selector('.flash-notice')
+ end
+end
diff --git a/spec/features/markdown_spec.rb b/spec/features/markdown_spec.rb
index b70d3060f05..e285befc66f 100644
--- a/spec/features/markdown_spec.rb
+++ b/spec/features/markdown_spec.rb
@@ -69,6 +69,12 @@ describe 'GitLab Markdown' do
end
end
+ it 'parses mermaid code block' do
+ aggregate_failures do
+ expect(doc).to have_selector('pre.code.js-render-mermaid')
+ end
+ end
+
it 'parses strikethroughs' do
expect(doc).to have_selector(%{del:contains("and this text doesn't")})
end
@@ -201,8 +207,9 @@ describe 'GitLab Markdown' do
before do
@feat = MarkdownFeature.new
- # `markdown` helper expects a `@project` variable
+      # `markdown` helper expects `@project` and `@group` variables
@project = @feat.project
+ @group = @feat.group
end
context 'default pipeline' do
diff --git a/spec/features/merge_requests/create_new_mr_spec.rb b/spec/features/merge_requests/create_new_mr_spec.rb
index 5402d61da54..db5ce2d11a8 100644
--- a/spec/features/merge_requests/create_new_mr_spec.rb
+++ b/spec/features/merge_requests/create_new_mr_spec.rb
@@ -67,6 +67,28 @@ feature 'Create New Merge Request', :js do
expect(page).to have_content 'git checkout -b orphaned-branch origin/orphaned-branch'
end
+ it 'allows filtering multiple dropdowns' do
+ visit project_new_merge_request_path(project)
+
+ first('.js-source-branch').click
+
+ input = find('.dropdown-source-branch .dropdown-input-field')
+ input.click
+ input.send_keys('orphaned-branch')
+
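+    # find(..., match: :first) waits for the filtered results to render before all() snapshots them.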
+ find('.dropdown-source-branch .dropdown-content li', match: :first)
+ source_items = all('.dropdown-source-branch .dropdown-content li')
+
+ expect(source_items.count).to eq(1)
+
+ first('.js-target-branch').click
+
+ find('.dropdown-target-branch .dropdown-content li', match: :first)
+ target_items = all('.dropdown-target-branch .dropdown-content li')
+
+ expect(target_items.count).to be > 1
+ end
+
context 'when target project cannot be viewed by the current user' do
it 'does not leak the private project name & namespace' do
private_project = create(:project, :private, :repository)
diff --git a/spec/features/merge_requests/filter_by_labels_spec.rb b/spec/features/merge_requests/filter_by_labels_spec.rb
index 9912e8165e6..7adae08e499 100644
--- a/spec/features/merge_requests/filter_by_labels_spec.rb
+++ b/spec/features/merge_requests/filter_by_labels_spec.rb
@@ -79,22 +79,6 @@ feature 'Merge Request filtering by Labels', :js do
end
end
- context 'clear button' do
- before do
- input_filtered_search('label:~bug')
- end
-
- it 'allows user to remove filtered labels' do
- first('.clear-search').click
- filtered_search.send_keys(:enter)
-
- expect(page).to have_issuable_counts(open: 3, closed: 0, all: 3)
- expect(page).to have_content "Bugfix2"
- expect(page).to have_content "Feature1"
- expect(page).to have_content "Bugfix1"
- end
- end
-
context 'filter dropdown' do
it 'filters by label name' do
init_label_search
diff --git a/spec/features/merge_requests/mini_pipeline_graph_spec.rb b/spec/features/merge_requests/mini_pipeline_graph_spec.rb
index bac56270362..93c5e945453 100644
--- a/spec/features/merge_requests/mini_pipeline_graph_spec.rb
+++ b/spec/features/merge_requests/mini_pipeline_graph_spec.rb
@@ -28,14 +28,14 @@ feature 'Mini Pipeline Graph', :js do
let(:artifacts_file2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png') }
before do
- create(:ci_build, pipeline: pipeline, artifacts_file: artifacts_file1)
+ create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file1)
create(:ci_build, pipeline: pipeline, when: 'manual')
end
it 'avoids repeated database queries' do
before = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) }
- create(:ci_build, pipeline: pipeline, artifacts_file: artifacts_file2)
+ create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file2)
create(:ci_build, pipeline: pipeline, when: 'manual')
after = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) }
diff --git a/spec/features/merge_requests/pipelines_spec.rb b/spec/features/merge_requests/pipelines_spec.rb
index a3fcc27cab0..307c860eac4 100644
--- a/spec/features/merge_requests/pipelines_spec.rb
+++ b/spec/features/merge_requests/pipelines_spec.rb
@@ -20,10 +20,14 @@ feature 'Pipelines for Merge Requests', :js do
end
before do
- visit project_merge_request_path(project, merge_request)
+ merge_request.update_attribute(:head_pipeline_id, pipeline.id)
end
scenario 'user visits merge request pipelines tab' do
+ visit project_merge_request_path(project, merge_request)
+
+ expect(page.find('.ci-widget')).to have_content('pending')
+
page.within('.merge-request-tabs') do
click_link('Pipelines')
end
@@ -31,6 +35,15 @@ feature 'Pipelines for Merge Requests', :js do
expect(page).to have_selector('.stage-cell')
end
+
+ scenario 'pipeline sha does not equal last commit sha' do
+ pipeline.update_attribute(:sha, '19e2e9b4ef76b422ce1154af39a91323ccc57434')
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+
+ expect(page.find('.ci-widget')).to have_content(
+ 'Could not connect to the CI server. Please check your settings and try again')
+ end
end
context 'without pipelines' do
diff --git a/spec/features/merge_requests/versions_spec.rb b/spec/features/merge_requests/versions_spec.rb
index 29f95039af8..482f2e51c8b 100644
--- a/spec/features/merge_requests/versions_spec.rb
+++ b/spec/features/merge_requests/versions_spec.rb
@@ -6,18 +6,47 @@ feature 'Merge Request versions', :js do
let!(:merge_request_diff1) { merge_request.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
let!(:merge_request_diff2) { merge_request.merge_request_diffs.create(head_commit_sha: nil) }
let!(:merge_request_diff3) { merge_request.merge_request_diffs.create(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
+ let!(:params) { Hash.new }
before do
sign_in(create(:admin))
- visit diffs_project_merge_request_path(project, merge_request)
+ visit diffs_project_merge_request_path(project, merge_request, params)
end
- it 'show the latest version of the diff' do
- page.within '.mr-version-dropdown' do
- expect(page).to have_content 'latest version'
+ shared_examples 'allows commenting' do |file_id:, line_code:, comment:|
+ it do
+ diff_file_selector = ".diff-file[id='#{file_id}']"
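+      # Full line codes have the form "<file id>_<old line>_<new line>".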
+ line_code = "#{file_id}_#{line_code}"
+
+ page.within(diff_file_selector) do
+ find(".line_holder[id='#{line_code}'] td:nth-of-type(1)").hover
+ find(".line_holder[id='#{line_code}'] button").click
+
+ page.within("form[data-line-code='#{line_code}']") do
+ fill_in "note[note]", with: comment
+ find(".js-comment-button").click
+ end
+
+ wait_for_requests
+
+ expect(page).to have_content(comment)
+ end
end
+ end
- expect(page).to have_content '8 changed files'
+ describe 'compare with the latest version' do
+ it 'show the latest version of the diff' do
+ page.within '.mr-version-dropdown' do
+ expect(page).to have_content 'latest version'
+ end
+
+ expect(page).to have_content '8 changed files'
+ end
+
+ it_behaves_like 'allows commenting',
+ file_id: '7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44',
+ line_code: '1_1',
+ comment: 'Typo, please fix.'
end
describe 'switch between versions' do
@@ -62,24 +91,10 @@ feature 'Merge Request versions', :js do
expect(page).to have_css(".diffs .notes[data-discussion-id='#{outdated_diff_note.discussion_id}']")
end
- it 'allows commenting' do
- diff_file_selector = ".diff-file[id='7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44']"
- line_code = '7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44_2_2'
-
- page.within(diff_file_selector) do
- find(".line_holder[id='#{line_code}'] td:nth-of-type(1)").hover
- find(".line_holder[id='#{line_code}'] button").click
-
- page.within("form[data-line-code='#{line_code}']") do
- fill_in "note[note]", with: "Typo, please fix"
- find(".js-comment-button").click
- end
-
- wait_for_requests
-
- expect(page).to have_content("Typo, please fix")
- end
- end
+ it_behaves_like 'allows commenting',
+ file_id: '7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44',
+ line_code: '2_2',
+ comment: 'Typo, please fix.'
end
describe 'compare with older version' do
@@ -132,25 +147,6 @@ feature 'Merge Request versions', :js do
expect(page).to have_css(".diffs .notes[data-discussion-id='#{outdated_diff_note.discussion_id}']")
end
- it 'allows commenting' do
- diff_file_selector = ".diff-file[id='7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44']"
- line_code = '7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44_4_4'
-
- page.within(diff_file_selector) do
- find(".line_holder[id='#{line_code}'] td:nth-of-type(1)").hover
- find(".line_holder[id='#{line_code}'] button").click
-
- page.within("form[data-line-code='#{line_code}']") do
- fill_in "note[note]", with: "Typo, please fix"
- find(".js-comment-button").click
- end
-
- wait_for_requests
-
- expect(page).to have_content("Typo, please fix")
- end
- end
-
it 'show diff between new and old version' do
expect(page).to have_content '4 changed files with 15 additions and 6 deletions'
end
@@ -162,6 +158,11 @@ feature 'Merge Request versions', :js do
end
expect(page).to have_content '8 changed files'
end
+
+ it_behaves_like 'allows commenting',
+ file_id: '7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44',
+ line_code: '4_4',
+ comment: 'Typo, please fix.'
end
describe 'compare with same version' do
@@ -210,4 +211,24 @@ feature 'Merge Request versions', :js do
expect(page).to have_content '0 changed files'
end
end
+
+ describe 'scoped in a commit' do
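+    # Overrides the `params` used by the top-level `before` block,
+    # so the diff page is visited already scoped to this commit.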
+ let(:params) { { commit_id: '570e7b2abdd848b95f2f578043fc23bd6f6fd24d' } }
+
+ before do
+ wait_for_requests
+ end
+
+    it 'only shows diffs from the commit' do
+      diff_commit_ids = find_all('.diff-file [data-commit-id]').map { |diff| diff['data-commit-id'] }
+
+ expect(diff_commit_ids).not_to be_empty
+ expect(diff_commit_ids).to all(eq(params[:commit_id]))
+ end
+
+ it_behaves_like 'allows commenting',
+ file_id: '2f6fcd96b88b36ce98c38da085c795a27d92a3dd',
+ line_code: '6_6',
+ comment: 'Typo, please fix.'
+ end
end
diff --git a/spec/features/merge_requests/widget_spec.rb b/spec/features/merge_requests/widget_spec.rb
index 2bad3b02250..3ee094c216e 100644
--- a/spec/features/merge_requests/widget_spec.rb
+++ b/spec/features/merge_requests/widget_spec.rb
@@ -63,6 +63,18 @@ describe 'Merge request', :js do
expect(page).to have_selector('.accept-merge-request')
expect(find('.accept-merge-request')['disabled']).not_to be(true)
end
+
+ it 'allows me to merge, see cherry-pick modal and load branches list' do
+ wait_for_requests
+ click_button 'Merge'
+
+ wait_for_requests
+ click_link 'Cherry-pick'
+ page.find('.js-project-refs-dropdown').click
+ wait_for_requests
+
+ expect(page.all('.js-cherry-pick-form .dropdown-content li').size).to be > 1
+ end
end
context 'view merge request with external CI service' do
diff --git a/spec/features/milestone_spec.rb b/spec/features/milestone_spec.rb
index 6c9dc67ad74..27efc32c95b 100644
--- a/spec/features/milestone_spec.rb
+++ b/spec/features/milestone_spec.rb
@@ -65,4 +65,33 @@ feature 'Milestone' do
expect(find('.alert-danger')).to have_content('already being used for another group or project milestone.')
end
end
+
+ feature 'Open a milestone' do
+ scenario 'shows total issue time spent correctly when no time has been logged' do
+ milestone = create(:milestone, project: project, title: 8.7)
+
+ visit project_milestone_path(project, milestone)
+
+ page.within('.block.time_spent') do
+ expect(page).to have_content 'No time spent'
+ expect(page).to have_content 'None'
+ end
+ end
+
+ scenario 'shows total issue time spent' do
+ milestone = create(:milestone, project: project, title: 8.7)
+ issue1 = create(:issue, project: project, milestone: milestone)
+ issue2 = create(:issue, project: project, milestone: milestone)
+ issue1.spend_time(duration: 3600, user: user)
+ issue1.save!
+ issue2.spend_time(duration: 7200, user: user)
+ issue2.save!
+
+ visit project_milestone_path(project, milestone)
+
+ page.within('.block.time_spent') do
+ expect(page).to have_content '3h'
+ end
+ end
+ end
end
diff --git a/spec/features/profiles/password_spec.rb b/spec/features/profiles/password_spec.rb
index fb4355074df..4665626f114 100644
--- a/spec/features/profiles/password_spec.rb
+++ b/spec/features/profiles/password_spec.rb
@@ -53,12 +53,13 @@ describe 'Profile > Password' do
context 'Regular user' do
let(:user) { create(:user) }
- it 'renders 200 when sign-in is disabled' do
- stub_application_setting(password_authentication_enabled: false)
+ it 'renders 404 when password authentication is disabled for the web interface and Git' do
+ stub_application_setting(password_authentication_enabled_for_web: false)
+ stub_application_setting(password_authentication_enabled_for_git: false)
visit edit_profile_password_path
- expect(page).to have_gitlab_http_status(200)
+ expect(page).to have_gitlab_http_status(404)
end
end
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index 3d465e709b9..88813d9b5ff 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
feature 'File blob', :js do
+ include MobileHelpers
+
let(:project) { create(:project, :public, :repository) }
def visit_blob(path, anchor: nil, ref: 'master')
@@ -30,6 +32,16 @@ feature 'File blob', :js do
expect(page).to have_link('Open raw')
end
end
+
+ it 'displays file actions on all screen sizes' do
+ file_actions_selector = '.file-actions'
+
+ resize_screen_sm
+ expect(page).to have_selector(file_actions_selector, visible: true)
+
+ resize_screen_xs
+ expect(page).to have_selector(file_actions_selector, visible: true)
+ end
end
context 'Markdown file' do
diff --git a/spec/features/projects/clusters/applications_spec.rb b/spec/features/projects/clusters/applications_spec.rb
new file mode 100644
index 00000000000..b34cd061ec6
--- /dev/null
+++ b/spec/features/projects/clusters/applications_spec.rb
@@ -0,0 +1,107 @@
+require 'spec_helper'
+
+feature 'Clusters Applications', :js do
+ include GoogleApi::CloudPlatformHelpers
+
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ describe 'Installing applications' do
+ before do
+ visit project_cluster_path(project, cluster)
+ end
+
+ context 'when cluster is being created' do
+      let(:cluster) { create(:cluster, :providing_by_gcp, projects: [project]) }
+
+ scenario 'user is unable to install applications' do
+ page.within('.js-cluster-application-row-helm') do
+ expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
+ expect(page.find(:css, '.js-cluster-application-install-button').text).to eq('Install')
+ end
+ end
+ end
+
+ context 'when cluster is created' do
+      let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
+
+ scenario 'user can install applications' do
+ page.within('.js-cluster-application-row-helm') do
+ expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to be_nil
+ expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Install')
+ end
+ end
+
+ context 'when user installs Helm' do
+ before do
+ allow(ClusterInstallAppWorker).to receive(:perform_async).and_return(nil)
+
+ page.within('.js-cluster-application-row-helm') do
+ page.find(:css, '.js-cluster-application-install-button').click
+ end
+ end
+
+        it 'user sees status transition' do
+ page.within('.js-cluster-application-row-helm') do
+            # FE sends the request and gets the response; the button is still "Install"
+ expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
+ expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Install')
+
+ Clusters::Cluster.last.application_helm.make_installing!
+
+            # FE starts polling and updates the button to "Installing"
+ expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
+ expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installing')
+
+ Clusters::Cluster.last.application_helm.make_installed!
+
+ expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
+ expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installed')
+ end
+
+ expect(page).to have_content('Helm Tiller was successfully installed on your cluster')
+ end
+ end
+
+ context 'when user installs Ingress' do
+ context 'when user installs application: Ingress' do
+ before do
+ allow(ClusterInstallAppWorker).to receive(:perform_async).and_return(nil)
+
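+          # Helm Tiller needs to be installed before Ingress can be installed.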
+ create(:cluster_applications_helm, :installed, cluster: cluster)
+
+ page.within('.js-cluster-application-row-ingress') do
+ page.find(:css, '.js-cluster-application-install-button').click
+ end
+ end
+
+          it 'user sees status transition' do
+ page.within('.js-cluster-application-row-ingress') do
+              # FE sends the request and gets the response; the button is still "Install"
+ expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
+ expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Install')
+
+ Clusters::Cluster.last.application_ingress.make_installing!
+
+              # FE starts polling and updates the button to "Installing"
+ expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
+ expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installing')
+
+ Clusters::Cluster.last.application_ingress.make_installed!
+
+ expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
+ expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installed')
+ end
+
+ expect(page).to have_content('Ingress was successfully installed on your cluster')
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
new file mode 100644
index 00000000000..67b8901f8fb
--- /dev/null
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -0,0 +1,138 @@
+require 'spec_helper'
+
+feature 'Gcp Cluster', :js do
+ include GoogleApi::CloudPlatformHelpers
+
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ gitlab_sign_in(user)
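+    # Shorten the status polling interval so the spec observes state transitions quickly.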
+ allow(Projects::ClustersController).to receive(:STATUS_POLLING_INTERVAL) { 100 }
+ end
+
+ context 'when user has signed with Google' do
+ before do
+ allow_any_instance_of(Projects::Clusters::GcpController)
+ .to receive(:token_in_session).and_return('token')
+ allow_any_instance_of(Projects::Clusters::GcpController)
+ .to receive(:expires_at_in_session).and_return(1.hour.since.to_i.to_s)
+ end
+
+ context 'when user does not have a cluster and visits cluster index page' do
+ before do
+ visit project_clusters_path(project)
+
+ click_link 'Add cluster'
+ click_link 'Create on GKE'
+ end
+
+ context 'when user filled form with valid parameters' do
+ before do
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:projects_zones_clusters_create) do
+ OpenStruct.new(
+ self_link: 'projects/gcp-project-12345/zones/us-central1-a/operations/ope-123',
+ status: 'RUNNING'
+ )
+ end
+
+ allow(WaitForClusterCreationWorker).to receive(:perform_in).and_return(nil)
+
+ fill_in 'cluster_provider_gcp_attributes_gcp_project_id', with: 'gcp-project-123'
+ fill_in 'cluster_name', with: 'dev-cluster'
+ click_button 'Create cluster'
+ end
+
+ it 'user sees a cluster details page and creation status' do
+ expect(page).to have_content('Cluster is being created on Google Kubernetes Engine...')
+
+ Clusters::Cluster.last.provider.make_created!
+
+ expect(page).to have_content('Cluster was successfully created on Google Kubernetes Engine')
+ end
+
+      it 'user sees an error if something goes wrong during creation' do
+ expect(page).to have_content('Cluster is being created on Google Kubernetes Engine...')
+
+ Clusters::Cluster.last.provider.make_errored!('Something wrong!')
+
+ expect(page).to have_content('Something wrong!')
+ end
+ end
+
+ context 'when user filled form with invalid parameters' do
+ before do
+ click_button 'Create cluster'
+ end
+
+ it 'user sees a validation error' do
+ expect(page).to have_css('#error_explanation')
+ end
+ end
+ end
+
+ context 'when user does have a cluster and visits cluster page' do
+ let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
+
+ before do
+ visit project_cluster_path(project, cluster)
+ end
+
+ it 'user sees a cluster details page' do
+ expect(page).to have_button('Save')
+ expect(page.find(:css, '.cluster-name').value).to eq(cluster.name)
+ end
+
+ context 'when user disables the cluster' do
+ before do
+ page.find(:css, '.js-toggle-cluster').click
+ click_button 'Save'
+ end
+
+ it 'user sees the successful message' do
+ expect(page).to have_content('Cluster was successfully updated.')
+ end
+ end
+
+ context 'when user changes cluster parameters' do
+ before do
+ fill_in 'cluster_platform_kubernetes_attributes_namespace', with: 'my-namespace'
+ click_button 'Save changes'
+ end
+
+ it 'user sees the successful message' do
+ expect(page).to have_content('Cluster was successfully updated.')
+ expect(cluster.reload.platform_kubernetes.namespace).to eq('my-namespace')
+ end
+ end
+
+      context 'when user destroys the cluster' do
+ before do
+ page.accept_confirm do
+ click_link 'Remove integration'
+ end
+ end
+
+ it 'user sees creation form with the successful message' do
+ expect(page).to have_content('Cluster integration was successfully removed.')
+ expect(page).to have_link('Add cluster')
+ end
+ end
+ end
+ end
+
+ context 'when user has not signed with Google' do
+ before do
+ visit project_clusters_path(project)
+
+ click_link 'Add cluster'
+ click_link 'Create on GKE'
+ end
+
+ it 'user sees a login page' do
+ expect(page).to have_css('.signin-with-google')
+ end
+ end
+end
diff --git a/spec/features/projects/clusters/interchangeability_spec.rb b/spec/features/projects/clusters/interchangeability_spec.rb
new file mode 100644
index 00000000000..01f9526608f
--- /dev/null
+++ b/spec/features/projects/clusters/interchangeability_spec.rb
@@ -0,0 +1,16 @@
+require 'spec_helper'
+
+feature 'Interchangeability between KubernetesService and Platform::Kubernetes' do
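+  # EXCEPT_METHODS lists interfaces the two implementations intentionally do not share;
+  # EXCEPT_METHODS_GREP_V filters out generated attribute helpers by suffix.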
+ EXCEPT_METHODS = %i[test title description help fields initialize_properties namespace namespace= api_url api_url=].freeze
+ EXCEPT_METHODS_GREP_V = %w[_touched? _changed? _was].freeze
+
+ it 'Clusters::Platform::Kubernetes covers core interfaces in KubernetesService' do
+ expected_interfaces = KubernetesService.instance_methods(false)
+ expected_interfaces = expected_interfaces - EXCEPT_METHODS
+ EXCEPT_METHODS_GREP_V.each do |g|
+ expected_interfaces = expected_interfaces.grep_v(/#{Regexp.escape(g)}\z/)
+ end
+
+ expect(expected_interfaces - Clusters::Platforms::Kubernetes.instance_methods).to be_empty
+ end
+end
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
new file mode 100644
index 00000000000..414f4acba86
--- /dev/null
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -0,0 +1,102 @@
+require 'spec_helper'
+
+feature 'User Cluster', :js do
+ include GoogleApi::CloudPlatformHelpers
+
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ gitlab_sign_in(user)
+ allow(Projects::ClustersController).to receive(:STATUS_POLLING_INTERVAL) { 100 }
+ end
+
+ context 'when user does not have a cluster and visits cluster index page' do
+ before do
+ visit project_clusters_path(project)
+
+ click_link 'Add cluster'
+ click_link 'Add an existing cluster'
+ end
+
+ context 'when user filled form with valid parameters' do
+ before do
+ fill_in 'cluster_name', with: 'dev-cluster'
+ fill_in 'cluster_platform_kubernetes_attributes_api_url', with: 'http://example.com'
+ fill_in 'cluster_platform_kubernetes_attributes_token', with: 'my-token'
+ click_button 'Add cluster'
+ end
+
+ it 'user sees a cluster details page' do
+ expect(page).to have_content('Enable cluster integration')
+ expect(page.find_field('cluster[name]').value).to eq('dev-cluster')
+ expect(page.find_field('cluster[platform_kubernetes_attributes][api_url]').value)
+ .to have_content('http://example.com')
+ expect(page.find_field('cluster[platform_kubernetes_attributes][token]').value)
+ .to have_content('my-token')
+ end
+ end
+
+ context 'when user filled form with invalid parameters' do
+ before do
+ click_button 'Add cluster'
+ end
+
+ it 'user sees a validation error' do
+ expect(page).to have_css('#error_explanation')
+ end
+ end
+ end
+
+ context 'when user does have a cluster and visits cluster page' do
+ let(:cluster) { create(:cluster, :provided_by_user, projects: [project]) }
+
+ before do
+ visit project_cluster_path(project, cluster)
+ end
+
+ it 'user sees a cluster details page' do
+ expect(page).to have_button('Save')
+ end
+
+ context 'when user disables the cluster' do
+ before do
+ page.find(:css, '.js-toggle-cluster').click
+ fill_in 'cluster_name', with: 'dev-cluster'
+ click_button 'Save'
+ end
+
+ it 'user sees the successful message' do
+ expect(page).to have_content('Cluster was successfully updated.')
+ end
+ end
+
+ context 'when user changes cluster parameters' do
+ before do
+ fill_in 'cluster_name', with: 'my-dev-cluster'
+ fill_in 'cluster_platform_kubernetes_attributes_namespace', with: 'my-namespace'
+ click_button 'Save changes'
+ end
+
+ it 'user sees the successful message' do
+ expect(page).to have_content('Cluster was successfully updated.')
+ expect(cluster.reload.name).to eq('my-dev-cluster')
+ expect(cluster.reload.platform_kubernetes.namespace).to eq('my-namespace')
+ end
+ end
+
+    context 'when user destroys the cluster' do
+ before do
+ page.accept_confirm do
+ click_link 'Remove integration'
+ end
+ end
+
+ it 'user sees creation form with the successful message' do
+ expect(page).to have_content('Cluster integration was successfully removed.')
+ expect(page).to have_link('Add cluster')
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
index 197e6df4997..008bdf2044b 100644
--- a/spec/features/projects/clusters_spec.rb
+++ b/spec/features/projects/clusters_spec.rb
@@ -3,204 +3,89 @@ require 'spec_helper'
feature 'Clusters', :js do
include GoogleApi::CloudPlatformHelpers
- let!(:project) { create(:project, :repository) }
- let!(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
before do
project.add_master(user)
gitlab_sign_in(user)
end
- context 'when user has signed in Google' do
+ context 'when user does not have a cluster and visits cluster index page' do
before do
- allow_any_instance_of(Projects::ClustersController)
- .to receive(:token_in_session).and_return('token')
- allow_any_instance_of(Projects::ClustersController)
- .to receive(:expires_at_in_session).and_return(1.hour.since.to_i.to_s)
+ visit project_clusters_path(project)
end
- context 'when user does not have a cluster and visits cluster index page' do
- before do
- visit project_clusters_path(project)
-
- click_link 'Create on GKE'
- end
+ it 'sees empty state' do
+ expect(page).to have_link('Add cluster')
+ expect(page).to have_selector('.empty-state')
+ end
+ end
- it 'user sees a new page' do
- expect(page).to have_button('Create cluster')
- end
+ context 'when user has a cluster and visits cluster index page' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
- context 'when user filled form with valid parameters' do
- before do
- double.tap do |dbl|
- allow(dbl).to receive(:status).and_return('RUNNING')
- allow(dbl).to receive(:self_link)
- .and_return('projects/gcp-project-12345/zones/us-central1-a/operations/ope-123')
- allow_any_instance_of(GoogleApi::CloudPlatform::Client)
- .to receive(:projects_zones_clusters_create).and_return(dbl)
- end
-
- allow(WaitForClusterCreationWorker).to receive(:perform_in).and_return(nil)
-
- fill_in 'cluster_provider_gcp_attributes_gcp_project_id', with: 'gcp-project-123'
- fill_in 'cluster_name', with: 'dev-cluster'
- click_button 'Create cluster'
- end
+ before do
+ visit project_clusters_path(project)
+ end
- it 'user sees a cluster details page and creation status' do
- expect(page).to have_content('Cluster is being created on Google Container Engine...')
+ it 'user sees a table with one cluster' do
+ # One is the header row, the other the cluster row
+ expect(page).to have_selector('.gl-responsive-table-row', count: 2)
+ end
- # Application Installation buttons
- page.within('.js-cluster-application-row-helm') do
- expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
- expect(page.find(:css, '.js-cluster-application-install-button').text).to eq('Install')
- end
+ it 'user sees navigation tabs' do
+ expect(page.find('.js-active-tab').text).to include('Active')
+ expect(page.find('.js-active-tab .badge').text).to include('1')
- Clusters::Cluster.last.provider.make_created!
+ expect(page.find('.js-inactive-tab').text).to include('Inactive')
+ expect(page.find('.js-inactive-tab .badge').text).to include('0')
- expect(page).to have_content('Cluster was successfully created on Google Container Engine')
- end
+ expect(page.find('.js-all-tab').text).to include('All')
+ expect(page.find('.js-all-tab .badge').text).to include('1')
+ end
- it 'user sees a error if something worng during creation' do
- expect(page).to have_content('Cluster is being created on Google Container Engine...')
+ context 'inline update of cluster' do
+ it 'user can update cluster' do
+ expect(page).to have_selector('.js-toggle-cluster-list')
+ end
- Clusters::Cluster.last.provider.make_errored!('Something wrong!')
+      context 'with successful request' do
+ it 'user sees updated cluster' do
+ expect do
+ page.find('.js-toggle-cluster-list').click
+ wait_for_requests
+ end.to change { cluster.reload.enabled }
- expect(page).to have_content('Something wrong!')
+ expect(page).not_to have_selector('.is-checked')
+ expect(cluster.reload).not_to be_enabled
end
end
- context 'when user filled form with invalid parameters' do
- before do
- click_button 'Create cluster'
- end
+ context 'with failed request' do
+      it 'user sees an error message and the cluster is not updated' do
+ expect_any_instance_of(Clusters::UpdateService).to receive(:execute).and_call_original
+ allow_any_instance_of(Clusters::Cluster).to receive(:valid?) { false }
- it 'user sees a validation error' do
- expect(page).to have_css('#error_explanation')
+ page.find('.js-toggle-cluster-list').click
+
+ expect(page).to have_content('Something went wrong on our end.')
+ expect(page).to have_selector('.is-checked')
+ expect(cluster.reload).to be_enabled
end
end
end
- context 'when user has a cluster and visits cluster index page' do
- let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:project) { cluster.project }
-
+ context 'when user clicks on a cluster' do
before do
- visit project_clusters_path(project)
+ click_link cluster.name
end
- it 'user sees an cluster details page' do
+ it 'user sees a cluster details page' do
expect(page).to have_button('Save')
expect(page.find(:css, '.cluster-name').value).to eq(cluster.name)
-
- # Application Installation buttons
- page.within('.js-cluster-application-row-helm') do
- expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to be_nil
- expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Install')
- end
- end
-
- context 'when user installs application: Helm Tiller' do
- before do
- allow(ClusterInstallAppWorker).to receive(:perform_async).and_return(nil)
-
- page.within('.js-cluster-application-row-helm') do
- page.find(:css, '.js-cluster-application-install-button').click
- end
- end
-
- it 'user sees status transition' do
- page.within('.js-cluster-application-row-helm') do
- # FE sends request and gets the response, then the buttons is "Install"
- expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
- expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Install')
-
- Clusters::Cluster.last.application_helm.make_installing!
-
- # FE starts polling and update the buttons to "Installing"
- expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
- expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installing')
-
- Clusters::Cluster.last.application_helm.make_installed!
-
- expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
- expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installed')
- end
-
- expect(page).to have_content('Helm Tiller was successfully installed on your cluster')
- end
end
-
- context 'when user installs application: Ingress' do
- before do
- allow(ClusterInstallAppWorker).to receive(:perform_async).and_return(nil)
- # Helm Tiller needs to be installed before you can install Ingress
- create(:cluster_applications_helm, :installed, cluster: cluster)
-
- visit project_clusters_path(project)
-
- page.within('.js-cluster-application-row-ingress') do
- page.find(:css, '.js-cluster-application-install-button').click
- end
- end
-
- it 'user sees status transition' do
- page.within('.js-cluster-application-row-ingress') do
- # FE sends request and gets the response, then the buttons is "Install"
- expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
- expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Install')
-
- Clusters::Cluster.last.application_ingress.make_installing!
-
- # FE starts polling and update the buttons to "Installing"
- expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
- expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installing')
-
- Clusters::Cluster.last.application_ingress.make_installed!
-
- expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
- expect(page.find(:css, '.js-cluster-application-install-button')).to have_content('Installed')
- end
-
- expect(page).to have_content('Ingress was successfully installed on your cluster')
- end
- end
-
- context 'when user disables the cluster' do
- before do
- page.find(:css, '.js-toggle-cluster').click
- click_button 'Save'
- end
-
- it 'user sees the succeccful message' do
- expect(page).to have_content('Cluster was successfully updated.')
- end
- end
-
- context 'when user destory the cluster' do
- before do
- page.accept_confirm do
- click_link 'Remove integration'
- end
- end
-
- it 'user sees creation form with the succeccful message' do
- expect(page).to have_content('Cluster integration was successfully removed.')
- expect(page).to have_link('Create on GKE')
- end
- end
- end
- end
-
- context 'when user has not signed in Google' do
- before do
- visit project_clusters_path(project)
-
- click_link 'Create on GKE'
- end
-
- it 'user sees a login page' do
- expect(page).to have_css('.signin-with-google')
end
end
end
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index 5fc3ba54f65..dfcf97ad495 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -101,35 +101,48 @@ feature 'Environment' do
end
context 'with terminal' do
- let(:project) { create(:kubernetes_project, :test_repo) }
+ shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
+ context 'for project master' do
+ let(:role) { :master }
- context 'for project master' do
- let(:role) { :master }
+ scenario 'it shows the terminal button' do
+ expect(page).to have_terminal_button
+ end
- scenario 'it shows the terminal button' do
- expect(page).to have_terminal_button
+ context 'web terminal', :js do
+ before do
+ # Stub #terminals as it causes js-enabled feature specs to render the page incorrectly
+ allow_any_instance_of(Environment).to receive(:terminals) { nil }
+ visit terminal_project_environment_path(project, environment)
+ end
+
+ it 'displays a web terminal' do
+ expect(page).to have_selector('#terminal')
+ expect(page).to have_link(nil, href: environment.external_url)
+ end
+ end
end
- context 'web terminal', :js do
- before do
- # Stub #terminals as it causes js-enabled feature specs to render the page incorrectly
- allow_any_instance_of(Environment).to receive(:terminals) { nil }
- visit terminal_project_environment_path(project, environment)
- end
+ context 'for developer' do
+ let(:role) { :developer }
- it 'displays a web terminal' do
- expect(page).to have_selector('#terminal')
- expect(page).to have_link(nil, href: environment.external_url)
+ scenario 'does not show terminal button' do
+ expect(page).not_to have_terminal_button
end
end
end
- context 'for developer' do
- let(:role) { :developer }
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ let(:project) { create(:kubernetes_project, :test_repo) }
- scenario 'does not show terminal button' do
- expect(page).not_to have_terminal_button
- end
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
+ end
+
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
end
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index b4eb5795470..4a05313c14a 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -14,8 +14,10 @@ feature 'Environments page', :js do
it 'shows "Available" and "Stopped" tab with links' do
visit_environments(project)
- expect(page).to have_link('Available')
- expect(page).to have_link('Stopped')
+ expect(page).to have_selector('.js-environments-tab-available')
+ expect(page).to have_content('Available')
+ expect(page).to have_selector('.js-environments-tab-stopped')
+ expect(page).to have_content('Stopped')
end
describe 'with one available environment' do
@@ -75,8 +77,8 @@ feature 'Environments page', :js do
it 'does not show environments and counters are set to zero' do
expect(page).to have_content('You don\'t have any environments right now.')
- expect(page.find('.js-available-environments-count').text).to eq('0')
- expect(page.find('.js-stopped-environments-count').text).to eq('0')
+ expect(page.find('.js-environments-tab-available .badge').text).to eq('0')
+ expect(page.find('.js-environments-tab-stopped .badge').text).to eq('0')
end
end
@@ -93,8 +95,8 @@ feature 'Environments page', :js do
it 'shows environments names and counters' do
expect(page).to have_link(environment.name)
- expect(page.find('.js-available-environments-count').text).to eq('1')
- expect(page.find('.js-stopped-environments-count').text).to eq('0')
+ expect(page.find('.js-environments-tab-available .badge').text).to eq('1')
+ expect(page.find('.js-environments-tab-stopped .badge').text).to eq('0')
end
it 'does not show deployments' do
@@ -206,22 +208,35 @@ feature 'Environments page', :js do
end
context 'when kubernetes terminal is available' do
- let(:project) { create(:kubernetes_project, :test_repo) }
+ shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
+ context 'for project master' do
+ let(:role) { :master }
- context 'for project master' do
- let(:role) { :master }
+ it 'shows the terminal button' do
+ expect(page).to have_terminal_button
+ end
+ end
+
+ context 'when user is a developer' do
+ let(:role) { :developer }
- it 'shows the terminal button' do
- expect(page).to have_terminal_button
+ it 'does not show terminal button' do
+ expect(page).not_to have_terminal_button
+ end
end
end
- context 'when user is a developer' do
- let(:role) { :developer }
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ let(:project) { create(:kubernetes_project, :test_repo) }
- it 'does not show terminal button' do
- expect(page).not_to have_terminal_button
- end
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
+ end
+
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let(:cluster) { create(:cluster, :provided_by_gcp, projects: [create(:project, :repository)]) }
+ let(:project) { cluster.project }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
end
end
@@ -294,11 +309,32 @@ feature 'Environments page', :js do
end
end
+ describe 'environments folders view' do
+ before do
+ create(:environment, project: project,
+ name: 'staging.review/review-1',
+ state: :available)
+ create(:environment, project: project,
+ name: 'staging.review/review-2',
+ state: :available)
+ end
+
+ scenario 'user opens folder view' do
+ visit folder_project_environments_path(project, 'staging.review')
+ wait_for_requests
+
+ expect(page).to have_content('Environments / staging.review')
+ expect(page).to have_content('review-1')
+ expect(page).to have_content('review-2')
+ end
+ end
+
def have_terminal_button
have_link(nil, href: terminal_project_environment_path(project, environment))
end
def visit_environments(project, **opts)
visit project_environments_path(project, **opts)
+ wait_for_requests
end
end
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index 951456763dc..033c45a60bf 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -177,7 +177,7 @@ describe 'Edit Project Settings' do
click_button "Save changes"
end
- expect(find(".sharing-permissions")).to have_selector(".project-feature-toggle.disabled", count: 2)
+ expect(find(".sharing-permissions")).to have_selector(".project-feature-toggle.is-disabled", count: 2)
end
it "shows empty features project homepage" do
@@ -272,10 +272,10 @@ describe 'Edit Project Settings' do
end
def toggle_feature_off(feature_name)
- find(".project-feature-controls[data-for=\"#{feature_name}\"] .project-feature-toggle.checked").click
+ find(".project-feature-controls[data-for=\"#{feature_name}\"] .project-feature-toggle.is-checked").click
end
def toggle_feature_on(feature_name)
- find(".project-feature-controls[data-for=\"#{feature_name}\"] .project-feature-toggle:not(.checked)").click
+ find(".project-feature-controls[data-for=\"#{feature_name}\"] .project-feature-toggle:not(.is-checked)").click
end
end
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index e10d29e5eea..842840cc04c 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe 'Project fork' do
+ include ProjectForksHelper
+
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
@@ -24,8 +26,9 @@ describe 'Project fork' do
end
context 'master in group' do
+ let(:group) { create(:group) }
+
before do
- group = create(:group)
group.add_master(user)
end
@@ -53,5 +56,17 @@ describe 'Project fork' do
expect(page).to have_css('.fork-thumbnail', count: 2)
expect(page).to have_css('.fork-thumbnail.disabled')
end
+
+ it 'links to the fork if the project was already forked within that namespace' do
+ forked_project = fork_project(project, user, namespace: group, repository: true)
+
+ visit new_project_fork_path(project)
+
+ expect(page).to have_css('div.forked', text: group.full_name)
+
+ click_link group.full_name
+
+ expect(current_path).to eq(project_path(forked_project))
+ end
end
end
diff --git a/spec/features/projects/import_export/test_project_export.tar.gz b/spec/features/projects/import_export/test_project_export.tar.gz
index fb6a3b8e733..0c354298433 100644
--- a/spec/features/projects/import_export/test_project_export.tar.gz
+++ b/spec/features/projects/import_export/test_project_export.tar.gz
Binary files differ
diff --git a/spec/features/projects/issuable_templates_spec.rb b/spec/features/projects/issuable_templates_spec.rb
index a012db8fd27..0257cd157c9 100644
--- a/spec/features/projects/issuable_templates_spec.rb
+++ b/spec/features/projects/issuable_templates_spec.rb
@@ -32,7 +32,7 @@ feature 'issuable templates', :js do
message: 'added issue template',
branch_name: 'master')
visit project_issue_path project, issue
- page.within('.content .issuable-actions') do
+ page.within('.js-issuable-actions') do
click_on 'Edit'
end
fill_in :'issuable-title', with: 'test issue title'
@@ -77,7 +77,7 @@ feature 'issuable templates', :js do
message: 'added issue template',
branch_name: 'master')
visit project_issue_path project, issue
- page.within('.content .issuable-actions') do
+ page.within('.js-issuable-actions') do
click_on 'Edit'
end
fill_in :'issuable-title', with: 'test issue title'
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index c2a0d2395a9..0b0d5a2dce8 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -187,7 +187,7 @@ feature 'Jobs' do
context "Download artifacts" do
before do
- job.update_attributes(artifacts_file: artifacts_file)
+ job.update_attributes(legacy_artifacts_file: artifacts_file)
visit project_job_path(project, job)
end
@@ -198,7 +198,7 @@ feature 'Jobs' do
context 'Artifacts expire date' do
before do
- job.update_attributes(artifacts_file: artifacts_file,
+ job.update_attributes(legacy_artifacts_file: artifacts_file,
artifacts_expire_at: expire_at)
visit project_job_path(project, job)
@@ -422,14 +422,14 @@ feature 'Jobs' do
describe "GET /:project/jobs/:id/download" do
before do
- job.update_attributes(artifacts_file: artifacts_file)
+ job.update_attributes(legacy_artifacts_file: artifacts_file)
visit project_job_path(project, job)
click_link 'Download'
end
context "Build from other project" do
before do
- job2.update_attributes(artifacts_file: artifacts_file)
+ job2.update_attributes(legacy_artifacts_file: artifacts_file)
visit download_project_job_artifacts_path(project, job2)
end
diff --git a/spec/features/projects/members/list_spec.rb b/spec/features/projects/members/list_spec.rb
index 237c059e595..65b11a1d9e7 100644
--- a/spec/features/projects/members/list_spec.rb
+++ b/spec/features/projects/members/list_spec.rb
@@ -55,6 +55,22 @@ feature 'Project members list' do
end
end
+ scenario 'remove user from project', :js do
+ other_user = create(:user)
+ project.add_developer(other_user)
+
+ visit_members_page
+
+ accept_confirm do
+ find(:css, 'li.project_member', text: other_user.name).find(:css, 'a.btn-remove').click
+ end
+
+ wait_for_requests
+
+ expect(page).not_to have_content(other_user.name)
+ expect(project.users).not_to include(other_user)
+ end
+
scenario 'invite user to project', :js do
visit_members_page
diff --git a/spec/features/projects/no_password_spec.rb b/spec/features/projects/no_password_spec.rb
index 6aff079dd39..b3b3212556c 100644
--- a/spec/features/projects/no_password_spec.rb
+++ b/spec/features/projects/no_password_spec.rb
@@ -30,7 +30,7 @@ feature 'No Password Alert' do
let(:user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'saml') }
before do
- stub_application_setting(password_authentication_enabled?: false)
+ stub_application_setting(password_authentication_enabled_for_git?: false)
stub_omniauth_saml_config(enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'], providers: [mock_saml_config])
end
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index b8fa1a54c24..888e290292b 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -185,6 +185,36 @@ describe 'Pipeline', :js do
end
end
+ context 'when user does not have access to read jobs' do
+ before do
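+ # Restrict build visibility so the signed-in user cannot read jobs on this pipeline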
+ project.update(public_builds: false)
+ end
+
+ describe 'GET /:project/pipelines/:id' do
+ include_context 'pipeline builds'
+
+ let(:project) { create(:project, :repository) }
+ let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id, user: user) }
+
+ before do
+ visit project_pipeline_path(project, pipeline)
+ end
+
+ it 'shows the pipeline graph' do
+ expect(page).to have_selector('.pipeline-visualization')
+ expect(page).to have_content('Build')
+ expect(page).to have_content('Test')
+ expect(page).to have_content('Deploy')
+ expect(page).to have_content('Retry')
+ expect(page).to have_content('Cancel running')
+ end
+
+ it 'does not link to job' do
+ expect(page).not_to have_selector('.js-pipeline-graph-job-link')
+ end
+ end
+ end
+
describe 'GET /:project/pipelines/:id/builds' do
include_context 'pipeline builds'
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index fc689bbb486..b87b47d0e1a 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -56,31 +56,37 @@ describe 'Pipelines', :js do
end
it 'shows a tab for All pipelines and count' do
- expect(page.find('.js-pipelines-tab-all a').text).to include('All')
+ expect(page.find('.js-pipelines-tab-all').text).to include('All')
expect(page.find('.js-pipelines-tab-all .badge').text).to include('1')
end
it 'shows a tab for Pending pipelines and count' do
- expect(page.find('.js-pipelines-tab-pending a').text).to include('Pending')
+ expect(page.find('.js-pipelines-tab-pending').text).to include('Pending')
expect(page.find('.js-pipelines-tab-pending .badge').text).to include('0')
end
it 'shows a tab for Running pipelines and count' do
- expect(page.find('.js-pipelines-tab-running a').text).to include('Running')
+ expect(page.find('.js-pipelines-tab-running').text).to include('Running')
expect(page.find('.js-pipelines-tab-running .badge').text).to include('1')
end
it 'shows a tab for Finished pipelines and count' do
- expect(page.find('.js-pipelines-tab-finished a').text).to include('Finished')
+ expect(page.find('.js-pipelines-tab-finished').text).to include('Finished')
expect(page.find('.js-pipelines-tab-finished .badge').text).to include('0')
end
it 'shows a tab for Branches' do
- expect(page.find('.js-pipelines-tab-branches a').text).to include('Branches')
+ expect(page.find('.js-pipelines-tab-branches').text).to include('Branches')
end
it 'shows a tab for Tags' do
- expect(page.find('.js-pipelines-tab-tags a').text).to include('Tags')
+ expect(page.find('.js-pipelines-tab-tags').text).to include('Tags')
+ end
+
+ it 'updates content when tab is clicked' do
+ page.find('.js-pipelines-tab-pending').click
+ wait_for_requests
+ expect(page).to have_content('No pipelines to show.')
end
end
@@ -298,7 +304,7 @@ describe 'Pipelines', :js do
context 'with artifacts expired' do
let!(:with_artifacts_expired) do
- create(:ci_build, :artifacts_expired, :success,
+ create(:ci_build, :expired, :success,
pipeline: pipeline,
name: 'rspec',
stage: 'test')
@@ -396,6 +402,14 @@ describe 'Pipelines', :js do
expect(page).to have_selector('.gl-pagination .page', count: 2)
end
+
+ it 'shows updated content' do
+ visit project_pipelines_path(project)
+ wait_for_requests
+ page.find('.js-next-button a').click
+
+ expect(page).to have_selector('.gl-pagination .page', count: 2)
+ end
end
end
@@ -486,6 +500,18 @@ describe 'Pipelines', :js do
end
it { expect(page).to have_content('Missing .gitlab-ci.yml file') }
+ it 'creates a pipeline on retry when the first request failed and a valid .gitlab-ci.yml file is now available' do
+ click_button project.default_branch
+
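+ # Stub a valid .gitlab-ci.yml so the retried request can create a pipeline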
+ stub_ci_pipeline_to_return_yaml_file
+
+ page.within '.dropdown-menu' do
+ click_link 'master'
+ end
+
+ expect { click_on 'Create pipeline' }
+ .to change { Ci::Pipeline.count }.by(1)
+ end
end
end
end
diff --git a/spec/features/projects/project_settings_spec.rb b/spec/features/projects/project_settings_spec.rb
index 15a5cd9990b..a3ea778d401 100644
--- a/spec/features/projects/project_settings_spec.rb
+++ b/spec/features/projects/project_settings_spec.rb
@@ -144,7 +144,10 @@ describe 'Edit Project Settings' do
specify 'the project is accessible via the new path' do
transfer_project(project, group)
new_path = namespace_project_path(group, project)
+
visit new_path
+ wait_for_requests
+
expect(current_path).to eq(new_path)
expect(find('.breadcrumbs')).to have_content(project.name)
end
@@ -153,7 +156,10 @@ describe 'Edit Project Settings' do
old_path = project_path(project)
transfer_project(project, group)
new_path = namespace_project_path(group, project)
+
visit old_path
+ wait_for_requests
+
expect(current_path).to eq(new_path)
expect(find('.breadcrumbs')).to have_content(project.name)
end
diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb
index ea8f997409d..561f08cba00 100644
--- a/spec/features/projects/settings/pipelines_settings_spec.rb
+++ b/spec/features/projects/settings/pipelines_settings_spec.rb
@@ -8,13 +8,14 @@ feature "Pipelines settings" do
background do
sign_in(user)
project.team << [user, role]
- visit project_pipelines_settings_path(project)
end
context 'for developer' do
given(:role) { :developer }
scenario 'to be disallowed to view' do
+ visit project_settings_ci_cd_path(project)
+
expect(page.status_code).to eq(404)
end
end
@@ -23,6 +24,8 @@ feature "Pipelines settings" do
given(:role) { :master }
scenario 'be allowed to change' do
+ visit project_settings_ci_cd_path(project)
+
fill_in('Test coverage parsing', with: 'coverage_regex')
click_on 'Save changes'
@@ -32,6 +35,8 @@ feature "Pipelines settings" do
end
scenario 'updates auto_cancel_pending_pipelines' do
+ visit project_settings_ci_cd_path(project)
+
page.check('Auto-cancel redundant, pending pipelines')
click_on 'Save changes'
@@ -42,14 +47,18 @@ feature "Pipelines settings" do
expect(checkbox).to be_checked
end
- scenario 'update auto devops settings' do
- fill_in('project_auto_devops_attributes_domain', with: 'test.com')
- page.choose('project_auto_devops_attributes_enabled_false')
- click_on 'Save changes'
+ describe 'Auto DevOps' do
+ it 'updates Auto DevOps settings' do
+ visit project_settings_ci_cd_path(project)
- expect(page.status_code).to eq(200)
- expect(project.auto_devops).to be_present
- expect(project.auto_devops).not_to be_enabled
+ fill_in('project_auto_devops_attributes_domain', with: 'test.com')
+ page.choose('project_auto_devops_attributes_enabled_false')
+ click_on 'Save changes'
+
+ expect(page.status_code).to eq(200)
+ expect(project.auto_devops).to be_present
+ expect(project.auto_devops).not_to be_enabled
+ end
end
end
end
diff --git a/spec/features/projects/snippets_spec.rb b/spec/features/projects/snippets_spec.rb
index 1cfbbb4cb62..0fa7ca9afd4 100644
--- a/spec/features/projects/snippets_spec.rb
+++ b/spec/features/projects/snippets_spec.rb
@@ -39,6 +39,11 @@ describe 'Project snippets', :js do
expect(page).to have_selector('.atwho-view')
end
+
+ it 'has zen mode' do
+ find('.js-zen-enter').click
+ expect(page).to have_selector('.fullscreen')
+ end
end
end
end
diff --git a/spec/features/projects/tree/create_directory_spec.rb b/spec/features/projects/tree/create_directory_spec.rb
index 1686e7fa342..156293289dd 100644
--- a/spec/features/projects/tree/create_directory_spec.rb
+++ b/spec/features/projects/tree/create_directory_spec.rb
@@ -26,9 +26,11 @@ feature 'Multi-file editor new directory', :js do
click_button('Create directory')
end
+ find('.multi-file-commit-panel-collapse-btn').click
+
fill_in('commit-message', with: 'commit message')
- click_button('Commit 1 file')
+ click_button('Commit')
expect(page).to have_selector('td', text: 'commit message')
end
diff --git a/spec/features/projects/tree/create_file_spec.rb b/spec/features/projects/tree/create_file_spec.rb
index 1e2de0711b8..8fb8476e631 100644
--- a/spec/features/projects/tree/create_file_spec.rb
+++ b/spec/features/projects/tree/create_file_spec.rb
@@ -26,9 +26,11 @@ feature 'Multi-file editor new file', :js do
click_button('Create file')
end
+ find('.multi-file-commit-panel-collapse-btn').click
+
fill_in('commit-message', with: 'commit message')
- click_button('Commit 1 file')
+ click_button('Commit')
expect(page).to have_selector('td', text: 'commit message')
end
diff --git a/spec/features/projects/tree/upload_file_spec.rb b/spec/features/projects/tree/upload_file_spec.rb
index 8439bb5a69e..d4e57d1ecfa 100644
--- a/spec/features/projects/tree/upload_file_spec.rb
+++ b/spec/features/projects/tree/upload_file_spec.rb
@@ -26,7 +26,7 @@ feature 'Multi-file editor upload file', :js do
find('.add-to-tree').click
- expect(page).to have_selector('.repo-tab', text: 'doc_sample.txt')
+ expect(page).to have_selector('.multi-file-tab', text: 'doc_sample.txt')
expect(find('.blob-editor-container .lines-content')['innerText']).to have_content(File.open(txt_file, &:readline))
end
@@ -39,7 +39,7 @@ feature 'Multi-file editor upload file', :js do
find('.add-to-tree').click
- expect(page).to have_selector('.repo-tab', text: 'dk.png')
+ expect(page).to have_selector('.multi-file-tab', text: 'dk.png')
expect(page).not_to have_selector('.monaco-editor')
expect(page).to have_content('The source could not be displayed for this temporary file.')
end
diff --git a/spec/features/projects/user_creates_project_spec.rb b/spec/features/projects/user_creates_project_spec.rb
index 4a152572502..f95469ad070 100644
--- a/spec/features/projects/user_creates_project_spec.rb
+++ b/spec/features/projects/user_creates_project_spec.rb
@@ -6,10 +6,11 @@ feature 'User creates a project', :js do
before do
sign_in(user)
create(:personal_key, user: user)
- visit(new_project_path)
end
it 'creates a new project' do
+ visit(new_project_path)
+
fill_in(:project_path, with: 'Empty')
page.within('#content-body') do
@@ -24,4 +25,32 @@ feature 'User creates a project', :js do
expect(page).to have_content('git remote')
expect(page).to have_content(project.url_to_repo)
end
+
+ context 'in a subgroup they do not own', :nested_groups do
+ let(:parent) { create(:group) }
+ let!(:subgroup) { create(:group, parent: parent) }
+
+ before do
+ parent.add_owner(user)
+ end
+
+ it 'creates a new project' do
+ visit(new_project_path)
+
+ fill_in :project_path, with: 'a-subgroup-project'
+
+ page.find('.js-select-namespace').click
+ page.find("div[role='option']", text: subgroup.full_path).click
+
+ page.within('#content-body') do
+ click_button('Create project')
+ end
+
+ expect(page).to have_content("Project 'a-subgroup-project' was successfully created")
+
+ project = Project.last
+
+ expect(project.namespace).to eq(subgroup)
+ end
+ end
end
diff --git a/spec/features/projects/user_transfers_a_project_spec.rb b/spec/features/projects/user_transfers_a_project_spec.rb
new file mode 100644
index 00000000000..78f72b644ff
--- /dev/null
+++ b/spec/features/projects/user_transfers_a_project_spec.rb
@@ -0,0 +1,49 @@
+require 'spec_helper'
+
+feature 'User transfers a project', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository, namespace: user.namespace) }
+
+ before do
+ sign_in user
+ end
+
+ def transfer_project(project, group)
+ visit edit_project_path(project)
+
+ page.within('.js-project-transfer-form') do
+ page.find('.select2-container').click
+ end
+
+ page.find("div[role='option']", text: group.full_name).click
+
+ click_button('Transfer project')
+
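+ # The confirmation modal requires typing the project name before the transfer proceeds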
+ fill_in 'confirm_name_input', with: project.name
+
+ click_button 'Confirm'
+
+ wait_for_requests
+ end
+
+ it 'allows transferring a project to a subgroup of a namespace' do
+ group = create(:group)
+ group.add_owner(user)
+
+ transfer_project(project, group)
+
+ expect(project.reload.namespace).to eq(group)
+ end
+
+ context 'when nested groups are available', :nested_groups do
+ it 'allows transferring a project to a subgroup' do
+ parent = create(:group)
+ parent.add_owner(user)
+ subgroup = create(:group, parent: parent)
+
+ transfer_project(project, subgroup)
+
+ expect(project.reload.namespace).to eq(subgroup)
+ end
+ end
+end
diff --git a/spec/finders/admin/projects_finder_spec.rb b/spec/finders/admin/projects_finder_spec.rb
index 4b67203a0df..7901d5fee28 100644
--- a/spec/finders/admin/projects_finder_spec.rb
+++ b/spec/finders/admin/projects_finder_spec.rb
@@ -136,7 +136,7 @@ describe Admin::ProjectsFinder do
context 'filter by name' do
let(:params) { { name: 'C' } }
- it { is_expected.to match_array([shared_project, public_project, private_project]) }
+ it { is_expected.to match_array([public_project]) }
end
context 'sorting' do
diff --git a/spec/finders/autocomplete_users_finder_spec.rb b/spec/finders/autocomplete_users_finder_spec.rb
index 684af74d750..dcf9111776e 100644
--- a/spec/finders/autocomplete_users_finder_spec.rb
+++ b/spec/finders/autocomplete_users_finder_spec.rb
@@ -42,6 +42,21 @@ describe AutocompleteUsersFinder do
it { is_expected.to match_array([user1]) }
end
+ context 'when passed a subgroup', :nested_groups do
+ let(:grandparent) { create(:group, :public) }
+ let(:parent) { create(:group, :public, parent: grandparent) }
+ let(:child) { create(:group, :public, parent: parent) }
+ let(:group) { parent }
+
+ let!(:grandparent_user) { create(:group_member, :developer, group: grandparent).user }
+ let!(:parent_user) { create(:group_member, :developer, group: parent).user }
+ let!(:child_user) { create(:group_member, :developer, group: child).user }
+
+ it 'includes users from the group and its ancestors but not its descendants' do
+ expect(subject).to match_array([grandparent_user, parent_user])
+ end
+ end
+
it { is_expected.to match_array([user1, external_user, omniauth_user, current_user]) }
context 'when filtered by search' do
diff --git a/spec/finders/clusters_finder_spec.rb b/spec/finders/clusters_finder_spec.rb
new file mode 100644
index 00000000000..c10efac2432
--- /dev/null
+++ b/spec/finders/clusters_finder_spec.rb
@@ -0,0 +1,31 @@
+require 'spec_helper'
+
+describe ClustersFinder do
+ let(:project) { create(:project) }
+ set(:user) { create(:user) }
+
+ describe '#execute' do
+ let(:enabled_cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
+ let(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, projects: [project]) }
+
+ subject { described_class.new(project, user, scope).execute }
+
+ context 'when scope is all' do
+ let(:scope) { :all }
+
+ it { is_expected.to match_array([enabled_cluster, disabled_cluster]) }
+ end
+
+ context 'when scope is active' do
+ let(:scope) { :active }
+
+ it { is_expected.to match_array([enabled_cluster]) }
+ end
+
+ context 'when scope is inactive' do
+ let(:scope) { :inactive }
+
+ it { is_expected.to match_array([disabled_cluster]) }
+ end
+ end
+end
diff --git a/spec/finders/runner_jobs_finder_spec.rb b/spec/finders/runner_jobs_finder_spec.rb
new file mode 100644
index 00000000000..4275b1a7ff1
--- /dev/null
+++ b/spec/finders/runner_jobs_finder_spec.rb
@@ -0,0 +1,39 @@
+require 'spec_helper'
+
+describe RunnerJobsFinder do
+ let(:project) { create(:project) }
+ let(:runner) { create(:ci_runner, :shared) }
+
+ subject { described_class.new(runner, params).execute }
+
+ describe '#execute' do
+ context 'when params is empty' do
+ let(:params) { {} }
+ let!(:job) { create(:ci_build, runner: runner, project: project) }
+ let!(:job1) { create(:ci_build, project: project) }
+
+ it 'returns all jobs assigned to the runner' do
+ is_expected.to match_array([job])
+ is_expected.not_to include(job1)
+ end
+ end
+
+ context 'when params contains status' do
+ HasStatus::AVAILABLE_STATUSES.each do |target_status|
+ context "when status is #{target_status}" do
+ let(:params) { { status: target_status } }
+ let!(:job) { create(:ci_build, runner: runner, project: project, status: target_status) }
+
+ before do
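+ # Create a job with a different status to prove it is excluded from the results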
+ exception_status = HasStatus::AVAILABLE_STATUSES - [target_status]
+ create(:ci_build, runner: runner, project: project, status: exception_status.first)
+ end
+
+ it 'returns matched job' do
+ is_expected.to eq([job])
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/fixtures/api/schemas/issue.json b/spec/fixtures/api/schemas/issue.json
index a55ecaa5697..b579e32c9aa 100644
--- a/spec/fixtures/api/schemas/issue.json
+++ b/spec/fixtures/api/schemas/issue.json
@@ -13,6 +13,8 @@
"confidential": { "type": "boolean" },
"due_date": { "type": ["date", "null"] },
"relative_position": { "type": "integer" },
+ "issue_sidebar_endpoint": { "type": "string" },
+ "toggle_subscription_endpoint": { "type": "string" },
"project": {
"id": { "type": "integer" },
"path": { "type": "string" }
diff --git a/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json b/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json
new file mode 100644
index 00000000000..4ba6422406c
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json
@@ -0,0 +1,18 @@
+{
+ "type": "object",
+ "properties": {
+ "domain": { "type": "string" },
+ "url": { "type": "uri" },
+ "certificate_expiration": {
+ "type": "object",
+ "properties": {
+ "expired": { "type": "boolean" },
+ "expiration": { "type": "string" }
+ },
+ "required": ["expired", "expiration"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["domain", "url"],
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json b/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json
new file mode 100644
index 00000000000..08db8d47050
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json
@@ -0,0 +1,20 @@
+{
+ "type": "object",
+ "properties": {
+ "domain": { "type": "string" },
+ "url": { "type": "uri" },
+ "certificate": {
+ "type": "object",
+ "properties": {
+ "subject": { "type": "string" },
+ "expired": { "type": "boolean" },
+ "certificate": { "type": "string" },
+ "certificate_text": { "type": "string" }
+ },
+ "required": ["subject", "expired"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["domain", "url"],
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/pages_domain_basics.json b/spec/fixtures/api/schemas/public_api/v4/pages_domain_basics.json
new file mode 100644
index 00000000000..c7d86de7d8e
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/pages_domain_basics.json
@@ -0,0 +1,4 @@
+{
+ "type": "array",
+ "items": { "$ref": "pages_domain/basic.json" }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/pages_domains.json b/spec/fixtures/api/schemas/public_api/v4/pages_domains.json
index 0de1d0f1228..7c27218dc5a 100644
--- a/spec/fixtures/api/schemas/public_api/v4/pages_domains.json
+++ b/spec/fixtures/api/schemas/public_api/v4/pages_domains.json
@@ -1,23 +1,4 @@
{
"type": "array",
- "items": {
- "type": "object",
- "properties": {
- "domain": { "type": "string" },
- "url": { "type": "uri" },
- "certificate": {
- "type": "object",
- "properties": {
- "subject": { "type": "string" },
- "expired": { "type": "boolean" },
- "certificate": { "type": "string" },
- "certificate_text": { "type": "string" }
- },
- "required": ["subject", "expired"],
- "additionalProperties": false
- }
- },
- "required": ["domain", "url"],
- "additionalProperties": false
- }
+ "items": { "$ref": "pages_domain/detail.json" }
}
diff --git a/spec/fixtures/emails/valid_new_merge_request.eml b/spec/fixtures/emails/valid_new_merge_request.eml
new file mode 100644
index 00000000000..480675a6d7e
--- /dev/null
+++ b/spec/fixtures/emails/valid_new_merge_request.eml
@@ -0,0 +1,18 @@
+Return-Path: <jake@adventuretime.ooo>
+Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
+Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400
+Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700
+Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
+Date: Thu, 13 Jun 2013 17:03:48 -0400
+From: Jake the Dog <jake@adventuretime.ooo>
+To: incoming+gitlabhq/gitlabhq+merge-request+auth_token@appmail.adventuretime.ooo
+Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+Subject: feature
+Mime-Version: 1.0
+Content-Type: text/plain;
+ charset=ISO-8859-1
+Content-Transfer-Encoding: 7bit
+X-Sieve: CMU Sieve 2.2
+X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu,
+ 13 Jun 2013 14:03:48 -0700 (PDT)
+X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1
diff --git a/spec/fixtures/emails/valid_new_merge_request_no_subject.eml b/spec/fixtures/emails/valid_new_merge_request_no_subject.eml
new file mode 100644
index 00000000000..27eb1b7d922
--- /dev/null
+++ b/spec/fixtures/emails/valid_new_merge_request_no_subject.eml
@@ -0,0 +1,18 @@
+Return-Path: <jake@adventuretime.ooo>
+Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
+Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400
+Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700
+Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
+Date: Thu, 13 Jun 2013 17:03:48 -0400
+From: Jake the Dog <jake@adventuretime.ooo>
+To: incoming+gitlabhq/gitlabhq+merge-request+auth_token@appmail.adventuretime.ooo
+Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+Subject:
+Mime-Version: 1.0
+Content-Type: text/plain;
+ charset=ISO-8859-1
+Content-Transfer-Encoding: 7bit
+X-Sieve: CMU Sieve 2.2
+X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu,
+ 13 Jun 2013 14:03:48 -0700 (PDT)
+X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1
diff --git a/spec/fixtures/markdown.md.erb b/spec/fixtures/markdown.md.erb
index 4f46e40ce7a..638cd8b07c8 100644
--- a/spec/fixtures/markdown.md.erb
+++ b/spec/fixtures/markdown.md.erb
@@ -268,3 +268,37 @@ However the wrapping tags can not be mixed as such -
### Videos
![My Video](/assets/videos/gitlab-demo.mp4)
+
+### Mermaid
+
+> If this is not rendered correctly, see
+https://gitlab.com/gitlab-org/gitlab-ce/blob/master/doc/user/markdown.md#mermaid
+
+It is possible to generate diagrams and flowcharts from text using [Mermaid][mermaid].
+
+To generate a diagram or flowchart, write your text inside the `mermaid` block.
+
+Example:
+
+ ```mermaid
+ graph TD;
+ A-->B;
+ A-->C;
+ B-->D;
+ C-->D;
+ ```
+
+Becomes:
+
+```mermaid
+graph TD;
+ A-->B;
+ A-->C;
+ B-->D;
+ C-->D;
+```
+
+For details, see the [Mermaid official page][mermaid].
+
+[mermaid]: https://mermaidjs.github.io/ "Mermaid website"
+
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index 7a241b02d28..5c5d53877a6 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -4,8 +4,6 @@ require 'spec_helper'
describe ApplicationHelper do
include UploadHelpers
- let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" }
-
describe 'current_controller?' do
it 'returns true when controller matches argument' do
stub_controller_name('foo')
@@ -57,30 +55,11 @@ describe ApplicationHelper do
end
describe 'project_icon' do
- let(:asset_host) { 'http://assets' }
-
it 'returns an url for the avatar' do
project = create(:project, :public, avatar: File.open(uploaded_image_temp_path))
- avatar_url = "/uploads/-/system/project/avatar/#{project.id}/banana_sample.gif"
-
- expect(helper.project_icon(project.full_path).to_s)
- .to eq "<img data-src=\"#{avatar_url}\" class=\" lazy\" src=\"#{LazyImageTagHelper.placeholder_image}\" />"
-
- allow(ActionController::Base).to receive(:asset_host).and_return(asset_host)
- avatar_url = "#{asset_host}/uploads/-/system/project/avatar/#{project.id}/banana_sample.gif"
-
- expect(helper.project_icon(project.full_path).to_s)
- .to eq "<img data-src=\"#{avatar_url}\" class=\" lazy\" src=\"#{LazyImageTagHelper.placeholder_image}\" />"
- end
-
- it 'gives uploaded icon when present' do
- project = create(:project)
- allow_any_instance_of(Project).to receive(:avatar_in_git).and_return(true)
-
- avatar_url = "#{gitlab_host}#{project_avatar_path(project)}"
expect(helper.project_icon(project.full_path).to_s)
- .to eq "<img data-src=\"#{avatar_url}\" class=\" lazy\" src=\"#{LazyImageTagHelper.placeholder_image}\" />"
+ .to eq "<img data-src=\"#{project.avatar.url}\" class=\" lazy\" src=\"#{LazyImageTagHelper.placeholder_image}\" />"
end
end
@@ -91,40 +70,7 @@ describe ApplicationHelper do
context 'when there is a matching user' do
it 'returns a relative URL for the avatar' do
expect(helper.avatar_icon(user.email).to_s)
- .to eq("/uploads/-/system/user/avatar/#{user.id}/banana_sample.gif")
- end
-
- context 'when an asset_host is set in the config' do
- let(:asset_host) { 'http://assets' }
-
- before do
- allow(ActionController::Base).to receive(:asset_host).and_return(asset_host)
- end
-
- it 'returns an absolute URL on that asset host' do
- expect(helper.avatar_icon(user.email, only_path: false).to_s)
- .to eq("#{asset_host}/uploads/-/system/user/avatar/#{user.id}/banana_sample.gif")
- end
- end
-
- context 'when only_path is set to false' do
- it 'returns an absolute URL for the avatar' do
- expect(helper.avatar_icon(user.email, only_path: false).to_s)
- .to eq("#{gitlab_host}/uploads/-/system/user/avatar/#{user.id}/banana_sample.gif")
- end
- end
-
- context 'when the GitLab instance is at a relative URL' do
- before do
- stub_config_setting(relative_url_root: '/gitlab')
- # Must be stubbed after the stub above, and separately
- stub_config_setting(url: Settings.send(:build_gitlab_url))
- end
-
- it 'returns a relative URL with the correct prefix' do
- expect(helper.avatar_icon(user.email).to_s)
- .to eq("/gitlab/uploads/-/system/user/avatar/#{user.id}/banana_sample.gif")
- end
+ .to eq(user.avatar.url)
end
end
@@ -138,18 +84,9 @@ describe ApplicationHelper do
end
describe 'using a user' do
- context 'when only_path is true' do
- it 'returns a relative URL for the avatar' do
- expect(helper.avatar_icon(user, only_path: true).to_s)
- .to eq("/uploads/-/system/user/avatar/#{user.id}/banana_sample.gif")
- end
- end
-
- context 'when only_path is false' do
- it 'returns an absolute URL for the avatar' do
- expect(helper.avatar_icon(user, only_path: false).to_s)
- .to eq("#{gitlab_host}/uploads/-/system/user/avatar/#{user.id}/banana_sample.gif")
- end
+ it 'returns a relative URL for the avatar' do
+ expect(helper.avatar_icon(user).to_s)
+ .to eq(user.avatar.url)
end
end
end
diff --git a/spec/helpers/boards_helper_spec.rb b/spec/helpers/boards_helper_spec.rb
new file mode 100644
index 00000000000..a3c5ab99c87
--- /dev/null
+++ b/spec/helpers/boards_helper_spec.rb
@@ -0,0 +1,21 @@
+require 'spec_helper'
+
+describe BoardsHelper do
+ describe '#board_data' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:board) { create(:board, project: project) }
+
+ before do
+ assign(:board, board)
+ assign(:project, project)
+
+ allow(helper).to receive(:current_user) { user }
+ allow(helper).to receive(:can?).with(user, :admin_list, project).and_return(true)
+ end
+
+ it 'returns a board_lists_path as lists_endpoint' do
+ expect(helper.board_data[:lists_endpoint]).to eq(board_lists_path(board))
+ end
+ end
+end
diff --git a/spec/helpers/button_helper_spec.rb b/spec/helpers/button_helper_spec.rb
index 4423560ecaa..fee8df10129 100644
--- a/spec/helpers/button_helper_spec.rb
+++ b/spec/helpers/button_helper_spec.rb
@@ -26,30 +26,24 @@ describe ButtonHelper do
context 'when user has password automatically set' do
let(:user) { create(:user, password_automatically_set: true) }
- it 'shows a password tooltip' do
- expect(element.attr('class')).to include(has_tooltip_class)
- expect(element.attr('data-title')).to eq('Set a password on your account to pull or push via HTTP.')
+ it 'shows the password text on the dropdown' do
+ description = element.search('.dropdown-menu-inner-content').first
+
+ expect(description.inner_text).to eq 'Set a password on your account to pull or push via HTTP.'
end
end
end
context 'with internal auth disabled' do
before do
- stub_application_setting(password_authentication_enabled?: false)
+ stub_application_setting(password_authentication_enabled_for_git?: false)
end
context 'when user has no personal access tokens' do
- it 'has a personal access token tooltip ' do
- expect(element.attr('class')).to include(has_tooltip_class)
- expect(element.attr('data-title')).to eq('Create a personal access token on your account to pull or push via HTTP.')
- end
- end
+ it 'shows the personal access token text in the dropdown description' do
+ description = element.search('.dropdown-menu-inner-content').first
- context 'when user has a personal access token' do
- it 'shows no tooltip' do
- create(:personal_access_token, user: user)
-
- expect(element.attr('class')).not_to include(has_tooltip_class)
+ expect(description.inner_text).to eq 'Create a personal access token on your account to pull or push via HTTP.'
end
end
end
@@ -63,6 +57,69 @@ describe ButtonHelper do
end
end
+ describe 'ssh_button' do
+ let(:user) { create(:user) }
+ let(:project) { build_stubbed(:project) }
+
+ def element
+ element = helper.ssh_clone_button(project)
+
+ Nokogiri::HTML::DocumentFragment.parse(element).first_element_child
+ end
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
+ context 'without an ssh key on the user' do
+ it 'shows a warning on the dropdown description' do
+ description = element.search('.dropdown-menu-inner-content').first
+
+ expect(description.inner_text).to eq "You won't be able to pull or push project code via SSH until you add an SSH key to your profile"
+ end
+ end
+
+ context 'with an ssh key on the user' do
+ before do
+ create(:key, user: user)
+ end
+
+ it 'shows no warning in the dropdown description' do
+ description = element.search('.dropdown-menu-inner-content').first
+
+ expect(description).to eq nil
+ end
+ end
+ end
+
+ describe 'ssh and http clone buttons' do
+ let(:user) { create(:user) }
+ let(:project) { build_stubbed(:project) }
+
+ def http_button_element
+ element = helper.http_clone_button(project, append_link: false)
+
+ Nokogiri::HTML::DocumentFragment.parse(element).first_element_child
+ end
+
+ def ssh_button_element
+ element = helper.ssh_clone_button(project, append_link: false)
+
+ Nokogiri::HTML::DocumentFragment.parse(element).first_element_child
+ end
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
+ it 'shows only the title of the clone buttons when append_link is false' do
+ expect(http_button_element.text).to eq('HTTP')
+ expect(http_button_element.search('.dropdown-menu-inner-content').first).to eq(nil)
+ expect(ssh_button_element.text).to eq('SSH')
+ expect(ssh_button_element.search('.dropdown-menu-inner-content').first).to eq(nil)
+ end
+ end
+
describe 'clipboard_button' do
let(:user) { create(:user) }
let(:project) { build_stubbed(:project) }
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index 97f0ed4904e..32432ee1e81 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -3,8 +3,6 @@ require 'spec_helper'
describe GroupsHelper do
include ApplicationHelper
- let(:asset_host) { 'http://assets' }
-
describe 'group_icon' do
avatar_file_path = File.join(Rails.root, 'spec', 'fixtures', 'banana_sample.gif')
@@ -13,16 +11,8 @@ describe GroupsHelper do
group.avatar = fixture_file_upload(avatar_file_path)
group.save!
- avatar_url = "/uploads/-/system/group/avatar/#{group.id}/banana_sample.gif"
-
- expect(helper.group_icon(group).to_s)
- .to eq "<img data-src=\"#{avatar_url}\" class=\" lazy\" src=\"#{LazyImageTagHelper.placeholder_image}\" />"
-
- allow(ActionController::Base).to receive(:asset_host).and_return(asset_host)
- avatar_url = "#{asset_host}/uploads/-/system/group/avatar/#{group.id}/banana_sample.gif"
-
expect(helper.group_icon(group).to_s)
- .to eq "<img data-src=\"#{avatar_url}\" class=\" lazy\" src=\"#{LazyImageTagHelper.placeholder_image}\" />"
+ .to eq "<img data-src=\"#{group.avatar.url}\" class=\" lazy\" src=\"#{LazyImageTagHelper.placeholder_image}\" />"
end
end
@@ -34,25 +24,7 @@ describe GroupsHelper do
group.avatar = fixture_file_upload(avatar_file_path)
group.save!
expect(group_icon_url(group.path).to_s)
- .to match("/uploads/-/system/group/avatar/#{group.id}/banana_sample.gif")
- end
-
- it 'returns an CDN url for the avatar' do
- allow(ActionController::Base).to receive(:asset_host).and_return(asset_host)
- group = create(:group)
- group.avatar = fixture_file_upload(avatar_file_path)
- group.save!
- expect(group_icon_url(group.path).to_s)
- .to match("#{asset_host}/uploads/-/system/group/avatar/#{group.id}/banana_sample.gif")
- end
-
- it 'returns an based url for the avatar if private' do
- allow(ActionController::Base).to receive(:asset_host).and_return(asset_host)
- group = create(:group, :private)
- group.avatar = fixture_file_upload(avatar_file_path)
- group.save!
- expect(group_icon_url(group.path).to_s)
- .to match("/uploads/-/system/group/avatar/#{group.id}/banana_sample.gif")
+ .to match(group.avatar.url)
end
it 'gives default avatar_icon when no avatar is present' do
diff --git a/spec/helpers/icons_helper_spec.rb b/spec/helpers/icons_helper_spec.rb
index 3d79dac284f..2f23ed55d99 100644
--- a/spec/helpers/icons_helper_spec.rb
+++ b/spec/helpers/icons_helper_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe IconsHelper do
+ let(:icons_path) { ActionController::Base.helpers.image_path("icons.svg") }
+
describe 'icon' do
it 'returns aria-hidden by default' do
star = icon('star')
@@ -16,22 +18,42 @@ describe IconsHelper do
end
end
+ describe 'sprite_icon_path' do
+ it 'returns a relative path' do
+ expect(sprite_icon_path)
+ .to eq icons_path
+ end
+
+ context 'when an asset_host is set in the config' do
+ let(:asset_host) { 'http://assets' }
+
+ before do
+ allow(ActionController::Base).to receive(:asset_host).and_return(asset_host)
+ end
+
+ it 'returns an absolute URL on that asset host' do
+ expect(sprite_icon_path)
+ .to eq ActionController::Base.helpers.image_path("icons.svg", host: Gitlab.config.gitlab.url)
+ end
+ end
+ end
+
describe 'sprite_icon' do
icon_name = 'clock'
it 'returns svg icon html' do
expect(sprite_icon(icon_name).to_s)
- .to eq "<svg><use xlink:href=\"/images/icons.svg##{icon_name}\"></use></svg>"
+ .to eq "<svg><use xlink:href=\"#{icons_path}##{icon_name}\"></use></svg>"
end
it 'returns svg icon html + size classes' do
expect(sprite_icon(icon_name, size: 72).to_s)
- .to eq "<svg class=\"s72\"><use xlink:href=\"/images/icons.svg##{icon_name}\"></use></svg>"
+ .to eq "<svg class=\"s72\"><use xlink:href=\"#{icons_path}##{icon_name}\"></use></svg>"
end
it 'returns svg icon html + size classes + additional class' do
expect(sprite_icon(icon_name, size: 72, css_class: 'icon-danger').to_s)
- .to eq "<svg class=\"s72 icon-danger\"><use xlink:href=\"/images/icons.svg##{icon_name}\"></use></svg>"
+ .to eq "<svg class=\"s72 icon-danger\"><use xlink:href=\"#{icons_path}##{icon_name}\"></use></svg>"
end
end
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index cb851d828f2..d601cbdb39b 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -174,6 +174,7 @@ describe IssuablesHelper do
expected_data = {
'endpoint' => "/#{@project.full_path}/issues/#{issue.iid}",
+ 'updateEndpoint' => "/#{@project.full_path}/issues/#{issue.iid}.json",
'canUpdate' => true,
'canDestroy' => true,
'issuableRef' => "##{issue.iid}",
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index 62ea6d48542..ba0039f3a11 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -205,7 +205,7 @@ describe MarkupHelper do
it "uses Wiki pipeline for markdown files" do
allow(@wiki).to receive(:format).and_return(:markdown)
- expect(helper).to receive(:markdown_unsafe).with('wiki content', pipeline: :wiki, project: project, project_wiki: @wiki, page_slug: "nested/page")
+ expect(helper).to receive(:markdown_unsafe).with('wiki content', pipeline: :wiki, project: project, project_wiki: @wiki, page_slug: "nested/page", issuable_state_filter_enabled: true)
helper.render_wiki_content(@wiki)
end
diff --git a/spec/helpers/merge_requests_helper_spec.rb b/spec/helpers/merge_requests_helper_spec.rb
index fd7900c32f4..3008528e60c 100644
--- a/spec/helpers/merge_requests_helper_spec.rb
+++ b/spec/helpers/merge_requests_helper_spec.rb
@@ -1,7 +1,9 @@
require 'spec_helper'
describe MergeRequestsHelper do
+ include ActionView::Helpers::UrlHelper
include ProjectForksHelper
+
describe 'ci_build_details_path' do
let(:project) { create(:project) }
let(:merge_request) { MergeRequest.new }
@@ -41,4 +43,19 @@ describe MergeRequestsHelper do
it { is_expected.to eq([source_title, target_title]) }
end
end
+
+ describe '#tab_link_for' do
+ let(:merge_request) { create(:merge_request, :simple) }
+ let(:options) { Hash.new }
+
+ subject { tab_link_for(merge_request, :show, options) { 'Discussion' } }
+
+ describe 'supports the :force_link option' do
+ let(:options) { { force_link: true } }
+
+ it 'removes the data-toggle attributes' do
+ is_expected.not_to match(/data-toggle="tab"/)
+ end
+ end
+ end
end
diff --git a/spec/helpers/namespaces_helper_spec.rb b/spec/helpers/namespaces_helper_spec.rb
index 8365b3f5538..460d3b6a7e4 100644
--- a/spec/helpers/namespaces_helper_spec.rb
+++ b/spec/helpers/namespaces_helper_spec.rb
@@ -29,5 +29,30 @@ describe NamespacesHelper do
expect(options).not_to include(admin_group.name)
expect(options).to include(user_group.name)
end
+
+ context 'when nested groups are available', :nested_groups do
+ it 'includes groups nested in groups the user can administer' do
+ allow(helper).to receive(:current_user).and_return(user)
+ child_group = create(:group, :private, parent: user_group)
+
+ options = helper.namespaces_options
+
+ expect(options).to include(child_group.name)
+ end
+
+ it 'orders the groups correctly' do
+ allow(helper).to receive(:current_user).and_return(user)
+ child_group = create(:group, :private, parent: user_group)
+ other_child = create(:group, :private, parent: user_group)
+ sub_child = create(:group, :private, parent: child_group)
+
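+ # Depth-first order is expected: each parent group directly followed by its descendants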
+ expect(helper).to receive(:options_for_group)
+ .with([user_group, child_group, sub_child, other_child], anything)
+ .and_call_original
+ allow(helper).to receive(:options_for_group).and_call_original
+
+ helper.namespaces_options
+ end
+ end
end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 5777b5c4025..ede9d232efd 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -150,17 +150,26 @@ describe ProjectsHelper do
end
end
- context 'user requires a password' do
- let(:user) { create(:user, password_automatically_set: true) }
+ context 'user has hidden the message' do
+ it 'returns false' do
+ allow(helper).to receive(:cookies).and_return(hide_no_password_message: true)
+
+ expect(helper.show_no_password_message?).to be_falsey
+ end
+ end
+ context 'user requires a password for Git' do
it 'returns true' do
+ allow(user).to receive(:require_password_creation_for_git?).and_return(true)
+
expect(helper.show_no_password_message?).to be_truthy
end
end
- context 'user requires a personal access token' do
+ context 'user requires a personal access token for Git' do
it 'returns true' do
- stub_application_setting(password_authentication_enabled?: false)
+ allow(user).to receive(:require_password_creation_for_git?).and_return(false)
+ allow(user).to receive(:require_personal_access_token_creation_for_git_auth?).and_return(true)
expect(helper.show_no_password_message?).to be_truthy
end
@@ -168,23 +177,23 @@ describe ProjectsHelper do
end
describe '#link_to_set_password' do
+ let(:user) { create(:user, password_automatically_set: true) }
+
before do
allow(helper).to receive(:current_user).and_return(user)
end
- context 'user requires a password' do
- let(:user) { create(:user, password_automatically_set: true) }
-
+ context 'password authentication is enabled for Git' do
it 'returns link to set a password' do
+ stub_application_setting(password_authentication_enabled_for_git?: true)
+
expect(helper.link_to_set_password).to match %r{<a href="#{edit_profile_password_path}">set a password</a>}
end
end
- context 'user requires a personal access token' do
- let(:user) { create(:user) }
-
+ context 'password authentication is disabled for Git' do
it 'returns link to create a personal access token' do
- stub_application_setting(password_authentication_enabled?: false)
+ stub_application_setting(password_authentication_enabled_for_git?: false)
expect(helper.link_to_set_password).to match %r{<a href="#{profile_personal_access_tokens_path}">create a personal access token</a>}
end
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index ab647401e14..6c9a7febf14 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -102,6 +102,10 @@ describe SearchHelper do
it 'includes project base-endpoint' do
expect(search_filter_input_options('')[:data]['base-endpoint']).to eq(project_path(@project))
end
+
+ it 'includes autocomplete=off flag' do
+ expect(search_filter_input_options('')[:autocomplete]).to eq('off')
+ end
end
context 'group' do
diff --git a/spec/helpers/tree_helper_spec.rb b/spec/helpers/tree_helper_spec.rb
index d7b66e6f078..c358ccae9c3 100644
--- a/spec/helpers/tree_helper_spec.rb
+++ b/spec/helpers/tree_helper_spec.rb
@@ -1,10 +1,36 @@
require 'spec_helper'
describe TreeHelper do
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository }
+ let(:sha) { 'ce369011c189f62c815f5971d096b26759bab0d1' }
+
+ describe '.render_tree' do
+ before do
+ @id = sha
+ @project = project
+ end
+
+ it 'displays all entries without a warning' do
+ tree = repository.tree(sha, 'files')
+
+ html = render_tree(tree)
+
+ expect(html).not_to have_selector('.tree-truncated-warning')
+ end
+
+ it 'truncates entries and adds a warning' do
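+ # Lower the file limit so even this small tree triggers truncation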
+ stub_const('TreeHelper::FILE_LIMIT', 1)
+ tree = repository.tree(sha, 'files')
+
+ html = render_tree(tree)
+
+ expect(html).to have_selector('.tree-truncated-warning', count: 1)
+ expect(html).to have_selector('.tree-item-file-name', count: 1)
+ end
+ end
+
describe 'flatten_tree' do
- let(:project) { create(:project, :repository) }
- let(:repository) { project.repository }
- let(:sha) { 'ce369011c189f62c815f5971d096b26759bab0d1' }
let(:tree) { repository.tree(sha, 'files') }
let(:root_path) { 'files' }
let(:tree_item) { tree.entries.find { |entry| entry.path == path } }
diff --git a/spec/javascripts/behaviors/autosize_spec.js b/spec/javascripts/behaviors/autosize_spec.js
index 67afba19190..960b731892a 100644
--- a/spec/javascripts/behaviors/autosize_spec.js
+++ b/spec/javascripts/behaviors/autosize_spec.js
@@ -1,21 +1,18 @@
-/* eslint-disable space-before-function-paren, no-var, comma-dangle, no-return-assign, max-len */
-
import '~/behaviors/autosize';
-(function() {
- describe('Autosize behavior', function() {
- var load;
- beforeEach(function() {
- return setFixtures('<textarea class="js-autosize" style="resize: vertical"></textarea>');
- });
- it('does not overwrite the resize property', function() {
- load();
- return expect($('textarea')).toHaveCss({
- resize: 'vertical'
- });
+function load() {
+ $(document).trigger('load');
+}
+
+describe('Autosize behavior', () => {
+ beforeEach(() => {
+ setFixtures('<textarea class="js-autosize" style="resize: vertical"></textarea>');
+ });
+
+ it('does not overwrite the resize property', () => {
+ load();
+ expect($('textarea')).toHaveCss({
+ resize: 'vertical',
});
- return load = function() {
- return $(document).trigger('load');
- };
});
-}).call(window);
+});
diff --git a/spec/javascripts/behaviors/gl_emoji/unicode_support_map_spec.js b/spec/javascripts/behaviors/gl_emoji/unicode_support_map_spec.js
index ec2c549e032..f96f20ed4a5 100644
--- a/spec/javascripts/behaviors/gl_emoji/unicode_support_map_spec.js
+++ b/spec/javascripts/behaviors/gl_emoji/unicode_support_map_spec.js
@@ -21,13 +21,18 @@ describe('Unicode Support Map', () => {
});
it('should call .getItem and .setItem', () => {
- const allArgs = window.localStorage.setItem.calls.allArgs();
-
- expect(window.localStorage.getItem).toHaveBeenCalledWith('gl-emoji-user-agent');
- expect(allArgs[0][0]).toBe('gl-emoji-user-agent');
- expect(allArgs[0][1]).toBe(navigator.userAgent);
- expect(allArgs[1][0]).toBe('gl-emoji-unicode-support-map');
- expect(allArgs[1][1]).toBe(stringSupportMap);
+ const getArgs = window.localStorage.getItem.calls.allArgs();
+ const setArgs = window.localStorage.setItem.calls.allArgs();
+
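+ // The version key is read and written before the user-agent and support-map entries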
+ expect(getArgs[0][0]).toBe('gl-emoji-version');
+ expect(getArgs[1][0]).toBe('gl-emoji-user-agent');
+
+ expect(setArgs[0][0]).toBe('gl-emoji-version');
+ expect(setArgs[0][1]).toBe('0.2.0');
+ expect(setArgs[1][0]).toBe('gl-emoji-user-agent');
+ expect(setArgs[1][1]).toBe(navigator.userAgent);
+ expect(setArgs[2][0]).toBe('gl-emoji-unicode-support-map');
+ expect(setArgs[2][1]).toBe(stringSupportMap);
});
});
diff --git a/spec/javascripts/behaviors/requires_input_spec.js b/spec/javascripts/behaviors/requires_input_spec.js
index f9fa814b801..8287c58ac5a 100644
--- a/spec/javascripts/behaviors/requires_input_spec.js
+++ b/spec/javascripts/behaviors/requires_input_spec.js
@@ -1,39 +1,43 @@
-/* eslint-disable space-before-function-paren, no-var */
-
import '~/behaviors/requires_input';
-(function() {
- describe('requiresInput', function() {
- preloadFixtures('branches/new_branch.html.raw');
- beforeEach(function() {
- loadFixtures('branches/new_branch.html.raw');
- this.submitButton = $('button[type="submit"]');
- });
- it('disables submit when any field is required', function() {
- $('.js-requires-input').requiresInput();
- return expect(this.submitButton).toBeDisabled();
- });
- it('enables submit when no field is required', function() {
- $('*[required=required]').removeAttr('required');
- $('.js-requires-input').requiresInput();
- return expect(this.submitButton).not.toBeDisabled();
- });
- it('enables submit when all required fields are pre-filled', function() {
- $('*[required=required]').remove();
- $('.js-requires-input').requiresInput();
- return expect($('.submit')).not.toBeDisabled();
- });
- it('enables submit when all required fields receive input', function() {
- $('.js-requires-input').requiresInput();
- $('#required1').val('input1').change();
- expect(this.submitButton).toBeDisabled();
- $('#optional1').val('input1').change();
- expect(this.submitButton).toBeDisabled();
- $('#required2').val('input2').change();
- $('#required3').val('input3').change();
- $('#required4').val('input4').change();
- $('#required5').val('1').change();
- return expect($('.submit')).not.toBeDisabled();
- });
+describe('requiresInput', () => {
+ let submitButton;
+ preloadFixtures('branches/new_branch.html.raw');
+
+ beforeEach(() => {
+ loadFixtures('branches/new_branch.html.raw');
+ submitButton = $('button[type="submit"]');
+ });
+
+ it('disables submit when any field is required', () => {
+ $('.js-requires-input').requiresInput();
+ expect(submitButton).toBeDisabled();
+ });
+
+ it('enables submit when no field is required', () => {
+ $('*[required=required]').removeAttr('required');
+ $('.js-requires-input').requiresInput();
+ expect(submitButton).not.toBeDisabled();
+ });
+
+ it('enables submit when all required fields are pre-filled', () => {
+ $('*[required=required]').remove();
+ $('.js-requires-input').requiresInput();
+ expect($('.submit')).not.toBeDisabled();
+ });
+
+ it('enables submit when all required fields receive input', () => {
+ $('.js-requires-input').requiresInput();
+ $('#required1').val('input1').change();
+ expect(submitButton).toBeDisabled();
+
+ $('#optional1').val('input1').change();
+ expect(submitButton).toBeDisabled();
+
+ $('#required2').val('input2').change();
+ $('#required3').val('input3').change();
+ $('#required4').val('input4').change();
+ $('#required5').val('1').change();
+ expect($('.submit')).not.toBeDisabled();
});
-}).call(window);
+});
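
For context, the contract this spec pins down: the plugin disables the submit button until every required field holds a value. A rough sketch of such a jQuery plugin under that assumption (an illustration, not the actual source):

    $.fn.requiresInput = function requiresInput() {
      const $form = $(this);
      const $button = $form.find('button[type="submit"]');
      const required = 'input[required], select[required], textarea[required]';

      // Disable submit while any required field is still empty.
      function refresh() {
        const anyEmpty = $form.find(required).toArray().some(el => !$(el).val());
        $button.prop('disabled', anyEmpty);
      }

      $form.on('change input', required, refresh);
      refresh();
    };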
diff --git a/spec/javascripts/boards/board_card_spec.js b/spec/javascripts/boards/board_card_spec.js
index 83b13b06dc1..8f607899b20 100644
--- a/spec/javascripts/boards/board_card_spec.js
+++ b/spec/javascripts/boards/board_card_spec.js
@@ -9,10 +9,11 @@
import Vue from 'vue';
import '~/boards/models/assignee';
+import eventHub from '~/boards/eventhub';
import '~/boards/models/list';
import '~/boards/models/label';
import '~/boards/stores/boards_store';
-import boardCard from '~/boards/components/board_card';
+import boardCard from '~/boards/components/board_card.vue';
import './mock_data';
describe('Board card', () => {
@@ -157,33 +158,35 @@ describe('Board card', () => {
});
it('sets detail issue to card issue on mouse up', () => {
+ spyOn(eventHub, '$emit');
+
triggerEvent('mousedown');
triggerEvent('mouseup');
- expect(gl.issueBoards.BoardsStore.detail.issue).toEqual(vm.issue);
+ expect(eventHub.$emit).toHaveBeenCalledWith('newDetailIssue', vm.issue);
expect(gl.issueBoards.BoardsStore.detail.list).toEqual(vm.list);
});
it('adds active class if detail issue is set', (done) => {
- triggerEvent('mousedown');
- triggerEvent('mouseup');
-
- setTimeout(() => {
- expect(vm.$el.classList.contains('is-active')).toBe(true);
- done();
- }, 0);
+ vm.detailIssue.issue = vm.issue;
+
+ Vue.nextTick()
+ .then(() => {
+ expect(vm.$el.classList.contains('is-active')).toBe(true);
+ })
+ .then(done)
+ .catch(done.fail);
});
it('resets detail issue to empty if already set', () => {
- triggerEvent('mousedown');
- triggerEvent('mouseup');
+ spyOn(eventHub, '$emit');
- expect(gl.issueBoards.BoardsStore.detail.issue).toEqual(vm.issue);
+ gl.issueBoards.BoardsStore.detail.issue = vm.issue;
triggerEvent('mousedown');
triggerEvent('mouseup');
- expect(gl.issueBoards.BoardsStore.detail.issue).toEqual({});
+ expect(eventHub.$emit).toHaveBeenCalledWith('clearDetailIssue');
});
});
});
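
The rewritten assertions describe the card's mouseup contract: open the clicked issue in the detail sidebar via the event hub, or clear it when that issue is already open. Sketched as a standalone function (the real handler lives on the Vue component; the parameter shape is an assumption):

    import eventHub from '~/boards/eventhub';

    function handleMouseUp(card) {
      if (card.detailIssue.issue === card.issue) {
        // Clicking the already-open card closes the sidebar.
        eventHub.$emit('clearDetailIssue');
      } else {
        eventHub.$emit('newDetailIssue', card.issue);
        card.detailIssue.list = card.list;
      }
    }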
diff --git a/spec/javascripts/boards/issue_spec.js b/spec/javascripts/boards/issue_spec.js
index 022d286d5df..10b88878c2a 100644
--- a/spec/javascripts/boards/issue_spec.js
+++ b/spec/javascripts/boards/issue_spec.js
@@ -133,6 +133,25 @@ describe('Issue model', () => {
expect(relativePositionIssue.position).toBe(1);
});
+ it('updates data', () => {
+ issue.updateData({ subscribed: true });
+ expect(issue.subscribed).toBe(true);
+ });
+
+ it('sets fetching state', () => {
+ expect(issue.isFetching.subscriptions).toBe(true);
+
+ issue.setFetchingState('subscriptions', false);
+
+ expect(issue.isFetching.subscriptions).toBe(false);
+ });
+
+ it('sets loading state', () => {
+ issue.setLoadingState('foo', true);
+
+ expect(issue.isLoading.foo).toBe(true);
+ });
+
describe('update', () => {
it('passes assignee ids when there are assignees', (done) => {
spyOn(Vue.http, 'patch').and.callFake((url, data) => {
diff --git a/spec/javascripts/clusters/clusters_bundle_spec.js b/spec/javascripts/clusters/clusters_bundle_spec.js
index 027e8001053..f5be9ea0fb2 100644
--- a/spec/javascripts/clusters/clusters_bundle_spec.js
+++ b/spec/javascripts/clusters/clusters_bundle_spec.js
@@ -28,7 +28,7 @@ describe('Clusters', () => {
expect(
cluster.toggleButton.classList,
- ).not.toContain('checked');
+ ).not.toContain('is-checked');
expect(
cluster.toggleInput.getAttribute('value'),
@@ -36,6 +36,20 @@ describe('Clusters', () => {
});
});
+ describe('showToken', () => {
+ it('should update the field type', () => {
+ cluster.showTokenButton.click();
+ expect(
+ cluster.tokenField.getAttribute('type'),
+ ).toEqual('text');
+
+ cluster.showTokenButton.click();
+ expect(
+ cluster.tokenField.getAttribute('type'),
+ ).toEqual('password');
+ });
+ });
+
describe('checkForNewInstalls', () => {
const INITIAL_APP_MAP = {
helm: { status: null, title: 'Helm Tiller' },
@@ -113,7 +127,7 @@ describe('Clusters', () => {
});
describe('when cluster is created', () => {
- it('should show the success container', () => {
+ it('should show the success container and refresh the page', () => {
cluster.updateContainer(null, 'created');
expect(
diff --git a/spec/javascripts/clusters/clusters_index_spec.js b/spec/javascripts/clusters/clusters_index_spec.js
new file mode 100644
index 00000000000..0a8b63ed5b4
--- /dev/null
+++ b/spec/javascripts/clusters/clusters_index_spec.js
@@ -0,0 +1,58 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import setClusterTableToggles from '~/clusters/clusters_index';
+import { setTimeout } from 'core-js/library/web/timers';
+
+describe('Clusters table', () => {
+ preloadFixtures('clusters/index_cluster.html.raw');
+ let mock;
+
+ beforeEach(() => {
+ loadFixtures('clusters/index_cluster.html.raw');
+ mock = new MockAdapter(axios);
+ setClusterTableToggles();
+ });
+
+ describe('update cluster', () => {
+ it('renders loading state while request is made', () => {
+ const button = document.querySelector('.js-toggle-cluster-list');
+
+ button.click();
+
+ expect(button.classList).toContain('is-loading');
+ expect(button.getAttribute('disabled')).toEqual('true');
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('shows updated state after successful request', (done) => {
+ mock.onPut().reply(200, {}, {});
+ const button = document.querySelector('.js-toggle-cluster-list');
+ button.click();
+
+ expect(button.classList).toContain('is-loading');
+
+ setTimeout(() => {
+ expect(button.classList).not.toContain('is-loading');
+ expect(button.classList).not.toContain('is-checked');
+ done();
+ }, 0);
+ });
+
+ it('shows initial state after failed request', (done) => {
+ mock.onPut().reply(500, {}, {});
+ const button = document.querySelector('.js-toggle-cluster-list');
+
+ button.click();
+ expect(button.classList).toContain('is-loading');
+
+ setTimeout(() => {
+ expect(button.classList).not.toContain('is-loading');
+ expect(button.classList).toContain('is-checked');
+ done();
+ }, 0);
+ });
+ });
+});
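
These specs pin down a three-state toggle: loading while the PUT is in flight, toggled on success, reverted (still is-checked) on failure. A hedged sketch of a handler with that shape, assuming the endpoint is read from a data attribute (the real wiring may differ):

    import axios from '~/lib/utils/axios_utils';

    export default function setClusterTableToggles() {
      document.querySelectorAll('.js-toggle-cluster-list').forEach((button) => {
        button.addEventListener('click', () => {
          const enabled = button.classList.contains('is-checked');
          button.classList.add('is-loading');
          button.setAttribute('disabled', 'true');

          axios.put(button.dataset.endpoint, { enabled: !enabled })
            .then(() => {
              button.classList.toggle('is-checked');
            })
            .catch(() => {
              // Failed request: keep the previous checked state.
            })
            .then(() => {
              button.classList.remove('is-loading');
              button.removeAttribute('disabled');
            });
        });
      });
    }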
diff --git a/spec/javascripts/datetime_utility_spec.js b/spec/javascripts/datetime_utility_spec.js
index 3391cade541..0f7bf9ec712 100644
--- a/spec/javascripts/datetime_utility_spec.js
+++ b/spec/javascripts/datetime_utility_spec.js
@@ -1,4 +1,4 @@
-import { timeIntervalInWords } from '~/lib/utils/datetime_utility';
+import * as datetimeUtility from '~/lib/utils/datetime_utility';
(() => {
describe('Date time utils', () => {
@@ -89,10 +89,22 @@ import { timeIntervalInWords } from '~/lib/utils/datetime_utility';
describe('timeIntervalInWords', () => {
it('should return string with number of minutes and seconds', () => {
- expect(timeIntervalInWords(9.54)).toEqual('9 seconds');
- expect(timeIntervalInWords(1)).toEqual('1 second');
- expect(timeIntervalInWords(200)).toEqual('3 minutes 20 seconds');
- expect(timeIntervalInWords(6008)).toEqual('100 minutes 8 seconds');
+ expect(datetimeUtility.timeIntervalInWords(9.54)).toEqual('9 seconds');
+ expect(datetimeUtility.timeIntervalInWords(1)).toEqual('1 second');
+ expect(datetimeUtility.timeIntervalInWords(200)).toEqual('3 minutes 20 seconds');
+ expect(datetimeUtility.timeIntervalInWords(6008)).toEqual('100 minutes 8 seconds');
+ });
+ });
+
+ describe('dateInWords', () => {
+ const date = new Date('07/01/2016');
+
+ it('should return date in words', () => {
+ expect(datetimeUtility.dateInWords(date)).toEqual('July 1, 2016');
+ });
+
+ it('should return abbreviated month name', () => {
+ expect(datetimeUtility.dateInWords(date, true)).toEqual('Jul 1, 2016');
});
});
})();
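
A plausible shape for the new dateInWords helper, reconstructed from the two expectations above (the month table and three-letter truncation are assumptions about the implementation):

    const months = ['January', 'February', 'March', 'April', 'May', 'June',
      'July', 'August', 'September', 'October', 'November', 'December'];

    export function dateInWords(date, abbreviated = false) {
      if (!date) return date;
      const month = months[date.getMonth()];
      const monthName = abbreviated ? month.substring(0, 3) : month;
      return `${monthName} ${date.getDate()}, ${date.getFullYear()}`;
    }

    // dateInWords(new Date('07/01/2016'))       => 'July 1, 2016'
    // dateInWords(new Date('07/01/2016'), true) => 'Jul 1, 2016'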
diff --git a/spec/javascripts/deploy_keys/components/action_btn_spec.js b/spec/javascripts/deploy_keys/components/action_btn_spec.js
index 5b93fbc5575..7025c3d836c 100644
--- a/spec/javascripts/deploy_keys/components/action_btn_spec.js
+++ b/spec/javascripts/deploy_keys/components/action_btn_spec.js
@@ -34,7 +34,7 @@ describe('Deploy keys action btn', () => {
setTimeout(() => {
expect(
eventHub.$emit,
- ).toHaveBeenCalledWith('enable.key', deployKey);
+ ).toHaveBeenCalledWith('enable.key', deployKey, jasmine.anything());
done();
});
diff --git a/spec/javascripts/deploy_keys/components/app_spec.js b/spec/javascripts/deploy_keys/components/app_spec.js
index 700897f50b0..0ca9290d3d2 100644
--- a/spec/javascripts/deploy_keys/components/app_spec.js
+++ b/spec/javascripts/deploy_keys/components/app_spec.js
@@ -139,4 +139,18 @@ describe('Deploy keys app component', () => {
it('hasKeys returns true when there are keys', () => {
expect(vm.hasKeys).toEqual(3);
});
+
+ it('resets remove button loading state', (done) => {
+ spyOn(window, 'confirm').and.returnValue(false);
+
+ const btn = vm.$el.querySelector('.btn-warning');
+
+ btn.click();
+
+ Vue.nextTick(() => {
+ expect(btn.querySelector('.fa')).toBeNull();
+
+ done();
+ });
+ });
});
diff --git a/spec/javascripts/droplab/drop_down_spec.js b/spec/javascripts/droplab/drop_down_spec.js
index 1ef494a00b8..1225fe2cb66 100644
--- a/spec/javascripts/droplab/drop_down_spec.js
+++ b/spec/javascripts/droplab/drop_down_spec.js
@@ -279,7 +279,12 @@ describe('DropDown', function () {
describe('addEvents', function () {
beforeEach(function () {
this.list = { addEventListener: () => {} };
- this.dropdown = { list: this.list, clickEvent: () => {}, eventWrapper: {} };
+ this.dropdown = {
+ list: this.list,
+ clickEvent: () => {},
+ closeDropdown: () => {},
+ eventWrapper: {},
+ };
spyOn(this.list, 'addEventListener');
@@ -288,6 +293,7 @@ describe('DropDown', function () {
it('should call .addEventListener', function () {
expect(this.list.addEventListener).toHaveBeenCalledWith('click', jasmine.any(Function));
+ expect(this.list.addEventListener).toHaveBeenCalledWith('keyup', jasmine.any(Function));
});
});
diff --git a/spec/javascripts/droplab/hook_spec.js b/spec/javascripts/droplab/hook_spec.js
index 75bf5f3d611..3d39bd0812b 100644
--- a/spec/javascripts/droplab/hook_spec.js
+++ b/spec/javascripts/droplab/hook_spec.js
@@ -24,7 +24,7 @@ describe('Hook', function () {
});
it('should call DropDown constructor', function () {
- expect(dropdownSrc.default).toHaveBeenCalledWith(this.list);
+ expect(dropdownSrc.default).toHaveBeenCalledWith(this.list, this.config);
});
it('should set .type', function () {
diff --git a/spec/javascripts/emoji_spec.js b/spec/javascripts/emoji_spec.js
index fa11c602ec3..124d91f4477 100644
--- a/spec/javascripts/emoji_spec.js
+++ b/spec/javascripts/emoji_spec.js
@@ -1,6 +1,7 @@
import { glEmojiTag } from '~/emoji';
import isEmojiUnicodeSupported, {
isFlagEmoji,
+ isRainbowFlagEmoji,
isKeycapEmoji,
isSkinToneComboEmoji,
isHorceRacingSkinToneComboEmoji,
@@ -217,6 +218,24 @@ describe('gl_emoji', () => {
});
});
+ describe('isRainbowFlagEmoji', () => {
+ it('should gracefully handle empty string', () => {
+ expect(isRainbowFlagEmoji('')).toBeFalsy();
+ });
+ it('should detect rainbow_flag', () => {
+ expect(isRainbowFlagEmoji('🏳🌈')).toBeTruthy();
+ });
+ it('should not detect flag_white on its own', () => {
+ expect(isRainbowFlagEmoji('🏳')).toBeFalsy();
+ });
+ it('should not detect rainbow on its own', () => {
+ expect(isRainbowFlagEmoji('🌈')).toBeFalsy();
+ });
+ it('should not detect flag_white with something else', () => {
+ expect(isRainbowFlagEmoji('🏳🔵')).toBeFalsy();
+ });
+ });
+
describe('isKeycapEmoji', () => {
it('should gracefully handle empty string', () => {
expect(isKeycapEmoji('')).toBeFalsy();
diff --git a/spec/javascripts/environments/emtpy_state_spec.js b/spec/javascripts/environments/emtpy_state_spec.js
new file mode 100644
index 00000000000..82de35933f5
--- /dev/null
+++ b/spec/javascripts/environments/emtpy_state_spec.js
@@ -0,0 +1,57 @@
+
+import Vue from 'vue';
+import emptyState from '~/environments/components/empty_state.vue';
+import mountComponent from '../helpers/vue_mount_component_helper';
+
+describe('environments empty state', () => {
+ let vm;
+ let Component;
+
+ beforeEach(() => {
+ Component = Vue.extend(emptyState);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('With permissions', () => {
+ beforeEach(() => {
+ vm = mountComponent(Component, {
+ newPath: 'foo',
+ canCreateEnvironment: true,
+ helpPath: 'bar',
+ });
+ });
+
+ it('renders empty state and new environment button', () => {
+ expect(
+ vm.$el.querySelector('.js-blank-state-title').textContent.trim(),
+ ).toEqual('You don\'t have any environments right now.');
+
+ expect(
+ vm.$el.querySelector('.js-new-environment-button').getAttribute('href'),
+ ).toEqual('foo');
+ });
+ });
+
+ describe('Without permission', () => {
+ beforeEach(() => {
+ vm = mountComponent(Component, {
+ newPath: 'foo',
+ canCreateEnvironment: false,
+ helpPath: 'bar',
+ });
+ });
+
+ it('renders empty state without new button', () => {
+ expect(
+ vm.$el.querySelector('.js-blank-state-title').textContent.trim(),
+ ).toEqual('You don\'t have any environments right now.');
+
+ expect(
+ vm.$el.querySelector('.js-new-environment-button'),
+ ).toBeNull();
+ });
+ });
+});
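
Several of these rewritten specs trade HTML fixtures for the shared vue_mount_component_helper. The helper presumably amounts to little more than the following (a sketch; the actual helper may take extra arguments such as a mount target):

    // spec/javascripts/helpers/vue_mount_component_helper.js (sketch)
    export default (Component, props = {}) =>
      new Component({ propsData: props }).$mount();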
diff --git a/spec/javascripts/environments/environment_table_spec.js b/spec/javascripts/environments/environment_table_spec.js
index 2862971bec4..9bd42863759 100644
--- a/spec/javascripts/environments/environment_table_spec.js
+++ b/spec/javascripts/environments/environment_table_spec.js
@@ -1,10 +1,17 @@
import Vue from 'vue';
import environmentTableComp from '~/environments/components/environments_table.vue';
+import mountComponent from '../helpers/vue_mount_component_helper';
+
+describe('Environment table', () => {
+ let Component;
+ let vm;
-describe('Environment item', () => {
- preloadFixtures('static/environments/element.html.raw');
beforeEach(() => {
- loadFixtures('static/environments/element.html.raw');
+ Component = Vue.extend(environmentTableComp);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
});
it('Should render a table', () => {
@@ -17,18 +24,12 @@ describe('Environment item', () => {
},
};
- const EnvironmentTable = Vue.extend(environmentTableComp);
-
- const component = new EnvironmentTable({
- el: document.querySelector('.test-dom-element'),
- propsData: {
- environments: [{ mockItem }],
- canCreateDeployment: false,
- canReadEnvironment: true,
- service: {},
- },
- }).$mount();
+ vm = mountComponent(Component, {
+ environments: [mockItem],
+ canCreateDeployment: false,
+ canReadEnvironment: true,
+ });
- expect(component.$el.getAttribute('class')).toContain('ci-table');
+ expect(vm.$el.getAttribute('class')).toContain('ci-table');
});
});
diff --git a/spec/javascripts/environments/environment_spec.js b/spec/javascripts/environments/environments_app_spec.js
index 0c8817a8148..d02adb25b4e 100644
--- a/spec/javascripts/environments/environment_spec.js
+++ b/spec/javascripts/environments/environments_app_spec.js
@@ -1,18 +1,24 @@
import Vue from 'vue';
-import '~/flash';
-import environmentsComponent from '~/environments/components/environment.vue';
+import environmentsComponent from '~/environments/components/environments_app.vue';
import { environment, folder } from './mock_data';
import { headersInterceptor } from '../helpers/vue_resource_helper';
+import mountComponent from '../helpers/vue_mount_component_helper';
describe('Environment', () => {
- preloadFixtures('static/environments/environments.html.raw');
+ const mockData = {
+ endpoint: 'environments.json',
+ canCreateEnvironment: true,
+ canCreateDeployment: true,
+ canReadEnvironment: true,
+ cssContainerClass: 'container',
+ newEnvironmentPath: 'environments/new',
+ helpPagePath: 'help',
+ };
let EnvironmentsComponent;
let component;
beforeEach(() => {
- loadFixtures('static/environments/environments.html.raw');
-
EnvironmentsComponent = Vue.extend(environmentsComponent);
});
@@ -37,9 +43,7 @@ describe('Environment', () => {
});
it('should render the empty state', (done) => {
- component = new EnvironmentsComponent({
- el: document.querySelector('#environments-list-view'),
- });
+ component = mountComponent(EnvironmentsComponent, mockData);
setTimeout(() => {
expect(
@@ -81,9 +85,7 @@ describe('Environment', () => {
beforeEach(() => {
Vue.http.interceptors.push(environmentsResponseInterceptor);
Vue.http.interceptors.push(headersInterceptor);
- component = new EnvironmentsComponent({
- el: document.querySelector('#environments-list-view'),
- });
+ component = mountComponent(EnvironmentsComponent, mockData);
});
afterEach(() => {
@@ -95,7 +97,7 @@ describe('Environment', () => {
it('should render a table with environments', (done) => {
setTimeout(() => {
- expect(component.$el.querySelectorAll('table')).toBeDefined();
+ expect(component.$el.querySelectorAll('table')).not.toBeNull();
expect(
component.$el.querySelector('.environment-name').textContent.trim(),
).toEqual(environment.name);
@@ -104,10 +106,6 @@ describe('Environment', () => {
});
describe('pagination', () => {
- afterEach(() => {
- window.history.pushState({}, null, '');
- });
-
it('should render pagination', (done) => {
setTimeout(() => {
expect(
@@ -117,46 +115,23 @@ describe('Environment', () => {
}, 0);
});
- it('should update url when no search params are present', (done) => {
- spyOn(gl.utils, 'visitUrl');
+ it('should make an API request when page is clicked', (done) => {
+ spyOn(component, 'updateContent');
setTimeout(() => {
component.$el.querySelector('.gl-pagination li:nth-child(5) a').click();
- expect(gl.utils.visitUrl).toHaveBeenCalledWith('?page=2');
+ expect(component.updateContent).toHaveBeenCalledWith({ scope: 'available', page: '2' });
done();
}, 0);
});
- it('should update url when page is already present', (done) => {
- spyOn(gl.utils, 'visitUrl');
- window.history.pushState({}, null, '?page=1');
-
- setTimeout(() => {
- component.$el.querySelector('.gl-pagination li:nth-child(5) a').click();
- expect(gl.utils.visitUrl).toHaveBeenCalledWith('?page=2');
- done();
- }, 0);
- });
-
- it('should update url when page and scope are already present', (done) => {
- spyOn(gl.utils, 'visitUrl');
- window.history.pushState({}, null, '?scope=all&page=1');
-
+ it('should make an API request when using tabs', (done) => {
setTimeout(() => {
- component.$el.querySelector('.gl-pagination li:nth-child(5) a').click();
- expect(gl.utils.visitUrl).toHaveBeenCalledWith('?scope=all&page=2');
- done();
- }, 0);
- });
+ spyOn(component, 'updateContent');
+ component.$el.querySelector('.js-environments-tab-stopped').click();
- it('should update url when page and scope are already present and page is first param', (done) => {
- spyOn(gl.utils, 'visitUrl');
- window.history.pushState({}, null, '?page=1&scope=all');
-
- setTimeout(() => {
- component.$el.querySelector('.gl-pagination li:nth-child(5) a').click();
- expect(gl.utils.visitUrl).toHaveBeenCalledWith('?page=2&scope=all');
+ expect(component.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
done();
- }, 0);
+ });
});
});
});
@@ -180,9 +155,7 @@ describe('Environment', () => {
});
it('should render empty state', (done) => {
- component = new EnvironmentsComponent({
- el: document.querySelector('#environments-list-view'),
- });
+ component = mountComponent(EnvironmentsComponent, mockData);
setTimeout(() => {
expect(
@@ -214,9 +187,7 @@ describe('Environment', () => {
beforeEach(() => {
Vue.http.interceptors.push(environmentsResponseInterceptor);
- component = new EnvironmentsComponent({
- el: document.querySelector('#environments-list-view'),
- });
+ component = mountComponent(EnvironmentsComponent, mockData);
});
afterEach(() => {
@@ -289,4 +260,59 @@ describe('Environment', () => {
});
});
});
+
+ describe('methods', () => {
+ const environmentsEmptyResponseInterceptor = (request, next) => {
+ next(request.respondWith(JSON.stringify([]), {
+ status: 200,
+ }));
+ };
+
+ beforeEach(() => {
+ Vue.http.interceptors.push(environmentsEmptyResponseInterceptor);
+ Vue.http.interceptors.push(headersInterceptor);
+
+ component = mountComponent(EnvironmentsComponent, mockData);
+ spyOn(history, 'pushState').and.stub();
+ });
+
+ afterEach(() => {
+ Vue.http.interceptors = _.without(
+ Vue.http.interceptors, environmentsEmptyResponseInterceptor,
+ );
+ Vue.http.interceptors = _.without(Vue.http.interceptors, headersInterceptor);
+ });
+
+ describe('updateContent', () => {
+ it('should set given parameters', (done) => {
+ component.updateContent({ scope: 'stopped', page: '3' })
+ .then(() => {
+ expect(component.page).toEqual('3');
+ expect(component.scope).toEqual('stopped');
+ expect(component.requestData.scope).toEqual('stopped');
+ expect(component.requestData.page).toEqual('3');
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('onChangeTab', () => {
+ it('should set page to 1', () => {
+ spyOn(component, 'updateContent');
+ component.onChangeTab('stopped');
+
+ expect(component.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
+ });
+ });
+
+ describe('onChangePage', () => {
+ it('should update page and keep scope', () => {
+ spyOn(component, 'updateContent');
+ component.onChangePage(4);
+
+ expect(component.updateContent).toHaveBeenCalledWith({ scope: component.scope, page: '4' });
+ });
+ });
+ });
});
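
The new methods block asserts a small delegation contract: tab changes reset pagination, page changes preserve scope, and both funnel through updateContent with string page numbers. In sketch form (the method bodies are inferred from the assertions, not copied from the component):

    const paginationMethods = {
      onChangeTab(scope) {
        // Switching scope always restarts at the first page.
        return this.updateContent({ scope, page: '1' });
      },
      onChangePage(page) {
        // Page comes in as a number; the API expects a string.
        return this.updateContent({ scope: this.scope, page: Number(page).toString() });
      },
    };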
diff --git a/spec/javascripts/environments/folder/environments_folder_view_spec.js b/spec/javascripts/environments/folder/environments_folder_view_spec.js
index 7e62d356bd2..4ea4d9d7499 100644
--- a/spec/javascripts/environments/folder/environments_folder_view_spec.js
+++ b/spec/javascripts/environments/folder/environments_folder_view_spec.js
@@ -1,25 +1,28 @@
import Vue from 'vue';
-import '~/flash';
import environmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue';
import { environmentsList } from '../mock_data';
import { headersInterceptor } from '../../helpers/vue_resource_helper';
+import mountComponent from '../../helpers/vue_mount_component_helper';
describe('Environments Folder View', () => {
- preloadFixtures('static/environments/environments_folder_view.html.raw');
- let EnvironmentsFolderViewComponent;
+ let Component;
+ let component;
+ const mockData = {
+ endpoint: 'environments.json',
+ folderName: 'review',
+ canCreateDeployment: true,
+ canReadEnvironment: true,
+ cssContainerClass: 'container',
+ };
beforeEach(() => {
- loadFixtures('static/environments/environments_folder_view.html.raw');
- EnvironmentsFolderViewComponent = Vue.extend(environmentsFolderViewComponent);
- window.history.pushState({}, null, 'environments/folders/build');
+ Component = Vue.extend(environmentsFolderViewComponent);
});
afterEach(() => {
- window.history.pushState({}, null, '/');
+ component.$destroy();
});
- let component;
-
describe('successful request', () => {
const environmentsResponseInterceptor = (request, next) => {
next(request.respondWith(JSON.stringify({
@@ -31,10 +34,10 @@ describe('Environments Folder View', () => {
headers: {
'X-nExt-pAge': '2',
'x-page': '1',
- 'X-Per-Page': '1',
+ 'X-Per-Page': '2',
'X-Prev-Page': '',
- 'X-TOTAL': '37',
- 'X-Total-Pages': '2',
+ 'X-TOTAL': '20',
+ 'X-Total-Pages': '10',
},
}));
};
@@ -43,9 +46,7 @@ describe('Environments Folder View', () => {
Vue.http.interceptors.push(environmentsResponseInterceptor);
Vue.http.interceptors.push(headersInterceptor);
- component = new EnvironmentsFolderViewComponent({
- el: document.querySelector('#environments-folder-list-view'),
- });
+ component = mountComponent(Component, mockData);
});
afterEach(() => {
@@ -57,7 +58,7 @@ describe('Environments Folder View', () => {
it('should render a table with environments', (done) => {
setTimeout(() => {
- expect(component.$el.querySelectorAll('table')).toBeDefined();
+ expect(component.$el.querySelectorAll('table')).not.toBeNull();
expect(
component.$el.querySelector('.environment-name').textContent.trim(),
).toEqual(environmentsList[0].name);
@@ -68,11 +69,11 @@ describe('Environments Folder View', () => {
it('should render available tab with count', (done) => {
setTimeout(() => {
expect(
- component.$el.querySelector('.js-available-environments-folder-tab').textContent,
+ component.$el.querySelector('.js-environments-tab-available').textContent,
).toContain('Available');
expect(
- component.$el.querySelector('.js-available-environments-folder-tab .js-available-environments-count').textContent,
+ component.$el.querySelector('.js-environments-tab-available .badge').textContent,
).toContain('0');
done();
}, 0);
@@ -81,11 +82,11 @@ describe('Environments Folder View', () => {
it('should render stopped tab with count', (done) => {
setTimeout(() => {
expect(
- component.$el.querySelector('.js-stopped-environments-folder-tab').textContent,
+ component.$el.querySelector('.js-environments-tab-stopped').textContent,
).toContain('Stopped');
expect(
- component.$el.querySelector('.js-stopped-environments-folder-tab .js-stopped-environments-count').textContent,
+ component.$el.querySelector('.js-environments-tab-stopped .badge').textContent,
).toContain('1');
done();
}, 0);
@@ -94,8 +95,8 @@ describe('Environments Folder View', () => {
it('should render parent folder name', (done) => {
setTimeout(() => {
expect(
- component.$el.querySelector('.js-folder-name').textContent,
- ).toContain('Environments / build');
+ component.$el.querySelector('.js-folder-name').textContent.trim(),
+ ).toContain('Environments / review');
done();
}, 0);
});
@@ -104,52 +105,30 @@ describe('Environments Folder View', () => {
it('should render pagination', (done) => {
setTimeout(() => {
expect(
- component.$el.querySelectorAll('.gl-pagination li').length,
- ).toEqual(5);
+ component.$el.querySelectorAll('.gl-pagination'),
+ ).not.toBeNull();
done();
}, 0);
});
- it('should update url when no search params are present', (done) => {
- spyOn(gl.utils, 'visitUrl');
+ it('should make an API request when changing page', (done) => {
+ spyOn(component, 'updateContent');
setTimeout(() => {
- component.$el.querySelector('.gl-pagination li:nth-child(5) a').click();
- expect(gl.utils.visitUrl).toHaveBeenCalledWith('?page=2');
- done();
- }, 0);
- });
-
- it('should update url when page is already present', (done) => {
- spyOn(gl.utils, 'visitUrl');
- window.history.pushState({}, null, '?page=1');
+ component.$el.querySelector('.gl-pagination .js-last-button a').click();
- setTimeout(() => {
- component.$el.querySelector('.gl-pagination li:nth-child(5) a').click();
- expect(gl.utils.visitUrl).toHaveBeenCalledWith('?page=2');
+ expect(component.updateContent).toHaveBeenCalledWith({ scope: component.scope, page: '10' });
done();
}, 0);
});
- it('should update url when page and scope are already present', (done) => {
- spyOn(gl.utils, 'visitUrl');
- window.history.pushState({}, null, '?scope=all&page=1');
-
+ it('should make an API request when using tabs', (done) => {
setTimeout(() => {
- component.$el.querySelector('.gl-pagination li:nth-child(5) a').click();
- expect(gl.utils.visitUrl).toHaveBeenCalledWith('?scope=all&page=2');
- done();
- }, 0);
- });
-
- it('should update url when page and scope are already present and page is first param', (done) => {
- spyOn(gl.utils, 'visitUrl');
- window.history.pushState({}, null, '?page=1&scope=all');
+ spyOn(component, 'updateContent');
+ component.$el.querySelector('.js-environments-tab-stopped').click();
- setTimeout(() => {
- component.$el.querySelector('.gl-pagination li:nth-child(5) a').click();
- expect(gl.utils.visitUrl).toHaveBeenCalledWith('?page=2&scope=all');
+ expect(component.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
done();
- }, 0);
+ });
});
});
});
@@ -172,9 +151,7 @@ describe('Environments Folder View', () => {
});
it('should not render a table', (done) => {
- component = new EnvironmentsFolderViewComponent({
- el: document.querySelector('#environments-folder-list-view'),
- });
+ component = mountComponent(Component, mockData);
setTimeout(() => {
expect(
@@ -187,11 +164,11 @@ describe('Environments Folder View', () => {
it('should render available tab with count 0', (done) => {
setTimeout(() => {
expect(
- component.$el.querySelector('.js-available-environments-folder-tab').textContent,
+ component.$el.querySelector('.js-environments-tab-available').textContent,
).toContain('Available');
expect(
- component.$el.querySelector('.js-available-environments-folder-tab .js-available-environments-count').textContent,
+ component.$el.querySelector('.js-environments-tab-available .badge').textContent,
).toContain('0');
done();
}, 0);
@@ -200,14 +177,70 @@ describe('Environments Folder View', () => {
it('should render stopped tab with count 0', (done) => {
setTimeout(() => {
expect(
- component.$el.querySelector('.js-stopped-environments-folder-tab').textContent,
+ component.$el.querySelector('.js-environments-tab-stopped').textContent,
).toContain('Stopped');
expect(
- component.$el.querySelector('.js-stopped-environments-folder-tab .js-stopped-environments-count').textContent,
+ component.$el.querySelector('.js-environments-tab-stopped .badge').textContent,
).toContain('0');
done();
}, 0);
});
});
+
+ describe('methods', () => {
+ const environmentsEmptyResponseInterceptor = (request, next) => {
+ next(request.respondWith(JSON.stringify([]), {
+ status: 200,
+ }));
+ };
+
+ beforeEach(() => {
+ Vue.http.interceptors.push(environmentsEmptyResponseInterceptor);
+ Vue.http.interceptors.push(headersInterceptor);
+
+ component = mountComponent(Component, mockData);
+ spyOn(history, 'pushState').and.stub();
+ });
+
+ afterEach(() => {
+ Vue.http.interceptors = _.without(
+ Vue.http.interceptors, environmentsEmptyResponseInterceptor,
+ );
+ Vue.http.interceptors = _.without(Vue.http.interceptors, headersInterceptor);
+ });
+
+ describe('updateContent', () => {
+ it('should set given parameters', (done) => {
+ component.updateContent({ scope: 'stopped', page: '4' })
+ .then(() => {
+ expect(component.page).toEqual('4');
+ expect(component.scope).toEqual('stopped');
+ expect(component.requestData.scope).toEqual('stopped');
+ expect(component.requestData.page).toEqual('4');
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('onChangeTab', () => {
+ it('should set page to 1', () => {
+ spyOn(component, 'updateContent');
+ component.onChangeTab('stopped');
+
+ expect(component.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
+ });
+ });
+
+ describe('onChangePage', () => {
+ it('should update page and keep scope', () => {
+ spyOn(component, 'updateContent');
+
+ component.onChangePage(4);
+
+ expect(component.updateContent).toHaveBeenCalledWith({ scope: component.scope, page: '4' });
+ });
+ });
+ });
});
diff --git a/spec/javascripts/filtered_search/filtered_search_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
index f209328dee1..230c15e5de6 100644
--- a/spec/javascripts/filtered_search/filtered_search_manager_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
@@ -396,6 +396,25 @@ describe('Filtered Search Manager', () => {
});
});
+ describe('Clearing search', () => {
+ beforeEach(() => {
+ initializeManager();
+ });
+
+ it('Clicking the "x" clear button, clears the input', () => {
+ const inputValue = 'label:~bug ';
+ manager.filteredSearchInput.value = inputValue;
+ manager.filteredSearchInput.dispatchEvent(new Event('input'));
+
+ expect(gl.DropdownUtils.getSearchQuery()).toEqual(inputValue);
+
+ manager.clearSearchButton.click();
+
+ expect(manager.filteredSearchInput.value).toEqual('');
+ expect(gl.DropdownUtils.getSearchQuery()).toEqual('');
+ });
+ });
+
describe('toggleInputContainerFocus', () => {
beforeEach(() => {
initializeManager();
diff --git a/spec/javascripts/fixtures/clusters.rb b/spec/javascripts/fixtures/clusters.rb
index 8e74c4f859c..d26ea3febe8 100644
--- a/spec/javascripts/fixtures/clusters.rb
+++ b/spec/javascripts/fixtures/clusters.rb
@@ -31,4 +31,19 @@ describe Projects::ClustersController, '(JavaScript fixtures)', type: :controlle
expect(response).to be_success
store_frontend_fixture(response, example.description)
end
+
+ context 'rendering non-empty state' do
+ before do
+ cluster
+ end
+
+ it 'clusters/index_cluster.html.raw' do |example|
+ get :index,
+ namespace_id: namespace,
+ project_id: project
+
+ expect(response).to be_success
+ store_frontend_fixture(response, example.description)
+ end
+ end
end
diff --git a/spec/javascripts/fixtures/environments/element.html.haml b/spec/javascripts/fixtures/environments/element.html.haml
deleted file mode 100644
index 8d7aeb23356..00000000000
--- a/spec/javascripts/fixtures/environments/element.html.haml
+++ /dev/null
@@ -1 +0,0 @@
-.test-dom-element
diff --git a/spec/javascripts/fixtures/environments/environments.html.haml b/spec/javascripts/fixtures/environments/environments.html.haml
deleted file mode 100644
index e6000fbb553..00000000000
--- a/spec/javascripts/fixtures/environments/environments.html.haml
+++ /dev/null
@@ -1,9 +0,0 @@
-%div
- #environments-list-view{ data: { environments_data: "foo/environments",
- "can-create-deployment" => "true",
- "can-read-environment" => "true",
- "can-create-environment" => "true",
- "project-environments-path" => "https://gitlab.com/foo/environments",
- "project-stopped-environments-path" => "https://gitlab.com/foo/environments?scope=stopped",
- "new-environment-path" => "https://gitlab.com/foo/environments/new",
- "help-page-path" => "https://gitlab.com/help_page"}}
diff --git a/spec/javascripts/fixtures/environments/environments_folder_view.html.haml b/spec/javascripts/fixtures/environments/environments_folder_view.html.haml
deleted file mode 100644
index aceec139730..00000000000
--- a/spec/javascripts/fixtures/environments/environments_folder_view.html.haml
+++ /dev/null
@@ -1,7 +0,0 @@
-%div
- #environments-folder-list-view{ data: { "can-create-deployment" => "true",
- "can-read-environment" => "true",
- "css-class" => "",
- "commit-icon-svg" => custom_icon("icon_commit"),
- "terminal-icon-svg" => custom_icon("icon_terminal"),
- "play-icon-svg" => custom_icon("icon_play") } }
diff --git a/spec/javascripts/fixtures/pipelines.html.haml b/spec/javascripts/fixtures/pipelines.html.haml
index 97b0c25c923..85ee61f0b54 100644
--- a/spec/javascripts/fixtures/pipelines.html.haml
+++ b/spec/javascripts/fixtures/pipelines.html.haml
@@ -1,16 +1,10 @@
%div
#pipelines-list-vue{ data: { endpoint: 'foo',
- "css-class" => 'foo',
"help-page-path" => 'foo',
+ "help-auto-devops-path" => 'foo',
"empty-state-svg-path" => 'foo',
"error-state-svg-path" => 'foo',
"new-pipeline-path" => 'foo',
"can-create-pipeline" => 'true',
- "all-path" => 'foo',
- "pending-path" => 'foo',
- "running-path" => 'foo',
- "finished-path" => 'foo',
- "branches-path" => 'foo',
- "tags-path" => 'foo',
"has-ci" => 'foo',
"ci-lint-path" => 'foo' } }
diff --git a/spec/javascripts/flash_spec.js b/spec/javascripts/flash_spec.js
index b669aabcee4..97e3ab682c5 100644
--- a/spec/javascripts/flash_spec.js
+++ b/spec/javascripts/flash_spec.js
@@ -278,7 +278,7 @@ describe('Flash', () => {
removeFlashClickListener(flashEl, false);
- flashEl.parentNode.click();
+ flashEl.click();
setTimeout(() => {
expect(document.querySelector('.flash')).toBeNull();
diff --git a/spec/javascripts/gfm_auto_complete_spec.js b/spec/javascripts/gfm_auto_complete_spec.js
index ad0c7264616..6f357306ec7 100644
--- a/spec/javascripts/gfm_auto_complete_spec.js
+++ b/spec/javascripts/gfm_auto_complete_spec.js
@@ -67,6 +67,28 @@ describe('GfmAutoComplete', function () {
});
});
+ describe('DefaultOptions.beforeInsert', () => {
+ const beforeInsert = (context, value) => (
+ gfmAutoCompleteCallbacks.beforeInsert.call(context, value)
+ );
+
+ const atwhoInstance = { setting: { skipSpecialCharacterTest: false } };
+
+ it('should not quote if value only contains alphanumeric characters', () => {
+ expect(beforeInsert(atwhoInstance, '@user1')).toBe('@user1');
+ expect(beforeInsert(atwhoInstance, '~label1')).toBe('~label1');
+ });
+
+ it('should quote if value contains any non-alphanumeric characters', () => {
+ expect(beforeInsert(atwhoInstance, '~label-20')).toBe('~"label-20"');
+ expect(beforeInsert(atwhoInstance, '~label 20')).toBe('~"label 20"');
+ });
+
+ it('should quote integer labels', () => {
+ expect(beforeInsert(atwhoInstance, '~1234')).toBe('~"1234"');
+ });
+ });
+
describe('DefaultOptions.matcher', function () {
const defaultMatcher = (context, flag, subtext) => (
gfmAutoCompleteCallbacks.matcher.call(context, flag, subtext)
diff --git a/spec/javascripts/image_diff/helpers/utils_helper_spec.js b/spec/javascripts/image_diff/helpers/utils_helper_spec.js
index 56d77a05c4c..31949c39d9c 100644
--- a/spec/javascripts/image_diff/helpers/utils_helper_spec.js
+++ b/spec/javascripts/image_diff/helpers/utils_helper_spec.js
@@ -157,27 +157,19 @@ describe('utilsHelper', () => {
beforeEach(() => {
window.gl = window.gl || (window.gl = {});
glCache = window.gl;
- window.gl.ImageFile = () => {};
fileEl = document.createElement('div');
fileEl.innerHTML = `
<div class="diff-file"></div>
`;
- spyOn(ImageDiff.prototype, 'init').and.callFake(() => {});
spyOn(ReplacedImageDiff.prototype, 'init').and.callFake(() => {});
+ spyOn(ImageDiff.prototype, 'init').and.callFake(() => {});
});
afterEach(() => {
window.gl = glCache;
});
- it('should initialize gl.ImageFile', () => {
- spyOn(window.gl, 'ImageFile');
-
- utilsHelper.initImageDiff(fileEl, false, false);
- expect(gl.ImageFile).toHaveBeenCalled();
- });
-
it('should initialize ImageDiff if js-single-image', () => {
const diffFileEl = fileEl.querySelector('.diff-file');
diffFileEl.innerHTML = `
diff --git a/spec/javascripts/issuable_spec.js b/spec/javascripts/issuable_spec.js
index ceee08d47c5..5a9112716f4 100644
--- a/spec/javascripts/issuable_spec.js
+++ b/spec/javascripts/issuable_spec.js
@@ -26,7 +26,7 @@ describe('Issuable', () => {
document.body.appendChild(element);
const input = document.createElement('input');
- input.setAttribute('id', 'issue_email');
+ input.setAttribute('id', 'issuable_email');
document.body.appendChild(input);
Issuable = new IssuableIndex('issue_');
diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js
index 2ea290108a4..b47a8bf705f 100644
--- a/spec/javascripts/issue_show/components/app_spec.js
+++ b/spec/javascripts/issue_show/components/app_spec.js
@@ -35,11 +35,12 @@ describe('Issuable output', () => {
canUpdate: true,
canDestroy: true,
endpoint: '/gitlab-org/gitlab-shell/issues/9/realtime_changes',
+ updateEndpoint: gl.TEST_HOST,
issuableRef: '#1',
initialTitleHtml: '',
initialTitleText: '',
- initialDescriptionHtml: '',
- initialDescriptionText: '',
+ initialDescriptionHtml: 'test',
+ initialDescriptionText: 'test',
markdownPreviewPath: '/',
markdownDocsPath: '/',
projectNamespace: '/',
@@ -223,23 +224,46 @@ describe('Issuable output', () => {
});
});
- it('closes form on error', (done) => {
- spyOn(window, 'Flash').and.callThrough();
- spyOn(vm.service, 'updateIssuable').and.callFake(() => new Promise((resolve, reject) => {
- reject();
- }));
+ describe('error when updating', () => {
+ beforeEach(() => {
+ spyOn(window, 'Flash').and.callThrough();
+ spyOn(vm.service, 'updateIssuable').and.callFake(() => new Promise((resolve, reject) => {
+ reject();
+ }));
+ });
- vm.updateIssuable();
+ it('closes form on error', (done) => {
+ vm.updateIssuable();
- setTimeout(() => {
- expect(
- eventHub.$emit,
- ).toHaveBeenCalledWith('close.form');
- expect(
- window.Flash,
- ).toHaveBeenCalledWith('Error updating issue');
+ setTimeout(() => {
+ expect(
+ eventHub.$emit,
+ ).toHaveBeenCalledWith('close.form');
+ expect(
+ window.Flash,
+ ).toHaveBeenCalledWith('Error updating issue');
- done();
+ done();
+ });
+ });
+
+ it('returns the correct error message for issuableType', (done) => {
+ vm.issuableType = 'merge request';
+
+ Vue.nextTick(() => {
+ vm.updateIssuable();
+
+ setTimeout(() => {
+ expect(
+ eventHub.$emit,
+ ).toHaveBeenCalledWith('close.form');
+ expect(
+ window.Flash,
+ ).toHaveBeenCalledWith('Error updating merge request');
+
+ done();
+ });
+ });
});
});
});
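
The two flash expectations suggest the error message simply interpolates the issuable type. A hypothetical helper with that behaviour (the name is assumed, not the component's actual API):

    function updateErrorMessage(issuableType) {
      return `Error updating ${issuableType}`;
    }

    updateErrorMessage('issue');         // 'Error updating issue'
    updateErrorMessage('merge request'); // 'Error updating merge request'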
diff --git a/spec/javascripts/issue_show/components/description_spec.js b/spec/javascripts/issue_show/components/description_spec.js
index 360691a3546..163e5cdd062 100644
--- a/spec/javascripts/issue_show/components/description_spec.js
+++ b/spec/javascripts/issue_show/components/description_spec.js
@@ -1,11 +1,22 @@
import Vue from 'vue';
import descriptionComponent from '~/issue_show/components/description.vue';
+import * as taskList from '~/task_list';
+import mountComponent from '../../helpers/vue_mount_component_helper';
describe('Description component', () => {
let vm;
+ let DescriptionComponent;
+ const props = {
+ canUpdate: true,
+ descriptionHtml: 'test',
+ descriptionText: 'test',
+ updatedAt: new Date().toString(),
+ taskStatus: '',
+ updateUrl: gl.TEST_HOST,
+ };
beforeEach(() => {
- const Component = Vue.extend(descriptionComponent);
+ DescriptionComponent = Vue.extend(descriptionComponent);
if (!document.querySelector('.issuable-meta')) {
const metaData = document.createElement('div');
@@ -15,15 +26,11 @@ describe('Description component', () => {
document.body.appendChild(metaData);
}
- vm = new Component({
- propsData: {
- canUpdate: true,
- descriptionHtml: 'test',
- descriptionText: 'test',
- updatedAt: new Date().toString(),
- taskStatus: '',
- },
- }).$mount();
+ vm = mountComponent(DescriptionComponent, props);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
});
it('animates description changes', (done) => {
@@ -44,34 +51,46 @@ describe('Description component', () => {
});
});
- // TODO: gl.TaskList no longer exists. rewrite these tests once we have a way to rewire ES modules
-
- // it('re-inits the TaskList when description changed', (done) => {
- // spyOn(gl, 'TaskList');
- // vm.descriptionHtml = 'changed';
- //
- // setTimeout(() => {
- // expect(
- // gl.TaskList,
- // ).toHaveBeenCalled();
- //
- // done();
- // });
- // });
-
- // it('does not re-init the TaskList when canUpdate is false', (done) => {
- // spyOn(gl, 'TaskList');
- // vm.canUpdate = false;
- // vm.descriptionHtml = 'changed';
- //
- // setTimeout(() => {
- // expect(
- // gl.TaskList,
- // ).not.toHaveBeenCalled();
- //
- // done();
- // });
- // });
+ describe('TaskList', () => {
+ beforeEach(() => {
+ vm = mountComponent(DescriptionComponent, Object.assign({}, props, {
+ issuableType: 'issuableType',
+ }));
+ spyOn(taskList, 'default');
+ });
+
+ it('re-inits the TaskList when description changed', (done) => {
+ vm.descriptionHtml = 'changed';
+
+ setTimeout(() => {
+ expect(taskList.default).toHaveBeenCalled();
+ done();
+ });
+ });
+
+ it('does not re-init the TaskList when canUpdate is false', (done) => {
+ vm.canUpdate = false;
+ vm.descriptionHtml = 'changed';
+
+ setTimeout(() => {
+ expect(taskList.default).not.toHaveBeenCalled();
+ done();
+ });
+ });
+
+ it('calls with issuableType dataType', (done) => {
+ vm.descriptionHtml = 'changed';
+
+ setTimeout(() => {
+ expect(taskList.default).toHaveBeenCalledWith({
+ dataType: 'issuableType',
+ fieldName: 'description',
+ selector: '.detail-page-description',
+ });
+ done();
+ });
+ });
+ });
describe('taskStatus', () => {
it('adds full taskStatus', (done) => {
@@ -126,4 +145,8 @@ describe('Description component', () => {
});
});
});
+
+ it('sets data-update-url', () => {
+ expect(vm.$el.querySelector('textarea').dataset.updateUrl).toEqual(gl.TEST_HOST);
+ });
});
diff --git a/spec/javascripts/issue_show/components/edit_actions_spec.js b/spec/javascripts/issue_show/components/edit_actions_spec.js
index f6625b748b6..d779ab7bb31 100644
--- a/spec/javascripts/issue_show/components/edit_actions_spec.js
+++ b/spec/javascripts/issue_show/components/edit_actions_spec.js
@@ -61,6 +61,15 @@ describe('Edit Actions components', () => {
});
});
+ it('should not show delete button if showDeleteButton is false', (done) => {
+ vm.showDeleteButton = false;
+
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.btn-danger')).toBeNull();
+ done();
+ });
+ });
+
describe('updateIssuable', () => {
it('sends update.issuable event when clicking save button', () => {
vm.$el.querySelector('.btn-save').click();
diff --git a/spec/javascripts/issue_show/components/form_spec.js b/spec/javascripts/issue_show/components/form_spec.js
index 6e89528a3ea..000b53af016 100644
--- a/spec/javascripts/issue_show/components/form_spec.js
+++ b/spec/javascripts/issue_show/components/form_spec.js
@@ -34,7 +34,6 @@ describe('Inline edit form component', () => {
});
it('renders template selector when templates exists', (done) => {
- spyOn(gl, 'IssuableTemplateSelectors');
vm.issuableTemplates = ['test'];
Vue.nextTick(() => {
diff --git a/spec/javascripts/issue_show/components/title_spec.js b/spec/javascripts/issue_show/components/title_spec.js
index c1edc785d0f..5370f4e1fea 100644
--- a/spec/javascripts/issue_show/components/title_spec.js
+++ b/spec/javascripts/issue_show/components/title_spec.js
@@ -80,19 +80,19 @@ describe('Title component', () => {
});
it('should not show by default', () => {
- expect(vm.$el.querySelector('.note-action-button')).toBeNull();
+ expect(vm.$el.querySelector('.btn-edit')).toBeNull();
});
it('should not show if canUpdate is false', () => {
vm.showInlineEditButton = true;
vm.canUpdate = false;
- expect(vm.$el.querySelector('.note-action-button')).toBeNull();
+ expect(vm.$el.querySelector('.btn-edit')).toBeNull();
});
it('should show if showInlineEditButton and canUpdate', () => {
vm.showInlineEditButton = true;
vm.canUpdate = true;
- expect(vm.$el.querySelector('.note-action-button')).toBeDefined();
+ expect(vm.$el.querySelector('.btn-edit')).toBeDefined();
});
it('should trigger open.form event when clicked', () => {
@@ -100,7 +100,7 @@ describe('Title component', () => {
vm.canUpdate = true;
Vue.nextTick(() => {
- vm.$el.querySelector('.note-action-button').click();
+ vm.$el.querySelector('.btn-edit').click();
expect(eventHub.$emit).toHaveBeenCalledWith('open.form');
});
});
diff --git a/spec/javascripts/job_spec.js b/spec/javascripts/job_spec.js
index 5e67911d338..20c4caa865d 100644
--- a/spec/javascripts/job_spec.js
+++ b/spec/javascripts/job_spec.js
@@ -28,7 +28,7 @@ describe('Job', () => {
});
it('copies build options', function () {
- expect(this.job.pageUrl).toBe(JOB_URL);
+ expect(this.job.pagePath).toBe(JOB_URL);
expect(this.job.buildStatus).toBe('success');
expect(this.job.buildStage).toBe('test');
expect(this.job.state).toBe('');
diff --git a/spec/javascripts/jobs/job_details_mediator_spec.js b/spec/javascripts/jobs/job_details_mediator_spec.js
index 1d7fa7e12fc..3069a0cd60e 100644
--- a/spec/javascripts/jobs/job_details_mediator_spec.js
+++ b/spec/javascripts/jobs/job_details_mediator_spec.js
@@ -1,39 +1,35 @@
-import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import JobMediator from '~/jobs/job_details_mediator';
import job from './mock_data';
describe('JobMediator', () => {
let mediator;
+ let mock;
beforeEach(() => {
- mediator = new JobMediator({ endpoint: 'foo' });
+ mediator = new JobMediator({ endpoint: 'jobs/40291672.json' });
+ mock = new MockAdapter(axios);
});
it('should set defaults', () => {
expect(mediator.store).toBeDefined();
expect(mediator.service).toBeDefined();
- expect(mediator.options).toEqual({ endpoint: 'foo' });
+ expect(mediator.options).toEqual({ endpoint: 'jobs/40291672.json' });
expect(mediator.state.isLoading).toEqual(false);
});
describe('request and store data', () => {
- const interceptor = (request, next) => {
- next(request.respondWith(JSON.stringify(job), {
- status: 200,
- }));
- };
-
beforeEach(() => {
- Vue.http.interceptors.push(interceptor);
+ mock.onGet().reply(200, job, {});
});
afterEach(() => {
- Vue.http.interceptors = _.without(Vue.http.interceptor, interceptor);
+ mock.restore();
});
it('should store received data', (done) => {
mediator.fetchJob();
-
setTimeout(() => {
expect(mediator.store.state.job).toEqual(job);
done();
diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js
index a5298be5669..0a9d815f469 100644
--- a/spec/javascripts/lib/utils/common_utils_spec.js
+++ b/spec/javascripts/lib/utils/common_utils_spec.js
@@ -142,44 +142,33 @@ describe('common_utils', () => {
});
});
- describe('setParamInURL', () => {
+ describe('historyPushState', () => {
afterEach(() => {
- window.history.pushState({}, null, '');
+ window.history.replaceState({}, null, null);
});
- it('should return the parameter', () => {
- window.history.replaceState({}, null, '');
-
- expect(commonUtils.setParamInURL('page', 156)).toBe('?page=156');
- expect(commonUtils.setParamInURL('page', '156')).toBe('?page=156');
- });
+ it('should call pushState with the correct path', () => {
+ spyOn(window.history, 'pushState');
- it('should update the existing parameter when its a number', () => {
- window.history.pushState({}, null, '?page=15');
+ commonUtils.historyPushState('newpath?page=2');
- expect(commonUtils.setParamInURL('page', 16)).toBe('?page=16');
- expect(commonUtils.setParamInURL('page', '16')).toBe('?page=16');
- expect(commonUtils.setParamInURL('page', true)).toBe('?page=true');
+ expect(window.history.pushState).toHaveBeenCalled();
+ expect(window.history.pushState.calls.allArgs()[0][2]).toContain('newpath?page=2');
});
+ });
- it('should update the existing parameter when its a string', () => {
- window.history.pushState({}, null, '?scope=all');
-
- expect(commonUtils.setParamInURL('scope', 'finished')).toBe('?scope=finished');
- });
-
- it('should update the existing parameter when more than one parameter exists', () => {
- window.history.pushState({}, null, '?scope=all&page=15');
-
- expect(commonUtils.setParamInURL('scope', 'finished')).toBe('?scope=finished&page=15');
+ describe('parseQueryStringIntoObject', () => {
+ it('should return object with query parameters', () => {
+ expect(commonUtils.parseQueryStringIntoObject('scope=all&page=2')).toEqual({ scope: 'all', page: '2' });
+ expect(commonUtils.parseQueryStringIntoObject('scope=all')).toEqual({ scope: 'all' });
+ expect(commonUtils.parseQueryStringIntoObject()).toEqual({});
});
+ });
- it('should add a new parameter to the end of the existing ones', () => {
- window.history.pushState({}, null, '?scope=all');
-
- expect(commonUtils.setParamInURL('page', 16)).toBe('?scope=all&page=16');
- expect(commonUtils.setParamInURL('page', '16')).toBe('?scope=all&page=16');
- expect(commonUtils.setParamInURL('page', true)).toBe('?scope=all&page=true');
+ describe('buildUrlWithCurrentLocation', () => {
+ it('should build a URL with current location and given parameters', () => {
+ expect(commonUtils.buildUrlWithCurrentLocation()).toEqual(window.location.pathname);
+ expect(commonUtils.buildUrlWithCurrentLocation('?page=2')).toEqual(`${window.location.pathname}?page=2`);
});
});
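
Implementations matching these expectations are short; a sketch of both helpers, reconstructed from the assertions rather than the module source:

    export function parseQueryStringIntoObject(query = '') {
      if (query === '') return {};
      // 'scope=all&page=2' => { scope: 'all', page: '2' }
      return query.split('&').reduce((acc, element) => {
        const [key, value] = element.split('=');
        acc[key] = value;
        return acc;
      }, {});
    }

    export function buildUrlWithCurrentLocation(param = '') {
      return `${window.location.pathname}${param}`;
    }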
diff --git a/spec/javascripts/lib/utils/datefix_spec.js b/spec/javascripts/lib/utils/datefix_spec.js
index e58ac4300ba..a9f3abcf2a4 100644
--- a/spec/javascripts/lib/utils/datefix_spec.js
+++ b/spec/javascripts/lib/utils/datefix_spec.js
@@ -21,7 +21,7 @@ describe('datefix', () => {
describe('pikadayToString', () => {
it('should format a UTC date into yyyy-mm-dd format', () => {
- expect(pikadayToString(new Date('2020-01-29'))).toEqual('2020-01-29');
+ expect(pikadayToString(new Date('2020-01-29:00:00'))).toEqual('2020-01-29');
});
});
});
diff --git a/spec/javascripts/lib/utils/number_utility_spec.js b/spec/javascripts/lib/utils/number_utility_spec.js
index 83c92deccdc..fcf27f6805f 100644
--- a/spec/javascripts/lib/utils/number_utility_spec.js
+++ b/spec/javascripts/lib/utils/number_utility_spec.js
@@ -1,4 +1,4 @@
-import { formatRelevantDigits, bytesToKiB, bytesToMiB } from '~/lib/utils/number_utils';
+import { formatRelevantDigits, bytesToKiB, bytesToMiB, bytesToGiB, numberToHumanSize } from '~/lib/utils/number_utils';
describe('Number Utils', () => {
describe('formatRelevantDigits', () => {
@@ -52,4 +52,29 @@ describe('Number Utils', () => {
expect(bytesToMiB(1000000)).toEqual(0.95367431640625);
});
});
+
+ describe('bytesToGiB', () => {
+ it('calculates GiB for the given bytes', () => {
+ expect(bytesToGiB(1073741824)).toEqual(1);
+ expect(bytesToGiB(10737418240)).toEqual(10);
+ });
+ });
+
+ describe('numberToHumanSize', () => {
+ it('should return bytes', () => {
+ expect(numberToHumanSize(654)).toEqual('654 bytes');
+ });
+
+ it('should return KiB', () => {
+ expect(numberToHumanSize(1079)).toEqual('1.05 KiB');
+ });
+
+ it('should return MiB', () => {
+ expect(numberToHumanSize(10485764)).toEqual('10.00 MiB');
+ });
+
+ it('should return GiB', () => {
+ expect(numberToHumanSize(10737418240)).toEqual('10.00 GiB');
+ });
+ });
});
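
For reference, the arithmetic these expectations pin down: binary (1024-based) units, with two decimal places for anything above bytes. A sketch consistent with every assertion above:

    const BYTES_IN_KIB = 1024;

    export function bytesToKiB(number) { return number / BYTES_IN_KIB; }
    export function bytesToMiB(number) { return number / (BYTES_IN_KIB ** 2); }
    export function bytesToGiB(number) { return number / (BYTES_IN_KIB ** 3); }

    export function numberToHumanSize(size) {
      if (size < BYTES_IN_KIB) return `${size} bytes`;
      if (size < BYTES_IN_KIB ** 2) return `${bytesToKiB(size).toFixed(2)} KiB`;
      if (size < BYTES_IN_KIB ** 3) return `${bytesToMiB(size).toFixed(2)} MiB`;
      return `${bytesToGiB(size).toFixed(2)} GiB`;
    }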
diff --git a/spec/javascripts/lib/utils/poll_spec.js b/spec/javascripts/lib/utils/poll_spec.js
index 2aa7011ca51..9b8f68f1676 100644
--- a/spec/javascripts/lib/utils/poll_spec.js
+++ b/spec/javascripts/lib/utils/poll_spec.js
@@ -155,7 +155,7 @@ describe('Poll', () => {
successCallback: () => {
Polling.stop();
setTimeout(() => {
- Polling.restart();
+ Polling.restart({ data: { page: 4 } });
}, 0);
},
errorCallback: callbacks.error,
@@ -170,10 +170,10 @@ describe('Poll', () => {
Polling.stop();
expect(service.fetch.calls.count()).toEqual(2);
- expect(service.fetch).toHaveBeenCalledWith({ page: 1 });
+ expect(service.fetch).toHaveBeenCalledWith({ page: 4 });
expect(Polling.stop).toHaveBeenCalled();
expect(Polling.restart).toHaveBeenCalled();
-
+ expect(Polling.options.data).toEqual({ page: 4 });
done();
});
});
diff --git a/spec/javascripts/lib/utils/text_markdown_spec.js b/spec/javascripts/lib/utils/text_markdown_spec.js
new file mode 100644
index 00000000000..a95a7e2a5be
--- /dev/null
+++ b/spec/javascripts/lib/utils/text_markdown_spec.js
@@ -0,0 +1,62 @@
+import textUtils from '~/lib/utils/text_markdown';
+
+describe('init markdown', () => {
+ let textArea;
+
+ beforeAll(() => {
+ textArea = document.createElement('textarea');
+ document.querySelector('body').appendChild(textArea);
+ textArea.focus();
+ });
+
+ afterAll(() => {
+ textArea.parentNode.removeChild(textArea);
+ });
+
+ describe('without selection', () => {
+ it('inserts the tag on an empty line', () => {
+ const initialValue = '';
+
+ textArea.value = initialValue;
+ textArea.selectionStart = 0;
+ textArea.selectionEnd = 0;
+
+ textUtils.insertText(textArea, textArea.value, '*', null, '', false);
+
+ expect(textArea.value).toEqual(`${initialValue}* `);
+ });
+
+ it('inserts the tag on a new line if the current one is not empty', () => {
+ const initialValue = 'some text';
+
+ textArea.value = initialValue;
+ textArea.setSelectionRange(initialValue.length, initialValue.length);
+
+ textUtils.insertText(textArea, textArea.value, '*', null, '', false);
+
+ expect(textArea.value).toEqual(`${initialValue}\n* `);
+ });
+
+ it('inserts the tag on the same line if the current line only contains spaces', () => {
+ const initialValue = ' ';
+
+ textArea.value = initialValue;
+ textArea.setSelectionRange(initialValue.length, initialValue.length);
+
+ textUtils.insertText(textArea, textArea.value, '*', null, '', false);
+
+ expect(textArea.value).toEqual(`${initialValue}* `);
+ });
+
+ it('inserts the tag on the same line if the current line only contains tabs', () => {
+ const initialValue = '\t\t\t';
+
+ textArea.value = initialValue;
+ textArea.setSelectionRange(initialValue.length, initialValue.length);
+
+ textUtils.insertText(textArea, textArea.value, '*', null, '', false);
+
+ expect(textArea.value).toEqual(`${initialValue}* `);
+ });
+ });
+});
diff --git a/spec/javascripts/lib/utils/text_utility_spec.js b/spec/javascripts/lib/utils/text_utility_spec.js
index 829b3ef5735..1f46c225071 100644
--- a/spec/javascripts/lib/utils/text_utility_spec.js
+++ b/spec/javascripts/lib/utils/text_utility_spec.js
@@ -1,109 +1,65 @@
-import { highCountTrim } from '~/lib/utils/text_utility';
+import * as textUtils from '~/lib/utils/text_utility';
describe('text_utility', () => {
- describe('gl.text.getTextWidth', () => {
- it('returns zero width when no text is passed', () => {
- expect(gl.text.getTextWidth('')).toBe(0);
+ describe('addDelimiter', () => {
+ it('should add a delimiter to the given string', () => {
+ expect(textUtils.addDelimiter('1234')).toEqual('1,234');
+ expect(textUtils.addDelimiter('222222')).toEqual('222,222');
});
- it('returns zero width when no text is passed and font is passed', () => {
- expect(gl.text.getTextWidth('', '100px sans-serif')).toBe(0);
+ it('should not add a delimiter if string contains no numbers', () => {
+ expect(textUtils.addDelimiter('aaaa')).toEqual('aaaa');
});
+ });
- it('returns width when text is passed', () => {
- expect(gl.text.getTextWidth('foo') > 0).toBe(true);
+ describe('highCountTrim', () => {
+ it('returns 99+ for count >= 100', () => {
+ expect(textUtils.highCountTrim(105)).toBe('99+');
+ expect(textUtils.highCountTrim(100)).toBe('99+');
});
- it('returns bigger width when font is larger', () => {
- const largeFont = gl.text.getTextWidth('foo', '100px sans-serif');
- const regular = gl.text.getTextWidth('foo', '10px sans-serif');
- expect(largeFont > regular).toBe(true);
+ it('returns exact number for count < 100', () => {
+ expect(textUtils.highCountTrim(45)).toBe(45);
});
});
- describe('gl.text.pluralize', () => {
- it('returns pluralized', () => {
- expect(gl.text.pluralize('test', 2)).toBe('tests');
- });
-
- it('returns pluralized when count is 0', () => {
- expect(gl.text.pluralize('test', 0)).toBe('tests');
+ describe('capitalizeFirstCharacter', () => {
+ it('returns string with first letter capitalized', () => {
+ expect(textUtils.capitalizeFirstCharacter('gitlab')).toEqual('Gitlab');
});
+ });
- it('does not return pluralized', () => {
- expect(gl.text.pluralize('test', 1)).toBe('test');
+ describe('humanize', () => {
+ it('should remove underscores and uppercase the first letter', () => {
+ expect(textUtils.humanize('foo_bar')).toEqual('Foo bar');
});
});
- describe('highCountTrim', () => {
- it('returns 99+ for count >= 100', () => {
- expect(highCountTrim(105)).toBe('99+');
- expect(highCountTrim(100)).toBe('99+');
+ describe('pluralize', () => {
+ it('should pluralize given string', () => {
+ expect(textUtils.pluralize('test', 2)).toBe('tests');
});
- it('returns exact number for count < 100', () => {
- expect(highCountTrim(45)).toBe(45);
+ it('should pluralize when count is 0', () => {
+ expect(textUtils.pluralize('test', 0)).toBe('tests');
});
- });
-
- describe('gl.text.insertText', () => {
- let textArea;
- beforeAll(() => {
- textArea = document.createElement('textarea');
- document.querySelector('body').appendChild(textArea);
- textArea.focus();
+ it('should not pluralize when count is 1', () => {
+ expect(textUtils.pluralize('test', 1)).toBe('test');
});
+ });
- afterAll(() => {
- textArea.parentNode.removeChild(textArea);
+ describe('dasherize', () => {
+ it('should replace underscores with dashes', () => {
+ expect(textUtils.dasherize('foo_bar_foo')).toEqual('foo-bar-foo');
});
+ });
- describe('without selection', () => {
- it('inserts the tag on an empty line', () => {
- const initialValue = '';
-
- textArea.value = initialValue;
- textArea.selectionStart = 0;
- textArea.selectionEnd = 0;
-
- gl.text.insertText(textArea, textArea.value, '*', null, '', false);
-
- expect(textArea.value).toEqual(`${initialValue}* `);
- });
-
- it('inserts the tag on a new line if the current one is not empty', () => {
- const initialValue = 'some text';
-
- textArea.value = initialValue;
- textArea.setSelectionRange(initialValue.length, initialValue.length);
-
- gl.text.insertText(textArea, textArea.value, '*', null, '', false);
-
- expect(textArea.value).toEqual(`${initialValue}\n* `);
- });
-
- it('inserts the tag on the same line if the current line only contains spaces', () => {
- const initialValue = ' ';
-
- textArea.value = initialValue;
- textArea.setSelectionRange(initialValue.length, initialValue.length);
-
- gl.text.insertText(textArea, textArea.value, '*', null, '', false);
-
- expect(textArea.value).toEqual(`${initialValue}* `);
- });
-
- it('inserts the tag on the same line if the current line only contains tabs', () => {
- const initialValue = '\t\t\t';
-
- textArea.value = initialValue;
- textArea.setSelectionRange(initialValue.length, initialValue.length);
-
- gl.text.insertText(textArea, textArea.value, '*', null, '', false);
-
- expect(textArea.value).toEqual(`${initialValue}* `);
- });
+ describe('slugify', () => {
+ it('should convert to lower case', () => {
+ expect(textUtils.slugify('João')).toEqual('joão');
});
});
});
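
One plausible addDelimiter implementation consistent with the expectations above; this regex-based sketch is an assumption, not the actual ~/lib/utils/text_utility code.

// Insert a comma before every complete group of three trailing digits.
const addDelimiter = text => text.replace(/\B(?=(\d{3})+(?!\d))/g, ',');

addDelimiter('1234');   // => '1,234'
addDelimiter('222222'); // => '222,222'
addDelimiter('aaaa');   // => 'aaaa' (no digit groups, nothing to delimit)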
diff --git a/spec/javascripts/merge_request_spec.js b/spec/javascripts/merge_request_spec.js
index 3ab901da6b6..70ae63ba036 100644
--- a/spec/javascripts/merge_request_spec.js
+++ b/spec/javascripts/merge_request_spec.js
@@ -63,7 +63,7 @@ import IssuablesHelper from '~/helpers/issuables_helper';
describe('merge request of another user', () => {
beforeEach(() => {
loadFixtures('merge_requests/merge_request_with_task_list.html.raw');
- this.el = document.querySelector('.merge-request .issuable-actions');
+ this.el = document.querySelector('.js-issuable-actions');
const merge = new MergeRequest();
merge.hideCloseButton();
});
@@ -83,7 +83,7 @@ import IssuablesHelper from '~/helpers/issuables_helper';
describe('merge request of current_user', () => {
beforeEach(() => {
loadFixtures('merge_requests/merge_request_of_current_user.html.raw');
- this.el = document.querySelector('.merge-request .issuable-actions');
+ this.el = document.querySelector('.js-issuable-actions');
const merge = new MergeRequest();
merge.hideCloseButton();
});
diff --git a/spec/javascripts/monitoring/dashboard_spec.js b/spec/javascripts/monitoring/dashboard_spec.js
index 752fdfb4614..9885b8a790f 100644
--- a/spec/javascripts/monitoring/dashboard_spec.js
+++ b/spec/javascripts/monitoring/dashboard_spec.js
@@ -1,6 +1,8 @@
import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
import Dashboard from '~/monitoring/components/dashboard.vue';
-import { MonitorMockInterceptor } from './mock_data';
+import axios from '~/lib/utils/axios_utils';
+import { metricsGroupsAPIResponse, mockApiEndpoint } from './mock_data';
describe('Dashboard', () => {
const fixtureName = 'environments/metrics/metrics.html.raw';
@@ -26,13 +28,17 @@ describe('Dashboard', () => {
});
describe('requests information to the server', () => {
+ let mock;
beforeEach(() => {
document.querySelector('#prometheus-graphs').setAttribute('data-has-metrics', 'true');
- Vue.http.interceptors.push(MonitorMockInterceptor);
+ mock = new MockAdapter(axios);
+ mock.onGet(mockApiEndpoint).reply(200, {
+ metricsGroupsAPIResponse,
+ });
});
afterEach(() => {
- Vue.http.interceptors = _.without(Vue.http.interceptors, MonitorMockInterceptor);
+ mock.reset();
});
it('shows up a loading state', (done) => {
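
The hunk above is the recurring vue-resource-to-axios migration pattern in this merge: replace a global Vue.http interceptor with a per-spec axios-mock-adapter instance. A minimal sketch of the pattern; the endpoint and payload here are placeholders.

import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';

let mock;

beforeEach(() => {
  // Stub one GET per test instead of mutating Vue.http.interceptors.
  mock = new MockAdapter(axios);
  mock.onGet('/endpoint.json').reply(200, { metrics: [] });
});

afterEach(() => {
  // reset() clears handlers and history; restore() would detach the
  // adapter from the axios instance entirely.
  mock.reset();
});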
diff --git a/spec/javascripts/monitoring/graph_path_spec.js b/spec/javascripts/monitoring/graph_path_spec.js
index 8ece913ada8..c83bd19345f 100644
--- a/spec/javascripts/monitoring/graph_path_spec.js
+++ b/spec/javascripts/monitoring/graph_path_spec.js
@@ -32,4 +32,21 @@ describe('Monitoring Paths', () => {
expect(metricLine.getAttribute('stroke')).toBe('#1f78d1');
expect(metricLine.getAttribute('d')).toBe(firstTimeSeries.linePath);
});
+
+ describe('Computed properties', () => {
+ it('strokeDashArray', () => {
+ const component = createComponent({
+ generatedLinePath: firstTimeSeries.linePath,
+ generatedAreaPath: firstTimeSeries.areaPath,
+ lineColor: firstTimeSeries.lineColor,
+ areaColor: firstTimeSeries.areaColor,
+ });
+
+ component.lineStyle = 'dashed';
+ expect(component.strokeDashArray).toBe('3, 1');
+
+ component.lineStyle = 'dotted';
+ expect(component.strokeDashArray).toBe('1, 1');
+ });
+ });
});
diff --git a/spec/javascripts/monitoring/mock_data.js b/spec/javascripts/monitoring/mock_data.js
index 7ceab657464..6b34855b8b2 100644
--- a/spec/javascripts/monitoring/mock_data.js
+++ b/spec/javascripts/monitoring/mock_data.js
@@ -2425,13 +2425,6 @@ const metricsGroupsAPIResponse = {
export default metricsGroupsAPIResponse;
-const responseMockData = {
- 'GET': {
- '/root/hello-prometheus/environments/30/additional_metrics.json': metricsGroupsAPIResponse,
- 'http://test.host/frontend-fixtures/environments-project/environments/1/additional_metrics.json': metricsGroupsAPIResponse, // TODO: MAke sure this works in the monitoring_bundle_spec
- },
-};
-
export const deploymentData = [
{
id: 111,
@@ -8320,11 +8313,3 @@ export function convertDatesMultipleSeries(multipleSeries) {
});
return convertedMultiple;
}
-
-export function MonitorMockInterceptor(request, next) {
- const body = responseMockData[request.method.toUpperCase()][request.url];
-
- next(request.respondWith(JSON.stringify(body), {
- status: 200,
- }));
-}
diff --git a/spec/javascripts/new_branch_spec.js b/spec/javascripts/new_branch_spec.js
index c57f44dae17..50a5e4ff056 100644
--- a/spec/javascripts/new_branch_spec.js
+++ b/spec/javascripts/new_branch_spec.js
@@ -1,7 +1,6 @@
/* eslint-disable space-before-function-paren, one-var, no-var, one-var-declaration-per-line, no-return-assign, quotes, max-len */
-/* global NewBranchForm */
-import '~/new_branch_form';
+import NewBranchForm from '~/new_branch_form';
(function() {
describe('Branch', function() {
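
The import change above implies ~/new_branch_form now has a default export instead of attaching NewBranchForm to the global scope. A sketch of that module shape; the constructor signature is an assumption and the class body is reduced to a stub.

// Default export replaces the old `window.NewBranchForm` global, so the
// spec no longer needs `/* global NewBranchForm */`.
export default class NewBranchForm {
  constructor(form, availableRefs) {
    this.form = form;
    this.availableRefs = availableRefs;
  }
}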
diff --git a/spec/javascripts/notes/components/issue_comment_form_spec.js b/spec/javascripts/notes/components/issue_comment_form_spec.js
index a26fc8f63cc..04a7f8e32f1 100644
--- a/spec/javascripts/notes/components/issue_comment_form_spec.js
+++ b/spec/javascripts/notes/components/issue_comment_form_spec.js
@@ -2,7 +2,7 @@ import Vue from 'vue';
import Autosize from 'autosize';
import store from '~/notes/stores';
import issueCommentForm from '~/notes/components/issue_comment_form.vue';
-import { loggedOutIssueData, notesDataMock, userDataMock, issueDataMock } from '../mock_data';
+import { loggedOutnoteableData, notesDataMock, userDataMock, noteableDataMock } from '../mock_data';
import { keyboardDownEvent } from '../../issue_show/helpers';
describe('issue_comment_form component', () => {
@@ -23,7 +23,7 @@ describe('issue_comment_form component', () => {
describe('user is logged in', () => {
beforeEach(() => {
store.dispatch('setUserData', userDataMock);
- store.dispatch('setIssueData', issueDataMock);
+ store.dispatch('setNoteableData', noteableDataMock);
store.dispatch('setNotesData', notesDataMock);
vm = mountComponent();
@@ -55,6 +55,25 @@ describe('issue_comment_form component', () => {
expect(vm.toggleIssueState).toHaveBeenCalled();
});
+
+ it('should disable action button whilst submitting', (done) => {
+ const saveNotePromise = Promise.resolve();
+ vm.note = 'hello world';
+ spyOn(vm, 'saveNote').and.returnValue(saveNotePromise);
+ spyOn(vm, 'stopPolling');
+
+ const actionButton = vm.$el.querySelector('.js-action-button');
+
+ vm.handleSave();
+
+ Vue.nextTick()
+ .then(() => expect(actionButton.disabled).toBeTruthy())
+ .then(() => saveNotePromise)
+ .then(Vue.nextTick)
+ .then(() => expect(actionButton.disabled).toBeFalsy())
+ .then(done)
+ .catch(done.fail);
+ });
});
describe('textarea', () => {
@@ -159,7 +178,7 @@ describe('issue_comment_form component', () => {
describe('issue is confidential', () => {
it('shows information warning', (done) => {
- store.dispatch('setIssueData', Object.assign(issueDataMock, { confidential: true }));
+ store.dispatch('setNoteableData', Object.assign(noteableDataMock, { confidential: true }));
Vue.nextTick(() => {
expect(vm.$el.querySelector('.confidential-issue-warning')).toBeDefined();
done();
@@ -171,7 +190,7 @@ describe('issue_comment_form component', () => {
describe('user is not logged in', () => {
beforeEach(() => {
store.dispatch('setUserData', null);
- store.dispatch('setIssueData', loggedOutIssueData);
+ store.dispatch('setNoteableData', loggedOutnoteableData);
store.dispatch('setNotesData', notesDataMock);
vm = mountComponent();
diff --git a/spec/javascripts/notes/components/issue_discussion_spec.js b/spec/javascripts/notes/components/issue_discussion_spec.js
index 05c6b57f93e..b6ae55d44f5 100644
--- a/spec/javascripts/notes/components/issue_discussion_spec.js
+++ b/spec/javascripts/notes/components/issue_discussion_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import store from '~/notes/stores';
import issueDiscussion from '~/notes/components/issue_discussion.vue';
-import { issueDataMock, discussionMock, notesDataMock } from '../mock_data';
+import { noteableDataMock, discussionMock, notesDataMock } from '../mock_data';
describe('issue_discussion component', () => {
let vm;
@@ -9,7 +9,7 @@ describe('issue_discussion component', () => {
beforeEach(() => {
const Component = Vue.extend(issueDiscussion);
- store.dispatch('setIssueData', issueDataMock);
+ store.dispatch('setNoteableData', noteableDataMock);
store.dispatch('setNotesData', notesDataMock);
vm = new Component({
diff --git a/spec/javascripts/notes/components/issue_note_app_spec.js b/spec/javascripts/notes/components/issue_note_app_spec.js
index 22e91c4c40f..8e43037f356 100644
--- a/spec/javascripts/notes/components/issue_note_app_spec.js
+++ b/spec/javascripts/notes/components/issue_note_app_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import issueNotesApp from '~/notes/components/issue_notes_app.vue';
-import service from '~/notes/services/issue_notes_service';
+import service from '~/notes/services/notes_service';
import * as mockData from '../mock_data';
describe('issue_note_app', () => {
@@ -24,7 +24,7 @@ describe('issue_note_app', () => {
mountComponent = (data) => {
const props = data || {
- issueData: mockData.issueDataMock,
+ noteableData: mockData.noteableDataMock,
notesData: mockData.notesDataMock,
userData: mockData.userDataMock,
};
@@ -60,7 +60,7 @@ describe('issue_note_app', () => {
});
it('should set issue data', () => {
- expect(vm.$store.state.issueData).toEqual(mockData.issueDataMock);
+ expect(vm.$store.state.noteableData).toEqual(mockData.noteableDataMock);
});
it('should set user data', () => {
diff --git a/spec/javascripts/notes/components/issue_note_body_spec.js b/spec/javascripts/notes/components/issue_note_body_spec.js
index 81f07ed47cc..37aad50737b 100644
--- a/spec/javascripts/notes/components/issue_note_body_spec.js
+++ b/spec/javascripts/notes/components/issue_note_body_spec.js
@@ -2,7 +2,7 @@
import Vue from 'vue';
import store from '~/notes/stores';
import noteBody from '~/notes/components/issue_note_body.vue';
-import { issueDataMock, notesDataMock, note } from '../mock_data';
+import { noteableDataMock, notesDataMock, note } from '../mock_data';
describe('issue_note_body component', () => {
let vm;
@@ -10,7 +10,7 @@ describe('issue_note_body component', () => {
beforeEach(() => {
const Component = Vue.extend(noteBody);
- store.dispatch('setIssueData', issueDataMock);
+ store.dispatch('setNoteableData', noteableDataMock);
store.dispatch('setNotesData', notesDataMock);
vm = new Component({
diff --git a/spec/javascripts/notes/components/issue_note_form_spec.js b/spec/javascripts/notes/components/issue_note_form_spec.js
index a90dbcb72b5..d42ef239711 100644
--- a/spec/javascripts/notes/components/issue_note_form_spec.js
+++ b/spec/javascripts/notes/components/issue_note_form_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import store from '~/notes/stores';
import issueNoteForm from '~/notes/components/issue_note_form.vue';
-import { issueDataMock, notesDataMock } from '../mock_data';
+import { noteableDataMock, notesDataMock } from '../mock_data';
import { keyboardDownEvent } from '../../issue_show/helpers';
describe('issue_note_form component', () => {
@@ -11,7 +11,7 @@ describe('issue_note_form component', () => {
beforeEach(() => {
const Component = Vue.extend(issueNoteForm);
- store.dispatch('setIssueData', issueDataMock);
+ store.dispatch('setNoteableData', noteableDataMock);
store.dispatch('setNotesData', notesDataMock);
props = {
diff --git a/spec/javascripts/notes/components/issue_note_spec.js b/spec/javascripts/notes/components/issue_note_spec.js
index 7ef85d5b4f0..73fd188dbe5 100644
--- a/spec/javascripts/notes/components/issue_note_spec.js
+++ b/spec/javascripts/notes/components/issue_note_spec.js
@@ -2,7 +2,7 @@
import Vue from 'vue';
import store from '~/notes/stores';
import issueNote from '~/notes/components/issue_note.vue';
-import { issueDataMock, notesDataMock, note } from '../mock_data';
+import { noteableDataMock, notesDataMock, note } from '../mock_data';
describe('issue_note', () => {
let vm;
@@ -10,7 +10,7 @@ describe('issue_note', () => {
beforeEach(() => {
const Component = Vue.extend(issueNote);
- store.dispatch('setIssueData', issueDataMock);
+ store.dispatch('setNoteableData', noteableDataMock);
store.dispatch('setNotesData', notesDataMock);
vm = new Component({
diff --git a/spec/javascripts/notes/components/issue_note_actions_spec.js b/spec/javascripts/notes/components/note_actions_spec.js
index 7bcc061f167..ab81aabb992 100644
--- a/spec/javascripts/notes/components/issue_note_actions_spec.js
+++ b/spec/javascripts/notes/components/note_actions_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import store from '~/notes/stores';
-import issueActions from '~/notes/components/issue_note_actions.vue';
+import noteActions from '~/notes/components/note_actions.vue';
import { userDataMock } from '../mock_data';
describe('issue_note_actions component', () => {
@@ -8,7 +8,7 @@ describe('issue_note_actions component', () => {
let Component;
beforeEach(() => {
- Component = Vue.extend(issueActions);
+ Component = Vue.extend(noteActions);
});
afterEach(() => {
diff --git a/spec/javascripts/notes/components/issue_note_attachment_spec.js b/spec/javascripts/notes/components/note_attachment_spec.js
index 8f33b874ad6..b14a518b622 100644
--- a/spec/javascripts/notes/components/issue_note_attachment_spec.js
+++ b/spec/javascripts/notes/components/note_attachment_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import issueNoteAttachment from '~/notes/components/issue_note_attachment.vue';
+import noteAttachment from '~/notes/components/note_attachment.vue';
describe('issue note attachment', () => {
it('should render properly', () => {
@@ -11,7 +11,7 @@ describe('issue note attachment', () => {
},
};
- const Component = Vue.extend(issueNoteAttachment);
+ const Component = Vue.extend(noteAttachment);
const vm = new Component({
propsData: props,
}).$mount();
diff --git a/spec/javascripts/notes/components/issue_note_awards_list_spec.js b/spec/javascripts/notes/components/note_awards_list_spec.js
index 3b6c34f1494..15995ec5a05 100644
--- a/spec/javascripts/notes/components/issue_note_awards_list_spec.js
+++ b/spec/javascripts/notes/components/note_awards_list_spec.js
@@ -1,16 +1,16 @@
import Vue from 'vue';
import store from '~/notes/stores';
-import awardsNote from '~/notes/components/issue_note_awards_list.vue';
-import { issueDataMock, notesDataMock } from '../mock_data';
+import awardsNote from '~/notes/components/note_awards_list.vue';
+import { noteableDataMock, notesDataMock } from '../mock_data';
-describe('issue_note_awards_list component', () => {
+describe('note_awards_list component', () => {
let vm;
let awardsMock;
beforeEach(() => {
const Component = Vue.extend(awardsNote);
- store.dispatch('setIssueData', issueDataMock);
+ store.dispatch('setNoteableData', noteableDataMock);
store.dispatch('setNotesData', notesDataMock);
awardsMock = [
{
diff --git a/spec/javascripts/notes/components/issue_note_edited_text_spec.js b/spec/javascripts/notes/components/note_edited_text_spec.js
index 6603241eb64..e0b991c32ec 100644
--- a/spec/javascripts/notes/components/issue_note_edited_text_spec.js
+++ b/spec/javascripts/notes/components/note_edited_text_spec.js
@@ -1,12 +1,12 @@
import Vue from 'vue';
-import issueNoteEditedText from '~/notes/components/issue_note_edited_text.vue';
+import noteEditedText from '~/notes/components/note_edited_text.vue';
-describe('issue_note_edited_text', () => {
+describe('note_edited_text', () => {
let vm;
let props;
beforeEach(() => {
- const Component = Vue.extend(issueNoteEditedText);
+ const Component = Vue.extend(noteEditedText);
props = {
actionText: 'Edited',
className: 'foo-bar',
diff --git a/spec/javascripts/notes/components/issue_note_header_spec.js b/spec/javascripts/notes/components/note_header_spec.js
index 83ea18508ae..16a76b11321 100644
--- a/spec/javascripts/notes/components/issue_note_header_spec.js
+++ b/spec/javascripts/notes/components/note_header_spec.js
@@ -1,13 +1,13 @@
import Vue from 'vue';
-import issueNoteHeader from '~/notes/components/issue_note_header.vue';
+import noteHeader from '~/notes/components/note_header.vue';
import store from '~/notes/stores';
-describe('issue_note_header component', () => {
+describe('note_header component', () => {
let vm;
let Component;
beforeEach(() => {
- Component = Vue.extend(issueNoteHeader);
+ Component = Vue.extend(noteHeader);
});
afterEach(() => {
diff --git a/spec/javascripts/notes/components/issue_note_signed_out_widget_spec.js b/spec/javascripts/notes/components/note_signed_out_widget_spec.js
index f20d9ce9268..6cba8053888 100644
--- a/spec/javascripts/notes/components/issue_note_signed_out_widget_spec.js
+++ b/spec/javascripts/notes/components/note_signed_out_widget_spec.js
@@ -1,13 +1,13 @@
import Vue from 'vue';
-import issueNoteSignedOut from '~/notes/components/issue_note_signed_out_widget.vue';
+import noteSignedOut from '~/notes/components/note_signed_out_widget.vue';
import store from '~/notes/stores';
import { notesDataMock } from '../mock_data';
-describe('issue_note_signed_out_widget component', () => {
+describe('note_signed_out_widget component', () => {
let vm;
beforeEach(() => {
- const Component = Vue.extend(issueNoteSignedOut);
+ const Component = Vue.extend(noteSignedOut);
store.dispatch('setNotesData', notesDataMock);
vm = new Component({
diff --git a/spec/javascripts/notes/mock_data.js b/spec/javascripts/notes/mock_data.js
index 89ba3a002b7..42497de3c55 100644
--- a/spec/javascripts/notes/mock_data.js
+++ b/spec/javascripts/notes/mock_data.js
@@ -18,7 +18,7 @@ export const userDataMock = {
username: 'root',
};
-export const issueDataMock = {
+export const noteableDataMock = {
assignees: [],
author_id: 1,
branch_name: null,
@@ -271,7 +271,7 @@ export const discussionMock = {
individual_note: false,
};
-export const loggedOutIssueData = {
+export const loggedOutnoteableData = {
"id": 98,
"iid": 26,
"author_id": 1,
diff --git a/spec/javascripts/notes/stores/actions_spec.js b/spec/javascripts/notes/stores/actions_spec.js
index 3d1ca870ca4..e092320f9a3 100644
--- a/spec/javascripts/notes/stores/actions_spec.js
+++ b/spec/javascripts/notes/stores/actions_spec.js
@@ -1,6 +1,6 @@
import * as actions from '~/notes/stores/actions';
import testAction from '../../helpers/vuex_action_helper';
-import { discussionMock, notesDataMock, userDataMock, issueDataMock, individualNote } from '../mock_data';
+import { discussionMock, notesDataMock, userDataMock, noteableDataMock, individualNote } from '../mock_data';
describe('Actions Notes Store', () => {
describe('setNotesData', () => {
@@ -11,10 +11,10 @@ describe('Actions Notes Store', () => {
});
});
- describe('setIssueData', () => {
+ describe('setNoteableData', () => {
it('should set received issue data', (done) => {
- testAction(actions.setIssueData, null, { issueData: {} }, [
- { type: 'SET_ISSUE_DATA', payload: issueDataMock },
+ testAction(actions.setNoteableData, null, { noteableData: {} }, [
+ { type: 'SET_NOTEABLE_DATA', payload: noteableDataMock },
], done);
});
});
diff --git a/spec/javascripts/notes/stores/getters_spec.js b/spec/javascripts/notes/stores/getters_spec.js
index 48ee1bf9a52..c5a84b71788 100644
--- a/spec/javascripts/notes/stores/getters_spec.js
+++ b/spec/javascripts/notes/stores/getters_spec.js
@@ -1,5 +1,5 @@
import * as getters from '~/notes/stores/getters';
-import { notesDataMock, userDataMock, issueDataMock, individualNote } from '../mock_data';
+import { notesDataMock, userDataMock, noteableDataMock, individualNote } from '../mock_data';
describe('Getters Notes Store', () => {
let state;
@@ -11,7 +11,7 @@ describe('Getters Notes Store', () => {
notesData: notesDataMock,
userData: userDataMock,
- issueData: issueDataMock,
+ noteableData: noteableDataMock,
};
});
describe('notes', () => {
@@ -32,9 +32,9 @@ describe('Getters Notes Store', () => {
});
});
- describe('getIssueData', () => {
- it('should return all data in `issueData`', () => {
- expect(getters.getIssueData(state)).toEqual(issueDataMock);
+ describe('getNoteableData', () => {
+ it('should return all data in `noteableData`', () => {
+ expect(getters.getNoteableData(state)).toEqual(noteableDataMock);
});
});
diff --git a/spec/javascripts/notes/stores/mutation_spec.js b/spec/javascripts/notes/stores/mutation_spec.js
index 1e22e03e178..22d99998a7d 100644
--- a/spec/javascripts/notes/stores/mutation_spec.js
+++ b/spec/javascripts/notes/stores/mutation_spec.js
@@ -1,5 +1,5 @@
import mutations from '~/notes/stores/mutations';
-import { note, discussionMock, notesDataMock, userDataMock, issueDataMock, individualNote } from '../mock_data';
+import { note, discussionMock, notesDataMock, userDataMock, noteableDataMock, individualNote } from '../mock_data';
describe('Mutation Notes Store', () => {
describe('ADD_NEW_NOTE', () => {
@@ -74,14 +74,14 @@ describe('Mutation Notes Store', () => {
});
});
- describe('SET_ISSUE_DATA', () => {
+ describe('SET_NOTEABLE_DATA', () => {
it('should set the issue data', () => {
const state = {
- issueData: {},
+ noteableData: {},
};
- mutations.SET_ISSUE_DATA(state, issueDataMock);
- expect(state.issueData).toEqual(issueDataMock);
+ mutations.SET_NOTEABLE_DATA(state, noteableDataMock);
+ expect(state.noteableData).toEqual(noteableDataMock);
});
});
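
The renames above (issueData → noteableData, SET_ISSUE_DATA → SET_NOTEABLE_DATA) all exercise one action/mutation pair, which can be sketched as follows; the bodies are inferred from the assertions, not copied from ~/notes/stores.

// actions.js (sketch)
export const setNoteableData = ({ commit }, data) =>
  commit('SET_NOTEABLE_DATA', data);

// mutations.js (sketch)
export default {
  SET_NOTEABLE_DATA(state, data) {
    Object.assign(state, { noteableData: data });
  },
};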
diff --git a/spec/javascripts/notes_spec.js b/spec/javascripts/notes_spec.js
index 928a4b461cc..677a389b88f 100644
--- a/spec/javascripts/notes_spec.js
+++ b/spec/javascripts/notes_spec.js
@@ -5,7 +5,6 @@ import 'autosize';
import '~/gl_form';
import '~/lib/utils/text_utility';
import '~/render_gfm';
-import '~/render_math';
import '~/notes';
(function() {
diff --git a/spec/javascripts/pipelines/graph/job_component_spec.js b/spec/javascripts/pipelines/graph/job_component_spec.js
index 342ee6c1242..23c87610d83 100644
--- a/spec/javascripts/pipelines/graph/job_component_spec.js
+++ b/spec/javascripts/pipelines/graph/job_component_spec.js
@@ -1,8 +1,10 @@
import Vue from 'vue';
import jobComponent from '~/pipelines/components/graph/job_component.vue';
+import mountComponent from '../../helpers/vue_mount_component_helper';
describe('pipeline graph job component', () => {
let JobComponent;
+ let component;
const mockJob = {
id: 4256,
@@ -13,6 +15,7 @@ describe('pipeline graph job component', () => {
label: 'passed',
group: 'success',
details_path: '/root/ci-mock/builds/4256',
+ has_details: true,
action: {
icon: 'retry',
title: 'Retry',
@@ -26,13 +29,13 @@ describe('pipeline graph job component', () => {
JobComponent = Vue.extend(jobComponent);
});
+ afterEach(() => {
+ component.$destroy();
+ });
+
describe('name with link', () => {
it('should render the job name and status with a link', (done) => {
- const component = new JobComponent({
- propsData: {
- job: mockJob,
- },
- }).$mount();
+ component = mountComponent(JobComponent, { job: mockJob });
Vue.nextTick(() => {
const link = component.$el.querySelector('a');
@@ -56,23 +59,23 @@ describe('pipeline graph job component', () => {
describe('name without link', () => {
it('should render status and name', () => {
- const component = new JobComponent({
- propsData: {
- job: {
- id: 4256,
- name: 'test',
- status: {
- icon: 'icon_status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- details_path: '/root/ci-mock/builds/4256',
- },
+ component = mountComponent(JobComponent, {
+ job: {
+ id: 4256,
+ name: 'test',
+ status: {
+ icon: 'icon_status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ details_path: '/root/ci-mock/builds/4256',
+ has_details: false,
},
},
- }).$mount();
+ });
expect(component.$el.querySelector('.js-status-icon-success')).toBeDefined();
+ expect(component.$el.querySelector('a')).toBeNull();
expect(
component.$el.querySelector('.ci-status-text').textContent.trim(),
@@ -82,11 +85,7 @@ describe('pipeline graph job component', () => {
describe('action icon', () => {
it('should render the action icon', () => {
- const component = new JobComponent({
- propsData: {
- job: mockJob,
- },
- }).$mount();
+ component = mountComponent(JobComponent, { job: mockJob });
expect(component.$el.querySelector('a.ci-action-icon-container')).toBeDefined();
expect(component.$el.querySelector('i.ci-action-icon-wrapper')).toBeDefined();
@@ -95,24 +94,20 @@ describe('pipeline graph job component', () => {
describe('dropdown', () => {
it('should render the dropdown action icon', () => {
- const component = new JobComponent({
- propsData: {
- job: mockJob,
- isDropdown: true,
- },
- }).$mount();
+ component = mountComponent(JobComponent, {
+ job: mockJob,
+ isDropdown: true,
+ });
expect(component.$el.querySelector('a.ci-action-icon-wrapper')).toBeDefined();
});
});
it('should render provided class name', () => {
- const component = new JobComponent({
- propsData: {
- job: mockJob,
- cssClassJobName: 'css-class-job-name',
- },
- }).$mount();
+ component = mountComponent(JobComponent, {
+ job: mockJob,
+ cssClassJobName: 'css-class-job-name',
+ });
expect(
component.$el.querySelector('a').classList.contains('css-class-job-name'),
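
Each `new JobComponent({ propsData: ... }).$mount()` call above collapses into the shared helper. Inferred from its call sites (not copied from spec/javascripts/helpers/vue_mount_component_helper.js), the helper is roughly:

// vue_mount_component_helper.js (sketch)
export default (Component, props = {}, el = null) =>
  new Component({ propsData: props }).$mount(el);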
diff --git a/spec/javascripts/pipelines/navigation_tabs_spec.js b/spec/javascripts/pipelines/navigation_tabs_spec.js
deleted file mode 100644
index 53a88e6322f..00000000000
--- a/spec/javascripts/pipelines/navigation_tabs_spec.js
+++ /dev/null
@@ -1,127 +0,0 @@
-import Vue from 'vue';
-import navigationTabs from '~/pipelines/components/navigation_tabs.vue';
-import mountComponent from '../helpers/vue_mount_component_helper';
-
-describe('navigation tabs pipeline component', () => {
- let vm;
- let Component;
- let data;
-
- beforeEach(() => {
- data = {
- scope: 'all',
- count: {
- all: 16,
- running: 1,
- pending: 10,
- finished: 0,
- },
- paths: {
- allPath: '/gitlab-org/gitlab-ce/pipelines',
- pendingPath: '/gitlab-org/gitlab-ce/pipelines?scope=pending',
- finishedPath: '/gitlab-org/gitlab-ce/pipelines?scope=finished',
- runningPath: '/gitlab-org/gitlab-ce/pipelines?scope=running',
- branchesPath: '/gitlab-org/gitlab-ce/pipelines?scope=branches',
- tagsPath: '/gitlab-org/gitlab-ce/pipelines?scope=tags',
- },
- };
-
- Component = Vue.extend(navigationTabs);
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- it('should render tabs with correct paths', () => {
- vm = mountComponent(Component, data);
-
- // All
- const allTab = vm.$el.querySelector('.js-pipelines-tab-all a');
- expect(allTab.textContent.trim()).toContain('All');
- expect(allTab.getAttribute('href')).toEqual(data.paths.allPath);
-
- // Pending
- const pendingTab = vm.$el.querySelector('.js-pipelines-tab-pending a');
- expect(pendingTab.textContent.trim()).toContain('Pending');
- expect(pendingTab.getAttribute('href')).toEqual(data.paths.pendingPath);
-
- // Running
- const runningTab = vm.$el.querySelector('.js-pipelines-tab-running a');
- expect(runningTab.textContent.trim()).toContain('Running');
- expect(runningTab.getAttribute('href')).toEqual(data.paths.runningPath);
-
- // Finished
- const finishedTab = vm.$el.querySelector('.js-pipelines-tab-finished a');
- expect(finishedTab.textContent.trim()).toContain('Finished');
- expect(finishedTab.getAttribute('href')).toEqual(data.paths.finishedPath);
-
- // Branches
- const branchesTab = vm.$el.querySelector('.js-pipelines-tab-branches a');
- expect(branchesTab.textContent.trim()).toContain('Branches');
-
- // Tags
- const tagsTab = vm.$el.querySelector('.js-pipelines-tab-tags a');
- expect(tagsTab.textContent.trim()).toContain('Tags');
- });
-
- describe('scope', () => {
- it('should render scope provided as active tab', () => {
- vm = mountComponent(Component, data);
- expect(vm.$el.querySelector('.js-pipelines-tab-all').className).toContain('active');
- });
- });
-
- describe('badges', () => {
- it('should render provided number', () => {
- vm = mountComponent(Component, data);
- // All
- expect(
- vm.$el.querySelector('.js-totalbuilds-count').textContent.trim(),
- ).toContain(data.count.all);
-
- // Pending
- expect(
- vm.$el.querySelector('.js-pipelines-tab-pending .badge').textContent.trim(),
- ).toContain(data.count.pending);
-
- // Running
- expect(
- vm.$el.querySelector('.js-pipelines-tab-running .badge').textContent.trim(),
- ).toContain(data.count.running);
-
- // Finished
- expect(
- vm.$el.querySelector('.js-pipelines-tab-finished .badge').textContent.trim(),
- ).toContain(data.count.finished);
- });
-
- it('should not render badge when number is undefined', () => {
- vm = mountComponent(Component, {
- scope: 'all',
- paths: {},
- count: {},
- });
-
- // All
- expect(
- vm.$el.querySelector('.js-totalbuilds-count'),
- ).toEqual(null);
-
- // Pending
- expect(
- vm.$el.querySelector('.js-pipelines-tab-pending .badge'),
- ).toEqual(null);
-
- // Running
- expect(
- vm.$el.querySelector('.js-pipelines-tab-running .badge'),
- ).toEqual(null);
-
- // Finished
- expect(
- vm.$el.querySelector('.js-pipelines-tab-finished .badge'),
- ).toEqual(null);
- });
- });
-});
diff --git a/spec/javascripts/pipelines/pipelines_spec.js b/spec/javascripts/pipelines/pipelines_spec.js
index c30abb2edb0..367b42cefb0 100644
--- a/spec/javascripts/pipelines/pipelines_spec.js
+++ b/spec/javascripts/pipelines/pipelines_spec.js
@@ -1,6 +1,7 @@
import Vue from 'vue';
import pipelinesComp from '~/pipelines/components/pipelines.vue';
import Store from '~/pipelines/stores/pipelines_store';
+import mountComponent from '../helpers/vue_mount_component_helper';
describe('Pipelines', () => {
const jsonFixtureName = 'pipelines/pipelines.json';
@@ -9,26 +10,33 @@ describe('Pipelines', () => {
preloadFixtures(jsonFixtureName);
let PipelinesComponent;
- let pipeline;
+ let pipelines;
+ let component;
beforeEach(() => {
loadFixtures('static/pipelines.html.raw');
- const pipelines = getJSONFixture(jsonFixtureName).pipelines;
- pipeline = pipelines.find(p => p.id === 1);
+ pipelines = getJSONFixture(jsonFixtureName);
PipelinesComponent = Vue.extend(pipelinesComp);
});
+ afterEach(() => {
+ component.$destroy();
+ });
+
describe('successful request', () => {
describe('with pipelines', () => {
const pipelinesInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify(pipeline), {
+ next(request.respondWith(JSON.stringify(pipelines), {
status: 200,
}));
};
beforeEach(() => {
Vue.http.interceptors.push(pipelinesInterceptor);
+ component = mountComponent(PipelinesComponent, {
+ store: new Store(),
+ });
});
afterEach(() => {
@@ -38,18 +46,71 @@ describe('Pipelines', () => {
});
it('should render table', (done) => {
- const component = new PipelinesComponent({
- propsData: {
- store: new Store(),
- },
- }).$mount();
-
setTimeout(() => {
expect(component.$el.querySelector('.table-holder')).toBeDefined();
- expect(component.$el.querySelector('.realtime-loading')).toBe(null);
+ expect(
+ component.$el.querySelectorAll('.gl-responsive-table-row').length,
+ ).toEqual(pipelines.pipelines.length + 1);
+ done();
+ });
+ });
+
+ it('should render navigation tabs', (done) => {
+ setTimeout(() => {
+ expect(
+ component.$el.querySelector('.js-pipelines-tab-pending').textContent.trim(),
+ ).toContain('Pending');
+ expect(
+ component.$el.querySelector('.js-pipelines-tab-all').textContent.trim(),
+ ).toContain('All');
+ expect(
+ component.$el.querySelector('.js-pipelines-tab-running').textContent.trim(),
+ ).toContain('Running');
+ expect(
+ component.$el.querySelector('.js-pipelines-tab-finished').textContent.trim(),
+ ).toContain('Finished');
+ expect(
+ component.$el.querySelector('.js-pipelines-tab-branches').textContent.trim(),
+ ).toContain('Branches');
+ expect(
+ component.$el.querySelector('.js-pipelines-tab-tags').textContent.trim(),
+ ).toContain('Tags');
+ done();
+ });
+ });
+
+ it('should make an API request when using tabs', (done) => {
+ setTimeout(() => {
+ spyOn(component, 'updateContent');
+ component.$el.querySelector('.js-pipelines-tab-finished').click();
+
+ expect(component.updateContent).toHaveBeenCalledWith({ scope: 'finished', page: '1' });
done();
});
});
+
+ describe('with pagination', () => {
+ it('should make an API request when using pagination', (done) => {
+ setTimeout(() => {
+ spyOn(component, 'updateContent');
+ // Mock pagination
+ component.store.state.pageInfo = {
+ page: 1,
+ total: 10,
+ perPage: 2,
+ nextPage: 2,
+ totalPages: 5,
+ };
+
+ Vue.nextTick(() => {
+ component.$el.querySelector('.js-next-button a').click();
+ expect(component.updateContent).toHaveBeenCalledWith({ scope: 'all', page: '2' });
+
+ done();
+ });
+ });
+ });
+ });
});
describe('without pipelines', () => {
@@ -70,15 +131,14 @@ describe('Pipelines', () => {
});
it('should render empty state', (done) => {
- const component = new PipelinesComponent({
+ component = new PipelinesComponent({
propsData: {
store: new Store(),
},
}).$mount();
setTimeout(() => {
- expect(component.$el.querySelector('.empty-state')).toBeDefined();
- expect(component.$el.querySelector('.realtime-loading')).toBe(null);
+ expect(component.$el.querySelector('.empty-state')).not.toBe(null);
done();
});
});
@@ -103,7 +163,7 @@ describe('Pipelines', () => {
});
it('should render error state', (done) => {
- const component = new PipelinesComponent({
+ component = new PipelinesComponent({
propsData: {
store: new Store(),
},
@@ -111,9 +171,54 @@ describe('Pipelines', () => {
setTimeout(() => {
expect(component.$el.querySelector('.js-pipelines-error-state')).toBeDefined();
- expect(component.$el.querySelector('.realtime-loading')).toBe(null);
done();
});
});
});
+
+ describe('methods', () => {
+ beforeEach(() => {
+ spyOn(history, 'pushState').and.stub();
+ });
+
+ describe('updateContent', () => {
+ it('should set given parameters', () => {
+ component = mountComponent(PipelinesComponent, {
+ store: new Store(),
+ });
+ component.updateContent({ scope: 'finished', page: '4' });
+
+ expect(component.page).toEqual('4');
+ expect(component.scope).toEqual('finished');
+ expect(component.requestData.scope).toEqual('finished');
+ expect(component.requestData.page).toEqual('4');
+ });
+ });
+
+ describe('onChangeTab', () => {
+ it('should set page to 1', () => {
+ component = mountComponent(PipelinesComponent, {
+ store: new Store(),
+ });
+ spyOn(component, 'updateContent');
+
+ component.onChangeTab('running');
+
+ expect(component.updateContent).toHaveBeenCalledWith({ scope: 'running', page: '1' });
+ });
+ });
+
+ describe('onChangePage', () => {
+ it('should update page and keep scope', () => {
+ component = mountComponent(PipelinesComponent, {
+ store: new Store(),
+ });
+ spyOn(component, 'updateContent');
+
+ component.onChangePage(4);
+
+ expect(component.updateContent).toHaveBeenCalledWith({ scope: component.scope, page: '4' });
+ });
+ });
+ });
});
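
The new method specs pin down a small contract: updateContent stores the request parameters and syncs page/scope, while the tab and pagination handlers delegate to it. A sketch of that contract, with method bodies assumed from the expectations above rather than taken from the component.

export default {
  methods: {
    updateContent(parameters) {
      this.requestData = parameters;
      this.page = parameters.page;
      this.scope = parameters.scope;
      // history.pushState is stubbed in the specs above.
      window.history.pushState({}, null, `?scope=${parameters.scope}&page=${parameters.page}`);
    },
    onChangeTab(scope) {
      this.updateContent({ scope, page: '1' }); // switching tabs resets paging
    },
    onChangePage(page) {
      this.updateContent({ scope: this.scope, page: Number(page).toString() });
    },
  },
};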
diff --git a/spec/javascripts/projects_dropdown/components/projects_list_item_spec.js b/spec/javascripts/projects_dropdown/components/projects_list_item_spec.js
index 171629fcd6b..edef150dd1e 100644
--- a/spec/javascripts/projects_dropdown/components/projects_list_item_spec.js
+++ b/spec/javascripts/projects_dropdown/components/projects_list_item_spec.js
@@ -50,6 +50,18 @@ describe('ProjectsListItemComponent', () => {
expect(vm.highlightedProjectName).toBe(mockProject.name);
});
});
+
+ describe('truncatedNamespace', () => {
+ it('should truncate project name from namespace string', () => {
+ vm.namespace = 'platform / nokia-3310';
+ expect(vm.truncatedNamespace).toBe('platform');
+ });
+
+ it('should truncate namespace string from the middle if it includes more than two groups in path', () => {
+ vm.namespace = 'platform / hardware / broadcom / Wifi Group / Mobile Chipset / nokia-3310';
+ expect(vm.truncatedNamespace).toBe('platform / ... / Mobile Chipset');
+ });
+ });
});
describe('template', () => {
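
The two expectations above largely determine truncatedNamespace's shape for a ' / '-separated namespace ending in the project name. A sketch derived from those expectations, not from the component source:

export default {
  props: ['namespace'],
  computed: {
    truncatedNamespace() {
      const parts = this.namespace.split(' / ');
      parts.pop(); // drop the trailing project name, e.g. 'nokia-3310'
      if (parts.length > 2) {
        // Keep the root group and the immediate parent group.
        return `${parts[0]} / ... / ${parts[parts.length - 1]}`;
      }
      return parts.join(' / ');
    },
  },
};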
diff --git a/spec/javascripts/repo/components/commit_sidebar/list_collapsed_spec.js b/spec/javascripts/repo/components/commit_sidebar/list_collapsed_spec.js
new file mode 100644
index 00000000000..f750061a6a1
--- /dev/null
+++ b/spec/javascripts/repo/components/commit_sidebar/list_collapsed_spec.js
@@ -0,0 +1,33 @@
+import Vue from 'vue';
+import store from '~/repo/stores';
+import listCollapsed from '~/repo/components/commit_sidebar/list_collapsed.vue';
+import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
+import { file } from '../../helpers';
+
+describe('Multi-file editor commit sidebar list collapsed', () => {
+ let vm;
+
+ beforeEach(() => {
+ const Component = Vue.extend(listCollapsed);
+
+ vm = createComponentWithStore(Component, store);
+
+ vm.$store.state.openFiles.push(file(), file());
+ vm.$store.state.openFiles[0].tempFile = true;
+ vm.$store.state.openFiles.forEach((f) => {
+ Object.assign(f, {
+ changed: true,
+ });
+ });
+
+ vm.$mount();
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('renders added & modified files count', () => {
+ expect(vm.$el.textContent.replace(/\s+/g, ' ').trim()).toBe('1 1');
+ });
+});
diff --git a/spec/javascripts/repo/components/commit_sidebar/list_item_spec.js b/spec/javascripts/repo/components/commit_sidebar/list_item_spec.js
new file mode 100644
index 00000000000..18c9b46fcd9
--- /dev/null
+++ b/spec/javascripts/repo/components/commit_sidebar/list_item_spec.js
@@ -0,0 +1,53 @@
+import Vue from 'vue';
+import listItem from '~/repo/components/commit_sidebar/list_item.vue';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
+import { file } from '../../helpers';
+
+describe('Multi-file editor commit sidebar list item', () => {
+ let vm;
+ let f;
+
+ beforeEach(() => {
+ const Component = Vue.extend(listItem);
+
+ f = file();
+
+ vm = mountComponent(Component, {
+ file: f,
+ });
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('renders file path', () => {
+ expect(vm.$el.querySelector('.multi-file-commit-list-path').textContent.trim()).toBe(f.path);
+ });
+
+ describe('computed', () => {
+ describe('iconName', () => {
+ it('returns modified when not a tempFile', () => {
+ expect(vm.iconName).toBe('file-modified');
+ });
+
+ it('returns addition when a tempFile', () => {
+ f.tempFile = true;
+
+ expect(vm.iconName).toBe('file-addition');
+ });
+ });
+
+ describe('iconClass', () => {
+ it('returns modified when not a tempFile', () => {
+ expect(vm.iconClass).toContain('multi-file-modified');
+ });
+
+ it('returns addition when a tempFile', () => {
+ f.tempFile = true;
+
+ expect(vm.iconClass).toContain('multi-file-addition');
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/repo/components/commit_sidebar/list_spec.js b/spec/javascripts/repo/components/commit_sidebar/list_spec.js
new file mode 100644
index 00000000000..df7e3c5de21
--- /dev/null
+++ b/spec/javascripts/repo/components/commit_sidebar/list_spec.js
@@ -0,0 +1,72 @@
+import Vue from 'vue';
+import store from '~/repo/stores';
+import commitSidebarList from '~/repo/components/commit_sidebar/list.vue';
+import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
+import { file } from '../../helpers';
+
+describe('Multi-file editor commit sidebar list', () => {
+ let vm;
+
+ beforeEach(() => {
+ const Component = Vue.extend(commitSidebarList);
+
+ vm = createComponentWithStore(Component, store, {
+ title: 'Staged',
+ fileList: [],
+ collapsed: false,
+ }).$mount();
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('empty file list', () => {
+ it('renders no changes text', () => {
+ expect(vm.$el.querySelector('.help-block').textContent.trim()).toBe('No changes');
+ });
+ });
+
+ describe('with a list of files', () => {
+ beforeEach((done) => {
+ const f = file('file name');
+ f.changed = true;
+ vm.fileList.push(f);
+
+ Vue.nextTick(done);
+ });
+
+ it('renders list', () => {
+ expect(vm.$el.querySelectorAll('li').length).toBe(1);
+ });
+ });
+
+ describe('collapsed', () => {
+ beforeEach((done) => {
+ vm.collapsed = true;
+
+ Vue.nextTick(done);
+ });
+
+ it('adds collapsed class', () => {
+ expect(vm.$el.querySelector('.is-collapsed')).not.toBeNull();
+ });
+
+ it('hides list', () => {
+ expect(vm.$el.querySelector('.list-unstyled')).toBeNull();
+ expect(vm.$el.querySelector('.help-block')).toBeNull();
+ });
+
+ it('hides collapse button', () => {
+ expect(vm.$el.querySelector('.multi-file-commit-panel-collapse-btn')).toBeNull();
+ });
+ });
+
+ it('clicking toggle collapse button emits toggle event', () => {
+ spyOn(vm, '$emit');
+
+ vm.$el.querySelector('.multi-file-commit-panel-collapse-btn').click();
+
+ expect(vm.$emit).toHaveBeenCalledWith('toggleCollapsed');
+ });
+});
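
These new specs mount through createComponentWithStore, the store-aware sibling of mountComponent. Inferred from its call sites (note that some specs above chain $mount() themselves), the helper is roughly:

// vue_mount_component_helper.js (sketch)
export const createComponentWithStore = (Component, store, propsData = {}) =>
  new Component({ store, propsData });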
diff --git a/spec/javascripts/repo/components/repo_commit_section_spec.js b/spec/javascripts/repo/components/repo_commit_section_spec.js
index 0f991e1b727..1c794123095 100644
--- a/spec/javascripts/repo/components/repo_commit_section_spec.js
+++ b/spec/javascripts/repo/components/repo_commit_section_spec.js
@@ -25,8 +25,12 @@ describe('RepoCommitSection', () => {
return comp.$mount();
}
- beforeEach(() => {
+ beforeEach((done) => {
vm = createComponent();
+
+ vm.collapsed = false;
+
+ Vue.nextTick(done);
});
afterEach(() => {
@@ -36,12 +40,11 @@ describe('RepoCommitSection', () => {
});
it('renders a commit section', () => {
- const changedFileElements = [...vm.$el.querySelectorAll('.changed-files > li')];
- const submitCommit = vm.$el.querySelector('.btn');
- const targetBranch = vm.$el.querySelector('.target-branch');
+ const changedFileElements = [...vm.$el.querySelectorAll('.multi-file-commit-list li')];
+ const submitCommit = vm.$el.querySelector('form .btn');
- expect(vm.$el.querySelector(':scope > form')).toBeTruthy();
- expect(vm.$el.querySelector('.staged-files').textContent.trim()).toEqual('Staged files (2)');
+ expect(vm.$el.querySelector('.multi-file-commit-form')).not.toBeNull();
+ expect(vm.$el.querySelector('.multi-file-commit-panel-section header').textContent.trim()).toEqual('Staged');
expect(changedFileElements.length).toEqual(2);
changedFileElements.forEach((changedFile, i) => {
@@ -49,10 +52,7 @@ describe('RepoCommitSection', () => {
});
expect(submitCommit.disabled).toBeTruthy();
- expect(submitCommit.querySelector('.fa-spinner.fa-spin')).toBeFalsy();
- expect(vm.$el.querySelector('.commit-summary').textContent.trim()).toEqual('Commit 2 files');
- expect(targetBranch.querySelector(':scope > label').textContent.trim()).toEqual('Target branch');
- expect(targetBranch.querySelector('.help-block').textContent.trim()).toEqual('master');
+ expect(submitCommit.querySelector('.fa-spinner.fa-spin')).toBeNull();
});
describe('when submitting', () => {
@@ -69,7 +69,7 @@ describe('RepoCommitSection', () => {
});
it('allows you to submit', () => {
- expect(vm.$el.querySelector('.btn').disabled).toBeTruthy();
+ expect(vm.$el.querySelector('form .btn').disabled).toBeTruthy();
});
it('submits commit', (done) => {
diff --git a/spec/javascripts/repo/components/repo_editor_spec.js b/spec/javascripts/repo/components/repo_editor_spec.js
index 979d2185076..81158cad639 100644
--- a/spec/javascripts/repo/components/repo_editor_spec.js
+++ b/spec/javascripts/repo/components/repo_editor_spec.js
@@ -1,12 +1,13 @@
import Vue from 'vue';
import store from '~/repo/stores';
import repoEditor from '~/repo/components/repo_editor.vue';
+import monacoLoader from '~/repo/monaco_loader';
import { file, resetStore } from '../helpers';
describe('RepoEditor', () => {
let vm;
- beforeEach(() => {
+ beforeEach((done) => {
const f = file();
const RepoEditor = Vue.extend(repoEditor);
@@ -21,6 +22,10 @@ describe('RepoEditor', () => {
vm.monaco = true;
vm.$mount();
+
+ monacoLoader(['vs/editor/editor.main'], () => {
+ setTimeout(done, 0);
+ });
});
afterEach(() => {
@@ -32,7 +37,6 @@ describe('RepoEditor', () => {
it('renders an ide container', (done) => {
Vue.nextTick(() => {
expect(vm.shouldHideEditor).toBeFalsy();
- expect(vm.$el.textContent.trim()).toBe('');
done();
});
@@ -50,7 +54,7 @@ describe('RepoEditor', () => {
});
it('shows activeFile html', () => {
- expect(vm.$el.textContent.trim()).toBe('testing');
+ expect(vm.$el.textContent).toContain('testing');
});
});
});
diff --git a/spec/javascripts/repo/components/repo_sidebar_spec.js b/spec/javascripts/repo/components/repo_sidebar_spec.js
index 7cb4dace491..df7cf8aabbb 100644
--- a/spec/javascripts/repo/components/repo_sidebar_spec.js
+++ b/spec/javascripts/repo/components/repo_sidebar_spec.js
@@ -29,7 +29,6 @@ describe('RepoSidebar', () => {
const thead = vm.$el.querySelector('thead');
const tbody = vm.$el.querySelector('tbody');
- expect(vm.$el.id).toEqual('sidebar');
expect(vm.$el.classList.contains('sidebar-mini')).toBeFalsy();
expect(thead.querySelector('.name').textContent.trim()).toEqual('Name');
expect(thead.querySelector('.last-commit').textContent.trim()).toEqual('Last commit');
@@ -40,18 +39,6 @@ describe('RepoSidebar', () => {
expect(tbody.querySelector('.file')).toBeTruthy();
});
- it('does not render a thead, renders repo-file-options and sets sidebar-mini class if isMini', (done) => {
- vm.$store.state.openFiles.push(vm.$store.state.tree[0]);
-
- Vue.nextTick(() => {
- expect(vm.$el.classList.contains('sidebar-mini')).toBeTruthy();
- expect(vm.$el.querySelector('thead')).toBeTruthy();
- expect(vm.$el.querySelector('thead .repo-file-options')).toBeTruthy();
-
- done();
- });
- });
-
it('renders 5 loading files if tree is loading', (done) => {
vm.$store.state.tree = [];
vm.$store.state.loading = true;
diff --git a/spec/javascripts/repo/components/repo_tab_spec.js b/spec/javascripts/repo/components/repo_tab_spec.js
index df0ca55aafc..7d2174196c9 100644
--- a/spec/javascripts/repo/components/repo_tab_spec.js
+++ b/spec/javascripts/repo/components/repo_tab_spec.js
@@ -24,8 +24,8 @@ describe('RepoTab', () => {
tab: file(),
});
vm.$store.state.openFiles.push(vm.tab);
- const close = vm.$el.querySelector('.close-btn');
- const name = vm.$el.querySelector(`a[title="${vm.tab.url}"]`);
+ const close = vm.$el.querySelector('.multi-file-tab-close');
+ const name = vm.$el.querySelector(`[title="${vm.tab.url}"]`);
expect(close.querySelector('.fa-times')).toBeTruthy();
expect(name.textContent.trim()).toEqual(vm.tab.name);
@@ -50,7 +50,7 @@ describe('RepoTab', () => {
spyOn(vm, 'closeFile');
- vm.$el.querySelector('.close-btn').click();
+ vm.$el.querySelector('.multi-file-tab-close').click();
expect(vm.closeFile).toHaveBeenCalledWith({ file: vm.tab });
});
@@ -62,7 +62,7 @@ describe('RepoTab', () => {
tab,
});
- expect(vm.$el.querySelector('.close-btn .fa-circle')).toBeTruthy();
+ expect(vm.$el.querySelector('.multi-file-tab-close .fa-circle')).not.toBeNull();
});
describe('methods', () => {
@@ -77,7 +77,7 @@ describe('RepoTab', () => {
vm.$store.state.openFiles.push(tab);
vm.$store.dispatch('setFileActive', tab);
- vm.$el.querySelector('.close-btn').click();
+ vm.$el.querySelector('.multi-file-tab-close').click();
vm.$nextTick(() => {
expect(tab.opened).toBeTruthy();
@@ -95,7 +95,7 @@ describe('RepoTab', () => {
vm.$store.state.openFiles.push(tab);
vm.$store.dispatch('setFileActive', tab);
- vm.$el.querySelector('.close-btn').click();
+ vm.$el.querySelector('.multi-file-tab-close').click();
vm.$nextTick(() => {
expect(tab.opened).toBeFalsy();
diff --git a/spec/javascripts/repo/components/repo_tabs_spec.js b/spec/javascripts/repo/components/repo_tabs_spec.js
index d0246cc72e6..1fb2242c051 100644
--- a/spec/javascripts/repo/components/repo_tabs_spec.js
+++ b/spec/javascripts/repo/components/repo_tabs_spec.js
@@ -25,12 +25,11 @@ describe('RepoTabs', () => {
vm.$store.state.openFiles = openedFiles;
vm.$nextTick(() => {
- const tabs = [...vm.$el.querySelectorAll(':scope > li')];
+ const tabs = [...vm.$el.querySelectorAll('.multi-file-tab')];
- expect(tabs.length).toEqual(3);
+ expect(tabs.length).toEqual(2);
expect(tabs[0].classList.contains('active')).toBeTruthy();
expect(tabs[1].classList.contains('active')).toBeFalsy();
- expect(tabs[2].classList.contains('tabs-divider')).toBeTruthy();
done();
});
diff --git a/spec/javascripts/repo/helpers.js b/spec/javascripts/repo/helpers.js
index 376c291c64b..820a44992b4 100644
--- a/spec/javascripts/repo/helpers.js
+++ b/spec/javascripts/repo/helpers.js
@@ -12,9 +12,4 @@ export const file = (name = 'name', id = name, type = '') => decorateData({
url: 'url',
name,
path: name,
- last_commit: {
- id: '123',
- message: 'test',
- committed_date: new Date().toISOString(),
- },
});
diff --git a/spec/javascripts/repo/lib/common/disposable_spec.js b/spec/javascripts/repo/lib/common/disposable_spec.js
new file mode 100644
index 00000000000..62c3913bf4d
--- /dev/null
+++ b/spec/javascripts/repo/lib/common/disposable_spec.js
@@ -0,0 +1,44 @@
+import Disposable from '~/repo/lib/common/disposable';
+
+describe('Multi-file editor library disposable class', () => {
+ let instance;
+ let disposableClass;
+
+ beforeEach(() => {
+ instance = new Disposable();
+
+ disposableClass = {
+ dispose: jasmine.createSpy('dispose'),
+ };
+ });
+
+ afterEach(() => {
+ instance.dispose();
+ });
+
+ describe('add', () => {
+ it('adds disposable classes', () => {
+ instance.add(disposableClass);
+
+ expect(instance.disposers.size).toBe(1);
+ });
+ });
+
+ describe('dispose', () => {
+ beforeEach(() => {
+ instance.add(disposableClass);
+ });
+
+ it('calls dispose on all cached disposers', () => {
+ instance.dispose();
+
+ expect(disposableClass.dispose).toHaveBeenCalled();
+ });
+
+ it('clears cached disposers', () => {
+ instance.dispose();
+
+ expect(instance.disposers.size).toBe(0);
+ });
+ });
+});
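
The spec above pins down the whole Disposable contract: collect objects that expose dispose(), dispose them all at once, then forget them. A sketch consistent with those assertions (the `disposers` Set is asserted directly), not copied from ~/repo/lib/common/disposable:

export default class Disposable {
  constructor() {
    this.disposers = new Set();
  }

  add(...disposers) {
    disposers.forEach(disposer => this.disposers.add(disposer));
  }

  dispose() {
    this.disposers.forEach(disposer => disposer.dispose());
    this.disposers.clear();
  }
}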
diff --git a/spec/javascripts/repo/lib/common/model_manager_spec.js b/spec/javascripts/repo/lib/common/model_manager_spec.js
new file mode 100644
index 00000000000..8c134f178c0
--- /dev/null
+++ b/spec/javascripts/repo/lib/common/model_manager_spec.js
@@ -0,0 +1,81 @@
+/* global monaco */
+import monacoLoader from '~/repo/monaco_loader';
+import ModelManager from '~/repo/lib/common/model_manager';
+import { file } from '../../helpers';
+
+describe('Multi-file editor library model manager', () => {
+ let instance;
+
+ beforeEach((done) => {
+ monacoLoader(['vs/editor/editor.main'], () => {
+ instance = new ModelManager(monaco);
+
+ done();
+ });
+ });
+
+ afterEach(() => {
+ instance.dispose();
+ });
+
+ describe('addModel', () => {
+ it('caches model', () => {
+ instance.addModel(file());
+
+ expect(instance.models.size).toBe(1);
+ });
+
+ it('caches model by file path', () => {
+ instance.addModel(file('path-name'));
+
+ expect(instance.models.keys().next().value).toBe('path-name');
+ });
+
+ it('adds model into disposable', () => {
+ spyOn(instance.disposable, 'add').and.callThrough();
+
+ instance.addModel(file());
+
+ expect(instance.disposable.add).toHaveBeenCalled();
+ });
+
+ it('returns cached model', () => {
+ spyOn(instance.models, 'get').and.callThrough();
+
+ instance.addModel(file());
+ instance.addModel(file());
+
+ expect(instance.models.get).toHaveBeenCalled();
+ });
+ });
+
+ describe('hasCachedModel', () => {
+ it('returns false when no models exist', () => {
+ expect(instance.hasCachedModel('path')).toBeFalsy();
+ });
+
+ it('returns true when model exists', () => {
+ instance.addModel(file('path-name'));
+
+ expect(instance.hasCachedModel('path-name')).toBeTruthy();
+ });
+ });
+
+ describe('dispose', () => {
+ it('clears cached models', () => {
+ instance.addModel(file());
+
+ instance.dispose();
+
+ expect(instance.models.size).toBe(0);
+ });
+
+ it('calls disposable dispose', () => {
+ spyOn(instance.disposable, 'dispose').and.callThrough();
+
+ instance.dispose();
+
+ expect(instance.disposable.dispose).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/javascripts/repo/lib/common/model_spec.js b/spec/javascripts/repo/lib/common/model_spec.js
new file mode 100644
index 00000000000..d41ade237ca
--- /dev/null
+++ b/spec/javascripts/repo/lib/common/model_spec.js
@@ -0,0 +1,84 @@
+/* global monaco */
+import monacoLoader from '~/repo/monaco_loader';
+import Model from '~/repo/lib/common/model';
+import { file } from '../../helpers';
+
+describe('Multi-file editor library model', () => {
+ let model;
+
+ beforeEach((done) => {
+ monacoLoader(['vs/editor/editor.main'], () => {
+ model = new Model(monaco, file('path'));
+
+ done();
+ });
+ });
+
+ afterEach(() => {
+ model.dispose();
+ });
+
+ it('creates original model & new model', () => {
+ expect(model.originalModel).not.toBeNull();
+ expect(model.model).not.toBeNull();
+ });
+
+ describe('path', () => {
+ it('returns file path', () => {
+ expect(model.path).toBe('path');
+ });
+ });
+
+ describe('getModel', () => {
+ it('returns model', () => {
+ expect(model.getModel()).toBe(model.model);
+ });
+ });
+
+ describe('getOriginalModel', () => {
+ it('returns original model', () => {
+ expect(model.getOriginalModel()).toBe(model.originalModel);
+ });
+ });
+
+ describe('onChange', () => {
+ it('caches event by path', () => {
+ model.onChange(() => {});
+
+ expect(model.events.size).toBe(1);
+ expect(model.events.keys().next().value).toBe('path');
+ });
+
+ it('calls callback on change', (done) => {
+ const spy = jasmine.createSpy();
+ model.onChange(spy);
+
+ model.getModel().setValue('123');
+
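+ // the change event fires asynchronously, so assert on the next tick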
+ setTimeout(() => {
+ expect(spy).toHaveBeenCalledWith(model.getModel(), jasmine.anything());
+ done();
+ });
+ });
+ });
+
+ describe('dispose', () => {
+ it('calls disposable dispose', () => {
+ spyOn(model.disposable, 'dispose').and.callThrough();
+
+ model.dispose();
+
+ expect(model.disposable.dispose).toHaveBeenCalled();
+ });
+
+ it('clears events', () => {
+ model.onChange(() => {});
+
+ expect(model.events.size).toBe(1);
+
+ model.dispose();
+
+ expect(model.events.size).toBe(0);
+ });
+ });
+});
diff --git a/spec/javascripts/repo/lib/decorations/controller_spec.js b/spec/javascripts/repo/lib/decorations/controller_spec.js
new file mode 100644
index 00000000000..2e32e8fa0bd
--- /dev/null
+++ b/spec/javascripts/repo/lib/decorations/controller_spec.js
@@ -0,0 +1,120 @@
+/* global monaco */
+import monacoLoader from '~/repo/monaco_loader';
+import editor from '~/repo/lib/editor';
+import DecorationsController from '~/repo/lib/decorations/controller';
+import Model from '~/repo/lib/common/model';
+import { file } from '../../helpers';
+
+describe('Multi-file editor library decorations controller', () => {
+ let editorInstance;
+ let controller;
+ let model;
+
+ beforeEach((done) => {
+ monacoLoader(['vs/editor/editor.main'], () => {
+ editorInstance = editor.create(monaco);
+ editorInstance.createInstance(document.createElement('div'));
+
+ controller = new DecorationsController(editorInstance);
+ model = new Model(monaco, file('path'));
+
+ done();
+ });
+ });
+
+ afterEach(() => {
+ model.dispose();
+ editorInstance.dispose();
+ controller.dispose();
+ });
+
+ describe('getAllDecorationsForModel', () => {
+ it('returns empty array when no decorations exist for model', () => {
+ const decorations = controller.getAllDecorationsForModel(model);
+
+ expect(decorations).toEqual([]);
+ });
+
+ it('returns decorations by model URL', () => {
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
+
+ const decorations = controller.getAllDecorationsForModel(model);
+
+ expect(decorations[0]).toEqual({ decoration: 'decorationValue' });
+ });
+ });
+
+ describe('addDecorations', () => {
+ it('caches decorations in a new map', () => {
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
+
+ expect(controller.decorations.size).toBe(1);
+ });
+
+ it('does not create a new cache entry for the same model', () => {
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue2' }]);
+
+ expect(controller.decorations.size).toBe(1);
+ });
+
+ it('caches decorations by model URL', () => {
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
+
+ expect(controller.decorations.size).toBe(1);
+ expect(controller.decorations.keys().next().value).toBe('path');
+ });
+
+ it('calls decorate method', () => {
+ spyOn(controller, 'decorate');
+
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
+
+ expect(controller.decorate).toHaveBeenCalled();
+ });
+ });
+
+ describe('decorate', () => {
+ it('sets decorations on editor instance', () => {
+ spyOn(controller.editor.instance, 'deltaDecorations');
+
+ controller.decorate(model);
+
+ expect(controller.editor.instance.deltaDecorations).toHaveBeenCalledWith([], []);
+ });
+
+ it('caches decorations', () => {
+ spyOn(controller.editor.instance, 'deltaDecorations').and.returnValue([]);
+
+ controller.decorate(model);
+
+ expect(controller.editorDecorations.size).toBe(1);
+ });
+
+ it('caches decorations by model URL', () => {
+ spyOn(controller.editor.instance, 'deltaDecorations').and.returnValue([]);
+
+ controller.decorate(model);
+
+ expect(controller.editorDecorations.keys().next().value).toBe('path');
+ });
+ });
+
+ describe('dispose', () => {
+ it('clears cached decorations', () => {
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
+
+ controller.dispose();
+
+ expect(controller.decorations.size).toBe(0);
+ });
+
+ it('clears cached editorDecorations', () => {
+ controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
+
+ controller.dispose();
+
+ expect(controller.editorDecorations.size).toBe(0);
+ });
+ });
+});
diff --git a/spec/javascripts/repo/lib/diff/controller_spec.js b/spec/javascripts/repo/lib/diff/controller_spec.js
new file mode 100644
index 00000000000..ed62e28d3a3
--- /dev/null
+++ b/spec/javascripts/repo/lib/diff/controller_spec.js
@@ -0,0 +1,176 @@
+/* global monaco */
+import monacoLoader from '~/repo/monaco_loader';
+import editor from '~/repo/lib/editor';
+import ModelManager from '~/repo/lib/common/model_manager';
+import DecorationsController from '~/repo/lib/decorations/controller';
+import DirtyDiffController, { getDiffChangeType, getDecorator } from '~/repo/lib/diff/controller';
+import { computeDiff } from '~/repo/lib/diff/diff';
+import { file } from '../../helpers';
+
+describe('Multi-file editor library dirty diff controller', () => {
+ let editorInstance;
+ let controller;
+ let modelManager;
+ let decorationsController;
+ let model;
+
+ beforeEach((done) => {
+ monacoLoader(['vs/editor/editor.main'], () => {
+ editorInstance = editor.create(monaco);
+ editorInstance.createInstance(document.createElement('div'));
+
+ modelManager = new ModelManager(monaco);
+ decorationsController = new DecorationsController(editorInstance);
+
+ model = modelManager.addModel(file());
+
+ controller = new DirtyDiffController(modelManager, decorationsController);
+
+ done();
+ });
+ });
+
+ afterEach(() => {
+ controller.dispose();
+ model.dispose();
+ decorationsController.dispose();
+ editorInstance.dispose();
+ });
+
+ describe('getDiffChangeType', () => {
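+ // each change object carries a boolean flag named after its change type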
+ ['added', 'removed', 'modified'].forEach((type) => {
+ it(`returns ${type}`, () => {
+ const change = {
+ [type]: true,
+ };
+
+ expect(getDiffChangeType(change)).toBe(type);
+ });
+ });
+ });
+
+ describe('getDecorator', () => {
+ ['added', 'removed', 'modified'].forEach((type) => {
+ it(`returns with linesDecorationsClassName for ${type}`, () => {
+ const change = {
+ [type]: true,
+ };
+
+ expect(
+ getDecorator(change).options.linesDecorationsClassName,
+ ).toBe(`dirty-diff dirty-diff-${type}`);
+ });
+
+ it(`returns with line numbers for ${type}`, () => {
+ const change = {
+ lineNumber: 1,
+ endLineNumber: 2,
+ [type]: true,
+ };
+
+ const range = getDecorator(change).range;
+
+ expect(range.startLineNumber).toBe(1);
+ expect(range.endLineNumber).toBe(2);
+ expect(range.startColumn).toBe(1);
+ expect(range.endColumn).toBe(1);
+ });
+ });
+ });
+
+ describe('attachModel', () => {
+ it('adds change event callback', () => {
+ spyOn(model, 'onChange');
+
+ controller.attachModel(model);
+
+ expect(model.onChange).toHaveBeenCalled();
+ });
+
+ it('calls throttledComputeDiff on change', () => {
+ spyOn(controller, 'throttledComputeDiff');
+
+ controller.attachModel(model);
+
+ model.getModel().setValue('123');
+
+ expect(controller.throttledComputeDiff).toHaveBeenCalled();
+ });
+ });
+
+ describe('computeDiff', () => {
+ it('posts to worker', () => {
+ spyOn(controller.dirtyDiffWorker, 'postMessage');
+
+ controller.computeDiff(model);
+
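+ // a freshly created model has empty original and working content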
+ expect(controller.dirtyDiffWorker.postMessage).toHaveBeenCalledWith({
+ path: model.path,
+ originalContent: '',
+ newContent: '',
+ });
+ });
+ });
+
+ describe('reDecorate', () => {
+ it('calls decorations controller decorate', () => {
+ spyOn(controller.decorationsController, 'decorate');
+
+ controller.reDecorate(model);
+
+ expect(controller.decorationsController.decorate).toHaveBeenCalledWith(model);
+ });
+ });
+
+ describe('decorate', () => {
+ it('adds decorations into decorations controller', () => {
+ spyOn(controller.decorationsController, 'addDecorations');
+
+ controller.decorate({ data: { changes: [], path: 'path' } });
+
+ expect(controller.decorationsController.addDecorations).toHaveBeenCalledWith('path', 'dirtyDiff', jasmine.anything());
+ });
+
+ it('adds decorations into editor', () => {
+ const spy = spyOn(controller.decorationsController.editor.instance, 'deltaDecorations');
+
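+ // computeDiff('123', '1234') yields a single modified change on line 1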
+ controller.decorate({ data: { changes: computeDiff('123', '1234'), path: 'path' } });
+
+ expect(spy).toHaveBeenCalledWith([], [{
+ range: new monaco.Range(
+ 1, 1, 1, 1,
+ ),
+ options: {
+ isWholeLine: true,
+ linesDecorationsClassName: 'dirty-diff dirty-diff-modified',
+ },
+ }]);
+ });
+ });
+
+ describe('dispose', () => {
+ it('calls disposable dispose', () => {
+ spyOn(controller.disposable, 'dispose').and.callThrough();
+
+ controller.dispose();
+
+ expect(controller.disposable.dispose).toHaveBeenCalled();
+ });
+
+ it('terminates worker', () => {
+ spyOn(controller.dirtyDiffWorker, 'terminate').and.callThrough();
+
+ controller.dispose();
+
+ expect(controller.dirtyDiffWorker.terminate).toHaveBeenCalled();
+ });
+
+ it('removes worker event listener', () => {
+ spyOn(controller.dirtyDiffWorker, 'removeEventListener').and.callThrough();
+
+ controller.dispose();
+
+ expect(controller.dirtyDiffWorker.removeEventListener).toHaveBeenCalledWith('message', jasmine.anything());
+ });
+ });
+});
diff --git a/spec/javascripts/repo/lib/diff/diff_spec.js b/spec/javascripts/repo/lib/diff/diff_spec.js
new file mode 100644
index 00000000000..3269ec5d2c9
--- /dev/null
+++ b/spec/javascripts/repo/lib/diff/diff_spec.js
@@ -0,0 +1,80 @@
+import { computeDiff } from '~/repo/lib/diff/diff';
+
+describe('Multi-file editor library diff calculator', () => {
+ describe('computeDiff', () => {
+ it('returns empty array if no changes', () => {
+ const diff = computeDiff('123', '123');
+
+ expect(diff).toEqual([]);
+ });
+
+ describe('modified', () => {
+ it('detects a single-line modification', () => {
+ const diff = computeDiff('123', '1234')[0];
+
+ expect(diff.added).toBeTruthy();
+ expect(diff.modified).toBeTruthy();
+ expect(diff.removed).toBeUndefined();
+ });
+
+ it('detects a modified line in a multi-line comparison', () => {
+ const diff = computeDiff('123\n123\n123', '123\n1234\n123')[0];
+
+ expect(diff.added).toBeTruthy();
+ expect(diff.modified).toBeTruthy();
+ expect(diff.removed).toBeUndefined();
+ expect(diff.lineNumber).toBe(2);
+ });
+ });
+
+ describe('added', () => {
+ it('detects an added line', () => {
+ const diff = computeDiff('123', '123\n123')[0];
+
+ expect(diff.added).toBeTruthy();
+ expect(diff.modified).toBeUndefined();
+ expect(diff.removed).toBeUndefined();
+ });
+
+ it('detects an added line in a multi-line comparison', () => {
+ const diff = computeDiff('123\n123\n123', '123\n123\n1234\n123')[0];
+
+ expect(diff.added).toBeTruthy();
+ expect(diff.modified).toBeUndefined();
+ expect(diff.removed).toBeUndefined();
+ expect(diff.lineNumber).toBe(3);
+ });
+ });
+
+ describe('removed', () => {
+ it('detects a removed line', () => {
+ const diff = computeDiff('123', '')[0];
+
+ expect(diff.added).toBeUndefined();
+ expect(diff.modified).toBeUndefined();
+ expect(diff.removed).toBeTruthy();
+ });
+
+ it('detects a removed line in a multi-line comparison', () => {
+ const diff = computeDiff('123\n123\n123', '123\n123')[0];
+
+ expect(diff.added).toBeUndefined();
+ expect(diff.modified).toBeTruthy();
+ expect(diff.removed).toBeTruthy();
+ expect(diff.lineNumber).toBe(2);
+ });
+ });
+
+ it('includes line number of change', () => {
+ const diff = computeDiff('123', '')[0];
+
+ expect(diff.lineNumber).toBe(1);
+ });
+
+ it('includes end line number of change', () => {
+ const diff = computeDiff('123', '')[0];
+
+ expect(diff.endLineNumber).toBe(1);
+ });
+ });
+});
diff --git a/spec/javascripts/repo/lib/editor_options_spec.js b/spec/javascripts/repo/lib/editor_options_spec.js
new file mode 100644
index 00000000000..b4887d063ed
--- /dev/null
+++ b/spec/javascripts/repo/lib/editor_options_spec.js
@@ -0,0 +1,7 @@
+import editorOptions from '~/repo/lib/editor_options';
+
+describe('Multi-file editor library editor options', () => {
+ it('returns an array', () => {
+ expect(editorOptions).toEqual(jasmine.any(Array));
+ });
+});
diff --git a/spec/javascripts/repo/lib/editor_spec.js b/spec/javascripts/repo/lib/editor_spec.js
new file mode 100644
index 00000000000..cd32832a232
--- /dev/null
+++ b/spec/javascripts/repo/lib/editor_spec.js
@@ -0,0 +1,128 @@
+/* global monaco */
+import monacoLoader from '~/repo/monaco_loader';
+import editor from '~/repo/lib/editor';
+import { file } from '../helpers';
+
+describe('Multi-file editor library', () => {
+ let instance;
+
+ beforeEach((done) => {
+ monacoLoader(['vs/editor/editor.main'], () => {
+ instance = editor.create(monaco);
+
+ done();
+ });
+ });
+
+ afterEach(() => {
+ instance.dispose();
+ });
+
+ it('creates instance of editor', () => {
+ expect(editor.editorInstance).not.toBeNull();
+ });
+
+ describe('createInstance', () => {
+ let el;
+
+ beforeEach(() => {
+ el = document.createElement('div');
+ });
+
+ it('creates editor instance', () => {
+ spyOn(instance.monaco.editor, 'create').and.callThrough();
+
+ instance.createInstance(el);
+
+ expect(instance.monaco.editor.create).toHaveBeenCalled();
+ });
+
+ it('creates dirty diff controller', () => {
+ instance.createInstance(el);
+
+ expect(instance.dirtyDiffController).not.toBeNull();
+ });
+ });
+
+ describe('createModel', () => {
+ it('calls model manager addModel', () => {
+ spyOn(instance.modelManager, 'addModel');
+
+ instance.createModel('FILE');
+
+ expect(instance.modelManager.addModel).toHaveBeenCalledWith('FILE');
+ });
+ });
+
+ describe('attachModel', () => {
+ let model;
+
+ beforeEach(() => {
+ instance.createInstance(document.createElement('div'));
+
+ model = instance.createModel(file());
+ });
+
+ it('sets the current model on the instance', () => {
+ instance.attachModel(model);
+
+ expect(instance.currentModel).toBe(model);
+ });
+
+ it('attaches the model to the current instance', () => {
+ spyOn(instance.instance, 'setModel');
+
+ instance.attachModel(model);
+
+ expect(instance.instance.setModel).toHaveBeenCalledWith(model.getModel());
+ });
+
+ it('attaches the model to the dirty diff controller', () => {
+ spyOn(instance.dirtyDiffController, 'attachModel');
+
+ instance.attachModel(model);
+
+ expect(instance.dirtyDiffController.attachModel).toHaveBeenCalledWith(model);
+ });
+
+ it('re-decorates with the dirty diff controller', () => {
+ spyOn(instance.dirtyDiffController, 'reDecorate');
+
+ instance.attachModel(model);
+
+ expect(instance.dirtyDiffController.reDecorate).toHaveBeenCalledWith(model);
+ });
+ });
+
+ describe('clearEditor', () => {
+ it('resets the editor model', () => {
+ instance.createInstance(document.createElement('div'));
+
+ spyOn(instance.instance, 'setModel');
+
+ instance.clearEditor();
+
+ expect(instance.instance.setModel).toHaveBeenCalledWith(null);
+ });
+ });
+
+ describe('dispose', () => {
+ it('calls disposable dispose method', () => {
+ spyOn(instance.disposable, 'dispose').and.callThrough();
+
+ instance.dispose();
+
+ expect(instance.disposable.dispose).toHaveBeenCalled();
+ });
+
+ it('resets instance', () => {
+ instance.createInstance(document.createElement('div'));
+
+ expect(instance.instance).not.toBeNull();
+
+ instance.dispose();
+
+ expect(instance.instance).toBeNull();
+ });
+ });
+});
diff --git a/spec/javascripts/repo/stores/actions/branch_spec.js b/spec/javascripts/repo/stores/actions/branch_spec.js
new file mode 100644
index 00000000000..af9d6835a67
--- /dev/null
+++ b/spec/javascripts/repo/stores/actions/branch_spec.js
@@ -0,0 +1,38 @@
+import store from '~/repo/stores';
+import service from '~/repo/services';
+import { resetStore } from '../../helpers';
+
+describe('Multi-file store branch actions', () => {
+ afterEach(() => {
+ resetStore(store);
+ });
+
+ describe('createNewBranch', () => {
+ beforeEach(() => {
+ spyOn(service, 'createBranch').and.returnValue(Promise.resolve({
+ json: () => ({
+ name: 'testing',
+ }),
+ }));
+ spyOn(history, 'pushState');
+
+ store.state.project.id = 2;
+ store.state.currentBranch = 'testing';
+ });
+
+ it('creates new branch', (done) => {
+ store.dispatch('createNewBranch', 'master')
+ .then(() => {
+ expect(store.state.currentBranch).toBe('testing');
+ expect(service.createBranch).toHaveBeenCalledWith(2, {
+ branch: 'master',
+ ref: 'testing',
+ });
+ expect(history.pushState).toHaveBeenCalled();
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+});
diff --git a/spec/javascripts/repo/stores/actions/file_spec.js b/spec/javascripts/repo/stores/actions/file_spec.js
new file mode 100644
index 00000000000..099c0556e71
--- /dev/null
+++ b/spec/javascripts/repo/stores/actions/file_spec.js
@@ -0,0 +1,417 @@
+import Vue from 'vue';
+import store from '~/repo/stores';
+import service from '~/repo/services';
+import { file, resetStore } from '../../helpers';
+
+describe('Multi-file store file actions', () => {
+ afterEach(() => {
+ resetStore(store);
+ });
+
+ describe('closeFile', () => {
+ let localFile;
+ let getLastCommitDataSpy;
+ let oldGetLastCommitData;
+
+ beforeEach(() => {
+ getLastCommitDataSpy = jasmine.createSpy('getLastCommitData');
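+ // Vuex wraps each action in an array of handlers, so stub it by swapping the array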
+ oldGetLastCommitData = store._actions.getLastCommitData; // eslint-disable-line
+ store._actions.getLastCommitData = [getLastCommitDataSpy]; // eslint-disable-line
+
+ localFile = file();
+ localFile.active = true;
+ localFile.opened = true;
+ localFile.parentTreeUrl = 'parentTreeUrl';
+
+ store.state.openFiles.push(localFile);
+
+ spyOn(history, 'pushState');
+ });
+
+ afterEach(() => {
+ store._actions.getLastCommitData = oldGetLastCommitData; // eslint-disable-line
+ });
+
+ it('closes open files', (done) => {
+ store.dispatch('closeFile', { file: localFile })
+ .then(() => {
+ expect(localFile.opened).toBeFalsy();
+ expect(localFile.active).toBeFalsy();
+ expect(store.state.openFiles.length).toBe(0);
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('does not close file if has changed', (done) => {
+ localFile.changed = true;
+
+ store.dispatch('closeFile', { file: localFile })
+ .then(() => {
+ expect(localFile.opened).toBeTruthy();
+ expect(localFile.active).toBeTruthy();
+ expect(store.state.openFiles.length).toBe(1);
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('does not close file if temp file', (done) => {
+ localFile.tempFile = true;
+
+ store.dispatch('closeFile', { file: localFile })
+ .then(() => {
+ expect(localFile.opened).toBeTruthy();
+ expect(localFile.active).toBeTruthy();
+ expect(store.state.openFiles.length).toBe(1);
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('force closes a changed file', (done) => {
+ localFile.changed = true;
+
+ store.dispatch('closeFile', { file: localFile, force: true })
+ .then(() => {
+ expect(localFile.opened).toBeFalsy();
+ expect(localFile.active).toBeFalsy();
+ expect(store.state.openFiles.length).toBe(0);
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('calls pushState when no open files are left', (done) => {
+ store.dispatch('closeFile', { file: localFile })
+ .then(() => {
+ expect(history.pushState).toHaveBeenCalledWith(jasmine.anything(), '', 'parentTreeUrl');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('sets next file as active', (done) => {
+ const f = file();
+ store.state.openFiles.push(f);
+
+ expect(f.active).toBeFalsy();
+
+ store.dispatch('closeFile', { file: localFile })
+ .then(() => {
+ expect(f.active).toBeTruthy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('calls getLastCommitData', (done) => {
+ store.dispatch('closeFile', { file: localFile })
+ .then(() => {
+ expect(getLastCommitDataSpy).toHaveBeenCalled();
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+
+ describe('setFileActive', () => {
+ let scrollToTabSpy;
+ let oldScrollToTab;
+
+ beforeEach(() => {
+ scrollToTabSpy = jasmine.createSpy('scrollToTab');
+ oldScrollToTab = store._actions.scrollToTab; // eslint-disable-line
+ store._actions.scrollToTab = [scrollToTabSpy]; // eslint-disable-line
+ });
+
+ afterEach(() => {
+ store._actions.scrollToTab = oldScrollToTab; // eslint-disable-line
+ });
+
+ it('calls scrollToTab', (done) => {
+ store.dispatch('setFileActive', file())
+ .then(() => {
+ expect(scrollToTabSpy).toHaveBeenCalled();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('sets the file active', (done) => {
+ const localFile = file();
+
+ store.dispatch('setFileActive', localFile)
+ .then(() => {
+ expect(localFile.active).toBeTruthy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('returns early if file is already active', (done) => {
+ const localFile = file();
+ localFile.active = true;
+
+ store.dispatch('setFileActive', localFile)
+ .then(() => {
+ expect(scrollToTabSpy).not.toHaveBeenCalled();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('sets current active file to not active', (done) => {
+ const localFile = file();
+ localFile.active = true;
+ store.state.openFiles.push(localFile);
+
+ store.dispatch('setFileActive', file())
+ .then(() => {
+ expect(localFile.active).toBeFalsy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('resets location.hash for line highlighting', (done) => {
+ location.hash = 'test';
+
+ store.dispatch('setFileActive', file())
+ .then(() => {
+ expect(location.hash).not.toBe('test');
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+
+ describe('getFileData', () => {
+ let localFile;
+
+ beforeEach(() => {
+ spyOn(service, 'getFileData').and.returnValue(Promise.resolve({
+ headers: {
+ 'page-title': 'testing getFileData',
+ },
+ json: () => Promise.resolve({
+ blame_path: 'blame_path',
+ commits_path: 'commits_path',
+ permalink: 'permalink',
+ raw_path: 'raw_path',
+ binary: false,
+ html: '123',
+ render_error: '',
+ }),
+ }));
+
+ localFile = file();
+ localFile.url = 'getFileDataURL';
+ });
+
+ it('calls the service', (done) => {
+ store.dispatch('getFileData', localFile)
+ .then(() => {
+ expect(service.getFileData).toHaveBeenCalledWith('getFileDataURL');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('sets the file data', (done) => {
+ store.dispatch('getFileData', localFile)
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(localFile.blamePath).toBe('blame_path');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('sets document title', (done) => {
+ store.dispatch('getFileData', localFile)
+ .then(() => {
+ expect(document.title).toBe('testing getFileData');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('sets the file as active', (done) => {
+ store.dispatch('getFileData', localFile)
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(localFile.active).toBeTruthy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('adds the file to open files', (done) => {
+ store.dispatch('getFileData', localFile)
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(store.state.openFiles.length).toBe(1);
+ expect(store.state.openFiles[0].name).toBe(localFile.name);
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('toggles the file loading', (done) => {
+ store.dispatch('getFileData', localFile)
+ .then(() => {
+ expect(localFile.loading).toBeTruthy();
+
+ return Vue.nextTick();
+ })
+ .then(() => {
+ expect(localFile.loading).toBeFalsy();
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+
+ describe('getRawFileData', () => {
+ let tmpFile;
+
+ beforeEach(() => {
+ spyOn(service, 'getRawFileData').and.returnValue(Promise.resolve('raw'));
+
+ tmpFile = file();
+ });
+
+ it('calls getRawFileData service method', (done) => {
+ store.dispatch('getRawFileData', tmpFile)
+ .then(() => {
+ expect(service.getRawFileData).toHaveBeenCalledWith(tmpFile);
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('updates file raw data', (done) => {
+ store.dispatch('getRawFileData', tmpFile)
+ .then(() => {
+ expect(tmpFile.raw).toBe('raw');
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+
+ describe('changeFileContent', () => {
+ let tmpFile;
+
+ beforeEach(() => {
+ tmpFile = file();
+ });
+
+ it('updates file content', (done) => {
+ store.dispatch('changeFileContent', {
+ file: tmpFile,
+ content: 'content',
+ })
+ .then(() => {
+ expect(tmpFile.content).toBe('content');
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+
+ describe('createTempFile', () => {
+ beforeEach(() => {
+ document.body.innerHTML += '<div class="flash-container"></div>';
+ });
+
+ afterEach(() => {
+ document.querySelector('.flash-container').remove();
+ });
+
+ it('creates temp file', (done) => {
+ store.dispatch('createTempFile', {
+ tree: store.state,
+ name: 'test',
+ }).then((f) => {
+ expect(f.tempFile).toBeTruthy();
+ expect(store.state.tree.length).toBe(1);
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('adds tmp file to open files', (done) => {
+ store.dispatch('createTempFile', {
+ tree: store.state,
+ name: 'test',
+ }).then((f) => {
+ expect(store.state.openFiles.length).toBe(1);
+ expect(store.state.openFiles[0].name).toBe(f.name);
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('sets tmp file as active', (done) => {
+ store.dispatch('createTempFile', {
+ tree: store.state,
+ name: 'test',
+ }).then((f) => {
+ expect(f.active).toBeTruthy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('enters edit mode if file is not base64', (done) => {
+ store.dispatch('createTempFile', {
+ tree: store.state,
+ name: 'test',
+ }).then(() => {
+ expect(store.state.editMode).toBeTruthy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('does not enter edit mode if file is base64', (done) => {
+ store.dispatch('createTempFile', {
+ tree: store.state,
+ name: 'test',
+ base64: true,
+ }).then(() => {
+ expect(store.state.editMode).toBeFalsy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('creates flash message if file already exists', (done) => {
+ store.state.tree.push(file('test', '1', 'blob'));
+
+ store.dispatch('createTempFile', {
+ tree: store.state,
+ name: 'test',
+ }).then(() => {
+ expect(document.querySelector('.flash-alert')).not.toBeNull();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('increases level of file', (done) => {
+ store.state.level = 1;
+
+ store.dispatch('createTempFile', {
+ tree: store.state,
+ name: 'test',
+ }).then((f) => {
+ expect(f.level).toBe(2);
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+});
diff --git a/spec/javascripts/repo/stores/actions/tree_spec.js b/spec/javascripts/repo/stores/actions/tree_spec.js
new file mode 100644
index 00000000000..393a797c6a3
--- /dev/null
+++ b/spec/javascripts/repo/stores/actions/tree_spec.js
@@ -0,0 +1,469 @@
+import Vue from 'vue';
+import store from '~/repo/stores';
+import service from '~/repo/services';
+import { file, resetStore } from '../../helpers';
+
+describe('Multi-file store tree actions', () => {
+ afterEach(() => {
+ resetStore(store);
+ });
+
+ describe('getTreeData', () => {
+ beforeEach(() => {
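+ // mock the tree endpoint response: page title header plus commit path, parent URL and entries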
+ spyOn(service, 'getTreeData').and.returnValue(Promise.resolve({
+ headers: {
+ 'page-title': 'test',
+ },
+ json: () => Promise.resolve({
+ last_commit_path: 'last_commit_path',
+ parent_tree_url: 'parent_tree_url',
+ path: '/',
+ trees: [{ name: 'tree' }],
+ blobs: [{ name: 'blob' }],
+ submodules: [{ name: 'submodule' }],
+ }),
+ }));
+ spyOn(history, 'pushState');
+
+ Object.assign(store.state.endpoints, {
+ rootEndpoint: 'rootEndpoint',
+ });
+ });
+
+ it('calls service getTreeData', (done) => {
+ store.dispatch('getTreeData')
+ .then(() => {
+ expect(service.getTreeData).toHaveBeenCalledWith('rootEndpoint');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('adds data into tree', (done) => {
+ store.dispatch('getTreeData')
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(store.state.tree.length).toBe(3);
+ expect(store.state.tree[0].type).toBe('tree');
+ expect(store.state.tree[1].type).toBe('submodule');
+ expect(store.state.tree[2].type).toBe('blob');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('sets parent tree URL', (done) => {
+ store.dispatch('getTreeData')
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(store.state.parentTreeUrl).toBe('parent_tree_url');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('sets last commit path', (done) => {
+ store.dispatch('getTreeData')
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(store.state.lastCommitPath).toBe('last_commit_path');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('sets root if not currently at root', (done) => {
+ store.state.isInitialRoot = false;
+
+ store.dispatch('getTreeData')
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(store.state.isInitialRoot).toBeTruthy();
+ expect(store.state.isRoot).toBeTruthy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('sets page title', (done) => {
+ store.dispatch('getTreeData')
+ .then(() => {
+ expect(document.title).toBe('test');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('toggles loading', (done) => {
+ store.dispatch('getTreeData')
+ .then(() => {
+ expect(store.state.loading).toBeTruthy();
+
+ return Vue.nextTick();
+ })
+ .then(() => {
+ expect(store.state.loading).toBeFalsy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('calls pushState with endpoint', (done) => {
+ store.dispatch('getTreeData')
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(history.pushState).toHaveBeenCalledWith(jasmine.anything(), '', 'rootEndpoint');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('calls getLastCommitData if prevLastCommitPath is not null', (done) => {
+ const getLastCommitDataSpy = jasmine.createSpy('getLastCommitData');
+ const oldGetLastCommitData = store._actions.getLastCommitData; // eslint-disable-line
+ store._actions.getLastCommitData = [getLastCommitDataSpy]; // eslint-disable-line
+ store.state.prevLastCommitPath = 'test';
+
+ store.dispatch('getTreeData')
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(getLastCommitDataSpy).toHaveBeenCalledWith(store.state);
+
+ store._actions.getLastCommitData = oldGetLastCommitData; // eslint-disable-line
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+
+ describe('toggleTreeOpen', () => {
+ let oldGetTreeData;
+ let getTreeDataSpy;
+ let tree;
+
+ beforeEach(() => {
+ getTreeDataSpy = jasmine.createSpy('getTreeData');
+
+ oldGetTreeData = store._actions.getTreeData; // eslint-disable-line
+ store._actions.getTreeData = [getTreeDataSpy]; // eslint-disable-line
+
+ tree = {
+ opened: false,
+ tree: [],
+ };
+ });
+
+ afterEach(() => {
+ store._actions.getTreeData = oldGetTreeData; // eslint-disable-line
+ });
+
+ it('toggles the tree open', (done) => {
+ store.dispatch('toggleTreeOpen', {
+ endpoint: 'test',
+ tree,
+ }).then(() => {
+ expect(tree.opened).toBeTruthy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('calls getTreeData if tree is closed', (done) => {
+ store.dispatch('toggleTreeOpen', {
+ endpoint: 'test',
+ tree,
+ }).then(() => {
+ expect(getTreeDataSpy).toHaveBeenCalledWith({
+ endpoint: 'test',
+ tree,
+ });
+ expect(store.state.previousUrl).toBe('test');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('resets entries tree', (done) => {
+ Object.assign(tree, {
+ opened: true,
+ tree: ['a'],
+ });
+
+ store.dispatch('toggleTreeOpen', {
+ endpoint: 'test',
+ tree,
+ }).then(() => {
+ expect(tree.tree.length).toBe(0);
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('pushes new state', (done) => {
+ spyOn(history, 'pushState');
+ Object.assign(tree, {
+ opened: true,
+ parentTreeUrl: 'testing',
+ });
+
+ store.dispatch('toggleTreeOpen', {
+ endpoint: 'test',
+ tree,
+ }).then(() => {
+ expect(history.pushState).toHaveBeenCalledWith(jasmine.anything(), '', 'testing');
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+
+ describe('clickedTreeRow', () => {
+ describe('tree', () => {
+ let toggleTreeOpenSpy;
+ let oldToggleTreeOpen;
+
+ beforeEach(() => {
+ toggleTreeOpenSpy = jasmine.createSpy('toggleTreeOpen');
+
+ oldToggleTreeOpen = store._actions.toggleTreeOpen; // eslint-disable-line
+ store._actions.toggleTreeOpen = [toggleTreeOpenSpy]; // eslint-disable-line
+ });
+
+ afterEach(() => {
+ store._actions.toggleTreeOpen = oldToggleTreeOpen; // eslint-disable-line
+ });
+
+ it('opens tree', (done) => {
+ const tree = {
+ url: 'a',
+ type: 'tree',
+ };
+
+ store.dispatch('clickedTreeRow', tree)
+ .then(() => {
+ expect(toggleTreeOpenSpy).toHaveBeenCalledWith({
+ endpoint: tree.url,
+ tree,
+ });
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+
+ describe('submodule', () => {
+ let row;
+
+ beforeEach(() => {
+ spyOn(gl.utils, 'visitUrl');
+
+ row = {
+ url: 'submoduleurl',
+ type: 'submodule',
+ loading: false,
+ };
+ });
+
+ it('toggles loading for row', (done) => {
+ store.dispatch('clickedTreeRow', row)
+ .then(() => {
+ expect(row.loading).toBeTruthy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('opens submodule URL', (done) => {
+ store.dispatch('clickedTreeRow', row)
+ .then(() => {
+ expect(gl.utils.visitUrl).toHaveBeenCalledWith('submoduleurl');
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+
+ describe('blob', () => {
+ let row;
+
+ beforeEach(() => {
+ row = {
+ type: 'blob',
+ opened: false,
+ };
+ });
+
+ it('calls getFileData', (done) => {
+ const getFileDataSpy = jasmine.createSpy('getFileData');
+ const oldGetFileData = store._actions.getFileData; // eslint-disable-line
+ store._actions.getFileData = [getFileDataSpy]; // eslint-disable-line
+
+ store.dispatch('clickedTreeRow', row)
+ .then(() => {
+ expect(getFileDataSpy).toHaveBeenCalledWith(row);
+
+ store._actions.getFileData = oldGetFileData; // eslint-disable-line
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('calls setFileActive when file is opened', (done) => {
+ const setFileActiveSpy = jasmine.createSpy('setFileActive');
+ const oldSetFileActive = store._actions.setFileActive; // eslint-disable-line
+ store._actions.setFileActive = [setFileActiveSpy]; // eslint-disable-line
+
+ row.opened = true;
+
+ store.dispatch('clickedTreeRow', row)
+ .then(() => {
+ expect(setFileActiveSpy).toHaveBeenCalledWith(row);
+
+ store._actions.setFileActive = oldSetFileActive; // eslint-disable-line
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+ });
+
+ describe('createTempTree', () => {
+ it('creates temp tree', (done) => {
+ store.dispatch('createTempTree', 'test')
+ .then(() => {
+ expect(store.state.tree[0].tempFile).toBeTruthy();
+ expect(store.state.tree[0].name).toBe('test');
+ expect(store.state.tree[0].type).toBe('tree');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('creates .gitkeep file in temp tree', (done) => {
+ store.dispatch('createTempTree', 'test')
+ .then(() => {
+ expect(store.state.tree[0].tree[0].tempFile).toBeTruthy();
+ expect(store.state.tree[0].tree[0].name).toBe('.gitkeep');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('creates new folder inside another tree', (done) => {
+ const tree = {
+ type: 'tree',
+ name: 'testing',
+ tree: [],
+ };
+
+ store.state.tree.push(tree);
+
+ store.dispatch('createTempTree', 'testing/test')
+ .then(() => {
+ expect(store.state.tree[0].name).toBe('testing');
+ expect(store.state.tree[0].tree[0].tempFile).toBeTruthy();
+ expect(store.state.tree[0].tree[0].name).toBe('test');
+ expect(store.state.tree[0].tree[0].type).toBe('tree');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('does not create a new tree if one already exists', (done) => {
+ const tree = {
+ type: 'tree',
+ name: 'testing',
+ tree: [],
+ };
+
+ store.state.tree.push(tree);
+
+ store.dispatch('createTempTree', 'testing/test')
+ .then(() => {
+ expect(store.state.tree[0].name).toBe('testing');
+ expect(store.state.tree[0].tempFile).toBeUndefined();
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+
+ describe('getLastCommitData', () => {
+ beforeEach(() => {
+ spyOn(service, 'getTreeLastCommit').and.returnValue(Promise.resolve({
+ headers: {
+ 'more-logs-url': null,
+ },
+ json: () => Promise.resolve([{
+ type: 'tree',
+ file_name: 'testing',
+ commit: {
+ message: 'commit message',
+ authored_date: '123',
+ },
+ }]),
+ }));
+
+ store.state.tree.push(file('testing', '1', 'tree'));
+ store.state.lastCommitPath = 'lastcommitpath';
+ });
+
+ it('calls service with lastCommitPath', (done) => {
+ store.dispatch('getLastCommitData')
+ .then(() => {
+ expect(service.getTreeLastCommit).toHaveBeenCalledWith('lastcommitpath');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('updates trees last commit data', (done) => {
+ store.dispatch('getLastCommitData')
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(store.state.tree[0].lastCommit.message).toBe('commit message');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('does not update entry if not found', (done) => {
+ store.state.tree[0].name = 'a';
+
+ store.dispatch('getLastCommitData')
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(store.state.tree[0].lastCommit.message).not.toBe('commit message');
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+
+ describe('updateDirectoryData', () => {
+ it('adds data into tree', (done) => {
+ const tree = {
+ tree: [],
+ };
+ const data = {
+ trees: [{ name: 'tree' }],
+ submodules: [{ name: 'submodule' }],
+ blobs: [{ name: 'blob' }],
+ };
+
+ store.dispatch('updateDirectoryData', {
+ data,
+ tree,
+ }).then(() => {
+ expect(tree.tree[0].name).toBe('tree');
+ expect(tree.tree[0].type).toBe('tree');
+ expect(tree.tree[1].name).toBe('submodule');
+ expect(tree.tree[1].type).toBe('submodule');
+ expect(tree.tree[2].name).toBe('blob');
+ expect(tree.tree[2].type).toBe('blob');
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+});
diff --git a/spec/javascripts/repo/stores/actions_spec.js b/spec/javascripts/repo/stores/actions_spec.js
new file mode 100644
index 00000000000..f2a7a698912
--- /dev/null
+++ b/spec/javascripts/repo/stores/actions_spec.js
@@ -0,0 +1,419 @@
+import Vue from 'vue';
+import store from '~/repo/stores';
+import service from '~/repo/services';
+import { resetStore, file } from '../helpers';
+
+describe('Multi-file store actions', () => {
+ afterEach(() => {
+ resetStore(store);
+ });
+
+ describe('redirectToUrl', () => {
+ it('calls visitUrl', (done) => {
+ spyOn(gl.utils, 'visitUrl');
+
+ store.dispatch('redirectToUrl', 'test')
+ .then(() => {
+ expect(gl.utils.visitUrl).toHaveBeenCalledWith('test');
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('setInitialData', () => {
+ it('commits initial data', (done) => {
+ store.dispatch('setInitialData', { canCommit: true })
+ .then(() => {
+ expect(store.state.canCommit).toBeTruthy();
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('closeDiscardPopup', () => {
+ it('closes the discard popup', (done) => {
+ store.dispatch('closeDiscardPopup', false)
+ .then(() => {
+ expect(store.state.discardPopupOpen).toBeFalsy();
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('discardAllChanges', () => {
+ beforeEach(() => {
+ store.state.openFiles.push(file());
+ store.state.openFiles[0].changed = true;
+ });
+ });
+
+ describe('closeAllFiles', () => {
+ beforeEach(() => {
+ store.state.openFiles.push(file());
+ store.state.openFiles[0].opened = true;
+ });
+
+ it('closes all open files', (done) => {
+ store.dispatch('closeAllFiles')
+ .then(() => {
+ expect(store.state.openFiles.length).toBe(0);
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('toggleEditMode', () => {
+ it('toggles edit mode', (done) => {
+ store.state.editMode = true;
+
+ store.dispatch('toggleEditMode')
+ .then(() => {
+ expect(store.state.editMode).toBeFalsy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('sets preview mode', (done) => {
+ store.state.currentBlobView = 'repo-editor';
+ store.state.editMode = true;
+
+ store.dispatch('toggleEditMode')
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(store.state.currentBlobView).toBe('repo-preview');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('opens discard popup if there are changed files', (done) => {
+ store.state.editMode = true;
+ store.state.openFiles.push(file());
+ store.state.openFiles[0].changed = true;
+
+ store.dispatch('toggleEditMode')
+ .then(() => {
+ expect(store.state.discardPopupOpen).toBeTruthy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('can force closed if there are changed files', (done) => {
+ store.state.editMode = true;
+ store.state.openFiles.push(file());
+ store.state.openFiles[0].changed = true;
+
+ store.dispatch('toggleEditMode', true)
+ .then(() => {
+ expect(store.state.discardPopupOpen).toBeFalsy();
+ expect(store.state.editMode).toBeFalsy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('discards file changes', (done) => {
+ const f = file();
+ store.state.editMode = true;
+ store.state.tree.push(f);
+ store.state.openFiles.push(f);
+ f.changed = true;
+
+ store.dispatch('toggleEditMode', true)
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(f.changed).toBeFalsy();
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+
+ describe('toggleBlobView', () => {
+ it('sets edit mode view if in edit mode', (done) => {
+ store.state.editMode = true;
+
+ store.dispatch('toggleBlobView')
+ .then(() => {
+ expect(store.state.currentBlobView).toBe('repo-editor');
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('sets preview mode view if not in edit mode', (done) => {
+ store.dispatch('toggleBlobView')
+ .then(() => {
+ expect(store.state.currentBlobView).toBe('repo-preview');
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('checkCommitStatus', () => {
+ beforeEach(() => {
+ store.state.project.id = 2;
+ store.state.currentBranch = 'master';
+ store.state.currentRef = '1';
+ });
+
+ it('calls service', (done) => {
+ spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({
+ commit: { id: '123' },
+ }));
+
+ store.dispatch('checkCommitStatus')
+ .then(() => {
+ expect(service.getBranchData).toHaveBeenCalledWith(2, 'master');
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
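+ // the stored currentRef is '1', so a different head id means the branch has moved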
+ it('returns true if current ref does not equal returned ID', (done) => {
+ spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({
+ commit: { id: '123' },
+ }));
+
+ store.dispatch('checkCommitStatus')
+ .then((val) => {
+ expect(val).toBeTruthy();
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('returns false if current ref equals returned ID', (done) => {
+ spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({
+ commit: { id: '1' },
+ }));
+
+ store.dispatch('checkCommitStatus')
+ .then((val) => {
+ expect(val).toBeFalsy();
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('commitChanges', () => {
+ let payload;
+
+ beforeEach(() => {
+ spyOn(window, 'scrollTo');
+
+ document.body.innerHTML += '<div class="flash-container"></div>';
+
+ store.state.project.id = 123;
+ payload = {
+ branch: 'master',
+ };
+ });
+
+ afterEach(() => {
+ document.querySelector('.flash-container').remove();
+ });
+
+ describe('success', () => {
+ beforeEach(() => {
+ spyOn(service, 'commit').and.returnValue(Promise.resolve({
+ id: '123456',
+ short_id: '123',
+ message: 'test message',
+ committed_date: 'date',
+ stats: {
+ additions: '1',
+ deletions: '2',
+ },
+ }));
+ });
+
+ it('calls service', (done) => {
+ store.dispatch('commitChanges', { payload, newMr: false })
+ .then(() => {
+ expect(service.commit).toHaveBeenCalledWith(123, payload);
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('shows flash notice', (done) => {
+ store.dispatch('commitChanges', { payload, newMr: false })
+ .then(() => {
+ const alert = document.querySelector('.flash-container');
+
+ expect(alert.querySelector('.flash-notice')).not.toBeNull();
+ expect(alert.textContent.trim()).toBe(
+ 'Your changes have been committed. Commit 123 with 1 additions, 2 deletions.',
+ );
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('adds commit data to changed files', (done) => {
+ const changedFile = file();
+ const f = file();
+ changedFile.changed = true;
+
+ store.state.openFiles.push(changedFile, f);
+
+ store.dispatch('commitChanges', { payload, newMr: false })
+ .then(() => {
+ expect(changedFile.lastCommit.message).toBe('test message');
+ expect(f.lastCommit.message).not.toBe('test message');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('toggles edit mode', (done) => {
+ store.state.editMode = true;
+
+ store.dispatch('commitChanges', { payload, newMr: false })
+ .then(() => {
+ expect(store.state.editMode).toBeFalsy();
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('closes all files', (done) => {
+ store.state.openFiles.push(file());
+ store.state.openFiles[0].opened = true;
+
+ store.dispatch('commitChanges', { payload, newMr: false })
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(store.state.openFiles.length).toBe(0);
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('scrolls to top of page', (done) => {
+ store.dispatch('commitChanges', { payload, newMr: false })
+ .then(() => {
+ expect(window.scrollTo).toHaveBeenCalledWith(0, 0);
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('updates commit ref', (done) => {
+ store.dispatch('commitChanges', { payload, newMr: false })
+ .then(() => {
+ expect(store.state.currentRef).toBe('123456');
+
+ done();
+ }).catch(done.fail);
+ });
+
+ it('redirects to new merge request page', (done) => {
+ spyOn(gl.utils, 'visitUrl');
+
+ store.state.endpoints.newMergeRequestUrl = 'newMergeRequestUrl?branch=';
+
+ store.dispatch('commitChanges', { payload, newMr: true })
+ .then(() => {
+ expect(gl.utils.visitUrl).toHaveBeenCalledWith('newMergeRequestUrl?branch=master');
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+
+ describe('failed', () => {
+ beforeEach(() => {
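+ // the API reports commit failure in the response body, so the promise still resolves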
+ spyOn(service, 'commit').and.returnValue(Promise.resolve({
+ message: 'failed message',
+ }));
+ });
+
+ it('shows failed message', (done) => {
+ store.dispatch('commitChanges', { payload, newMr: false })
+ .then(() => {
+ const alert = document.querySelector('.flash-container');
+
+ expect(alert.textContent.trim()).toBe(
+ 'failed message',
+ );
+
+ done();
+ }).catch(done.fail);
+ });
+ });
+ });
+
+ describe('createTempEntry', () => {
+ it('creates a temp tree', (done) => {
+ store.dispatch('createTempEntry', {
+ name: 'test',
+ type: 'tree',
+ })
+ .then(() => {
+ expect(store.state.tree.length).toBe(1);
+ expect(store.state.tree[0].tempFile).toBeTruthy();
+ expect(store.state.tree[0].type).toBe('tree');
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('creates temp file', (done) => {
+ store.dispatch('createTempEntry', {
+ name: 'test',
+ type: 'blob',
+ })
+ .then(() => {
+ expect(store.state.tree.length).toBe(1);
+ expect(store.state.tree[0].tempFile).toBeTruthy();
+ expect(store.state.tree[0].type).toBe('blob');
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('popHistoryState', () => {
+
+ });
+
+ describe('scrollToTab', () => {
+ it('focuses the current active element', (done) => {
+ document.body.innerHTML += '<div id="tabs"><div class="active"><div class="repo-tab"></div></div></div>';
+ const el = document.querySelector('.repo-tab');
+ spyOn(el, 'focus');
+
+ store.dispatch('scrollToTab')
+ .then(() => {
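+ // the action focuses the tab asynchronously, so wait for the next tick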
+ setTimeout(() => {
+ expect(el.focus).toHaveBeenCalled();
+
+ document.getElementById('tabs').remove();
+
+ done();
+ });
+ })
+ .catch(done.fail);
+ });
+ });
+});
diff --git a/spec/javascripts/repo/stores/getters_spec.js b/spec/javascripts/repo/stores/getters_spec.js
new file mode 100644
index 00000000000..952b8ec3a59
--- /dev/null
+++ b/spec/javascripts/repo/stores/getters_spec.js
@@ -0,0 +1,146 @@
+import * as getters from '~/repo/stores/getters';
+import state from '~/repo/stores/state';
+import { file } from '../helpers';
+
+describe('Multi-file store getters', () => {
+ let localState;
+
+ beforeEach(() => {
+ localState = state();
+ });
+
+ describe('treeList', () => {
+ it('returns flat tree list', () => {
+ localState.tree.push(file('1'));
+ localState.tree[0].tree.push(file('2'));
+ localState.tree[0].tree[0].tree.push(file('3'));
+
+ const treeList = getters.treeList(localState);
+
+ expect(treeList.length).toBe(3);
+ expect(treeList[1].name).toBe(localState.tree[0].tree[0].name);
+ expect(treeList[2].name).toBe(localState.tree[0].tree[0].tree[0].name);
+ });
+ });
+
+ describe('changedFiles', () => {
+ it('returns a list of changed opened files', () => {
+ localState.openFiles.push(file());
+ localState.openFiles.push(file('changed'));
+ localState.openFiles[1].changed = true;
+
+ const changedFiles = getters.changedFiles(localState);
+
+ expect(changedFiles.length).toBe(1);
+ expect(changedFiles[0].name).toBe('changed');
+ });
+ });
+
+ describe('activeFile', () => {
+ it('returns the current active file', () => {
+ localState.openFiles.push(file());
+ localState.openFiles.push(file('active'));
+ localState.openFiles[1].active = true;
+
+ expect(getters.activeFile(localState).name).toBe('active');
+ });
+
+ it('returns undefined if no active files are found', () => {
+ localState.openFiles.push(file());
+ localState.openFiles.push(file('active'));
+
+ expect(getters.activeFile(localState)).toBeUndefined();
+ });
+ });
+
+ describe('activeFileExtension', () => {
+ it('returns the file extension for the current active file', () => {
+ localState.openFiles.push(file('active'));
+ localState.openFiles[0].active = true;
+ localState.openFiles[0].path = 'test.js';
+
+ expect(getters.activeFileExtension(localState)).toBe('.js');
+
+ localState.openFiles[0].path = 'test.es6.js';
+
+ expect(getters.activeFileExtension(localState)).toBe('.js');
+ });
+ });
+
+ describe('isCollapsed', () => {
+ it('returns true if state has open files', () => {
+ localState.openFiles.push(file());
+
+ expect(getters.isCollapsed(localState)).toBeTruthy();
+ });
+
+ it('returns false if state has no open files', () => {
+ expect(getters.isCollapsed(localState)).toBeFalsy();
+ });
+ });
+
+ describe('canEditFile', () => {
+ beforeEach(() => {
+ localState.onTopOfBranch = true;
+ localState.canCommit = true;
+
+ localState.openFiles.push(file());
+ localState.openFiles[0].active = true;
+ });
+
+ it('returns true if user can commit and has open files', () => {
+ expect(getters.canEditFile(localState)).toBeTruthy();
+ });
+
+ it('returns false if user can commit and has no open files', () => {
+ localState.openFiles = [];
+
+ expect(getters.canEditFile(localState)).toBeFalsy();
+ });
+
+ it('returns false if user can commit and active file is binary', () => {
+ localState.openFiles[0].binary = true;
+
+ expect(getters.canEditFile(localState)).toBeFalsy();
+ });
+
+ it('returns false if user cannot commit', () => {
+ localState.canCommit = false;
+
+ expect(getters.canEditFile(localState)).toBeFalsy();
+ });
+
+ it('returns false if user can commit but is not on top of a branch', () => {
+ localState.onTopOfBranch = false;
+
+ expect(getters.canEditFile(localState)).toBeFalsy();
+ });
+ });
+
+ describe('modifiedFiles', () => {
+ it('returns a list of modified files', () => {
+ localState.openFiles.push(file());
+ localState.openFiles.push(file('changed'));
+ localState.openFiles[1].changed = true;
+
+ const modifiedFiles = getters.modifiedFiles(localState);
+
+ expect(modifiedFiles.length).toBe(1);
+ expect(modifiedFiles[0].name).toBe('changed');
+ });
+ });
+
+ describe('addedFiles', () => {
+ it('returns a list of added files', () => {
+ localState.openFiles.push(file());
+ localState.openFiles.push(file('added'));
+ localState.openFiles[1].changed = true;
+ localState.openFiles[1].tempFile = true;
+
+ const addedFiles = getters.addedFiles(localState);
+
+ expect(addedFiles.length).toBe(1);
+ expect(addedFiles[0].name).toBe('added');
+ });
+ });
+});
diff --git a/spec/javascripts/repo/stores/mutations/branch_spec.js b/spec/javascripts/repo/stores/mutations/branch_spec.js
new file mode 100644
index 00000000000..3c06794d5e3
--- /dev/null
+++ b/spec/javascripts/repo/stores/mutations/branch_spec.js
@@ -0,0 +1,18 @@
+import mutations from '~/repo/stores/mutations/branch';
+import state from '~/repo/stores/state';
+
+describe('Multi-file store branch mutations', () => {
+ let localState;
+
+ beforeEach(() => {
+ localState = state();
+ });
+
+ describe('SET_CURRENT_BRANCH', () => {
+ it('sets currentBranch', () => {
+ mutations.SET_CURRENT_BRANCH(localState, 'master');
+
+ expect(localState.currentBranch).toBe('master');
+ });
+ });
+});
diff --git a/spec/javascripts/repo/stores/mutations/file_spec.js b/spec/javascripts/repo/stores/mutations/file_spec.js
new file mode 100644
index 00000000000..2f2835dde1f
--- /dev/null
+++ b/spec/javascripts/repo/stores/mutations/file_spec.js
@@ -0,0 +1,131 @@
+import mutations from '~/repo/stores/mutations/file';
+import state from '~/repo/stores/state';
+import { file } from '../../helpers';
+
+describe('Multi-file store file mutations', () => {
+ let localState;
+ let localFile;
+
+ beforeEach(() => {
+ localState = state();
+ localFile = file();
+ });
+
+ describe('SET_FILE_ACTIVE', () => {
+ it('sets the file active', () => {
+ mutations.SET_FILE_ACTIVE(localState, {
+ file: localFile,
+ active: true,
+ });
+
+ expect(localFile.active).toBeTruthy();
+ });
+ });
+
+ describe('TOGGLE_FILE_OPEN', () => {
+ beforeEach(() => {
+ mutations.TOGGLE_FILE_OPEN(localState, localFile);
+ });
+
+ it('adds into opened files', () => {
+ expect(localFile.opened).toBeTruthy();
+ expect(localState.openFiles.length).toBe(1);
+ });
+
+ it('removes from opened files', () => {
+ mutations.TOGGLE_FILE_OPEN(localState, localFile);
+
+ expect(localFile.opened).toBeFalsy();
+ expect(localState.openFiles.length).toBe(0);
+ });
+ });
+
+ describe('SET_FILE_DATA', () => {
+ it('sets extra file data', () => {
+ mutations.SET_FILE_DATA(localState, {
+ data: {
+ blame_path: 'blame',
+ commits_path: 'commits',
+ permalink: 'permalink',
+ raw_path: 'raw',
+ binary: true,
+ html: 'html',
+ render_error: 'render_error',
+ },
+ file: localFile,
+ });
+
+ expect(localFile.blamePath).toBe('blame');
+ expect(localFile.commitsPath).toBe('commits');
+ expect(localFile.permalink).toBe('permalink');
+ expect(localFile.rawPath).toBe('raw');
+ expect(localFile.binary).toBeTruthy();
+ expect(localFile.html).toBe('html');
+ expect(localFile.renderError).toBe('render_error');
+ });
+ });
+
+ describe('SET_FILE_RAW_DATA', () => {
+ it('sets raw data', () => {
+ mutations.SET_FILE_RAW_DATA(localState, {
+ file: localFile,
+ raw: 'testing',
+ });
+
+ expect(localFile.raw).toBe('testing');
+ });
+ });
+
+ describe('UPDATE_FILE_CONTENT', () => {
+ beforeEach(() => {
+ localFile.raw = 'test';
+ });
+
+ it('sets content', () => {
+ mutations.UPDATE_FILE_CONTENT(localState, {
+ file: localFile,
+ content: 'test',
+ });
+
+ expect(localFile.content).toBe('test');
+ });
+
+ it('sets changed if content does not match raw', () => {
+ mutations.UPDATE_FILE_CONTENT(localState, {
+ file: localFile,
+ content: 'testing',
+ });
+
+ expect(localFile.content).toBe('testing');
+ expect(localFile.changed).toBeTruthy();
+ });
+ });
+
+ describe('DISCARD_FILE_CHANGES', () => {
+ beforeEach(() => {
+ localFile.content = 'test';
+ localFile.changed = true;
+ });
+
+ it('resets content and changed', () => {
+ mutations.DISCARD_FILE_CHANGES(localState, localFile);
+
+ expect(localFile.content).toBe('');
+ expect(localFile.changed).toBeFalsy();
+ });
+ });
+
+ describe('CREATE_TMP_FILE', () => {
+ it('adds file into parent tree', () => {
+ const f = file();
+
+ mutations.CREATE_TMP_FILE(localState, {
+ file: f,
+ parent: localFile,
+ });
+
+ expect(localFile.tree.length).toBe(1);
+ expect(localFile.tree[0].name).toBe(f.name);
+ });
+ });
+});
diff --git a/spec/javascripts/repo/stores/mutations/tree_spec.js b/spec/javascripts/repo/stores/mutations/tree_spec.js
new file mode 100644
index 00000000000..1c76cfed9c8
--- /dev/null
+++ b/spec/javascripts/repo/stores/mutations/tree_spec.js
@@ -0,0 +1,71 @@
+import mutations from '~/repo/stores/mutations/tree';
+import state from '~/repo/stores/state';
+import { file } from '../../helpers';
+
+describe('Multi-file store tree mutations', () => {
+ let localState;
+ let localTree;
+
+ beforeEach(() => {
+ localState = state();
+ localTree = file();
+ });
+
+ describe('TOGGLE_TREE_OPEN', () => {
+ it('toggles tree open', () => {
+ mutations.TOGGLE_TREE_OPEN(localState, localTree);
+
+ expect(localTree.opened).toBeTruthy();
+
+ mutations.TOGGLE_TREE_OPEN(localState, localTree);
+
+ expect(localTree.opened).toBeFalsy();
+ });
+ });
+
+ describe('SET_DIRECTORY_DATA', () => {
+ const data = [{
+ name: 'tree',
+ },
+ {
+ name: 'submodule',
+ },
+ {
+ name: 'blob',
+ }];
+
+ it('adds directory data', () => {
+ mutations.SET_DIRECTORY_DATA(localState, {
+ data,
+ tree: localState,
+ });
+
+ expect(localState.tree.length).toBe(3);
+ expect(localState.tree[0].name).toBe('tree');
+ expect(localState.tree[1].name).toBe('submodule');
+ expect(localState.tree[2].name).toBe('blob');
+ });
+ });
+
+ describe('SET_PARENT_TREE_URL', () => {
+ it('sets the parent tree url', () => {
+ mutations.SET_PARENT_TREE_URL(localState, 'test');
+
+ expect(localState.parentTreeUrl).toBe('test');
+ });
+ });
+
+ describe('CREATE_TMP_TREE', () => {
+ it('adds tree into parent tree', () => {
+ const tmpEntry = file();
+
+ mutations.CREATE_TMP_TREE(localState, {
+ tmpEntry,
+ parent: localTree,
+ });
+
+ expect(localTree.tree.length).toBe(1);
+ expect(localTree.tree[0].name).toBe(tmpEntry.name);
+ });
+ });
+});
diff --git a/spec/javascripts/repo/stores/mutations_spec.js b/spec/javascripts/repo/stores/mutations_spec.js
new file mode 100644
index 00000000000..d1c9885e01d
--- /dev/null
+++ b/spec/javascripts/repo/stores/mutations_spec.js
@@ -0,0 +1,117 @@
+import mutations from '~/repo/stores/mutations';
+import state from '~/repo/stores/state';
+import { file } from '../helpers';
+
+describe('Multi-file store mutations', () => {
+ let localState;
+ let entry;
+
+ beforeEach(() => {
+ localState = state();
+ entry = file();
+ });
+
+ describe('SET_INITIAL_DATA', () => {
+ it('sets all initial data', () => {
+ mutations.SET_INITIAL_DATA(localState, {
+ test: 'test',
+ });
+
+ expect(localState.test).toBe('test');
+ });
+ });
+
+ describe('SET_PREVIEW_MODE', () => {
+ it('sets currentBlobView to repo-preview', () => {
+ mutations.SET_PREVIEW_MODE(localState);
+
+ expect(localState.currentBlobView).toBe('repo-preview');
+
+ localState.currentBlobView = 'testing';
+
+ mutations.SET_PREVIEW_MODE(localState);
+
+ expect(localState.currentBlobView).toBe('repo-preview');
+ });
+ });
+
+ describe('SET_EDIT_MODE', () => {
+ it('sets currentBlobView to repo-editor', () => {
+ mutations.SET_EDIT_MODE(localState);
+
+ expect(localState.currentBlobView).toBe('repo-editor');
+
+ localState.currentBlobView = 'testing';
+
+ mutations.SET_EDIT_MODE(localState);
+
+ expect(localState.currentBlobView).toBe('repo-editor');
+ });
+ });
+
+ describe('TOGGLE_LOADING', () => {
+ it('toggles loading of entry', () => {
+ mutations.TOGGLE_LOADING(localState, entry);
+
+ expect(entry.loading).toBeTruthy();
+
+ mutations.TOGGLE_LOADING(localState, entry);
+
+ expect(entry.loading).toBeFalsy();
+ });
+ });
+
+ describe('TOGGLE_EDIT_MODE', () => {
+ it('toggles editMode', () => {
+ mutations.TOGGLE_EDIT_MODE(localState);
+
+ expect(localState.editMode).toBeTruthy();
+
+ mutations.TOGGLE_EDIT_MODE(localState);
+
+ expect(localState.editMode).toBeFalsy();
+ });
+ });
+
+ describe('TOGGLE_DISCARD_POPUP', () => {
+ it('sets discardPopupOpen', () => {
+ mutations.TOGGLE_DISCARD_POPUP(localState, true);
+
+ expect(localState.discardPopupOpen).toBeTruthy();
+
+ mutations.TOGGLE_DISCARD_POPUP(localState, false);
+
+ expect(localState.discardPopupOpen).toBeFalsy();
+ });
+ });
+
+ describe('SET_COMMIT_REF', () => {
+ it('sets currentRef', () => {
+ mutations.SET_COMMIT_REF(localState, '123');
+
+ expect(localState.currentRef).toBe('123');
+ });
+ });
+
+ describe('SET_ROOT', () => {
+ it('sets isRoot & initialRoot', () => {
+ mutations.SET_ROOT(localState, true);
+
+ expect(localState.isRoot).toBeTruthy();
+ expect(localState.isInitialRoot).toBeTruthy();
+
+ mutations.SET_ROOT(localState, false);
+
+ expect(localState.isRoot).toBeFalsy();
+ expect(localState.isInitialRoot).toBeFalsy();
+ });
+ });
+
+ describe('SET_PREVIOUS_URL', () => {
+ it('sets previousUrl', () => {
+ mutations.SET_PREVIOUS_URL(localState, 'testing');
+
+ expect(localState.previousUrl).toBe('testing');
+ });
+ });
+});
diff --git a/spec/javascripts/repo/stores/utils_spec.js b/spec/javascripts/repo/stores/utils_spec.js
new file mode 100644
index 00000000000..37287c587d7
--- /dev/null
+++ b/spec/javascripts/repo/stores/utils_spec.js
@@ -0,0 +1,102 @@
+import * as utils from '~/repo/stores/utils';
+
+describe('Multi-file store utils', () => {
+ describe('setPageTitle', () => {
+ it('sets the document page title', () => {
+ utils.setPageTitle('test');
+
+ expect(document.title).toBe('test');
+ });
+ });
+
+ describe('pushState', () => {
+ it('calls history.pushState', () => {
+ spyOn(history, 'pushState');
+
+ utils.pushState('test');
+
+ expect(history.pushState).toHaveBeenCalledWith({ url: 'test' }, '', 'test');
+ });
+ });
+
+ describe('createTemp', () => {
+ it('creates temp tree', () => {
+ const tmp = utils.createTemp({
+ name: 'test',
+ path: 'test',
+ type: 'tree',
+ level: 0,
+ changed: false,
+ content: '',
+ base64: '',
+ });
+
+ expect(tmp.tempFile).toBeTruthy();
+ expect(tmp.icon).toBe('fa-folder');
+ });
+
+ it('creates temp file', () => {
+ const tmp = utils.createTemp({
+ name: 'test',
+ path: 'test',
+ type: 'blob',
+ level: 0,
+ changed: false,
+ content: '',
+ base64: '',
+ });
+
+ expect(tmp.tempFile).toBeTruthy();
+ expect(tmp.icon).toBe('fa-file-text-o');
+ });
+ });
+
+ describe('findIndexOfFile', () => {
+ let state;
+
+ beforeEach(() => {
+ state = [{
+ path: '1',
+ }, {
+ path: '2',
+ }];
+ });
+
+ it('finds the index of an entry by path', () => {
+ const index = utils.findIndexOfFile(state, {
+ path: '2',
+ });
+
+ expect(index).toBe(1);
+ });
+ });
+
+ describe('findEntry', () => {
+ let state;
+
+ beforeEach(() => {
+ state = {
+ tree: [{
+ type: 'tree',
+ name: 'test',
+ }, {
+ type: 'blob',
+ name: 'file',
+ }],
+ };
+ });
+
+ it('returns an entry found by name', () => {
+ const foundEntry = utils.findEntry(state, 'tree', 'test');
+
+ expect(foundEntry.type).toBe('tree');
+ expect(foundEntry.name).toBe('test');
+ });
+
+ it('returns undefined when no entry found', () => {
+ const foundEntry = utils.findEntry(state, 'blob', 'test');
+
+ expect(foundEntry).toBeUndefined();
+ });
+ });
+});
diff --git a/spec/javascripts/search_autocomplete_spec.js b/spec/javascripts/search_autocomplete_spec.js
index a2394857b82..fdfc59a6f12 100644
--- a/spec/javascripts/search_autocomplete_spec.js
+++ b/spec/javascripts/search_autocomplete_spec.js
@@ -191,8 +191,6 @@ import '~/lib/utils/common_utils';
// browsers will not trigger default behavior (form submit, in this
// example) on JavaScript-created keypresses.
expect(submitSpy).not.toHaveBeenTriggered();
- // Does a worse job at capturing the intent of the test, but works.
- expect(enterKeyEvent.isDefaultPrevented()).toBe(true);
});
});
}).call(window);
diff --git a/spec/javascripts/sidebar/mock_data.js b/spec/javascripts/sidebar/mock_data.js
index 0682b463043..3b094d20838 100644
--- a/spec/javascripts/sidebar/mock_data.js
+++ b/spec/javascripts/sidebar/mock_data.js
@@ -1,6 +1,6 @@
/* eslint-disable quote-props*/
-const sidebarMockData = {
+const RESPONSE_MAP = {
'GET': {
'/gitlab-org/gitlab-shell/issues/5.json': {
id: 45,
@@ -66,6 +66,65 @@ const sidebarMockData = {
},
labels: [],
},
+ '/gitlab-org/gitlab-shell/issues/5.json?serializer=sidebar': {
+ assignees: [
+ {
+ name: 'User 0',
+ username: 'user0',
+ id: 22,
+ state: 'active',
+ avatar_url: 'http://www.gravatar.com/avatar/52e4ce24a915fb7e51e1ad3b57f4b00a?s=80\u0026d=identicon',
+ web_url: 'http://localhost:3001/user0',
+ },
+ {
+ name: 'Marguerite Bartell',
+ username: 'tajuana',
+ id: 18,
+ state: 'active',
+ avatar_url: 'http://www.gravatar.com/avatar/4852a41fb41616bf8f140d3701673f53?s=80\u0026d=identicon',
+ web_url: 'http://localhost:3001/tajuana',
+ },
+ {
+ name: 'Laureen Ritchie',
+ username: 'michaele.will',
+ id: 16,
+ state: 'active',
+ avatar_url: 'http://www.gravatar.com/avatar/e301827eb03be955c9c172cb9a8e4e8a?s=80\u0026d=identicon',
+ web_url: 'http://localhost:3001/michaele.will',
+ },
+ ],
+ human_time_estimate: null,
+ human_total_time_spent: null,
+ participants: [
+ {
+ name: 'User 0',
+ username: 'user0',
+ id: 22,
+ state: 'active',
+ avatar_url: 'http://www.gravatar.com/avatar/52e4ce24a915fb7e51e1ad3b57f4b00a?s=80\u0026d=identicon',
+ web_url: 'http://localhost:3001/user0',
+ },
+ {
+ name: 'Marguerite Bartell',
+ username: 'tajuana',
+ id: 18,
+ state: 'active',
+ avatar_url: 'http://www.gravatar.com/avatar/4852a41fb41616bf8f140d3701673f53?s=80\u0026d=identicon',
+ web_url: 'http://localhost:3001/tajuana',
+ },
+ {
+ name: 'Laureen Ritchie',
+ username: 'michaele.will',
+ id: 16,
+ state: 'active',
+ avatar_url: 'http://www.gravatar.com/avatar/e301827eb03be955c9c172cb9a8e4e8a?s=80\u0026d=identicon',
+ web_url: 'http://localhost:3001/michaele.will',
+ },
+ ],
+ subscribed: true,
+ time_estimate: 0,
+ total_time_spent: 0,
+ },
'/autocomplete/projects?project_id=15': [
{
'id': 0,
@@ -113,9 +172,10 @@ const sidebarMockData = {
},
};
-export default {
+const mockData = {
+ responseMap: RESPONSE_MAP,
mediator: {
- endpoint: '/gitlab-org/gitlab-shell/issues/5.json',
+ endpoint: '/gitlab-org/gitlab-shell/issues/5.json?serializer=sidebar',
toggleSubscriptionEndpoint: '/gitlab-org/gitlab-shell/issues/5/toggle_subscription',
moveIssueEndpoint: '/gitlab-org/gitlab-shell/issues/5/move',
projectsAutocompleteEndpoint: '/autocomplete/projects?project_id=15',
@@ -141,12 +201,16 @@
name: 'Administrator',
username: 'root',
},
+};
- sidebarMockInterceptor(request, next) {
- const body = sidebarMockData[request.method.toUpperCase()][request.url];
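+// Defined outside the literal and bound below so `this.responseMap` still
+// resolves when vue-resource invokes the interceptor without a receiver.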
+mockData.sidebarMockInterceptor = function (request, next) {
+ const body = this.responseMap[request.method.toUpperCase()][request.url];
- next(request.respondWith(JSON.stringify(body), {
- status: 200,
- }));
- },
-};
+ next(request.respondWith(JSON.stringify(body), {
+ status: 200,
+ }));
+}.bind(mockData);
+
+export default mockData;
diff --git a/spec/javascripts/sidebar/sidebar_mediator_spec.js b/spec/javascripts/sidebar/sidebar_mediator_spec.js
index 7deb1fd2118..14c34d5a78c 100644
--- a/spec/javascripts/sidebar/sidebar_mediator_spec.js
+++ b/spec/javascripts/sidebar/sidebar_mediator_spec.js
@@ -33,10 +33,29 @@ describe('Sidebar mediator', () => {
.catch(done.fail);
});
- it('fetches the data', () => {
- spyOn(this.mediator.service, 'get').and.callThrough();
- this.mediator.fetch();
- expect(this.mediator.service.get).toHaveBeenCalled();
+ it('fetches the data', (done) => {
+ const mockData = Mock.responseMap.GET['/gitlab-org/gitlab-shell/issues/5.json?serializer=sidebar'];
+ spyOn(this.mediator, 'processFetchedData').and.callThrough();
+
+ this.mediator.fetch()
+ .then(() => {
+ expect(this.mediator.processFetchedData).toHaveBeenCalledWith(mockData);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('processes fetched data', () => {
+ const mockData = Mock.responseMap.GET['/gitlab-org/gitlab-shell/issues/5.json?serializer=sidebar'];
+ this.mediator.processFetchedData(mockData);
+
+ expect(this.mediator.store.assignees).toEqual(mockData.assignees);
+ expect(this.mediator.store.humanTimeEstimate).toEqual(mockData.human_time_estimate);
+ expect(this.mediator.store.humanTotalTimeSpent).toEqual(mockData.human_total_time_spent);
+ expect(this.mediator.store.participants).toEqual(mockData.participants);
+ expect(this.mediator.store.subscribed).toEqual(mockData.subscribed);
+ expect(this.mediator.store.timeEstimate).toEqual(mockData.time_estimate);
+ expect(this.mediator.store.totalTimeSpent).toEqual(mockData.total_time_spent);
});
it('sets moveToProjectId', () => {
diff --git a/spec/javascripts/sidebar/sidebar_service_spec.js b/spec/javascripts/sidebar/sidebar_service_spec.js
deleted file mode 100644
index 7324d34d84a..00000000000
--- a/spec/javascripts/sidebar/sidebar_service_spec.js
+++ /dev/null
@@ -1,66 +0,0 @@
-import Vue from 'vue';
-import SidebarService from '~/sidebar/services/sidebar_service';
-import Mock from './mock_data';
-
-describe('Sidebar service', () => {
- beforeEach(() => {
- Vue.http.interceptors.push(Mock.sidebarMockInterceptor);
- this.service = new SidebarService({
- endpoint: '/gitlab-org/gitlab-shell/issues/5.json',
- toggleSubscriptionEndpoint: '/gitlab-org/gitlab-shell/issues/5/toggle_subscription',
- moveIssueEndpoint: '/gitlab-org/gitlab-shell/issues/5/move',
- projectsAutocompleteEndpoint: '/autocomplete/projects?project_id=15',
- });
- });
-
- afterEach(() => {
- SidebarService.singleton = null;
- Vue.http.interceptors = _.without(Vue.http.interceptors, Mock.sidebarMockInterceptor);
- });
-
- it('gets the data', (done) => {
- this.service.get()
- .then((resp) => {
- expect(resp).toBeDefined();
- done();
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('updates the data', (done) => {
- this.service.update('issue[assignee_ids]', [1])
- .then((resp) => {
- expect(resp).toBeDefined();
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('gets projects for autocomplete', (done) => {
- this.service.getProjectsAutocomplete()
- .then((resp) => {
- expect(resp).toBeDefined();
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('moves the issue to another project', (done) => {
- this.service.moveIssue(123)
- .then((resp) => {
- expect(resp).toBeDefined();
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('toggles the subscription', (done) => {
- this.service.toggleSubscription()
- .then((resp) => {
- expect(resp).toBeDefined();
- })
- .then(done)
- .catch(done.fail);
- });
-});
diff --git a/spec/javascripts/sidebar/sidebar_store_spec.js b/spec/javascripts/sidebar/sidebar_store_spec.js
index 51dee64fb93..ea4eae1e23f 100644
--- a/spec/javascripts/sidebar/sidebar_store_spec.js
+++ b/spec/javascripts/sidebar/sidebar_store_spec.js
@@ -120,6 +120,12 @@ describe('Sidebar store', () => {
expect(this.store.isFetching.participants).toEqual(false);
});
+ it('sets loading state', () => {
+ this.store.setLoadingState('assignees', true);
+
+ expect(this.store.isLoading.assignees).toEqual(true);
+ });
+
it('set time tracking data', () => {
this.store.setTimeTrackingData(Mock.time);
expect(this.store.timeEstimate).toEqual(Mock.time.time_estimate);
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
index 33ed0cb4342..d7af956c9c1 100644
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
@@ -1,140 +1,150 @@
import Vue from 'vue';
-import pipelineComponent from '~/vue_merge_request_widget/components/mr_widget_pipeline';
+import pipelineComponent from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue';
+import mountComponent from '../../helpers/vue_mount_component_helper';
import mockData from '../mock_data';
-const createComponent = (mr) => {
- const Component = Vue.extend(pipelineComponent);
- return new Component({
- el: document.createElement('div'),
- propsData: { mr },
- });
-};
-
describe('MRWidgetPipeline', () => {
- describe('props', () => {
- it('should have props', () => {
- const { mr } = pipelineComponent.props;
+ let vm;
+ let Component;
- expect(mr.type instanceof Object).toBeTruthy();
- expect(mr.required).toBeTruthy();
- });
+ beforeEach(() => {
+ Component = Vue.extend(pipelineComponent);
});
- describe('components', () => {
- it('should have components added', () => {
- expect(pipelineComponent.components['pipeline-stage']).toBeDefined();
- expect(pipelineComponent.components.ciIcon).toBeDefined();
- });
+ afterEach(() => {
+ vm.$destroy();
});
describe('computed', () => {
describe('hasPipeline', () => {
it('should return true when there is a pipeline', () => {
- expect(Object.keys(mockData.pipeline).length).toBeGreaterThan(0);
-
- const vm = createComponent({
+ vm = mountComponent(Component, {
pipeline: mockData.pipeline,
+ ciStatus: 'success',
+ hasCi: true,
});
- expect(vm.hasPipeline).toBeTruthy();
+ expect(vm.hasPipeline).toEqual(true);
});
it('should return false when there is no pipeline', () => {
- const vm = createComponent({
- pipeline: null,
+ vm = mountComponent(Component, {
+ pipeline: {},
});
- expect(vm.hasPipeline).toBeFalsy();
+ expect(vm.hasPipeline).toEqual(false);
});
});
describe('hasCIError', () => {
it('should return false when there is no CI error', () => {
- const vm = createComponent({
+ vm = mountComponent(Component, {
pipeline: mockData.pipeline,
- hasCI: true,
+ hasCi: true,
ciStatus: 'success',
});
- expect(vm.hasCIError).toBeFalsy();
+ expect(vm.hasCIError).toEqual(false);
});
it('should return true when there is a CI error', () => {
- const vm = createComponent({
+ vm = mountComponent(Component, {
pipeline: mockData.pipeline,
- hasCI: true,
+ hasCi: true,
ciStatus: null,
});
- expect(vm.hasCIError).toBeTruthy();
+ expect(vm.hasCIError).toEqual(true);
});
});
});
- describe('template', () => {
- let vm;
- let el;
- const { pipeline } = mockData;
- const mr = {
- hasCI: true,
- ciStatus: 'success',
- pipelineDetailedStatus: pipeline.details.status,
- pipeline,
- };
-
- beforeEach(() => {
- vm = createComponent(mr);
- el = vm.$el;
- });
+ describe('rendered output', () => {
+ it('should render CI error', () => {
+ vm = mountComponent(Component, {
+ pipeline: mockData.pipeline,
+ hasCi: true,
+ ciStatus: null,
+ });
- it('should render template elements correctly', () => {
- expect(el.classList.contains('mr-widget-heading')).toBeTruthy();
- expect(el.querySelectorAll('.ci-status-icon.ci-status-icon-success').length).toEqual(1);
- expect(el.querySelector('.pipeline-id').textContent).toContain(`#${pipeline.id}`);
- expect(el.innerText).toContain('passed');
- expect(el.querySelector('.pipeline-id').getAttribute('href')).toEqual(pipeline.path);
- expect(el.querySelectorAll('.stage-container').length).toEqual(2);
- expect(el.querySelector('.js-ci-error')).toEqual(null);
- expect(el.querySelector('.js-commit-link').getAttribute('href')).toEqual(pipeline.commit.commit_path);
- expect(el.querySelector('.js-commit-link').textContent).toContain(pipeline.commit.short_id);
- expect(el.querySelector('.js-mr-coverage').textContent).toContain(`Coverage ${pipeline.coverage}%`);
+ expect(
+ vm.$el.querySelector('.media-body').textContent.trim(),
+ ).toEqual('Could not connect to the CI server. Please check your settings and try again');
});
- it('should list single stage', (done) => {
- pipeline.details.stages.splice(0, 1);
+ describe('with a pipeline', () => {
+ beforeEach(() => {
+ vm = mountComponent(Component, {
+ pipeline: mockData.pipeline,
+ hasCi: true,
+ ciStatus: 'success',
+ });
+ });
+
+ it('should render pipeline ID', () => {
+ expect(
+ vm.$el.querySelector('.pipeline-id').textContent.trim(),
+ ).toEqual(`#${mockData.pipeline.id}`);
+ });
+
+ it('should render pipeline status and commit id', () => {
+ expect(
+ vm.$el.querySelector('.media-body').textContent.trim(),
+ ).toContain(mockData.pipeline.details.status.label);
- Vue.nextTick(() => {
- expect(el.querySelectorAll('.stage-container button').length).toEqual(1);
- done();
+ expect(
+ vm.$el.querySelector('.js-commit-link').textContent.trim(),
+ ).toEqual(mockData.pipeline.commit.short_id);
+
+ expect(
+ vm.$el.querySelector('.js-commit-link').getAttribute('href'),
+ ).toEqual(mockData.pipeline.commit.commit_path);
});
- });
- it('should not have stages when there is no stage', (done) => {
- vm.mr.pipeline.details.stages = [];
+ it('should render pipeline graph', () => {
+ expect(vm.$el.querySelector('.mr-widget-pipeline-graph')).toBeDefined();
+ expect(vm.$el.querySelectorAll('.stage-container').length).toEqual(mockData.pipeline.details.stages.length);
+ });
- Vue.nextTick(() => {
- expect(el.querySelectorAll('.stage-container button').length).toEqual(0);
- done();
+ it('should render coverage information', () => {
+ expect(
+ vm.$el.querySelector('.media-body').textContent,
+ ).toContain(`Coverage ${mockData.pipeline.coverage}`);
});
});
- it('should not have coverage text when pipeline has no coverage info', (done) => {
- vm.mr.pipeline.coverage = null;
+ describe('without coverage', () => {
+ it('should not render a coverage', () => {
+ // Deep-copy: Object.assign is shallow, so deleting from the nested
+ // pipeline object would also mutate the shared mockData for later specs.
+ const mockCopy = JSON.parse(JSON.stringify(mockData));
+ delete mockCopy.pipeline.coverage;
- Vue.nextTick(() => {
- expect(el.querySelector('.js-mr-coverage')).toEqual(null);
- done();
+ vm = mountComponent(Component, {
+ pipeline: mockCopy.pipeline,
+ hasCi: true,
+ ciStatus: 'success',
+ });
+
+ expect(
+ vm.$el.querySelector('.media-body').textContent,
+ ).not.toContain('Coverage');
});
});
- it('should show CI error when there is a CI error', (done) => {
- vm.mr.ciStatus = null;
+ describe('without a pipeline graph', () => {
+ it('should not render a pipeline graph', () => {
+ // Deep-copy here too; `details.stages` is otherwise shared by reference.
+ const mockCopy = JSON.parse(JSON.stringify(mockData));
+ delete mockCopy.pipeline.details.stages;
+
+ vm = mountComponent(Component, {
+ pipeline: mockCopy.pipeline,
+ hasCi: true,
+ ciStatus: 'success',
+ });
- Vue.nextTick(() => {
- expect(el.querySelectorAll('.js-ci-error').length).toEqual(1);
- expect(el.innerText).toContain('Could not connect to the CI server');
- expect(el.querySelector('.ci-status-icon svg use').getAttribute('xlink:href')).toContain('status_failed');
- done();
+ expect(vm.$el.querySelector('.js-mini-pipeline-graph')).toEqual(null);
});
});
});
diff --git a/spec/javascripts/vue_mr_widget/mock_data.js b/spec/javascripts/vue_mr_widget/mock_data.js
index 0795d0aaa82..1ad7c2d8efa 100644
--- a/spec/javascripts/vue_mr_widget/mock_data.js
+++ b/spec/javascripts/vue_mr_widget/mock_data.js
@@ -202,7 +202,6 @@ export default {
"revert_in_fork_path": "/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+cherry-pick+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1",
"email_patches_path": "/root/acets-app/merge_requests/22.patch",
"plain_diff_path": "/root/acets-app/merge_requests/22.diff",
- "ci_status_path": "/root/acets-app/merge_requests/22/ci_status",
"status_path": "/root/acets-app/merge_requests/22.json",
"merge_check_path": "/root/acets-app/merge_requests/22/merge_check",
"ci_environments_status_url": "/root/acets-app/merge_requests/22/ci_environments_status",
diff --git a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
index 8832dd161c7..9e6d0aa472c 100644
--- a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
@@ -3,13 +3,7 @@ import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options';
import eventHub from '~/vue_merge_request_widget/event_hub';
import notify from '~/lib/utils/notify';
import mockData from './mock_data';
-
-const createComponent = () => {
- delete mrWidgetOptions.el; // Prevent component mounting
- gl.mrWidgetData = mockData;
- const Component = Vue.extend(mrWidgetOptions);
- return new Component();
-};
+import mountComponent from '../helpers/vue_mount_component_helper';
const returnPromise = data => new Promise((resolve) => {
resolve({
@@ -22,9 +16,16 @@ const returnPromise = data => new Promise((resolve) => {
describe('mrWidgetOptions', () => {
let vm;
+ let MrWidgetOptions;
beforeEach(() => {
- vm = createComponent();
+ // Prevent component mounting
+ delete mrWidgetOptions.el;
+
+ MrWidgetOptions = Vue.extend(mrWidgetOptions);
+ vm = mountComponent(MrWidgetOptions, {
+ mrData: { ...mockData },
+ });
});
describe('data', () => {
@@ -77,7 +78,9 @@
});
it('should return true if there is relatedLinks in MR', () => {
- vm.mr.relatedLinks = {};
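+ // Vue.set registers the new property with Vue's reactivity system;
+ // direct assignment to a brand-new key would not trigger the computed property.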
+ Vue.set(vm.mr, 'relatedLinks', {});
expect(vm.shouldRenderRelatedLinks).toBeTruthy();
});
});
diff --git a/spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js b/spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js
deleted file mode 100644
index e667b4b3677..00000000000
--- a/spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js
+++ /dev/null
@@ -1,47 +0,0 @@
-import Vue from 'vue';
-import VueResource from 'vue-resource';
-import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
-
-Vue.use(VueResource);
-
-describe('MRWidgetService', () => {
- const mr = {
- mergePath: './',
- mergeCheckPath: './',
- cancelAutoMergePath: './',
- removeWIPPath: './',
- sourceBranchPath: './',
- ciEnvironmentsStatusPath: './',
- statusPath: './',
- mergeActionsContentPath: './',
- isServiceStore: true,
- };
-
- it('should have store and resources created in constructor', () => {
- const service = new MRWidgetService(mr);
-
- expect(service.mergeResource).toBeDefined();
- expect(service.mergeCheckResource).toBeDefined();
- expect(service.cancelAutoMergeResource).toBeDefined();
- expect(service.removeWIPResource).toBeDefined();
- expect(service.removeSourceBranchResource).toBeDefined();
- expect(service.deploymentsResource).toBeDefined();
- expect(service.pollResource).toBeDefined();
- expect(service.mergeActionsContentResource).toBeDefined();
- });
-
- it('should have methods defined', () => {
- window.history.pushState({}, null, '/');
- const service = new MRWidgetService(mr);
-
- expect(service.merge()).toBeDefined();
- expect(service.cancelAutomaticMerge()).toBeDefined();
- expect(service.removeWIP()).toBeDefined();
- expect(service.removeSourceBranch()).toBeDefined();
- expect(service.fetchDeployments()).toBeDefined();
- expect(service.poll()).toBeDefined();
- expect(service.checkStatus()).toBeDefined();
- expect(service.fetchMergeActionsContent()).toBeDefined();
- expect(MRWidgetService.stopEnvironment()).toBeDefined();
- });
-});
diff --git a/spec/javascripts/vue_shared/components/icon_spec.js b/spec/javascripts/vue_shared/components/icon_spec.js
index 104da4473ce..a22b6bd3a67 100644
--- a/spec/javascripts/vue_shared/components/icon_spec.js
+++ b/spec/javascripts/vue_shared/components/icon_spec.js
@@ -11,7 +11,7 @@ describe('Sprite Icon Component', function () {
icon = mountComponent(IconComponent, {
name: 'test',
- size: 99,
+ size: 32,
cssClasses: 'extraclasses',
});
});
@@ -34,12 +34,19 @@
});
it('should properly compute iconSizeClass', function () {
- expect(icon.iconSizeClass).toBe('s99');
+ expect(icon.iconSizeClass).toBe('s32');
+ });
+
+ it('forbids invalid size prop', () => {
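+ // Invoke the prop validator directly from $options to unit-test it in isolation.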
+ expect(icon.$options.props.size.validator(NaN)).toBeFalsy();
+ expect(icon.$options.props.size.validator(0)).toBeFalsy();
+ expect(icon.$options.props.size.validator(9001)).toBeFalsy();
});
it('should properly render img css', function () {
const classList = icon.$el.classList;
- const containsSizeClass = classList.contains('s99');
+ const containsSizeClass = classList.contains('s32');
const containsCustomClass = classList.contains('extraclasses');
expect(containsSizeClass).toBe(true);
expect(containsCustomClass).toBe(true);
diff --git a/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js b/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js
index 2cf4d8e00ed..24484796bf1 100644
--- a/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js
+++ b/spec/javascripts/vue_shared/components/issue/issue_warning_spec.js
@@ -16,7 +16,7 @@ describe('Issue Warning Component', () => {
isLocked: true,
});
- expect(vm.$el.querySelector('i').className).toEqual('fa icon fa-lock');
+ expect(vm.$el.querySelector('.icon use').href.baseVal).toMatch(/lock$/);
expect(formatWarning(vm.$el.querySelector('span').textContent)).toEqual('This issue is locked. Only project members can comment.');
});
});
@@ -27,7 +27,7 @@ describe('Issue Warning Component', () => {
isConfidential: true,
});
- expect(vm.$el.querySelector('i').className).toEqual('fa icon fa-eye-slash');
+ expect(vm.$el.querySelector('.icon use').href.baseVal).toMatch(/eye-slash$/);
expect(formatWarning(vm.$el.querySelector('span').textContent)).toEqual('This is a confidential issue. Your comment will not be visible to the public.');
});
});
@@ -39,7 +39,7 @@ describe('Issue Warning Component', () => {
isConfidential: true,
});
- expect(vm.$el.querySelector('i')).toBeFalsy();
+ expect(vm.$el.querySelector('.icon')).toBeFalsy();
expect(formatWarning(vm.$el.querySelector('span').textContent)).toEqual('This issue is confidential and locked. People without permission will never get a notification and won\'t be able to comment.');
});
});
diff --git a/spec/javascripts/vue_shared/components/loading_button_spec.js b/spec/javascripts/vue_shared/components/loading_button_spec.js
index 97c8a08fcdd..49bf8ee6f7c 100644
--- a/spec/javascripts/vue_shared/components/loading_button_spec.js
+++ b/spec/javascripts/vue_shared/components/loading_button_spec.js
@@ -66,6 +66,23 @@ describe('LoadingButton', function () {
});
});
+ describe('container class', () => {
+ it('should default to btn btn-align-content', () => {
+ vm = mountComponent(LoadingButton, {});
+ expect(vm.$el.classList.contains('btn')).toEqual(true);
+ expect(vm.$el.classList.contains('btn-align-content')).toEqual(true);
+ });
+
+ it('should be configurable through props', () => {
+ vm = mountComponent(LoadingButton, {
+ containerClass: 'test-class',
+ });
+ expect(vm.$el.classList.contains('btn')).toEqual(false);
+ expect(vm.$el.classList.contains('btn-align-content')).toEqual(false);
+ expect(vm.$el.classList.contains('test-class')).toEqual(true);
+ });
+ });
+
describe('click callback prop', () => {
it('calls given callback when normal', () => {
vm = mountComponent(LoadingButton, {
@@ -81,7 +98,6 @@ describe('LoadingButton', function () {
it('does not call given callback when disabled because of loading', () => {
vm = mountComponent(LoadingButton, {
loading: true,
- indeterminate: true,
});
spyOn(vm, '$emit');
diff --git a/spec/javascripts/vue_shared/components/markdown/toolbar_spec.js b/spec/javascripts/vue_shared/components/markdown/toolbar_spec.js
new file mode 100644
index 00000000000..818ef0af3c2
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/markdown/toolbar_spec.js
@@ -0,0 +1,37 @@
+import Vue from 'vue';
+import toolbar from '~/vue_shared/components/markdown/toolbar.vue';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
+
+describe('toolbar', () => {
+ let vm;
+ const Toolbar = Vue.extend(toolbar);
+ const props = {
+ markdownDocsPath: '',
+ };
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('user can attach file', () => {
+ beforeEach(() => {
+ vm = mountComponent(Toolbar, props);
+ });
+
+ it('should render uploading-container', () => {
+ expect(vm.$el.querySelector('.uploading-container')).not.toBeNull();
+ });
+ });
+
+ describe('user cannot attach file', () => {
+ beforeEach(() => {
+ vm = mountComponent(Toolbar, Object.assign({}, props, {
+ canAttachFile: false,
+ }));
+ });
+
+ it('should not render uploading-container', () => {
+ expect(vm.$el.querySelector('.uploading-container')).toBeNull();
+ });
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/navigation_tabs_spec.js b/spec/javascripts/vue_shared/components/navigation_tabs_spec.js
new file mode 100644
index 00000000000..78e7d747b92
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/navigation_tabs_spec.js
@@ -0,0 +1,61 @@
+import Vue from 'vue';
+import navigationTabs from '~/vue_shared/components/navigation_tabs.vue';
+import mountComponent from '../../helpers/vue_mount_component_helper';
+
+describe('navigation tabs component', () => {
+ let vm;
+ let Component;
+ let data;
+
+ beforeEach(() => {
+ data = [
+ {
+ name: 'All',
+ scope: 'all',
+ count: 1,
+ isActive: true,
+ },
+ {
+ name: 'Pending',
+ scope: 'pending',
+ count: 0,
+ isActive: false,
+ },
+ {
+ name: 'Running',
+ scope: 'running',
+ isActive: false,
+ },
+ ];
+
+ Component = Vue.extend(navigationTabs);
+ vm = mountComponent(Component, { tabs: data, scope: 'pipelines' });
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('should render tabs', () => {
+ expect(vm.$el.querySelectorAll('li').length).toEqual(data.length);
+ });
+
+ it('should render active tab', () => {
+ expect(vm.$el.querySelector('.active .js-pipelines-tab-all')).toBeDefined();
+ });
+
+ it('should render badge', () => {
+ expect(vm.$el.querySelector('.js-pipelines-tab-all .badge').textContent.trim()).toEqual('1');
+ expect(vm.$el.querySelector('.js-pipelines-tab-pending .badge').textContent.trim()).toEqual('0');
+ });
+
+ it('should not render badge', () => {
+ expect(vm.$el.querySelector('.js-pipelines-tab-running .badge')).toEqual(null);
+ });
+
+ it('should emit onChangeTab when a tab is clicked', () => {
+ spyOn(vm, '$emit');
+ vm.$el.querySelector('.js-pipelines-tab-pending').click();
+ expect(vm.$emit).toHaveBeenCalledWith('onChangeTab', 'pending');
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/pikaday_spec.js b/spec/javascripts/vue_shared/components/pikaday_spec.js
new file mode 100644
index 00000000000..47af9534737
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/pikaday_spec.js
@@ -0,0 +1,29 @@
+import Vue from 'vue';
+import datePicker from '~/vue_shared/components/pikaday.vue';
+import mountComponent from '../../helpers/vue_mount_component_helper';
+
+describe('datePicker', () => {
+ let vm;
+ beforeEach(() => {
+ const DatePicker = Vue.extend(datePicker);
+ vm = mountComponent(DatePicker, {
+ label: 'label',
+ });
+ });
+
+ it('should render label text', () => {
+ expect(vm.$el.querySelector('.dropdown-toggle-text').innerText.trim()).toEqual('label');
+ });
+
+ it('should show calendar', () => {
+ expect(vm.$el.querySelector('.pika-single')).toBeDefined();
+ });
+
+ it('should toggle when dropdown is clicked', () => {
+ const hidePicker = jasmine.createSpy();
+ vm.$on('hidePicker', hidePicker);
+
+ vm.$el.querySelector('.dropdown-menu-toggle').click();
+ expect(hidePicker).toHaveBeenCalled();
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js b/spec/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js
new file mode 100644
index 00000000000..cce53193870
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js
@@ -0,0 +1,35 @@
+import Vue from 'vue';
+import collapsedCalendarIcon from '~/vue_shared/components/sidebar/collapsed_calendar_icon.vue';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
+
+describe('collapsedCalendarIcon', () => {
+ let vm;
+ beforeEach(() => {
+ const CollapsedCalendarIcon = Vue.extend(collapsedCalendarIcon);
+ vm = mountComponent(CollapsedCalendarIcon, {
+ containerClass: 'test-class',
+ text: 'text',
+ showIcon: false,
+ });
+ });
+
+ it('should add class to container', () => {
+ expect(vm.$el.classList.contains('test-class')).toEqual(true);
+ });
+
+ it('should hide calendar icon if showIcon is false', () => {
+ expect(vm.$el.querySelector('.fa-calendar')).toBeNull();
+ });
+
+ it('should render text', () => {
+ expect(vm.$el.querySelector('span').innerText.trim()).toEqual('text');
+ });
+
+ it('should emit click event when container is clicked', () => {
+ const click = jasmine.createSpy();
+ vm.$on('click', click);
+
+ vm.$el.click();
+ expect(click).toHaveBeenCalled();
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js b/spec/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js
new file mode 100644
index 00000000000..20363e78094
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js
@@ -0,0 +1,92 @@
+import Vue from 'vue';
+import collapsedGroupedDatePicker from '~/vue_shared/components/sidebar/collapsed_grouped_date_picker.vue';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
+
+describe('collapsedGroupedDatePicker', () => {
+ let vm;
+ beforeEach(() => {
+ const CollapsedGroupedDatePicker = Vue.extend(collapsedGroupedDatePicker);
+ vm = mountComponent(CollapsedGroupedDatePicker, {
+ showToggleSidebar: true,
+ });
+ });
+
+ it('should render toggle sidebar if showToggleSidebar', (done) => {
+ expect(vm.$el.querySelector('.issuable-sidebar-header')).toBeDefined();
+
+ vm.showToggleSidebar = false;
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.issuable-sidebar-header')).toBeNull();
+ done();
+ });
+ });
+
+ describe('toggleCollapse events', () => {
+ const toggleCollapse = jasmine.createSpy();
+
+ beforeEach((done) => {
+ vm.$on('toggleCollapse', toggleCollapse);
+ vm.minDate = new Date('07/17/2016');
+ Vue.nextTick(done);
+ });
+
+ it('should emit when sidebar is toggled', () => {
+ vm.$el.querySelector('.gutter-toggle').click();
+ expect(toggleCollapse).toHaveBeenCalled();
+ });
+
+ it('should emit when collapsed-calendar-icon is clicked', () => {
+ vm.$el.querySelector('.sidebar-collapsed-icon').click();
+ expect(toggleCollapse).toHaveBeenCalled();
+ });
+ });
+
+ describe('minDate and maxDate', () => {
+ beforeEach((done) => {
+ vm.minDate = new Date('07/17/2016');
+ vm.maxDate = new Date('07/17/2017');
+ Vue.nextTick(done);
+ });
+
+ it('should render both collapsed-calendar-icons', () => {
+ const icons = vm.$el.querySelectorAll('.sidebar-collapsed-icon');
+ expect(icons.length).toEqual(2);
+ expect(icons[0].innerText.trim()).toEqual('Jul 17 2016');
+ expect(icons[1].innerText.trim()).toEqual('Jul 17 2017');
+ });
+ });
+
+ describe('minDate', () => {
+ beforeEach((done) => {
+ vm.minDate = new Date('07/17/2016');
+ Vue.nextTick(done);
+ });
+
+ it('should render minDate in collapsed-calendar-icon', () => {
+ const icons = vm.$el.querySelectorAll('.sidebar-collapsed-icon');
+ expect(icons.length).toEqual(1);
+ expect(icons[0].innerText.trim()).toEqual('From Jul 17 2016');
+ });
+ });
+
+ describe('maxDate', () => {
+ beforeEach((done) => {
+ vm.maxDate = new Date('07/17/2017');
+ Vue.nextTick(done);
+ });
+
+ it('should render maxDate in collapsed-calendar-icon', () => {
+ const icons = vm.$el.querySelectorAll('.sidebar-collapsed-icon');
+ expect(icons.length).toEqual(1);
+ expect(icons[0].innerText.trim()).toEqual('Until Jul 17 2017');
+ });
+ });
+
+ describe('no dates', () => {
+ it('should render None', () => {
+ const icons = vm.$el.querySelectorAll('.sidebar-collapsed-icon');
+ expect(icons.length).toEqual(1);
+ expect(icons[0].innerText.trim()).toEqual('None');
+ });
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/sidebar/date_picker_spec.js b/spec/javascripts/vue_shared/components/sidebar/date_picker_spec.js
new file mode 100644
index 00000000000..926e11b4d30
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/sidebar/date_picker_spec.js
@@ -0,0 +1,117 @@
+import Vue from 'vue';
+import sidebarDatePicker from '~/vue_shared/components/sidebar/date_picker.vue';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
+
+describe('sidebarDatePicker', () => {
+ let vm;
+ beforeEach(() => {
+ const SidebarDatePicker = Vue.extend(sidebarDatePicker);
+ vm = mountComponent(SidebarDatePicker, {
+ label: 'label',
+ isLoading: true,
+ });
+ });
+
+ it('should emit toggleCollapse when collapsed toggle sidebar is clicked', () => {
+ const toggleCollapse = jasmine.createSpy();
+ vm.$on('toggleCollapse', toggleCollapse);
+
+ vm.$el.querySelector('.issuable-sidebar-header .gutter-toggle').click();
+ expect(toggleCollapse).toHaveBeenCalled();
+ });
+
+ it('should render collapsed-calendar-icon', () => {
+ expect(vm.$el.querySelector('.sidebar-collapsed-icon')).toBeDefined();
+ });
+
+ it('should render label', () => {
+ expect(vm.$el.querySelector('.title').innerText.trim()).toEqual('label');
+ });
+
+ it('should render loading-icon when isLoading', () => {
+ expect(vm.$el.querySelector('.fa-spin')).toBeDefined();
+ });
+
+ it('should render value when not editing', () => {
+ expect(vm.$el.querySelector('.value-content')).toBeDefined();
+ });
+
+ it('should render None if there is no selectedDate', () => {
+ expect(vm.$el.querySelector('.value-content span').innerText.trim()).toEqual('None');
+ });
+
+ it('should render date-picker when editing', (done) => {
+ vm.editing = true;
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.pika-label')).toBeDefined();
+ done();
+ });
+ });
+
+ describe('editable', () => {
+ beforeEach((done) => {
+ vm.editable = true;
+ Vue.nextTick(done);
+ });
+
+ it('should render edit button', () => {
+ expect(vm.$el.querySelector('.title .btn-blank').innerText.trim()).toEqual('Edit');
+ });
+
+ it('should enable editing when edit button is clicked', (done) => {
+ vm.isLoading = false;
+ Vue.nextTick(() => {
+ vm.$el.querySelector('.title .btn-blank').click();
+ expect(vm.editing).toEqual(true);
+ done();
+ });
+ });
+ });
+
+ it('should render date if selectedDate', (done) => {
+ vm.selectedDate = new Date('07/07/2017');
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.value-content strong').innerText.trim()).toEqual('Jul 7, 2017');
+ done();
+ });
+ });
+
+ describe('selectedDate and editable', () => {
+ beforeEach((done) => {
+ vm.selectedDate = new Date('07/07/2017');
+ vm.editable = true;
+ Vue.nextTick(done);
+ });
+
+ it('should render remove button if selectedDate and editable', () => {
+ expect(vm.$el.querySelector('.value-content .btn-blank').innerText.trim()).toEqual('remove');
+ });
+
+ it('should emit saveDate when remove button is clicked', () => {
+ const saveDate = jasmine.createSpy();
+ vm.$on('saveDate', saveDate);
+
+ vm.$el.querySelector('.value-content .btn-blank').click();
+ expect(saveDate).toHaveBeenCalled();
+ });
+ });
+
+ describe('showToggleSidebar', () => {
+ beforeEach((done) => {
+ vm.showToggleSidebar = true;
+ Vue.nextTick(done);
+ });
+
+ it('should render toggle-sidebar when showToggleSidebar', () => {
+ expect(vm.$el.querySelector('.title .gutter-toggle')).toBeDefined();
+ });
+
+ it('should emit toggleCollapse when toggle sidebar is clicked', () => {
+ const toggleCollapse = jasmine.createSpy();
+ vm.$on('toggleCollapse', toggleCollapse);
+
+ vm.$el.querySelector('.title .gutter-toggle').click();
+ expect(toggleCollapse).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/sidebar/toggle_sidebar_spec.js b/spec/javascripts/vue_shared/components/sidebar/toggle_sidebar_spec.js
new file mode 100644
index 00000000000..752a9e89d50
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/sidebar/toggle_sidebar_spec.js
@@ -0,0 +1,33 @@
+import Vue from 'vue';
+import toggleSidebar from '~/vue_shared/components/sidebar/toggle_sidebar.vue';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
+
+describe('toggleSidebar', () => {
+ let vm;
+ beforeEach(() => {
+ const ToggleSidebar = Vue.extend(toggleSidebar);
+ vm = mountComponent(ToggleSidebar, {
+ collapsed: true,
+ });
+ });
+
+ it('should render << when collapsed', () => {
+ expect(vm.$el.querySelector('.fa').classList.contains('fa-angle-double-left')).toEqual(true);
+ });
+
+ it('should render >> when expanded', (done) => {
+ vm.collapsed = false;
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.fa').classList.contains('fa-angle-double-right')).toEqual(true);
+ done();
+ });
+ });
+
+ it('should emit toggle event when button clicked', () => {
+ const toggle = jasmine.createSpy();
+ vm.$on('toggle', toggle);
+ vm.$el.click();
+
+ expect(toggle).toHaveBeenCalled();
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/toggle_button_spec.js b/spec/javascripts/vue_shared/components/toggle_button_spec.js
new file mode 100644
index 00000000000..447d74d4e08
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/toggle_button_spec.js
@@ -0,0 +1,91 @@
+import Vue from 'vue';
+import toggleButton from '~/vue_shared/components/toggle_button.vue';
+import mountComponent from '../../helpers/vue_mount_component_helper';
+
+describe('Toggle Button', () => {
+ let vm;
+ let Component;
+
+ beforeEach(() => {
+ Component = Vue.extend(toggleButton);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('render output', () => {
+ beforeEach(() => {
+ vm = mountComponent(Component, {
+ value: true,
+ name: 'foo',
+ });
+ });
+
+ it('renders input with provided name', () => {
+ expect(vm.$el.querySelector('input').getAttribute('name')).toEqual('foo');
+ });
+
+ it('renders input with provided value', () => {
+ expect(vm.$el.querySelector('input').getAttribute('value')).toEqual('true');
+ });
+
+ it('renders Enabled and Disabled text data attributes', () => {
+ expect(vm.$el.querySelector('button').getAttribute('data-enabled-text')).toEqual('Enabled');
+ expect(vm.$el.querySelector('button').getAttribute('data-disabled-text')).toEqual('Disabled');
+ });
+ });
+
+ describe('is-checked', () => {
+ beforeEach(() => {
+ vm = mountComponent(Component, {
+ value: true,
+ });
+
+ spyOn(vm, '$emit');
+ });
+
+ it('renders is checked class', () => {
+ expect(vm.$el.querySelector('button').classList.contains('is-checked')).toEqual(true);
+ });
+
+ it('emits change event when clicked', () => {
+ vm.$el.querySelector('button').click();
+
+ expect(vm.$emit).toHaveBeenCalledWith('change', false);
+ });
+ });
+
+ describe('is-disabled', () => {
+ beforeEach(() => {
+ vm = mountComponent(Component, {
+ value: true,
+ disabledInput: true,
+ });
+ spyOn(vm, '$emit');
+ });
+
+ it('renders disabled button', () => {
+ expect(vm.$el.querySelector('button').classList.contains('is-disabled')).toEqual(true);
+ });
+
+ it('does not emit change event when clicked', () => {
+ vm.$el.querySelector('button').click();
+
+ expect(vm.$emit).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('is-loading', () => {
+ beforeEach(() => {
+ vm = mountComponent(Component, {
+ value: true,
+ isLoading: true,
+ });
+ });
+
+ it('renders loading class', () => {
+ expect(vm.$el.querySelector('button').classList.contains('is-loading')).toEqual(true);
+ });
+ });
+});
diff --git a/spec/javascripts/zen_mode_spec.js b/spec/javascripts/zen_mode_spec.js
index 7047053d131..45a0bb0650f 100644
--- a/spec/javascripts/zen_mode_spec.js
+++ b/spec/javascripts/zen_mode_spec.js
@@ -1,77 +1,94 @@
-/* eslint-disable space-before-function-paren, no-var, one-var, one-var-declaration-per-line, object-shorthand, comma-dangle, no-return-assign, new-cap, max-len */
/* global Mousetrap */
import Dropzone from 'dropzone';
import ZenMode from '~/zen_mode';
-(function() {
- var enterZen, escapeKeydown, exitZen;
-
- describe('ZenMode', function() {
- var fixtureName = 'merge_requests/merge_request_with_comment.html.raw';
- preloadFixtures(fixtureName);
- beforeEach(function() {
- loadFixtures(fixtureName);
- spyOn(Dropzone, 'forElement').and.callFake(function() {
- return {
- enable: function() {
- return true;
- }
- };
- // Stub Dropzone.forElement(...).enable()
- });
- this.zen = new ZenMode();
- // Set this manually because we can't actually scroll the window
- return this.zen.scroll_position = 456;
+describe('ZenMode', () => {
+ let zen;
+ const fixtureName = 'merge_requests/merge_request_with_comment.html.raw';
+
+ preloadFixtures(fixtureName);
+
+ function enterZen() {
+ $('.notes-form .js-zen-enter').click();
+ }
+
+ function exitZen() {
+ $('.notes-form .js-zen-leave').click();
+ }
+
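+ // keyCode 27 is Escape; a synthetic jQuery event stands in for a real keypress.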
+ function escapeKeydown() {
+ $('.notes-form textarea').trigger($.Event('keydown', {
+ keyCode: 27,
+ }));
+ }
+
+ beforeEach(() => {
+ loadFixtures(fixtureName);
+
+ spyOn(Dropzone, 'forElement').and.callFake(() => ({
+ enable: () => true,
+ }));
+ zen = new ZenMode();
+
+ // Set this manually because we can't actually scroll the window
+ zen.scroll_position = 456;
+ });
+
+ describe('on enter', () => {
+ it('pauses Mousetrap', () => {
+ spyOn(Mousetrap, 'pause');
+ enterZen();
+ expect(Mousetrap.pause).toHaveBeenCalled();
});
- describe('on enter', function() {
- it('pauses Mousetrap', function() {
- spyOn(Mousetrap, 'pause');
- enterZen();
- return expect(Mousetrap.pause).toHaveBeenCalled();
- });
- return it('removes textarea styling', function() {
- $('.notes-form textarea').attr('style', 'height: 400px');
- enterZen();
- return expect($('.notes-form textarea')).not.toHaveAttr('style');
- });
+
+ it('removes textarea styling', () => {
+ $('.notes-form textarea').attr('style', 'height: 400px');
+ enterZen();
+ expect($('.notes-form textarea')).not.toHaveAttr('style');
});
- describe('in use', function() {
- beforeEach(function() {
- return enterZen();
- });
- return it('exits on Escape', function() {
- escapeKeydown();
- return expect($('.notes-form .zen-backdrop')).not.toHaveClass('fullscreen');
- });
+ });
+
+ describe('in use', () => {
+ beforeEach(enterZen);
+
+ it('exits on Escape', () => {
+ escapeKeydown();
+ expect($('.notes-form .zen-backdrop')).not.toHaveClass('fullscreen');
+ });
+ });
+
+ describe('on exit', () => {
+ beforeEach(enterZen);
+
+ it('unpauses Mousetrap', () => {
+ spyOn(Mousetrap, 'unpause');
+ exitZen();
+ expect(Mousetrap.unpause).toHaveBeenCalled();
});
- return describe('on exit', function() {
- beforeEach(function() {
- return enterZen();
- });
- it('unpauses Mousetrap', function() {
- spyOn(Mousetrap, 'unpause');
- exitZen();
- return expect(Mousetrap.unpause).toHaveBeenCalled();
- });
- return it('restores the scroll position', function() {
- spyOn(this.zen, 'scrollTo');
- exitZen();
- return expect(this.zen.scrollTo).toHaveBeenCalled();
- });
+
+ it('restores the scroll position', () => {
+ spyOn(zen, 'scrollTo');
+ exitZen();
+ expect(zen.scrollTo).toHaveBeenCalled();
});
});
- enterZen = function() {
- return $('.notes-form .js-zen-enter').click();
- };
+ describe('enabling dropzone', () => {
+ beforeEach(() => {
+ enterZen();
+ });
- exitZen = function() {
- return $('.notes-form .js-zen-leave').click();
- };
+ it('should not call Dropzone.forElement if the element is not a valid dropzone', () => {
+ $('.div-dropzone').addClass('js-invalid-dropzone');
+ exitZen();
+ expect(Dropzone.forElement).not.toHaveBeenCalled();
+ });
- escapeKeydown = function() {
- return $('.notes-form textarea').trigger($.Event('keydown', {
- keyCode: 27
- }));
- };
-}).call(window);
+ it('should call Dropzone.forElement if the element is a valid dropzone', () => {
+ $('.div-dropzone').removeClass('js-invalid-dropzone');
+ exitZen();
+ expect(Dropzone.forElement).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/lib/api/helpers/pagination_spec.rb b/spec/lib/api/helpers/pagination_spec.rb
index 59deca7757b..a547988d631 100644
--- a/spec/lib/api/helpers/pagination_spec.rb
+++ b/spec/lib/api/helpers/pagination_spec.rb
@@ -92,6 +92,28 @@
subject.paginate(resource)
end
end
+
+ context 'if order' do
+ it 'is not present it adds a default order(:id)' do
+ resource.order_values = []
+
+ paginated_relation = subject.paginate(resource)
+
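+ # the original relation must stay untouched; only the paginated copy gains order(:id)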
+ expect(resource.order_values).to be_empty
+ expect(paginated_relation.order_values).to be_present
+ expect(paginated_relation.order_values.first).to be_ascending
+ expect(paginated_relation.order_values.first.expr.name).to eq :id
+ end
+
+ it 'is present it does not add anything' do
+ paginated_relation = subject.paginate(resource.order(created_at: :desc))
+
+ expect(paginated_relation.order_values).to be_present
+ expect(paginated_relation.order_values.first).to be_descending
+ expect(paginated_relation.order_values.first.expr.name).to eq :created_at
+ end
+ end
end
context 'when resource empty' do
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
new file mode 100644
index 00000000000..3c4deba4712
--- /dev/null
+++ b/spec/lib/api/helpers_spec.rb
@@ -0,0 +1,111 @@
+require 'spec_helper'
+
+describe API::Helpers do
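+ # An anonymous class that includes the helpers lets these examples exercise
+ # the module without booting a full Grape API context.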
+ subject { Class.new.include(described_class).new }
+
+ describe '#find_namespace' do
+ let(:namespace) { create(:namespace) }
+
+ shared_examples 'namespace finder' do
+ context 'when namespace exists' do
+ it 'returns requested namespace' do
+ expect(subject.find_namespace(existing_id)).to eq(namespace)
+ end
+ end
+
+ context "when namespace doesn't exists" do
+ it 'returns nil' do
+ expect(subject.find_namespace(non_existing_id)).to be_nil
+ end
+ end
+ end
+
+ context 'when ID is used as an argument' do
+ let(:existing_id) { namespace.id }
+ let(:non_existing_id) { 9999 }
+
+ it_behaves_like 'namespace finder'
+ end
+
+ context 'when PATH is used as an argument' do
+ let(:existing_id) { namespace.path }
+ let(:non_existing_id) { 'non-existing-path' }
+
+ it_behaves_like 'namespace finder'
+ end
+ end
+
+ shared_examples 'user namespace finder' do
+ let(:user1) { create(:user) }
+
+ before do
+ allow(subject).to receive(:current_user).and_return(user1)
+ allow(subject).to receive(:header).and_return(nil)
+ allow(subject).to receive(:not_found!).and_raise('404 Namespace not found')
+ end
+
+ context 'when namespace is group' do
+ let(:namespace) { create(:group) }
+
+ context 'when user has access to group' do
+ before do
+ namespace.add_guest(user1)
+ namespace.save!
+ end
+
+ it 'returns requested namespace' do
+ expect(namespace_finder).to eq(namespace)
+ end
+ end
+
+ context "when user doesn't have access to group" do
+ it 'raises not found error' do
+ expect { namespace_finder }.to raise_error(RuntimeError, '404 Namespace not found')
+ end
+ end
+ end
+
+ context "when namespace is user's personal namespace" do
+ let(:namespace) { create(:namespace) }
+
+ context 'when user owns the namespace' do
+ before do
+ namespace.owner = user1
+ namespace.save!
+ end
+
+ it 'returns requested namespace' do
+ expect(namespace_finder).to eq(namespace)
+ end
+ end
+
+ context "when user doesn't own the namespace" do
+ it 'raises not found error' do
+ expect { namespace_finder }.to raise_error(RuntimeError, '404 Namespace not found')
+ end
+ end
+ end
+ end
+
+ describe '#find_namespace!' do
+ let(:namespace_finder) do
+ subject.find_namespace!(namespace.id)
+ end
+
+ it_behaves_like 'user namespace finder'
+ end
+
+ describe '#user_namespace' do
+ let(:namespace_finder) do
+ subject.user_namespace
+ end
+
+ before do
+ allow(subject).to receive(:params).and_return({ id: namespace.id })
+ end
+
+ it_behaves_like 'user namespace finder'
+ end
+end
diff --git a/spec/lib/gitlab/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index b68301a066a..b68301a066a 100644
--- a/spec/lib/gitlab/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
diff --git a/spec/lib/backup/repository_spec.rb b/spec/lib/backup/repository_spec.rb
new file mode 100644
index 00000000000..6ee3d531d6e
--- /dev/null
+++ b/spec/lib/backup/repository_spec.rb
@@ -0,0 +1,70 @@
+require 'spec_helper'
+
+describe Backup::Repository do
+ let(:progress) { StringIO.new }
+ let!(:project) { create(:project) }
+
+ before do
+ allow(progress).to receive(:puts)
+ allow(progress).to receive(:print)
+
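+ # Make String#color a no-op so message expectations match plain, uncolored output.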
+ allow_any_instance_of(String).to receive(:color) do |string, _color|
+ string
+ end
+
+ allow_any_instance_of(described_class).to receive(:progress).and_return(progress)
+ end
+
+ describe '#dump' do
+ describe 'repo failure' do
+ before do
+ allow(Gitlab::Popen).to receive(:popen).and_return(['normal output', 0])
+ end
+
+ it 'does not raise error' do
+ expect { described_class.new.dump }.not_to raise_error
+ end
+ end
+ end
+
+ describe '#restore' do
+ describe 'command failure' do
+ before do
+ allow(Gitlab::Popen).to receive(:popen).and_return(['error', 1])
+ end
+
+ it 'shows the appropriate error' do
+ described_class.new.restore
+
+ expect(progress).to have_received(:puts).with("Ignoring error on #{project.full_path} - error")
+ end
+ end
+ end
+
+ describe '#empty_repo?' do
+ context 'for a wiki' do
+ let(:wiki) { create(:project_wiki) }
+
+ it 'invalidates the emptiness cache' do
+ expect(wiki.repository).to receive(:expire_emptiness_caches).once
+
+ wiki.empty?
+ end
+
+ context 'wiki repo has content' do
+ let!(:wiki_page) { create(:wiki_page, wiki: wiki) }
+
+ it 'returns false, regardless of bad cache value' do
+ expect(described_class.new.send(:empty_repo?, wiki)).to be(false)
+ end
+ end
+
+ context 'wiki repo does not have content' do
+ it 'returns true, regardless of bad cache value' do
+ expect(described_class.new.send(:empty_repo?, wiki)).to be_truthy
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/banzai/cross_project_reference_spec.rb b/spec/lib/banzai/cross_project_reference_spec.rb
index d70749536b8..68ca960caab 100644
--- a/spec/lib/banzai/cross_project_reference_spec.rb
+++ b/spec/lib/banzai/cross_project_reference_spec.rb
@@ -3,20 +3,20 @@ require 'spec_helper'
describe Banzai::CrossProjectReference do
include described_class
- describe '#project_from_ref' do
+ describe '#parent_from_ref' do
context 'when no project was referenced' do
it 'returns the project from context' do
project = double
allow(self).to receive(:context).and_return({ project: project })
- expect(project_from_ref(nil)).to eq project
+ expect(parent_from_ref(nil)).to eq project
end
end
context 'when referenced project does not exist' do
it 'returns nil' do
- expect(project_from_ref('invalid/reference')).to be_nil
+ expect(parent_from_ref('invalid/reference')).to be_nil
end
end
@@ -27,7 +27,7 @@ describe Banzai::CrossProjectReference do
expect(Project).to receive(:find_by_full_path)
.with('cross/reference').and_return(project2)
- expect(project_from_ref('cross/reference')).to eq project2
+ expect(parent_from_ref('cross/reference')).to eq project2
end
end
end
diff --git a/spec/lib/banzai/filter/abstract_reference_filter_spec.rb b/spec/lib/banzai/filter/abstract_reference_filter_spec.rb
index 7c0ba9ee67f..1e82d18d056 100644
--- a/spec/lib/banzai/filter/abstract_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/abstract_reference_filter_spec.rb
@@ -3,67 +3,67 @@ require 'spec_helper'
describe Banzai::Filter::AbstractReferenceFilter do
let(:project) { create(:project) }
- describe '#references_per_project' do
- it 'returns a Hash containing references grouped per project paths' do
+ describe '#references_per_parent' do
+ it 'returns a Hash containing references grouped per parent paths' do
doc = Nokogiri::HTML.fragment("#1 #{project.full_path}#2")
filter = described_class.new(doc, project: project)
expect(filter).to receive(:object_class).exactly(4).times.and_return(Issue)
expect(filter).to receive(:object_sym).twice.and_return(:issue)
- refs = filter.references_per_project
+ refs = filter.references_per_parent
expect(refs).to be_an_instance_of(Hash)
expect(refs[project.full_path]).to eq(Set.new(%w[1 2]))
end
end
- describe '#projects_per_reference' do
- it 'returns a Hash containing projects grouped per project paths' do
+ describe '#parent_per_reference' do
+ it 'returns a Hash containing projects grouped per parent paths' do
doc = Nokogiri::HTML.fragment('')
filter = described_class.new(doc, project: project)
- expect(filter).to receive(:references_per_project)
+ expect(filter).to receive(:references_per_parent)
.and_return({ project.full_path => Set.new(%w[1]) })
- expect(filter.projects_per_reference)
+ expect(filter.parent_per_reference)
.to eq({ project.full_path => project })
end
end
- describe '#find_projects_for_paths' do
+ describe '#find_for_paths' do
let(:doc) { Nokogiri::HTML.fragment('') }
let(:filter) { described_class.new(doc, project: project) }
context 'with RequestStore disabled' do
it 'returns a list of Projects for a list of paths' do
- expect(filter.find_projects_for_paths([project.full_path]))
+ expect(filter.find_for_paths([project.full_path]))
.to eq([project])
end
it "return an empty array for paths that don't exist" do
- expect(filter.find_projects_for_paths(['nonexistent/project']))
+ expect(filter.find_for_paths(['nonexistent/project']))
.to eq([])
end
end
context 'with RequestStore enabled', :request_store do
it 'returns a list of Projects for a list of paths' do
- expect(filter.find_projects_for_paths([project.full_path]))
+ expect(filter.find_for_paths([project.full_path]))
.to eq([project])
end
context "when no project with that path exists" do
it "returns no value" do
- expect(filter.find_projects_for_paths(['nonexistent/project']))
+ expect(filter.find_for_paths(['nonexistent/project']))
.to eq([])
end
it "adds the ref to the project refs cache" do
project_refs_cache = {}
- allow(filter).to receive(:project_refs_cache).and_return(project_refs_cache)
+ allow(filter).to receive(:refs_cache).and_return(project_refs_cache)
- filter.find_projects_for_paths(['nonexistent/project'])
+ filter.find_for_paths(['nonexistent/project'])
expect(project_refs_cache).to eq({ 'nonexistent/project' => nil })
end
@@ -71,11 +71,11 @@ describe Banzai::Filter::AbstractReferenceFilter do
context 'when the project refs cache includes nil values' do
before do
# adds { 'nonexistent/project' => nil } to cache
- filter.project_from_ref_cached('nonexistent/project')
+ filter.from_ref_cached('nonexistent/project')
end
it "return an empty array for paths that don't exist" do
- expect(filter.find_projects_for_paths(['nonexistent/project']))
+ expect(filter.find_for_paths(['nonexistent/project']))
.to eq([])
end
end
@@ -83,12 +83,12 @@ describe Banzai::Filter::AbstractReferenceFilter do
end
end
- describe '#current_project_path' do
- it 'returns the path of the current project' do
+ describe '#current_parent_path' do
+ it 'returns the path of the current parent' do
doc = Nokogiri::HTML.fragment('')
filter = described_class.new(doc, project: project)
- expect(filter.current_project_path).to eq(project.full_path)
+ expect(filter.current_parent_path).to eq(project.full_path)
end
end
end
diff --git a/spec/lib/banzai/filter/commit_reference_filter_spec.rb b/spec/lib/banzai/filter/commit_reference_filter_spec.rb
index 702fcac0c6f..080a5f57da9 100644
--- a/spec/lib/banzai/filter/commit_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/commit_reference_filter_spec.rb
@@ -92,6 +92,18 @@ describe Banzai::Filter::CommitReferenceFilter do
expect(link).not_to match %r(https?://)
expect(link).to eq urls.project_commit_url(project, reference, only_path: true)
end
+
+ context "in merge request context" do
+ let(:noteable) { create(:merge_request, target_project: project, source_project: project) }
+ let(:commit) { noteable.commits.first }
+
+ it 'handles merge request contextual commit references' do
+ url = urls.diffs_project_merge_request_url(project, noteable, commit_id: commit.id)
+ doc = reference_filter("See #{reference}", noteable: noteable)
+
+ expect(doc.css('a').first[:href]).to eq(url)
+ end
+ end
end
context 'cross-project / cross-namespace complete reference' do
diff --git a/spec/lib/banzai/filter/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
index 3c98b18f99b..3a5f52ea23f 100644
--- a/spec/lib/banzai/filter/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
@@ -157,6 +157,12 @@ describe Banzai::Filter::IssueReferenceFilter do
expect(doc.text).to eq("Fixed (#{project2.full_path}##{issue.iid}.)")
end
+ it 'includes default classes' do
+ doc = reference_filter("Fixed (#{reference}.)")
+
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ end
+
it 'ignores invalid issue IDs on the referenced project' do
exp = act = "Fixed #{invalidate_reference(reference)}"
@@ -201,6 +207,12 @@ describe Banzai::Filter::IssueReferenceFilter do
expect(doc.text).to eq("Fixed (#{project2.path}##{issue.iid}.)")
end
+ it 'includes default classes' do
+ doc = reference_filter("Fixed (#{reference}.)")
+
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ end
+
it 'ignores invalid issue IDs on the referenced project' do
exp = act = "Fixed #{invalidate_reference(reference)}"
@@ -245,6 +257,12 @@ describe Banzai::Filter::IssueReferenceFilter do
expect(doc.text).to eq("Fixed (#{project2.path}##{issue.iid}.)")
end
+ it 'includes default classes' do
+ doc = reference_filter("Fixed (#{reference}.)")
+
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ end
+
it 'ignores invalid issue IDs on the referenced project' do
exp = act = "Fixed #{invalidate_reference(reference)}"
@@ -269,8 +287,15 @@ describe Banzai::Filter::IssueReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Fixed (#{reference}.)")
+
expect(doc.to_html).to match(/\(<a.+>#{Regexp.escape(issue.to_reference(project))} \(comment 123\)<\/a>\.\)/)
end
+
+ it 'includes default classes' do
+ doc = reference_filter("Fixed (#{reference}.)")
+
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ end
end
context 'cross-project reference in link href' do
@@ -291,8 +316,15 @@ describe Banzai::Filter::IssueReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Fixed (#{reference_link}.)")
+
expect(doc.to_html).to match(/\(<a.+>Reference<\/a>\.\)/)
end
+
+ it 'includes default classes' do
+ doc = reference_filter("Fixed (#{reference_link}.)")
+
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ end
end
context 'cross-project URL in link href' do
@@ -313,8 +345,15 @@ describe Banzai::Filter::IssueReferenceFilter do
it 'links with adjacent text' do
doc = reference_filter("Fixed (#{reference_link}.)")
+
expect(doc.to_html).to match(/\(<a.+>Reference<\/a>\.\)/)
end
+
+ it 'includes default classes' do
+ doc = reference_filter("Fixed (#{reference_link}.)")
+
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ end
end
context 'group context' do
@@ -343,7 +382,9 @@ describe Banzai::Filter::IssueReferenceFilter do
reference = "#{project.full_path}##{issue.iid}"
doc = reference_filter("See #{reference}", context)
- expect(doc.css('a').first.attr('href')).to eq helper.url_for_issue(issue.iid, project)
+ link = doc.css('a').first
+ expect(link.attr('href')).to eq(helper.url_for_issue(issue.iid, project))
+ expect(link.text).to include("#{project.full_path}##{issue.iid}")
end
it 'ignores reference for shorthand cross-reference' do
@@ -358,7 +399,9 @@ describe Banzai::Filter::IssueReferenceFilter do
doc = reference_filter("See #{reference}", context)
- expect(doc.css('a').first.attr('href')).to eq(helper.url_for_issue(issue.iid, project) + "#note_123")
+ link = doc.css('a').first
+ expect(link.attr('href')).to eq(helper.url_for_issue(issue.iid, project) + "#note_123")
+ expect(link.text).to include("#{project.full_path}##{issue.iid}")
end
it 'links to a valid reference for cross-reference in link href' do
@@ -367,7 +410,9 @@ describe Banzai::Filter::IssueReferenceFilter do
doc = reference_filter("See #{reference_link}", context)
- expect(doc.css('a').first.attr('href')).to eq helper.url_for_issue(issue.iid, project) + "#note_123"
+ link = doc.css('a').first
+ expect(link.attr('href')).to eq(helper.url_for_issue(issue.iid, project) + "#note_123")
+ expect(link.text).to include('Reference')
end
it 'links to a valid reference for issue reference in the link href' do
@@ -375,23 +420,25 @@ describe Banzai::Filter::IssueReferenceFilter do
reference_link = %{<a href="#{reference}">Reference</a>}
doc = reference_filter("See #{reference_link}", context)
- expect(doc.css('a').first.attr('href')).to eq helper.url_for_issue(issue.iid, project)
+ link = doc.css('a').first
+ expect(link.attr('href')).to eq(helper.url_for_issue(issue.iid, project))
+ expect(link.text).to include('Reference')
end
end
- describe '#issues_per_project' do
+ describe '#records_per_parent' do
context 'using an internal issue tracker' do
it 'returns a Hash containing the issues per project' do
doc = Nokogiri::HTML.fragment('')
filter = described_class.new(doc, project: project)
- expect(filter).to receive(:projects_per_reference)
+ expect(filter).to receive(:parent_per_reference)
.and_return({ project.full_path => project })
- expect(filter).to receive(:references_per_project)
+ expect(filter).to receive(:references_per_parent)
.and_return({ project.full_path => Set.new([issue.iid]) })
- expect(filter.issues_per_project)
+ expect(filter.records_per_parent)
.to eq({ project => { issue.iid => issue } })
end
end
diff --git a/spec/lib/banzai/filter/mermaid_filter_spec.rb b/spec/lib/banzai/filter/mermaid_filter_spec.rb
new file mode 100644
index 00000000000..532d25e121d
--- /dev/null
+++ b/spec/lib/banzai/filter/mermaid_filter_spec.rb
@@ -0,0 +1,12 @@
+require 'spec_helper'
+
+describe Banzai::Filter::MermaidFilter do
+ include FilterSpecHelper
+
+ it 'adds `js-render-mermaid` class to the `pre` tag' do
+ doc = filter("<pre class='code highlight js-syntax-highlight mermaid' lang='mermaid' v-pre='true'><code>graph TD;\n A--&gt;B;\n</code></pre>")
+ result = doc.xpath('descendant-or-self::pre').first
+
+ expect(result[:class]).to include('js-render-mermaid')
+ end
+end
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index 5a23e0e70cc..9f2efa05a01 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -31,7 +31,7 @@ describe Banzai::Filter::SyntaxHighlightFilter do
it "highlights as plaintext" do
result = filter('<pre><code lang="ruby">This is a test</code></pre>')
- expect(result.to_html).to eq('<pre class="code highlight" lang="" v-pre="true"><code>This is a test</code></pre>')
+ expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code>This is a test</code></pre>')
end
end
end
diff --git a/spec/lib/banzai/filter/upload_link_filter_spec.rb b/spec/lib/banzai/filter/upload_link_filter_spec.rb
index 60a88e903ef..76bc0c36ab7 100644
--- a/spec/lib/banzai/filter/upload_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/upload_link_filter_spec.rb
@@ -89,7 +89,35 @@ describe Banzai::Filter::UploadLinkFilter do
end
end
- context 'when project context does not exist' do
+ context 'in group context' do
+ let(:upload_link) { link('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg') }
+ let(:group) { create(:group) }
+ let(:filter_context) { { project: nil, group: group } }
+ let(:relative_path) { "groups/#{group.full_path}/-/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg" }
+
+ it 'rewrites the link correctly' do
+ doc = raw_filter(upload_link, filter_context)
+
+ expect(doc.at_css('a')['href']).to eq("#{Gitlab.config.gitlab.url}/#{relative_path}")
+ end
+
+ it 'rewrites the link correctly for subgroup' do
+ subgroup = create(:group, parent: group)
+ relative_path = "groups/#{subgroup.full_path}/-/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg"
+
+ doc = raw_filter(upload_link, { project: nil, group: subgroup })
+
+ expect(doc.at_css('a')['href']).to eq("#{Gitlab.config.gitlab.url}/#{relative_path}")
+ end
+
+ it 'does not modify absolute URL' do
+ doc = filter(link('http://example.com'), filter_context)
+
+ expect(doc.at_css('a')['href']).to eq 'http://example.com'
+ end
+ end
+
+ context 'when project or group context does not exist' do
let(:upload_link) { link('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg') }
it 'does not raise error' do
diff --git a/spec/lib/banzai/reference_parser/issue_parser_spec.rb b/spec/lib/banzai/reference_parser/issue_parser_spec.rb
index 23dbe2b6238..4cef3bdb24b 100644
--- a/spec/lib/banzai/reference_parser/issue_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/issue_parser_spec.rb
@@ -70,12 +70,12 @@ describe Banzai::ReferenceParser::IssueParser do
end
end
- describe '#issues_for_nodes' do
+ describe '#records_for_nodes' do
it 'returns a Hash containing the issues for a list of nodes' do
link['data-issue'] = issue.id.to_s
nodes = [link]
- expect(subject.issues_for_nodes(nodes)).to eq({ link => issue })
+ expect(subject.records_for_nodes(nodes)).to eq({ link => issue })
end
end
end
diff --git a/spec/lib/container_registry/path_spec.rb b/spec/lib/container_registry/path_spec.rb
index 84cacdd3f0d..010deae822c 100644
--- a/spec/lib/container_registry/path_spec.rb
+++ b/spec/lib/container_registry/path_spec.rb
@@ -86,6 +86,24 @@ describe ContainerRegistry::Path do
it { is_expected.to be_valid }
end
+
+ context 'when path contains double underscore' do
+ let(:path) { 'my/repository__name' }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when path contains invalid separator with dot' do
+ let(:path) { 'some/registry-.name' }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when path contains invalid separator with underscore' do
+ let(:path) { 'some/registry._name' }
+
+ it { is_expected.not_to be_valid }
+ end
end
describe '#has_repository?' do
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 1076c63b5f2..10020511bf8 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -13,6 +13,47 @@ describe Feature do
end
end
+ describe '.persisted_names' do
+ it 'returns the names of the persisted features' do
+ Feature::FlipperFeature.create!(key: 'foo')
+
+ expect(described_class.persisted_names).to eq(%w[foo])
+ end
+
+ it 'returns an empty Array when no features are persisted' do
+ expect(described_class.persisted_names).to be_empty
+ end
+
+ it 'caches the feature names when request store is active', :request_store do
+ Feature::FlipperFeature.create!(key: 'foo')
+
+ expect(Feature::FlipperFeature)
+ .to receive(:feature_names)
+ .once
+ .and_call_original
+
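+ # The second call should be served from the RequestStore cache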
+ 2.times do
+ expect(described_class.persisted_names).to eq(%w[foo])
+ end
+ end
+ end
+
+ describe '.persisted?' do
+ it 'returns true for a persisted feature' do
+ Feature::FlipperFeature.create!(key: 'foo')
+
+ feature = double(:feature, name: 'foo')
+
+ expect(described_class.persisted?(feature)).to eq(true)
+ end
+
+ it 'returns false for a feature that is not persisted' do
+ feature = double(:feature, name: 'foo')
+
+ expect(described_class.persisted?(feature)).to eq(false)
+ end
+ end
+
describe '.all' do
let(:features) { Set.new }
diff --git a/spec/lib/github/client_spec.rb b/spec/lib/github/client_spec.rb
deleted file mode 100644
index b846096fe25..00000000000
--- a/spec/lib/github/client_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-require 'spec_helper'
-
-describe Github::Client do
- let(:connection) { spy }
- let(:rate_limit) { double(get: [false, 1]) }
- let(:client) { described_class.new({}) }
- let(:results) { double }
- let(:response) { double }
-
- before do
- allow(Faraday).to receive(:new).and_return(connection)
- allow(Github::RateLimit).to receive(:new).with(connection).and_return(rate_limit)
- end
-
- describe '#get' do
- before do
- allow(Github::Response).to receive(:new).with(results).and_return(response)
- end
-
- it 'uses a default per_page param' do
- expect(connection).to receive(:get).with('/foo', per_page: 100).and_return(results)
-
- expect(client.get('/foo')).to eq(response)
- end
-
- context 'with per_page given' do
- it 'overwrites the default per_page' do
- expect(connection).to receive(:get).with('/foo', per_page: 30).and_return(results)
-
- expect(client.get('/foo', per_page: 30)).to eq(response)
- end
- end
- end
-end
diff --git a/spec/lib/github/import/legacy_diff_note_spec.rb b/spec/lib/github/import/legacy_diff_note_spec.rb
deleted file mode 100644
index 8c50b46cacb..00000000000
--- a/spec/lib/github/import/legacy_diff_note_spec.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-require 'spec_helper'
-
-describe Github::Import::LegacyDiffNote do
- describe '#type' do
- it 'returns the original note type' do
- expect(described_class.new.type).to eq('LegacyDiffNote')
- end
- end
-end
diff --git a/spec/lib/github/import/note_spec.rb b/spec/lib/github/import/note_spec.rb
deleted file mode 100644
index fcdccd9e097..00000000000
--- a/spec/lib/github/import/note_spec.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-require 'spec_helper'
-
-describe Github::Import::Note do
- describe '#type' do
- it 'returns the original note type' do
- expect(described_class.new.type).to eq('Note')
- end
- end
-end
diff --git a/spec/lib/gitlab/auth/request_authenticator_spec.rb b/spec/lib/gitlab/auth/request_authenticator_spec.rb
new file mode 100644
index 00000000000..ffcd90b9fcb
--- /dev/null
+++ b/spec/lib/gitlab/auth/request_authenticator_spec.rb
@@ -0,0 +1,67 @@
+require 'spec_helper'
+
+describe Gitlab::Auth::RequestAuthenticator do
+ let(:env) do
+ {
+ 'rack.input' => '',
+ 'REQUEST_METHOD' => 'GET'
+ }
+ end
+ let(:request) { ActionDispatch::Request.new(env) }
+
+ subject { described_class.new(request) }
+
+ describe '#user' do
+ let!(:sessionless_user) { build(:user) }
+ let!(:session_user) { build(:user) }
+
+ it 'returns sessionless user first' do
+ allow_any_instance_of(described_class).to receive(:find_sessionless_user).and_return(sessionless_user)
+ allow_any_instance_of(described_class).to receive(:find_user_from_warden).and_return(session_user)
+
+ expect(subject.user).to eq sessionless_user
+ end
+
+ it 'returns session user if no sessionless user found' do
+ allow_any_instance_of(described_class).to receive(:find_user_from_warden).and_return(session_user)
+
+ expect(subject.user).to eq session_user
+ end
+
+ it 'returns nil if no user found' do
+ expect(subject.user).to be_blank
+ end
+
+ it 'bubbles up exceptions' do
+ allow_any_instance_of(described_class).to receive(:find_user_from_warden).and_raise(Gitlab::Auth::UnauthorizedError)
+
+ expect { subject.user }.to raise_error(Gitlab::Auth::UnauthorizedError)
+ end
+ end
+
+ describe '#find_sessionless_user' do
+ let!(:access_token_user) { build(:user) }
+ let!(:rss_token_user) { build(:user) }
+
+ it 'returns access_token user first' do
+ allow_any_instance_of(described_class).to receive(:find_user_from_access_token).and_return(access_token_user)
+ allow_any_instance_of(described_class).to receive(:find_user_from_rss_token).and_return(rss_token_user)
+
+ expect(subject.find_sessionless_user).to eq access_token_user
+ end
+
+ it 'returns rss_token user if no access_token user found' do
+ allow_any_instance_of(described_class).to receive(:find_user_from_rss_token).and_return(rss_token_user)
+
+ expect(subject.find_sessionless_user).to eq rss_token_user
+ end
+
+ it 'returns nil if no user found' do
+ expect(subject.find_sessionless_user).to be_blank
+ end
+
+ it 'rescues Gitlab::Auth::AuthenticationError exceptions' do
+ allow_any_instance_of(described_class).to receive(:find_user_from_access_token).and_raise(Gitlab::Auth::UnauthorizedError)
+
+ expect(subject.find_sessionless_user).to be_blank
+ end
+ end
+end
diff --git a/spec/lib/gitlab/auth/user_auth_finders_spec.rb b/spec/lib/gitlab/auth/user_auth_finders_spec.rb
new file mode 100644
index 00000000000..4637816570c
--- /dev/null
+++ b/spec/lib/gitlab/auth/user_auth_finders_spec.rb
@@ -0,0 +1,194 @@
+require 'spec_helper'
+
+describe Gitlab::Auth::UserAuthFinders do
+ include described_class
+
+ let(:user) { create(:user) }
+ let(:env) do
+ {
+ 'rack.input' => ''
+ }
+ end
+ let(:request) { Rack::Request.new(env) }
+
+ def set_param(key, value)
+ request.update_param(key, value)
+ end
+
+ describe '#find_user_from_warden' do
+ context 'with CSRF token' do
+ before do
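+ # Warden users are only returned for CSRF-verified requests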
+ allow(Gitlab::RequestForgeryProtection).to receive(:verified?).and_return(true)
+ end
+
+ context 'with invalid credentials' do
+ it 'returns nil' do
+ expect(find_user_from_warden).to be_nil
+ end
+ end
+
+ context 'with valid credentials' do
+ it 'returns the user' do
+ env['warden'] = double("warden", authenticate: user)
+
+ expect(find_user_from_warden).to eq user
+ end
+ end
+ end
+
+ context 'without CSRF token' do
+ it 'returns nil' do
+ allow(Gitlab::RequestForgeryProtection).to receive(:verified?).and_return(false)
+ env['warden'] = double("warden", authenticate: user)
+
+ expect(find_user_from_warden).to be_nil
+ end
+ end
+ end
+
+ describe '#find_user_from_rss_token' do
+ context 'when the request format is atom' do
+ before do
+ env['HTTP_ACCEPT'] = 'application/atom+xml'
+ end
+
+ it 'returns user if valid rss_token' do
+ set_param(:rss_token, user.rss_token)
+
+ expect(find_user_from_rss_token).to eq user
+ end
+
+ it 'returns nil if rss_token is blank' do
+ expect(find_user_from_rss_token).to be_nil
+ end
+
+ it 'raises an exception if the rss_token is invalid' do
+ set_param(:rss_token, 'invalid_token')
+
+ expect { find_user_from_rss_token }.to raise_error(Gitlab::Auth::UnauthorizedError)
+ end
+ end
+
+ context 'when the request format is not atom' do
+ it 'returns nil' do
+ set_param(:rss_token, user.rss_token)
+
+ expect(find_user_from_rss_token).to be_nil
+ end
+ end
+ end
+
+ describe '#find_user_from_access_token' do
+ let(:personal_access_token) { create(:personal_access_token, user: user) }
+
+ it 'returns nil if no access_token present' do
+ expect(find_personal_access_token).to be_nil
+ end
+
+ context 'when validate_access_token! returns valid' do
+ it 'returns user' do
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token
+
+ expect(find_user_from_access_token).to eq user
+ end
+
+ it 'raises an exception if the token has no user' do
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token
+ allow_any_instance_of(PersonalAccessToken).to receive(:user).and_return(nil)
+
+ expect { find_user_from_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError)
+ end
+ end
+ end
+
+ describe '#find_personal_access_token' do
+ let(:personal_access_token) { create(:personal_access_token, user: user) }
+
+ context 'passed as header' do
+ it 'returns token if valid personal_access_token' do
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token
+
+ expect(find_personal_access_token).to eq personal_access_token
+ end
+ end
+
+ context 'passed as param' do
+ it 'returns token if valid personal_access_token' do
+ set_param(Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_PARAM, personal_access_token.token)
+
+ expect(find_personal_access_token).to eq personal_access_token
+ end
+ end
+
+ it 'returns nil if no personal_access_token' do
+ expect(find_personal_access_token).to be_nil
+ end
+
+ it 'raises an exception if the personal_access_token is invalid' do
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = 'invalid_token'
+
+ expect { find_personal_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError)
+ end
+ end
+
+ describe '#find_oauth_access_token' do
+ let(:application) { Doorkeeper::Application.create!(name: 'MyApp', redirect_uri: 'https://app.com', owner: user) }
+ let(:token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'api') }
+
+ context 'passed as header' do
+ it 'returns token if valid oauth_access_token' do
+ env['HTTP_AUTHORIZATION'] = "Bearer #{token.token}"
+
+ expect(find_oauth_access_token.token).to eq token.token
+ end
+ end
+
+ context 'passed as param' do
+ it 'returns token if valid oauth_access_token' do
+ set_param(:access_token, token.token)
+
+ expect(find_oauth_access_token.token).to eq token.token
+ end
+ end
+
+ it 'returns nil if no oauth_access_token' do
+ expect(find_oauth_access_token).to be_nil
+ end
+
+ it 'raises an exception if the oauth_access_token is invalid' do
+ env['HTTP_AUTHORIZATION'] = "Bearer invalid_token"
+
+ expect { find_oauth_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError)
+ end
+ end
+
+ describe '#validate_access_token!' do
+ let(:personal_access_token) { create(:personal_access_token, user: user) }
+
+ it 'returns nil if no access_token present' do
+ expect(validate_access_token!).to be_nil
+ end
+
+ context 'token is not valid' do
+ before do
+ allow_any_instance_of(described_class).to receive(:access_token).and_return(personal_access_token)
+ end
+
+ it 'raises Gitlab::Auth::ExpiredError if the token is expired' do
+ personal_access_token.expires_at = 1.day.ago
+
+ expect { validate_access_token! }.to raise_error(Gitlab::Auth::ExpiredError)
+ end
+
+ it 'raises Gitlab::Auth::RevokedError if the token is revoked' do
+ personal_access_token.revoke!
+
+ expect { validate_access_token! }.to raise_error(Gitlab::Auth::RevokedError)
+ end
+
+ it 'raises Gitlab::Auth::InsufficientScopeError if the token scope is insufficient' do
+ expect { validate_access_token!(scopes: [:sudo]) }.to raise_error(Gitlab::Auth::InsufficientScopeError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 54a853c9ce3..a6fbec295b5 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -133,6 +133,25 @@ describe Gitlab::Auth do
gl_auth.find_for_git_client(user.username, token, project: nil, ip: 'ip')
end
+
+ it 'grants deploy key write permissions' do
+ project = create(:project)
+ key = create(:deploy_key, can_push: true)
+ create(:deploy_keys_project, deploy_key: key, project: project)
+ token = Gitlab::LfsToken.new(key).token
+
+ expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: "lfs+deploy-key-#{key.id}")
+ expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: project, ip: 'ip')).to eq(Gitlab::Auth::Result.new(key, nil, :lfs_deploy_token, read_write_authentication_abilities))
+ end
+
+ it 'does not grant deploy key write permissions when the key is not linked to the project' do
+ project = create(:project)
+ key = create(:deploy_key, can_push: true)
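+ # No deploy_keys_project link is created, so the key has no write access to the project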
+ token = Gitlab::LfsToken.new(key).token
+
+ expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: "lfs+deploy-key-#{key.id}")
+ expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: project, ip: 'ip')).to eq(Gitlab::Auth::Result.new(key, nil, :lfs_deploy_token, read_authentication_abilities))
+ end
end
context 'while using OAuth tokens as passwords' do
@@ -188,7 +207,7 @@ describe Gitlab::Auth do
end
it 'limits abilities based on scope' do
- personal_access_token = create(:personal_access_token, scopes: ['read_user'])
+ personal_access_token = create(:personal_access_token, scopes: %w[read_user sudo])
expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: '')
expect(gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(personal_access_token.user, nil, :personal_access_token, []))
@@ -232,7 +251,7 @@ describe Gitlab::Auth do
end
it 'throws an error suggesting user create a PAT when internal auth is disabled' do
- allow_any_instance_of(ApplicationSetting).to receive(:password_authentication_enabled?) { false }
+ allow_any_instance_of(ApplicationSetting).to receive(:password_authentication_enabled_for_git?) { false }
expect { gl_auth.find_for_git_client('foo', 'bar', project: nil, ip: 'ip') }.to raise_error(Gitlab::Auth::MissingPersonalAccessTokenError)
end
@@ -305,6 +324,26 @@ describe Gitlab::Auth do
gl_auth.find_with_user_password('ldap_user', 'password')
end
end
+
+ context "with password authentication disabled for Git" do
+ before do
+ stub_application_setting(password_authentication_enabled_for_git: false)
+ end
+
+ it "does not find user by valid login/password" do
+ expect(gl_auth.find_with_user_password(username, password)).to be_nil
+ end
+
+ context "with ldap enabled" do
+ before do
+ allow(Gitlab::LDAP::Config).to receive(:enabled?).and_return(true)
+ end
+
+ it "does not find non-ldap user by valid login/password" do
+ expect(gl_auth.find_with_user_password(username, password)).to be_nil
+ end
+ end
+ end
end
private
@@ -326,10 +365,15 @@ describe Gitlab::Auth do
]
end
- def full_authentication_abilities
+ def read_write_authentication_abilities
read_authentication_abilities + [
:push_code,
- :create_container_image,
+ :create_container_image
+ ]
+ end
+
+ def full_authentication_abilities
+ read_write_authentication_abilities + [
:admin_container_image
]
end
diff --git a/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb b/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
index 1a4ea2bac48..79d2c071446 100644
--- a/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
+++ b/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
@@ -93,7 +93,14 @@ describe Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange, :migrat
end
it 'knows it is finished for this range' do
- expect(migration.missing_members?(1, 7)).to be_falsy
+ expect(migration.missing_members?(1, 8)).to be_falsy
+ end
+
+ it 'does not miss members for forks of forks for which the root was deleted' do
+ forked_project_links.create(id: 9, forked_from_project_id: base1_fork1.id, forked_to_project_id: create(:project).id)
+ base1.destroy
+
+ expect(migration.missing_members?(7, 10)).to be_falsy
end
context 'with more forks' do
diff --git a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
index 4d3fdbd9554..84d9e635810 100644
--- a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
+++ b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
@@ -1,9 +1,13 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :truncate do
+describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :truncate, :migration, schema: 20171114162227 do
+ let(:merge_request_diffs) { table(:merge_request_diffs) }
+ let(:merge_requests) { table(:merge_requests) }
+
describe '#perform' do
- let(:merge_request) { create(:merge_request) }
- let(:merge_request_diff) { merge_request.merge_request_diff }
+ let(:project) { create(:project, :repository) }
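+ # becomes(MergeRequest) turns the bare table-backed row into a full model instance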
+ let(:merge_request) { merge_requests.create!(iid: 1, target_project_id: project.id, source_project_id: project.id, target_branch: 'feature', source_branch: 'master').becomes(MergeRequest) }
+ let(:merge_request_diff) { MergeRequest.find(merge_request.id).create_merge_request_diff }
let(:updated_merge_request_diff) { MergeRequestDiff.find(merge_request_diff.id) }
def diffs_to_hashes(diffs)
@@ -68,7 +72,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
let(:stop_id) { described_class::MergeRequestDiff.maximum(:id) }
before do
- merge_request.reload_diff(true)
+ merge_request.create_merge_request_diff
convert_to_yaml(start_id, merge_request_diff.commits, diffs_to_hashes(merge_request_diff.merge_request_diff_files))
convert_to_yaml(stop_id, updated_merge_request_diff.commits, diffs_to_hashes(updated_merge_request_diff.merge_request_diff_files))
@@ -288,7 +292,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs are Rugged::Patch instances' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
- let(:first_commit) { merge_request.project.repository.commit(merge_request_diff.head_commit_sha) }
+ let(:first_commit) { project.repository.commit(merge_request_diff.head_commit_sha) }
let(:expected_commits) { commits }
let(:diffs) { first_commit.rugged_diff_from_parent.patches }
let(:expected_diffs) { [] }
@@ -298,7 +302,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs are Rugged::Diff::Delta instances' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
- let(:first_commit) { merge_request.project.repository.commit(merge_request_diff.head_commit_sha) }
+ let(:first_commit) { project.repository.commit(merge_request_diff.head_commit_sha) }
let(:expected_commits) { commits }
let(:diffs) { first_commit.rugged_diff_from_parent.deltas }
let(:expected_diffs) { [] }
diff --git a/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb b/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
index 2c2684a6fc9..e52baf8dde7 100644
--- a/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
@@ -3,12 +3,9 @@ require 'spec_helper'
describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, schema: 20170929131201 do
let(:migration) { described_class.new }
let(:base1) { create(:project) }
- let(:base1_fork1) { create(:project) }
- let(:base1_fork2) { create(:project) }
let(:base2) { create(:project) }
let(:base2_fork1) { create(:project) }
- let(:base2_fork2) { create(:project) }
let!(:forked_project_links) { table(:forked_project_links) }
let!(:fork_networks) { table(:fork_networks) }
@@ -21,21 +18,24 @@ describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, sch
# A normal fork link
forked_project_links.create(id: 1,
forked_from_project_id: base1.id,
- forked_to_project_id: base1_fork1.id)
+ forked_to_project_id: create(:project).id)
forked_project_links.create(id: 2,
forked_from_project_id: base1.id,
- forked_to_project_id: base1_fork2.id)
-
+ forked_to_project_id: create(:project).id)
forked_project_links.create(id: 3,
forked_from_project_id: base2.id,
forked_to_project_id: base2_fork1.id)
+
+ # create a fork of a fork
forked_project_links.create(id: 4,
forked_from_project_id: base2_fork1.id,
forked_to_project_id: create(:project).id)
-
forked_project_links.create(id: 5,
- forked_from_project_id: base2.id,
- forked_to_project_id: base2_fork2.id)
+ forked_from_project_id: create(:project).id,
+ forked_to_project_id: create(:project).id)
+
+ # Stub out the calls to the other migrations
+ allow(BackgroundMigrationWorker).to receive(:perform_in)
migration.perform(1, 3)
end
@@ -62,12 +62,15 @@ describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, sch
expect(base2_membership).not_to be_nil
end
- it 'skips links that had their source project deleted' do
- forked_project_links.create(id: 6, forked_from_project_id: 99999, forked_to_project_id: create(:project).id)
+ it 'creates a fork network for the fork of which the source was deleted' do
+ fork = create(:project)
+ forked_project_links.create(id: 6, forked_from_project_id: 99999, forked_to_project_id: fork.id)
migration.perform(5, 8)
expect(fork_networks.find_by(root_project_id: 99999)).to be_nil
+ expect(fork_networks.find_by(root_project_id: fork.id)).not_to be_nil
+ expect(fork_network_members.find_by(project_id: fork.id)).not_to be_nil
end
it 'schedules a job for inserting memberships for forks-of-forks' do
@@ -80,11 +83,11 @@ describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, sch
end
it 'only processes a single batch of links at a time' do
- expect(fork_network_members.count).to eq(5)
+ expect(fork_networks.count).to eq(2)
migration.perform(3, 5)
- expect(fork_network_members.count).to eq(7)
+ expect(fork_networks.count).to eq(3)
end
it 'can be repeated without effect' do
diff --git a/spec/migrations/populate_merge_requests_latest_merge_request_diff_id_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb
index 4ea7f441f7c..0cb753c5853 100644
--- a/spec/migrations/populate_merge_requests_latest_merge_request_diff_id_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb
@@ -1,7 +1,6 @@
require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20171026082505_populate_merge_requests_latest_merge_request_diff_id')
-describe PopulateMergeRequestsLatestMergeRequestDiffId, :migration do
+describe Gitlab::BackgroundMigration::PopulateMergeRequestsLatestMergeRequestDiffId, :migration, schema: 20171026082505 do
let(:projects_table) { table(:projects) }
let(:merge_requests_table) { table(:merge_requests) }
let(:merge_request_diffs_table) { table(:merge_request_diffs) }
@@ -27,30 +26,32 @@ describe PopulateMergeRequestsLatestMergeRequestDiffId, :migration do
merge_request_diffs_table.where(merge_request_id: merge_request.id)
end
- describe '#up' do
+ describe '#perform' do
it 'ignores MRs without diffs' do
merge_request_without_diff = create_mr!('without_diff')
+ mr_id = merge_request_without_diff.id
expect(merge_request_without_diff.latest_merge_request_diff_id).to be_nil
- expect { migrate! }
+ expect { subject.perform(mr_id, mr_id) }
.not_to change { merge_request_without_diff.reload.latest_merge_request_diff_id }
end
it 'ignores MRs that have a diff ID already set' do
merge_request_with_multiple_diffs = create_mr!('with_multiple_diffs', diffs: 3)
diff_id = diffs_for(merge_request_with_multiple_diffs).minimum(:id)
+ mr_id = merge_request_with_multiple_diffs.id
merge_request_with_multiple_diffs.update!(latest_merge_request_diff_id: diff_id)
- expect { migrate! }
+ expect { subject.perform(mr_id, mr_id) }
.not_to change { merge_request_with_multiple_diffs.reload.latest_merge_request_diff_id }
end
it 'migrates multiple MR diffs to the correct values' do
merge_requests = Array.new(3).map.with_index { |_, i| create_mr!(i, diffs: 3) }
- migrate!
+ subject.perform(merge_requests.first.id, merge_requests.last.id)
merge_requests.each do |merge_request|
expect(merge_request.reload.latest_merge_request_diff_id)
diff --git a/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
new file mode 100644
index 00000000000..b80df6956b0
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
@@ -0,0 +1,510 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq do
+ include TrackUntrackedUploadsHelpers
+
+ subject { described_class.new }
+
+ let!(:untracked_files_for_uploads) { described_class::UntrackedFile }
+ let!(:uploads) { described_class::Upload }
+
+ before do
+ DatabaseCleaner.clean
+ drop_temp_table_if_exists
+ ensure_temporary_tracking_table_exists
+ uploads.delete_all
+ end
+
+ after(:all) do
+ drop_temp_table_if_exists
+ end
+
+ context 'with untracked files and tracked files in untracked_files_for_uploads' do
+ let!(:appearance) { create_or_update_appearance(logo: uploaded_file, header_logo: uploaded_file) }
+ let!(:user1) { create(:user, :with_avatar) }
+ let!(:user2) { create(:user, :with_avatar) }
+ let!(:project1) { create(:project, :with_avatar) }
+ let!(:project2) { create(:project, :with_avatar) }
+
+ before do
+ UploadService.new(project1, uploaded_file, FileUploader).execute # Markdown upload
+ UploadService.new(project2, uploaded_file, FileUploader).execute # Markdown upload
+
+ # File records created by PrepareUntrackedUploads
+ untracked_files_for_uploads.create!(path: appearance.uploads.first.path)
+ untracked_files_for_uploads.create!(path: appearance.uploads.last.path)
+ untracked_files_for_uploads.create!(path: user1.uploads.first.path)
+ untracked_files_for_uploads.create!(path: user2.uploads.first.path)
+ untracked_files_for_uploads.create!(path: project1.uploads.first.path)
+ untracked_files_for_uploads.create!(path: project2.uploads.first.path)
+ untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{project1.full_path}/#{project1.uploads.last.path}")
+ untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{project2.full_path}/#{project2.uploads.last.path}")
+
+ # Untrack 4 files
+ user2.uploads.delete_all
+ project2.uploads.delete_all # 2 files: avatar and a Markdown upload
+ appearance.uploads.where("path like '%header_logo%'").delete_all
+ end
+
+ it 'adds untracked files to the uploads table' do
+ expect do
+ subject.perform(1, untracked_files_for_uploads.last.id)
+ end.to change { uploads.count }.from(4).to(8)
+
+ expect(user2.uploads.count).to eq(1)
+ expect(project2.uploads.count).to eq(2)
+ expect(appearance.uploads.count).to eq(2)
+ end
+
+ it 'deletes rows after processing them' do
+ expect(subject).to receive(:drop_temp_table_if_finished) # Don't drop the table so we can look at it
+
+ expect do
+ subject.perform(1, untracked_files_for_uploads.last.id)
+ end.to change { untracked_files_for_uploads.count }.from(8).to(0)
+ end
+
+ it 'does not create duplicate uploads of already tracked files' do
+ subject.perform(1, untracked_files_for_uploads.last.id)
+
+ expect(user1.uploads.count).to eq(1)
+ expect(project1.uploads.count).to eq(2)
+ expect(appearance.uploads.count).to eq(2)
+ end
+
+ it 'uses the start and end batch ids [only 1st half]' do
+ ids = untracked_files_for_uploads.all.order(:id).pluck(:id)
+ start_id = ids[0]
+ end_id = ids[3]
+
+ expect do
+ subject.perform(start_id, end_id)
+ end.to change { uploads.count }.from(4).to(6)
+
+ expect(user1.uploads.count).to eq(1)
+ expect(user2.uploads.count).to eq(1)
+ expect(appearance.uploads.count).to eq(2)
+ expect(project1.uploads.count).to eq(2)
+ expect(project2.uploads.count).to eq(0)
+
+ # Only 4 have been either confirmed or added to uploads
+ expect(untracked_files_for_uploads.count).to eq(4)
+ end
+
+ it 'uses the start and end batch ids [only 2nd half]' do
+ ids = untracked_files_for_uploads.all.order(:id).pluck(:id)
+ start_id = ids[4]
+ end_id = ids[7]
+
+ expect do
+ subject.perform(start_id, end_id)
+ end.to change { uploads.count }.from(4).to(6)
+
+ expect(user1.uploads.count).to eq(1)
+ expect(user2.uploads.count).to eq(0)
+ expect(appearance.uploads.count).to eq(1)
+ expect(project1.uploads.count).to eq(2)
+ expect(project2.uploads.count).to eq(2)
+
+ # Only 4 have been either confirmed or added to uploads
+ expect(untracked_files_for_uploads.count).to eq(4)
+ end
+
+ it 'does not drop the temporary tracking table after processing the batch, if there are still untracked rows' do
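+ # Stop one row short of the end so untracked rows remain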
+ subject.perform(1, untracked_files_for_uploads.last.id - 1)
+
+ expect(ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads)).to be_truthy
+ end
+
+ it 'drops the temporary tracking table after processing the batch, if there are no untracked rows left' do
+ subject.perform(1, untracked_files_for_uploads.last.id)
+
+ expect(ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads)).to be_falsey
+ end
+
+ it 'does not block a whole batch because of one bad path' do
+ untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{project2.full_path}/._7d37bf4c747916390e596744117d5d1a")
+ expect(untracked_files_for_uploads.count).to eq(9)
+ expect(uploads.count).to eq(4)
+
+ subject.perform(1, untracked_files_for_uploads.last.id)
+
+ expect(untracked_files_for_uploads.count).to eq(1)
+ expect(uploads.count).to eq(8)
+ end
+
+ it 'logs an error for an unparseable path' do
+ bad_path = "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{project2.full_path}/._7d37bf4c747916390e596744117d5d1a"
+ untracked_files_for_uploads.create!(path: bad_path)
+
+ expect(Rails.logger).to receive(:error).with(/Error parsing path "#{bad_path}":/)
+
+ subject.perform(1, untracked_files_for_uploads.last.id)
+ end
+ end
+
+ context 'with no untracked files' do
+ it 'does not add to the uploads table (and does not raise error)' do
+ expect do
+ subject.perform(1, 1000)
+ end.not_to change { uploads.count }.from(0)
+ end
+ end
+
+ describe 'upload outcomes for each path pattern' do
+ shared_examples_for 'non_markdown_file' do
+ let!(:expected_upload_attrs) { model.uploads.first.attributes.slice('path', 'uploader', 'size', 'checksum') }
+ let!(:untracked_file) { untracked_files_for_uploads.create!(path: expected_upload_attrs['path']) }
+
+ before do
+ model.uploads.delete_all
+ end
+
+ it 'creates an Upload record' do
+ expect do
+ subject.perform(1, untracked_files_for_uploads.last.id)
+ end.to change { model.reload.uploads.count }.from(0).to(1)
+
+ expect(model.uploads.first.attributes).to include(expected_upload_attrs)
+ end
+ end
+
+ context 'for an appearance logo file path' do
+ let(:model) { create_or_update_appearance(logo: uploaded_file) }
+
+ it_behaves_like 'non_markdown_file'
+ end
+
+ context 'for an appearance header_logo file path' do
+ let(:model) { create_or_update_appearance(header_logo: uploaded_file) }
+
+ it_behaves_like 'non_markdown_file'
+ end
+
+ context 'for a pre-Markdown Note attachment file path' do
+ class Note < ActiveRecord::Base
+ has_many :uploads, as: :model, dependent: :destroy
+ end
+
+ let(:model) { create(:note, :with_attachment) }
+
+ it_behaves_like 'non_markdown_file'
+ end
+
+ context 'for a user avatar file path' do
+ let(:model) { create(:user, :with_avatar) }
+
+ it_behaves_like 'non_markdown_file'
+ end
+
+ context 'for a group avatar file path' do
+ let(:model) { create(:group, :with_avatar) }
+
+ it_behaves_like 'non_markdown_file'
+ end
+
+ context 'for a project avatar file path' do
+ let(:model) { create(:project, :with_avatar) }
+
+ it_behaves_like 'non_markdown_file'
+ end
+
+ context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
+ let(:model) { create(:project) }
+
+ before do
+ # Upload the file
+ UploadService.new(model, uploaded_file, FileUploader).execute
+
+ # Create the untracked_files_for_uploads record
+ untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{model.full_path}/#{model.uploads.first.path}")
+
+ # Save the expected upload attributes
+ @expected_upload_attrs = model.reload.uploads.first.attributes.slice('path', 'uploader', 'size', 'checksum')
+
+ # Untrack the file
+ model.reload.uploads.delete_all
+ end
+
+ it 'creates an Upload record' do
+ expect do
+ subject.perform(1, untracked_files_for_uploads.last.id)
+ end.to change { model.reload.uploads.count }.from(0).to(1)
+
+ expect(model.uploads.first.attributes).to include(@expected_upload_attrs)
+ end
+ end
+ end
+end
+
+describe Gitlab::BackgroundMigration::PopulateUntrackedUploads::UntrackedFile do
+ include TrackUntrackedUploadsHelpers
+
+ let(:upload_class) { Gitlab::BackgroundMigration::PopulateUntrackedUploads::Upload }
+
+ before(:all) do
+ ensure_temporary_tracking_table_exists
+ end
+
+ after(:all) do
+ drop_temp_table_if_exists
+ end
+
+ describe '#upload_path' do
+ def assert_upload_path(file_path, expected_upload_path)
+ untracked_file = create_untracked_file(file_path)
+
+ expect(untracked_file.upload_path).to eq(expected_upload_path)
+ end
+
+ context 'for an appearance logo file path' do
+ it 'returns the file path relative to the CarrierWave root' do
+ assert_upload_path('/-/system/appearance/logo/1/some_logo.jpg', 'uploads/-/system/appearance/logo/1/some_logo.jpg')
+ end
+ end
+
+ context 'for an appearance header_logo file path' do
+ it 'returns the file path relative to the CarrierWave root' do
+ assert_upload_path('/-/system/appearance/header_logo/1/some_logo.jpg', 'uploads/-/system/appearance/header_logo/1/some_logo.jpg')
+ end
+ end
+
+ context 'for a pre-Markdown Note attachment file path' do
+ it 'returns the file path relative to the CarrierWave root' do
+ assert_upload_path('/-/system/note/attachment/1234/some_attachment.pdf', 'uploads/-/system/note/attachment/1234/some_attachment.pdf')
+ end
+ end
+
+ context 'for a user avatar file path' do
+ it 'returns the file path relative to the CarrierWave root' do
+ assert_upload_path('/-/system/user/avatar/1234/avatar.jpg', 'uploads/-/system/user/avatar/1234/avatar.jpg')
+ end
+ end
+
+ context 'for a group avatar file path' do
+ it 'returns the file path relative to the CarrierWave root' do
+ assert_upload_path('/-/system/group/avatar/1234/avatar.jpg', 'uploads/-/system/group/avatar/1234/avatar.jpg')
+ end
+ end
+
+ context 'for a project avatar file path' do
+ it 'returns the file path relative to the CarrierWave root' do
+ assert_upload_path('/-/system/project/avatar/1234/avatar.jpg', 'uploads/-/system/project/avatar/1234/avatar.jpg')
+ end
+ end
+
+ context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
+ it 'returns the file path relative to the project directory in uploads' do
+ project = create(:project)
+ random_hex = SecureRandom.hex
+
+ assert_upload_path("/#{project.full_path}/#{random_hex}/Some file.jpg", "#{random_hex}/Some file.jpg")
+ end
+ end
+ end
+
+ describe '#uploader' do
+ def assert_uploader(file_path, expected_uploader)
+ untracked_file = create_untracked_file(file_path)
+
+ expect(untracked_file.uploader).to eq(expected_uploader)
+ end
+
+ context 'for an appearance logo file path' do
+ it 'returns AttachmentUploader as a string' do
+ assert_uploader('/-/system/appearance/logo/1/some_logo.jpg', 'AttachmentUploader')
+ end
+ end
+
+ context 'for an appearance header_logo file path' do
+ it 'returns AttachmentUploader as a string' do
+ assert_uploader('/-/system/appearance/header_logo/1/some_logo.jpg', 'AttachmentUploader')
+ end
+ end
+
+ context 'for a pre-Markdown Note attachment file path' do
+ it 'returns AttachmentUploader as a string' do
+ assert_uploader('/-/system/note/attachment/1234/some_attachment.pdf', 'AttachmentUploader')
+ end
+ end
+
+ context 'for a user avatar file path' do
+ it 'returns AvatarUploader as a string' do
+ assert_uploader('/-/system/user/avatar/1234/avatar.jpg', 'AvatarUploader')
+ end
+ end
+
+ context 'for a group avatar file path' do
+ it 'returns AvatarUploader as a string' do
+ assert_uploader('/-/system/group/avatar/1234/avatar.jpg', 'AvatarUploader')
+ end
+ end
+
+ context 'for a project avatar file path' do
+ it 'returns AvatarUploader as a string' do
+ assert_uploader('/-/system/project/avatar/1234/avatar.jpg', 'AvatarUploader')
+ end
+ end
+
+ context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
+ it 'returns FileUploader as a string' do
+ project = create(:project)
+
+ assert_uploader("/#{project.full_path}/#{SecureRandom.hex}/Some file.jpg", 'FileUploader')
+ end
+ end
+ end
+
+ describe '#model_type' do
+ def assert_model_type(file_path, expected_model_type)
+ untracked_file = create_untracked_file(file_path)
+
+ expect(untracked_file.model_type).to eq(expected_model_type)
+ end
+
+ context 'for an appearance logo file path' do
+ it 'returns Appearance as a string' do
+ assert_model_type('/-/system/appearance/logo/1/some_logo.jpg', 'Appearance')
+ end
+ end
+
+ context 'for an appearance header_logo file path' do
+ it 'returns Appearance as a string' do
+ assert_model_type('/-/system/appearance/header_logo/1/some_logo.jpg', 'Appearance')
+ end
+ end
+
+ context 'for a pre-Markdown Note attachment file path' do
+ it 'returns Note as a string' do
+ assert_model_type('/-/system/note/attachment/1234/some_attachment.pdf', 'Note')
+ end
+ end
+
+ context 'for a user avatar file path' do
+ it 'returns User as a string' do
+ assert_model_type('/-/system/user/avatar/1234/avatar.jpg', 'User')
+ end
+ end
+
+ context 'for a group avatar file path' do
+ it 'returns Namespace as a string' do
+ assert_model_type('/-/system/group/avatar/1234/avatar.jpg', 'Namespace')
+ end
+ end
+
+ context 'for a project avatar file path' do
+ it 'returns Project as a string' do
+ assert_model_type('/-/system/project/avatar/1234/avatar.jpg', 'Project')
+ end
+ end
+
+ context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
+ it 'returns Project as a string' do
+ project = create(:project)
+
+ assert_model_type("/#{project.full_path}/#{SecureRandom.hex}/Some file.jpg", 'Project')
+ end
+ end
+ end
+
+ describe '#model_id' do
+ def assert_model_id(file_path, expected_model_id)
+ untracked_file = create_untracked_file(file_path)
+
+ expect(untracked_file.model_id).to eq(expected_model_id)
+ end
+
+ context 'for an appearance logo file path' do
+ it 'returns the ID as an integer' do
+ assert_model_id('/-/system/appearance/logo/1/some_logo.jpg', 1)
+ end
+ end
+
+ context 'for an appearance header_logo file path' do
+ it 'returns the ID as an integer' do
+ assert_model_id('/-/system/appearance/header_logo/1/some_logo.jpg', 1)
+ end
+ end
+
+ context 'for a pre-Markdown Note attachment file path' do
+ it 'returns the ID as an integer' do
+ assert_model_id('/-/system/note/attachment/1234/some_attachment.pdf', 1234)
+ end
+ end
+
+ context 'for a user avatar file path' do
+ it 'returns the ID as an integer' do
+ assert_model_id('/-/system/user/avatar/1234/avatar.jpg', 1234)
+ end
+ end
+
+ context 'for a group avatar file path' do
+ it 'returns the ID as an integer' do
+ assert_model_id('/-/system/group/avatar/1234/avatar.jpg', 1234)
+ end
+ end
+
+ context 'for a project avatar file path' do
+ it 'returns the ID as an integer' do
+ assert_model_id('/-/system/project/avatar/1234/avatar.jpg', 1234)
+ end
+ end
+
+ context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
+ it 'returns the ID as an integer' do
+ project = create(:project)
+
+ assert_model_id("/#{project.full_path}/#{SecureRandom.hex}/Some file.jpg", project.id)
+ end
+ end
+ end
+
+ describe '#file_size' do
+ context 'for an appearance logo file path' do
+ let(:appearance) { create_or_update_appearance(logo: uploaded_file) }
+ let(:untracked_file) { described_class.create!(path: appearance.uploads.first.path) }
+
+ it 'returns the file size' do
+ expect(untracked_file.file_size).to eq(35255)
+ end
+
+ it 'returns the same thing that CarrierWave would return' do
+ expect(untracked_file.file_size).to eq(appearance.logo.size)
+ end
+ end
+
+ context 'for a project avatar file path' do
+ let(:project) { create(:project, avatar: uploaded_file) }
+ let(:untracked_file) { described_class.create!(path: project.uploads.first.path) }
+
+ it 'returns the file size' do
+ expect(untracked_file.file_size).to eq(35255)
+ end
+
+ it 'returns the same thing that CarrierWave would return' do
+ expect(untracked_file.file_size).to eq(project.avatar.size)
+ end
+ end
+
+ context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
+ let(:project) { create(:project) }
+ let(:untracked_file) { create_untracked_file("/#{project.full_path}/#{project.uploads.first.path}") }
+
+ before do
+ UploadService.new(project, uploaded_file, FileUploader).execute
+ end
+
+ it 'returns the file size' do
+ expect(untracked_file.file_size).to eq(35255)
+ end
+
+ it 'returns the same thing that CarrierWave would return' do
+ expect(untracked_file.file_size).to eq(project.uploads.first.size)
+ end
+ end
+ end
+
+ def create_untracked_file(path_relative_to_upload_dir)
+ described_class.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}#{path_relative_to_upload_dir}")
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
new file mode 100644
index 00000000000..cd3f1a45270
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
@@ -0,0 +1,242 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
+ include TrackUntrackedUploadsHelpers
+
+ let!(:untracked_files_for_uploads) { described_class::UntrackedFile }
+
+ matcher :be_scheduled_migration do |*expected|
+ match do |migration|
+ BackgroundMigrationWorker.jobs.any? do |job|
+ job['args'] == [migration, expected]
+ end
+ end
+
+ failure_message do |migration|
+ "Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
+ end
+ end
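+ # Usage (see the scheduling examples below):
+ #
+ #   expect(described_class::FOLLOW_UP_MIGRATION).to be_scheduled_migration(1, 5)
+ #
+ # asserts that a BackgroundMigrationWorker job for that migration was
+ # enqueued with the args [1, 5].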
+
+ before do
+ DatabaseCleaner.clean
+
+ drop_temp_table_if_exists
+ end
+
+ after do
+ drop_temp_table_if_exists
+ end
+
+ around do |example|
+ # Especially important so the follow-up migration does not get run
+ Sidekiq::Testing.fake! do
+ example.run
+ end
+ end
+
+ it 'ensures the untracked_files_for_uploads table exists' do
+ expect do
+ described_class.new.perform
+ end.to change { ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads) }.from(false).to(true)
+ end
+
+ it 'has a path field long enough for really long paths' do
+ described_class.new.perform
+
+ component = 'a' * 255
+
+ long_path = [
+ 'uploads',
+ component, # project.full_path
+ component # filename
+ ].flatten.join('/')
+
+ record = untracked_files_for_uploads.create!(path: long_path)
+ expect(record.reload.path.size).to eq(519)
+ end
+
+ context "test bulk insert with ON CONFLICT DO NOTHING or IGNORE" do
+ around do |example|
+ # If this is CI, we use Postgres 9.2 so this whole context should be
+ # skipped since we're unable to use ON CONFLICT DO NOTHING or IGNORE.
+ if described_class.new.send(:can_bulk_insert_and_ignore_duplicates?)
+ example.run
+ end
+ end
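+ # For reference, the duplicate-safe bulk insert this context exercises looks
+ # roughly like the following SQL (a sketch; the migration builds the real
+ # statement dynamically):
+ #
+ #   -- PostgreSQL 9.5+
+ #   INSERT INTO untracked_files_for_uploads (path)
+ #   VALUES ('uploads/one.jpg'), ('uploads/two.jpg')
+ #   ON CONFLICT DO NOTHING;
+ #
+ #   -- MySQL equivalent
+ #   INSERT IGNORE INTO untracked_files_for_uploads (path)
+ #   VALUES ('uploads/one.jpg'), ('uploads/two.jpg');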
+
+ context 'when files were uploaded before and after hashed storage was enabled' do
+ let!(:appearance) { create_or_update_appearance(logo: uploaded_file, header_logo: uploaded_file) }
+ let!(:user) { create(:user, :with_avatar) }
+ let!(:project1) { create(:project, :with_avatar) }
+ let(:project2) { create(:project) } # instantiate after enabling hashed_storage
+
+ before do
+ # Markdown upload before enabling hashed_storage
+ UploadService.new(project1, uploaded_file, FileUploader).execute
+
+ stub_application_setting(hashed_storage_enabled: true)
+
+ # Markdown upload after enabling hashed_storage
+ UploadService.new(project2, uploaded_file, FileUploader).execute
+ end
+
+ it 'adds unhashed files to the untracked_files_for_uploads table' do
+ described_class.new.perform
+
+ expect(untracked_files_for_uploads.count).to eq(5)
+ end
+
+ it 'adds files with paths relative to CarrierWave.root' do
+ described_class.new.perform
+ untracked_files_for_uploads.all.each do |file|
+ expect(file.path.start_with?('uploads/')).to be_truthy
+ end
+ end
+
+ it 'does not add hashed files to the untracked_files_for_uploads table' do
+ described_class.new.perform
+
+ hashed_file_path = project2.uploads.where(uploader: 'FileUploader').first.path
+ expect(untracked_files_for_uploads.where("path like '%#{hashed_file_path}%'").exists?).to be_falsey
+ end
+
+ it 'correctly schedules the follow-up background migration jobs' do
+ described_class.new.perform
+
+ expect(described_class::FOLLOW_UP_MIGRATION).to be_scheduled_migration(1, 5)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ end
+
+ # E.g. from a previous failed run of this background migration
+ context 'when there is existing data in untracked_files_for_uploads' do
+ before do
+ described_class.new.perform
+ end
+
+ it 'does not error or produce duplicates of existing data' do
+ expect do
+ described_class.new.perform
+ end.not_to change { untracked_files_for_uploads.count }.from(5)
+ end
+ end
+
+ # E.g. the installation is in use at the time of the migration, and someone
+ # has just uploaded a file
+ context 'when there are files in /uploads/tmp' do
+ let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
+
+ before do
+ FileUtils.touch(tmp_file)
+ end
+
+ after do
+ FileUtils.rm(tmp_file)
+ end
+
+ it 'does not add files from /uploads/tmp' do
+ described_class.new.perform
+
+ expect(untracked_files_for_uploads.count).to eq(5)
+ end
+ end
+ end
+ end
+
+ context 'test bulk insert without ON CONFLICT DO NOTHING or IGNORE' do
+ before do
+ # If this is CI, we use Postgres 9.2 so this stub has no effect.
+ #
+ # If this is being run on Postgres 9.5+ or MySQL, then this stub allows us
+ # to test the bulk insert functionality without ON CONFLICT DO NOTHING or
+ # IGNORE.
+ allow_any_instance_of(described_class).to receive(:postgresql_pre_9_5?).and_return(true)
+ end
+
+ context 'when files were uploaded before and after hashed storage was enabled' do
+ let!(:appearance) { create_or_update_appearance(logo: uploaded_file, header_logo: uploaded_file) }
+ let!(:user) { create(:user, :with_avatar) }
+ let!(:project1) { create(:project, :with_avatar) }
+ let(:project2) { create(:project) } # instantiate after enabling hashed_storage
+
+ before do
+ # Markdown upload before enabling hashed_storage
+ UploadService.new(project1, uploaded_file, FileUploader).execute
+
+ stub_application_setting(hashed_storage_enabled: true)
+
+ # Markdown upload after enabling hashed_storage
+ UploadService.new(project2, uploaded_file, FileUploader).execute
+ end
+
+ it 'adds unhashed files to the untracked_files_for_uploads table' do
+ described_class.new.perform
+
+ expect(untracked_files_for_uploads.count).to eq(5)
+ end
+
+ it 'adds files with paths relative to CarrierWave.root' do
+ described_class.new.perform
+ untracked_files_for_uploads.all.each do |file|
+ expect(file.path.start_with?('uploads/')).to be_truthy
+ end
+ end
+
+ it 'does not add hashed files to the untracked_files_for_uploads table' do
+ described_class.new.perform
+
+ hashed_file_path = project2.uploads.where(uploader: 'FileUploader').first.path
+ expect(untracked_files_for_uploads.where("path like '%#{hashed_file_path}%'").exists?).to be_falsey
+ end
+
+ it 'correctly schedules the follow-up background migration jobs' do
+ described_class.new.perform
+
+ expect(described_class::FOLLOW_UP_MIGRATION).to be_scheduled_migration(1, 5)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ end
+
+ # E.g. from a previous failed run of this background migration
+ context 'when there is existing data in untracked_files_for_uploads' do
+ before do
+ described_class.new.perform
+ end
+
+ it 'does not error or produce duplicates of existing data' do
+ expect do
+ described_class.new.perform
+ end.not_to change { untracked_files_for_uploads.count }.from(5)
+ end
+ end
+
+ # E.g. the installation is in use at the time of the migration, and someone
+ # has just uploaded a file
+ context 'when there are files in /uploads/tmp' do
+ let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
+
+ before do
+ FileUtils.touch(tmp_file)
+ end
+
+ after do
+ FileUtils.rm(tmp_file)
+ end
+
+ it 'does not add files from /uploads/tmp' do
+ described_class.new.perform
+
+ expect(untracked_files_for_uploads.count).to eq(5)
+ end
+ end
+ end
+ end
+
+ # Very new or lightly-used installations that are running this migration
+ # may not have an upload directory because they have no uploads.
+ context 'when no files were ever uploaded' do
+ it 'does not add to the untracked_files_for_uploads table (and does not raise error)' do
+ described_class.new.perform
+
+ expect(untracked_files_for_uploads.count).to eq(0)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/backup/repository_spec.rb b/spec/lib/gitlab/backup/repository_spec.rb
deleted file mode 100644
index 535cce12780..00000000000
--- a/spec/lib/gitlab/backup/repository_spec.rb
+++ /dev/null
@@ -1,117 +0,0 @@
-require 'spec_helper'
-
-describe Backup::Repository do
- let(:progress) { StringIO.new }
- let!(:project) { create(:project) }
-
- before do
- allow(progress).to receive(:puts)
- allow(progress).to receive(:print)
-
- allow_any_instance_of(String).to receive(:color) do |string, _color|
- string
- end
-
- allow_any_instance_of(described_class).to receive(:progress).and_return(progress)
- end
-
- describe '#dump' do
- describe 'repo failure' do
- before do
- allow_any_instance_of(Repository).to receive(:empty_repo?).and_raise(Rugged::OdbError)
- allow(Gitlab::Popen).to receive(:popen).and_return(['normal output', 0])
- end
-
- it 'does not raise error' do
- expect { described_class.new.dump }.not_to raise_error
- end
-
- it 'shows the appropriate error' do
- described_class.new.dump
-
- expect(progress).to have_received(:puts).with("Ignoring repository error and continuing backing up project: #{project.full_path} - Rugged::OdbError")
- end
- end
-
- describe 'command failure' do
- before do
- allow_any_instance_of(Repository).to receive(:empty_repo?).and_return(false)
- allow(Gitlab::Popen).to receive(:popen).and_return(['error', 1])
- end
-
- it 'shows the appropriate error' do
- described_class.new.dump
-
- expect(progress).to have_received(:puts).with("Ignoring error on #{project.full_path} - error")
- end
- end
- end
-
- describe '#restore' do
- describe 'command failure' do
- before do
- allow(Gitlab::Popen).to receive(:popen).and_return(['error', 1])
- end
-
- it 'shows the appropriate error' do
- described_class.new.restore
-
- expect(progress).to have_received(:puts).with("Ignoring error on #{project.full_path} - error")
- end
- end
- end
-
- describe '#empty_repo?' do
- context 'for a wiki' do
- let(:wiki) { create(:project_wiki) }
-
- context 'wiki repo has content' do
- let!(:wiki_page) { create(:wiki_page, wiki: wiki) }
-
- before do
- wiki.repository.exists? # initial cache
- end
-
- context '`repository.exists?` is incorrectly cached as false' do
- before do
- repo = wiki.repository
- repo.send(:cache).expire(:exists?)
- repo.send(:cache).fetch(:exists?) { false }
- repo.send(:instance_variable_set, :@exists, false)
- end
-
- it 'returns false, regardless of bad cache value' do
- expect(described_class.new.send(:empty_repo?, wiki)).to be_falsey
- end
- end
-
- context '`repository.exists?` is correctly cached as true' do
- it 'returns false' do
- expect(described_class.new.send(:empty_repo?, wiki)).to be_falsey
- end
- end
- end
-
- context 'wiki repo does not have content' do
- context '`repository.exists?` is incorrectly cached as true' do
- before do
- repo = wiki.repository
- repo.send(:cache).expire(:exists?)
- repo.send(:cache).fetch(:exists?) { true }
- repo.send(:instance_variable_set, :@exists, true)
- end
-
- it 'returns true, regardless of bad cache value' do
- expect(described_class.new.send(:empty_repo?, wiki)).to be_truthy
- end
- end
-
- context '`repository.exists?` is correctly cached as false' do
- it 'returns true' do
- expect(described_class.new.send(:empty_repo?, wiki)).to be_truthy
- end
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index a66347ead76..a6a1d9e619f 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -54,11 +54,13 @@ describe Gitlab::BitbucketImport::Importer do
create(
:project,
import_source: project_identifier,
+ import_url: "https://bitbucket.org/#{project_identifier}.git",
import_data_attributes: { credentials: data }
)
end
let(:importer) { described_class.new(project) }
+ let(:gitlab_shell) { double }
let(:issues_statuses_sample_data) do
{
@@ -67,6 +69,10 @@ describe Gitlab::BitbucketImport::Importer do
}
end
+ before do
+ allow(importer).to receive(:gitlab_shell) { gitlab_shell }
+ end
+
context 'issues statuses' do
before do
# HACK: Bitbucket::Representation.const_get('Issue') seems to return ::Issue without this
@@ -110,15 +116,36 @@ describe Gitlab::BitbucketImport::Importer do
end
it 'maps statuses to open or closed' do
+ allow(importer).to receive(:import_wiki)
+
importer.execute
expect(project.issues.where(state: "closed").size).to eq(5)
expect(project.issues.where(state: "opened").size).to eq(2)
end
- it 'calls import_wiki' do
- expect(importer).to receive(:import_wiki)
- importer.execute
+ describe 'wiki import' do
+ it 'is skipped when the wiki exists' do
+ expect(project.wiki).to receive(:repository_exists?) { true }
+ expect(importer.gitlab_shell).not_to receive(:import_repository)
+
+ importer.execute
+
+ expect(importer.errors).to be_empty
+ end
+
+ it 'imports to the project disk_path' do
+ expect(project.wiki).to receive(:repository_exists?) { false }
+ expect(importer.gitlab_shell).to receive(:import_repository).with(
+ project.repository_storage_path,
+ project.wiki.disk_path,
+ project.import_url + '/wiki'
+ )
+
+ importer.execute
+
+ expect(importer.errors).to be_empty
+ end
end
end
end
diff --git a/spec/lib/gitlab/checks/change_access_spec.rb b/spec/lib/gitlab/checks/change_access_spec.rb
index 74a24a4424b..c2bca816aae 100644
--- a/spec/lib/gitlab/checks/change_access_spec.rb
+++ b/spec/lib/gitlab/checks/change_access_spec.rb
@@ -165,47 +165,16 @@ describe Gitlab::Checks::ChangeAccess do
end
context 'LFS integrity check' do
- let(:blob_object) { project.repository.blob_at_branch('lfs', 'files/lfs/lfs_object.iso') }
+ it 'fails if any LFS blobs are missing' do
+ allow_any_instance_of(Gitlab::Checks::LfsIntegrity).to receive(:objects_missing?).and_return(true)
- before do
- allow_any_instance_of(Gitlab::Git::RevList).to receive(:new_objects) do |&lazy_block|
- lazy_block.call([blob_object.id])
- end
- end
-
- context 'with LFS not enabled' do
- it 'skips integrity check' do
- expect_any_instance_of(Gitlab::Git::RevList).not_to receive(:new_objects)
-
- subject.exec
- end
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /LFS objects are missing/)
end
- context 'with LFS enabled' do
- before do
- allow(project).to receive(:lfs_enabled?).and_return(true)
- end
-
- context 'deletion' do
- let(:changes) { { oldrev: oldrev, ref: ref } }
-
- it 'skips integrity check' do
- expect_any_instance_of(Gitlab::Git::RevList).not_to receive(:new_objects)
+ it 'succeeds if LFS objects have already been uploaded' do
+ allow_any_instance_of(Gitlab::Checks::LfsIntegrity).to receive(:objects_missing?).and_return(false)
- subject.exec
- end
- end
-
- it 'fails if any LFS blobs are missing' do
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /LFS objects are missing/)
- end
-
- it 'succeeds if LFS objects have already been uploaded' do
- lfs_object = create(:lfs_object, oid: blob_object.lfs_oid)
- create(:lfs_objects_project, project: project, lfs_object: lfs_object)
-
- expect { subject.exec }.not_to raise_error
- end
+ expect { subject.exec }.not_to raise_error
end
end
end
diff --git a/spec/lib/gitlab/checks/lfs_integrity_spec.rb b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
new file mode 100644
index 00000000000..17756621221
--- /dev/null
+++ b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
@@ -0,0 +1,74 @@
+require 'spec_helper'
+
+describe Gitlab::Checks::LfsIntegrity do
+ include ProjectForksHelper
+ let(:project) { create(:project, :repository) }
+ let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
+
+ subject { described_class.new(project, newrev) }
+
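+ # A sketch of how a caller such as Gitlab::Checks::ChangeAccess is expected
+ # to use this check (assumed wiring; see change_access_spec above, which
+ # stubs exactly this predicate):
+ #
+ #   if Gitlab::Checks::LfsIntegrity.new(project, newrev).objects_missing?
+ #     raise Gitlab::GitAccess::UnauthorizedError, 'LFS objects are missing'
+ #   end
+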
+ describe '#objects_missing?' do
+ let(:blob_object) { project.repository.blob_at_branch('lfs', 'files/lfs/lfs_object.iso') }
+
+ before do
+ allow_any_instance_of(Gitlab::Git::RevList).to receive(:new_objects) do |&lazy_block|
+ lazy_block.call([blob_object.id])
+ end
+ end
+
+ context 'with LFS not enabled' do
+ it 'skips integrity check' do
+ expect_any_instance_of(Gitlab::Git::RevList).not_to receive(:new_objects)
+
+ subject.objects_missing?
+ end
+ end
+
+ context 'with LFS enabled' do
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+ end
+
+ context 'deletion' do
+ let(:newrev) { nil }
+
+ it 'skips integrity check' do
+ expect_any_instance_of(Gitlab::Git::RevList).not_to receive(:new_objects)
+
+ expect(subject.objects_missing?).to be_falsey
+ end
+ end
+
+ it 'is true if any LFS blobs are missing' do
+ expect(subject.objects_missing?).to be_truthy
+ end
+
+ it 'is false if LFS objects have already been uploaded' do
+ lfs_object = create(:lfs_object, oid: blob_object.lfs_oid)
+ create(:lfs_objects_project, project: project, lfs_object: lfs_object)
+
+ expect(subject.objects_missing?).to be_falsey
+ end
+ end
+
+ context 'for forked project' do
+ let(:parent_project) { create(:project, :repository) }
+ let(:project) { fork_project(parent_project, nil, repository: true) }
+
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+ end
+
+ it 'is true when the parent project is missing LFS objects' do
+ expect(subject.objects_missing?).to be_truthy
+ end
+
+ it 'is false when the parent project already contains LFS objects for the fork' do
+ lfs_object = create(:lfs_object, oid: blob_object.lfs_oid)
+ create(:lfs_objects_project, project: parent_project, lfs_object: lfs_object)
+
+ expect(subject.objects_missing?).to be_falsey
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb b/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
index 15eb01eb472..4884d5f8ba4 100644
--- a/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
@@ -4,11 +4,24 @@ describe Gitlab::Ci::Build::Policy::Kubernetes do
let(:pipeline) { create(:ci_pipeline, project: project) }
context 'when kubernetes service is active' do
- set(:project) { create(:kubernetes_project) }
+ shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
+ it 'is satisfied by a kubernetes pipeline' do
+ expect(described_class.new('active'))
+ .to be_satisfied_by(pipeline)
+ end
+ end
- it 'is satisfied by a kubernetes pipeline' do
- expect(described_class.new('active'))
- .to be_satisfied_by(pipeline)
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ let(:project) { create(:kubernetes_project) }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
+ end
+
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
new file mode 100644
index 00000000000..0f1d72080c5
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Pipeline::Chain::Build do
+ set(:project) { create(:project, :repository) }
+ set(:user) { create(:user) }
+ let(:pipeline) { Ci::Pipeline.new }
+
+ let(:command) do
+ double('command', source: :push,
+ origin_ref: 'master',
+ checkout_sha: project.commit.id,
+ after_sha: nil,
+ before_sha: nil,
+ trigger_request: nil,
+ schedule: nil,
+ project: project,
+ current_user: user)
+ end
+
+ let(:step) { described_class.new(pipeline, command) }
+
+ before do
+ stub_repository_ci_yaml_file(sha: anything)
+
+ step.perform!
+ end
+
+ it 'never breaks the chain' do
+ expect(step.break?).to be false
+ end
+
+ it 'fills pipeline object with data' do
+ expect(pipeline.sha).not_to be_empty
+ expect(pipeline.sha).to eq project.commit.id
+ expect(pipeline.ref).to eq 'master'
+ expect(pipeline.user).to eq user
+ expect(pipeline.project).to eq project
+ end
+
+ it 'sets a valid config source' do
+ expect(pipeline.repository_source?).to be true
+ end
+
+ it 'returns a valid pipeline' do
+ expect(pipeline).to be_valid
+ end
+
+ it 'does not persist a pipeline' do
+ expect(pipeline).not_to be_persisted
+ end
+end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index d72f8553f55..98880fe9f28 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -178,15 +178,29 @@ module Gitlab
end
context 'when kubernetes is active' do
- let(:project) { create(:kubernetes_project) }
- let(:pipeline) { create(:ci_empty_pipeline, project: project) }
+ shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
+ it 'returns seeds for kubernetes dependent job' do
+ seeds = subject.stage_seeds(pipeline)
- it 'returns seeds for kubernetes dependent job' do
- seeds = subject.stage_seeds(pipeline)
+ expect(seeds.size).to eq 2
+ expect(seeds.first.builds.dig(0, :name)).to eq 'spinach'
+ expect(seeds.second.builds.dig(0, :name)).to eq 'production'
+ end
+ end
- expect(seeds.size).to eq 2
- expect(seeds.first.builds.dig(0, :name)).to eq 'spinach'
- expect(seeds.second.builds.dig(0, :name)).to eq 'production'
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ let(:project) { create(:kubernetes_project) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project) }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
+ end
+
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project) }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
end
diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb
index bf981d2f6f6..92792144429 100644
--- a/spec/lib/gitlab/conflict/file_spec.rb
+++ b/spec/lib/gitlab/conflict/file_spec.rb
@@ -84,6 +84,13 @@ describe Gitlab::Conflict::File do
expect(line.text).to eq(html_to_text(line.rich_text))
end
end
+
+ # This spec will break if Rouge's highlighting changes, but we need to
+ # ensure that the lines are actually highlighted.
+ it 'highlights the lines correctly' do
+ expect(conflict_file.lines.first.rich_text)
+ .to eq("<span id=\"LC1\" class=\"line\" lang=\"ruby\"><span class=\"k\">module</span> <span class=\"nn\">Gitlab</span></span>\n")
+ end
end
describe '#sections' do
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 3c8350b3aad..664ba0f7234 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -942,8 +942,8 @@ describe Gitlab::Database::MigrationHelpers do
end
it 'queues jobs in groups of buffer size 1' do
- expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id1, id2]]])
- expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id3, id3]]])
+ expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]]])
+ expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id3, id3]]])
model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
end
@@ -960,8 +960,8 @@ describe Gitlab::Database::MigrationHelpers do
end
it 'queues jobs in bulk all at once (big buffer size)' do
- expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id1, id2]],
- ['FooJob', [id3, id3]]])
+ expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]],
+ ['FooJob', [id3, id3]]])
model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
index 8922370b0a0..e850b5cd6a4 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
@@ -87,6 +87,14 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :tr
subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
end
+ it 'does not move the repositories when hashed storage is enabled' do
+ project.update!(storage_version: Project::HASHED_STORAGE_FEATURES[:repository])
+
+ expect(subject).not_to receive(:move_repository)
+
+ subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
+ end
+
it 'moves uploads' do
expect(subject).to receive(:move_uploads)
.with('known-parent/the-path', 'known-parent/the-path0')
@@ -94,6 +102,14 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :tr
subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
end
+ it 'does not move uploads when hashed storage is enabled for attachments' do
+ project.update!(storage_version: Project::HASHED_STORAGE_FEATURES[:attachments])
+
+ expect(subject).not_to receive(:move_uploads)
+
+ subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
+ end
+
it 'moves pages' do
expect(subject).to receive(:move_pages)
.with('known-parent/the-path', 'known-parent/the-path0')
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 7aeb85b8f5a..b2f13fae73f 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -73,6 +73,28 @@ describe Gitlab::Database do
end
end
+ describe '.replication_slots_supported?' do
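+ # An implementation consistent with the examples below would be roughly
+ # (a sketch, not necessarily the exact source):
+ #
+ #   def self.replication_slots_supported?
+ #     postgresql? && version.to_f >= 9.4
+ #   end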
+ it 'returns false when using MySQL' do
+ allow(described_class).to receive(:postgresql?).and_return(false)
+
+ expect(described_class.replication_slots_supported?).to eq(false)
+ end
+
+ it 'returns false when using PostgreSQL 9.3' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('9.3.1')
+
+ expect(described_class.replication_slots_supported?).to eq(false)
+ end
+
+ it 'returns true when using PostgreSQL 9.4.0 or newer' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('9.4.0')
+
+ expect(described_class.replication_slots_supported?).to eq(true)
+ end
+ end
+
describe '.nulls_last_order' do
context 'when using PostgreSQL' do
before do
@@ -199,9 +221,45 @@ describe Gitlab::Database do
described_class.bulk_insert('test', rows)
end
+ it 'does not quote values of a column in the disable_quote option' do
+ [1, 2, 4, 5].each do |i|
+ expect(connection).to receive(:quote).with(i)
+ end
+
+ described_class.bulk_insert('test', rows, disable_quote: :c)
+ end
+
+ it 'does not quote values of columns in the disable_quote option' do
+ [2, 5].each do |i|
+ expect(connection).to receive(:quote).with(i)
+ end
+
+ described_class.bulk_insert('test', rows, disable_quote: [:a, :c])
+ end
+
it 'handles non-UTF-8 data' do
expect { described_class.bulk_insert('test', [{ a: "\255" }]) }.not_to raise_error
end
+
+ context 'when using PostgreSQL' do
+ before do
+ allow(described_class).to receive(:mysql?).and_return(false)
+ end
+
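+ # On PostgreSQL the insert can append RETURNING id, so the generated
+ # statement looks roughly like this sketch:
+ #
+ #   INSERT INTO test (number) VALUES (10) RETURNING id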
+ it 'allows the returning of the IDs of the inserted rows' do
+ result = double(:result, values: [['10']])
+
+ expect(connection)
+ .to receive(:execute)
+ .with(/RETURNING id/)
+ .and_return(result)
+
+ ids = described_class
+ .bulk_insert('test', [{ number: 10 }], return_ids: true)
+
+ expect(ids).to eq([10])
+ end
+ end
end
describe '.create_connection_pool' do
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index c91895cedc3..ff9acfd08b9 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -116,12 +116,8 @@ describe Gitlab::Diff::File do
end
context 'when renamed' do
- let(:commit) { project.commit('6907208d755b60ebeacb2e9dfea74c92c3449a1f') }
- let(:diff_file) { commit.diffs.diff_file_with_new_path('files/js/commit.coffee') }
-
- before do
- allow(diff_file.new_blob).to receive(:id).and_return(diff_file.old_blob.id)
- end
+ let(:commit) { project.commit('94bb47ca1297b7b3731ff2a36923640991e9236f') }
+ let(:diff_file) { commit.diffs.diff_file_with_new_path('CHANGELOG.md') }
it 'returns false' do
expect(diff_file.content_changed?).to be_falsey
diff --git a/spec/lib/gitlab/diff/inline_diff_spec.rb b/spec/lib/gitlab/diff/inline_diff_spec.rb
index 15451c2cf99..0a41362f606 100644
--- a/spec/lib/gitlab/diff/inline_diff_spec.rb
+++ b/spec/lib/gitlab/diff/inline_diff_spec.rb
@@ -31,6 +31,10 @@ describe Gitlab::Diff::InlineDiff do
expect(subject[7]).to eq([17..17])
expect(subject[8]).to be_nil
end
+
+ it 'can handle unchanged empty lines' do
+ expect { described_class.for_lines(['- bar', '+ baz', '']) }.not_to raise_error
+ end
end
describe "#inline_diffs" do
diff --git a/spec/lib/gitlab/diff/position_tracer_spec.rb b/spec/lib/gitlab/diff/position_tracer_spec.rb
index e5138705443..ddc4f6c5b5c 100644
--- a/spec/lib/gitlab/diff/position_tracer_spec.rb
+++ b/spec/lib/gitlab/diff/position_tracer_spec.rb
@@ -1771,9 +1771,9 @@ describe Gitlab::Diff::PositionTracer do
describe "merge of target branch" do
let(:merge_commit) do
- update_file_again_commit
+ second_create_file_commit
- merge_request = create(:merge_request, source_branch: second_create_file_commit.sha, target_branch: branch_name, source_project: project)
+ merge_request = create(:merge_request, source_branch: second_branch_name, target_branch: branch_name, source_project: project)
repository.merge(current_user, merge_request.diff_head_sha, merge_request, "Merge branches")
diff --git a/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb b/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb
new file mode 100644
index 00000000000..51ce3116880
--- /dev/null
+++ b/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb
@@ -0,0 +1,84 @@
+require 'spec_helper'
+require_relative '../email_shared_blocks'
+
+describe Gitlab::Email::Handler::CreateMergeRequestHandler do
+ include_context :email_shared_context
+ it_behaves_like :reply_processing_shared_examples
+
+ before do
+ stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.adventuretime.ooo")
+ stub_config_setting(host: 'localhost')
+ end
+
+ after do
+ TestEnv.clean_test_path
+ end
+
+ let(:email_raw) { fixture_file('emails/valid_new_merge_request.eml') }
+ let(:namespace) { create(:namespace, path: 'gitlabhq') }
+
+ let!(:project) { create(:project, :public, :repository, namespace: namespace, path: 'gitlabhq') }
+ let!(:user) do
+ create(
+ :user,
+ email: 'jake@adventuretime.ooo',
+ incoming_email_token: 'auth_token'
+ )
+ end
+
+ context "as a non-developer" do
+ before do
+ project.add_guest(user)
+ end
+
+ it "raises UserNotAuthorizedError if the user is not a member" do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::UserNotAuthorizedError)
+ end
+ end
+
+ context "as a developer" do
+ before do
+ project.add_developer(user)
+ end
+
+ context "when everything is fine" do
+ it "creates a new merge request" do
+ expect { receiver.execute }.to change { project.merge_requests.count }.by(1)
+ merge_request = project.merge_requests.last
+
+ expect(merge_request.author).to eq(user)
+ expect(merge_request.source_branch).to eq('feature')
+ expect(merge_request.title).to eq('Feature added')
+ expect(merge_request.target_branch).to eq(project.default_branch)
+ end
+ end
+
+ context "something is wrong" do
+ context "when the merge request could not be saved" do
+ before do
+ allow_any_instance_of(MergeRequest).to receive(:save).and_return(false)
+ end
+
+ it "raises an InvalidMergeRequestError" do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::InvalidMergeRequestError)
+ end
+ end
+
+ context "when we can't find the incoming_email_token" do
+ let(:email_raw) { fixture_file("emails/wrong_incoming_email_token.eml") }
+
+ it "raises an UserNotFoundError" do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::UserNotFoundError)
+ end
+ end
+
+ context "when the subject is blank" do
+ let(:email_raw) { fixture_file("emails/valid_new_merge_request_no_subject.eml") }
+
+ it "raises an InvalidMergeRequestError" do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::InvalidMergeRequestError)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/handler_spec.rb b/spec/lib/gitlab/email/handler_spec.rb
new file mode 100644
index 00000000000..650b01c4df4
--- /dev/null
+++ b/spec/lib/gitlab/email/handler_spec.rb
@@ -0,0 +1,17 @@
+require 'spec_helper'
+
+describe Gitlab::Email::Handler do
+ describe '.for' do
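+ # Dispatch is keyed off the mail key; a sketch consistent with these
+ # examples (the handler list and constructor API here are assumptions):
+ #
+ #   def self.for(mail, mail_key)
+ #     HANDLERS.find do |klass|
+ #       handler = klass.new(mail, mail_key)
+ #       break handler if handler.can_handle?
+ #     end
+ #   end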
+ it 'picks the issue handler if there is no merge request prefix' do
+ expect(described_class.for('email', 'project+key')).to be_an_instance_of(Gitlab::Email::Handler::CreateIssueHandler)
+ end
+
+ it 'picks the merge request handler if there is a merge request key' do
+ expect(described_class.for('email', 'project+merge-request+key')).to be_an_instance_of(Gitlab::Email::Handler::CreateMergeRequestHandler)
+ end
+
+ it 'returns nil if no handler is found' do
+ expect(described_class.for('email', '')).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index 9151c66afb3..f6e5c55240f 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -9,6 +9,7 @@ describe Gitlab::EncodingHelper do
["nil", nil, nil],
["empty string", "".encode("ASCII-8BIT"), "".encode("UTF-8")],
["invalid utf-8 encoded string", "my bad string\xE5".force_encoding("UTF-8"), "my bad string"],
+ ["frozen non-ascii string", "é".force_encoding("ASCII-8BIT").freeze, "é".encode("UTF-8")],
[
'leaves ascii only string as is',
'ascii only string',
diff --git a/spec/lib/gitlab/fake_application_settings_spec.rb b/spec/lib/gitlab/fake_application_settings_spec.rb
index 34322c2a693..af12e13d36d 100644
--- a/spec/lib/gitlab/fake_application_settings_spec.rb
+++ b/spec/lib/gitlab/fake_application_settings_spec.rb
@@ -1,25 +1,25 @@
require 'spec_helper'
describe Gitlab::FakeApplicationSettings do
- let(:defaults) { { password_authentication_enabled: false, foobar: 'asdf', signup_enabled: true, 'test?' => 123 } }
+ let(:defaults) { { password_authentication_enabled_for_web: false, foobar: 'asdf', signup_enabled: true, 'test?' => 123 } }
subject { described_class.new(defaults) }
it 'wraps OpenStruct variables properly' do
- expect(subject.password_authentication_enabled).to be_falsey
+ expect(subject.password_authentication_enabled_for_web).to be_falsey
expect(subject.signup_enabled).to be_truthy
expect(subject.foobar).to eq('asdf')
end
it 'defines predicate methods' do
- expect(subject.password_authentication_enabled?).to be_falsey
+ expect(subject.password_authentication_enabled_for_web?).to be_falsey
expect(subject.signup_enabled?).to be_truthy
end
it 'predicate method changes when value is updated' do
- subject.password_authentication_enabled = true
+ subject.password_authentication_enabled_for_web = true
- expect(subject.password_authentication_enabled?).to be_truthy
+ expect(subject.password_authentication_enabled_for_web?).to be_truthy
end
it 'does not define a predicate method' do
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 9f4e3c49adc..5ed639543e0 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -278,6 +278,35 @@ describe Gitlab::Git::Commit, seed_helper: true do
it { is_expected.not_to include(SeedRepo::FirstCommit::ID) }
end
+ shared_examples '.shas_with_signatures' do
+ let(:signed_shas) { %w[5937ac0a7beb003549fc5fd26fc247adbce4a52e 570e7b2abdd848b95f2f578043fc23bd6f6fd24d] }
+ let(:unsigned_shas) { %w[19e2e9b4ef76b422ce1154af39a91323ccc57434 c642fe9b8b9f28f9225d7ea953fe14e74748d53b] }
+ let(:first_signed_shas) { %w[5937ac0a7beb003549fc5fd26fc247adbce4a52e c642fe9b8b9f28f9225d7ea953fe14e74748d53b] }
+
+ it 'has 2 signed shas' do
+ ret = described_class.shas_with_signatures(repository, signed_shas)
+ expect(ret).to eq(signed_shas)
+ end
+
+ it 'has 0 signed shas' do
+ ret = described_class.shas_with_signatures(repository, unsigned_shas)
+ expect(ret).to eq([])
+ end
+
+ it 'has 1 signed sha' do
+ ret = described_class.shas_with_signatures(repository, first_signed_shas)
+ expect(ret).to contain_exactly(first_signed_shas.first)
+ end
+ end
+
+ describe '.shas_with_signatures with gitaly on' do
+ it_behaves_like '.shas_with_signatures'
+ end
+
+ describe '.shas_with_signatures with gitaly disabled', :disable_gitaly do
+ it_behaves_like '.shas_with_signatures'
+ end
+
describe '.find_all' do
shared_examples 'finding all commits' do
it 'returns a collection of commits' do
diff --git a/spec/lib/gitlab/git/diff_collection_spec.rb b/spec/lib/gitlab/git/diff_collection_spec.rb
index ee657101f4c..65edc750f39 100644
--- a/spec/lib/gitlab/git/diff_collection_spec.rb
+++ b/spec/lib/gitlab/git/diff_collection_spec.rb
@@ -487,6 +487,7 @@ describe Gitlab::Git::DiffCollection, seed_helper: true do
loop do
break if @count.zero?
+
# It is critical to decrement before yielding. We may never reach the lines after 'yield'.
@count -= 1
yield @value
diff --git a/spec/lib/gitlab/git/remote_repository_spec.rb b/spec/lib/gitlab/git/remote_repository_spec.rb
new file mode 100644
index 00000000000..eb148cc3804
--- /dev/null
+++ b/spec/lib/gitlab/git/remote_repository_spec.rb
@@ -0,0 +1,99 @@
+require 'spec_helper'
+
+describe Gitlab::Git::RemoteRepository, seed_helper: true do
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
+ subject { described_class.new(repository) }
+
+ describe '#empty?' do
+ using RSpec::Parameterized::TableSyntax
+
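+ # rspec-parameterized expands each row of the `where` table into its own
+ # example, binding the columns as lets inside `with_them`, e.g. (sketch):
+ #
+ #   where(:a, :b) do
+ #     1 | 2
+ #   end
+ #
+ #   with_them do
+ #     it { expect(a + 1).to eq(b) }
+ #   end
+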
+ where(:repository, :result) do
+ Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') | false
+ Gitlab::Git::Repository.new('default', 'does-not-exist.git', '') | true
+ end
+
+ with_them do
+ it { expect(subject.empty?).to eq(result) }
+ end
+ end
+
+ describe '#commit_id' do
+ it 'returns an OID if the revision exists' do
+ expect(subject.commit_id('v1.0.0')).to eq('6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9')
+ end
+
+ it 'is nil when the revision does not exist' do
+ expect(subject.commit_id('does-not-exist')).to be_nil
+ end
+ end
+
+ describe '#branch_exists?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:branch, :result) do
+ 'master' | true
+ 'does-not-exist' | false
+ end
+
+ with_them do
+ it { expect(subject.branch_exists?(branch)).to eq(result) }
+ end
+ end
+
+ describe '#same_repository?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:other_repository, :result) do
+ repository | true
+ Gitlab::Git::Repository.new(repository.storage, repository.relative_path, '') | true
+ Gitlab::Git::Repository.new('broken', TEST_REPO_PATH, '') | false
+ Gitlab::Git::Repository.new(repository.storage, 'wrong/relative-path.git', '') | false
+ Gitlab::Git::Repository.new('broken', 'wrong/relative-path.git', '') | false
+ end
+
+ with_them do
+ it { expect(subject.same_repository?(other_repository)).to eq(result) }
+ end
+ end
+
+ describe '#fetch_env' do
+ let(:remote_repository) { described_class.new(repository) }
+
+ let(:gitaly_client) { double(:gitaly_client) }
+ let(:address) { 'fake-address' }
+ let(:token) { 'fake-token' }
+
+ subject { remote_repository.fetch_env }
+
+ before do
+ allow(remote_repository).to receive(:gitaly_client).and_return(gitaly_client)
+
+ expect(gitaly_client).to receive(:address).with(repository.storage).and_return(address)
+ expect(gitaly_client).to receive(:token).with(repository.storage).and_return(token)
+ end
+
+ it { expect(subject).to be_a(Hash) }
+ it { expect(subject['GITALY_ADDRESS']).to eq(address) }
+ it { expect(subject['GITALY_TOKEN']).to eq(token) }
+ it { expect(subject['GITALY_WD']).to eq(Dir.pwd) }
+
+ it 'creates a plausible GIT_SSH_COMMAND' do
+ git_ssh_command = subject['GIT_SSH_COMMAND']
+
+ expect(git_ssh_command).to start_with('/')
+ expect(git_ssh_command).to end_with('/gitaly-ssh upload-pack')
+ end
+
+ it 'creates a plausible GITALY_PAYLOAD' do
+ req = Gitaly::SSHUploadPackRequest.decode_json(subject['GITALY_PAYLOAD'])
+
+ expect(remote_repository.gitaly_repository).to eq(req.repository)
+ end
+
+ context 'when the token is blank' do
+ let(:token) { '' }
+
+ it { expect(subject.keys).not_to include('GITALY_TOKEN') }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 96e162ac087..f19b65a5f71 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -257,7 +257,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe '#empty?' do
- it { expect(repository.empty?).to be_falsey }
+ it { expect(repository).not_to be_empty }
end
describe '#ref_names' do
@@ -449,7 +449,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') }
after do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
@@ -484,7 +483,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') }
after do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
@@ -544,7 +542,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
after(:all) do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
end
@@ -570,7 +567,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
after(:all) do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
end
@@ -588,17 +584,16 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
after(:all) do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
end
- describe '#fetch_mirror' do
+ describe '#fetch_as_mirror_without_shell' do
let(:new_repository) do
Gitlab::Git::Repository.new('default', 'my_project.git', '')
end
- subject { new_repository.fetch_mirror(repository.path) }
+ subject { new_repository.fetch_as_mirror_without_shell(repository.path) }
before do
Gitlab::Shell.new.add_repository('default', 'my_project')
@@ -634,20 +629,48 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe '#remote_tags' do
+ let(:remote_name) { 'upstream' }
let(:target_commit_id) { SeedRepo::Commit::ID }
+ let(:user) { create(:user) }
+ let(:tag_name) { 'v0.0.1' }
+ let(:tag_message) { 'My tag' }
+ let(:remote_repository) do
+ Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
+ end
- subject { repository.remote_tags('upstream') }
+ subject { repository.remote_tags(remote_name) }
- it 'gets the remote tags' do
- expect(repository).to receive(:list_remote_tags).with('upstream')
- .and_return(["#{target_commit_id}\trefs/tags/v0.0.1\n"])
+ before do
+ repository.add_remote(remote_name, remote_repository.path)
+ remote_repository.add_tag(tag_name, user: user, target: target_commit_id)
+ end
+
+ after do
+ ensure_seeds
+ end
+ it 'gets the remote tags' do
expect(subject.first).to be_an_instance_of(Gitlab::Git::Tag)
- expect(subject.first.name).to eq('v0.0.1')
+ expect(subject.first.name).to eq(tag_name)
expect(subject.first.dereferenced_target.id).to eq(target_commit_id)
end
end
+ describe '#remote_exists?' do
+ before(:all) do
+ @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
+ @repo.add_remote("new_remote", SeedHelper::GITLAB_GIT_TEST_REPO_URL)
+ end
+
+ it 'returns true for an existing remote' do
+ expect(@repo.remote_exists?('new_remote')).to eq(true)
+ end
+
+ it 'returns false for a non-existing remote' do
+ expect(@repo.remote_exists?('foo')).to eq(false)
+ end
+ end
+
describe "#log" do
let(:commit_with_old_name) do
Gitlab::Git::Commit.decorate(repository, @commit_with_old_name_id)
@@ -1107,7 +1130,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
after do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
@@ -1154,7 +1176,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
after do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
@@ -1189,13 +1210,32 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
+ context 'when no root ref is available' do
+ it 'returns empty list' do
+ project = create(:project, :empty_repo)
+
+ names = project.repository.merged_branch_names(%w[feature])
+
+ expect(names).to be_empty
+ end
+ end
+
context 'when no branch names are specified' do
- it 'returns all merged branch names' do
+ before do
+ repository.create_branch('identical', 'master')
+ end
+
+ after do
+ ensure_seeds
+ end
+
+ it 'returns all merged branch names except for the identical one' do
names = repository.merged_branch_names
expect(names).to include('merge-test')
expect(names).to include('fix-mode')
expect(names).not_to include('feature')
+ expect(names).not_to include('identical')
end
end
end
@@ -1404,7 +1444,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
after(:all) do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
@@ -1522,35 +1561,60 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe '#fetch_source_branch!' do
- let(:local_ref) { 'refs/merge-requests/1/head' }
+ shared_examples '#fetch_source_branch!' do
+ let(:local_ref) { 'refs/merge-requests/1/head' }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
+ let(:source_repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') }
- context 'when the branch exists' do
- let(:source_branch) { 'master' }
+ after do
+ ensure_seeds
+ end
- it 'writes the ref' do
- expect(repository).to receive(:write_ref).with(local_ref, /\h{40}/)
+ context 'when the branch exists' do
+ context 'when the commit does not exist locally' do
+ let(:source_branch) { 'new-branch-for-fetch-source-branch' }
+ let(:source_rugged) { source_repository.rugged }
+ let(:new_oid) { new_commit_edit_old_file(source_rugged).oid }
- repository.fetch_source_branch!(repository, source_branch, local_ref)
- end
+ before do
+ source_rugged.branches.create(source_branch, new_oid)
+ end
- it 'returns true' do
- expect(repository.fetch_source_branch!(repository, source_branch, local_ref)).to eq(true)
- end
- end
+ it 'writes the ref' do
+ expect(repository.fetch_source_branch!(source_repository, source_branch, local_ref)).to eq(true)
+ expect(repository.commit(local_ref).sha).to eq(new_oid)
+ end
+ end
- context 'when the branch does not exist' do
- let(:source_branch) { 'definitely-not-master' }
+ context 'when the commit exists locally' do
+ let(:source_branch) { 'master' }
+ let(:expected_oid) { SeedRepo::LastCommit::ID }
- it 'does not write the ref' do
- expect(repository).not_to receive(:write_ref)
+ it 'writes the ref' do
+ # Sanity check: the commit should already exist
+ expect(repository.commit(expected_oid)).not_to be_nil
- repository.fetch_source_branch!(repository, source_branch, local_ref)
+ expect(repository.fetch_source_branch!(source_repository, source_branch, local_ref)).to eq(true)
+ expect(repository.commit(local_ref).sha).to eq(expected_oid)
+ end
+ end
end
- it 'returns false' do
- expect(repository.fetch_source_branch!(repository, source_branch, local_ref)).to eq(false)
+ context 'when the branch does not exist' do
+ let(:source_branch) { 'definitely-not-master' }
+
+ it 'does not write the ref' do
+ expect(repository.fetch_source_branch!(source_repository, source_branch, local_ref)).to eq(false)
+ expect(repository.commit(local_ref)).to be_nil
+ end
end
end
+
+ it_behaves_like '#fetch_source_branch!'
+
+ context 'without gitaly', :skip_gitaly_mock do
+ it_behaves_like '#fetch_source_branch!'
+ end
end
describe '#rm_branch' do
@@ -1598,15 +1662,15 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
- describe '#fetch' do
+ describe '#fetch_remote_without_shell' do
let(:git_path) { Gitlab.config.git.bin_path }
let(:remote_name) { 'my_remote' }
- subject { repository.fetch(remote_name) }
+ subject { repository.fetch_remote_without_shell(remote_name) }
it 'fetches the remote and returns true if the command was successful' do
expect(repository).to receive(:popen)
- .with(%W(#{git_path} fetch #{remote_name}), repository.path)
+ .with(%W(#{git_path} fetch #{remote_name}), repository.path, {})
.and_return(['', 0])
expect(subject).to be(true)
@@ -1626,7 +1690,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
after do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
@@ -1724,18 +1787,29 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
- describe '#fetch' do
- let(:git_path) { Gitlab.config.git.bin_path }
- let(:remote_name) { 'my_remote' }
+ describe '#delete_all_refs_except' do
+ let(:repository) do
+ Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
+ end
- subject { repository.fetch(remote_name) }
+ before do
+ repository.write_ref("refs/delete/a", "0b4bc9a49b562e85de7cc9e834518ea6828729b9")
+ repository.write_ref("refs/also-delete/b", "12d65c8dd2b2676fa3ac47d955accc085a37a9c1")
+ repository.write_ref("refs/keep/c", "6473c90867124755509e100d0d35ebdc85a0b6ae")
+ repository.write_ref("refs/also-keep/d", "0b4bc9a49b562e85de7cc9e834518ea6828729b9")
+ end
- it 'fetches the remote and returns true if the command was successful' do
- expect(repository).to receive(:popen)
- .with(%W(#{git_path} fetch #{remote_name}), repository.path)
- .and_return(['', 0])
+ after do
+ ensure_seeds
+ end
- expect(subject).to be(true)
+ it 'deletes all refs except those with the specified prefixes' do
+ repository.delete_all_refs_except(%w(refs/keep refs/also-keep refs/heads))
+ expect(repository.ref_exists?("refs/delete/a")).to be(false)
+ expect(repository.ref_exists?("refs/also-delete/b")).to be(false)
+ expect(repository.ref_exists?("refs/keep/c")).to be(true)
+ expect(repository.ref_exists?("refs/also-keep/d")).to be(true)
+ expect(repository.ref_exists?("refs/heads/master")).to be(true)
end
end
diff --git a/spec/lib/gitlab/git/storage/checker_spec.rb b/spec/lib/gitlab/git/storage/checker_spec.rb
new file mode 100644
index 00000000000..d74c3bcb04c
--- /dev/null
+++ b/spec/lib/gitlab/git/storage/checker_spec.rb
@@ -0,0 +1,132 @@
+require 'spec_helper'
+
+describe Gitlab::Git::Storage::Checker, :clean_gitlab_redis_shared_state do
+ let(:storage_name) { 'default' }
+ let(:hostname) { Gitlab::Environment.hostname }
+ let(:cache_key) { "storage_accessible:#{storage_name}:#{hostname}" }
+
+ subject(:checker) { described_class.new(storage_name) }
+
+ def value_from_redis(name)
+ Gitlab::Git::Storage.redis.with do |redis|
+ redis.hmget(cache_key, name)
+ end.first
+ end
+
+ def set_in_redis(name, value)
+ Gitlab::Git::Storage.redis.with do |redis|
+ redis.hmset(cache_key, name, value)
+ end.first
+ end
+
+ describe '.check_all' do
+ it 'calls a check for each storage' do
+ fake_checker_default = double
+ fake_checker_broken = double
+ fake_logger = double(:logger)
+
+ expect(described_class).to receive(:new).with('default', fake_logger) { fake_checker_default }
+ expect(described_class).to receive(:new).with('broken', fake_logger) { fake_checker_broken }
+ expect(fake_checker_default).to receive(:check_with_lease)
+ expect(fake_checker_broken).to receive(:check_with_lease)
+
+ described_class.check_all(fake_logger)
+ end
+
+ context 'with broken storage', :broken_storage do
+ it 'returns the results' do
+ expected_result = [
+ { storage: 'default', success: true },
+ { storage: 'broken', success: false }
+ ]
+
+ expect(described_class.check_all).to eq(expected_result)
+ end
+ end
+ end
+
+ describe '#initialize' do
+ it 'assigns the settings' do
+ expect(checker.hostname).to eq(hostname)
+ expect(checker.storage).to eq('default')
+ expect(checker.storage_path).to eq(TestEnv.repos_path)
+ end
+ end
+
+ describe '#check_with_lease' do
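+ # The lease presumably comes from an exclusive-lease mechanism (for example
+ # Gitlab::ExclusiveLease) so only one process probes a storage at a time;
+ # the thread-based example below verifies the mutual exclusion without
+ # depending on the lease internals.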
+ it 'only allows one check at a time' do
+ expect(checker).to receive(:check).once { sleep 1 }
+
+ thread = Thread.new { checker.check_with_lease }
+ checker.check_with_lease
+ thread.join
+ end
+
+ it 'returns a result hash' do
+ expect(checker.check_with_lease).to eq(storage: 'default', success: true)
+ end
+ end
+
+ describe '#check' do
+ it 'tracks that the storage was accessible' do
+ set_in_redis(:failure_count, 10)
+ set_in_redis(:last_failure, Time.now.to_f)
+
+ checker.check
+
+ expect(value_from_redis(:failure_count).to_i).to eq(0)
+ expect(value_from_redis(:last_failure)).to be_empty
+ expect(value_from_redis(:first_failure)).to be_empty
+ end
+
+ it 'calls the check with the correct arguments' do
+ stub_application_setting(circuitbreaker_storage_timeout: 30,
+ circuitbreaker_access_retries: 3)
+
+ expect(Gitlab::Git::Storage::ForkedStorageCheck)
+ .to receive(:storage_available?).with(TestEnv.repos_path, 30, 3)
+ .and_call_original
+
+ checker.check
+ end
+
+ it 'returns `true`' do
+ expect(checker.check).to eq(true)
+ end
+
+ it 'maintains known storage keys' do
+ Timecop.freeze do
+ # Insert an old key to expire
+ old_entry = Time.now.to_i - 3.days.to_i
+ Gitlab::Git::Storage.redis.with do |redis|
+ redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, old_entry, 'to_be_removed')
+ end
+
+ checker.check
+
+ known_keys = Gitlab::Git::Storage.redis.with do |redis|
+ redis.zrange(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, -1)
+ end
+
+ expect(known_keys).to contain_exactly(cache_key)
+ end
+ end
+
+ context 'the storage is not available', :broken_storage do
+ let(:storage_name) { 'broken' }
+
+ it 'tracks that the storage was inaccessible' do
+ Timecop.freeze do
+ expect { checker.check }.to change { value_from_redis(:failure_count).to_i }.by(1)
+
+ expect(value_from_redis(:last_failure)).not_to be_empty
+ expect(value_from_redis(:first_failure)).not_to be_empty
+ end
+ end
+
+ it 'returns `false`' do
+ expect(checker.check).to eq(false)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb b/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
index 72dabca793a..210b90bfba9 100644
--- a/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
+++ b/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
@@ -1,11 +1,18 @@
require 'spec_helper'
-describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state: true, broken_storage: true do
+describe Gitlab::Git::Storage::CircuitBreaker, :broken_storage do
let(:storage_name) { 'default' }
let(:circuit_breaker) { described_class.new(storage_name, hostname) }
let(:hostname) { Gitlab::Environment.hostname }
let(:cache_key) { "storage_accessible:#{storage_name}:#{hostname}" }
+ def set_in_redis(name, value)
+ Gitlab::Git::Storage.redis.with do |redis|
+ redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, cache_key)
+ redis.hmset(cache_key, name, value)
+ end.first
+ end
+
before do
# Override test-settings for the circuitbreaker with something more realistic
# for these specs.
@@ -19,35 +26,7 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
)
end
- def value_from_redis(name)
- Gitlab::Git::Storage.redis.with do |redis|
- redis.hmget(cache_key, name)
- end.first
- end
-
- def set_in_redis(name, value)
- Gitlab::Git::Storage.redis.with do |redis|
- redis.hmset(cache_key, name, value)
- end.first
- end
-
- describe '.reset_all!' do
- it 'clears all entries form redis' do
- set_in_redis(:failure_count, 10)
-
- described_class.reset_all!
-
- key_exists = Gitlab::Git::Storage.redis.with { |redis| redis.exists(cache_key) }
-
- expect(key_exists).to be_falsey
- end
-
- it 'does not break when there are no keys in redis' do
- expect { described_class.reset_all! }.not_to raise_error
- end
- end
-
- describe '.for_storage' do
+ describe '.for_storage', :request_store do
it 'only builds a single circuitbreaker per storage' do
expect(described_class).to receive(:new).once.and_call_original
@@ -70,7 +49,6 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
it 'assigns the settings' do
expect(circuit_breaker.hostname).to eq(hostname)
expect(circuit_breaker.storage).to eq('default')
- expect(circuit_breaker.storage_path).to eq(TestEnv.repos_path)
end
end
@@ -90,9 +68,9 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
end
end
- describe '#failure_wait_time' do
+ describe '#check_interval' do
it 'reads the value from settings' do
- expect(circuit_breaker.failure_wait_time).to eq(1)
+ expect(circuit_breaker.check_interval).to eq(1)
end
end
@@ -113,12 +91,6 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
expect(circuit_breaker.access_retries).to eq(4)
end
end
-
- describe '#backoff_threshold' do
- it 'reads the value from settings' do
- expect(circuit_breaker.backoff_threshold).to eq(5)
- end
- end
end
describe '#perform' do
@@ -133,19 +105,6 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
end
end
- it 'raises the correct exception when backing off' do
- Timecop.freeze do
- set_in_redis(:last_failure, 1.second.ago.to_f)
- set_in_redis(:failure_count, 90)
-
- expect { |b| circuit_breaker.perform(&b) }
- .to raise_error do |exception|
- expect(exception).to be_kind_of(Gitlab::Git::Storage::Failing)
- expect(exception.retry_after).to eq(30)
- end
- end
- end
-
it 'yields the block' do
expect { |b| circuit_breaker.perform(&b) }
.to yield_control
@@ -169,36 +128,6 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
.to raise_error(Rugged::OSError)
end
- it 'tracks that the storage was accessible' do
- set_in_redis(:failure_count, 10)
- set_in_redis(:last_failure, Time.now.to_f)
-
- circuit_breaker.perform { '' }
-
- expect(value_from_redis(:failure_count).to_i).to eq(0)
- expect(value_from_redis(:last_failure)).to be_empty
- expect(circuit_breaker.failure_count).to eq(0)
- expect(circuit_breaker.last_failure).to be_nil
- end
-
- it 'only performs the accessibility check once' do
- expect(Gitlab::Git::Storage::ForkedStorageCheck)
- .to receive(:storage_available?).once.and_call_original
-
- 2.times { circuit_breaker.perform { '' } }
- end
-
- it 'calls the check with the correct arguments' do
- stub_application_setting(circuitbreaker_storage_timeout: 30,
- circuitbreaker_access_retries: 3)
-
- expect(Gitlab::Git::Storage::ForkedStorageCheck)
- .to receive(:storage_available?).with(TestEnv.repos_path, 30, 3)
- .and_call_original
-
- circuit_breaker.perform { '' }
- end
-
context 'with the feature disabled' do
before do
stub_feature_flags(git_storage_circuit_breaker: false)
@@ -221,31 +150,6 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
expect(result).to eq('hello')
end
end
-
- context 'the storage is not available' do
- let(:storage_name) { 'broken' }
-
- it 'raises the correct exception' do
- expect(circuit_breaker).to receive(:track_storage_inaccessible)
-
- expect { circuit_breaker.perform { '' } }
- .to raise_error do |exception|
- expect(exception).to be_kind_of(Gitlab::Git::Storage::Inaccessible)
- expect(exception.retry_after).to eq(30)
- end
- end
-
- it 'tracks that the storage was inaccessible' do
- Timecop.freeze do
- expect { circuit_breaker.perform { '' } }.to raise_error(Gitlab::Git::Storage::Inaccessible)
-
- expect(value_from_redis(:failure_count).to_i).to eq(1)
- expect(value_from_redis(:last_failure)).not_to be_empty
- expect(circuit_breaker.failure_count).to eq(1)
- expect(circuit_breaker.last_failure).to be_within(1.second).of(Time.now)
- end
- end
- end
end
describe '#circuit_broken?' do
@@ -264,32 +168,6 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
end
end
- describe '#backing_off?' do
- it 'is true when there was a recent failure' do
- Timecop.freeze do
- set_in_redis(:last_failure, 1.second.ago.to_f)
- set_in_redis(:failure_count, 90)
-
- expect(circuit_breaker.backing_off?).to be_truthy
- end
- end
-
- context 'the `failure_wait_time` is set to 0' do
- before do
- stub_application_setting(circuitbreaker_failure_wait_time: 0)
- end
-
- it 'is working even when there are failures' do
- Timecop.freeze do
- set_in_redis(:last_failure, 0.seconds.ago.to_f)
- set_in_redis(:failure_count, 90)
-
- expect(circuit_breaker.backing_off?).to be_falsey
- end
- end
- end
- end
-
describe '#last_failure' do
it 'returns the last failure time' do
time = Time.parse("2017-05-26 17:52:30")
diff --git a/spec/lib/gitlab/git/storage/failure_info_spec.rb b/spec/lib/gitlab/git/storage/failure_info_spec.rb
new file mode 100644
index 00000000000..bae88fdda86
--- /dev/null
+++ b/spec/lib/gitlab/git/storage/failure_info_spec.rb
@@ -0,0 +1,70 @@
+require 'spec_helper'
+
+describe Gitlab::Git::Storage::FailureInfo, :broken_storage do
+ let(:storage_name) { 'default' }
+ let(:hostname) { Gitlab::Environment.hostname }
+ let(:cache_key) { "storage_accessible:#{storage_name}:#{hostname}" }
+
+ def value_from_redis(name)
+ Gitlab::Git::Storage.redis.with do |redis|
+ redis.hmget(cache_key, name)
+ end.first
+ end
+
+ def set_in_redis(name, value)
+ Gitlab::Git::Storage.redis.with do |redis|
+ redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, cache_key)
+ redis.hmset(cache_key, name, value)
+ end.first
+ end
+
+ describe '.reset_all!' do
+ it 'clears all entries from redis' do
+ set_in_redis(:failure_count, 10)
+
+ described_class.reset_all!
+
+ key_exists = Gitlab::Git::Storage.redis.with { |redis| redis.exists(cache_key) }
+
+ expect(key_exists).to be_falsey
+ end
+
+ it 'does not break when there are no keys in redis' do
+ expect { described_class.reset_all! }.not_to raise_error
+ end
+ end
+
+ describe '.load' do
+ it 'loads failure information for a storage on a host' do
+ first_failure = Time.parse("2017-11-14 17:52:30")
+ last_failure = Time.parse("2017-11-14 18:54:37")
+ failure_count = 11
+
+ set_in_redis(:first_failure, first_failure.to_i)
+ set_in_redis(:last_failure, last_failure.to_i)
+ set_in_redis(:failure_count, failure_count.to_i)
+
+ info = described_class.load(cache_key)
+
+ expect(info.first_failure).to eq(first_failure)
+ expect(info.last_failure).to eq(last_failure)
+ expect(info.failure_count).to eq(failure_count)
+ end
+ end
+
+ describe '#no_failures?' do
+ it 'is true when there are no failures' do
+ info = described_class.new(nil, nil, 0)
+
+ expect(info.no_failures?).to be_truthy
+ end
+
+ it 'is false when there are failures' do
+ info = described_class.new(Time.parse("2017-11-14 17:52:30"),
+ Time.parse("2017-11-14 18:54:37"),
+ 20)
+
+ expect(info.no_failures?).to be_falsy
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git/storage/health_spec.rb b/spec/lib/gitlab/git/storage/health_spec.rb
index 4a14a5201d1..bb670fc5d94 100644
--- a/spec/lib/gitlab/git/storage/health_spec.rb
+++ b/spec/lib/gitlab/git/storage/health_spec.rb
@@ -1,11 +1,12 @@
require 'spec_helper'
-describe Gitlab::Git::Storage::Health, clean_gitlab_redis_shared_state: true, broken_storage: true do
+describe Gitlab::Git::Storage::Health, broken_storage: true do
let(:host1_key) { 'storage_accessible:broken:web01' }
let(:host2_key) { 'storage_accessible:default:kiq01' }
def set_in_redis(cache_key, value)
Gitlab::Git::Storage.redis.with do |redis|
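+ # As in the circuit-breaker spec, the key is registered in REDIS_KNOWN_KEYS
+ # so Health can presumably discover it without scanning Redis.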
+ redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, cache_key)
redis.hmset(cache_key, :failure_count, value)
end.first
end
diff --git a/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb b/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb
index 5db37f55e03..93ad20011de 100644
--- a/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb
+++ b/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb
@@ -27,7 +27,7 @@ describe Gitlab::Git::Storage::NullCircuitBreaker do
end
describe '#failure_info' do
- it { Timecop.freeze { expect(breaker.failure_info).to eq(Gitlab::Git::Storage::CircuitBreaker::FailureInfo.new(Time.now, breaker.failure_count_threshold)) } }
+ it { expect(breaker.failure_info.no_failures?).to be_falsy }
end
end
@@ -49,7 +49,7 @@ describe Gitlab::Git::Storage::NullCircuitBreaker do
end
describe '#failure_info' do
- it { expect(breaker.failure_info).to eq(Gitlab::Git::Storage::CircuitBreaker::FailureInfo.new(nil, 0)) }
+ it { expect(breaker.failure_info.no_failures?).to be_truthy }
end
end
diff --git a/spec/lib/gitlab/git/user_spec.rb b/spec/lib/gitlab/git/user_spec.rb
index eb8db819045..99d850e1df9 100644
--- a/spec/lib/gitlab/git/user_spec.rb
+++ b/spec/lib/gitlab/git/user_spec.rb
@@ -1,9 +1,9 @@
require 'spec_helper'
describe Gitlab::Git::User do
- let(:username) { 'janedo' }
- let(:name) { 'Jane Doe' }
- let(:email) { 'janedoe@example.com' }
+ let(:username) { 'janedoe' }
+ let(:name) { 'Jane Doé' }
+ let(:email) { 'janedoé@example.com' }
let(:gl_id) { 'user-123' }
let(:user) do
described_class.new(username, name, email, gl_id)
@@ -13,7 +13,7 @@ describe Gitlab::Git::User do
describe '.from_gitaly' do
let(:gitaly_user) do
- Gitaly::User.new(gl_username: username, name: name, email: email, gl_id: gl_id)
+ Gitaly::User.new(gl_username: username, name: name.b, email: email.b, gl_id: gl_id)
end
subject { described_class.from_gitaly(gitaly_user) }
@@ -48,8 +48,13 @@ describe Gitlab::Git::User do
it 'creates a Gitaly::User with the correct data' do
expect(subject).to be_a(Gitaly::User)
expect(subject.gl_username).to eq(username)
- expect(subject.name).to eq(name)
- expect(subject.email).to eq(email)
+
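+ # Gitaly represents names and emails as binary (bytes) fields, hence the
+ # .b conversions and binary-string assertions below.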
+ expect(subject.name).to eq(name.b)
+ expect(subject.name).to be_a_binary_string
+
+ expect(subject.email).to eq(email.b)
+ expect(subject.email).to be_a_binary_string
+
expect(subject.gl_id).to eq(gl_id)
end
end
diff --git a/spec/lib/gitlab/git_spec.rb b/spec/lib/gitlab/git_spec.rb
index 494dfe0e595..ce15057dd7d 100644
--- a/spec/lib/gitlab/git_spec.rb
+++ b/spec/lib/gitlab/git_spec.rb
@@ -38,4 +38,29 @@ describe Gitlab::Git do
expect(described_class.ref_name(utf8_invalid_ref)).to eq("an_invalid_ref_å")
end
end
+
+ describe '.shas_eql?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:sha1, :sha2, :result) do
+ sha = RepoHelpers.sample_commit.id
+ short_sha = sha[0, Gitlab::Git::Commit::MIN_SHA_LENGTH]
+ too_short_sha = sha[0, Gitlab::Git::Commit::MIN_SHA_LENGTH - 1]
+
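+ # The table below suggests abbreviated SHAs match only when they are at
+ # least MIN_SHA_LENGTH characters long.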
+ [
+ [sha, sha, true],
+ [sha, short_sha, true],
+ [sha, sha.reverse, false],
+ [sha, too_short_sha, false],
+ [sha, nil, false]
+ ]
+ end
+
+ with_them do
+ it { expect(described_class.shas_eql?(sha1, sha2)).to eq(result) }
+ it 'is commutative' do
+ expect(described_class.shas_eql?(sha2, sha1)).to eq(result)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index 8127b4842b7..951e146a30a 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -104,4 +104,17 @@ describe Gitlab::GitalyClient::RefService do
expect { client.ref_exists?('reXXXXX') }.to raise_error(ArgumentError)
end
end
+
+ describe '#delete_refs' do
+ let(:prefixes) { %w(refs/heads refs/keep-around) }
+
+ it 'sends a delete_refs message' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:delete_refs)
+ .with(gitaly_request_with_params(except_with_prefix: prefixes), kind_of(Hash))
+ .and_return(double('delete_refs_response'))
+
+ client.delete_refs(except_with_prefixes: prefixes)
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb b/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
new file mode 100644
index 00000000000..6ad9f5ef766
--- /dev/null
+++ b/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
@@ -0,0 +1,88 @@
+require 'spec_helper'
+
+describe Gitlab::GitalyClient::WikiService do
+ let(:project) { create(:project) }
+ let(:storage_name) { project.repository_storage }
+ let(:relative_path) { project.disk_path + '.git' }
+ let(:client) { described_class.new(project.repository) }
+ let(:commit) { create(:gitaly_commit) }
+ let(:page_version) { Gitaly::WikiPageVersion.new(format: 'markdown', commit: commit) }
+ let(:page_info) { { title: 'My Page', raw_data: 'a', version: page_version } }
+
+ describe '#find_page' do
+ let(:response) do
+ [
+ Gitaly::WikiFindPageResponse.new(page: Gitaly::WikiPage.new(page_info)),
+ Gitaly::WikiFindPageResponse.new(page: Gitaly::WikiPage.new(raw_data: 'b'))
+ ]
+ end
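+ # Gitaly streams a wiki page in chunks; the client is expected to
+ # concatenate raw_data across responses ('a' + 'b' => 'ab').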
+ let(:wiki_page) { subject.first }
+ let(:wiki_page_version) { subject.last }
+
+ subject { client.find_page(title: 'My Page', version: 'master', dir: '') }
+
+ it 'sends a wiki_find_page message' do
+ expect_any_instance_of(Gitaly::WikiService::Stub)
+ .to receive(:wiki_find_page)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return([].each)
+
+ subject
+ end
+
+ it 'concatenates the raw data and returns a pair of WikiPage and WikiPageVersion' do
+ expect_any_instance_of(Gitaly::WikiService::Stub)
+ .to receive(:wiki_find_page)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(response.each)
+
+ expect(wiki_page.title).to eq('My Page')
+ expect(wiki_page.raw_data).to eq('ab')
+ expect(wiki_page_version.format).to eq('markdown')
+ end
+ end
+
+ describe '#get_all_pages' do
+ let(:page_2_info) { { title: 'My Page 2', raw_data: 'c', version: page_version } }
+ let(:response) do
+ [
+ Gitaly::WikiGetAllPagesResponse.new(page: Gitaly::WikiPage.new(page_info)),
+ Gitaly::WikiGetAllPagesResponse.new(page: Gitaly::WikiPage.new(raw_data: 'b')),
+ Gitaly::WikiGetAllPagesResponse.new(end_of_page: true),
+ Gitaly::WikiGetAllPagesResponse.new(page: Gitaly::WikiPage.new(page_2_info)),
+ Gitaly::WikiGetAllPagesResponse.new(page: Gitaly::WikiPage.new(raw_data: 'd')),
+ Gitaly::WikiGetAllPagesResponse.new(end_of_page: true)
+ ]
+ end
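+ # end_of_page: true marks a page boundary, so the six chunks above
+ # describe two complete pages ('ab' and 'cd').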
+ let(:wiki_page_1) { subject[0].first }
+ let(:wiki_page_1_version) { subject[0].last }
+ let(:wiki_page_2) { subject[1].first }
+ let(:wiki_page_2_version) { subject[1].last }
+
+ subject { client.get_all_pages }
+
+ it 'sends a wiki_get_all_pages message' do
+ expect_any_instance_of(Gitaly::WikiService::Stub)
+ .to receive(:wiki_get_all_pages)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return([].each)
+
+ subject
+ end
+
+ it 'concatenates the raw data and returns a pair of WikiPage and WikiPageVersion for each page' do
+ expect_any_instance_of(Gitaly::WikiService::Stub)
+ .to receive(:wiki_get_all_pages)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(response.each)
+
+ expect(subject.size).to be(2)
+ expect(wiki_page_1.title).to eq('My Page')
+ expect(wiki_page_1.raw_data).to eq('ab')
+ expect(wiki_page_1_version.format).to eq('markdown')
+ expect(wiki_page_2.title).to eq('My Page 2')
+ expect(wiki_page_2.raw_data).to eq('cd')
+ expect(wiki_page_2_version.format).to eq('markdown')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index a1f4e65b8d4..a871ed0df0e 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -278,4 +278,20 @@ describe Gitlab::GitalyClient, skip_gitaly_mock: true do
end
end
end
+
+ describe 'timeouts' do
+ context 'with default values' do
+ before do
+ stub_application_setting(gitaly_timeout_default: 55)
+ stub_application_setting(gitaly_timeout_medium: 30)
+ stub_application_setting(gitaly_timeout_fast: 10)
+ end
+
+ it 'returns expected values' do
+ expect(described_class.default_timeout).to be(55)
+ expect(described_class.medium_timeout).to be(30)
+ expect(described_class.fast_timeout).to be(10)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/bulk_importing_spec.rb b/spec/lib/gitlab/github_import/bulk_importing_spec.rb
new file mode 100644
index 00000000000..91229d9c7d4
--- /dev/null
+++ b/spec/lib/gitlab/github_import/bulk_importing_spec.rb
@@ -0,0 +1,62 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::BulkImporting do
+ let(:importer) do
+ Class.new { include(Gitlab::GithubImport::BulkImporting) }.new
+ end
+
+ describe '#build_database_rows' do
+ it 'returns an Array containing the rows to insert' do
+ object = double(:object, title: 'Foo')
+
+ expect(importer)
+ .to receive(:build)
+ .with(object)
+ .and_return({ title: 'Foo' })
+
+ expect(importer)
+ .to receive(:already_imported?)
+ .with(object)
+ .and_return(false)
+
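+ # build_database_rows consumes an enumerator of [object, index] pairs;
+ # the index appears unused by these examples.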
+ enum = [[object, 1]].to_enum
+
+ expect(importer.build_database_rows(enum)).to eq([{ title: 'Foo' }])
+ end
+
+ it 'does not import objects that have already been imported' do
+ object = double(:object, title: 'Foo')
+
+ expect(importer)
+ .not_to receive(:build)
+
+ expect(importer)
+ .to receive(:already_imported?)
+ .with(object)
+ .and_return(true)
+
+ enum = [[object, 1]].to_enum
+
+ expect(importer.build_database_rows(enum)).to be_empty
+ end
+ end
+
+ describe '#bulk_insert' do
+ it 'bulk inserts rows into the database' do
+ rows = [{ title: 'Foo' }] * 10
+ model = double(:model, table_name: 'kittens')
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .ordered
+ .with('kittens', rows.first(5))
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .ordered
+ .with('kittens', rows.last(5))
+
+ importer.bulk_insert(model, rows, batch_size: 5)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/caching_spec.rb b/spec/lib/gitlab/github_import/caching_spec.rb
new file mode 100644
index 00000000000..70ecdc16da1
--- /dev/null
+++ b/spec/lib/gitlab/github_import/caching_spec.rb
@@ -0,0 +1,117 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Caching, :clean_gitlab_redis_cache do
+ describe '.read' do
+ it 'reads a value from the cache' do
+ described_class.write('foo', 'bar')
+
+ expect(described_class.read('foo')).to eq('bar')
+ end
+
+ it 'returns nil if the cache key does not exist' do
+ expect(described_class.read('foo')).to be_nil
+ end
+
+ it 'refreshes the cache key if a value is present' do
+ described_class.write('foo', 'bar')
+
+ redis = double(:redis)
+
+ expect(redis).to receive(:get).with(/foo/).and_return('bar')
+ expect(redis).to receive(:expire).with(/foo/, described_class::TIMEOUT)
+ expect(Gitlab::Redis::Cache).to receive(:with).twice.and_yield(redis)
+
+ described_class.read('foo')
+ end
+
+ it 'does not refresh the cache key if a value is empty' do
+ described_class.write('foo', nil)
+
+ redis = double(:redis)
+
+ expect(redis).to receive(:get).with(/foo/).and_return('')
+ expect(redis).not_to receive(:expire)
+ expect(Gitlab::Redis::Cache).to receive(:with).and_yield(redis)
+
+ described_class.read('foo')
+ end
+ end
+
+ describe '.read_integer' do
+ it 'returns an Integer' do
+ described_class.write('foo', '10')
+
+ expect(described_class.read_integer('foo')).to eq(10)
+ end
+
+ it 'returns nil if no value was found' do
+ expect(described_class.read_integer('foo')).to be_nil
+ end
+ end
+
+ describe '.write' do
+ it 'writes a value to the cache and returns the written value' do
+ expect(described_class.write('foo', 10)).to eq(10)
+ expect(described_class.read('foo')).to eq('10')
+ end
+ end
+
+ describe '.set_add' do
+ it 'adds a value to a set' do
+ described_class.set_add('foo', 10)
+ described_class.set_add('foo', 10)
+
+ key = described_class.cache_key_for('foo')
+ values = Gitlab::Redis::Cache.with { |r| r.smembers(key) }
+
+ expect(values).to eq(['10'])
+ end
+ end
+
+ describe '.set_includes?' do
+ it 'returns false when the key does not exist' do
+ expect(described_class.set_includes?('foo', 10)).to eq(false)
+ end
+
+ it 'returns false when the value is not present in the set' do
+ described_class.set_add('foo', 10)
+
+ expect(described_class.set_includes?('foo', 20)).to eq(false)
+ end
+
+ it 'returns true when the set includes the given value' do
+ described_class.set_add('foo', 10)
+
+ expect(described_class.set_includes?('foo', 10)).to eq(true)
+ end
+ end
+
+ describe '.write_multiple' do
+ it 'sets multiple keys' do
+ mapping = { 'foo' => 10, 'bar' => 20 }
+
+ described_class.write_multiple(mapping)
+
+ mapping.each do |key, value|
+ full_key = described_class.cache_key_for(key)
+ found = Gitlab::Redis::Cache.with { |r| r.get(full_key) }
+
+ expect(found).to eq(value.to_s)
+ end
+ end
+ end
+
+ describe '.expire' do
+ it 'sets the expiration time of a key' do
+ timeout = 1.hour.to_i
+
+ described_class.write('foo', 'bar', timeout: 2.hours.to_i)
+ described_class.expire('foo', timeout)
+
+ key = described_class.cache_key_for('foo')
+ found_ttl = Gitlab::Redis::Cache.with { |r| r.ttl(key) }
+
+ expect(found_ttl).to be <= timeout
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index 66273255b6f..5b2642d9473 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -1,97 +1,392 @@
require 'spec_helper'
describe Gitlab::GithubImport::Client do
- let(:token) { '123456' }
- let(:github_provider) { Settingslogic.new('app_id' => 'asd123', 'app_secret' => 'asd123', 'name' => 'github', 'args' => { 'client_options' => {} }) }
+ describe '#parallel?' do
+ it 'returns true when the client is running in parallel mode' do
+ client = described_class.new('foo', parallel: true)
- subject(:client) { described_class.new(token) }
+ expect(client).to be_parallel
+ end
+
+ it 'returns false when the client is running in sequential mode' do
+ client = described_class.new('foo', parallel: false)
- before do
- allow(Gitlab.config.omniauth).to receive(:providers).and_return([github_provider])
+ expect(client).not_to be_parallel
+ end
end
- it 'convert OAuth2 client options to symbols' do
- client.client.options.keys.each do |key|
- expect(key).to be_kind_of(Symbol)
+ describe '#user' do
+ it 'returns the details for the given username' do
+ client = described_class.new('foo')
+
+ expect(client.octokit).to receive(:user).with('foo')
+ expect(client).to receive(:with_rate_limit).and_yield
+
+ client.user('foo')
end
end
- it 'does not crash (e.g. Settingslogic::MissingSetting) when verify_ssl config is not present' do
- expect { client.api }.not_to raise_error
+ describe '#repository' do
+ it 'returns the details of a repository' do
+ client = described_class.new('foo')
+
+ expect(client.octokit).to receive(:repo).with('foo/bar')
+ expect(client).to receive(:with_rate_limit).and_yield
+
+ client.repository('foo/bar')
+ end
end
- context 'when config is missing' do
- before do
- allow(Gitlab.config.omniauth).to receive(:providers).and_return([])
+ describe '#labels' do
+ it 'returns the labels' do
+ client = described_class.new('foo')
+
+ expect(client)
+ .to receive(:each_object)
+ .with(:labels, 'foo/bar')
+
+ client.labels('foo/bar')
end
+ end
- it 'is still possible to get an Octokit client' do
- expect { client.api }.not_to raise_error
+ describe '#milestones' do
+ it 'returns the milestones' do
+ client = described_class.new('foo')
+
+ expect(client)
+ .to receive(:each_object)
+ .with(:milestones, 'foo/bar')
+
+ client.milestones('foo/bar')
end
+ end
- it 'is not be possible to get an OAuth2 client' do
- expect { client.client }.to raise_error(Projects::ImportService::Error)
+ describe '#releases' do
+ it 'returns the releases' do
+ client = described_class.new('foo')
+
+ expect(client)
+ .to receive(:each_object)
+ .with(:releases, 'foo/bar')
+
+ client.releases('foo/bar')
end
end
- context 'allow SSL verification to be configurable on API' do
+ describe '#each_page' do
+ let(:client) { described_class.new('foo') }
+ let(:object1) { double(:object1) }
+ let(:object2) { double(:object2) }
+
before do
- github_provider['verify_ssl'] = false
+ allow(client)
+ .to receive(:with_rate_limit)
+ .and_yield
+
+ allow(client.octokit)
+ .to receive(:public_send)
+ .and_return([object1])
+
+ response = double(:response, data: [object2], rels: { next: nil })
+ next_page = double(:next_page, get: response)
+
+ allow(client.octokit)
+ .to receive(:last_response)
+ .and_return(double(:last_response, rels: { next: next_page }))
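+ # Together these stubs simulate two pages of results: the first request
+ # returns object1, and rels[:next] serves a final page containing object2.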
+ end
+
+ context 'without a block' do
+ it 'returns an Enumerator' do
+ expect(client.each_page(:foo)).to be_an_instance_of(Enumerator)
+ end
+
+ it 'the returned Enumerator returns Page objects' do
+ enum = client.each_page(:foo)
+
+ page1 = enum.next
+ page2 = enum.next
+
+ expect(page1).to be_an_instance_of(described_class::Page)
+ expect(page2).to be_an_instance_of(described_class::Page)
+
+ expect(page1.objects).to eq([object1])
+ expect(page1.number).to eq(1)
+
+ expect(page2.objects).to eq([object2])
+ expect(page2.number).to eq(2)
+ end
+ end
+
+ context 'with a block' do
+ it 'yields every retrieved page to the supplied block' do
+ pages = []
+
+ client.each_page(:foo) { |page| pages << page }
+
+ expect(pages[0]).to be_an_instance_of(described_class::Page)
+ expect(pages[1]).to be_an_instance_of(described_class::Page)
+
+ expect(pages[0].objects).to eq([object1])
+ expect(pages[0].number).to eq(1)
+
+ expect(pages[1].objects).to eq([object2])
+ expect(pages[1].number).to eq(2)
+ end
+
+ it 'starts at the given page' do
+ pages = []
+
+ client.each_page(:foo, page: 2) { |page| pages << page }
+
+ expect(pages[0].number).to eq(2)
+ expect(pages[1].number).to eq(3)
+ end
+ end
+ end
+
+ describe '#with_rate_limit' do
+ let(:client) { described_class.new('foo') }
+
+ it 'yields the supplied block when enough requests remain' do
+ expect(client).to receive(:requests_remaining?).and_return(true)
+
+ expect { |b| client.with_rate_limit(&b) }.to yield_control
+ end
+
+ it 'waits before yielding if not enough requests remain' do
+ expect(client).to receive(:requests_remaining?).and_return(false)
+ expect(client).to receive(:raise_or_wait_for_rate_limit)
+
+ expect { |b| client.with_rate_limit(&b) }.to yield_control
+ end
+
+ it 'waits and retries the operation if all requests were consumed in the supplied block' do
+ retries = 0
+
+ expect(client).to receive(:requests_remaining?).and_return(true)
+ expect(client).to receive(:raise_or_wait_for_rate_limit)
+
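+ # The first pass raises TooManyRequests, which should make the client
+ # wait and then retry the block exactly once.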
+ client.with_rate_limit do
+ if retries.zero?
+ retries += 1
+ raise(Octokit::TooManyRequests)
+ end
+ end
+
+ expect(retries).to eq(1)
+ end
+
+ it 'increments the request count counter' do
+ expect(client.request_count_counter)
+ .to receive(:increment)
+ .and_call_original
+
+ expect(client).to receive(:requests_remaining?).and_return(true)
+
+ client.with_rate_limit { }
+ end
+
+ it 'ignores rate limiting when disabled' do
+ expect(client)
+ .to receive(:rate_limiting_enabled?)
+ .and_return(false)
+
+ expect(client)
+ .not_to receive(:requests_remaining?)
+
+ expect(client.with_rate_limit { 10 }).to eq(10)
+ end
+ end
+
+ describe '#requests_remaining?' do
+ let(:client) { described_class.new('foo') }
+
+ it 'returns true if enough requests remain' do
+ expect(client).to receive(:remaining_requests).and_return(9000)
+
+ expect(client.requests_remaining?).to eq(true)
+ end
+
+ it 'returns false if not enough requests remain' do
+ expect(client).to receive(:remaining_requests).and_return(1)
+
+ expect(client.requests_remaining?).to eq(false)
+ end
+ end
+
+ describe '#raise_or_wait_for_rate_limit' do
+ it 'raises RateLimitError when running in parallel mode' do
+ client = described_class.new('foo', parallel: true)
+
+ expect { client.raise_or_wait_for_rate_limit }
+ .to raise_error(Gitlab::GithubImport::RateLimitError)
end
- it 'uses supplied value' do
- expect(client.client.options[:connection_opts][:ssl]).to eq({ verify: false })
- expect(client.api.connection_options[:ssl]).to eq({ verify: false })
+ it 'sleeps when running in sequential mode' do
+ client = described_class.new('foo', parallel: false)
+
+ expect(client).to receive(:rate_limit_resets_in).and_return(1)
+ expect(client).to receive(:sleep).with(1)
+
+ client.raise_or_wait_for_rate_limit
+ end
+
+ it 'increments the rate limit counter' do
+ client = described_class.new('foo', parallel: false)
+
+ expect(client)
+ .to receive(:rate_limit_resets_in)
+ .and_return(1)
+
+ expect(client)
+ .to receive(:sleep)
+ .with(1)
+
+ expect(client.rate_limit_counter)
+ .to receive(:increment)
+ .and_call_original
+
+ client.raise_or_wait_for_rate_limit
+ end
+ end
+
+ describe '#remaining_requests' do
+ it 'returns the number of remaining requests' do
+ client = described_class.new('foo')
+ rate_limit = double(remaining: 1)
+
+ expect(client.octokit).to receive(:rate_limit).and_return(rate_limit)
+ expect(client.remaining_requests).to eq(1)
+ end
+ end
+
+ describe '#rate_limit_resets_in' do
+ it 'returns the number of seconds after which the rate limit is reset' do
+ client = described_class.new('foo')
+ rate_limit = double(resets_in: 1)
+
+ expect(client.octokit).to receive(:rate_limit).and_return(rate_limit)
+
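+ # resets_in is stubbed to 1 but 6 is expected, suggesting the client pads
+ # the reset time with a 5-second buffer.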
+ expect(client.rate_limit_resets_in).to eq(6)
end
end
describe '#api_endpoint' do
- context 'when provider does not specity an API endpoint' do
- it 'uses GitHub root API endpoint' do
- expect(client.api.api_endpoint).to eq 'https://api.github.com/'
+ let(:client) { described_class.new('foo') }
+
+ context 'without a custom endpoint configured in Omniauth' do
+ it 'returns the default API endpoint' do
+ expect(client)
+ .to receive(:custom_api_endpoint)
+ .and_return(nil)
+
+ expect(client.api_endpoint).to eq('https://api.github.com')
end
end
- context 'when provider specify a custom API endpoint' do
- before do
- github_provider['args']['client_options']['site'] = 'https://github.company.com/'
+ context 'with a custom endpoint configured in Omniauth' do
+ it 'returns the custom endpoint' do
+ endpoint = 'https://github.kittens.com'
+
+ expect(client)
+ .to receive(:custom_api_endpoint)
+ .and_return(endpoint)
+
+ expect(client.api_endpoint).to eq(endpoint)
end
+ end
+ end
+
+ describe '#custom_api_endpoint' do
+ let(:client) { described_class.new('foo') }
+
+ context 'without a custom endpoint' do
+ it 'returns nil' do
+ expect(client)
+ .to receive(:github_omniauth_provider)
+ .and_return({})
+
+ expect(client.custom_api_endpoint).to be_nil
+ end
+ end
+
+ context 'with a custom endpoint' do
+ it 'returns the API endpoint' do
+ endpoint = 'https://github.kittens.com'
+
+ expect(client)
+ .to receive(:github_omniauth_provider)
+ .and_return({ 'args' => { 'client_options' => { 'site' => endpoint } } })
- it 'uses the custom API endpoint' do
- expect(OmniAuth::Strategies::GitHub).not_to receive(:default_options)
- expect(client.api.api_endpoint).to eq 'https://github.company.com/'
+ expect(client.custom_api_endpoint).to eq(endpoint)
end
end
+ end
+
+ describe '#default_api_endpoint' do
+ it 'returns the default API endpoint' do
+ client = described_class.new('foo')
+
+ expect(client.default_api_endpoint).to eq('https://api.github.com')
+ end
+ end
+
+ describe '#verify_ssl' do
+ let(:client) { described_class.new('foo') }
- context 'when given a host' do
- subject(:client) { described_class.new(token, host: 'https://try.gitea.io/') }
+ context 'without a custom configuration' do
+ it 'returns true' do
+ expect(client)
+ .to receive(:github_omniauth_provider)
+ .and_return({})
- it 'builds a endpoint with the given host and the default API version' do
- expect(client.api.api_endpoint).to eq 'https://try.gitea.io/api/v3/'
+ expect(client.verify_ssl).to eq(true)
end
end
- context 'when given an API version' do
- subject(:client) { described_class.new(token, api_version: 'v3') }
+ context 'with a custom configuration' do
+ it 'returns the configured value' do
+ expect(client.verify_ssl).to eq(false)
+ end
+ end
+ end
+
+ describe '#github_omniauth_provider' do
+ let(:client) { described_class.new('foo') }
- it 'does not use the API version without a host' do
- expect(client.api.api_endpoint).to eq 'https://api.github.com/'
+ context 'without a configured provider' do
+ it 'returns an empty Hash' do
+ expect(Gitlab.config.omniauth)
+ .to receive(:providers)
+ .and_return([])
+
+ expect(client.github_omniauth_provider).to eq({})
end
end
- context 'when given a host and version' do
- subject(:client) { described_class.new(token, host: 'https://try.gitea.io/', api_version: 'v3') }
+ context 'with a configured provider' do
+ it 'returns the provider details as a Hash' do
+ hash = client.github_omniauth_provider
- it 'builds a endpoint with the given options' do
- expect(client.api.api_endpoint).to eq 'https://try.gitea.io/api/v3/'
+ expect(hash['name']).to eq('github')
+ expect(hash['url']).to eq('https://github.com/')
end
end
end
- it 'does not raise error when rate limit is disabled' do
- stub_request(:get, /api.github.com/)
- allow(client.api).to receive(:rate_limit!).and_raise(Octokit::NotFound)
+ describe '#rate_limiting_enabled?' do
+ let(:client) { described_class.new('foo') }
- expect { client.issues {} }.not_to raise_error
+ it 'returns true when using GitHub.com' do
+ expect(client.rate_limiting_enabled?).to eq(true)
+ end
+
+ it 'returns false for GitHub enterprise installations' do
+ expect(client)
+ .to receive(:api_endpoint)
+ .and_return('https://github.kittens.com/')
+
+ expect(client.rate_limiting_enabled?).to eq(false)
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
new file mode 100644
index 00000000000..1568c657a1e
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
@@ -0,0 +1,152 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::DiffNoteImporter do
+ let(:project) { create(:project) }
+ let(:client) { double(:client) }
+ let(:user) { create(:user) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ let(:hunk) do
+ '@@ -1 +1 @@
+ -Hello
+ +Hello world'
+ end
+
+ let(:note) do
+ Gitlab::GithubImport::Representation::DiffNote.new(
+ noteable_type: 'MergeRequest',
+ noteable_id: 1,
+ commit_id: '123abc',
+ file_path: 'README.md',
+ diff_hunk: hunk,
+ author: Gitlab::GithubImport::Representation::User
+ .new(id: user.id, login: user.username),
+ note: 'Hello',
+ created_at: created_at,
+ updated_at: updated_at,
+ github_id: 1
+ )
+ end
+
+ let(:importer) { described_class.new(note, project, client) }
+
+ describe '#execute' do
+ context 'when the merge request no longer exists' do
+ it 'does not import anything' do
+ expect(Gitlab::Database).not_to receive(:bulk_insert)
+
+ importer.execute
+ end
+ end
+
+ context 'when the merge request exists' do
+ let!(:merge_request) do
+ create(:merge_request, source_project: project, target_project: project)
+ end
+
+ before do
+ allow(importer)
+ .to receive(:find_merge_request_id)
+ .and_return(merge_request.id)
+ end
+
+ it 'imports the note' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .and_return([user.id, true])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(
+ LegacyDiffNote.table_name,
+ [
+ {
+ noteable_type: 'MergeRequest',
+ noteable_id: merge_request.id,
+ project_id: project.id,
+ author_id: user.id,
+ note: 'Hello',
+ system: false,
+ commit_id: '123abc',
+ line_code: note.line_code,
+ type: 'LegacyDiffNote',
+ created_at: created_at,
+ updated_at: updated_at,
+ st_diff: note.diff_hash.to_yaml
+ }
+ ]
+ )
+ .and_call_original
+
+ importer.execute
+ end
+
+ it 'imports the note when the author could not be found' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .and_return([project.creator_id, false])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(
+ LegacyDiffNote.table_name,
+ [
+ {
+ noteable_type: 'MergeRequest',
+ noteable_id: merge_request.id,
+ project_id: project.id,
+ author_id: project.creator_id,
+ note: "*Created by: #{user.username}*\n\nHello",
+ system: false,
+ commit_id: '123abc',
+ line_code: note.line_code,
+ type: 'LegacyDiffNote',
+ created_at: created_at,
+ updated_at: updated_at,
+ st_diff: note.diff_hash.to_yaml
+ }
+ ]
+ )
+ .and_call_original
+
+ importer.execute
+ end
+
+ it 'produces a valid LegacyDiffNote' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .and_return([user.id, true])
+
+ importer.execute
+
+ note = project.notes.diff_notes.take
+
+ expect(note).to be_valid
+ expect(note.diff).to be_an_instance_of(Gitlab::Git::Diff)
+ end
+
+ it 'does not import the note when a foreign key error is raised' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .and_return([project.creator_id, false])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
+
+ expect { importer.execute }.not_to raise_error
+ end
+ end
+ end
+
+ describe '#find_merge_request_id' do
+ it 'returns a merge request ID' do
+ expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
+ .to receive(:database_id)
+ .and_return(10)
+
+ expect(importer.find_merge_request_id).to eq(10)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
new file mode 100644
index 00000000000..4713c6795bb
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
@@ -0,0 +1,119 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::DiffNotesImporter do
+ let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+
+ let(:github_comment) do
+ double(
+ :response,
+ html_url: 'https://github.com/foo/bar/pull/42',
+ path: 'README.md',
+ commit_id: '123abc',
+ diff_hunk: "@@ -1 +1 @@\n-Hello\n+Hello world",
+ user: double(:user, id: 4, login: 'alice'),
+ body: 'Hello world',
+ created_at: Time.zone.now,
+ updated_at: Time.zone.now,
+ id: 1
+ )
+ end
+
+ describe '#parallel?' do
+ it 'returns true when running in parallel mode' do
+ importer = described_class.new(project, client)
+ expect(importer).to be_parallel
+ end
+
+ it 'returns false when running in sequential mode' do
+ importer = described_class.new(project, client, parallel: false)
+ expect(importer).not_to be_parallel
+ end
+ end
+
+ describe '#execute' do
+ context 'when running in parallel mode' do
+ it 'imports diff notes in parallel' do
+ importer = described_class.new(project, client)
+
+ expect(importer).to receive(:parallel_import)
+
+ importer.execute
+ end
+ end
+
+ context 'when running in sequential mode' do
+ it 'imports diff notes in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+
+ expect(importer).to receive(:sequential_import)
+
+ importer.execute
+ end
+ end
+ end
+
+ describe '#sequential_import' do
+ it 'imports each diff note in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+ diff_note_importer = double(:diff_note_importer)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(github_comment)
+
+ expect(Gitlab::GithubImport::Importer::DiffNoteImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::DiffNote),
+ project,
+ client
+ )
+ .and_return(diff_note_importer)
+
+ expect(diff_note_importer).to receive(:execute)
+
+ importer.sequential_import
+ end
+ end
+
+ describe '#parallel_import' do
+ it 'imports each diff note in parallel' do
+ importer = described_class.new(project, client)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(github_comment)
+
+ expect(Gitlab::GithubImport::ImportDiffNoteWorker)
+ .to receive(:perform_async)
+ .with(project.id, an_instance_of(Hash), an_instance_of(String))
+
+ waiter = importer.parallel_import
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(1)
+ end
+ end
+
+ describe '#id_for_already_imported_cache' do
+ it 'returns the ID of the given note' do
+ importer = described_class.new(project, client)
+
+ expect(importer.id_for_already_imported_cache(github_comment))
+ .to eq(1)
+ end
+ end
+
+ describe '#collection_options' do
+ it 'returns an empty Hash' do
+ # For large projects (e.g. kubernetes/kubernetes) GitHub's API may produce
+ # HTTP 500 errors when using explicit sorting options, regardless of what
+ # order you sort in. Not using any sorting options at all allows us to
+ # work around this.
+ importer = described_class.new(project, client)
+
+ expect(importer.collection_options).to eq({})
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb
new file mode 100644
index 00000000000..665b31ef244
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb
@@ -0,0 +1,27 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::IssueAndLabelLinksImporter do
+ describe '#execute' do
+ it 'imports an issue and its labels' do
+ issue = double(:issue)
+ project = double(:project)
+ client = double(:client)
+ label_links_instance = double(:label_links_importer)
+ importer = described_class.new(issue, project, client)
+
+ expect(Gitlab::GithubImport::Importer::IssueImporter)
+ .to receive(:import_if_issue)
+ .with(issue, project, client)
+
+ expect(Gitlab::GithubImport::Importer::LabelLinksImporter)
+ .to receive(:new)
+ .with(issue, project, client)
+ .and_return(label_links_instance)
+
+ expect(label_links_instance)
+ .to receive(:execute)
+
+ importer.execute
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
new file mode 100644
index 00000000000..d34ca0b76b8
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
@@ -0,0 +1,201 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redis_cache do
+ let(:project) { create(:project) }
+ let(:client) { double(:client) }
+ let(:user) { create(:user) }
+ let(:milestone) { create(:milestone, project: project) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ let(:issue) do
+ Gitlab::GithubImport::Representation::Issue.new(
+ iid: 42,
+ title: 'My Issue',
+ description: 'This is my issue',
+ milestone_number: 1,
+ state: :opened,
+ assignees: [
+ Gitlab::GithubImport::Representation::User.new(id: 4, login: 'alice'),
+ Gitlab::GithubImport::Representation::User.new(id: 5, login: 'bob')
+ ],
+ label_names: %w[bug],
+ author: Gitlab::GithubImport::Representation::User.new(id: 4, login: 'alice'),
+ created_at: created_at,
+ updated_at: updated_at,
+ pull_request: false
+ )
+ end
+
+ describe '.import_if_issue' do
+ it 'imports an issuable if it is a regular issue' do
+ importer = double(:importer)
+
+ expect(described_class)
+ .to receive(:new)
+ .with(issue, project, client)
+ .and_return(importer)
+
+ expect(importer).to receive(:execute)
+
+ described_class.import_if_issue(issue, project, client)
+ end
+
+ it 'does not import the issuable if it is a pull request' do
+ expect(issue).to receive(:pull_request?).and_return(true)
+
+ expect(described_class).not_to receive(:new)
+
+ described_class.import_if_issue(issue, project, client)
+ end
+ end
+
+ describe '#execute' do
+ let(:importer) { described_class.new(issue, project, client) }
+
+ it 'creates the issue and assignees' do
+ expect(importer)
+ .to receive(:create_issue)
+ .and_return(10)
+
+ expect(importer)
+ .to receive(:create_assignees)
+ .with(10)
+
+ expect(importer.issuable_finder)
+ .to receive(:cache_database_id)
+ .with(10)
+
+ importer.execute
+ end
+ end
+
+ describe '#create_issue' do
+ let(:importer) { described_class.new(issue, project, client) }
+
+ before do
+ allow(importer.milestone_finder)
+ .to receive(:id_for)
+ .with(issue)
+ .and_return(milestone.id)
+ end
+
+ context 'when the issue author could be found' do
+ it 'creates the issue with the found author as the issue author' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(issue)
+ .and_return([user.id, true])
+
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .with(
+ {
+ iid: 42,
+ title: 'My Issue',
+ author_id: user.id,
+ project_id: project.id,
+ description: 'This is my issue',
+ milestone_id: milestone.id,
+ state: :opened,
+ created_at: created_at,
+ updated_at: updated_at
+ },
+ project.issues
+ )
+ .and_call_original
+
+ importer.create_issue
+ end
+ end
+
+ context 'when the issue author could not be found' do
+ it 'creates the issue with the project creator as the issue author' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(issue)
+ .and_return([project.creator_id, false])
+
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .with(
+ {
+ iid: 42,
+ title: 'My Issue',
+ author_id: project.creator_id,
+ project_id: project.id,
+ description: "*Created by: alice*\n\nThis is my issue",
+ milestone_id: milestone.id,
+ state: :opened,
+ created_at: created_at,
+ updated_at: updated_at
+ },
+ project.issues
+ )
+ .and_call_original
+
+ importer.create_issue
+ end
+ end
+
+ context 'when the import fails due to a foreign key error' do
+ it 'does not raise any errors' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(issue)
+ .and_return([user.id, true])
+
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
+
+ expect { importer.create_issue }.not_to raise_error
+ end
+ end
+
+ it 'produces a valid Issue' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(issue)
+ .and_return([user.id, true])
+
+ importer.create_issue
+
+ expect(project.issues.take).to be_valid
+ end
+
+ it 'returns the ID of the created issue' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(issue)
+ .and_return([user.id, true])
+
+ expect(importer.create_issue).to be_a_kind_of(Numeric)
+ end
+ end
+
+ describe '#create_assignees' do
+ it 'inserts the issue assignees in bulk' do
+ importer = described_class.new(issue, project, client)
+
+ allow(importer.user_finder)
+ .to receive(:user_id_for)
+ .ordered.with(issue.assignees[0])
+ .and_return(4)
+
+ allow(importer.user_finder)
+ .to receive(:user_id_for)
+ .ordered.with(issue.assignees[1])
+ .and_return(5)
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(
+ IssueAssignee.table_name,
+ [{ issue_id: 1, user_id: 4 }, { issue_id: 1, user_id: 5 }]
+ )
+
+ importer.create_assignees(1)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
new file mode 100644
index 00000000000..e237e79e94b
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
@@ -0,0 +1,111 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::IssuesImporter do
+ let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ let(:github_issue) do
+ double(
+ :response,
+ number: 42,
+ title: 'My Issue',
+ body: 'This is my issue',
+ milestone: double(:milestone, number: 4),
+ state: 'open',
+ assignees: [double(:user, id: 4, login: 'alice')],
+ labels: [double(:label, name: 'bug')],
+ user: double(:user, id: 4, login: 'alice'),
+ created_at: created_at,
+ updated_at: updated_at,
+ pull_request: false
+ )
+ end
+
+ describe '#parallel?' do
+ it 'returns true when running in parallel mode' do
+ importer = described_class.new(project, client)
+ expect(importer).to be_parallel
+ end
+
+ it 'returns false when running in sequential mode' do
+ importer = described_class.new(project, client, parallel: false)
+ expect(importer).not_to be_parallel
+ end
+ end
+
+ describe '#execute' do
+ context 'when running in parallel mode' do
+ it 'imports issues in parallel' do
+ importer = described_class.new(project, client)
+
+ expect(importer).to receive(:parallel_import)
+
+ importer.execute
+ end
+ end
+
+ context 'when running in sequential mode' do
+ it 'imports issues in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+
+ expect(importer).to receive(:sequential_import)
+
+ importer.execute
+ end
+ end
+ end
+
+ describe '#sequential_import' do
+ it 'imports each issue in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+ issue_importer = double(:importer)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(github_issue)
+
+ expect(Gitlab::GithubImport::Importer::IssueAndLabelLinksImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::Issue),
+ project,
+ client
+ )
+ .and_return(issue_importer)
+
+ expect(issue_importer).to receive(:execute)
+
+ importer.sequential_import
+ end
+ end
+
+ describe '#parallel_import' do
+ it 'imports each issue in parallel' do
+ importer = described_class.new(project, client)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(github_issue)
+
+ expect(Gitlab::GithubImport::ImportIssueWorker)
+ .to receive(:perform_async)
+ .with(project.id, an_instance_of(Hash), an_instance_of(String))
+
+ waiter = importer.parallel_import
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(1)
+ end
+ end
+
+ describe '#id_for_already_imported_cache' do
+ it 'returns the issue number of the given issue' do
+ importer = described_class.new(project, client)
+
+ expect(importer.id_for_already_imported_cache(github_issue))
+ .to eq(42)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb b/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb
new file mode 100644
index 00000000000..e2a71e78574
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb
@@ -0,0 +1,82 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::LabelLinksImporter do
+ let(:project) { create(:project) }
+ let(:client) { double(:client) }
+ let(:issue) do
+ double(
+ :issue,
+ iid: 4,
+ label_names: %w[bug],
+ issuable_type: Issue,
+ pull_request?: false
+ )
+ end
+
+ let(:importer) { described_class.new(issue, project, client) }
+
+ describe '#execute' do
+ it 'creates the label links' do
+ importer = described_class.new(issue, project, client)
+
+ expect(importer).to receive(:create_labels)
+
+ importer.execute
+ end
+ end
+
+ describe '#create_labels' do
+ it 'inserts the label links in bulk' do
+ expect(importer.label_finder)
+ .to receive(:id_for)
+ .with('bug')
+ .and_return(2)
+
+ expect(importer)
+ .to receive(:find_target_id)
+ .and_return(1)
+
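+ # Time is frozen so the created_at/updated_at values in the expected rows
+ # match Time.zone.now exactly.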
+ Timecop.freeze do
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(
+ LabelLink.table_name,
+ [
+ {
+ label_id: 2,
+ target_id: 1,
+ target_type: Issue,
+ created_at: Time.zone.now,
+ updated_at: Time.zone.now
+ }
+ ]
+ )
+
+ importer.create_labels
+ end
+ end
+
+ it 'does not insert label links for non-existing labels' do
+ expect(importer.label_finder)
+ .to receive(:id_for)
+ .with('bug')
+ .and_return(nil)
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(LabelLink.table_name, [])
+
+ importer.create_labels
+ end
+ end
+
+ describe '#find_target_id' do
+ it 'returns the ID of the issuable to create the label link for' do
+ expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
+ .to receive(:database_id)
+ .and_return(10)
+
+ expect(importer.find_target_id).to eq(10)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
new file mode 100644
index 00000000000..156ef96a0fa
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
@@ -0,0 +1,107 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_redis_cache do
+ let(:project) { create(:project, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+ let(:importer) { described_class.new(project, client) }
+
+ describe '#execute' do
+ it 'imports the labels in bulk' do
+ label_hash = { title: 'bug', color: '#fffaaa' }
+
+ expect(importer)
+ .to receive(:build_labels)
+ .and_return([label_hash])
+
+ expect(importer)
+ .to receive(:bulk_insert)
+ .with(Label, [label_hash])
+
+ expect(importer)
+ .to receive(:build_labels_cache)
+
+ importer.execute
+ end
+ end
+
+ describe '#build_labels' do
+ it 'returns an Array containnig label rows' do
+ label = double(:label, name: 'bug', color: 'ffffff')
+
+ expect(importer).to receive(:each_label).and_return([label])
+
+ rows = importer.build_labels
+
+ expect(rows.length).to eq(1)
+ expect(rows[0][:title]).to eq('bug')
+ end
+
+ it 'does not create labels that already exist' do
+ create(:label, project: project, title: 'bug')
+
+ label = double(:label, name: 'bug', color: 'ffffff')
+
+ expect(importer).to receive(:each_label).and_return([label])
+ expect(importer.build_labels).to be_empty
+ end
+ end
+
+ describe '#build_labels_cache' do
+ it 'builds the labels cache' do
+ expect_any_instance_of(Gitlab::GithubImport::LabelFinder)
+ .to receive(:build_cache)
+
+ importer.build_labels_cache
+ end
+ end
+
+ describe '#build' do
+ let(:label_hash) do
+ importer.build(double(:label, name: 'bug', color: 'ffffff'))
+ end
+
+ it 'returns the attributes of the label as a Hash' do
+ expect(label_hash).to be_an_instance_of(Hash)
+ end
+
+ context 'the returned Hash' do
+ it 'includes the label title' do
+ expect(label_hash[:title]).to eq('bug')
+ end
+
+ it 'includes the label color' do
+ expect(label_hash[:color]).to eq('#ffffff')
+ end
+
+ it 'includes the project ID' do
+ expect(label_hash[:project_id]).to eq(project.id)
+ end
+
+ it 'includes the label type' do
+ expect(label_hash[:type]).to eq('ProjectLabel')
+ end
+
+ it 'includes the created timestamp' do
+ Timecop.freeze do
+ expect(label_hash[:created_at]).to eq(Time.zone.now)
+ end
+ end
+
+ it 'includes the updated timestamp' do
+ Timecop.freeze do
+ expect(label_hash[:updated_at]).to eq(Time.zone.now)
+ end
+ end
+ end
+ end
+
+ describe '#each_label' do
+ it 'returns the labels' do
+ expect(client)
+ .to receive(:labels)
+ .with('foo/bar')
+
+ importer.each_label
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
new file mode 100644
index 00000000000..b1cac3b6e46
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
@@ -0,0 +1,120 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis_cache do
+ let(:project) { create(:project, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+ let(:importer) { described_class.new(project, client) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ let(:milestone) do
+ double(
+ :milestone,
+ number: 1,
+ title: '1.0',
+ description: 'The first release',
+ state: 'open',
+ created_at: created_at,
+ updated_at: updated_at
+ )
+ end
+
+ describe '#execute' do
+ it 'imports the milestones in bulk' do
+ milestone_hash = { number: 1, title: '1.0' }
+
+ expect(importer)
+ .to receive(:build_milestones)
+ .and_return([milestone_hash])
+
+ expect(importer)
+ .to receive(:bulk_insert)
+ .with(Milestone, [milestone_hash])
+
+ expect(importer)
+ .to receive(:build_milestones_cache)
+
+ importer.execute
+ end
+ end
+
+ describe '#build_milestones' do
+ it 'returns an Array containing milestone rows' do
+ expect(importer)
+ .to receive(:each_milestone)
+ .and_return([milestone])
+
+ rows = importer.build_milestones
+
+ expect(rows.length).to eq(1)
+ expect(rows[0][:title]).to eq('1.0')
+ end
+
+ it 'does not create milestones that already exist' do
+ create(:milestone, project: project, title: '1.0', iid: 1)
+
+ expect(importer)
+ .to receive(:each_milestone)
+ .and_return([milestone])
+
+ expect(importer.build_milestones).to be_empty
+ end
+ end
+
+ describe '#build_milestones_cache' do
+ it 'builds the milestones cache' do
+ expect_any_instance_of(Gitlab::GithubImport::MilestoneFinder)
+ .to receive(:build_cache)
+
+ importer.build_milestones_cache
+ end
+ end
+
+ describe '#build' do
+ let(:milestone_hash) { importer.build(milestone) }
+
+ it 'returns the attributes of the milestone as a Hash' do
+ expect(milestone_hash).to be_an_instance_of(Hash)
+ end
+
+ context 'the returned Hash' do
+ it 'includes the milestone number' do
+ expect(milestone_hash[:iid]).to eq(1)
+ end
+
+ it 'includes the milestone title' do
+ expect(milestone_hash[:title]).to eq('1.0')
+ end
+
+ it 'includes the milestone description' do
+ expect(milestone_hash[:description]).to eq('The first release')
+ end
+
+ it 'includes the project ID' do
+ expect(milestone_hash[:project_id]).to eq(project.id)
+ end
+
+ it 'includes the milestone state' do
+ expect(milestone_hash[:state]).to eq(:active)
+ end
+
+ it 'includes the created timestamp' do
+ expect(milestone_hash[:created_at]).to eq(created_at)
+ end
+
+ it 'includes the updated timestamp' do
+ expect(milestone_hash[:updated_at]).to eq(updated_at)
+ end
+ end
+ end
+
+ describe '#each_milestone' do
+ it 'returns the milestones' do
+ expect(client)
+ .to receive(:milestones)
+ .with('foo/bar', state: 'all')
+
+ importer.each_milestone
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
new file mode 100644
index 00000000000..9bdcc42be19
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
@@ -0,0 +1,151 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::NoteImporter do
+ let(:client) { double(:client) }
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ let(:github_note) do
+ Gitlab::GithubImport::Representation::Note.new(
+ noteable_id: 1,
+ noteable_type: 'Issue',
+ author: Gitlab::GithubImport::Representation::User.new(id: 4, login: 'alice'),
+ note: 'This is my note',
+ created_at: created_at,
+ updated_at: updated_at,
+ github_id: 1
+ )
+ end
+
+ let(:importer) { described_class.new(github_note, project, client) }
+
+ describe '#execute' do
+ context 'when the noteable exists' do
+ let!(:issue_row) { create(:issue, project: project, iid: 1) }
+
+ before do
+ allow(importer)
+ .to receive(:find_noteable_id)
+ .and_return(issue_row.id)
+ end
+
+ context 'when the author could be found' do
+ it 'imports the note with the found author as the note author' do
+ expect(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(github_note)
+ .and_return([user.id, true])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(
+ Note.table_name,
+ [
+ {
+ noteable_type: 'Issue',
+ noteable_id: issue_row.id,
+ project_id: project.id,
+ author_id: user.id,
+ note: 'This is my note',
+ system: false,
+ created_at: created_at,
+ updated_at: updated_at
+ }
+ ]
+ )
+ .and_call_original
+
+ importer.execute
+ end
+ end
+
+ context 'when the note author could not be found' do
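+        # A sketch of the fallback behaviour, inferred from the expectation
+        # below: when the GitHub author has no matching GitLab user, the note
+        # is attributed to the project creator and the original login is
+        # prepended to the note body.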
+ it 'imports the note with the project creator as the note author' do
+ expect(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(github_note)
+ .and_return([project.creator_id, false])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(
+ Note.table_name,
+ [
+ {
+ noteable_type: 'Issue',
+ noteable_id: issue_row.id,
+ project_id: project.id,
+ author_id: project.creator_id,
+ note: "*Created by: alice*\n\nThis is my note",
+ system: false,
+ created_at: created_at,
+ updated_at: updated_at
+ }
+ ]
+ )
+ .and_call_original
+
+ importer.execute
+ end
+ end
+ end
+
+ context 'when the noteable does not exist' do
+ it 'does not import the note' do
+ expect(Gitlab::Database).not_to receive(:bulk_insert)
+
+ importer.execute
+ end
+ end
+
+ context 'when the import fails due to a foreign key error' do
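+      # Assumption based on this example: bulk_insert can raise
+      # ActiveRecord::InvalidForeignKey if the noteable disappears between
+      # lookup and insert, and the importer is expected to swallow the error
+      # rather than abort the entire import.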
+ it 'does not raise any errors' do
+ issue_row = create(:issue, project: project, iid: 1)
+
+ allow(importer)
+ .to receive(:find_noteable_id)
+ .and_return(issue_row.id)
+
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(github_note)
+ .and_return([user.id, true])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
+
+ expect { importer.execute }.not_to raise_error
+ end
+ end
+
+ it 'produces a valid Note' do
+ issue_row = create(:issue, project: project, iid: 1)
+
+ allow(importer)
+ .to receive(:find_noteable_id)
+ .and_return(issue_row.id)
+
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(github_note)
+ .and_return([user.id, true])
+
+ importer.execute
+
+ expect(project.notes.take).to be_valid
+ end
+ end
+
+ describe '#find_noteable_id' do
+ it 'returns the ID of the noteable' do
+ expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
+ .to receive(:database_id)
+ .and_return(10)
+
+ expect(importer.find_noteable_id).to eq(10)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
new file mode 100644
index 00000000000..f046d13f879
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
@@ -0,0 +1,116 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::NotesImporter do
+ let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+
+ let(:github_comment) do
+ double(
+ :response,
+ html_url: 'https://github.com/foo/bar/issues/42',
+ user: double(:user, id: 4, login: 'alice'),
+ body: 'Hello world',
+ created_at: Time.zone.now,
+ updated_at: Time.zone.now,
+ id: 1
+ )
+ end
+
+ describe '#parallel?' do
+ it 'returns true when running in parallel mode' do
+ importer = described_class.new(project, client)
+ expect(importer).to be_parallel
+ end
+
+ it 'returns false when running in sequential mode' do
+ importer = described_class.new(project, client, parallel: false)
+ expect(importer).not_to be_parallel
+ end
+ end
+
+ describe '#execute' do
+ context 'when running in parallel mode' do
+ it 'imports notes in parallel' do
+ importer = described_class.new(project, client)
+
+ expect(importer).to receive(:parallel_import)
+
+ importer.execute
+ end
+ end
+
+ context 'when running in sequential mode' do
+ it 'imports notes in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+
+ expect(importer).to receive(:sequential_import)
+
+ importer.execute
+ end
+ end
+ end
+
+ describe '#sequential_import' do
+ it 'imports each note in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+ note_importer = double(:note_importer)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(github_comment)
+
+ expect(Gitlab::GithubImport::Importer::NoteImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::Note),
+ project,
+ client
+ )
+ .and_return(note_importer)
+
+ expect(note_importer).to receive(:execute)
+
+ importer.sequential_import
+ end
+ end
+
+ describe '#parallel_import' do
+ it 'imports each note in parallel' do
+ importer = described_class.new(project, client)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(github_comment)
+
+ expect(Gitlab::GithubImport::ImportNoteWorker)
+ .to receive(:perform_async)
+ .with(project.id, an_instance_of(Hash), an_instance_of(String))
+
+ waiter = importer.parallel_import
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(1)
+ end
+ end
+
+ describe '#id_for_already_imported_cache' do
+ it 'returns the ID of the given note' do
+ importer = described_class.new(project, client)
+
+ expect(importer.id_for_already_imported_cache(github_comment))
+ .to eq(1)
+ end
+ end
+
+ describe '#collection_options' do
+ it 'returns an empty Hash' do
+ # For large projects (e.g. kubernetes/kubernetes) GitHub's API may produce
+ # HTTP 500 errors when using explicit sorting options, regardless of what
+ # order you sort in. Not using any sorting options at all allows us to
+ # work around this.
+ importer = described_class.new(project, client)
+
+ expect(importer.collection_options).to eq({})
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
new file mode 100644
index 00000000000..35f3fdf8304
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
@@ -0,0 +1,221 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redis_cache do
+ let(:project) { create(:project, :repository) }
+ let(:client) { double(:client) }
+ let(:user) { create(:user) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+ let(:merged_at) { Time.new(2017, 1, 1, 12, 17) }
+
+ let(:source_commit) { project.repository.commit('feature') }
+ let(:target_commit) { project.repository.commit('master') }
+ let(:milestone) { create(:milestone, project: project) }
+
+ let(:pull_request) do
+ alice = Gitlab::GithubImport::Representation::User.new(id: 4, login: 'alice')
+
+ Gitlab::GithubImport::Representation::PullRequest.new(
+ iid: 42,
+ title: 'My Pull Request',
+ description: 'This is my pull request',
+ source_branch: 'feature',
+ source_branch_sha: source_commit.id,
+ target_branch: 'master',
+ target_branch_sha: target_commit.id,
+ source_repository_id: 400,
+ target_repository_id: 200,
+ source_repository_owner: 'alice',
+ state: :closed,
+ milestone_number: milestone.iid,
+ author: alice,
+ assignee: alice,
+ created_at: created_at,
+ updated_at: updated_at,
+ merged_at: merged_at
+ )
+ end
+
+ let(:importer) { described_class.new(pull_request, project, client) }
+
+ describe '#execute' do
+ it 'imports the pull request' do
+ expect(importer)
+ .to receive(:create_merge_request)
+ .and_return(10)
+
+ expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
+ .to receive(:cache_database_id)
+ .with(10)
+
+ importer.execute
+ end
+ end
+
+ describe '#create_merge_request' do
+ before do
+ allow(importer.milestone_finder)
+ .to receive(:id_for)
+ .with(pull_request)
+ .and_return(milestone.id)
+ end
+
+ context 'when the author could be found' do
+ before do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(pull_request)
+ .and_return([user.id, true])
+
+ allow(importer.user_finder)
+ .to receive(:assignee_id_for)
+ .with(pull_request)
+ .and_return(user.id)
+ end
+
+ it 'imports the pull request with the pull request author as the merge request author' do
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .with(
+ {
+ iid: 42,
+ title: 'My Pull Request',
+ description: 'This is my pull request',
+ source_project_id: project.id,
+ target_project_id: project.id,
+ source_branch: 'alice:feature',
+ target_branch: 'master',
+ state: :merged,
+ milestone_id: milestone.id,
+ author_id: user.id,
+ assignee_id: user.id,
+ created_at: created_at,
+ updated_at: updated_at
+ },
+ project.merge_requests
+ )
+ .and_call_original
+
+ importer.create_merge_request
+ end
+
+ it 'returns the ID of the created merge request' do
+ id = importer.create_merge_request
+
+ expect(id).to be_a_kind_of(Numeric)
+ end
+
+ it 'creates the merge request diffs' do
+ importer.create_merge_request
+
+ mr = project.merge_requests.take
+
+ expect(mr.merge_request_diffs.exists?).to eq(true)
+ end
+ end
+
+ context 'when the author could not be found' do
+ it 'imports the pull request with the project creator as the merge request author' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(pull_request)
+ .and_return([project.creator_id, false])
+
+ allow(importer.user_finder)
+ .to receive(:assignee_id_for)
+ .with(pull_request)
+ .and_return(user.id)
+
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .with(
+ {
+ iid: 42,
+ title: 'My Pull Request',
+ description: "*Created by: alice*\n\nThis is my pull request",
+ source_project_id: project.id,
+ target_project_id: project.id,
+ source_branch: 'alice:feature',
+ target_branch: 'master',
+ state: :merged,
+ milestone_id: milestone.id,
+ author_id: project.creator_id,
+ assignee_id: user.id,
+ created_at: created_at,
+ updated_at: updated_at
+ },
+ project.merge_requests
+ )
+ .and_call_original
+
+ importer.create_merge_request
+ end
+ end
+
+ context 'when the source and target branch are identical' do
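+      # Based on the expectation below: for same-repository pull requests
+      # whose source and target branch names collide, the importer generates
+      # a unique source branch of the form "<branch>-<iid>" (here "master-42").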
+ it 'uses a generated source branch name for the merge request' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(pull_request)
+ .and_return([user.id, true])
+
+ allow(importer.user_finder)
+ .to receive(:assignee_id_for)
+ .with(pull_request)
+ .and_return(user.id)
+
+ allow(pull_request)
+ .to receive(:source_repository_id)
+ .and_return(pull_request.target_repository_id)
+
+ allow(pull_request)
+ .to receive(:source_branch)
+ .and_return('master')
+
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .with(
+ {
+ iid: 42,
+ title: 'My Pull Request',
+ description: 'This is my pull request',
+ source_project_id: project.id,
+ target_project_id: project.id,
+ source_branch: 'master-42',
+ target_branch: 'master',
+ state: :merged,
+ milestone_id: milestone.id,
+ author_id: user.id,
+ assignee_id: user.id,
+ created_at: created_at,
+ updated_at: updated_at
+ },
+ project.merge_requests
+ )
+ .and_call_original
+
+ importer.create_merge_request
+ end
+ end
+
+ context 'when the import fails due to a foreign key error' do
+ it 'does not raise any errors' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(pull_request)
+ .and_return([user.id, true])
+
+ allow(importer.user_finder)
+ .to receive(:assignee_id_for)
+ .with(pull_request)
+ .and_return(user.id)
+
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
+
+ expect { importer.create_merge_request }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
new file mode 100644
index 00000000000..d72572cd510
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -0,0 +1,272 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::PullRequestsImporter do
+ let(:project) { create(:project, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+
+ let(:pull_request) do
+ double(
+ :response,
+ number: 42,
+ title: 'My Pull Request',
+ body: 'This is my pull request',
+ state: 'closed',
+ head: double(
+ :head,
+ sha: '123abc',
+ ref: 'my-feature',
+ repo: double(:repo, id: 400),
+ user: double(:user, id: 4, login: 'alice')
+ ),
+ base: double(
+ :base,
+ sha: '456def',
+ ref: 'master',
+ repo: double(:repo, id: 200)
+ ),
+ milestone: double(:milestone, number: 4),
+ user: double(:user, id: 4, login: 'alice'),
+ assignee: double(:user, id: 4, login: 'alice'),
+ created_at: Time.zone.now,
+ updated_at: Time.zone.now,
+ merged_at: Time.zone.now
+ )
+ end
+
+ describe '#parallel?' do
+ it 'returns true when running in parallel mode' do
+ importer = described_class.new(project, client)
+ expect(importer).to be_parallel
+ end
+
+ it 'returns false when running in sequential mode' do
+ importer = described_class.new(project, client, parallel: false)
+ expect(importer).not_to be_parallel
+ end
+ end
+
+ describe '#execute' do
+ context 'when running in parallel mode' do
+ it 'imports pull requests in parallel' do
+ importer = described_class.new(project, client)
+
+ expect(importer).to receive(:parallel_import)
+
+ importer.execute
+ end
+ end
+
+ context 'when running in sequential mode' do
+ it 'imports pull requests in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+
+ expect(importer).to receive(:sequential_import)
+
+ importer.execute
+ end
+ end
+ end
+
+ describe '#sequential_import' do
+ it 'imports each pull request in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+ pull_request_importer = double(:pull_request_importer)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(pull_request)
+
+ expect(Gitlab::GithubImport::Importer::PullRequestImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::PullRequest),
+ project,
+ client
+ )
+ .and_return(pull_request_importer)
+
+ expect(pull_request_importer).to receive(:execute)
+
+ importer.sequential_import
+ end
+ end
+
+ describe '#parallel_import' do
+    it 'imports each pull request in parallel' do
+ importer = described_class.new(project, client)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(pull_request)
+
+ expect(Gitlab::GithubImport::ImportPullRequestWorker)
+ .to receive(:perform_async)
+ .with(project.id, an_instance_of(Hash), an_instance_of(String))
+
+ waiter = importer.parallel_import
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(1)
+ end
+ end
+
+ describe '#each_object_to_import', :clean_gitlab_redis_cache do
+ let(:importer) { described_class.new(project, client) }
+
+ before do
+ page = double(:page, objects: [pull_request], number: 1)
+
+ expect(client)
+ .to receive(:each_page)
+ .with(
+ :pull_requests,
+ 'foo/bar',
+ { state: 'all', sort: 'created', direction: 'asc', page: 1 }
+ )
+ .and_yield(page)
+ end
+
+ it 'yields every pull request to the supplied block' do
+ expect { |b| importer.each_object_to_import(&b) }
+ .to yield_with_args(pull_request)
+ end
+
+ it 'updates the repository if a pull request was updated after the last clone' do
+ expect(importer)
+ .to receive(:update_repository?)
+ .with(pull_request)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:update_repository)
+
+ importer.each_object_to_import { }
+ end
+ end
+
+ describe '#update_repository' do
+ it 'updates the repository' do
+ importer = described_class.new(project, client)
+
+ expect(project.repository)
+ .to receive(:fetch_remote)
+ .with('github', forced: false)
+
+ expect(Rails.logger)
+ .to receive(:info)
+ .with(an_instance_of(String))
+
+ expect(importer.repository_updates_counter)
+ .to receive(:increment)
+ .with(project: project.path_with_namespace)
+ .and_call_original
+
+ Timecop.freeze do
+ importer.update_repository
+
+ expect(project.last_repository_updated_at).to eq(Time.zone.now)
+ end
+ end
+ end
+
+ describe '#update_repository?' do
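+    # The examples below encode the refresh heuristic being tested: a re-fetch
+    # is needed only when the PR changed after the last repository update AND
+    # at least one of its head/base SHAs is missing from the local repository.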
+ let(:importer) { described_class.new(project, client) }
+
+ context 'when the pull request was updated after the last update' do
+ let(:pr) do
+ double(
+ :pr,
+ updated_at: Time.zone.now,
+ head: double(:head, sha: '123'),
+ base: double(:base, sha: '456')
+ )
+ end
+
+ before do
+ allow(project)
+ .to receive(:last_repository_updated_at)
+ .and_return(1.year.ago)
+ end
+
+ it 'returns true when the head SHA is not present' do
+ expect(importer)
+ .to receive(:commit_exists?)
+ .with(pr.head.sha)
+ .and_return(false)
+
+ expect(importer.update_repository?(pr)).to eq(true)
+ end
+
+ it 'returns true when the base SHA is not present' do
+ expect(importer)
+ .to receive(:commit_exists?)
+ .with(pr.head.sha)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:commit_exists?)
+ .with(pr.base.sha)
+ .and_return(false)
+
+ expect(importer.update_repository?(pr)).to eq(true)
+ end
+
+ it 'returns false if both the head and base SHAs are present' do
+ expect(importer)
+ .to receive(:commit_exists?)
+ .with(pr.head.sha)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:commit_exists?)
+ .with(pr.base.sha)
+ .and_return(true)
+
+ expect(importer.update_repository?(pr)).to eq(false)
+ end
+ end
+
+ context 'when the pull request was updated before the last update' do
+ it 'returns false' do
+ pr = double(:pr, updated_at: 1.year.ago)
+
+ allow(project)
+ .to receive(:last_repository_updated_at)
+ .and_return(Time.zone.now)
+
+ expect(importer.update_repository?(pr)).to eq(false)
+ end
+ end
+ end
+
+ describe '#commit_exists?' do
+ let(:importer) { described_class.new(project, client) }
+
+ it 'returns true when a commit exists' do
+ expect(project.repository)
+ .to receive(:lookup)
+ .with('123')
+ .and_return(double(:commit))
+
+ expect(importer.commit_exists?('123')).to eq(true)
+ end
+
+ it 'returns false when a commit does not exist' do
+ expect(project.repository)
+ .to receive(:lookup)
+ .with('123')
+ .and_raise(Rugged::OdbError)
+
+ expect(importer.commit_exists?('123')).to eq(false)
+ end
+ end
+
+ describe '#id_for_already_imported_cache' do
+ it 'returns the PR number of the given PR' do
+ importer = described_class.new(project, client)
+
+ expect(importer.id_for_already_imported_cache(pull_request))
+ .to eq(42)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
new file mode 100644
index 00000000000..23ae026fb14
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
@@ -0,0 +1,125 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::ReleasesImporter do
+ let(:project) { create(:project) }
+ let(:client) { double(:client) }
+ let(:importer) { described_class.new(project, client) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ let(:release) do
+ double(
+ :release,
+ tag_name: '1.0',
+ body: 'This is my release',
+ created_at: created_at,
+ updated_at: updated_at
+ )
+ end
+
+ describe '#execute' do
+ it 'imports the releases in bulk' do
+ release_hash = {
+ tag_name: '1.0',
+ description: 'This is my release',
+ created_at: created_at,
+ updated_at: updated_at
+ }
+
+ expect(importer).to receive(:build_releases).and_return([release_hash])
+ expect(importer).to receive(:bulk_insert).with(Release, [release_hash])
+
+ importer.execute
+ end
+ end
+
+ describe '#build_releases' do
+    it 'returns an Array containing release rows' do
+ expect(importer).to receive(:each_release).and_return([release])
+
+ rows = importer.build_releases
+
+ expect(rows.length).to eq(1)
+ expect(rows[0][:tag]).to eq('1.0')
+ end
+
+ it 'does not create releases that already exist' do
+ create(:release, project: project, tag: '1.0', description: '1.0')
+
+ expect(importer).to receive(:each_release).and_return([release])
+ expect(importer.build_releases).to be_empty
+ end
+
+ it 'uses a default release description if none is provided' do
+ expect(release).to receive(:body).and_return('')
+ expect(importer).to receive(:each_release).and_return([release])
+
+ release = importer.build_releases.first
+
+ expect(release[:description]).to eq('Release for tag 1.0')
+ end
+ end
+
+ describe '#build' do
+ let(:release_hash) { importer.build(release) }
+
+ it 'returns the attributes of the release as a Hash' do
+ expect(release_hash).to be_an_instance_of(Hash)
+ end
+
+ context 'the returned Hash' do
+ it 'includes the tag name' do
+ expect(release_hash[:tag]).to eq('1.0')
+ end
+
+ it 'includes the release description' do
+ expect(release_hash[:description]).to eq('This is my release')
+ end
+
+ it 'includes the project ID' do
+ expect(release_hash[:project_id]).to eq(project.id)
+ end
+
+ it 'includes the created timestamp' do
+ expect(release_hash[:created_at]).to eq(created_at)
+ end
+
+ it 'includes the updated timestamp' do
+ expect(release_hash[:updated_at]).to eq(updated_at)
+ end
+ end
+ end
+
+ describe '#each_release' do
+ let(:release) { double(:release) }
+
+ before do
+ allow(project).to receive(:import_source).and_return('foo/bar')
+
+ allow(client)
+ .to receive(:releases)
+ .with('foo/bar')
+ .and_return([release].to_enum)
+ end
+
+ it 'returns an Enumerator' do
+ expect(importer.each_release).to be_an_instance_of(Enumerator)
+ end
+
+ it 'yields every release to the Enumerator' do
+ expect(importer.each_release.next).to eq(release)
+ end
+ end
+
+ describe '#description_for' do
+ it 'returns the description when present' do
+ expect(importer.description_for(release)).to eq(release.body)
+ end
+
+ it 'returns a generated description when one is not present' do
+ allow(release).to receive(:body).and_return('')
+
+ expect(importer.description_for(release)).to eq('Release for tag 1.0')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
new file mode 100644
index 00000000000..168e5d07504
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
@@ -0,0 +1,227 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::RepositoryImporter do
+ let(:repository) { double(:repository) }
+ let(:client) { double(:client) }
+
+ let(:project) do
+ double(
+ :project,
+ import_url: 'foo.git',
+ import_source: 'foo/bar',
+ repository_storage_path: 'foo',
+ disk_path: 'foo',
+ repository: repository
+ )
+ end
+
+ let(:importer) { described_class.new(project, client) }
+ let(:shell_adapter) { Gitlab::Shell.new }
+
+ before do
+ # The method "gitlab_shell" returns a new instance every call, making
+ # it harder to set expectations. To work around this we'll stub the method
+ # and return the same instance on every call.
+ allow(importer).to receive(:gitlab_shell).and_return(shell_adapter)
+ end
+
+ describe '#import_wiki?' do
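+    # Per the examples below, the wiki is imported only when it is enabled on
+    # the GitHub repository and no wiki repository exists locally yet.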
+ it 'returns true if the wiki should be imported' do
+ repo = double(:repo, has_wiki: true)
+
+ expect(client)
+ .to receive(:repository)
+ .with('foo/bar')
+ .and_return(repo)
+
+ expect(project)
+ .to receive(:wiki_repository_exists?)
+ .and_return(false)
+
+ expect(importer.import_wiki?).to eq(true)
+ end
+
+ it 'returns false if the GitHub wiki is disabled' do
+ repo = double(:repo, has_wiki: false)
+
+ expect(client)
+ .to receive(:repository)
+ .with('foo/bar')
+ .and_return(repo)
+
+ expect(importer.import_wiki?).to eq(false)
+ end
+
+ it 'returns false if the wiki has already been imported' do
+ repo = double(:repo, has_wiki: true)
+
+ expect(client)
+ .to receive(:repository)
+ .with('foo/bar')
+ .and_return(repo)
+
+ expect(project)
+ .to receive(:wiki_repository_exists?)
+ .and_return(true)
+
+ expect(importer.import_wiki?).to eq(false)
+ end
+ end
+
+ describe '#execute' do
+ it 'imports the repository and wiki' do
+ expect(repository)
+ .to receive(:empty_repo?)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:import_wiki?)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:import_repository)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:import_wiki_repository)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:update_clone_time)
+
+ expect(importer.execute).to eq(true)
+ end
+
+ it 'does not import the repository if it already exists' do
+ expect(repository)
+ .to receive(:empty_repo?)
+ .and_return(false)
+
+ expect(importer)
+ .to receive(:import_wiki?)
+ .and_return(true)
+
+ expect(importer)
+ .not_to receive(:import_repository)
+
+ expect(importer)
+ .to receive(:import_wiki_repository)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:update_clone_time)
+
+ expect(importer.execute).to eq(true)
+ end
+
+ it 'does not import the wiki if it is disabled' do
+ expect(repository)
+ .to receive(:empty_repo?)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:import_wiki?)
+ .and_return(false)
+
+ expect(importer)
+ .to receive(:import_repository)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:update_clone_time)
+
+ expect(importer)
+ .not_to receive(:import_wiki_repository)
+
+ expect(importer.execute).to eq(true)
+ end
+
+ it 'does not import the wiki if the repository could not be imported' do
+ expect(repository)
+ .to receive(:empty_repo?)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:import_wiki?)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:import_repository)
+ .and_return(false)
+
+ expect(importer)
+ .not_to receive(:update_clone_time)
+
+ expect(importer)
+ .not_to receive(:import_wiki_repository)
+
+ expect(importer.execute).to eq(false)
+ end
+ end
+
+ describe '#import_repository' do
+ it 'imports the repository' do
+ expect(project)
+ .to receive(:ensure_repository)
+
+ expect(repository)
+ .to receive(:fetch_as_mirror)
+ .with(project.import_url, refmap: Gitlab::GithubImport.refmap, forced: true, remote_name: 'github')
+
+ expect(importer.import_repository).to eq(true)
+ end
+
+ it 'marks the import as failed when an error was raised' do
+ expect(project).to receive(:ensure_repository)
+ .and_raise(Gitlab::Git::Repository::NoRepository)
+
+ expect(importer)
+ .to receive(:fail_import)
+ .and_return(false)
+
+ expect(importer.import_repository).to eq(false)
+ end
+ end
+
+ describe '#import_wiki_repository' do
+ it 'imports the wiki repository' do
+ expect(importer.gitlab_shell)
+ .to receive(:import_repository)
+ .with('foo', 'foo.wiki', 'foo.wiki.git')
+
+ expect(importer.import_wiki_repository).to eq(true)
+ end
+
+ it 'marks the import as failed if an error was raised' do
+ expect(importer.gitlab_shell)
+ .to receive(:import_repository)
+ .and_raise(Gitlab::Shell::Error)
+
+ expect(importer)
+ .to receive(:fail_import)
+ .and_return(false)
+
+ expect(importer.import_wiki_repository).to eq(false)
+ end
+ end
+
+ describe '#fail_import' do
+ it 'marks the import as failed' do
+ expect(project).to receive(:mark_import_as_failed).with('foo')
+
+ expect(importer.fail_import('foo')).to eq(false)
+ end
+ end
+
+ describe '#update_clone_time' do
+ it 'sets the timestamp for when the cloning process finished' do
+ Timecop.freeze do
+ expect(project)
+ .to receive(:update_column)
+ .with(:last_repository_updated_at, Time.zone.now)
+
+ importer.update_clone_time
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/issuable_finder_spec.rb b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
new file mode 100644
index 00000000000..da69911812a
--- /dev/null
+++ b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
@@ -0,0 +1,38 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::IssuableFinder, :clean_gitlab_redis_cache do
+ let(:project) { double(:project, id: 4) }
+ let(:issue) do
+ double(:issue, issuable_type: MergeRequest, iid: 1)
+ end
+
+ let(:finder) { described_class.new(project, issue) }
+
+ describe '#database_id' do
+ it 'returns nil when no cache is in place' do
+ expect(finder.database_id).to be_nil
+ end
+
+ it 'returns the ID of an issuable when the cache is in place' do
+ finder.cache_database_id(10)
+
+ expect(finder.database_id).to eq(10)
+ end
+
+ it 'raises TypeError when the object is not supported' do
+ finder = described_class.new(project, double(:issue))
+
+ expect { finder.database_id }.to raise_error(TypeError)
+ end
+ end
+
+ describe '#cache_database_id' do
+ it 'caches the ID of a database row' do
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .with('github-import/issuable-finder/4/MergeRequest/1', 10)
+
+ finder.cache_database_id(10)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/label_finder_spec.rb b/spec/lib/gitlab/github_import/label_finder_spec.rb
new file mode 100644
index 00000000000..8ba766944d6
--- /dev/null
+++ b/spec/lib/gitlab/github_import/label_finder_spec.rb
@@ -0,0 +1,61 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::LabelFinder, :clean_gitlab_redis_cache do
+ let(:project) { create(:project) }
+ let(:finder) { described_class.new(project) }
+ let!(:bug) { create(:label, project: project, name: 'Bug') }
+ let!(:feature) { create(:label, project: project, name: 'Feature') }
+
+ describe '#id_for' do
+ context 'with a cache in place' do
+ before do
+ finder.build_cache
+ end
+
+ it 'returns the ID of the given label' do
+ expect(finder.id_for(feature.name)).to eq(feature.id)
+ end
+
+ it 'returns nil for an empty cache key' do
+ key = finder.cache_key_for(bug.name)
+
+ Gitlab::GithubImport::Caching.write(key, '')
+
+ expect(finder.id_for(bug.name)).to be_nil
+ end
+
+      it 'returns nil for a non-existing label name' do
+ expect(finder.id_for('kittens')).to be_nil
+ end
+ end
+
+ context 'without a cache in place' do
+ it 'returns nil for a label' do
+ expect(finder.id_for(feature.name)).to be_nil
+ end
+ end
+ end
+
+ describe '#build_cache' do
+ it 'builds the cache of all project labels' do
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write_multiple)
+ .with(
+ {
+ "github-import/label-finder/#{project.id}/Bug" => bug.id,
+ "github-import/label-finder/#{project.id}/Feature" => feature.id
+ }
+ )
+ .and_call_original
+
+ finder.build_cache
+ end
+ end
+
+ describe '#cache_key_for' do
+ it 'returns the cache key for a label name' do
+ expect(finder.cache_key_for('foo'))
+ .to eq("github-import/label-finder/#{project.id}/foo")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/markdown_text_spec.rb b/spec/lib/gitlab/github_import/markdown_text_spec.rb
new file mode 100644
index 00000000000..1ff5b9d66b3
--- /dev/null
+++ b/spec/lib/gitlab/github_import/markdown_text_spec.rb
@@ -0,0 +1,28 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::MarkdownText do
+ describe '.format' do
+ it 'formats the text' do
+ author = double(:author, login: 'Alice')
+ text = described_class.format('Hello', author)
+
+ expect(text).to eq("*Created by: Alice*\n\nHello")
+ end
+ end
+
+ describe '#to_s' do
+ it 'returns the text when the author was found' do
+ author = double(:author, login: 'Alice')
+ text = described_class.new('Hello', author, true)
+
+ expect(text.to_s).to eq('Hello')
+ end
+
+ it 'returns the text with an extra header when the author was not found' do
+ author = double(:author, login: 'Alice')
+ text = described_class.new('Hello', author)
+
+ expect(text.to_s).to eq("*Created by: Alice*\n\nHello")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/milestone_finder_spec.rb b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
new file mode 100644
index 00000000000..dff931a2fe8
--- /dev/null
+++ b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
@@ -0,0 +1,57 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::MilestoneFinder, :clean_gitlab_redis_cache do
+ let!(:project) { create(:project) }
+ let!(:milestone) { create(:milestone, project: project) }
+ let(:finder) { described_class.new(project) }
+
+ describe '#id_for' do
+ let(:issuable) { double(:issuable, milestone_number: milestone.iid) }
+
+ context 'with a cache in place' do
+ before do
+ finder.build_cache
+ end
+
+ it 'returns the milestone ID of the given issuable' do
+ expect(finder.id_for(issuable)).to eq(milestone.id)
+ end
+
+ it 'returns nil for an empty cache key' do
+ key = finder.cache_key_for(milestone.iid)
+
+ Gitlab::GithubImport::Caching.write(key, '')
+
+ expect(finder.id_for(issuable)).to be_nil
+ end
+
+ it 'returns nil for an issuable with a non-existing milestone' do
+ expect(finder.id_for(double(:issuable, milestone_number: 5))).to be_nil
+ end
+ end
+
+ context 'without a cache in place' do
+ it 'returns nil' do
+ expect(finder.id_for(issuable)).to be_nil
+ end
+ end
+ end
+
+ describe '#build_cache' do
+ it 'builds the cache of all project milestones' do
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write_multiple)
+ .with("github-import/milestone-finder/#{project.id}/1" => milestone.id)
+ .and_call_original
+
+ finder.build_cache
+ end
+ end
+
+ describe '#cache_key_for' do
+ it 'returns the cache key for an IID' do
+ expect(finder.cache_key_for(10))
+ .to eq("github-import/milestone-finder/#{project.id}/10")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/page_counter_spec.rb b/spec/lib/gitlab/github_import/page_counter_spec.rb
new file mode 100644
index 00000000000..c2613a9a415
--- /dev/null
+++ b/spec/lib/gitlab/github_import/page_counter_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::PageCounter, :clean_gitlab_redis_cache do
+ let(:project) { double(:project, id: 1) }
+ let(:counter) { described_class.new(project, :issues) }
+
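+  # The examples below assume the counter persists the current page in Redis
+  # (hence the :clean_gitlab_redis_cache tag) so an interrupted import can
+  # resume, and that #set only ever advances the page number.
+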
+ describe '#initialize' do
+ it 'sets the initial page number to 1 when no value is cached' do
+ expect(counter.current).to eq(1)
+ end
+
+ it 'sets the initial page number to the cached value when one is present' do
+ Gitlab::GithubImport::Caching.write(counter.cache_key, 2)
+
+ expect(described_class.new(project, :issues).current).to eq(2)
+ end
+ end
+
+ describe '#set' do
+ it 'overwrites the page number when the given number is greater than the current number' do
+ counter.set(4)
+ expect(counter.current).to eq(4)
+ end
+
+ it 'does not overwrite the page number when the given number is lower than the current number' do
+ counter.set(2)
+ counter.set(1)
+
+ expect(counter.current).to eq(2)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/parallel_importer_spec.rb b/spec/lib/gitlab/github_import/parallel_importer_spec.rb
new file mode 100644
index 00000000000..e2a821d4d5c
--- /dev/null
+++ b/spec/lib/gitlab/github_import/parallel_importer_spec.rb
@@ -0,0 +1,40 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::ParallelImporter do
+ describe '.async?' do
+ it 'returns true' do
+ expect(described_class).to be_async
+ end
+ end
+
+ describe '#execute', :clean_gitlab_redis_shared_state do
+ let(:project) { create(:project) }
+ let(:importer) { described_class.new(project) }
+
+ before do
+ expect(Gitlab::GithubImport::Stage::ImportRepositoryWorker)
+ .to receive(:perform_async)
+ .with(project.id)
+ .and_return('123')
+ end
+
+ it 'schedules the importing of the repository' do
+ expect(importer.execute).to eq(true)
+ end
+
+ it 'sets the JID in Redis' do
+ expect(Gitlab::SidekiqStatus)
+ .to receive(:set)
+ .with("github-importer/#{project.id}", StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION)
+ .and_call_original
+
+ importer.execute
+ end
+
+ it 'updates the import JID of the project' do
+ importer.execute
+
+ expect(project.import_jid).to eq("github-importer/#{project.id}")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
new file mode 100644
index 00000000000..98205d3ee25
--- /dev/null
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -0,0 +1,296 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::ParallelScheduling do
+ let(:importer_class) do
+ Class.new do
+ include(Gitlab::GithubImport::ParallelScheduling)
+
+ def collection_method
+ :issues
+ end
+ end
+ end
+
+ let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+
+ describe '#parallel?' do
+ it 'returns true when running in parallel mode' do
+ expect(importer_class.new(project, client)).to be_parallel
+ end
+
+ it 'returns false when running in sequential mode' do
+ importer = importer_class.new(project, client, parallel: false)
+
+ expect(importer).not_to be_parallel
+ end
+ end
+
+ describe '#execute' do
+ it 'imports data in parallel when running in parallel mode' do
+ importer = importer_class.new(project, client)
+ waiter = double(:waiter)
+
+ expect(importer)
+ .to receive(:parallel_import)
+ .and_return(waiter)
+
+ expect(importer.execute)
+ .to eq(waiter)
+ end
+
+    it 'imports data in sequence when running in sequential mode' do
+ importer = importer_class.new(project, client, parallel: false)
+
+ expect(importer)
+ .to receive(:sequential_import)
+ .and_return([])
+
+ expect(importer.execute)
+ .to eq([])
+ end
+
+ it 'expires the cache used for tracking already imported objects' do
+ importer = importer_class.new(project, client)
+
+ expect(importer).to receive(:parallel_import)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:expire)
+ .with(importer.already_imported_cache_key, a_kind_of(Numeric))
+
+ importer.execute
+ end
+ end
+
+ describe '#sequential_import' do
+ let(:importer) { importer_class.new(project, client, parallel: false) }
+
+ it 'imports data in sequence' do
+ repr_class = double(:representation_class)
+ repr_instance = double(:representation_instance)
+ gh_importer = double(:github_importer)
+ gh_importer_instance = double(:github_importer_instance)
+ object = double(:object)
+
+ expect(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(object)
+
+ expect(importer)
+ .to receive(:representation_class)
+ .and_return(repr_class)
+
+ expect(repr_class)
+ .to receive(:from_api_response)
+ .with(object)
+ .and_return(repr_instance)
+
+ expect(importer)
+ .to receive(:importer_class)
+ .and_return(gh_importer)
+
+ expect(gh_importer)
+ .to receive(:new)
+ .with(repr_instance, project, client)
+ .and_return(gh_importer_instance)
+
+ expect(gh_importer_instance)
+ .to receive(:execute)
+
+ importer.sequential_import
+ end
+ end
+
+ describe '#parallel_import' do
+ let(:importer) { importer_class.new(project, client) }
+
+ it 'imports data in parallel' do
+ repr_class = double(:representation)
+ worker_class = double(:worker)
+ object = double(:object)
+
+ expect(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(object)
+
+ expect(importer)
+ .to receive(:representation_class)
+ .and_return(repr_class)
+
+ expect(importer)
+ .to receive(:sidekiq_worker_class)
+ .and_return(worker_class)
+
+ expect(repr_class)
+ .to receive(:from_api_response)
+ .with(object)
+ .and_return({ title: 'Foo' })
+
+ expect(worker_class)
+ .to receive(:perform_async)
+ .with(project.id, { title: 'Foo' }, an_instance_of(String))
+
+ expect(importer.parallel_import)
+ .to be_an_instance_of(Gitlab::JobWaiter)
+ end
+ end
+
+ describe '#each_object_to_import' do
+ let(:importer) { importer_class.new(project, client) }
+ let(:object) { double(:object) }
+
+ before do
+ expect(importer)
+ .to receive(:collection_options)
+ .and_return({ state: 'all' })
+ end
+
+ it 'yields every object to import' do
+ page = double(:page, objects: [object], number: 1)
+
+ expect(client)
+ .to receive(:each_page)
+ .with(:issues, 'foo/bar', { state: 'all', page: 1 })
+ .and_yield(page)
+
+ expect(importer.page_counter)
+ .to receive(:set)
+ .with(1)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:already_imported?)
+ .with(object)
+ .and_return(false)
+
+ expect(importer)
+ .to receive(:mark_as_imported)
+ .with(object)
+
+ expect { |b| importer.each_object_to_import(&b) }
+ .to yield_with_args(object)
+ end
+
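+    # Assumption drawn from the next example: the cached page counter is what
+    # makes the collection resumable, so fetching starts from the last page
+    # number recorded by a previous run.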
+ it 'resumes from the last page' do
+ page = double(:page, objects: [object], number: 2)
+
+ expect(importer.page_counter)
+ .to receive(:current)
+ .and_return(2)
+
+ expect(client)
+ .to receive(:each_page)
+ .with(:issues, 'foo/bar', { state: 'all', page: 2 })
+ .and_yield(page)
+
+ expect(importer.page_counter)
+ .to receive(:set)
+ .with(2)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:already_imported?)
+ .with(object)
+ .and_return(false)
+
+ expect(importer)
+ .to receive(:mark_as_imported)
+ .with(object)
+
+ expect { |b| importer.each_object_to_import(&b) }
+ .to yield_with_args(object)
+ end
+
+ it 'does not yield any objects if the page number was not set' do
+ page = double(:page, objects: [object], number: 1)
+
+ expect(client)
+ .to receive(:each_page)
+ .with(:issues, 'foo/bar', { state: 'all', page: 1 })
+ .and_yield(page)
+
+ expect(importer.page_counter)
+ .to receive(:set)
+ .with(1)
+ .and_return(false)
+
+ expect { |b| importer.each_object_to_import(&b) }
+ .not_to yield_control
+ end
+
+ it 'does not yield the object if it was already imported' do
+ page = double(:page, objects: [object], number: 1)
+
+ expect(client)
+ .to receive(:each_page)
+ .with(:issues, 'foo/bar', { state: 'all', page: 1 })
+ .and_yield(page)
+
+ expect(importer.page_counter)
+ .to receive(:set)
+ .with(1)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:already_imported?)
+ .with(object)
+ .and_return(true)
+
+ expect(importer)
+ .not_to receive(:mark_as_imported)
+
+ expect { |b| importer.each_object_to_import(&b) }
+ .not_to yield_control
+ end
+ end
+
+ describe '#already_imported?', :clean_gitlab_redis_cache do
+ let(:importer) { importer_class.new(project, client) }
+
+ it 'returns false when an object has not yet been imported' do
+ object = double(:object, id: 10)
+
+ expect(importer)
+ .to receive(:id_for_already_imported_cache)
+ .with(object)
+ .and_return(object.id)
+
+ expect(importer.already_imported?(object))
+ .to eq(false)
+ end
+
+ it 'returns true when an object has already been imported' do
+ object = double(:object, id: 10)
+
+ allow(importer)
+ .to receive(:id_for_already_imported_cache)
+ .with(object)
+ .and_return(object.id)
+
+ importer.mark_as_imported(object)
+
+ expect(importer.already_imported?(object))
+ .to eq(true)
+ end
+ end
+
+ describe '#mark_as_imported', :clean_gitlab_redis_cache do
+ it 'marks an object as already imported' do
+ object = double(:object, id: 10)
+ importer = importer_class.new(project, client)
+
+ expect(importer)
+ .to receive(:id_for_already_imported_cache)
+ .with(object)
+ .and_return(object.id)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:set_add)
+ .with(importer.already_imported_cache_key, object.id)
+ .and_call_original
+
+ importer.mark_as_imported(object)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
new file mode 100644
index 00000000000..7b0a1ea4948
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
@@ -0,0 +1,164 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation::DiffNote do
+ let(:hunk) do
+ '@@ -1 +1 @@
+ -Hello
+ +Hello world'
+ end
+
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ shared_examples 'a DiffNote' do
+ it 'returns an instance of DiffNote' do
+ expect(note).to be_an_instance_of(described_class)
+ end
+
+ context 'the returned DiffNote' do
+      it 'includes the noteable ID' do
+ expect(note.noteable_id).to eq(42)
+ end
+
+ it 'includes the file path of the diff' do
+ expect(note.file_path).to eq('README.md')
+ end
+
+ it 'includes the commit ID' do
+ expect(note.commit_id).to eq('123abc')
+ end
+
+      it 'includes the author details' do
+ expect(note.author)
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(note.author.id).to eq(4)
+ expect(note.author.login).to eq('alice')
+ end
+
+ it 'includes the note body' do
+ expect(note.note).to eq('Hello world')
+ end
+
+ it 'includes the created timestamp' do
+ expect(note.created_at).to eq(created_at)
+ end
+
+ it 'includes the updated timestamp' do
+ expect(note.updated_at).to eq(updated_at)
+ end
+
+ it 'includes the GitHub ID' do
+ expect(note.github_id).to eq(1)
+ end
+
+ it 'returns the noteable type' do
+ expect(note.noteable_type).to eq('MergeRequest')
+ end
+ end
+ end
+
+ describe '.from_api_response' do
+ let(:response) do
+ double(
+ :response,
+ html_url: 'https://github.com/foo/bar/pull/42',
+ path: 'README.md',
+ commit_id: '123abc',
+ diff_hunk: hunk,
+ user: double(:user, id: 4, login: 'alice'),
+ body: 'Hello world',
+ created_at: created_at,
+ updated_at: updated_at,
+ id: 1
+ )
+ end
+
+ it_behaves_like 'a DiffNote' do
+ let(:note) { described_class.from_api_response(response) }
+ end
+
+ it 'does not set the user if the response did not include a user' do
+ allow(response)
+ .to receive(:user)
+ .and_return(nil)
+
+ note = described_class.from_api_response(response)
+
+ expect(note.author).to be_nil
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'a DiffNote' do
+ let(:hash) do
+ {
+ 'noteable_type' => 'MergeRequest',
+ 'noteable_id' => 42,
+ 'file_path' => 'README.md',
+ 'commit_id' => '123abc',
+ 'diff_hunk' => hunk,
+ 'author' => { 'id' => 4, 'login' => 'alice' },
+ 'note' => 'Hello world',
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'github_id' => 1
+ }
+ end
+
+ let(:note) { described_class.from_json_hash(hash) }
+ end
+
+ it 'does not convert the author if it was not specified' do
+ hash = {
+ 'noteable_type' => 'MergeRequest',
+ 'noteable_id' => 42,
+ 'file_path' => 'README.md',
+ 'commit_id' => '123abc',
+ 'diff_hunk' => hunk,
+ 'note' => 'Hello world',
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'github_id' => 1
+ }
+
+ note = described_class.from_json_hash(hash)
+
+ expect(note.author).to be_nil
+ end
+ end
+
+ describe '#line_code' do
+ it 'returns a String' do
+ note = described_class.new(diff_hunk: hunk, file_path: 'README.md')
+
+ expect(note.line_code).to be_an_instance_of(String)
+ end
+ end
+
+ describe '#diff_hash' do
+ it 'returns a Hash containing the diff details' do
+ note = described_class.from_json_hash(
+ 'noteable_type' => 'MergeRequest',
+ 'noteable_id' => 42,
+ 'file_path' => 'README.md',
+ 'commit_id' => '123abc',
+ 'diff_hunk' => hunk,
+ 'author' => { 'id' => 4, 'login' => 'alice' },
+ 'note' => 'Hello world',
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'github_id' => 1
+ )
+
+ expect(note.diff_hash).to eq(
+ diff: hunk,
+ new_path: 'README.md',
+ old_path: 'README.md',
+ a_mode: '100644',
+ b_mode: '100644',
+ new_file: false
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/expose_attribute_spec.rb b/spec/lib/gitlab/github_import/representation/expose_attribute_spec.rb
new file mode 100644
index 00000000000..15de0fe49ff
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/expose_attribute_spec.rb
@@ -0,0 +1,19 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation::ExposeAttribute do
+ it 'defines a getter method that returns an attribute value' do
+ klass = Class.new do
+ include Gitlab::GithubImport::Representation::ExposeAttribute
+
+ expose_attribute :number
+
+ attr_reader :attributes
+
+ def initialize
+ @attributes = { number: 42 }
+ end
+ end
+
+ expect(klass.new.number).to eq(42)
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/issue_spec.rb b/spec/lib/gitlab/github_import/representation/issue_spec.rb
new file mode 100644
index 00000000000..99330ce42cb
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/issue_spec.rb
@@ -0,0 +1,182 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation::Issue do
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ shared_examples 'an Issue' do
+ it 'returns an instance of Issue' do
+ expect(issue).to be_an_instance_of(described_class)
+ end
+
+ context 'the returned Issue' do
+ it 'includes the issue number' do
+ expect(issue.iid).to eq(42)
+ end
+
+ it 'includes the issue title' do
+ expect(issue.title).to eq('My Issue')
+ end
+
+ it 'includes the issue description' do
+ expect(issue.description).to eq('This is my issue')
+ end
+
+ it 'includes the milestone number' do
+ expect(issue.milestone_number).to eq(4)
+ end
+
+ it 'includes the issue state' do
+ expect(issue.state).to eq(:opened)
+ end
+
+ it 'includes the issue assignees' do
+ expect(issue.assignees[0])
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(issue.assignees[0].id).to eq(4)
+ expect(issue.assignees[0].login).to eq('alice')
+ end
+
+ it 'includes the label names' do
+ expect(issue.label_names).to eq(%w[bug])
+ end
+
+ it 'includes the author details' do
+ expect(issue.author)
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(issue.author.id).to eq(4)
+ expect(issue.author.login).to eq('alice')
+ end
+
+ it 'includes the created timestamp' do
+ expect(issue.created_at).to eq(created_at)
+ end
+
+ it 'includes the updated timestamp' do
+ expect(issue.updated_at).to eq(updated_at)
+ end
+
+ it 'is not a pull request' do
+ expect(issue.pull_request?).to eq(false)
+ end
+ end
+ end
+
+ describe '.from_api_response' do
+ let(:response) do
+ double(
+ :response,
+ number: 42,
+ title: 'My Issue',
+ body: 'This is my issue',
+ milestone: double(:milestone, number: 4),
+ state: 'open',
+ assignees: [double(:user, id: 4, login: 'alice')],
+ labels: [double(:label, name: 'bug')],
+ user: double(:user, id: 4, login: 'alice'),
+ created_at: created_at,
+ updated_at: updated_at,
+ pull_request: false
+ )
+ end
+
+ it_behaves_like 'an Issue' do
+ let(:issue) { described_class.from_api_response(response) }
+ end
+
+ it 'does not set the user if the response did not include a user' do
+ allow(response)
+ .to receive(:user)
+ .and_return(nil)
+
+ issue = described_class.from_api_response(response)
+
+ expect(issue.author).to be_nil
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'an Issue' do
+ let(:hash) do
+ {
+ 'iid' => 42,
+ 'title' => 'My Issue',
+ 'description' => 'This is my issue',
+ 'milestone_number' => 4,
+ 'state' => 'opened',
+ 'assignees' => [{ 'id' => 4, 'login' => 'alice' }],
+ 'label_names' => %w[bug],
+ 'author' => { 'id' => 4, 'login' => 'alice' },
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'pull_request' => false
+ }
+ end
+
+ let(:issue) { described_class.from_json_hash(hash) }
+ end
+
+ it 'does not convert the author if it was not specified' do
+ hash = {
+ 'iid' => 42,
+ 'title' => 'My Issue',
+ 'description' => 'This is my issue',
+ 'milestone_number' => 4,
+ 'state' => 'opened',
+ 'assignees' => [{ 'id' => 4, 'login' => 'alice' }],
+ 'label_names' => %w[bug],
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'pull_request' => false
+ }
+
+ issue = described_class.from_json_hash(hash)
+
+ expect(issue.author).to be_nil
+ end
+ end
+
+ describe '#labels?' do
+ it 'returns true when the issue has labels assigned' do
+ issue = described_class.new(label_names: %w[bug])
+
+ expect(issue.labels?).to eq(true)
+ end
+
+ it 'returns false when the issue has no labels assigned' do
+ issue = described_class.new(label_names: [])
+
+ expect(issue.labels?).to eq(false)
+ end
+ end
+
+ describe '#pull_request?' do
+ it 'returns false for an issue' do
+ issue = described_class.new(pull_request: false)
+
+ expect(issue.pull_request?).to eq(false)
+ end
+
+ it 'returns true for a pull request' do
+ issue = described_class.new(pull_request: true)
+
+ expect(issue.pull_request?).to eq(true)
+ end
+ end
+
+ describe '#truncated_title' do
+ it 'truncates the title to 255 characters' do
+ object = described_class.new(title: 'm' * 300)
+
+ expect(object.truncated_title.length).to eq(255)
+ end
+
+ it 'does not truncate the title if it is shorter than 255 characters' do
+ object = described_class.new(title: 'foo')
+
+ expect(object.truncated_title).to eq('foo')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/note_spec.rb b/spec/lib/gitlab/github_import/representation/note_spec.rb
new file mode 100644
index 00000000000..f2c1c66b357
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/note_spec.rb
@@ -0,0 +1,107 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation::Note do
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ shared_examples 'a Note' do
+ it 'returns an instance of Note' do
+ expect(note).to be_an_instance_of(described_class)
+ end
+
+ context 'the returned Note' do
+ it 'includes the noteable ID' do
+ expect(note.noteable_id).to eq(42)
+ end
+
+ it 'includes the noteable type' do
+ expect(note.noteable_type).to eq('Issue')
+ end
+
+ it 'includes the author details' do
+ expect(note.author)
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(note.author.id).to eq(4)
+ expect(note.author.login).to eq('alice')
+ end
+
+ it 'includes the note body' do
+ expect(note.note).to eq('Hello world')
+ end
+
+ it 'includes the created timestamp' do
+ expect(note.created_at).to eq(created_at)
+ end
+
+ it 'includes the updated timestamp' do
+ expect(note.updated_at).to eq(updated_at)
+ end
+
+ it 'includes the GitHub ID' do
+ expect(note.github_id).to eq(1)
+ end
+ end
+ end
+
+ describe '.from_api_response' do
+ let(:response) do
+ double(
+ :response,
+ html_url: 'https://github.com/foo/bar/issues/42',
+ user: double(:user, id: 4, login: 'alice'),
+ body: 'Hello world',
+ created_at: created_at,
+ updated_at: updated_at,
+ id: 1
+ )
+ end
+
+ it_behaves_like 'a Note' do
+ let(:note) { described_class.from_api_response(response) }
+ end
+
+ it 'does not set the user if the response did not include a user' do
+ allow(response)
+ .to receive(:user)
+ .and_return(nil)
+
+ note = described_class.from_api_response(response)
+
+ expect(note.author).to be_nil
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'a Note' do
+ let(:hash) do
+ {
+ 'noteable_id' => 42,
+ 'noteable_type' => 'Issue',
+ 'author' => { 'id' => 4, 'login' => 'alice' },
+ 'note' => 'Hello world',
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'github_id' => 1
+ }
+ end
+
+ let(:note) { described_class.from_json_hash(hash) }
+ end
+
+ it 'does not convert the author if it was not specified' do
+ hash = {
+ 'noteable_id' => 42,
+ 'noteable_type' => 'Issue',
+ 'note' => 'Hello world',
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'github_id' => 1
+ }
+
+ note = described_class.from_json_hash(hash)
+
+ expect(note.author).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/pull_request_spec.rb b/spec/lib/gitlab/github_import/representation/pull_request_spec.rb
new file mode 100644
index 00000000000..33f6ff0ae6a
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/pull_request_spec.rb
@@ -0,0 +1,288 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation::PullRequest do
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+ let(:merged_at) { Time.new(2017, 1, 1, 12, 17) }
+
+ shared_examples 'a PullRequest' do
+ it 'returns an instance of PullRequest' do
+ expect(pr).to be_an_instance_of(described_class)
+ end
+
+ context 'the returned PullRequest' do
+ it 'includes the pull request number' do
+ expect(pr.iid).to eq(42)
+ end
+
+ it 'includes the pull request title' do
+ expect(pr.title).to eq('My Pull Request')
+ end
+
+ it 'includes the pull request description' do
+ expect(pr.description).to eq('This is my pull request')
+ end
+
+ it 'includes the source branch name' do
+ expect(pr.source_branch).to eq('my-feature')
+ end
+
+ it 'includes the source branch SHA' do
+ expect(pr.source_branch_sha).to eq('123abc')
+ end
+
+ it 'includes the target branch name' do
+ expect(pr.target_branch).to eq('master')
+ end
+
+ it 'includes the target branch SHA' do
+ expect(pr.target_branch_sha).to eq('456def')
+ end
+
+ it 'includes the milestone number' do
+ expect(pr.milestone_number).to eq(4)
+ end
+
+      it 'includes the author details' do
+ expect(pr.author)
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(pr.author.id).to eq(4)
+ expect(pr.author.login).to eq('alice')
+ end
+
+ it 'includes the assignee details' do
+ expect(pr.assignee)
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(pr.assignee.id).to eq(4)
+ expect(pr.assignee.login).to eq('alice')
+ end
+
+ it 'includes the created timestamp' do
+ expect(pr.created_at).to eq(created_at)
+ end
+
+ it 'includes the updated timestamp' do
+ expect(pr.updated_at).to eq(updated_at)
+ end
+
+ it 'includes the merged timestamp' do
+ expect(pr.merged_at).to eq(merged_at)
+ end
+
+ it 'includes the source repository ID' do
+ expect(pr.source_repository_id).to eq(400)
+ end
+
+ it 'includes the target repository ID' do
+ expect(pr.target_repository_id).to eq(200)
+ end
+
+ it 'includes the source repository owner name' do
+ expect(pr.source_repository_owner).to eq('alice')
+ end
+
+ it 'includes the pull request state' do
+ expect(pr.state).to eq(:merged)
+ end
+ end
+ end
+
+ describe '.from_api_response' do
+ let(:response) do
+ double(
+ :response,
+ number: 42,
+ title: 'My Pull Request',
+ body: 'This is my pull request',
+ state: 'closed',
+ head: double(
+ :head,
+ sha: '123abc',
+ ref: 'my-feature',
+ repo: double(:repo, id: 400),
+ user: double(:user, id: 4, login: 'alice')
+ ),
+ base: double(
+ :base,
+ sha: '456def',
+ ref: 'master',
+ repo: double(:repo, id: 200)
+ ),
+ milestone: double(:milestone, number: 4),
+ user: double(:user, id: 4, login: 'alice'),
+ assignee: double(:user, id: 4, login: 'alice'),
+ created_at: created_at,
+ updated_at: updated_at,
+ merged_at: merged_at
+ )
+ end
+
+ it_behaves_like 'a PullRequest' do
+ let(:pr) { described_class.from_api_response(response) }
+ end
+
+ it 'does not set the user if the response did not include a user' do
+ allow(response)
+ .to receive(:user)
+ .and_return(nil)
+
+ pr = described_class.from_api_response(response)
+
+ expect(pr.author).to be_nil
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'a PullRequest' do
+ let(:hash) do
+ {
+ 'iid' => 42,
+ 'title' => 'My Pull Request',
+ 'description' => 'This is my pull request',
+ 'source_branch' => 'my-feature',
+ 'source_branch_sha' => '123abc',
+ 'target_branch' => 'master',
+ 'target_branch_sha' => '456def',
+ 'source_repository_id' => 400,
+ 'target_repository_id' => 200,
+ 'source_repository_owner' => 'alice',
+ 'state' => 'closed',
+ 'milestone_number' => 4,
+ 'author' => { 'id' => 4, 'login' => 'alice' },
+ 'assignee' => { 'id' => 4, 'login' => 'alice' },
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'merged_at' => merged_at.to_s
+ }
+ end
+
+ let(:pr) { described_class.from_json_hash(hash) }
+ end
+
+ it 'does not convert the author if it was not specified' do
+ hash = {
+ 'iid' => 42,
+ 'title' => 'My Pull Request',
+ 'description' => 'This is my pull request',
+ 'source_branch' => 'my-feature',
+ 'source_branch_sha' => '123abc',
+ 'target_branch' => 'master',
+ 'target_branch_sha' => '456def',
+ 'source_repository_id' => 400,
+ 'target_repository_id' => 200,
+ 'source_repository_owner' => 'alice',
+ 'state' => 'closed',
+ 'milestone_number' => 4,
+ 'assignee' => { 'id' => 4, 'login' => 'alice' },
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'merged_at' => merged_at.to_s
+ }
+
+ pr = described_class.from_json_hash(hash)
+
+ expect(pr.author).to be_nil
+ end
+ end
+
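+  # The examples below pin down how the state is derived: a closed pull
+  # request with a merge timestamp counts as merged. A minimal sketch inferred
+  # from the assertions (not necessarily the actual implementation):
+  #
+  #   def state
+  #     if merged_at
+  #       :merged
+  #     else
+  #       attributes[:state]
+  #     end
+  #   end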
+ describe '#state' do
+ it 'returns :opened for an open pull request' do
+ pr = described_class.new(state: :opened)
+
+ expect(pr.state).to eq(:opened)
+ end
+
+ it 'returns :closed for a closed pull request' do
+ pr = described_class.new(state: :closed)
+
+ expect(pr.state).to eq(:closed)
+ end
+
+ it 'returns :merged for a merged pull request' do
+ pr = described_class.new(state: :closed, merged_at: merged_at)
+
+ expect(pr.state).to eq(:merged)
+ end
+ end
+
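+  # A pull request counts as cross-project when it originates from a different
+  # repository than the target, or when the source repository is unknown
+  # (e.g. the fork was deleted). A sketch consistent with these examples:
+  #
+  #   def cross_project?
+  #     return true unless source_repository_id
+  #
+  #     source_repository_id != target_repository_id
+  #   end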
+ describe '#cross_project?' do
+ it 'returns false for a pull request submitted from the target project' do
+ pr = described_class.new(source_repository_id: 1, target_repository_id: 1)
+
+ expect(pr).not_to be_cross_project
+ end
+
+ it 'returns true for a pull request submitted from a different project' do
+ pr = described_class.new(source_repository_id: 1, target_repository_id: 2)
+
+ expect(pr).to be_cross_project
+ end
+
+ it 'returns true if no source repository is present' do
+ pr = described_class.new(target_repository_id: 2)
+
+ expect(pr).to be_cross_project
+ end
+ end
+
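+  # The formatted source branch disambiguates the three cases exercised below.
+  # A sketch inferred from the assertions, using the attribute names from this
+  # spec:
+  #
+  #   def formatted_source_branch
+  #     if cross_project? && source_repository_owner
+  #       "#{source_repository_owner}:#{source_branch}"
+  #     elsif source_branch == target_branch
+  #       # Appending the PR number keeps the imported source branch from
+  #       # clashing with an identically named target branch.
+  #       "#{source_branch}-#{iid}"
+  #     else
+  #       source_branch
+  #     end
+  #   end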
+ describe '#formatted_source_branch' do
+ context 'for a cross-project pull request' do
+ it 'includes the owner name in the branch name' do
+ pr = described_class.new(
+ source_repository_owner: 'foo',
+ source_branch: 'branch',
+ target_branch: 'master',
+ source_repository_id: 1,
+ target_repository_id: 2
+ )
+
+ expect(pr.formatted_source_branch).to eq('foo:branch')
+ end
+ end
+
+ context 'for a regular pull request' do
+ it 'returns the source branch name' do
+ pr = described_class.new(
+ source_repository_owner: 'foo',
+ source_branch: 'branch',
+ target_branch: 'master',
+ source_repository_id: 1,
+ target_repository_id: 1
+ )
+
+ expect(pr.formatted_source_branch).to eq('branch')
+ end
+ end
+
+ context 'for a pull request with the same source and target branches' do
+ it 'returns a generated source branch name' do
+ pr = described_class.new(
+ iid: 1,
+ source_repository_owner: 'foo',
+ source_branch: 'branch',
+ target_branch: 'branch',
+ source_repository_id: 1,
+ target_repository_id: 1
+ )
+
+ expect(pr.formatted_source_branch).to eq('branch-1')
+ end
+ end
+ end
+
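+  # Imported titles are clamped to 255 characters (presumably the column
+  # limit). A one-line sketch consistent with the two examples below:
+  #
+  #   def truncated_title
+  #     title.to_s.truncate(255)
+  #   end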
+ describe '#truncated_title' do
+ it 'truncates the title to 255 characters' do
+ object = described_class.new(title: 'm' * 300)
+
+ expect(object.truncated_title.length).to eq(255)
+ end
+
+ it 'does not truncate the title if it is shorter than 255 characters' do
+ object = described_class.new(title: 'foo')
+
+ expect(object.truncated_title).to eq('foo')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/to_hash_spec.rb b/spec/lib/gitlab/github_import/representation/to_hash_spec.rb
new file mode 100644
index 00000000000..c296aa0a45b
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/to_hash_spec.rb
@@ -0,0 +1,37 @@
+require 'spec_helper'
+
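+# ToHash recursively serialises a representation: nested representations and
+# arrays of representations become Hashes, while plain values pass through
+# untouched. A sketch of the behaviour these examples pin down (not the
+# actual source):
+#
+#   def to_hash
+#     attributes.each_with_object({}) do |(key, value), hash|
+#       hash[key] =
+#         if value.respond_to?(:to_hash)
+#           value.to_hash
+#         elsif value.is_a?(Array)
+#           value.map { |v| v.respond_to?(:to_hash) ? v.to_hash : v }
+#         else
+#           value
+#         end
+#     end
+#   end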
+describe Gitlab::GithubImport::Representation::ToHash do
+ describe '#to_hash' do
+ let(:user) { double(:user, attributes: { login: 'alice' }) }
+
+ let(:issue) do
+ double(
+ :issue,
+ attributes: { user: user, assignees: [user], number: 42 }
+ )
+ end
+
+ let(:issue_hash) { issue.to_hash }
+
+ before do
+ user.extend(described_class)
+ issue.extend(described_class)
+ end
+
+ it 'converts an object to a Hash' do
+ expect(issue_hash).to be_an_instance_of(Hash)
+ end
+
+ it 'converts nested objects to Hashes' do
+ expect(issue_hash[:user]).to eq({ login: 'alice' })
+ end
+
+ it 'converts Array values to Hashes' do
+ expect(issue_hash[:assignees]).to eq([{ login: 'alice' }])
+ end
+
+ it 'keeps values as-is if they do not respond to #to_hash' do
+ expect(issue_hash[:number]).to eq(42)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/user_spec.rb b/spec/lib/gitlab/github_import/representation/user_spec.rb
new file mode 100644
index 00000000000..4e63e8ea568
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/user_spec.rb
@@ -0,0 +1,33 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation::User do
+ shared_examples 'a User' do
+ it 'returns an instance of User' do
+ expect(user).to be_an_instance_of(described_class)
+ end
+
+ context 'the returned User' do
+ it 'includes the user ID' do
+ expect(user.id).to eq(42)
+ end
+
+ it 'includes the username' do
+ expect(user.login).to eq('alice')
+ end
+ end
+ end
+
+ describe '.from_api_response' do
+ it_behaves_like 'a User' do
+ let(:response) { double(:response, id: 42, login: 'alice') }
+ let(:user) { described_class.from_api_response(response) }
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'a User' do
+ let(:hash) { { 'id' => 42, 'login' => 'alice' } }
+ let(:user) { described_class.from_json_hash(hash) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation_spec.rb b/spec/lib/gitlab/github_import/representation_spec.rb
new file mode 100644
index 00000000000..0b0610817b0
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation_spec.rb
@@ -0,0 +1,17 @@
+require 'spec_helper'
+
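+# `.symbolize_hash` prepares a JSON-deserialised Hash for the representation
+# structs: String keys become Symbols and timestamp fields are parsed back
+# into Time objects. A sketch consistent with these examples, assuming
+# timestamp fields are recognised by an `_at` suffix:
+#
+#   def self.symbolize_hash(raw)
+#     raw.each_with_object({}) do |(key, value), hash|
+#       value = Time.parse(value) if key.to_s.end_with?('_at')
+#       hash[key.to_sym] = value
+#     end
+#   end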
+describe Gitlab::GithubImport::Representation do
+ describe '.symbolize_hash' do
+ it 'returns a Hash with the keys as Symbols' do
+ hash = described_class.symbolize_hash('number' => 10)
+
+ expect(hash).to eq({ number: 10 })
+ end
+
+ it 'parses timestamp fields into Time instances' do
+ hash = described_class.symbolize_hash('created_at' => '2017-01-01 12:00')
+
+ expect(hash[:created_at]).to be_an_instance_of(Time)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/sequential_importer_spec.rb b/spec/lib/gitlab/github_import/sequential_importer_spec.rb
new file mode 100644
index 00000000000..6089b0b751f
--- /dev/null
+++ b/spec/lib/gitlab/github_import/sequential_importer_spec.rb
@@ -0,0 +1,37 @@
+require 'spec_helper'
+
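+# The sequential importer drives a full import on a single thread: it imports
+# the repository itself, runs each SEQUENTIAL_IMPORTERS class in order, then
+# runs the PARALLEL_IMPORTERS classes with `parallel: false`, presumably so
+# they execute inline rather than fanning work out to background jobs. The
+# example below asserts exactly that sequence and those constructor arguments.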
+describe Gitlab::GithubImport::SequentialImporter do
+ describe '#execute' do
+ it 'imports a project in sequence' do
+ repository = double(:repository)
+ project = double(:project, id: 1, repository: repository)
+ importer = described_class.new(project, token: 'foo')
+
+ expect_any_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter)
+ .to receive(:execute)
+
+ described_class::SEQUENTIAL_IMPORTERS.each do |klass|
+ instance = double(:instance)
+
+ expect(klass).to receive(:new)
+ .with(project, importer.client)
+ .and_return(instance)
+
+ expect(instance).to receive(:execute)
+ end
+
+ described_class::PARALLEL_IMPORTERS.each do |klass|
+ instance = double(:instance)
+
+ expect(klass).to receive(:new)
+ .with(project, importer.client, parallel: false)
+ .and_return(instance)
+
+ expect(instance).to receive(:execute)
+ end
+
+ expect(repository).to receive(:after_import)
+ expect(importer.execute).to eq(true)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb
new file mode 100644
index 00000000000..29f4c00d9c7
--- /dev/null
+++ b/spec/lib/gitlab/github_import/user_finder_spec.rb
@@ -0,0 +1,333 @@
+require 'spec_helper'
+
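+# UserFinder maps GitHub users onto GitLab user IDs. The specs below pin down
+# the lookup chain: a cache is consulted first, then the database is queried
+# by GitHub ID (via a `github` OAuth identity) and by Email address, with the
+# result (including "not found") written back to the cache. Authors that
+# cannot be resolved fall back to the project creator, and objects without
+# any user fall back to the ghost user.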
+describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
+ let(:project) { create(:project) }
+ let(:client) { double(:client) }
+ let(:finder) { described_class.new(project, client) }
+
+ describe '#author_id_for' do
+ it 'returns the user ID for the author of an object' do
+ user = double(:user, id: 4, login: 'kittens')
+ note = double(:note, author: user)
+
+ expect(finder).to receive(:user_id_for).with(user).and_return(42)
+
+ expect(finder.author_id_for(note)).to eq([42, true])
+ end
+
+ it 'returns the ID of the project creator if no user ID could be found' do
+ user = double(:user, id: 4, login: 'kittens')
+ note = double(:note, author: user)
+
+ expect(finder).to receive(:user_id_for).with(user).and_return(nil)
+
+ expect(finder.author_id_for(note)).to eq([project.creator_id, false])
+ end
+
+ it 'returns the ID of the ghost user when the object has no user' do
+ note = double(:note, author: nil)
+
+ expect(finder.author_id_for(note)).to eq([User.ghost.id, true])
+ end
+
+ it 'returns the ID of the ghost user when the given object is nil' do
+ expect(finder.author_id_for(nil)).to eq([User.ghost.id, true])
+ end
+ end
+
+ describe '#assignee_id_for' do
+ it 'returns the user ID for the assignee of an issuable' do
+ user = double(:user, id: 4, login: 'kittens')
+ issue = double(:issue, assignee: user)
+
+ expect(finder).to receive(:user_id_for).with(user).and_return(42)
+ expect(finder.assignee_id_for(issue)).to eq(42)
+ end
+
+ it 'returns nil if the issuable does not have an assignee' do
+ issue = double(:issue, assignee: nil)
+
+ expect(finder).not_to receive(:user_id_for)
+ expect(finder.assignee_id_for(issue)).to be_nil
+ end
+ end
+
+ describe '#user_id_for' do
+ it 'returns the user ID for the given user' do
+ user = double(:user, id: 4, login: 'kittens')
+
+ expect(finder).to receive(:find).with(user.id, user.login).and_return(42)
+ expect(finder.user_id_for(user)).to eq(42)
+ end
+ end
+
+ describe '#find' do
+ let(:user) { create(:user) }
+
+ before do
+ allow(finder).to receive(:email_for_github_username)
+ .and_return(user.email)
+ end
+
+ context 'without a cache' do
+ before do
+ allow(finder).to receive(:find_from_cache).and_return([false, nil])
+ expect(finder).to receive(:find_id_from_database).and_call_original
+ end
+
+ it 'finds a GitLab user for a GitHub user ID' do
+ user.identities.create!(provider: :github, extern_uid: 42)
+
+ expect(finder.find(42, user.username)).to eq(user.id)
+ end
+
+ it 'finds a GitLab user for a GitHub Email address' do
+ expect(finder.find(42, user.username)).to eq(user.id)
+ end
+ end
+
+ context 'with a cache' do
+ it 'returns the cached user ID' do
+ expect(finder).to receive(:find_from_cache).and_return([true, user.id])
+ expect(finder).not_to receive(:find_id_from_database)
+
+ expect(finder.find(42, user.username)).to eq(user.id)
+ end
+
+ it 'does not query the database if the cache key exists but is empty' do
+ expect(finder).to receive(:find_from_cache).and_return([true, nil])
+ expect(finder).not_to receive(:find_id_from_database)
+
+ expect(finder.find(42, user.username)).to be_nil
+ end
+ end
+ end
+
+ describe '#find_from_cache' do
+ it 'retrieves a GitLab user ID for a GitHub user ID' do
+ expect(finder)
+ .to receive(:cached_id_for_github_id)
+ .with(42)
+ .and_return([true, 4])
+
+ expect(finder.find_from_cache(42)).to eq([true, 4])
+ end
+
+ it 'retrieves a GitLab user ID for a GitHub Email address' do
+ email = 'kittens@example.com'
+
+ expect(finder)
+ .to receive(:cached_id_for_github_id)
+ .with(42)
+ .and_return([false, nil])
+
+ expect(finder)
+ .to receive(:cached_id_for_github_email)
+ .with(email)
+ .and_return([true, 4])
+
+ expect(finder.find_from_cache(42, email)).to eq([true, 4])
+ end
+
+ it 'does not query the cache for an Email address when none is given' do
+ expect(finder)
+ .to receive(:cached_id_for_github_id)
+ .with(42)
+ .and_return([false, nil])
+
+    expect(finder).not_to receive(:cached_id_for_github_email)
+
+ expect(finder.find_from_cache(42)).to eq([false])
+ end
+ end
+
+ describe '#find_id_from_database' do
+ let(:user) { create(:user) }
+
+ it 'returns the GitLab user ID for a GitHub user ID' do
+ user.identities.create!(provider: :github, extern_uid: 42)
+
+ expect(finder.find_id_from_database(42, user.email)).to eq(user.id)
+ end
+
+ it 'returns the GitLab user ID for a GitHub Email address' do
+ expect(finder.find_id_from_database(42, user.email)).to eq(user.id)
+ end
+ end
+
+ describe '#email_for_github_username' do
+ let(:email) { 'kittens@example.com' }
+
+ context 'when an Email address is cached' do
+ it 'reads the Email address from the cache' do
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:read)
+ .and_return(email)
+
+ expect(client).not_to receive(:user)
+ expect(finder.email_for_github_username('kittens')).to eq(email)
+ end
+ end
+
+ context 'when an Email address is not cached' do
+ let(:user) { double(:user, email: email) }
+
+ it 'retrieves the Email address from the GitHub API' do
+ expect(client).to receive(:user).with('kittens').and_return(user)
+ expect(finder.email_for_github_username('kittens')).to eq(email)
+ end
+
+ it 'caches the Email address when an Email address is available' do
+ expect(client).to receive(:user).with('kittens').and_return(user)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .with(an_instance_of(String), email)
+
+ finder.email_for_github_username('kittens')
+ end
+
+ it 'returns nil if the user does not exist' do
+ expect(client)
+ .to receive(:user)
+ .with('kittens')
+ .and_return(nil)
+
+ expect(Gitlab::GithubImport::Caching)
+ .not_to receive(:write)
+
+ expect(finder.email_for_github_username('kittens')).to be_nil
+ end
+ end
+ end
+
+ describe '#cached_id_for_github_id' do
+ let(:id) { 4 }
+
+ it 'reads a user ID from the cache' do
+ Gitlab::GithubImport::Caching
+ .write(described_class::ID_CACHE_KEY % id, 4)
+
+ expect(finder.cached_id_for_github_id(id)).to eq([true, 4])
+ end
+
+    it 'reads a non-existing cache key' do
+ expect(finder.cached_id_for_github_id(id)).to eq([false, nil])
+ end
+ end
+
+ describe '#cached_id_for_github_email' do
+ let(:email) { 'kittens@example.com' }
+
+ it 'reads a user ID from the cache' do
+ Gitlab::GithubImport::Caching
+ .write(described_class::ID_FOR_EMAIL_CACHE_KEY % email, 4)
+
+ expect(finder.cached_id_for_github_email(email)).to eq([true, 4])
+ end
+
+    it 'reads a non-existing cache key' do
+ expect(finder.cached_id_for_github_email(email)).to eq([false, nil])
+ end
+ end
+
+ describe '#id_for_github_id' do
+ let(:id) { 4 }
+
+ it 'queries and caches the user ID for a given GitHub ID' do
+ expect(finder).to receive(:query_id_for_github_id)
+ .with(id)
+ .and_return(42)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .with(described_class::ID_CACHE_KEY % id, 42)
+
+ finder.id_for_github_id(id)
+ end
+
+ it 'caches a nil value if no ID could be found' do
+ expect(finder).to receive(:query_id_for_github_id)
+ .with(id)
+ .and_return(nil)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .with(described_class::ID_CACHE_KEY % id, nil)
+
+ finder.id_for_github_id(id)
+ end
+ end
+
+ describe '#id_for_github_email' do
+ let(:email) { 'kittens@example.com' }
+
+ it 'queries and caches the user ID for a given Email address' do
+ expect(finder).to receive(:query_id_for_github_email)
+ .with(email)
+ .and_return(42)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .with(described_class::ID_FOR_EMAIL_CACHE_KEY % email, 42)
+
+ finder.id_for_github_email(email)
+ end
+
+ it 'caches a nil value if no ID could be found' do
+ expect(finder).to receive(:query_id_for_github_email)
+ .with(email)
+ .and_return(nil)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .with(described_class::ID_FOR_EMAIL_CACHE_KEY % email, nil)
+
+ finder.id_for_github_email(email)
+ end
+ end
+
+ describe '#query_id_for_github_id' do
+ it 'returns the ID of the user for the given GitHub user ID' do
+ user = create(:user)
+
+ user.identities.create!(provider: :github, extern_uid: '42')
+
+ expect(finder.query_id_for_github_id(42)).to eq(user.id)
+ end
+
+ it 'returns nil when no user ID could be found' do
+ expect(finder.query_id_for_github_id(42)).to be_nil
+ end
+ end
+
+ describe '#query_id_for_github_email' do
+ it 'returns the ID of the user for the given Email address' do
+ user = create(:user, email: 'kittens@example.com')
+
+ expect(finder.query_id_for_github_email(user.email)).to eq(user.id)
+ end
+
+ it 'returns nil if no user ID could be found' do
+ expect(finder.query_id_for_github_email('kittens@example.com')).to be_nil
+ end
+ end
+
+ describe '#read_id_from_cache' do
+ it 'reads an ID from the cache' do
+ Gitlab::GithubImport::Caching.write('foo', 10)
+
+ expect(finder.read_id_from_cache('foo')).to eq([true, 10])
+ end
+
+ it 'reads a cache key with an empty value' do
+ Gitlab::GithubImport::Caching.write('foo', nil)
+
+ expect(finder.read_id_from_cache('foo')).to eq([true, nil])
+ end
+
+ it 'reads a cache key that does not exist' do
+ expect(finder.read_id_from_cache('foo')).to eq([false, nil])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import_spec.rb b/spec/lib/gitlab/github_import_spec.rb
new file mode 100644
index 00000000000..51414800e8c
--- /dev/null
+++ b/spec/lib/gitlab/github_import_spec.rb
@@ -0,0 +1,79 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport do
+ let(:project) { double(:project) }
+
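+  # `.new_client_for` resolves the API token from either an explicit `token:`
+  # argument or the project's stored import credentials (the `user` entry in
+  # this example), as the two examples below demonstrate.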
+ describe '.new_client_for' do
+ it 'returns a new Client with a custom token' do
+ expect(described_class::Client)
+ .to receive(:new)
+ .with('123', parallel: true)
+
+ described_class.new_client_for(project, token: '123')
+ end
+
+ it 'returns a new Client with a token stored in the import data' do
+ import_data = double(:import_data, credentials: { user: '123' })
+
+ expect(project)
+ .to receive(:import_data)
+ .and_return(import_data)
+
+ expect(described_class::Client)
+ .to receive(:new)
+ .with('123', parallel: true)
+
+ described_class.new_client_for(project)
+ end
+ end
+
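+  # `.insert_and_return_id` papers over a database difference: on PostgreSQL
+  # the bulk insert can hand back the generated IDs directly (presumably via
+  # RETURNING), while on MySQL the inserted row has to be looked up again,
+  # here by its iid.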
+ describe '.insert_and_return_id' do
+ let(:attributes) { { iid: 1, title: 'foo' } }
+ let(:project) { create(:project) }
+
+ context 'on PostgreSQL' do
+ it 'returns the ID returned by the query' do
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(Issue.table_name, [attributes], return_ids: true)
+ .and_return([10])
+
+ id = described_class.insert_and_return_id(attributes, project.issues)
+
+ expect(id).to eq(10)
+ end
+ end
+
+ context 'on MySQL' do
+ it 'uses a separate query to retrieve the ID' do
+ issue = create(:issue, project: project, iid: attributes[:iid])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(Issue.table_name, [attributes], return_ids: true)
+ .and_return([])
+
+ id = described_class.insert_and_return_id(attributes, project.issues)
+
+ expect(id).to eq(issue.id)
+ end
+ end
+ end
+
+ describe '.ghost_user_id', :clean_gitlab_redis_cache do
+ it 'returns the ID of the ghost user' do
+ expect(described_class.ghost_user_id).to eq(User.ghost.id)
+ end
+
+ it 'caches the ghost user ID' do
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .once
+ .and_call_original
+
+ 2.times do
+ described_class.ghost_user_id
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/hook_data/issuable_builder_spec.rb b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb
index 30da56bec16..26529c4759d 100644
--- a/spec/lib/gitlab/hook_data/issuable_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb
@@ -41,7 +41,8 @@ describe Gitlab::HookData::IssuableBuilder do
labels: [
[{ id: 1, title: 'foo' }],
[{ id: 1, title: 'foo' }, { id: 2, title: 'bar' }]
- ]
+ ],
+ total_time_spent: [1, 2]
}
end
let(:data) { builder.build(user: user, changes: changes) }
@@ -53,6 +54,10 @@ describe Gitlab::HookData::IssuableBuilder do
labels: {
previous: [{ id: 1, title: 'foo' }],
current: [{ id: 1, title: 'foo' }, { id: 2, title: 'bar' }]
+ },
+ total_time_spent: {
+ previous: 1,
+ current: 2
}
}))
end
diff --git a/spec/lib/gitlab/hook_data/issue_builder_spec.rb b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
index 6c529cdd051..aeacd577d18 100644
--- a/spec/lib/gitlab/hook_data/issue_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
@@ -11,7 +11,6 @@ describe Gitlab::HookData::IssueBuilder do
%w[
assignee_id
author_id
- branch_name
closed_at
confidential
created_at
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index bf1e97654e5..0ecb50f7110 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -88,6 +88,7 @@ merge_requests:
- metrics
- timelogs
- head_pipeline
+- latest_merge_request_diff
merge_request_diff:
- merge_request
- merge_request_diff_commits
diff --git a/spec/lib/gitlab/import_export/project.json b/spec/lib/gitlab/import_export/project.json
index f7c90093bde..f0752649121 100644
--- a/spec/lib/gitlab/import_export/project.json
+++ b/spec/lib/gitlab/import_export/project.json
@@ -3146,13 +3146,12 @@
"merge_request_diff": {
"id": 26,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "0b4bc9a49b562e85de7cc9e834518ea6828729b9",
+ "merge_request_diff_id": 26,
+ "sha": "0b4bc9a49b562e85de7cc9e834518ea6828729b9",
+ "relative_order": 0,
"message": "Feature added\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "ae73cb07c9eeaf35924a10f713b364d32b2dd34f"
- ],
"authored_date": "2014-02-27T09:26:01.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -3161,9 +3160,11 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,5 @@\n+class Feature\n+ def foo\n+ puts 'bar'\n+ end\n+end\n",
+ "merge_request_diff_id": 26,
+ "relative_order": 0,
+ "utf8_diff": "--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,5 @@\n+class Feature\n+ def foo\n+ puts 'bar'\n+ end\n+end\n",
"new_path": "files/ruby/feature.rb",
"old_path": "files/ruby/feature.rb",
"a_mode": "0",
@@ -3425,13 +3426,12 @@
"merge_request_diff": {
"id": 15,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "94b8d581c48d894b86661718582fecbc5e3ed2eb",
+ "merge_request_diff_id": 15,
+ "relative_order": 0,
+ "sha": "94b8d581c48d894b86661718582fecbc5e3ed2eb",
"message": "fixes #10\n",
- "parent_ids": [
- "be93687618e4b132087f430a4d8fc3a609c9b77c"
- ],
"authored_date": "2016-01-19T13:22:56.000+01:00",
"author_name": "James Lopez",
"author_email": "james@jameslopez.es",
@@ -3440,9 +3440,11 @@
"committer_email": "james@jameslopez.es"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "--- /dev/null\n+++ b/test\n",
+ "merge_request_diff_id": 15,
+ "relative_order": 0,
+ "utf8_diff": "--- /dev/null\n+++ b/test\n",
"new_path": "test",
"old_path": "test",
"a_mode": "0",
@@ -3704,13 +3706,12 @@
"merge_request_diff": {
"id": 14,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "ddd4ff416a931589c695eb4f5b23f844426f6928",
+ "merge_request_diff_id": 14,
+ "relative_order": 0,
+ "sha": "ddd4ff416a931589c695eb4f5b23f844426f6928",
"message": "fixes #10\n",
- "parent_ids": [
- "be93687618e4b132087f430a4d8fc3a609c9b77c"
- ],
"authored_date": "2016-01-19T14:14:43.000+01:00",
"author_name": "James Lopez",
"author_email": "james@jameslopez.es",
@@ -3719,12 +3720,10 @@
"committer_email": "james@jameslopez.es"
},
{
- "id": "be93687618e4b132087f430a4d8fc3a609c9b77c",
+ "merge_request_diff_id": 14,
+ "relative_order": 1,
+ "sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
"message": "Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849",
- "048721d90c449b244b7b4c53a9186b04330174ec"
- ],
"authored_date": "2015-12-07T12:52:12.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "marin@gitlab.com",
@@ -3733,11 +3732,10 @@
"committer_email": "marin@gitlab.com"
},
{
- "id": "048721d90c449b244b7b4c53a9186b04330174ec",
+ "merge_request_diff_id": 14,
+ "relative_order": 2,
+ "sha": "048721d90c449b244b7b4c53a9186b04330174ec",
"message": "LFS object pointer.\n",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849"
- ],
"authored_date": "2015-12-07T11:54:28.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "maxlazio@gmail.com",
@@ -3746,11 +3744,10 @@
"committer_email": "maxlazio@gmail.com"
},
{
- "id": "5f923865dde3436854e9ceb9cdb7815618d4e849",
+ "merge_request_diff_id": 14,
+ "relative_order": 3,
+ "sha": "5f923865dde3436854e9ceb9cdb7815618d4e849",
"message": "GitLab currently doesn't support patches that involve a merge commit: add a commit here\n",
- "parent_ids": [
- "d2d430676773caa88cdaf7c55944073b2fd5561a"
- ],
"authored_date": "2015-11-13T16:27:12.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -3759,12 +3756,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "d2d430676773caa88cdaf7c55944073b2fd5561a",
+ "merge_request_diff_id": 14,
+ "relative_order": 4,
+ "sha": "d2d430676773caa88cdaf7c55944073b2fd5561a",
"message": "Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
- "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73"
- ],
"authored_date": "2015-11-13T08:50:17.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -3773,11 +3768,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
+ "merge_request_diff_id": 14,
+ "relative_order": 5,
+ "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
"message": "Add GitLab SVG\n",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8"
- ],
"authored_date": "2015-11-13T08:39:43.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -3786,12 +3780,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
+ "merge_request_diff_id": 14,
+ "relative_order": 6,
+ "sha": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4",
- "parent_ids": [
- "19e2e9b4ef76b422ce1154af39a91323ccc57434",
- "66eceea0db202bb39c4e445e8ca28689645366c5"
- ],
"authored_date": "2015-11-13T07:21:40.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -3800,11 +3792,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "66eceea0db202bb39c4e445e8ca28689645366c5",
+ "merge_request_diff_id": 14,
+ "relative_order": 7,
+ "sha": "66eceea0db202bb39c4e445e8ca28689645366c5",
"message": "add spaces in whitespace file\n",
- "parent_ids": [
- "08f22f255f082689c0d7d39d19205085311542bc"
- ],
"authored_date": "2015-11-13T06:01:27.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -3813,11 +3804,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "08f22f255f082689c0d7d39d19205085311542bc",
+ "merge_request_diff_id": 14,
+ "relative_order": 8,
+ "sha": "08f22f255f082689c0d7d39d19205085311542bc",
"message": "remove emtpy file.(beacase git ignore empty file)\nadd whitespace test file.\n",
- "parent_ids": [
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T06:00:16.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -3826,12 +3816,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
+ "merge_request_diff_id": 14,
+ "relative_order": 9,
+ "sha": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T05:23:14.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -3840,11 +3828,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
+ "merge_request_diff_id": 14,
+ "relative_order": 10,
+ "sha": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
"message": "add whitespace in empty\n",
- "parent_ids": [
- "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0"
- ],
"authored_date": "2015-11-13T05:08:45.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -3853,11 +3840,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
+ "merge_request_diff_id": 14,
+ "relative_order": 11,
+ "sha": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
"message": "add empty file\n",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"
- ],
"authored_date": "2015-11-13T05:08:04.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -3866,11 +3852,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
+ "merge_request_diff_id": 14,
+ "relative_order": 12,
+ "sha": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
"message": "Add ISO-8859 test file\n",
- "parent_ids": [
- "e56497bb5f03a90a51293fc6d516788730953899"
- ],
"authored_date": "2015-08-25T17:53:12.000+02:00",
"author_name": "Stan Hu",
"author_email": "stanhu@packetzoom.com",
@@ -3879,12 +3864,10 @@
"committer_email": "stanhu@packetzoom.com"
},
{
- "id": "e56497bb5f03a90a51293fc6d516788730953899",
+ "merge_request_diff_id": 14,
+ "relative_order": 13,
+ "sha": "e56497bb5f03a90a51293fc6d516788730953899",
"message": "Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/275#note_732774)\n\nSee merge request !2\n",
- "parent_ids": [
- "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
- "4cd80ccab63c82b4bad16faa5193fbd2aa06df40"
- ],
"authored_date": "2015-01-10T22:23:29.000+01:00",
"author_name": "Sytse Sijbrandij",
"author_email": "sytse@gitlab.com",
@@ -3893,11 +3876,10 @@
"committer_email": "sytse@gitlab.com"
},
{
- "id": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
+ "merge_request_diff_id": 14,
+ "relative_order": 14,
+ "sha": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
"message": "add directory structure for tree_helper spec\n",
- "parent_ids": [
- "5937ac0a7beb003549fc5fd26fc247adbce4a52e"
- ],
"authored_date": "2015-01-10T21:28:18.000+01:00",
"author_name": "marmis85",
"author_email": "marmis85@gmail.com",
@@ -3906,11 +3888,10 @@
"committer_email": "marmis85@gmail.com"
},
{
- "id": "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
+ "merge_request_diff_id": 14,
+ "relative_order": 15,
+ "sha": "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
"message": "Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "570e7b2abdd848b95f2f578043fc23bd6f6fd24d"
- ],
"authored_date": "2014-02-27T10:01:38.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -3919,11 +3900,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
+ "merge_request_diff_id": 14,
+ "relative_order": 16,
+ "sha": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
"message": "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
- ],
"authored_date": "2014-02-27T09:57:31.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -3932,11 +3912,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
+ "merge_request_diff_id": 14,
+ "relative_order": 17,
+ "sha": "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
"message": "More submodules\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "d14d6c0abdd253381df51a723d58691b2ee1ab08"
- ],
"authored_date": "2014-02-27T09:54:21.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -3945,11 +3924,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "d14d6c0abdd253381df51a723d58691b2ee1ab08",
+ "merge_request_diff_id": 14,
+ "relative_order": 18,
+ "sha": "d14d6c0abdd253381df51a723d58691b2ee1ab08",
"message": "Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "c1acaa58bbcbc3eafe538cb8274ba387047b69f8"
- ],
"authored_date": "2014-02-27T09:49:50.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -3958,11 +3936,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "c1acaa58bbcbc3eafe538cb8274ba387047b69f8",
+ "merge_request_diff_id": 14,
+ "relative_order": 19,
+ "sha": "c1acaa58bbcbc3eafe538cb8274ba387047b69f8",
"message": "Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "ae73cb07c9eeaf35924a10f713b364d32b2dd34f"
- ],
"authored_date": "2014-02-27T09:48:32.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -3971,9 +3948,11 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "Binary files a/.DS_Store and /dev/null differ\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 0,
+ "utf8_diff": "Binary files a/.DS_Store and /dev/null differ\n",
"new_path": ".DS_Store",
"old_path": ".DS_Store",
"a_mode": "100644",
@@ -3984,7 +3963,9 @@
"too_large": false
},
{
- "diff": "--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 1,
+ "utf8_diff": "--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n",
"new_path": ".gitignore",
"old_path": ".gitignore",
"a_mode": "100644",
@@ -3995,7 +3976,9 @@
"too_large": false
},
{
- "diff": "--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 2,
+ "utf8_diff": "--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n",
"new_path": ".gitmodules",
"old_path": ".gitmodules",
"a_mode": "100644",
@@ -4006,7 +3989,9 @@
"too_large": false
},
{
- "diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 3,
+ "utf8_diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
"new_path": "CHANGELOG",
"old_path": "CHANGELOG",
"a_mode": "100644",
@@ -4017,7 +4002,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 4,
+ "utf8_diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
"new_path": "encoding/iso8859.txt",
"old_path": "encoding/iso8859.txt",
"a_mode": "0",
@@ -4028,7 +4015,9 @@
"too_large": false
},
{
- "diff": "Binary files a/files/.DS_Store and /dev/null differ\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 5,
+ "utf8_diff": "Binary files a/files/.DS_Store and /dev/null differ\n",
"new_path": "files/.DS_Store",
"old_path": "files/.DS_Store",
"a_mode": "100644",
@@ -4039,7 +4028,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath d=\"M37.2266657,17.4468081 
C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 
75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 
61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 6,
+ "utf8_diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath 
d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 
C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 
C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
"new_path": "files/images/wm.svg",
"old_path": "files/images/wm.svg",
"a_mode": "0",
@@ -4050,7 +4041,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 7,
+ "utf8_diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
"new_path": "files/lfs/lfs_object.iso",
"old_path": "files/lfs/lfs_object.iso",
"a_mode": "0",
@@ -4061,7 +4054,9 @@
"too_large": false
},
{
- "diff": "--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" =\u003e path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" =\u003e path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output \u003c\u003c stdout.read\n @cmd_output \u003c\u003c stderr.read\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 8,
+ "utf8_diff": "--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" =\u003e path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" =\u003e path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output \u003c\u003c stdout.read\n @cmd_output \u003c\u003c stderr.read\n",
"new_path": "files/ruby/popen.rb",
"old_path": "files/ruby/popen.rb",
"a_mode": "100644",
@@ -4072,7 +4067,9 @@
"too_large": false
},
{
- "diff": "--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 9,
+ "utf8_diff": "--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n",
"new_path": "files/ruby/regex.rb",
"old_path": "files/ruby/regex.rb",
"a_mode": "100644",
@@ -4083,7 +4080,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
+ "merge_request_diff_id": 14,
+ "relative_order": 10,
+ "utf8_diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
"new_path": "files/whitespace",
"old_path": "files/whitespace",
"a_mode": "0",
@@ -4094,7 +4093,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 11,
+ "utf8_diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
"new_path": "foo/bar/.gitkeep",
"old_path": "foo/bar/.gitkeep",
"a_mode": "0",
@@ -4105,7 +4106,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 12,
+ "utf8_diff": "--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n",
"new_path": "gitlab-grack",
"old_path": "gitlab-grack",
"a_mode": "0",
@@ -4116,7 +4119,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 13,
+ "utf8_diff": "--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n",
"new_path": "gitlab-shell",
"old_path": "gitlab-shell",
"a_mode": "0",
@@ -4127,7 +4132,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/test\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 14,
+ "utf8_diff": "--- /dev/null\n+++ b/test\n",
"new_path": "test",
"old_path": "test",
"a_mode": "0",
@@ -4215,6 +4222,7 @@
},
"events": [
{
+ "merge_request_diff_id": 14,
"id": 529,
"target_type": "Note",
"target_id": 793,
@@ -4398,13 +4406,12 @@
"merge_request_diff": {
"id": 13,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "0bfedc29d30280c7e8564e19f654584b459e5868",
+ "merge_request_diff_id": 13,
+ "relative_order": 0,
+ "sha": "0bfedc29d30280c7e8564e19f654584b459e5868",
"message": "fixes #10\n",
- "parent_ids": [
- "be93687618e4b132087f430a4d8fc3a609c9b77c"
- ],
"authored_date": "2016-01-19T15:25:23.000+01:00",
"author_name": "James Lopez",
"author_email": "james@jameslopez.es",
@@ -4413,12 +4420,10 @@
"committer_email": "james@jameslopez.es"
},
{
- "id": "be93687618e4b132087f430a4d8fc3a609c9b77c",
+ "merge_request_diff_id": 13,
+ "relative_order": 1,
+ "sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
"message": "Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849",
- "048721d90c449b244b7b4c53a9186b04330174ec"
- ],
"authored_date": "2015-12-07T12:52:12.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "marin@gitlab.com",
@@ -4427,11 +4432,10 @@
"committer_email": "marin@gitlab.com"
},
{
- "id": "048721d90c449b244b7b4c53a9186b04330174ec",
+ "merge_request_diff_id": 13,
+ "relative_order": 2,
+ "sha": "048721d90c449b244b7b4c53a9186b04330174ec",
"message": "LFS object pointer.\n",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849"
- ],
"authored_date": "2015-12-07T11:54:28.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "maxlazio@gmail.com",
@@ -4440,11 +4444,10 @@
"committer_email": "maxlazio@gmail.com"
},
{
- "id": "5f923865dde3436854e9ceb9cdb7815618d4e849",
+ "merge_request_diff_id": 13,
+ "relative_order": 3,
+ "sha": "5f923865dde3436854e9ceb9cdb7815618d4e849",
"message": "GitLab currently doesn't support patches that involve a merge commit: add a commit here\n",
- "parent_ids": [
- "d2d430676773caa88cdaf7c55944073b2fd5561a"
- ],
"authored_date": "2015-11-13T16:27:12.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4453,12 +4456,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "d2d430676773caa88cdaf7c55944073b2fd5561a",
+ "merge_request_diff_id": 13,
+ "relative_order": 4,
+ "sha": "d2d430676773caa88cdaf7c55944073b2fd5561a",
"message": "Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
- "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73"
- ],
"authored_date": "2015-11-13T08:50:17.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4467,11 +4468,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
+ "merge_request_diff_id": 13,
+ "relative_order": 5,
+ "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
"message": "Add GitLab SVG\n",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8"
- ],
"authored_date": "2015-11-13T08:39:43.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4480,12 +4480,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
+ "merge_request_diff_id": 13,
+ "relative_order": 6,
+ "sha": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4",
- "parent_ids": [
- "19e2e9b4ef76b422ce1154af39a91323ccc57434",
- "66eceea0db202bb39c4e445e8ca28689645366c5"
- ],
"authored_date": "2015-11-13T07:21:40.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4494,11 +4492,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "66eceea0db202bb39c4e445e8ca28689645366c5",
+ "merge_request_diff_id": 13,
+ "relative_order": 7,
+ "sha": "66eceea0db202bb39c4e445e8ca28689645366c5",
"message": "add spaces in whitespace file\n",
- "parent_ids": [
- "08f22f255f082689c0d7d39d19205085311542bc"
- ],
"authored_date": "2015-11-13T06:01:27.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -4507,11 +4504,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "08f22f255f082689c0d7d39d19205085311542bc",
+ "merge_request_diff_id": 13,
+ "relative_order": 8,
+ "sha": "08f22f255f082689c0d7d39d19205085311542bc",
"message": "remove emtpy file.(beacase git ignore empty file)\nadd whitespace test file.\n",
- "parent_ids": [
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T06:00:16.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -4520,12 +4516,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
+ "merge_request_diff_id": 13,
+ "relative_order": 9,
+ "sha": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T05:23:14.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4534,11 +4528,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
+ "merge_request_diff_id": 13,
+ "relative_order": 10,
+ "sha": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
"message": "add whitespace in empty\n",
- "parent_ids": [
- "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0"
- ],
"authored_date": "2015-11-13T05:08:45.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -4547,11 +4540,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
+ "merge_request_diff_id": 13,
+ "relative_order": 11,
+ "sha": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
"message": "add empty file\n",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"
- ],
"authored_date": "2015-11-13T05:08:04.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -4560,11 +4552,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
+ "merge_request_diff_id": 13,
+ "relative_order": 12,
+ "sha": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
"message": "Add ISO-8859 test file\n",
- "parent_ids": [
- "e56497bb5f03a90a51293fc6d516788730953899"
- ],
"authored_date": "2015-08-25T17:53:12.000+02:00",
"author_name": "Stan Hu",
"author_email": "stanhu@packetzoom.com",
@@ -4573,12 +4564,10 @@
"committer_email": "stanhu@packetzoom.com"
},
{
- "id": "e56497bb5f03a90a51293fc6d516788730953899",
+ "merge_request_diff_id": 13,
+ "relative_order": 13,
+ "sha": "e56497bb5f03a90a51293fc6d516788730953899",
"message": "Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/275#note_732774)\n\nSee merge request !2\n",
- "parent_ids": [
- "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
- "4cd80ccab63c82b4bad16faa5193fbd2aa06df40"
- ],
"authored_date": "2015-01-10T22:23:29.000+01:00",
"author_name": "Sytse Sijbrandij",
"author_email": "sytse@gitlab.com",
@@ -4587,11 +4576,10 @@
"committer_email": "sytse@gitlab.com"
},
{
- "id": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
+ "merge_request_diff_id": 13,
+ "relative_order": 14,
+ "sha": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
"message": "add directory structure for tree_helper spec\n",
- "parent_ids": [
- "5937ac0a7beb003549fc5fd26fc247adbce4a52e"
- ],
"authored_date": "2015-01-10T21:28:18.000+01:00",
"author_name": "marmis85",
"author_email": "marmis85@gmail.com",
@@ -4600,9 +4588,11 @@
"committer_email": "marmis85@gmail.com"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
+ "merge_request_diff_id": 13,
+ "relative_order": 0,
+ "utf8_diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
"new_path": "CHANGELOG",
"old_path": "CHANGELOG",
"a_mode": "100644",
@@ -4613,7 +4603,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
+ "merge_request_diff_id": 13,
+ "relative_order": 1,
+ "utf8_diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
"new_path": "encoding/iso8859.txt",
"old_path": "encoding/iso8859.txt",
"a_mode": "0",
@@ -4624,7 +4616,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath d=\"M37.2266657,17.4468081 
C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 
75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 
61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
+ "merge_request_diff_id": 13,
+ "relative_order": 2,
+ "utf8_diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath 
d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 
C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 
C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
"new_path": "files/images/wm.svg",
"old_path": "files/images/wm.svg",
"a_mode": "0",
@@ -4635,7 +4629,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
+ "merge_request_diff_id": 13,
+ "relative_order": 3,
+ "utf8_diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
"new_path": "files/lfs/lfs_object.iso",
"old_path": "files/lfs/lfs_object.iso",
"a_mode": "0",
@@ -4646,7 +4642,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
+ "merge_request_diff_id": 13,
+ "relative_order": 4,
+ "utf8_diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
"new_path": "files/whitespace",
"old_path": "files/whitespace",
"a_mode": "0",
@@ -4657,7 +4655,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
+ "merge_request_diff_id": 13,
+ "relative_order": 5,
+ "utf8_diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
"new_path": "foo/bar/.gitkeep",
"old_path": "foo/bar/.gitkeep",
"a_mode": "0",
@@ -4668,7 +4668,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/test\n",
+ "merge_request_diff_id": 13,
+ "relative_order": 6,
+ "utf8_diff": "--- /dev/null\n+++ b/test\n",
"new_path": "test",
"old_path": "test",
"a_mode": "0",
@@ -4930,13 +4932,12 @@
"merge_request_diff": {
"id": 12,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "97a0df9696e2aebf10c31b3016f40214e0e8f243",
+ "merge_request_diff_id": 12,
+ "relative_order": 0,
+ "sha": "97a0df9696e2aebf10c31b3016f40214e0e8f243",
"message": "fixes #10\n",
- "parent_ids": [
- "be93687618e4b132087f430a4d8fc3a609c9b77c"
- ],
"authored_date": "2016-01-19T14:08:21.000+01:00",
"author_name": "James Lopez",
"author_email": "james@jameslopez.es",
@@ -4945,12 +4946,10 @@
"committer_email": "james@jameslopez.es"
},
{
- "id": "be93687618e4b132087f430a4d8fc3a609c9b77c",
+ "merge_request_diff_id": 12,
+ "relative_order": 1,
+ "sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
"message": "Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849",
- "048721d90c449b244b7b4c53a9186b04330174ec"
- ],
"authored_date": "2015-12-07T12:52:12.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "marin@gitlab.com",
@@ -4959,11 +4958,10 @@
"committer_email": "marin@gitlab.com"
},
{
- "id": "048721d90c449b244b7b4c53a9186b04330174ec",
+ "merge_request_diff_id": 12,
+ "relative_order": 2,
+ "sha": "048721d90c449b244b7b4c53a9186b04330174ec",
"message": "LFS object pointer.\n",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849"
- ],
"authored_date": "2015-12-07T11:54:28.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "maxlazio@gmail.com",
@@ -4972,11 +4970,10 @@
"committer_email": "maxlazio@gmail.com"
},
{
- "id": "5f923865dde3436854e9ceb9cdb7815618d4e849",
+ "merge_request_diff_id": 12,
+ "relative_order": 3,
+ "sha": "5f923865dde3436854e9ceb9cdb7815618d4e849",
"message": "GitLab currently doesn't support patches that involve a merge commit: add a commit here\n",
- "parent_ids": [
- "d2d430676773caa88cdaf7c55944073b2fd5561a"
- ],
"authored_date": "2015-11-13T16:27:12.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4985,12 +4982,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "d2d430676773caa88cdaf7c55944073b2fd5561a",
+ "merge_request_diff_id": 12,
+ "relative_order": 4,
+ "sha": "d2d430676773caa88cdaf7c55944073b2fd5561a",
"message": "Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
- "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73"
- ],
"authored_date": "2015-11-13T08:50:17.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4999,11 +4994,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
+ "merge_request_diff_id": 12,
+ "relative_order": 5,
+ "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
"message": "Add GitLab SVG\n",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8"
- ],
"authored_date": "2015-11-13T08:39:43.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5012,12 +5006,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
+ "merge_request_diff_id": 12,
+ "relative_order": 6,
+ "sha": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4",
- "parent_ids": [
- "19e2e9b4ef76b422ce1154af39a91323ccc57434",
- "66eceea0db202bb39c4e445e8ca28689645366c5"
- ],
"authored_date": "2015-11-13T07:21:40.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5026,11 +5018,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "66eceea0db202bb39c4e445e8ca28689645366c5",
+ "merge_request_diff_id": 12,
+ "relative_order": 7,
+ "sha": "66eceea0db202bb39c4e445e8ca28689645366c5",
"message": "add spaces in whitespace file\n",
- "parent_ids": [
- "08f22f255f082689c0d7d39d19205085311542bc"
- ],
"authored_date": "2015-11-13T06:01:27.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5039,11 +5030,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "08f22f255f082689c0d7d39d19205085311542bc",
+ "merge_request_diff_id": 12,
+ "relative_order": 8,
+ "sha": "08f22f255f082689c0d7d39d19205085311542bc",
"message": "remove emtpy file.(beacase git ignore empty file)\nadd whitespace test file.\n",
- "parent_ids": [
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T06:00:16.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5052,12 +5042,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
+ "merge_request_diff_id": 12,
+ "relative_order": 9,
+ "sha": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T05:23:14.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5066,11 +5054,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
+ "merge_request_diff_id": 12,
+ "relative_order": 10,
+ "sha": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
"message": "add whitespace in empty\n",
- "parent_ids": [
- "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0"
- ],
"authored_date": "2015-11-13T05:08:45.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5079,11 +5066,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
+ "merge_request_diff_id": 12,
+ "relative_order": 11,
+ "sha": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
"message": "add empty file\n",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"
- ],
"authored_date": "2015-11-13T05:08:04.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5092,11 +5078,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
+ "merge_request_diff_id": 12,
+ "relative_order": 12,
+ "sha": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
"message": "Add ISO-8859 test file\n",
- "parent_ids": [
- "e56497bb5f03a90a51293fc6d516788730953899"
- ],
"authored_date": "2015-08-25T17:53:12.000+02:00",
"author_name": "Stan Hu",
"author_email": "stanhu@packetzoom.com",
@@ -5105,9 +5090,11 @@
"committer_email": "stanhu@packetzoom.com"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
+ "merge_request_diff_id": 12,
+ "relative_order": 0,
+ "utf8_diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
"new_path": "CHANGELOG",
"old_path": "CHANGELOG",
"a_mode": "100644",
@@ -5118,7 +5105,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
+ "merge_request_diff_id": 12,
+ "relative_order": 1,
+ "utf8_diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
"new_path": "encoding/iso8859.txt",
"old_path": "encoding/iso8859.txt",
"a_mode": "0",
@@ -5129,7 +5118,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath d=\"M37.2266657,17.4468081 
C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 
75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 
61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
+ "merge_request_diff_id": 12,
+ "relative_order": 2,
+ "utf8_diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath 
d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 
C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 
C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
"new_path": "files/images/wm.svg",
"old_path": "files/images/wm.svg",
"a_mode": "0",
@@ -5140,7 +5131,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
+ "merge_request_diff_id": 12,
+ "relative_order": 3,
+ "utf8_diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
"new_path": "files/lfs/lfs_object.iso",
"old_path": "files/lfs/lfs_object.iso",
"a_mode": "0",
@@ -5151,7 +5144,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
+ "merge_request_diff_id": 12,
+ "relative_order": 4,
+ "utf8_diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
"new_path": "files/whitespace",
"old_path": "files/whitespace",
"a_mode": "0",
@@ -5162,7 +5157,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/test\n",
+ "merge_request_diff_id": 12,
+ "relative_order": 5,
+ "utf8_diff": "--- /dev/null\n+++ b/test\n",
"new_path": "test",
"old_path": "test",
"a_mode": "0",
@@ -5424,9 +5421,9 @@
"merge_request_diff": {
"id": 11,
"state": "empty",
- "st_commits": null,
- "utf8_st_diffs": [
-
+ "merge_request_diff_commits": [
+ ],
+ "merge_request_diff_files": [
],
"merge_request_id": 11,
"created_at": "2016-06-14T15:02:23.772Z",
@@ -5679,13 +5676,12 @@
"merge_request_diff": {
"id": 10,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "f998ac87ac9244f15e9c15109a6f4e62a54b779d",
+ "merge_request_diff_id": 10,
+ "relative_order": 0,
+ "sha": "f998ac87ac9244f15e9c15109a6f4e62a54b779d",
"message": "fixes #10\n",
- "parent_ids": [
- "be93687618e4b132087f430a4d8fc3a609c9b77c"
- ],
"authored_date": "2016-01-19T14:43:23.000+01:00",
"author_name": "James Lopez",
"author_email": "james@jameslopez.es",
@@ -5694,12 +5690,10 @@
"committer_email": "james@jameslopez.es"
},
{
- "id": "be93687618e4b132087f430a4d8fc3a609c9b77c",
+ "merge_request_diff_id": 10,
+ "relative_order": 1,
+ "sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
"message": "Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849",
- "048721d90c449b244b7b4c53a9186b04330174ec"
- ],
"authored_date": "2015-12-07T12:52:12.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "marin@gitlab.com",
@@ -5708,11 +5702,10 @@
"committer_email": "marin@gitlab.com"
},
{
- "id": "048721d90c449b244b7b4c53a9186b04330174ec",
+ "merge_request_diff_id": 10,
+ "relative_order": 2,
+ "sha": "048721d90c449b244b7b4c53a9186b04330174ec",
"message": "LFS object pointer.\n",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849"
- ],
"authored_date": "2015-12-07T11:54:28.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "maxlazio@gmail.com",
@@ -5721,11 +5714,10 @@
"committer_email": "maxlazio@gmail.com"
},
{
- "id": "5f923865dde3436854e9ceb9cdb7815618d4e849",
+ "merge_request_diff_id": 10,
+ "relative_order": 3,
+ "sha": "5f923865dde3436854e9ceb9cdb7815618d4e849",
"message": "GitLab currently doesn't support patches that involve a merge commit: add a commit here\n",
- "parent_ids": [
- "d2d430676773caa88cdaf7c55944073b2fd5561a"
- ],
"authored_date": "2015-11-13T16:27:12.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5734,12 +5726,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "d2d430676773caa88cdaf7c55944073b2fd5561a",
+ "merge_request_diff_id": 10,
+ "relative_order": 4,
+ "sha": "d2d430676773caa88cdaf7c55944073b2fd5561a",
"message": "Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
- "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73"
- ],
"authored_date": "2015-11-13T08:50:17.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5748,11 +5738,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
+ "merge_request_diff_id": 10,
+ "relative_order": 5,
+ "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
"message": "Add GitLab SVG\n",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8"
- ],
"authored_date": "2015-11-13T08:39:43.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5761,12 +5750,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
+ "merge_request_diff_id": 10,
+ "relative_order": 6,
+ "sha": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4",
- "parent_ids": [
- "19e2e9b4ef76b422ce1154af39a91323ccc57434",
- "66eceea0db202bb39c4e445e8ca28689645366c5"
- ],
"authored_date": "2015-11-13T07:21:40.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5775,11 +5762,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "66eceea0db202bb39c4e445e8ca28689645366c5",
+ "merge_request_diff_id": 10,
+ "relative_order": 7,
+ "sha": "66eceea0db202bb39c4e445e8ca28689645366c5",
"message": "add spaces in whitespace file\n",
- "parent_ids": [
- "08f22f255f082689c0d7d39d19205085311542bc"
- ],
"authored_date": "2015-11-13T06:01:27.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5788,11 +5774,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "08f22f255f082689c0d7d39d19205085311542bc",
+ "merge_request_diff_id": 10,
+ "relative_order": 8,
+ "sha": "08f22f255f082689c0d7d39d19205085311542bc",
"message": "remove emtpy file.(beacase git ignore empty file)\nadd whitespace test file.\n",
- "parent_ids": [
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T06:00:16.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5801,12 +5786,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
+ "merge_request_diff_id": 10,
+ "relative_order": 9,
+ "sha": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T05:23:14.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5815,11 +5798,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
+ "merge_request_diff_id": 10,
+ "relative_order": 10,
+ "sha": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
"message": "add whitespace in empty\n",
- "parent_ids": [
- "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0"
- ],
"authored_date": "2015-11-13T05:08:45.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5828,11 +5810,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
+ "merge_request_diff_id": 10,
+ "relative_order": 11,
+ "sha": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
"message": "add empty file\n",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"
- ],
"authored_date": "2015-11-13T05:08:04.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5841,11 +5822,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
+ "merge_request_diff_id": 10,
+ "relative_order": 12,
+ "sha": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
"message": "Add ISO-8859 test file\n",
- "parent_ids": [
- "e56497bb5f03a90a51293fc6d516788730953899"
- ],
"authored_date": "2015-08-25T17:53:12.000+02:00",
"author_name": "Stan Hu",
"author_email": "stanhu@packetzoom.com",
@@ -5854,12 +5834,10 @@
"committer_email": "stanhu@packetzoom.com"
},
{
- "id": "e56497bb5f03a90a51293fc6d516788730953899",
+ "merge_request_diff_id": 10,
+ "relative_order": 13,
+ "sha": "e56497bb5f03a90a51293fc6d516788730953899",
"message": "Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/275#note_732774)\n\nSee merge request !2\n",
- "parent_ids": [
- "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
- "4cd80ccab63c82b4bad16faa5193fbd2aa06df40"
- ],
"authored_date": "2015-01-10T22:23:29.000+01:00",
"author_name": "Sytse Sijbrandij",
"author_email": "sytse@gitlab.com",
@@ -5868,11 +5846,10 @@
"committer_email": "sytse@gitlab.com"
},
{
- "id": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
+ "merge_request_diff_id": 10,
+ "relative_order": 14,
+ "sha": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
"message": "add directory structure for tree_helper spec\n",
- "parent_ids": [
- "5937ac0a7beb003549fc5fd26fc247adbce4a52e"
- ],
"authored_date": "2015-01-10T21:28:18.000+01:00",
"author_name": "marmis85",
"author_email": "marmis85@gmail.com",
@@ -5881,11 +5858,10 @@
"committer_email": "marmis85@gmail.com"
},
{
- "id": "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
+ "merge_request_diff_id": 10,
+ "relative_order": 16,
+ "sha": "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
"message": "Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "570e7b2abdd848b95f2f578043fc23bd6f6fd24d"
- ],
"authored_date": "2014-02-27T10:01:38.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -5894,11 +5870,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
+ "merge_request_diff_id": 10,
+ "relative_order": 17,
+ "sha": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
"message": "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
- ],
"authored_date": "2014-02-27T09:57:31.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -5907,11 +5882,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
+ "merge_request_diff_id": 10,
+ "relative_order": 18,
+ "sha": "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
"message": "More submodules\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "d14d6c0abdd253381df51a723d58691b2ee1ab08"
- ],
"authored_date": "2014-02-27T09:54:21.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -5920,11 +5894,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "d14d6c0abdd253381df51a723d58691b2ee1ab08",
+ "merge_request_diff_id": 10,
+ "relative_order": 19,
+ "sha": "d14d6c0abdd253381df51a723d58691b2ee1ab08",
"message": "Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "c1acaa58bbcbc3eafe538cb8274ba387047b69f8"
- ],
"authored_date": "2014-02-27T09:49:50.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -5933,11 +5906,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "c1acaa58bbcbc3eafe538cb8274ba387047b69f8",
+ "merge_request_diff_id": 10,
+ "relative_order": 20,
+ "sha": "c1acaa58bbcbc3eafe538cb8274ba387047b69f8",
"message": "Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "ae73cb07c9eeaf35924a10f713b364d32b2dd34f"
- ],
"authored_date": "2014-02-27T09:48:32.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -5946,9 +5918,11 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "Binary files a/.DS_Store and /dev/null differ\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 0,
+ "utf8_diff": "Binary files a/.DS_Store and /dev/null differ\n",
"new_path": ".DS_Store",
"old_path": ".DS_Store",
"a_mode": "100644",
@@ -5959,7 +5933,9 @@
"too_large": false
},
{
- "diff": "--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 1,
+ "utf8_diff": "--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n",
"new_path": ".gitignore",
"old_path": ".gitignore",
"a_mode": "100644",
@@ -5970,7 +5946,9 @@
"too_large": false
},
{
- "diff": "--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 2,
+ "utf8_diff": "--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n",
"new_path": ".gitmodules",
"old_path": ".gitmodules",
"a_mode": "100644",
@@ -5981,7 +5959,9 @@
"too_large": false
},
{
- "diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 3,
+ "utf8_diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
"new_path": "CHANGELOG",
"old_path": "CHANGELOG",
"a_mode": "100644",
@@ -5992,7 +5972,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 4,
+ "utf8_diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
"new_path": "encoding/iso8859.txt",
"old_path": "encoding/iso8859.txt",
"a_mode": "0",
@@ -6003,7 +5985,9 @@
"too_large": false
},
{
- "diff": "Binary files a/files/.DS_Store and /dev/null differ\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 5,
+ "utf8_diff": "Binary files a/files/.DS_Store and /dev/null differ\n",
"new_path": "files/.DS_Store",
"old_path": "files/.DS_Store",
"a_mode": "100644",
@@ -6014,7 +5998,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath d=\"M37.2266657,17.4468081 
C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 
75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 
61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 6,
+ "utf8_diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath 
d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 
C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 
C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
"new_path": "files/images/wm.svg",
"old_path": "files/images/wm.svg",
"a_mode": "0",
@@ -6025,7 +6011,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 7,
+ "utf8_diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
"new_path": "files/lfs/lfs_object.iso",
"old_path": "files/lfs/lfs_object.iso",
"a_mode": "0",
@@ -6036,7 +6024,9 @@
"too_large": false
},
{
- "diff": "--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" =\u003e path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" =\u003e path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output \u003c\u003c stdout.read\n @cmd_output \u003c\u003c stderr.read\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 8,
+ "utf8_diff": "--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" =\u003e path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" =\u003e path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output \u003c\u003c stdout.read\n @cmd_output \u003c\u003c stderr.read\n",
"new_path": "files/ruby/popen.rb",
"old_path": "files/ruby/popen.rb",
"a_mode": "100644",
@@ -6047,7 +6037,9 @@
"too_large": false
},
{
- "diff": "--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 9,
+ "utf8_diff": "--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n",
"new_path": "files/ruby/regex.rb",
"old_path": "files/ruby/regex.rb",
"a_mode": "100644",
@@ -6058,7 +6050,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
+ "merge_request_diff_id": 10,
+ "relative_order": 10,
+ "utf8_diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
"new_path": "files/whitespace",
"old_path": "files/whitespace",
"a_mode": "0",
@@ -6069,7 +6063,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 11,
+ "utf8_diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
"new_path": "foo/bar/.gitkeep",
"old_path": "foo/bar/.gitkeep",
"a_mode": "0",
@@ -6080,7 +6076,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 12,
+ "utf8_diff": "--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n",
"new_path": "gitlab-grack",
"old_path": "gitlab-grack",
"a_mode": "0",
@@ -6091,7 +6089,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 13,
+ "utf8_diff": "--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n",
"new_path": "gitlab-shell",
"old_path": "gitlab-shell",
"a_mode": "0",
@@ -6102,7 +6102,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/test\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 14,
+ "utf8_diff": "--- /dev/null\n+++ b/test\n",
"new_path": "test",
"old_path": "test",
"a_mode": "0",
@@ -6364,13 +6366,12 @@
"merge_request_diff": {
"id": 9,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "a4e5dfebf42e34596526acb8611bc7ed80e4eb3f",
+ "merge_request_diff_id": 9,
+ "relative_order": 0,
+ "sha": "a4e5dfebf42e34596526acb8611bc7ed80e4eb3f",
"message": "fixes #10\n",
- "parent_ids": [
- "be93687618e4b132087f430a4d8fc3a609c9b77c"
- ],
"authored_date": "2016-01-19T15:44:02.000+01:00",
"author_name": "James Lopez",
"author_email": "james@jameslopez.es",
@@ -6379,9 +6380,11 @@
"committer_email": "james@jameslopez.es"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "--- /dev/null\n+++ b/test\n",
+ "merge_request_diff_id": 9,
+ "relative_order": 0,
+ "utf8_diff": "--- /dev/null\n+++ b/test\n",
"new_path": "test",
"old_path": "test",
"a_mode": "0",
diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
index e4b4cf5ba85..0ab3afd0074 100644
--- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
@@ -95,26 +95,18 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
end
- it 'has the correct data for merge request st_diffs' do
- # makes sure we are renaming the custom method +utf8_st_diffs+ into +st_diffs+
- # one MergeRequestDiff uses the new format, where st_diffs is expected to be nil
-
- expect(MergeRequestDiff.where.not(st_diffs: nil).count).to eq(8)
- end
-
it 'has the correct data for merge request diff files' do
- expect(MergeRequestDiffFile.where.not(diff: nil).count).to eq(9)
+ expect(MergeRequestDiffFile.where.not(diff: nil).count).to eq(55)
end
- it 'has the correct data for merge request diff commits in serialised and table formats' do
- expect(MergeRequestDiff.where.not(st_commits: nil).count).to eq(7)
- expect(MergeRequestDiffCommit.count).to eq(6)
+ it 'has the correct data for merge request diff commits' do
+ expect(MergeRequestDiffCommit.count).to eq(77)
end
- it 'has the correct time for merge request st_commits' do
- st_commits = MergeRequestDiff.where.not(st_commits: nil).first.st_commits
-
- expect(st_commits.first[:committed_date]).to be_kind_of(Time)
+ it 'has the correct data for merge request latest_merge_request_diff' do
+ MergeRequest.find_each do |merge_request|
+ expect(merge_request.latest_merge_request_diff_id).to eq(merge_request.merge_request_diffs.maximum(:id))
+ end
end
it 'has labels associated to label links, associated to issues' do
@@ -155,7 +147,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
it 'has no source if source/target differ' do
- expect(MergeRequest.find_by_title('MR2').source_project_id).to eq(-1)
+ expect(MergeRequest.find_by_title('MR2').source_project_id).to be_nil
end
end
diff --git a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
index ee173afbd50..6243b6ac9f0 100644
--- a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
@@ -93,10 +93,6 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
expect(saved_project_json['merge_requests'].first['merge_request_diff']).not_to be_empty
end
- it 'has merge requests diff st_diffs' do
- expect(saved_project_json['merge_requests'].first['merge_request_diff']['utf8_st_diffs']).not_to be_nil
- end
-
it 'has merge request diff files' do
expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_files']).not_to be_empty
end
@@ -172,12 +168,6 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
expect(saved_project_json['custom_attributes'].count).to eq(2)
end
- it 'does not complain about non UTF-8 characters in MR diffs' do
- ActiveRecord::Base.connection.execute("UPDATE merge_request_diffs SET st_diffs = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
-
- expect(project_tree_saver.save).to be true
- end
-
it 'does not complain about non UTF-8 characters in MR diff files' do
ActiveRecord::Base.connection.execute("UPDATE merge_request_diff_files SET diff = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 4f97f2017ca..1016a7ab591 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -173,7 +173,6 @@ MergeRequest:
MergeRequestDiff:
- id
- state
-- st_commits
- merge_request_id
- created_at
- updated_at
diff --git a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
new file mode 100644
index 00000000000..63992ea8ab8
--- /dev/null
+++ b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
@@ -0,0 +1,55 @@
+require 'spec_helper'
+
+describe Gitlab::ImportExport::UploadsRestorer do
+ describe 'bundle a project Git repo' do
+ let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" }
+ let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
+ let(:uploads_path) { FileUploader.dynamic_path_segment(project) }
+
+ before do
+ allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+ FileUtils.mkdir_p(File.join(shared.export_path, 'uploads/random'))
+ FileUtils.touch(File.join(shared.export_path, 'uploads/random', "dummy.txt"))
+ end
+
+ after do
+ FileUtils.rm_rf(export_path)
+ end
+
+ describe 'legacy storage' do
+ let(:project) { create(:project) }
+
+ subject(:restorer) { described_class.new(project: project, shared: shared) }
+
+ it 'saves the uploads successfully' do
+ expect(restorer.restore).to be true
+ end
+
+ it 'copies the uploads to the project path' do
+ restorer.restore
+
+ uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
+
+ expect(uploads).to include('dummy.txt')
+ end
+ end
+
+ describe 'hashed storage' do
+ let(:project) { create(:project, :hashed) }
+
+ subject(:restorer) { described_class.new(project: project, shared: shared) }
+
+ it 'saves the uploads successfully' do
+ expect(restorer.restore).to be true
+ end
+
+ it 'copies the uploads to the project path' do
+ restorer.restore
+
+ uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
+
+ expect(uploads).to include('dummy.txt')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/uploads_saver_spec.rb b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
new file mode 100644
index 00000000000..e8948de1f3a
--- /dev/null
+++ b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
@@ -0,0 +1,61 @@
+require 'spec_helper'
+
+describe Gitlab::ImportExport::UploadsSaver do
+ describe 'bundle a project Git repo' do
+ let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" }
+ let(:file) { fixture_file_upload(Rails.root + 'spec/fixtures/banana_sample.gif', 'image/gif') }
+ let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
+
+ before do
+ allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+ end
+
+ after do
+ FileUtils.rm_rf(export_path)
+ end
+
+ describe 'legacy storage' do
+ let(:project) { create(:project) }
+
+ subject(:saver) { described_class.new(shared: shared, project: project) }
+
+ before do
+ UploadService.new(project, file, FileUploader).execute
+ end
+
+ it 'saves the uploads successfully' do
+ expect(saver.save).to be true
+ end
+
+ it 'copies the uploads to the export path' do
+ saver.save
+
+ uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
+
+ expect(uploads).to include('banana_sample.gif')
+ end
+ end
+
+ describe 'hashed storage' do
+ let(:project) { create(:project, :hashed) }
+
+ subject(:saver) { described_class.new(shared: shared, project: project) }
+
+ before do
+ UploadService.new(project, file, FileUploader).execute
+ end
+
+ it 'saves the uploads successfully' do
+ expect(saver.save).to be true
+ end
+
+ it 'copies the uploads to the export path' do
+ saver.save
+
+ uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
+
+ expect(uploads).to include('banana_sample.gif')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_sources_spec.rb b/spec/lib/gitlab/import_sources_spec.rb
index c5725f47453..f2fa315e3ec 100644
--- a/spec/lib/gitlab/import_sources_spec.rb
+++ b/spec/lib/gitlab/import_sources_spec.rb
@@ -56,14 +56,14 @@ describe Gitlab::ImportSources do
describe '.importer' do
import_sources = {
- 'github' => Github::Import,
+ 'github' => Gitlab::GithubImport::ParallelImporter,
'bitbucket' => Gitlab::BitbucketImport::Importer,
'gitlab' => Gitlab::GitlabImport::Importer,
'google_code' => Gitlab::GoogleCodeImport::Importer,
'fogbugz' => Gitlab::FogbugzImport::Importer,
'git' => nil,
'gitlab_project' => Gitlab::ImportExport::Importer,
- 'gitea' => Gitlab::GithubImport::Importer
+ 'gitea' => Gitlab::LegacyGithubImport::Importer
}
import_sources.each do |name, klass|
diff --git a/spec/lib/gitlab/issuable_metadata_spec.rb b/spec/lib/gitlab/issuable_metadata_spec.rb
index 2455969a183..42635a68ee1 100644
--- a/spec/lib/gitlab/issuable_metadata_spec.rb
+++ b/spec/lib/gitlab/issuable_metadata_spec.rb
@@ -1,8 +1,8 @@
require 'spec_helper'
describe Gitlab::IssuableMetadata do
- let(:user) { create(:user) }
- let!(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace) }
+ let(:user) { create(:user) }
+ let!(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace) }
subject { Class.new { include Gitlab::IssuableMetadata }.new }
@@ -10,6 +10,10 @@ describe Gitlab::IssuableMetadata do
expect(subject.issuable_meta_data(Issue.none, 'Issue')).to eq({})
end
+ it 'raises an error when given a collection with no limit' do
+ expect { subject.issuable_meta_data(Issue.all, 'Issue') }.to raise_error(/must have a limit/)
+ end
+
context 'issues' do
let!(:issue) { create(:issue, author: user, project: project) }
let!(:closed_issue) { create(:issue, state: :closed, author: user, project: project) }
@@ -19,7 +23,7 @@ describe Gitlab::IssuableMetadata do
let!(:closing_issues) { create(:merge_requests_closing_issues, issue: issue, merge_request: merge_request) }
it 'aggregates stats on issues' do
- data = subject.issuable_meta_data(Issue.all, 'Issue')
+ data = subject.issuable_meta_data(Issue.all.limit(10), 'Issue')
expect(data.count).to eq(2)
expect(data[issue.id].upvotes).to eq(1)
@@ -42,7 +46,7 @@ describe Gitlab::IssuableMetadata do
let!(:note) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "a comment on a MR") }
it 'aggregates stats on merge requests' do
- data = subject.issuable_meta_data(MergeRequest.all, 'MergeRequest')
+ data = subject.issuable_meta_data(MergeRequest.all.limit(10), 'MergeRequest')
expect(data.count).to eq(2)
expect(data[merge_request.id].upvotes).to eq(1)
diff --git a/spec/lib/gitlab/github_import/branch_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/branch_formatter_spec.rb
index 426b43f8b51..48655851140 100644
--- a/spec/lib/gitlab/github_import/branch_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/branch_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::BranchFormatter do
+describe Gitlab::LegacyGithubImport::BranchFormatter do
let(:project) { create(:project, :repository) }
let(:commit) { create(:commit, project: project) }
let(:repo) { double }
diff --git a/spec/lib/gitlab/legacy_github_import/client_spec.rb b/spec/lib/gitlab/legacy_github_import/client_spec.rb
new file mode 100644
index 00000000000..80b767abce0
--- /dev/null
+++ b/spec/lib/gitlab/legacy_github_import/client_spec.rb
@@ -0,0 +1,97 @@
+require 'spec_helper'
+
+describe Gitlab::LegacyGithubImport::Client do
+ let(:token) { '123456' }
+ let(:github_provider) { Settingslogic.new('app_id' => 'asd123', 'app_secret' => 'asd123', 'name' => 'github', 'args' => { 'client_options' => {} }) }
+
+ subject(:client) { described_class.new(token) }
+
+ before do
+ allow(Gitlab.config.omniauth).to receive(:providers).and_return([github_provider])
+ end
+
+ it 'converts OAuth2 client options to symbols' do
+ client.client.options.keys.each do |key|
+ expect(key).to be_kind_of(Symbol)
+ end
+ end
+
+ it 'does not crash (e.g. Settingslogic::MissingSetting) when verify_ssl config is not present' do
+ expect { client.api }.not_to raise_error
+ end
+
+ context 'when config is missing' do
+ before do
+ allow(Gitlab.config.omniauth).to receive(:providers).and_return([])
+ end
+
+ it 'is still possible to get an Octokit client' do
+ expect { client.api }.not_to raise_error
+ end
+
+ it 'is not possible to get an OAuth2 client' do
+ expect { client.client }.to raise_error(Projects::ImportService::Error)
+ end
+ end
+
+ context 'allows SSL verification to be configured on the API' do
+ before do
+ github_provider['verify_ssl'] = false
+ end
+
+ it 'uses supplied value' do
+ expect(client.client.options[:connection_opts][:ssl]).to eq({ verify: false })
+ expect(client.api.connection_options[:ssl]).to eq({ verify: false })
+ end
+ end
+
+ describe '#api_endpoint' do
+ context 'when provider does not specify an API endpoint' do
+ it 'uses GitHub root API endpoint' do
+ expect(client.api.api_endpoint).to eq 'https://api.github.com/'
+ end
+ end
+
+ context 'when provider specifies a custom API endpoint' do
+ before do
+ github_provider['args']['client_options']['site'] = 'https://github.company.com/'
+ end
+
+ it 'uses the custom API endpoint' do
+ expect(OmniAuth::Strategies::GitHub).not_to receive(:default_options)
+ expect(client.api.api_endpoint).to eq 'https://github.company.com/'
+ end
+ end
+
+ context 'when given a host' do
+ subject(:client) { described_class.new(token, host: 'https://try.gitea.io/') }
+
+ it 'builds an endpoint with the given host and the default API version' do
+ expect(client.api.api_endpoint).to eq 'https://try.gitea.io/api/v3/'
+ end
+ end
+
+ context 'when given an API version' do
+ subject(:client) { described_class.new(token, api_version: 'v3') }
+
+ it 'does not use the API version without a host' do
+ expect(client.api.api_endpoint).to eq 'https://api.github.com/'
+ end
+ end
+
+ context 'when given a host and version' do
+ subject(:client) { described_class.new(token, host: 'https://try.gitea.io/', api_version: 'v3') }
+
+ it 'builds an endpoint with the given options' do
+ expect(client.api.api_endpoint).to eq 'https://try.gitea.io/api/v3/'
+ end
+ end
+ end
+
+ it 'does not raise error when rate limit is disabled' do
+ stub_request(:get, /api.github.com/)
+ allow(client.api).to receive(:rate_limit!).and_raise(Octokit::NotFound)
+
+ expect { client.issues {} }.not_to raise_error
+ end
+end
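
The endpoint expectations above reduce to a small rule: no host means the GitHub root API, and a host is joined with a versioned api/ path. A minimal sketch of that rule (the helper name and structure are assumptions, not the client's verified internals):

    # Hypothetical helper reproducing the endpoint rules asserted above.
    def build_endpoint(host: nil, api_version: 'v3')
      return 'https://api.github.com/' unless host # no host: GitHub root API

      "#{host.chomp('/')}/api/#{api_version}/" # host plus versioned API path
    end

    build_endpoint(host: 'https://try.gitea.io/') # => "https://try.gitea.io/api/v3/"
    build_endpoint(api_version: 'v3')             # => "https://api.github.com/"
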
diff --git a/spec/lib/gitlab/github_import/comment_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb
index 035ac8c7c1f..413654e108c 100644
--- a/spec/lib/gitlab/github_import/comment_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::CommentFormatter do
+describe Gitlab::LegacyGithubImport::CommentFormatter do
let(:client) { double }
let(:project) { create(:project) }
let(:octocat) { double(id: 123456, login: 'octocat', email: 'octocat@example.com') }
diff --git a/spec/lib/gitlab/github_import/importer_spec.rb b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
index d570f34985b..20514486727 100644
--- a/spec/lib/gitlab/github_import/importer_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
-describe Gitlab::GithubImport::Importer do
- shared_examples 'Gitlab::GithubImport::Importer#execute' do
+describe Gitlab::LegacyGithubImport::Importer do
+ shared_examples 'Gitlab::LegacyGithubImport::Importer#execute' do
let(:expected_not_called) { [] }
before do
@@ -35,7 +35,7 @@ describe Gitlab::GithubImport::Importer do
end
end
- shared_examples 'Gitlab::GithubImport::Importer#execute an error occurs' do
+ shared_examples 'Gitlab::LegacyGithubImport::Importer#execute an error occurs' do
before do
allow(project).to receive(:import_data).and_return(double.as_null_object)
@@ -178,7 +178,7 @@ describe Gitlab::GithubImport::Importer do
end
end
- shared_examples 'Gitlab::GithubImport unit-testing' do
+ shared_examples 'Gitlab::LegacyGithubImport unit-testing' do
describe '#clean_up_restored_branches' do
subject { described_class.new(project) }
@@ -188,7 +188,7 @@ describe Gitlab::GithubImport::Importer do
end
context 'when pull request is still open' do
- let(:gh_pull_request) { Gitlab::GithubImport::PullRequestFormatter.new(project, pull_request) }
+ let(:gh_pull_request) { Gitlab::LegacyGithubImport::PullRequestFormatter.new(project, pull_request) }
it 'does not remove branches' do
expect(subject).not_to receive(:remove_branch)
@@ -197,7 +197,7 @@ describe Gitlab::GithubImport::Importer do
end
context 'when pull request is closed' do
- let(:gh_pull_request) { Gitlab::GithubImport::PullRequestFormatter.new(project, closed_pull_request) }
+ let(:gh_pull_request) { Gitlab::LegacyGithubImport::PullRequestFormatter.new(project, closed_pull_request) }
it 'does remove branches' do
expect(subject).to receive(:remove_branch).at_least(2).times
@@ -262,14 +262,14 @@ describe Gitlab::GithubImport::Importer do
let(:repo_root) { 'https://github.com' }
subject { described_class.new(project) }
- it_behaves_like 'Gitlab::GithubImport::Importer#execute'
- it_behaves_like 'Gitlab::GithubImport::Importer#execute an error occurs'
- it_behaves_like 'Gitlab::GithubImport unit-testing'
+ it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute'
+ it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute an error occurs'
+ it_behaves_like 'Gitlab::LegacyGithubImport unit-testing'
describe '#client' do
it 'instantiates a Client' do
allow(project).to receive(:import_data).and_return(double(credentials: credentials))
- expect(Gitlab::GithubImport::Client).to receive(:new).with(
+ expect(Gitlab::LegacyGithubImport::Client).to receive(:new).with(
credentials[:user],
{}
)
@@ -288,16 +288,16 @@ describe Gitlab::GithubImport::Importer do
project.update(import_type: 'gitea', import_url: "#{repo_root}/foo/group/project.git")
end
- it_behaves_like 'Gitlab::GithubImport::Importer#execute' do
+ it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute' do
let(:expected_not_called) { [:import_releases] }
end
- it_behaves_like 'Gitlab::GithubImport::Importer#execute an error occurs'
- it_behaves_like 'Gitlab::GithubImport unit-testing'
+ it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute an error occurs'
+ it_behaves_like 'Gitlab::LegacyGithubImport unit-testing'
describe '#client' do
it 'instantiates a Client' do
allow(project).to receive(:import_data).and_return(double(credentials: credentials))
- expect(Gitlab::GithubImport::Client).to receive(:new).with(
+ expect(Gitlab::LegacyGithubImport::Client).to receive(:new).with(
credentials[:user],
{ host: "#{repo_root}:443/foo", api_version: 'v1' }
)
diff --git a/spec/lib/gitlab/github_import/issuable_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb
index 05294d227bd..3b5d8945344 100644
--- a/spec/lib/gitlab/github_import/issuable_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::IssuableFormatter do
+describe Gitlab::LegacyGithubImport::IssuableFormatter do
let(:raw_data) do
double(number: 42)
end
diff --git a/spec/lib/gitlab/github_import/issue_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
index 0fc56d92aa6..1a4d5dbfb70 100644
--- a/spec/lib/gitlab/github_import/issue_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::IssueFormatter do
+describe Gitlab::LegacyGithubImport::IssueFormatter do
let(:client) { double }
let!(:project) { create(:project, namespace: create(:namespace, path: 'octocat')) }
let(:octocat) { double(id: 123456, login: 'octocat', email: 'octocat@example.com') }
@@ -30,7 +30,7 @@ describe Gitlab::GithubImport::IssueFormatter do
allow(client).to receive(:user).and_return(octocat)
end
- shared_examples 'Gitlab::GithubImport::IssueFormatter#attributes' do
+ shared_examples 'Gitlab::LegacyGithubImport::IssueFormatter#attributes' do
context 'when issue is open' do
let(:raw_data) { double(base_data.merge(state: 'open')) }
@@ -135,7 +135,7 @@ describe Gitlab::GithubImport::IssueFormatter do
end
end
- shared_examples 'Gitlab::GithubImport::IssueFormatter#number' do
+ shared_examples 'Gitlab::LegacyGithubImport::IssueFormatter#number' do
let(:raw_data) { double(base_data.merge(number: 1347)) }
it 'returns issue number' do
@@ -144,8 +144,8 @@ describe Gitlab::GithubImport::IssueFormatter do
end
context 'when importing a GitHub project' do
- it_behaves_like 'Gitlab::GithubImport::IssueFormatter#attributes'
- it_behaves_like 'Gitlab::GithubImport::IssueFormatter#number'
+ it_behaves_like 'Gitlab::LegacyGithubImport::IssueFormatter#attributes'
+ it_behaves_like 'Gitlab::LegacyGithubImport::IssueFormatter#number'
end
context 'when importing a Gitea project' do
@@ -153,8 +153,8 @@ describe Gitlab::GithubImport::IssueFormatter do
project.update(import_type: 'gitea')
end
- it_behaves_like 'Gitlab::GithubImport::IssueFormatter#attributes'
- it_behaves_like 'Gitlab::GithubImport::IssueFormatter#number'
+ it_behaves_like 'Gitlab::LegacyGithubImport::IssueFormatter#attributes'
+ it_behaves_like 'Gitlab::LegacyGithubImport::IssueFormatter#number'
end
describe '#has_comments?' do
diff --git a/spec/lib/gitlab/github_import/label_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/label_formatter_spec.rb
index 83fdd2cc415..0d1d04f1bf6 100644
--- a/spec/lib/gitlab/github_import/label_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/label_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::LabelFormatter do
+describe Gitlab::LegacyGithubImport::LabelFormatter do
let(:project) { create(:project) }
let(:raw) { double(name: 'improvements', color: 'e6e6e6') }
diff --git a/spec/lib/gitlab/github_import/milestone_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb
index 683fa51b78e..1db4bbb568c 100644
--- a/spec/lib/gitlab/github_import/milestone_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::MilestoneFormatter do
+describe Gitlab::LegacyGithubImport::MilestoneFormatter do
let(:project) { create(:project) }
let(:created_at) { DateTime.strptime('2011-01-26T19:01:12Z') }
let(:updated_at) { DateTime.strptime('2011-01-27T19:01:12Z') }
@@ -19,7 +19,7 @@ describe Gitlab::GithubImport::MilestoneFormatter do
subject(:formatter) { described_class.new(project, raw_data) }
- shared_examples 'Gitlab::GithubImport::MilestoneFormatter#attributes' do
+ shared_examples 'Gitlab::LegacyGithubImport::MilestoneFormatter#attributes' do
let(:data) { base_data.merge(iid_attr => 1347) }
context 'when milestone is open' do
@@ -82,7 +82,7 @@ describe Gitlab::GithubImport::MilestoneFormatter do
end
context 'when importing a GitHub project' do
- it_behaves_like 'Gitlab::GithubImport::MilestoneFormatter#attributes'
+ it_behaves_like 'Gitlab::LegacyGithubImport::MilestoneFormatter#attributes'
end
context 'when importing a Gitea project' do
@@ -91,6 +91,6 @@ describe Gitlab::GithubImport::MilestoneFormatter do
project.update(import_type: 'gitea')
end
- it_behaves_like 'Gitlab::GithubImport::MilestoneFormatter#attributes'
+ it_behaves_like 'Gitlab::LegacyGithubImport::MilestoneFormatter#attributes'
end
end
diff --git a/spec/lib/gitlab/github_import/project_creator_spec.rb b/spec/lib/gitlab/legacy_github_import/project_creator_spec.rb
index 948e7469a18..737c9a624e0 100644
--- a/spec/lib/gitlab/github_import/project_creator_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/project_creator_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::ProjectCreator do
+describe Gitlab::LegacyGithubImport::ProjectCreator do
let(:user) { create(:user) }
let(:namespace) { create(:group, owner: user) }
diff --git a/spec/lib/gitlab/github_import/pull_request_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
index 2e42f6239b7..267a41e3f32 100644
--- a/spec/lib/gitlab/github_import/pull_request_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::PullRequestFormatter do
+describe Gitlab::LegacyGithubImport::PullRequestFormatter do
let(:client) { double }
let(:project) { create(:project, :repository) }
let(:source_sha) { create(:commit, project: project).id }
@@ -44,7 +44,7 @@ describe Gitlab::GithubImport::PullRequestFormatter do
allow(client).to receive(:user).and_return(octocat)
end
- shared_examples 'Gitlab::GithubImport::PullRequestFormatter#attributes' do
+ shared_examples 'Gitlab::LegacyGithubImport::PullRequestFormatter#attributes' do
context 'when pull request is open' do
let(:raw_data) { double(base_data.merge(state: 'open')) }
@@ -189,7 +189,7 @@ describe Gitlab::GithubImport::PullRequestFormatter do
end
end
- shared_examples 'Gitlab::GithubImport::PullRequestFormatter#number' do
+ shared_examples 'Gitlab::LegacyGithubImport::PullRequestFormatter#number' do
let(:raw_data) { double(base_data) }
it 'returns pull request number' do
@@ -197,7 +197,7 @@ describe Gitlab::GithubImport::PullRequestFormatter do
end
end
- shared_examples 'Gitlab::GithubImport::PullRequestFormatter#source_branch_name' do
+ shared_examples 'Gitlab::LegacyGithubImport::PullRequestFormatter#source_branch_name' do
context 'when source branch exists' do
let(:raw_data) { double(base_data) }
@@ -231,7 +231,7 @@ describe Gitlab::GithubImport::PullRequestFormatter do
end
end
- shared_examples 'Gitlab::GithubImport::PullRequestFormatter#target_branch_name' do
+ shared_examples 'Gitlab::LegacyGithubImport::PullRequestFormatter#target_branch_name' do
context 'when target branch exists' do
let(:raw_data) { double(base_data) }
@@ -250,10 +250,10 @@ describe Gitlab::GithubImport::PullRequestFormatter do
end
context 'when importing a GitHub project' do
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#attributes'
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#number'
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#source_branch_name'
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#target_branch_name'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#attributes'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#number'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#source_branch_name'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#target_branch_name'
end
context 'when importing a Gitea project' do
@@ -261,10 +261,10 @@ describe Gitlab::GithubImport::PullRequestFormatter do
project.update(import_type: 'gitea')
end
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#attributes'
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#number'
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#source_branch_name'
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#target_branch_name'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#attributes'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#number'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#source_branch_name'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#target_branch_name'
end
describe '#valid?' do
diff --git a/spec/lib/gitlab/github_import/release_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb
index 926bf725d6a..082e3b36dd0 100644
--- a/spec/lib/gitlab/github_import/release_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::ReleaseFormatter do
+describe Gitlab::LegacyGithubImport::ReleaseFormatter do
let!(:project) { create(:project, namespace: create(:namespace, path: 'octocat')) }
let(:octocat) { double(id: 123456, login: 'octocat') }
let(:created_at) { DateTime.strptime('2011-01-26T19:01:12Z') }
diff --git a/spec/lib/gitlab/github_import/user_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb
index 98e3a7c28b9..3cd096eb0ad 100644
--- a/spec/lib/gitlab/github_import/user_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::UserFormatter do
+describe Gitlab::LegacyGithubImport::UserFormatter do
let(:client) { double }
let(:octocat) { double(id: 123456, login: 'octocat', email: 'octocat@example.com') }
diff --git a/spec/lib/gitlab/github_import/wiki_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/wiki_formatter_spec.rb
index 2662cc20b32..7723533aee2 100644
--- a/spec/lib/gitlab/github_import/wiki_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/wiki_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::WikiFormatter do
+describe Gitlab::LegacyGithubImport::WikiFormatter do
let(:project) do
create(:project,
namespace: create(:namespace, path: 'gitlabhq'),
diff --git a/spec/lib/gitlab/metrics/method_call_spec.rb b/spec/lib/gitlab/metrics/method_call_spec.rb
index f1e9e414e0d..5341addf911 100644
--- a/spec/lib/gitlab/metrics/method_call_spec.rb
+++ b/spec/lib/gitlab/metrics/method_call_spec.rb
@@ -13,16 +13,52 @@ describe Gitlab::Metrics::MethodCall do
expect(method_call.call_count).to eq(1)
end
- it 'observes the performance of the supplied block' do
- expect(described_class.call_real_duration_histogram)
- .to receive(:observe)
- .with({ module: :Foo, method: '#bar' }, be_a_kind_of(Numeric))
+ context 'when measurement is above threshold' do
+ before do
+ allow(method_call).to receive(:above_threshold?).and_return(true)
+ end
- expect(described_class.call_cpu_duration_histogram)
- .to receive(:observe)
- .with({ module: :Foo, method: '#bar' }, be_a_kind_of(Numeric))
+ context 'prometheus instrumentation is enabled' do
+ before do
+ Feature.get(:prometheus_metrics_method_instrumentation).enable
+ end
- method_call.measure { 'foo' }
+ it 'observes the performance of the supplied block' do
+ expect(described_class.call_duration_histogram)
+ .to receive(:observe)
+ .with({ module: :Foo, method: '#bar' }, be_a_kind_of(Numeric))
+
+ method_call.measure { 'foo' }
+ end
+ end
+
+ context 'prometheus instrumentation is disabled' do
+ before do
+ Feature.get(:prometheus_metrics_method_instrumentation).disable
+ end
+
+ it 'does not observe the performance' do
+ expect(described_class.call_duration_histogram)
+ .not_to receive(:observe)
+
+ method_call.measure { 'foo' }
+ end
+ end
+ end
+
+ context 'when measurement is below threshold' do
+ before do
+ allow(method_call).to receive(:above_threshold?).and_return(false)
+
+ Feature.get(:prometheus_metrics_method_instrumentation).enable
+ end
+
+ it 'does not observe the performance' do
+ expect(described_class.call_duration_histogram)
+ .not_to receive(:observe)
+
+ method_call.measure { 'foo' }
+ end
end
end
@@ -43,7 +79,13 @@ describe Gitlab::Metrics::MethodCall do
end
describe '#above_threshold?' do
+ before do
+ allow(Gitlab::Metrics).to receive(:method_call_threshold).and_return(100)
+ end
+
it 'returns false when the total call time is not above the threshold' do
+ expect(method_call).to receive(:real_time).and_return(9)
+
expect(method_call.above_threshold?).to eq(false)
end
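
Taken together, these examples describe a two-gate design: a call is reported to Prometheus only when its duration crosses Gitlab::Metrics.method_call_threshold and the prometheus_metrics_method_instrumentation feature flag is enabled. A sketch of that control flow, assuming hypothetical instance state and that the duration and threshold share a unit:

    # Sketch only; the real MethodCall also tracks CPU time, and the
    # millisecond unit here is an assumption drawn from the spec's numbers.
    def measure
      started = Time.now
      result = yield
      @real_time = (Time.now - started) * 1000.0
      @call_count += 1

      if above_threshold? && Feature.enabled?(:prometheus_metrics_method_instrumentation)
        self.class.call_duration_histogram
          .observe({ module: @module_name, method: @method_name }, @real_time)
      end

      result
    end

    def above_threshold?
      @real_time >= Gitlab::Metrics.method_call_threshold
    end
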
diff --git a/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb b/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb
index 6d69b5305d2..ae1d8b47fe9 100644
--- a/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb
@@ -4,32 +4,40 @@ describe Gitlab::Metrics::SidekiqMiddleware do
let(:middleware) { described_class.new }
let(:message) { { 'args' => ['test'], 'enqueued_at' => Time.new(2016, 6, 23, 6, 59).to_f } }
- def run(worker, message)
- expect(Gitlab::Metrics::BackgroundTransaction).to receive(:new)
- .with(worker.class)
- .and_call_original
-
- expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:set)
- .with(:sidekiq_queue_duration, instance_of(Float))
+ describe '#call' do
+ it 'tracks the transaction' do
+ worker = double(:worker, class: double(:class, name: 'TestWorker'))
- expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:finish)
+ expect(Gitlab::Metrics::BackgroundTransaction).to receive(:new)
+ .with(worker.class)
+ .and_call_original
- middleware.call(worker, message, :test) { nil }
- end
+ expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:set)
+ .with(:sidekiq_queue_duration, instance_of(Float))
- describe '#call' do
- let(:test_worker_class) { double(:class, name: 'TestWorker') }
- let(:worker) { double(:worker, class: test_worker_class) }
+ expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:finish)
- it 'tracks the transaction' do
- run(worker, message)
+ middleware.call(worker, message, :test) { nil }
end
it 'tracks the transaction (for messages without `enqueued_at`)' do
- run(worker, {})
+ worker = double(:worker, class: double(:class, name: 'TestWorker'))
+
+ expect(Gitlab::Metrics::BackgroundTransaction).to receive(:new)
+ .with(worker.class)
+ .and_call_original
+
+ expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:set)
+ .with(:sidekiq_queue_duration, instance_of(Float))
+
+ expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:finish)
+
+ middleware.call(worker, {}, :test) { nil }
end
it 'tracks any raised exceptions' do
+ worker = double(:worker, class: double(:class, name: 'TestWorker'))
+
expect_any_instance_of(Gitlab::Metrics::Transaction)
.to receive(:run).and_raise(RuntimeError)
diff --git a/spec/lib/gitlab/middleware/go_spec.rb b/spec/lib/gitlab/middleware/go_spec.rb
index 67121937398..60a134be939 100644
--- a/spec/lib/gitlab/middleware/go_spec.rb
+++ b/spec/lib/gitlab/middleware/go_spec.rb
@@ -127,6 +127,14 @@ describe Gitlab::Middleware::Go do
include_examples 'go-get=1', enabled_protocol: nil
end
+
+ context 'with nothing disabled (blank string)' do
+ before do
+ stub_application_setting(enabled_git_access_protocol: '')
+ end
+
+ include_examples 'go-get=1', enabled_protocol: nil
+ end
end
def go
diff --git a/spec/lib/gitlab/middleware/read_only_spec.rb b/spec/lib/gitlab/middleware/read_only_spec.rb
index 86be06ff595..07ba11b93a3 100644
--- a/spec/lib/gitlab/middleware/read_only_spec.rb
+++ b/spec/lib/gitlab/middleware/read_only_spec.rb
@@ -84,21 +84,23 @@ describe Gitlab::Middleware::ReadOnly do
end
it 'expects POST of new file that looks like an LFS batch url to be disallowed' do
+ expect(Rails.application.routes).to receive(:recognize_path).and_call_original
response = request.post('/root/gitlab-ce/new/master/app/info/lfs/objects/batch')
expect(response).to be_a_redirect
expect(subject).to disallow_request
end
- context 'whitelisted requests' do
- it 'expects DELETE request to logout to be allowed' do
- response = request.delete('/users/sign_out')
+ it 'returns last_visited_url for disallowed request' do
+ response = request.post('/test_request')
- expect(response).not_to be_a_redirect
- expect(subject).not_to disallow_request
- end
+ expect(response.location).to eq 'http://localhost/'
+ end
+ context 'whitelisted requests' do
it 'expects a POST internal request to be allowed' do
+ expect(Rails.application.routes).not_to receive(:recognize_path)
+
response = request.post("/api/#{API::API.version}/internal")
expect(response).not_to be_a_redirect
@@ -106,6 +108,7 @@ describe Gitlab::Middleware::ReadOnly do
end
it 'expects a POST LFS request to batch URL to be allowed' do
+ expect(Rails.application.routes).to receive(:recognize_path).and_call_original
response = request.post('/root/rouge.git/info/lfs/objects/batch')
expect(response).not_to be_a_redirect
@@ -113,6 +116,7 @@ describe Gitlab::Middleware::ReadOnly do
end
it 'expects a POST request to git-upload-pack URL to be allowed' do
+ expect(Rails.application.routes).to receive(:recognize_path).and_call_original
response = request.post('/root/rouge.git/git-upload-pack')
expect(response).not_to be_a_redirect
diff --git a/spec/lib/gitlab/o_auth/user_spec.rb b/spec/lib/gitlab/o_auth/user_spec.rb
index c7471a21fda..2f19fb7312d 100644
--- a/spec/lib/gitlab/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/o_auth/user_spec.rb
@@ -662,4 +662,13 @@ describe Gitlab::OAuth::User do
end
end
end
+
+ describe '.find_by_uid_and_provider' do
+ let!(:existing_user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'my-provider') }
+
+ it 'normalizes extern_uid' do
+ allow(oauth_user.auth_hash).to receive(:uid).and_return('MY-UID')
+ expect(oauth_user.find_user).to eql gl_user
+ end
+ end
end
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index 9c3e7d7e9ba..a424f0f5cfe 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -70,6 +70,15 @@ describe Gitlab::ProjectSearchResults do
subject { described_class.parse_search_result(search_result) }
+ it 'can correctly parse filenames including ":"' do
+ special_char_result = "\nmaster:testdata/project::function1.yaml-1----\nmaster:testdata/project::function1.yaml:2:test: data1\n"
+
+ blob = described_class.parse_search_result(special_char_result)
+
+ expect(blob.ref).to eq('master')
+ expect(blob.filename).to eq('testdata/project::function1.yaml')
+ end
+
it "returns a valid FoundBlob" do
is_expected.to be_an Gitlab::SearchResults::FoundBlob
expect(subject.id).to be_nil
diff --git a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb
index 476a3f1998d..ef874368077 100644
--- a/spec/lib/gitlab/reference_extractor_spec.rb
+++ b/spec/lib/gitlab/reference_extractor_spec.rb
@@ -250,4 +250,34 @@ describe Gitlab::ReferenceExtractor do
subject { described_class.references_pattern }
it { is_expected.to be_kind_of Regexp }
end
+
+ describe 'referables prefixes' do
+ def prefixes
+ described_class::REFERABLES.each_with_object({}) do |referable, result|
+ klass = referable.to_s.camelize.constantize
+
+ next unless klass.respond_to?(:reference_prefix)
+
+ prefix = klass.reference_prefix
+ result[prefix] ||= []
+ result[prefix] << referable
+ end
+ end
+
+ it 'returns all supported prefixes' do
+ expect(prefixes.keys.uniq).to match_array(%w(@ # ~ % ! $ &))
+ end
+
+ it 'does not allow one prefix for multiple referables if not allowed specifically' do
+ # make sure you are not overriding an existing prefix before changing this hash
+ multiple_allowed = {
+ '@' => 3
+ }
+
+ prefixes.each do |prefix, referables|
+ expected_count = multiple_allowed[prefix] || 1
+ expect(referables.count).to eq(expected_count)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb
index 2158b2837e2..eec6858a5de 100644
--- a/spec/lib/gitlab/shell_spec.rb
+++ b/spec/lib/gitlab/shell_spec.rb
@@ -200,18 +200,18 @@ describe Gitlab::Shell do
describe '#fork_repository' do
it 'returns true when the command succeeds' do
expect(Gitlab::Popen).to receive(:popen)
- .with([projects_path, 'fork-project', 'current/storage', 'project/path.git', 'new/storage', 'new-namespace'],
+ .with([projects_path, 'fork-repository', 'current/storage', 'project/path.git', 'new/storage', 'fork/path.git'],
nil, popen_vars).and_return([nil, 0])
- expect(gitlab_shell.fork_repository('current/storage', 'project/path', 'new/storage', 'new-namespace')).to be true
+ expect(gitlab_shell.fork_repository('current/storage', 'project/path', 'new/storage', 'fork/path')).to be true
end
it 'returns false when the command fails' do
expect(Gitlab::Popen).to receive(:popen)
- .with([projects_path, 'fork-project', 'current/storage', 'project/path.git', 'new/storage', 'new-namespace'],
+ .with([projects_path, 'fork-repository', 'current/storage', 'project/path.git', 'new/storage', 'fork/path.git'],
nil, popen_vars).and_return(["error", 1])
- expect(gitlab_shell.fork_repository('current/storage', 'project/path', 'new/storage', 'new-namespace')).to be false
+ expect(gitlab_shell.fork_repository('current/storage', 'project/path', 'new/storage', 'fork/path')).to be false
end
end
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
new file mode 100644
index 00000000000..09f95be2213
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -0,0 +1,24 @@
+require 'rails_helper'
+
+describe Gitlab::SidekiqConfig do
+ describe '.workers' do
+ it 'includes all workers' do
+ workers = described_class.workers
+
+ expect(workers).to include(PostReceive)
+ expect(workers).to include(MergeWorker)
+ end
+ end
+
+ describe '.worker_queues' do
+ it 'includes all queues' do
+ queues = described_class.worker_queues
+
+ expect(queues).to include('post_receive')
+ expect(queues).to include('merge')
+ expect(queues).to include('cronjob')
+ expect(queues).to include('mailers')
+ expect(queues).to include('default')
+ end
+ end
+end
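
One plausible shape for these readers, assuming the queue list ships as YAML in config/sidekiq_queues.yml (both the path and the [name, weight] pair format are assumptions, not the verified implementation):

    require 'yaml'

    # Sketch: queues declared as [name, weight] pairs under the :queues key.
    def worker_queues
      config = YAML.load_file(Rails.root.join('config', 'sidekiq_queues.yml'))
      config[:queues].map { |queue, _weight| queue.to_s }
    end
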
diff --git a/spec/lib/gitlab/sql/pattern_spec.rb b/spec/lib/gitlab/sql/pattern_spec.rb
index 48d56628ed5..ef51e3cc8df 100644
--- a/spec/lib/gitlab/sql/pattern_spec.rb
+++ b/spec/lib/gitlab/sql/pattern_spec.rb
@@ -137,22 +137,22 @@ describe Gitlab::SQL::Pattern do
end
end
- describe '.to_fuzzy_arel' do
- subject(:to_fuzzy_arel) { Issue.to_fuzzy_arel(:title, query) }
+ describe '.fuzzy_arel_match' do
+ subject(:fuzzy_arel_match) { Issue.fuzzy_arel_match(:title, query) }
context 'with a word equal to 3 chars' do
let(:query) { 'foo' }
it 'returns a single ILIKE condition' do
- expect(to_fuzzy_arel.to_sql).to match(/title.*I?LIKE '\%foo\%'/)
+ expect(fuzzy_arel_match.to_sql).to match(/title.*I?LIKE '\%foo\%'/)
end
end
context 'with a word shorter than 3 chars' do
let(:query) { 'fo' }
- it 'returns nil' do
- expect(to_fuzzy_arel).to be_nil
+ it 'returns a single equality condition' do
+ expect(fuzzy_arel_match.to_sql).to match(/title.*I?LIKE 'fo'/)
end
end
@@ -160,7 +160,23 @@ describe Gitlab::SQL::Pattern do
let(:query) { 'foo baz' }
it 'returns a joining LIKE condition using a AND' do
- expect(to_fuzzy_arel.to_sql).to match(/title.+I?LIKE '\%foo\%' AND .*title.*I?LIKE '\%baz\%'/)
+ expect(fuzzy_arel_match.to_sql).to match(/title.+I?LIKE '\%foo\%' AND .*title.*I?LIKE '\%baz\%'/)
+ end
+ end
+
+ context 'with two words both shorter than 3 chars' do
+ let(:query) { 'fo ba' }
+
+ it 'returns a single ILIKE condition' do
+ expect(fuzzy_arel_match.to_sql).to match(/title.*I?LIKE 'fo ba'/)
+ end
+ end
+
+ context 'with two words, one shorter 3 chars' do
+ let(:query) { 'foo ba' }
+
+ it 'returns a single ILIKE condition using the longer word' do
+ expect(fuzzy_arel_match.to_sql).to match(/title.+I?LIKE '\%foo\%'/)
end
end
@@ -168,7 +184,7 @@ describe Gitlab::SQL::Pattern do
let(:query) { 'foo "really bar" baz' }
it 'returns a joining LIKE condition using a AND' do
- expect(to_fuzzy_arel.to_sql).to match(/title.+I?LIKE '\%foo\%' AND .*title.*I?LIKE '\%baz\%' AND .*title.*I?LIKE '\%really bar\%'/)
+ expect(fuzzy_arel_match.to_sql).to match(/title.+I?LIKE '\%foo\%' AND .*title.*I?LIKE '\%baz\%' AND .*title.*I?LIKE '\%really bar\%'/)
end
end
end
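
A usage sketch summarising the matching rules these examples pin down (exact quoting and ILIKE vs LIKE depend on the database adapter):

    Issue.fuzzy_arel_match(:title, 'foo').to_sql     # title ILIKE '%foo%'
    Issue.fuzzy_arel_match(:title, 'fo').to_sql      # title ILIKE 'fo'    (short words match exactly)
    Issue.fuzzy_arel_match(:title, 'foo ba').to_sql  # title ILIKE '%foo%' (short word dropped)
    Issue.fuzzy_arel_match(:title, 'foo baz').to_sql # two ILIKE conditions joined with AND
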
diff --git a/spec/lib/gitlab/storage_check/cli_spec.rb b/spec/lib/gitlab/storage_check/cli_spec.rb
new file mode 100644
index 00000000000..6db0925899c
--- /dev/null
+++ b/spec/lib/gitlab/storage_check/cli_spec.rb
@@ -0,0 +1,19 @@
+require 'spec_helper'
+
+describe Gitlab::StorageCheck::CLI do
+ let(:options) { Gitlab::StorageCheck::Options.new('unix://tmp/socket.sock', nil, 1, false) }
+ subject(:runner) { described_class.new(options) }
+
+ describe '#update_settings' do
+ it 'updates the interval when changed in a valid response and logs the change' do
+ fake_response = double
+ expect(fake_response).to receive(:valid?).and_return(true)
+ expect(fake_response).to receive(:check_interval).and_return(42)
+ expect(runner.logger).to receive(:info)
+
+ runner.update_settings(fake_response)
+
+ expect(options.interval).to eq(42)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/storage_check/gitlab_caller_spec.rb b/spec/lib/gitlab/storage_check/gitlab_caller_spec.rb
new file mode 100644
index 00000000000..d869022fd31
--- /dev/null
+++ b/spec/lib/gitlab/storage_check/gitlab_caller_spec.rb
@@ -0,0 +1,46 @@
+require 'spec_helper'
+
+describe Gitlab::StorageCheck::GitlabCaller do
+ let(:options) { Gitlab::StorageCheck::Options.new('unix://tmp/socket.sock', nil, nil, false) }
+ subject(:gitlab_caller) { described_class.new(options) }
+
+ describe '#call!' do
+ context 'when a socket is given' do
+ it 'calls a socket' do
+ fake_connection = double
+ expect(fake_connection).to receive(:post)
+ expect(Excon).to receive(:new).with('unix://tmp/socket.sock', socket: "tmp/socket.sock") { fake_connection }
+
+ gitlab_caller.call!
+ end
+ end
+
+ context 'when a host is given' do
+ let(:options) { Gitlab::StorageCheck::Options.new('http://localhost:8080', nil, nil, false) }
+
+ it 'calls the host over HTTP' do
+ fake_connection = double
+ expect(Excon).to receive(:new).with('http://localhost:8080', socket: nil) { fake_connection }
+ expect(fake_connection).to receive(:post)
+
+ gitlab_caller.call!
+ end
+ end
+ end
+
+ describe '#headers' do
+ it 'adds the JSON header' do
+ headers = gitlab_caller.headers
+
+ expect(headers['Content-Type']).to eq('application/json')
+ end
+
+ context 'when a token was provided' do
+ let(:options) { Gitlab::StorageCheck::Options.new('unix://tmp/socket.sock', 'atoken', nil, false) }
+
+ it 'adds it to the headers' do
+ expect(gitlab_caller.headers['TOKEN']).to eq('atoken')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/storage_check/option_parser_spec.rb b/spec/lib/gitlab/storage_check/option_parser_spec.rb
new file mode 100644
index 00000000000..cad4dfbefcf
--- /dev/null
+++ b/spec/lib/gitlab/storage_check/option_parser_spec.rb
@@ -0,0 +1,31 @@
+require 'spec_helper'
+
+describe Gitlab::StorageCheck::OptionParser do
+ describe '.parse!' do
+ it 'assigns all options' do
+ args = %w(--target unix://tmp/hello/world.sock --token thetoken --interval 42)
+
+ options = described_class.parse!(args)
+
+ expect(options.token).to eq('thetoken')
+ expect(options.interval).to eq(42)
+ expect(options.target).to eq('unix://tmp/hello/world.sock')
+ end
+
+ it 'requires the interval to be a number' do
+ args = %w(--target unix://tmp/hello/world.sock --interval fortytwo)
+
+ expect { described_class.parse!(args) }.to raise_error(OptionParser::InvalidArgument)
+ end
+
+ it 'raises an error if the scheme is not included' do
+ args = %w(--target tmp/hello/world.sock)
+
+ expect { described_class.parse!(args) }.to raise_error(OptionParser::InvalidArgument)
+ end
+
+ it 'raises an error if both socket and host are missing' do
+ expect { described_class.parse!([]) }.to raise_error(OptionParser::InvalidArgument)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/storage_check/response_spec.rb b/spec/lib/gitlab/storage_check/response_spec.rb
new file mode 100644
index 00000000000..0ff2963e443
--- /dev/null
+++ b/spec/lib/gitlab/storage_check/response_spec.rb
@@ -0,0 +1,54 @@
+require 'spec_helper'
+
+describe Gitlab::StorageCheck::Response do
+ let(:fake_json) do
+ {
+ check_interval: 42,
+ results: [
+ { storage: 'working', success: true },
+ { storage: 'skipped', success: nil },
+ { storage: 'failing', success: false }
+ ]
+ }.to_json
+ end
+
+ let(:fake_http_response) do
+ fake_response = instance_double("Excon::Response - Status check")
+ allow(fake_response).to receive(:status).and_return(200)
+ allow(fake_response).to receive(:body).and_return(fake_json)
+ allow(fake_response).to receive(:headers).and_return('Content-Type' => 'application/json')
+
+ fake_response
+ end
+ let(:response) { described_class.new(fake_http_response) }
+
+ describe '#valid?' do
+ it 'is valid for a success response with parseable JSON' do
+ expect(response).to be_valid
+ end
+ end
+
+ describe '#check_interval' do
+ it 'returns the result from the JSON' do
+ expect(response.check_interval).to eq(42)
+ end
+ end
+
+ describe '#responsive_shards' do
+ it 'contains the names of working shards' do
+ expect(response.responsive_shards).to contain_exactly('working')
+ end
+ end
+
+ describe '#skipped_shards' do
+ it 'contains the names of skipped shards' do
+ expect(response.skipped_shards).to contain_exactly('skipped')
+ end
+ end
+
+ describe '#failing_shards' do
+ it 'contains the name of failing shards' do
+ expect(response.failing_shards).to contain_exactly('failing')
+ end
+ end
+end
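
A minimal sketch consistent with these expectations, wrapping a 200/JSON HTTP response (the helper names are assumptions):

    require 'json'

    class Response
      def initialize(http_response)
        @http_response = http_response
      end

      def valid?
        @http_response.status == 200 &&
          @http_response.headers['Content-Type'].include?('application/json') &&
          !parsed.nil?
      end

      def check_interval
        parsed && parsed['check_interval']
      end

      def responsive_shards
        shards_where(true)
      end

      def skipped_shards
        shards_where(nil)
      end

      def failing_shards
        shards_where(false)
      end

      private

      # Select the names of shards whose result matches the given success value.
      def shards_where(success)
        results = parsed ? parsed['results'] : []
        results.select { |r| r['success'] == success }.map { |r| r['storage'] }
      end

      def parsed
        @parsed ||= JSON.parse(@http_response.body)
      rescue JSON::ParserError
        nil
      end
    end
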
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index f18823b61ef..d9b3c2350b1 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -20,6 +20,22 @@ describe Gitlab::UrlBlocker do
expect(described_class.blocked_url?('https://gitlab.com:25/foo/foo.git')).to be true
end
+ it 'returns true for alternative version of 127.0.0.1 (0177.1)' do
+ expect(described_class.blocked_url?('https://0177.1:65535/foo/foo.git')).to be true
+ end
+
+ it 'returns true for alternative version of 127.0.0.1 (0x7f.1)' do
+ expect(described_class.blocked_url?('https://0x7f.1:65535/foo/foo.git')).to be true
+ end
+
+ it 'returns true for alternative version of 127.0.0.1 (2130706433)' do
+ expect(described_class.blocked_url?('https://2130706433:65535/foo/foo.git')).to be true
+ end
+
+ it 'returns true for alternative version of 127.0.0.1 (127.000.000.001)' do
+ expect(described_class.blocked_url?('https://127.000.000.001:65535/foo/foo.git')).to be true
+ end
+
it 'returns true for a non-alphanumeric hostname' do
stub_resolv
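
The new cases cover legacy inet_aton spellings of the loopback address: octal (0177.1), hexadecimal (0x7f.1), a single 32-bit integer (2130706433), and zero-padded octets. Common resolvers accept all of them as 127.0.0.1, which is why the blocker has to normalise before comparing. A quick, resolver-dependent demonstration:

    require 'socket'

    %w(0177.1 0x7f.1 2130706433 127.000.000.001).each do |host|
      puts "#{host} => #{IPSocket.getaddress(host)}" # typically 127.0.0.1
    end
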
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index a4c1113ae37..b5f2a15ada3 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -103,7 +103,7 @@ describe Gitlab::UsageData do
subject { described_class.features_usage_data_ce }
it 'gathers feature usage data' do
- expect(subject[:signup]).to eq(current_application_settings.signup_enabled?)
+ expect(subject[:signup]).to eq(current_application_settings.allow_signup?)
expect(subject[:ldap]).to eq(Gitlab.config.ldap.enabled)
expect(subject[:gravatar]).to eq(current_application_settings.gravatar_enabled?)
expect(subject[:omniauth]).to eq(Gitlab.config.omniauth.enabled)
diff --git a/spec/lib/gitlab/utils/strong_memoize_spec.rb b/spec/lib/gitlab/utils/strong_memoize_spec.rb
new file mode 100644
index 00000000000..4a104ab6d97
--- /dev/null
+++ b/spec/lib/gitlab/utils/strong_memoize_spec.rb
@@ -0,0 +1,52 @@
+require 'spec_helper'
+
+describe Gitlab::Utils::StrongMemoize do
+ let(:klass) do
+ struct = Struct.new(:value) do
+ def method_name
+ strong_memoize(:method_name) do
+ trace << value
+ value
+ end
+ end
+
+ def trace
+ @trace ||= []
+ end
+ end
+
+ struct.include(described_class)
+ struct
+ end
+
+ subject(:object) { klass.new(value) }
+
+ shared_examples 'caching the value' do
+ it 'only calls the block once' do
+ value0 = object.method_name
+ value1 = object.method_name
+
+ expect(value0).to eq(value)
+ expect(value1).to eq(value)
+ expect(object.trace).to contain_exactly(value)
+ end
+
+ it 'returns and defines the instance variable for the exact value' do
+ returned_value = object.method_name
+ memoized_value = object.instance_variable_get(:@method_name)
+
+ expect(returned_value).to eql(value)
+ expect(memoized_value).to eql(value)
+ end
+ end
+
+ describe '#strong_memoize' do
+ [nil, false, true, 'value', 0, [0]].each do |value|
+ context "with value #{value}" do
+ let(:value) { value }
+
+ it_behaves_like 'caching the value'
+ end
+ end
+ end
+end
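
The values cycled through above include nil and false, which is the whole point of the contract: a plain @x ||= ... would re-run the block for falsy results. A minimal sketch that honours it by testing whether the instance variable is defined rather than truthy:

    module StrongMemoize
      # Cache the block's result under @<name>, falsy values included.
      def strong_memoize(name)
        ivar = "@#{name}"

        if instance_variable_defined?(ivar)
          instance_variable_get(ivar)
        else
          instance_variable_set(ivar, yield)
        end
      end
    end
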
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 3137a72fdc4..e872a5290c5 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe Gitlab::Utils do
- delegate :to_boolean, :boolean_to_yes_no, :slugify, :random_string, to: :described_class
+ delegate :to_boolean, :boolean_to_yes_no, :slugify, :random_string, :which, to: :described_class
describe '.slugify' do
{
@@ -59,4 +59,12 @@ describe Gitlab::Utils do
expect(random_string).to be_kind_of(String)
end
end
+
+ describe '.which' do
+ it 'finds the full path to an executable binary' do
+ expect(File).to receive(:executable?).with('/bin/sh').and_return(true)
+
+ expect(which('sh', 'PATH' => '/bin')).to eq('/bin/sh')
+ end
+ end
end
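
The stubbed File.executable? call pins down the lookup the spec expects: walk the PATH entries of the supplied environment and return the first executable candidate. A sketch:

    # Return the full path of `cmd` on the given PATH, or nil if not found.
    def which(cmd, env = ENV)
      env['PATH'].split(File::PATH_SEPARATOR).each do |dir|
        exe = File.join(dir, cmd)
        return exe if File.executable?(exe)
      end

      nil
    end

    which('sh', 'PATH' => '/bin') # => "/bin/sh" when /bin/sh is executable
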
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
index fac23dce44d..ecb4034ec8b 100644
--- a/spec/lib/google_api/cloud_platform/client_spec.rb
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
describe GoogleApi::CloudPlatform::Client do
let(:token) { 'token' }
let(:client) { described_class.new(token, nil) }
+ let(:user_agent_options) { client.instance_eval { user_agent_header } }
describe '.session_key_for_redirect_uri' do
let(:state) { 'random_string' }
@@ -55,7 +56,8 @@ describe GoogleApi::CloudPlatform::Client do
before do
allow_any_instance_of(Google::Apis::ContainerV1::ContainerService)
- .to receive(:get_zone_cluster).and_return(gke_cluster)
+ .to receive(:get_zone_cluster).with(any_args, options: user_agent_options)
+ .and_return(gke_cluster)
end
it { is_expected.to eq(gke_cluster) }
@@ -74,7 +76,8 @@ describe GoogleApi::CloudPlatform::Client do
before do
allow_any_instance_of(Google::Apis::ContainerV1::ContainerService)
- .to receive(:create_cluster).and_return(operation)
+ .to receive(:create_cluster).with(any_args, options: user_agent_options)
+ .and_return(operation)
end
it { is_expected.to eq(operation) }
@@ -102,7 +105,8 @@ describe GoogleApi::CloudPlatform::Client do
before do
allow_any_instance_of(Google::Apis::ContainerV1::ContainerService)
- .to receive(:get_zone_operation).and_return(operation)
+ .to receive(:get_zone_operation).with(any_args, options: user_agent_options)
+ .and_return(operation)
end
it { is_expected.to eq(operation) }
@@ -125,4 +129,18 @@ describe GoogleApi::CloudPlatform::Client do
it { is_expected.to be_nil }
end
end
+
+ describe '#user_agent_header' do
+ subject { client.instance_eval { user_agent_header } }
+
+ it 'returns a RequestOptions object' do
+ expect(subject).to be_instance_of(Google::Apis::RequestOptions)
+ end
+
+ it 'has the correct GitLab version in User-Agent header' do
+ stub_const('Gitlab::VERSION', '10.3.0-pre')
+
+ expect(subject.header).to eq({ 'User-Agent': 'GitLab/10.3 (GPN:GitLab;)' })
+ end
+ end
end
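
A plausible construction matching both assertions, with the version truncation ('10.3.0-pre' to '10.3') inferred from the expected header rather than from verified source:

    def user_agent_header
      Google::Apis::RequestOptions.new.tap do |options|
        major_minor = Gitlab::VERSION[/\d+\.\d+/] # e.g. '10.3.0-pre' => '10.3'
        options.header = { 'User-Agent': "GitLab/#{major_minor} (GPN:GitLab;)" }
      end
    end
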
diff --git a/spec/lib/milestone_array_spec.rb b/spec/lib/milestone_array_spec.rb
new file mode 100644
index 00000000000..df91677b925
--- /dev/null
+++ b/spec/lib/milestone_array_spec.rb
@@ -0,0 +1,34 @@
+require 'spec_helper'
+
+describe MilestoneArray do
+ let(:object1) { instance_double("BirdMilestone", due_date: Time.now, start_date: Time.now - 15.days, title: 'v2.0') }
+ let(:object2) { instance_double("CatMilestone", due_date: Time.now - 1.day, start_date: nil, title: 'v1.0') }
+ let(:object3) { instance_double("DogMilestone", due_date: nil, start_date: Time.now - 30.days, title: 'v3.0') }
+ let(:array) { [object1, object3, object2] }
+
+ describe '#sort' do
+ it 'reorders array with due date in ascending order with nulls last' do
+ expect(described_class.sort(array, 'due_date_asc')).to eq([object2, object1, object3])
+ end
+
+ it 'reorders array with due date in descending order with nulls last' do
+ expect(described_class.sort(array, 'due_date_desc')).to eq([object1, object2, object3])
+ end
+
+ it 'reorders array with start date in ascending order with nulls last' do
+ expect(described_class.sort(array, 'start_date_asc')).to eq([object3, object1, object2])
+ end
+
+ it 'reorders array with start date in descending order with nulls last' do
+ expect(described_class.sort(array, 'start_date_desc')).to eq([object1, object3, object2])
+ end
+
+ it 'reorders array with title in ascending order' do
+ expect(described_class.sort(array, 'name_asc')).to eq([object2, object1, object3])
+ end
+
+ it 'reorders array with title in descending order' do
+ expect(described_class.sort(array, 'name_desc')).to eq([object3, object1, object2])
+ end
+ end
+end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index c832cee965b..e1d71a9573b 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -602,7 +602,7 @@ describe Notify do
it 'has the correct subject and body' do
aggregate_failures do
- is_expected.to have_subject("Re: #{project.name} | #{commit.title.strip} (#{commit.short_id})")
+ is_expected.to have_subject("Re: #{project.name} | #{commit.title} (#{commit.short_id})")
is_expected.to have_body_text(commit.short_id)
end
end
@@ -712,7 +712,7 @@ describe Notify do
it_behaves_like 'a user cannot unsubscribe through footer link'
it 'has the correct subject' do
- is_expected.to have_subject "Re: #{project.name} | #{commit.title.strip} (#{commit.short_id})"
+ is_expected.to have_subject "Re: #{project.name} | #{commit.title} (#{commit.short_id})"
end
it 'contains a link to the commit' do
@@ -783,7 +783,25 @@ describe Notify do
shared_examples 'an email for a note on a diff discussion' do |model|
let(:note) { create(model, author: note_author) }
- it "includes diffs with character-level highlighting" do
+ context 'when note is on image' do
+ before do
+ allow_any_instance_of(DiffDiscussion).to receive(:on_image?).and_return(true)
+ end
+
+ it 'does not include diffs with character-level highlighting' do
+ is_expected.not_to have_body_text '<span class="p">}</span></span>'
+ end
+
+ it 'ends the intro with a dot' do
+ is_expected.to have_body_text "#{note.diff_file.file_path}</a>."
+ end
+ end
+
+ it 'ends the intro with a colon' do
+ is_expected.to have_body_text "#{note.diff_file.file_path}</a>:"
+ end
+
+ it 'includes diffs with character-level highlighting' do
is_expected.to have_body_text '<span class="p">}</span></span>'
end
diff --git a/spec/migrations/migrate_gcp_clusters_to_new_clusters_architectures_spec.rb b/spec/migrations/migrate_gcp_clusters_to_new_clusters_architectures_spec.rb
index 9f41534441b..05f281fffff 100644
--- a/spec/migrations/migrate_gcp_clusters_to_new_clusters_architectures_spec.rb
+++ b/spec/migrations/migrate_gcp_clusters_to_new_clusters_architectures_spec.rb
@@ -57,7 +57,7 @@ describe MigrateGcpClustersToNewClustersArchitectures, :migration do
expect(cluster.platform_type).to eq('kubernetes')
expect(cluster.project).to eq(project)
- expect(project.cluster).to eq(cluster)
+ expect(project.clusters).to include(cluster)
expect(cluster.provider_gcp.cluster).to eq(cluster)
expect(cluster.provider_gcp.status).to eq(status)
@@ -134,7 +134,7 @@ describe MigrateGcpClustersToNewClustersArchitectures, :migration do
expect(cluster.platform_type).to eq('kubernetes')
expect(cluster.project).to eq(project)
- expect(project.cluster).to eq(cluster)
+ expect(project.clusters).to include(cluster)
expect(cluster.provider_gcp.cluster).to eq(cluster)
expect(cluster.provider_gcp.status).to eq(status)
diff --git a/spec/migrations/migrate_old_artifacts_spec.rb b/spec/migrations/migrate_old_artifacts_spec.rb
index 81366d15b34..92eb1d9ce86 100644
--- a/spec/migrations/migrate_old_artifacts_spec.rb
+++ b/spec/migrations/migrate_old_artifacts_spec.rb
@@ -16,20 +16,22 @@ describe MigrateOldArtifacts do
end
context 'with migratable data' do
- let(:project1) { create(:project, ci_id: 2) }
- let(:project2) { create(:project, ci_id: 3) }
- let(:project3) { create(:project) }
+ set(:project1) { create(:project, ci_id: 2) }
+ set(:project2) { create(:project, ci_id: 3) }
+ set(:project3) { create(:project) }
- let(:pipeline1) { create(:ci_empty_pipeline, project: project1) }
- let(:pipeline2) { create(:ci_empty_pipeline, project: project2) }
- let(:pipeline3) { create(:ci_empty_pipeline, project: project3) }
+ set(:pipeline1) { create(:ci_empty_pipeline, project: project1) }
+ set(:pipeline2) { create(:ci_empty_pipeline, project: project2) }
+ set(:pipeline3) { create(:ci_empty_pipeline, project: project3) }
let!(:build_with_legacy_artifacts) { create(:ci_build, pipeline: pipeline1) }
let!(:build_without_artifacts) { create(:ci_build, pipeline: pipeline1) }
- let!(:build2) { create(:ci_build, :artifacts, pipeline: pipeline2) }
- let!(:build3) { create(:ci_build, :artifacts, pipeline: pipeline3) }
+ let!(:build2) { create(:ci_build, pipeline: pipeline2) }
+ let!(:build3) { create(:ci_build, pipeline: pipeline3) }
before do
+ setup_builds(build2, build3)
+
store_artifacts_in_legacy_path(build_with_legacy_artifacts)
end
@@ -38,7 +40,7 @@ describe MigrateOldArtifacts do
end
it "legacy artifacts are set" do
- expect(build_with_legacy_artifacts.artifacts_file_identifier).not_to be_nil
+ expect(build_with_legacy_artifacts.legacy_artifacts_file_identifier).not_to be_nil
end
describe '#min_id' do
@@ -113,5 +115,24 @@ describe MigrateOldArtifacts do
build.project.ci_id.to_s,
build.id.to_s)
end
+
+ def new_legacy_path(build)
+ File.join(directory,
+ build.created_at.utc.strftime('%Y_%m'),
+ build.project_id.to_s,
+ build.id.to_s)
+ end
+
+ def setup_builds(*builds)
+ builds.each do |build|
+ FileUtils.mkdir_p(new_legacy_path(build))
+
+ build.update_columns(
+ artifacts_file: 'ci_build_artifacts.zip',
+ artifacts_metadata: 'ci_build_artifacts_metadata.gz')
+
+ build.reload
+ end
+ end
end
end
diff --git a/spec/migrations/remove_empty_fork_networks_spec.rb b/spec/migrations/remove_empty_fork_networks_spec.rb
new file mode 100644
index 00000000000..cf6ae5cda74
--- /dev/null
+++ b/spec/migrations/remove_empty_fork_networks_spec.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20171114104051_remove_empty_fork_networks.rb')
+
+describe RemoveEmptyForkNetworks, :migration do
+ let!(:fork_networks) { table(:fork_networks) }
+
+ let(:deleted_project) { create(:project) }
+ let!(:empty_network) { create(:fork_network, id: 1, root_project_id: deleted_project.id) }
+ let!(:other_network) { create(:fork_network, id: 2, root_project_id: create(:project).id) }
+
+ before do
+ deleted_project.destroy!
+ end
+
+ it 'deletes only the fork network without members' do
+ expect(fork_networks.count).to eq(2)
+
+ migrate!
+
+ expect(fork_networks.find_by(id: empty_network.id)).to be_nil
+ expect(fork_networks.find_by(id: other_network.id)).not_to be_nil
+ expect(fork_networks.count).to eq(1)
+ end
+end
diff --git a/spec/migrations/schedule_merge_request_diff_migrations_spec.rb b/spec/migrations/schedule_merge_request_diff_migrations_spec.rb
index f95bd6e3511..76afb6c19cf 100644
--- a/spec/migrations/schedule_merge_request_diff_migrations_spec.rb
+++ b/spec/migrations/schedule_merge_request_diff_migrations_spec.rb
@@ -2,19 +2,6 @@ require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170703130158_schedule_merge_request_diff_migrations')
describe ScheduleMergeRequestDiffMigrations, :migration, :sidekiq do
- matcher :be_scheduled_migration do |time, *expected|
- match do |migration|
- BackgroundMigrationWorker.jobs.any? do |job|
- job['args'] == [migration, expected] &&
- job['at'].to_i == time.to_i
- end
- end
-
- failure_message do |migration|
- "Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
- end
- end
-
let(:merge_request_diffs) { table(:merge_request_diffs) }
let(:merge_requests) { table(:merge_requests) }
let(:projects) { table(:projects) }
@@ -37,9 +24,9 @@ describe ScheduleMergeRequestDiffMigrations, :migration, :sidekiq do
Timecop.freeze do
migrate!
- expect(described_class::MIGRATION).to be_scheduled_migration(5.minutes.from_now, 1, 1)
- expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes.from_now, 2, 2)
- expect(described_class::MIGRATION).to be_scheduled_migration(15.minutes.from_now, 4, 4)
+ expect(described_class::MIGRATION).to be_scheduled_migration(5.minutes, 1, 1)
+ expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes, 2, 2)
+ expect(described_class::MIGRATION).to be_scheduled_migration(15.minutes, 4, 4)
expect(BackgroundMigrationWorker.jobs.size).to eq 3
end
end
diff --git a/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb b/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb
index 4ab1bb67058..cf323973384 100644
--- a/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb
+++ b/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb
@@ -2,19 +2,6 @@ require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170926150348_schedule_merge_request_diff_migrations_take_two')
describe ScheduleMergeRequestDiffMigrationsTakeTwo, :migration, :sidekiq do
- matcher :be_scheduled_migration do |time, *expected|
- match do |migration|
- BackgroundMigrationWorker.jobs.any? do |job|
- job['args'] == [migration, expected] &&
- job['at'].to_i == time.to_i
- end
- end
-
- failure_message do |migration|
- "Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
- end
- end
-
let(:merge_request_diffs) { table(:merge_request_diffs) }
let(:merge_requests) { table(:merge_requests) }
let(:projects) { table(:projects) }
@@ -37,9 +24,9 @@ describe ScheduleMergeRequestDiffMigrationsTakeTwo, :migration, :sidekiq do
Timecop.freeze do
migrate!
- expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes.from_now, 1, 1)
- expect(described_class::MIGRATION).to be_scheduled_migration(20.minutes.from_now, 2, 2)
- expect(described_class::MIGRATION).to be_scheduled_migration(30.minutes.from_now, 4, 4)
+ expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes, 1, 1)
+ expect(described_class::MIGRATION).to be_scheduled_migration(20.minutes, 2, 2)
+ expect(described_class::MIGRATION).to be_scheduled_migration(30.minutes, 4, 4)
expect(BackgroundMigrationWorker.jobs.size).to eq 3
end
end
diff --git a/spec/migrations/schedule_merge_request_latest_merge_request_diff_id_migrations_spec.rb b/spec/migrations/schedule_merge_request_latest_merge_request_diff_id_migrations_spec.rb
new file mode 100644
index 00000000000..158d0bc02ed
--- /dev/null
+++ b/spec/migrations/schedule_merge_request_latest_merge_request_diff_id_migrations_spec.rb
@@ -0,0 +1,64 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20171026082505_schedule_merge_request_latest_merge_request_diff_id_migrations')
+
+describe ScheduleMergeRequestLatestMergeRequestDiffIdMigrations, :migration, :sidekiq do
+ let(:projects_table) { table(:projects) }
+ let(:merge_requests_table) { table(:merge_requests) }
+ let(:merge_request_diffs_table) { table(:merge_request_diffs) }
+
+ let(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce') }
+
+ let!(:merge_request_1) { create_mr!('mr_1', diffs: 1) }
+ let!(:merge_request_2) { create_mr!('mr_2', diffs: 2) }
+ let!(:merge_request_migrated) { create_mr!('merge_request_migrated', diffs: 3) }
+ let!(:merge_request_4) { create_mr!('mr_4', diffs: 3) }
+
+ def create_mr!(name, diffs: 0)
+ merge_request =
+ merge_requests_table.create!(target_project_id: project.id,
+ target_branch: 'master',
+ source_project_id: project.id,
+ source_branch: name,
+ title: name)
+
+ diffs.times do
+ merge_request_diffs_table.create!(merge_request_id: merge_request.id)
+ end
+
+ merge_request
+ end
+
+ def diffs_for(merge_request)
+ merge_request_diffs_table.where(merge_request_id: merge_request.id)
+ end
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+
+ diff_id = diffs_for(merge_request_migrated).minimum(:id)
+ merge_request_migrated.update!(latest_merge_request_diff_id: diff_id)
+ end
+
+ it 'correctly schedules background migrations' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(described_class::MIGRATION).to be_scheduled_migration(5.minutes, merge_request_1.id, merge_request_1.id)
+ expect(described_class::MIGRATION).to be_scheduled_migration(10.minutes, merge_request_2.id, merge_request_2.id)
+ expect(described_class::MIGRATION).to be_scheduled_migration(15.minutes, merge_request_4.id, merge_request_4.id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq 3
+ end
+ end
+ end
+
+ it 'migrates all merge requests' do
+ Sidekiq::Testing.inline! do
+ expect(merge_requests_table.where(latest_merge_request_diff_id: nil).count).to eq 3
+
+ migrate!
+
+ expect(merge_requests_table.where(latest_merge_request_diff_id: nil).count).to eq 0
+ end
+ end
+end
diff --git a/spec/migrations/track_untracked_uploads_spec.rb b/spec/migrations/track_untracked_uploads_spec.rb
new file mode 100644
index 00000000000..7fe7a140e2f
--- /dev/null
+++ b/spec/migrations/track_untracked_uploads_spec.rb
@@ -0,0 +1,27 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20171103140253_track_untracked_uploads')
+
+describe TrackUntrackedUploads, :migration, :sidekiq do
+ include TrackUntrackedUploadsHelpers
+
+ matcher :be_scheduled_migration do
+ match do |migration|
+ BackgroundMigrationWorker.jobs.any? do |job|
+ job['args'] == [migration]
+ end
+ end
+
+ failure_message do |migration|
+ "Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
+ end
+ end
+
+ it 'correctly schedules the follow-up background migration' do
+ Sidekiq::Testing.fake! do
+ migrate!
+
+ expect(described_class::MIGRATION).to be_scheduled_migration
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ end
+ end
+end
diff --git a/spec/models/appearance_spec.rb b/spec/models/appearance_spec.rb
index 49f44525b29..56b5d616284 100644
--- a/spec/models/appearance_spec.rb
+++ b/spec/models/appearance_spec.rb
@@ -5,9 +5,6 @@ describe Appearance do
it { is_expected.to be_valid }
- it { is_expected.to validate_presence_of(:title) }
- it { is_expected.to validate_presence_of(:description) }
-
it { is_expected.to have_many(:uploads).dependent(:destroy) }
describe '.current', :use_clean_rails_memory_store_caching do
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 47b7150d36f..ef480e7a80a 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -115,9 +115,8 @@ describe ApplicationSetting do
end
context 'circuitbreaker settings' do
- [:circuitbreaker_backoff_threshold,
- :circuitbreaker_failure_count_threshold,
- :circuitbreaker_failure_wait_time,
+ [:circuitbreaker_failure_count_threshold,
+ :circuitbreaker_check_interval,
:circuitbreaker_failure_reset_time,
:circuitbreaker_storage_timeout].each do |field|
it "Validates #{field} as number" do
@@ -126,16 +125,6 @@ describe ApplicationSetting do
.is_greater_than_or_equal_to(0)
end
end
-
- it 'requires the `backoff_threshold` to be lower than the `failure_count_threshold`' do
- setting.circuitbreaker_failure_count_threshold = 10
- setting.circuitbreaker_backoff_threshold = 15
- failure_message = "The circuitbreaker backoff threshold should be lower "\
- "than the failure count threshold"
-
- expect(setting).not_to be_valid
- expect(setting.errors[:circuitbreaker_backoff_threshold]).to include(failure_message)
- end
end
context 'repository storages' do
@@ -219,6 +208,65 @@ describe ApplicationSetting do
expect(subject).to be_valid
end
end
+
+ context 'gitaly timeouts' do
+ [:gitaly_timeout_default, :gitaly_timeout_medium, :gitaly_timeout_fast].each do |timeout_name|
+ it do
+ is_expected.to validate_presence_of(timeout_name)
+ is_expected.to validate_numericality_of(timeout_name).only_integer
+ .is_greater_than_or_equal_to(0)
+ end
+ end
+
+ [:gitaly_timeout_medium, :gitaly_timeout_fast].each do |timeout_name|
+ it "validates that #{timeout_name} is lower than timeout_default" do
+ subject[:gitaly_timeout_default] = 50
+ subject[timeout_name] = 100
+
+ expect(subject).to be_invalid
+ end
+ end
+
+ it 'accepts all timeouts equal' do
+ subject.gitaly_timeout_default = 0
+ subject.gitaly_timeout_medium = 0
+ subject.gitaly_timeout_fast = 0
+
+ expect(subject).to be_valid
+ end
+
+ it 'accepts timeouts in descending order' do
+ subject.gitaly_timeout_default = 50
+ subject.gitaly_timeout_medium = 30
+ subject.gitaly_timeout_fast = 20
+
+ expect(subject).to be_valid
+ end
+
+ it 'rejects timeouts in ascending order' do
+ subject.gitaly_timeout_default = 20
+ subject.gitaly_timeout_medium = 30
+ subject.gitaly_timeout_fast = 50
+
+ expect(subject).to be_invalid
+ end
+
+ it 'rejects medium timeout larger than default' do
+ subject.gitaly_timeout_default = 30
+ subject.gitaly_timeout_medium = 50
+ subject.gitaly_timeout_fast = 20
+
+ expect(subject).to be_invalid
+ end
+
+ it 'rejects medium timeout smaller than fast' do
+ subject.gitaly_timeout_default = 30
+ subject.gitaly_timeout_medium = 15
+ subject.gitaly_timeout_fast = 20
+
+ expect(subject).to be_invalid
+ end
+ end
end
describe '.current' do
@@ -564,4 +612,22 @@ describe ApplicationSetting do
expect(setting.key_restriction_for(:foo)).to eq(described_class::FORBIDDEN_KEY_VALUE)
end
end
+
+ describe '#allow_signup?' do
+ it 'returns true' do
+ expect(setting.allow_signup?).to be_truthy
+ end
+
+ it 'returns false if signup is disabled' do
+ allow(setting).to receive(:signup_enabled?).and_return(false)
+
+ expect(setting.allow_signup?).to be_falsey
+ end
+
+ it 'returns false if password authentication is disabled for the web interface' do
+ allow(setting).to receive(:password_authentication_enabled_for_web?).and_return(false)
+
+ expect(setting.allow_signup?).to be_falsey
+ end
+ end
end
diff --git a/spec/models/blob_spec.rb b/spec/models/blob_spec.rb
index 47342f98283..81e35e6c931 100644
--- a/spec/models/blob_spec.rb
+++ b/spec/models/blob_spec.rb
@@ -16,6 +16,23 @@ describe Blob do
end
end
+ describe '.lazy' do
+ let(:project) { create(:project, :repository) }
+ let(:commit) { project.commit_by(oid: 'e63f41fe459e62e1228fcef60d7189127aeba95a') }
+
+ it 'fetches all blobs when the first is accessed' do
+ changelog = described_class.lazy(project, commit.id, 'CHANGELOG')
+ contributing = described_class.lazy(project, commit.id, 'CONTRIBUTING.md')
+
+ expect(Gitlab::Git::Blob).to receive(:batch).once.and_call_original
+ expect(Gitlab::Git::Blob).not_to receive(:find)
+
+ # Access property so the values are loaded
+ changelog.id
+ contributing.id
+ end
+ end
+
describe '#data' do
context 'using a binary blob' do
it 'returns the data as-is' do
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 99a669464e0..1a20c2dda00 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -23,6 +23,8 @@ describe Ci::Build do
it { is_expected.to respond_to(:has_trace?) }
it { is_expected.to respond_to(:trace) }
+ it { is_expected.to be_a(ArtifactMigratable) }
+
describe 'callbacks' do
context 'when running after_create callback' do
it 'triggers asynchronous build hooks worker' do
@@ -130,34 +132,55 @@ describe Ci::Build do
end
describe '#artifacts?' do
- subject { build.artifacts? }
+ context 'when new artifacts are used' do
+ let(:build) { create(:ci_build, :artifacts) }
- context 'artifacts archive does not exist' do
- before do
- build.update_attributes(artifacts_file: nil)
+ subject { build.artifacts? }
+
+ context 'artifacts archive does not exist' do
+ let(:build) { create(:ci_build) }
+
+ it { is_expected.to be_falsy }
end
- it { is_expected.to be_falsy }
- end
+ context 'artifacts archive exists' do
+ it { is_expected.to be_truthy }
- context 'artifacts archive exists' do
- let(:build) { create(:ci_build, :artifacts) }
- it { is_expected.to be_truthy }
+ context 'is expired' do
+ let!(:build) { create(:ci_build, :artifacts, :expired) }
- context 'is expired' do
- before do
- build.update(artifacts_expire_at: Time.now - 7.days)
+ it { is_expected.to be_falsy }
end
+ context 'is not expired' do
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+
+ context 'when legacy artifacts are used' do
+ let(:build) { create(:ci_build, :legacy_artifacts) }
+
+ subject { build.artifacts? }
+
+ context 'artifacts archive does not exist' do
+ let(:build) { create(:ci_build) }
+
it { is_expected.to be_falsy }
end
- context 'is not expired' do
- before do
- build.update(artifacts_expire_at: Time.now + 7.days)
+ context 'artifacts archive exists' do
+ it { is_expected.to be_truthy }
+
+ context 'is expired' do
+ let!(:build) { create(:ci_build, :legacy_artifacts, :expired) }
+
+ it { is_expected.to be_falsy }
end
- it { is_expected.to be_truthy }
+ context 'is not expired' do
+ it { is_expected.to be_truthy }
+ end
end
end
end
@@ -314,6 +337,23 @@ describe Ci::Build do
end
end
+ describe '#triggered_by?' do
+ subject { build.triggered_by?(user) }
+
+ context 'when user is owner' do
+ let(:build) { create(:ci_build, pipeline: pipeline, user: user) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when user is not owner' do
+ let(:another_user) { create(:user) }
+ let(:build) { create(:ci_build, pipeline: pipeline, user: another_user) }
+
+ it { is_expected.to be_falsy }
+ end
+ end
+
describe '#detailed_status' do
it 'returns a detailed status' do
expect(build.detailed_status(user))
@@ -639,71 +679,144 @@ describe Ci::Build do
describe '#erasable?' do
subject { build.erasable? }
+
it { is_expected.to eq false }
end
end
context 'build is erasable' do
- let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
+ context 'new artifacts' do
+ let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
- describe '#erase' do
- before do
- build.erase(erased_by: user)
- end
+ describe '#erase' do
+ before do
+ build.erase(erased_by: user)
+ end
- context 'erased by user' do
- let!(:user) { create(:user, username: 'eraser') }
+ context 'erased by user' do
+ let!(:user) { create(:user, username: 'eraser') }
- include_examples 'erasable'
+ include_examples 'erasable'
- it 'records user who erased a build' do
- expect(build.erased_by).to eq user
+ it 'records user who erased a build' do
+ expect(build.erased_by).to eq user
+ end
end
- end
- context 'erased by system' do
- let(:user) { nil }
+ context 'erased by system' do
+ let(:user) { nil }
- include_examples 'erasable'
+ include_examples 'erasable'
- it 'does not set user who erased a build' do
- expect(build.erased_by).to be_nil
+ it 'does not set user who erased a build' do
+ expect(build.erased_by).to be_nil
+ end
end
end
- end
- describe '#erasable?' do
- subject { build.erasable? }
- it { is_expected.to be_truthy }
- end
+ describe '#erasable?' do
+ subject { build.erasable? }
+ it { is_expected.to be_truthy }
+ end
- describe '#erased?' do
- let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
- subject { build.erased? }
+ describe '#erased?' do
+ let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
+ subject { build.erased? }
- context 'job has not been erased' do
- it { is_expected.to be_falsey }
+ context 'job has not been erased' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'job has been erased' do
+ before do
+ build.erase
+ end
+
+ it { is_expected.to be_truthy }
+ end
end
- context 'job has been erased' do
+ context 'metadata and build trace are not available' do
+ let!(:build) { create(:ci_build, :success, :artifacts) }
+
before do
- build.erase
+ build.remove_artifacts_metadata!
end
- it { is_expected.to be_truthy }
+ describe '#erase' do
+ it 'does not raise error' do
+ expect { build.erase }.not_to raise_error
+ end
+ end
end
end
+ end
- context 'metadata and build trace are not available' do
- let!(:build) { create(:ci_build, :success, :artifacts) }
+ context 'old artifacts' do
+ context 'build is erasable' do
+ context 'legacy artifacts' do
+ let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
- before do
- build.remove_artifacts_metadata!
- end
+ describe '#erase' do
+ before do
+ build.erase(erased_by: user)
+ end
- describe '#erase' do
- it 'does not raise error' do
- expect { build.erase }.not_to raise_error
+ context 'erased by user' do
+ let!(:user) { create(:user, username: 'eraser') }
+
+ include_examples 'erasable'
+
+ it 'records user who erased a build' do
+ expect(build.erased_by).to eq user
+ end
+ end
+
+ context 'erased by system' do
+ let(:user) { nil }
+
+ include_examples 'erasable'
+
+ it 'does not set user who erased a build' do
+ expect(build.erased_by).to be_nil
+ end
+ end
+ end
+
+ describe '#erasable?' do
+ subject { build.erasable? }
+ it { is_expected.to be_truthy }
+ end
+
+ describe '#erased?' do
+ let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
+ subject { build.erased? }
+
+ context 'job has not been erased' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'job has been erased' do
+ before do
+ build.erase
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ context 'metadata and build trace are not available' do
+ let!(:build) { create(:ci_build, :success, :legacy_artifacts) }
+
+ before do
+ build.remove_artifacts_metadata!
+ end
+
+ describe '#erase' do
+ it 'does not raise error' do
+ expect { build.erase }.not_to raise_error
+ end
+ end
end
end
end
@@ -939,11 +1052,23 @@ describe Ci::Build do
describe '#keep_artifacts!' do
let(:build) { create(:ci_build, artifacts_expire_at: Time.now + 7.days) }
+ subject { build.keep_artifacts! }
+
it 'to reset expire_at' do
- build.keep_artifacts!
+ subject
expect(build.artifacts_expire_at).to be_nil
end
+
+ context 'when having artifacts files' do
+ let!(:artifact) { create(:ci_job_artifact, job: build, expire_in: '7 days') }
+
+ it 'resets dependent objects' do
+ subject
+
+ expect(artifact.reload.expire_at).to be_nil
+ end
+ end
end
describe '#merge_request' do
@@ -1268,10 +1393,10 @@ describe Ci::Build do
context 'when config does not have a questioned job' do
let(:config) do
YAML.dump({
- test_other: {
- script: 'Hello World'
- }
- })
+ test_other: {
+ script: 'Hello World'
+ }
+ })
end
it { is_expected.to eq('on_success') }
@@ -1280,11 +1405,11 @@ describe Ci::Build do
context 'when config has `when`' do
let(:config) do
YAML.dump({
- test: {
- script: 'Hello World',
- when: 'always'
- }
- })
+ test: {
+ script: 'Hello World',
+ when: 'always'
+ }
+ })
end
it { is_expected.to eq('always') }
@@ -1365,10 +1490,10 @@ describe Ci::Build do
let!(:environment) do
create(:environment,
- project: build.project,
- name: 'production',
- slug: 'prod-slug',
- external_url: '')
+ project: build.project,
+ name: 'production',
+ slug: 'prod-slug',
+ external_url: '')
end
before do
@@ -1592,8 +1717,8 @@ describe Ci::Build do
let!(:pipeline_schedule_variable) do
create(:ci_pipeline_schedule_variable,
- key: 'SCHEDULE_VARIABLE_KEY',
- pipeline_schedule: pipeline_schedule)
+ key: 'SCHEDULE_VARIABLE_KEY',
+ pipeline_schedule: pipeline_schedule)
end
before do
@@ -1735,8 +1860,8 @@ describe Ci::Build do
allow_any_instance_of(Project)
.to receive(:secret_variables_for)
.with(ref: 'master', environment: nil) do
- [create(:ci_variable, key: 'secret', value: 'value')]
- end
+ [create(:ci_variable, key: 'secret', value: 'value')]
+ end
allow_any_instance_of(Ci::Pipeline)
.to receive(:predefined_variables) { [pipeline_pre_var] }
@@ -1787,6 +1912,94 @@ describe Ci::Build do
end
end
+ describe 'state transition: any => [:running]' do
+ shared_examples 'validation is active' do
+ context 'when the dependent job has not been completed yet' do
+ let!(:pre_stage_job) { create(:ci_build, :running, pipeline: pipeline, name: 'test', stage_idx: 0) }
+
+ it { expect { job.run! }.to raise_error(Ci::Build::MissingDependenciesError) }
+ end
+
+ context 'when artifacts of the dependent job have expired' do
+ let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
+
+ it { expect { job.run! }.to raise_error(Ci::Build::MissingDependenciesError) }
+ end
+
+ context 'when artifacts of the dependent job have been erased' do
+ let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) }
+
+ before do
+ pre_stage_job.erase
+ end
+
+ it { expect { job.run! }.to raise_error(Ci::Build::MissingDependenciesError) }
+ end
+ end
+
+ shared_examples 'validation is not active' do
+ context 'when the dependent job has not been completed yet' do
+ let!(:pre_stage_job) { create(:ci_build, :running, pipeline: pipeline, name: 'test', stage_idx: 0) }
+
+ it { expect { job.run! }.not_to raise_error }
+ end
+
+ context 'when artifacts of the dependent job have expired' do
+ let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
+
+ it { expect { job.run! }.not_to raise_error }
+ end
+
+ context 'when artifacts of the dependent job have been erased' do
+ let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) }
+
+ before do
+ pre_stage_job.erase
+ end
+
+ it { expect { job.run! }.not_to raise_error }
+ end
+ end
+
+ let!(:job) { create(:ci_build, :pending, pipeline: pipeline, stage_idx: 1, options: options) }
+
+ context 'when validation of dependencies is enabled' do
+ before do
+ stub_feature_flags(ci_disable_validates_dependencies: false)
+ end
+
+ let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) }
+
+ context 'when "dependencies" keyword is not defined' do
+ let(:options) { {} }
+
+ it { expect { job.run! }.not_to raise_error }
+ end
+
+ context 'when "dependencies" keyword is empty' do
+ let(:options) { { dependencies: [] } }
+
+ it { expect { job.run! }.not_to raise_error }
+ end
+
+ context 'when "dependencies" keyword is specified' do
+ let(:options) { { dependencies: ['test'] } }
+
+ it_behaves_like 'validation is active'
+ end
+ end
+
+ context 'when validation of dependencies is disabled' do
+ let(:options) { { dependencies: ['test'] } }
+
+ before do
+ stub_feature_flags(ci_disable_validates_dependencies: true)
+ end
+
+ it_behaves_like 'validation is not active'
+ end
+ end
+
describe 'state transition when build fails' do
let(:service) { MergeRequests::AddTodoWhenBuildFailsService.new(project, user) }
@@ -1840,4 +2053,77 @@ describe Ci::Build do
end
end
end
+
+ describe '.matches_tag_ids' do
+ set(:build) { create(:ci_build, project: project, user: user) }
+ let(:tag_ids) { ::ActsAsTaggableOn::Tag.named_any(tag_list).ids }
+
+ subject { described_class.where(id: build).matches_tag_ids(tag_ids) }
+
+ before do
+ build.update(tag_list: build_tag_list)
+ end
+
+ context 'when the build has different tags' do
+ let(:build_tag_list) { %w(A B) }
+ let(:tag_list) { %w(C D) }
+
+ it "does not match a build" do
+ is_expected.not_to contain_exactly(build)
+ end
+ end
+
+ context 'when the build has a subset of the requested tags' do
+ let(:build_tag_list) { %w(A B) }
+ let(:tag_list) { %w(A B C D) }
+
+ it "does match a build" do
+ is_expected.to contain_exactly(build)
+ end
+ end
+
+ context 'when build does not have tags' do
+ let(:build_tag_list) { [] }
+ let(:tag_list) { %w(C D) }
+
+ it "does match a build" do
+ is_expected.to contain_exactly(build)
+ end
+ end
+
+ context 'when the build does not have a subset of the requested tags' do
+ let(:build_tag_list) { %w(A B C) }
+ let(:tag_list) { %w(C D) }
+
+ it "does not match a build" do
+ is_expected.not_to contain_exactly(build)
+ end
+ end
+ end
+
+ describe '.matches_tags' do
+ set(:build) { create(:ci_build, project: project, user: user) }
+
+ subject { described_class.where(id: build).with_any_tags }
+
+ before do
+ build.update(tag_list: tag_list)
+ end
+
+ context 'when the build has tags' do
+ let(:tag_list) { %w(A B) }
+
+ it "does match a build" do
+ is_expected.to contain_exactly(build)
+ end
+ end
+
+ context 'when the build has no tags' do
+ let(:tag_list) { [] }
+
+ it "does not match a build" do
+ is_expected.not_to contain_exactly(build)
+ end
+ end
+ end
end
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
new file mode 100644
index 00000000000..0e18a326c68
--- /dev/null
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -0,0 +1,74 @@
+require 'spec_helper'
+
+describe Ci::JobArtifact do
+ set(:artifact) { create(:ci_job_artifact, :archive) }
+
+ describe "Associations" do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:job) }
+ end
+
+ it { is_expected.to respond_to(:file) }
+ it { is_expected.to respond_to(:created_at) }
+ it { is_expected.to respond_to(:updated_at) }
+
+ describe '#set_size' do
+ it 'sets the size' do
+ expect(artifact.size).to eq(106365)
+ end
+ end
+
+ describe '#file' do
+ subject { artifact.file }
+
+ context 'the uploader api' do
+ it { is_expected.to respond_to(:store_dir) }
+ it { is_expected.to respond_to(:cache_dir) }
+ it { is_expected.to respond_to(:work_dir) }
+ end
+ end
+
+ describe '#expire_in' do
+ subject { artifact.expire_in }
+
+ it { is_expected.to be_nil }
+
+ context 'when expire_at is specified' do
+ let(:expire_at) { Time.now + 7.days }
+
+ before do
+ artifact.expire_at = expire_at
+ end
+
+ it { is_expected.to be_within(5).of(expire_at - Time.now) }
+ end
+ end
+
+ describe '#expire_in=' do
+ subject { artifact.expire_in }
+
+ it 'sets the expiration when assigning a valid duration' do
+ artifact.expire_in = '7 days'
+
+ is_expected.to be_within(10).of(7.days.to_i)
+ end
+
+ it 'raises an error when assigning an invalid duration' do
+ expect { artifact.expire_in = '7 elephants' }.to raise_error(ChronicDuration::DurationParseError)
+
+ is_expected.to be_nil
+ end
+
+ it 'clears the expiration when resetting the value' do
+ artifact.expire_in = nil
+
+ is_expected.to be_nil
+ end
+
+ it 'clears the expiration when set to 0' do
+ artifact.expire_in = '0'
+
+ is_expected.to be_nil
+ end
+ end
+end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 2c9e7013b77..bb89e093890 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -557,10 +557,23 @@ describe Ci::Pipeline, :mailer do
describe '#has_kubernetes_active?' do
context 'when kubernetes is active' do
- let(:project) { create(:kubernetes_project) }
+ shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
+ it 'returns true' do
+ expect(pipeline).to have_kubernetes_active
+ end
+ end
- it 'returns true' do
- expect(pipeline).to have_kubernetes_active
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ let(:project) { create(:kubernetes_project) }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
+ end
+
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
end
@@ -625,38 +638,29 @@ describe Ci::Pipeline, :mailer do
shared_context 'with some outdated pipelines' do
before do
- create_pipeline(:canceled, 'ref', 'A')
- create_pipeline(:success, 'ref', 'A')
- create_pipeline(:failed, 'ref', 'B')
- create_pipeline(:skipped, 'feature', 'C')
+ create_pipeline(:canceled, 'ref', 'A', project)
+ create_pipeline(:success, 'ref', 'A', project)
+ create_pipeline(:failed, 'ref', 'B', project)
+ create_pipeline(:skipped, 'feature', 'C', project)
end
- def create_pipeline(status, ref, sha)
- create(:ci_empty_pipeline, status: status, ref: ref, sha: sha)
+ def create_pipeline(status, ref, sha, project)
+ create(
+ :ci_empty_pipeline,
+ status: status,
+ ref: ref,
+ sha: sha,
+ project: project
+ )
end
end
- describe '.latest' do
+ describe '.newest_first' do
include_context 'with some outdated pipelines'
- context 'when no ref is specified' do
- let(:pipelines) { described_class.latest.all }
-
- it 'returns the latest pipeline for the same ref and different sha' do
- expect(pipelines.map(&:sha)).to contain_exactly('A', 'B', 'C')
- expect(pipelines.map(&:status))
- .to contain_exactly('success', 'failed', 'skipped')
- end
- end
-
- context 'when ref is specified' do
- let(:pipelines) { described_class.latest('ref').all }
-
- it 'returns the latest pipeline for ref and different sha' do
- expect(pipelines.map(&:sha)).to contain_exactly('A', 'B')
- expect(pipelines.map(&:status))
- .to contain_exactly('success', 'failed')
- end
+ it 'returns the pipelines from newest to oldest' do
+ expect(described_class.newest_first.pluck(:status))
+ .to eq(%w[skipped failed success canceled])
end
end
@@ -664,20 +668,14 @@ describe Ci::Pipeline, :mailer do
include_context 'with some outdated pipelines'
context 'when no ref is specified' do
- let(:latest_status) { described_class.latest_status }
-
- it 'returns the latest status for the same ref and different sha' do
- expect(latest_status).to eq(described_class.latest.status)
- expect(latest_status).to eq('failed')
+ it 'returns the status of the latest pipeline' do
+ expect(described_class.latest_status).to eq('skipped')
end
end
context 'when ref is specified' do
- let(:latest_status) { described_class.latest_status('ref') }
-
- it 'returns the latest status for ref and different sha' do
- expect(latest_status).to eq(described_class.latest_status('ref'))
- expect(latest_status).to eq('failed')
+ it 'returns the status of the latest pipeline for the given ref' do
+ expect(described_class.latest_status('ref')).to eq('failed')
end
end
end
@@ -686,7 +684,7 @@ describe Ci::Pipeline, :mailer do
include_context 'with some outdated pipelines'
let!(:latest_successful_pipeline) do
- create_pipeline(:success, 'ref', 'D')
+ create_pipeline(:success, 'ref', 'D', project)
end
it 'returns the latest successful pipeline' do
@@ -698,8 +696,13 @@ describe Ci::Pipeline, :mailer do
describe '.latest_successful_for_refs' do
include_context 'with some outdated pipelines'
- let!(:latest_successful_pipeline1) { create_pipeline(:success, 'ref1', 'D') }
- let!(:latest_successful_pipeline2) { create_pipeline(:success, 'ref2', 'D') }
+ let!(:latest_successful_pipeline1) do
+ create_pipeline(:success, 'ref1', 'D', project)
+ end
+
+ let!(:latest_successful_pipeline2) do
+ create_pipeline(:success, 'ref2', 'D', project)
+ end
it 'returns the latest successful pipeline for both refs' do
refs = %w(ref1 ref2 ref3)
@@ -708,6 +711,62 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '.latest_status_per_commit' do
+ let(:project) { create(:project) }
+
+ before do
+ pairs = [
+ %w[success ref1 123],
+ %w[manual master 123],
+ %w[failed ref 456]
+ ]
+
+ pairs.each do |(status, ref, sha)|
+ create(
+ :ci_empty_pipeline,
+ status: status,
+ ref: ref,
+ sha: sha,
+ project: project
+ )
+ end
+ end
+
+ context 'without a ref' do
+ it 'returns a Hash containing the latest status per commit for all refs' do
+ expect(described_class.latest_status_per_commit(%w[123 456]))
+ .to eq({ '123' => 'manual', '456' => 'failed' })
+ end
+
+ it 'only includes the status of the given commit SHAs' do
+ expect(described_class.latest_status_per_commit(%w[123]))
+ .to eq({ '123' => 'manual' })
+ end
+
+ context 'when there are two pipelines for a ref and SHA' do
+ it 'returns the status of the latest pipeline' do
+ create(
+ :ci_empty_pipeline,
+ status: 'failed',
+ ref: 'master',
+ sha: '123',
+ project: project
+ )
+
+ expect(described_class.latest_status_per_commit(%w[123]))
+ .to eq({ '123' => 'failed' })
+ end
+ end
+ end
+
+ context 'with a ref' do
+ it 'only includes the pipelines for the given ref' do
+ expect(described_class.latest_status_per_commit(%w[123 456], 'master'))
+ .to eq({ '123' => 'manual' })
+ end
+ end
+ end
+
describe '.internal_sources' do
subject { described_class.internal_sources }
@@ -809,62 +868,59 @@ describe Ci::Pipeline, :mailer do
end
describe '#set_config_source' do
- context 'on object initialisation' do
- context 'when pipelines does not contain needed data' do
- let(:pipeline) do
- Ci::Pipeline.new
- end
+ context 'when the pipeline does not contain needed data' do
+ it 'defines source to be unknown' do
+ pipeline.set_config_source
- it 'defines source to be unknown' do
- expect(pipeline).to be_unknown_source
- end
+ expect(pipeline).to be_unknown_source
+ end
+ end
+
+ context 'when the pipeline contains all needed data' do
+ let(:pipeline) do
+ create(:ci_pipeline, project: project,
+ sha: '1234',
+ ref: 'master',
+ source: :push)
end
- context 'when pipeline contains all needed data' do
- let(:pipeline) do
- Ci::Pipeline.new(
- project: project,
- sha: '1234',
- ref: 'master',
- source: :push)
+ context 'when the repository has a config file' do
+ before do
+ allow(project.repository).to receive(:gitlab_ci_yml_for)
+ .and_return('config')
end
- context 'when the repository has a config file' do
- before do
- allow(project.repository).to receive(:gitlab_ci_yml_for)
- .and_return('config')
- end
+ it 'defines source to be from repository' do
+ pipeline.set_config_source
- it 'defines source to be from repository' do
- expect(pipeline).to be_repository_source
- end
+ expect(pipeline).to be_repository_source
+ end
- context 'when loading an object' do
- let(:new_pipeline) { Ci::Pipeline.find(pipeline.id) }
+ context 'when loading an object' do
+ let(:new_pipeline) { Ci::Pipeline.find(pipeline.id) }
- it 'does not redefine the source' do
- # force to overwrite the source
- pipeline.unknown_source!
+ it 'does not redefine the source' do
+ # force to overwrite the source
+ pipeline.unknown_source!
- expect(new_pipeline).to be_unknown_source
- end
+ expect(new_pipeline).to be_unknown_source
end
end
+ end
- context 'when the repository does not have a config file' do
- let(:implied_yml) { Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps').content }
+ context 'when the repository does not have a config file' do
+ let(:implied_yml) { Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps').content }
- context 'auto devops enabled' do
- before do
- stub_application_setting(auto_devops_enabled: true)
- allow(project).to receive(:ci_config_path) { 'custom' }
- end
+ context 'auto devops enabled' do
+ before do
+ stub_application_setting(auto_devops_enabled: true)
+ allow(project).to receive(:ci_config_path) { 'custom' }
+ end
- it 'defines source to be auto devops' do
- subject
+ it 'defines source to be auto devops' do
+ pipeline.set_config_source
- expect(pipeline).to be_auto_devops_source
- end
+ expect(pipeline).to be_auto_devops_source
end
end
end
@@ -1188,7 +1244,7 @@ describe Ci::Pipeline, :mailer do
describe '#execute_hooks' do
let!(:build_a) { create_build('a', 0) }
- let!(:build_b) { create_build('b', 1) }
+ let!(:build_b) { create_build('b', 0) }
let!(:hook) do
create(:project_hook, project: project, pipeline_events: enabled)
@@ -1244,6 +1300,8 @@ describe Ci::Pipeline, :mailer do
end
context 'when stage one failed' do
+ let!(:build_b) { create_build('b', 1) }
+
before do
build_a.drop
end
@@ -1456,6 +1514,10 @@ describe Ci::Pipeline, :mailer do
create(:ci_build, :success, :artifacts, pipeline: pipeline)
end
+ it 'returns an Array' do
+ expect(pipeline.latest_builds_with_artifacts).to be_an_instance_of(Array)
+ end
+
it 'returns the latest builds' do
expect(pipeline.latest_builds_with_artifacts).to eq([build])
end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 584dfe9a5c1..a93e7e233a8 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -473,7 +473,7 @@ describe Ci::Runner do
end
describe '.search' do
- let(:runner) { create(:ci_runner, token: '123abc') }
+ let(:runner) { create(:ci_runner, token: '123abc', description: 'test runner') }
it 'returns runners with a matching token' do
expect(described_class.search(runner.token)).to eq([runner])
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index b91a5e7a272..2683d21ddbe 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -9,7 +9,6 @@ describe Clusters::Cluster do
it { is_expected.to delegate_method(:status_reason).to(:provider) }
it { is_expected.to delegate_method(:status_name).to(:provider) }
it { is_expected.to delegate_method(:on_creation?).to(:provider) }
- it { is_expected.to delegate_method(:update_kubernetes_integration!).to(:platform) }
it { is_expected.to respond_to :project }
describe '.enabled' do
@@ -199,4 +198,26 @@ describe Clusters::Cluster do
end
end
end
+
+ describe '#created?' do
+ let(:cluster) { create(:cluster, :provided_by_gcp) }
+
+ subject { cluster.created? }
+
+ context 'when status_name is :created' do
+ before do
+ allow(cluster).to receive_message_chain(:provider, :status_name).and_return(:created)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when status_name is not :created' do
+ before do
+ allow(cluster).to receive_message_chain(:provider, :status_name).and_return(:creating)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
end
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index ed76be703a5..53a4e545ff6 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -5,6 +5,8 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching
include ReactiveCachingHelpers
it { is_expected.to belong_to(:cluster) }
+ it { is_expected.to be_kind_of(Gitlab::Kubernetes) }
+ it { is_expected.to be_kind_of(ReactiveCaching) }
it { is_expected.to respond_to :ca_pem }
describe 'before_validation' do
@@ -90,99 +92,175 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching
end
end
- describe 'after_save from Clusters::Cluster' do
- context 'when platform_kubernetes is being cerated' do
- let(:enabled) { true }
- let(:project) { create(:project) }
- let(:cluster) { build(:cluster, provider_type: :gcp, platform_type: :kubernetes, platform_kubernetes: platform, provider_gcp: provider, enabled: enabled, projects: [project]) }
- let(:platform) { build(:cluster_platform_kubernetes, :configured) }
- let(:provider) { build(:cluster_provider_gcp) }
- let(:kubernetes_service) { project.kubernetes_service }
+ describe '#actual_namespace' do
+ subject { kubernetes.actual_namespace }
- it 'updates KubernetesService' do
- cluster.save!
+ let!(:cluster) { create(:cluster, :project, platform_kubernetes: kubernetes) }
+ let(:project) { cluster.project }
+ let(:kubernetes) { create(:cluster_platform_kubernetes, :configured, namespace: namespace) }
- expect(kubernetes_service.active).to eq(enabled)
- expect(kubernetes_service.api_url).to eq(platform.api_url)
- expect(kubernetes_service.namespace).to eq(platform.namespace)
- expect(kubernetes_service.ca_pem).to eq(platform.ca_cert)
- end
+ context 'when namespace is present' do
+ let(:namespace) { 'namespace-123' }
+
+ it { is_expected.to eq(namespace) }
end
- context 'when platform_kubernetes has been created' do
- let(:enabled) { false }
- let!(:project) { create(:project) }
- let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
- let(:platform) { cluster.platform }
- let(:kubernetes_service) { project.kubernetes_service }
+ context 'when namespace is not present' do
+ let(:namespace) { nil }
+
+ it { is_expected.to eq("#{project.path}-#{project.id}") }
+ end
+ end
- it 'updates KubernetesService' do
- cluster.update(enabled: enabled)
+ describe '#default_namespace' do
+ subject { kubernetes.send(:default_namespace) }
- expect(kubernetes_service.active).to eq(enabled)
+ let(:kubernetes) { create(:cluster_platform_kubernetes, :configured) }
+
+ context 'when cluster belongs to a project' do
+ let!(:cluster) { create(:cluster, :project, platform_kubernetes: kubernetes) }
+ let(:project) { cluster.project }
+
+ it { is_expected.to eq("#{project.path}-#{project.id}") }
+ end
+
+ context 'when cluster belongs to nothing' do
+ let!(:cluster) { create(:cluster, platform_kubernetes: kubernetes) }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#predefined_variables' do
+ let!(:cluster) { create(:cluster, :project, platform_kubernetes: kubernetes) }
+ let(:kubernetes) { create(:cluster_platform_kubernetes, api_url: api_url, ca_cert: ca_pem, token: token) }
+ let(:api_url) { 'https://kube.domain.com' }
+ let(:ca_pem) { 'CA PEM DATA' }
+ let(:token) { 'token' }
+
+ let(:kubeconfig) do
+ config_file = expand_fixture_path('config/kubeconfig.yml')
+ config = YAML.load(File.read(config_file))
+ config.dig('users', 0, 'user')['token'] = token
+ config.dig('contexts', 0, 'context')['namespace'] = namespace
+ config.dig('clusters', 0, 'cluster')['certificate-authority-data'] =
+ Base64.strict_encode64(ca_pem)
+
+ YAML.dump(config)
+ end
+
+ shared_examples 'setting variables' do
+ it 'sets the variables' do
+ expect(kubernetes.predefined_variables).to include(
+ { key: 'KUBE_URL', value: api_url, public: true },
+ { key: 'KUBE_TOKEN', value: token, public: false },
+ { key: 'KUBE_NAMESPACE', value: namespace, public: true },
+ { key: 'KUBECONFIG', value: kubeconfig, public: false, file: true },
+ { key: 'KUBE_CA_PEM', value: ca_pem, public: true },
+ { key: 'KUBE_CA_PEM_FILE', value: ca_pem, public: true, file: true }
+ )
end
end
- context 'when kubernetes_service has been configured without cluster integration' do
- let!(:project) { create(:project) }
- let(:cluster) { build(:cluster, provider_type: :gcp, platform_type: :kubernetes, platform_kubernetes: platform, provider_gcp: provider, projects: [project]) }
- let(:platform) { build(:cluster_platform_kubernetes, :configured, api_url: 'https://111.111.111.111') }
- let(:provider) { build(:cluster_provider_gcp) }
+ context 'namespace is provided' do
+ let(:namespace) { 'my-project' }
before do
- create(:kubernetes_service, project: project)
+ kubernetes.namespace = namespace
end
- it 'raises an error' do
- expect { cluster.save! }.to raise_error('Kubernetes service already configured')
+ it_behaves_like 'setting variables'
+ end
+
+ context 'no namespace provided' do
+ let(:namespace) { kubernetes.actual_namespace }
+
+ it_behaves_like 'setting variables'
+
+ it 'sets the KUBE_NAMESPACE' do
+ kube_namespace = kubernetes.predefined_variables.find { |h| h[:key] == 'KUBE_NAMESPACE' }
+
+ expect(kube_namespace).not_to be_nil
+ expect(kube_namespace[:value]).to match(/\A#{Gitlab::PathRegex::PATH_REGEX_STR}-\d+\z/)
end
end
end
- describe '#actual_namespace' do
- subject { kubernetes.actual_namespace }
+ describe '#terminals' do
+ subject { service.terminals(environment) }
- let!(:cluster) { create(:cluster, :project, platform_kubernetes: kubernetes) }
+ let!(:cluster) { create(:cluster, :project, platform_kubernetes: service) }
let(:project) { cluster.project }
- let(:kubernetes) { create(:cluster_platform_kubernetes, :configured, namespace: namespace) }
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+ let(:environment) { build(:environment, project: project, name: "env", slug: "env-000000") }
- context 'when namespace is present' do
- let(:namespace) { 'namespace-123' }
+ context 'with invalid pods' do
+ it 'returns no terminals' do
+ stub_reactive_cache(service, pods: [{ "bad" => "pod" }])
- it { is_expected.to eq(namespace) }
+ is_expected.to be_empty
+ end
end
- context 'when namespace is not present' do
- let(:namespace) { nil }
+ context 'with valid pods' do
+ let(:pod) { kube_pod(app: environment.slug) }
+ let(:terminals) { kube_terminals(service, pod) }
- it { is_expected.to eq("#{project.path}-#{project.id}") }
+ before do
+ stub_reactive_cache(
+ service,
+ pods: [pod, pod, kube_pod(app: "should-be-filtered-out")]
+ )
+ end
+
+ it 'returns terminals' do
+ is_expected.to eq(terminals + terminals)
+ end
+
+ it 'uses max session time from settings' do
+ stub_application_setting(terminal_max_session_time: 600)
+
+ times = subject.map { |terminal| terminal[:max_session_time] }
+ expect(times).to eq [600, 600, 600, 600]
+ end
end
end
- describe '.namespace_for_project' do
- subject { described_class.namespace_for_project(project) }
+ describe '#calculate_reactive_cache' do
+ subject { service.calculate_reactive_cache }
- let(:project) { create(:project) }
+ let!(:cluster) { create(:cluster, :project, enabled: enabled, platform_kubernetes: service) }
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+ let(:enabled) { true }
- it { is_expected.to eq("#{project.path}-#{project.id}") }
- end
+ context 'when cluster is disabled' do
+ let(:enabled) { false }
- describe '#default_namespace' do
- subject { kubernetes.default_namespace }
+ it { is_expected.to be_nil }
+ end
- let(:kubernetes) { create(:cluster_platform_kubernetes, :configured) }
+ context 'when kubernetes responds with valid pods' do
+ before do
+ stub_kubeclient_pods
+ end
- context 'when cluster belongs to a project' do
- let!(:cluster) { create(:cluster, :project, platform_kubernetes: kubernetes) }
- let(:project) { cluster.project }
+ it { is_expected.to eq(pods: [kube_pod]) }
+ end
- it { is_expected.to eq("#{project.path}-#{project.id}") }
+ context 'when kubernetes responds with 500s' do
+ before do
+ stub_kubeclient_pods(status: 500)
+ end
+
+ it { expect { subject }.to raise_error(KubeException) }
end
- context 'when cluster belongs to nothing' do
- let!(:cluster) { create(:cluster, platform_kubernetes: kubernetes) }
+ context 'when kubernetes responds with 404s' do
+ before do
+ stub_kubeclient_pods(status: 404)
+ end
- it { is_expected.to be_nil }
+ it { is_expected.to eq(pods: []) }
end
end
end
diff --git a/spec/models/commit_collection_spec.rb b/spec/models/commit_collection_spec.rb
new file mode 100644
index 00000000000..066fe7d154e
--- /dev/null
+++ b/spec/models/commit_collection_spec.rb
@@ -0,0 +1,59 @@
+require 'spec_helper'
+
+describe CommitCollection do
+ let(:project) { create(:project, :repository) }
+ let(:commit) { project.commit }
+
+ describe '#each' do
+ it 'yields every commit' do
+ collection = described_class.new(project, [commit])
+
+ expect { |b| collection.each(&b) }.to yield_with_args(commit)
+ end
+ end
+
+ describe '#with_pipeline_status' do
+ it 'sets the pipeline status for every commit so no additional queries are necessary' do
+ create(
+ :ci_empty_pipeline,
+ ref: 'master',
+ sha: commit.id,
+ status: 'success',
+ project: project
+ )
+
+ collection = described_class.new(project, [commit])
+ collection.with_pipeline_status
+
+ recorder = ActiveRecord::QueryRecorder.new do
+ expect(commit.status).to eq('success')
+ end
+
+ expect(recorder.count).to be_zero
+ end
+ end
+
+ describe '#respond_to_missing?' do
+ it 'returns true when the underlying Array responds to the message' do
+ collection = described_class.new(project, [])
+
+ expect(collection.respond_to?(:last)).to eq(true)
+ end
+
+ it 'returns false when the underlying Array does not respond to the message' do
+ collection = described_class.new(project, [])
+
+ expect(collection.respond_to?(:foo)).to eq(false)
+ end
+ end
+
+ describe '#method_missing' do
+ it 'delegates undefined methods to the underlying Array' do
+ collection = described_class.new(project, [commit])
+
+ expect(collection.length).to eq(1)
+ expect(collection.last).to eq(commit)
+ expect(collection).not_to be_empty
+ end
+ end
+end
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index e3cfa149e3a..d18a5c9dfa6 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -351,12 +351,19 @@ eos
end
it 'gives compound status from latest pipelines if ref is nil' do
- expect(commit.status(nil)).to eq(Ci::Pipeline.latest_status)
- expect(commit.status(nil)).to eq('failed')
+ expect(commit.status(nil)).to eq(pipeline_from_fix.status)
end
end
end
+ describe '#set_status_for_ref' do
+ it 'sets the status for a given reference' do
+ commit.set_status_for_ref('master', 'failed')
+
+ expect(commit.status('master')).to eq('failed')
+ end
+ end
+
describe '#participants' do
let(:user1) { build(:user) }
let(:user2) { build(:user) }
diff --git a/spec/models/concerns/avatarable_spec.rb b/spec/models/concerns/avatarable_spec.rb
new file mode 100644
index 00000000000..cbdc438be0b
--- /dev/null
+++ b/spec/models/concerns/avatarable_spec.rb
@@ -0,0 +1,44 @@
+require 'spec_helper'
+
+describe Avatarable do
+ subject { create(:project, avatar: fixture_file_upload(File.join(Rails.root, 'spec/fixtures/dk.png'))) }
+
+ let(:gitlab_host) { "https://gitlab.example.com" }
+ let(:relative_url_root) { "/gitlab" }
+ let(:asset_host) { "https://gitlab-assets.example.com" }
+
+ before do
+ stub_config_setting(base_url: gitlab_host)
+ stub_config_setting(relative_url_root: relative_url_root)
+ end
+
+ describe '#avatar_path' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:has_asset_host, :visibility_level, :only_path, :avatar_path) do
+ true | Project::PRIVATE | true | [gitlab_host, relative_url_root, subject.avatar.url]
+ true | Project::PRIVATE | false | [gitlab_host, relative_url_root, subject.avatar.url]
+ true | Project::INTERNAL | true | [gitlab_host, relative_url_root, subject.avatar.url]
+ true | Project::INTERNAL | false | [gitlab_host, relative_url_root, subject.avatar.url]
+ true | Project::PUBLIC | true | [subject.avatar.url]
+ true | Project::PUBLIC | false | [asset_host, subject.avatar.url]
+ false | Project::PRIVATE | true | [relative_url_root, subject.avatar.url]
+ false | Project::PRIVATE | false | [gitlab_host, relative_url_root, subject.avatar.url]
+ false | Project::INTERNAL | true | [relative_url_root, subject.avatar.url]
+ false | Project::INTERNAL | false | [gitlab_host, relative_url_root, subject.avatar.url]
+ false | Project::PUBLIC | true | [relative_url_root, subject.avatar.url]
+ false | Project::PUBLIC | false | [gitlab_host, relative_url_root, subject.avatar.url]
+ end
+
+ with_them do
+ before do
+ allow(ActionController::Base).to receive(:asset_host).and_return(has_asset_host ? asset_host : nil)
+ subject.visibility_level = visibility_level
+ end
+
+ it 'returns the expected avatar path' do
+ expect(subject.avatar_path(only_path: only_path)).to eq(avatar_path.join)
+ end
+ end
+ end
+end
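For readers new to the where/with_them blocks above: RSpec::Parameterized::TableSyntax declares a table of inputs and expected outputs, and with_them generates one example per row, exposing each column as a method. A minimal usage sketch:

require 'rspec-parameterized'

describe 'addition' do
  using RSpec::Parameterized::TableSyntax

  where(:a, :b, :sum) do
    1  | 2 | 3
    0  | 0 | 0
    -1 | 1 | 0
  end

  with_them do
    it { expect(a + b).to eq(sum) }
  end
end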
diff --git a/spec/models/concerns/has_variable_spec.rb b/spec/models/concerns/has_variable_spec.rb
index f4b24e6d1d9..f87869a2fdc 100644
--- a/spec/models/concerns/has_variable_spec.rb
+++ b/spec/models/concerns/has_variable_spec.rb
@@ -9,6 +9,24 @@ describe HasVariable do
it { is_expected.not_to allow_value('foo bar').for(:key) }
it { is_expected.not_to allow_value('foo/bar').for(:key) }
+ describe '#key=' do
+ context 'when the new key is nil' do
+ it 'coerces it to an empty string' do
+ subject.key = nil
+
+ expect(subject.key).to eq('')
+ end
+ end
+
+ context 'when the new key has leading and trailing whitespace' do
+ it 'strips the leading and trailing whitespace' do
+ subject.key = ' my key '
+
+ expect(subject.key).to eq('my key')
+ end
+ end
+ end
+
describe '#value' do
before do
subject.value = 'secret'
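One plausible shape of the #key= writer the new examples describe (an assumption consistent with the expectations, not necessarily GitLab's exact code): coerce nil to a string and strip surrounding whitespace before storing.

# Prepended into the model so `super` reaches the underlying attribute writer.
module HasVariableKeySketch
  def key=(new_key)
    # nil.to_s => '', so both contexts above are satisfied by one line.
    super(new_key.to_s.strip)
  end
end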
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index ba57301a3c9..9df26f06a11 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -67,6 +67,7 @@ describe Issuable do
describe ".search" do
let!(:searchable_issue) { create(:issue, title: "Searchable awesome issue") }
+ let!(:searchable_issue2) { create(:issue, title: 'Aw') }
it 'returns issues with a matching title' do
expect(issuable_class.search(searchable_issue.title))
@@ -86,8 +87,8 @@ describe Issuable do
expect(issuable_class.search('searchable issue')).to eq([searchable_issue])
end
- it 'returns all issues with a query shorter than 3 chars' do
- expect(issuable_class.search('zz')).to eq(issuable_class.all)
+ it 'returns issues with a matching title for a query shorter than 3 chars' do
+ expect(issuable_class.search(searchable_issue2.title.downcase)).to eq([searchable_issue2])
end
end
@@ -95,6 +96,7 @@ describe Issuable do
let!(:searchable_issue) do
create(:issue, title: "Searchable awesome issue", description: 'Many cute kittens')
end
+ let!(:searchable_issue2) { create(:issue, title: "Aw", description: "Cu") }
it 'returns issues with a matching title' do
expect(issuable_class.full_search(searchable_issue.title))
@@ -133,8 +135,8 @@ describe Issuable do
expect(issuable_class.full_search('many kittens')).to eq([searchable_issue])
end
- it 'returns all issues with a query shorter than 3 chars' do
- expect(issuable_class.search('zz')).to eq(issuable_class.all)
+ it 'returns issues with a matching description for a query shorter than 3 chars' do
+ expect(issuable_class.full_search(searchable_issue2.description.downcase)).to eq([searchable_issue2])
end
end
@@ -169,7 +171,7 @@ describe Issuable do
it "returns false when record has been updated" do
allow(issue).to receive(:today?).and_return(true)
- issue.touch
+ issue.update_attribute(:updated_at, 1.hour.ago)
expect(issue.new?).to be_falsey
end
end
@@ -265,25 +267,44 @@ describe Issuable do
end
describe '#to_hook_data' do
+ let(:builder) { double }
+
context 'labels are updated' do
let(:labels) { create_list(:label, 2) }
before do
issue.update(labels: [labels[1]])
+ expect(Gitlab::HookData::IssuableBuilder)
+ .to receive(:new).with(issue).and_return(builder)
end
it 'delegates to Gitlab::HookData::IssuableBuilder#build' do
- builder = double
+ expect(builder).to receive(:build).with(
+ user: user,
+ changes: hash_including(
+ 'labels' => [[labels[0].hook_attrs], [labels[1].hook_attrs]]
+ ))
+
+ issue.to_hook_data(user, old_associations: { labels: [labels[0]] })
+ end
+ end
+ context 'total_time_spent is updated' do
+ before do
+ issue.spend_time(duration: 2, user: user, spent_at: Time.now)
+ issue.save
expect(Gitlab::HookData::IssuableBuilder)
.to receive(:new).with(issue).and_return(builder)
+ end
+
+ it 'delegates to Gitlab::HookData::IssuableBuilder#build' do
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
- 'labels' => [[labels[0].hook_attrs], [labels[1].hook_attrs]]
+ 'total_time_spent' => [1, 2]
))
- issue.to_hook_data(user, old_labels: [labels[0]])
+ issue.to_hook_data(user, old_associations: { total_time_spent: 1 })
end
end
@@ -292,20 +313,18 @@ describe Issuable do
before do
issue.assignees << user << user2
+ expect(Gitlab::HookData::IssuableBuilder)
+ .to receive(:new).with(issue).and_return(builder)
end
it 'delegates to Gitlab::HookData::IssuableBuilder#build' do
- builder = double
-
- expect(Gitlab::HookData::IssuableBuilder)
- .to receive(:new).with(issue).and_return(builder)
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
'assignees' => [[user.hook_attrs], [user.hook_attrs, user2.hook_attrs]]
))
- issue.to_hook_data(user, old_assignees: [user])
+ issue.to_hook_data(user, old_associations: { assignees: [user] })
end
end
@@ -316,13 +335,11 @@ describe Issuable do
before do
merge_request.update(assignee: user)
merge_request.update(assignee: user2)
+ expect(Gitlab::HookData::IssuableBuilder)
+ .to receive(:new).with(merge_request).and_return(builder)
end
it 'delegates to Gitlab::HookData::IssuableBuilder#build' do
- builder = double
-
- expect(Gitlab::HookData::IssuableBuilder)
- .to receive(:new).with(merge_request).and_return(builder)
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
@@ -330,7 +347,7 @@ describe Issuable do
'assignee' => [user.hook_attrs, user2.hook_attrs]
))
- merge_request.to_hook_data(user, old_assignees: [user])
+ merge_request.to_hook_data(user, old_associations: { assignees: [user] })
end
end
end
diff --git a/spec/models/concerns/manual_inverse_association_spec.rb b/spec/models/concerns/manual_inverse_association_spec.rb
new file mode 100644
index 00000000000..aad40883854
--- /dev/null
+++ b/spec/models/concerns/manual_inverse_association_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+describe ManualInverseAssociation do
+ let(:model) do
+ Class.new(MergeRequest) do
+ belongs_to :manual_association, class_name: 'MergeRequestDiff', foreign_key: :latest_merge_request_diff_id
+ manual_inverse_association :manual_association, :merge_request
+ end
+ end
+
+ before do
+ stub_const("#{described_class}::Model", model)
+ end
+
+ let(:instance) { create(:merge_request).becomes(model) }
+
+ describe '.manual_inverse_association' do
+ context 'when the relation exists' do
+ before do
+ instance.create_merge_request_diff
+ instance.reload
+ end
+
+ it 'loads the relation' do
+ expect(instance.manual_association).to be_an_instance_of(MergeRequestDiff)
+ end
+
+ it 'does not perform extra queries after loading' do
+ instance.manual_association
+
+ expect { instance.manual_association.merge_request }
+ .not_to exceed_query_limit(0)
+ end
+
+ it 'passes arguments to the default association method, to allow reloading' do
+ query_count = ActiveRecord::QueryRecorder.new do
+ instance.manual_association
+ instance.manual_association(true)
+ end.count
+
+ expect(query_count).to eq(2)
+ end
+ end
+
+ context 'when the relation does not return a value' do
+ it 'does not try to set an inverse' do
+ expect(instance.manual_association).to be_nil
+ end
+ end
+ end
+end
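A hedged sketch of what a manual_inverse_association macro can do, consistent with the query counts above (GitLab's real implementation may differ): wrap the generated reader and, when it returns a record, point that record's inverse back at the owner so the round trip stays in memory.

module ManualInverseAssociationSketch
  def manual_inverse_association(association, inverse)
    define_method(association) do |*args|
      super(*args).tap do |record|
        # Setting the inverse in memory is what makes the
        # 'does not perform extra queries after loading' example pass.
        record&.public_send(:"#{inverse}=", self)
      end
    end
  end
end

# Usage, mirroring the spec's anonymous model:
#   class Model < MergeRequest
#     extend ManualInverseAssociationSketch
#     belongs_to :manual_association, class_name: 'MergeRequestDiff',
#                foreign_key: :latest_merge_request_diff_id
#     manual_inverse_association :manual_association, :merge_request
#   end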
diff --git a/spec/models/concerns/milestoneish_spec.rb b/spec/models/concerns/milestoneish_spec.rb
index 66353935427..9048da0c73d 100644
--- a/spec/models/concerns/milestoneish_spec.rb
+++ b/spec/models/concerns/milestoneish_spec.rb
@@ -186,4 +186,21 @@ describe Milestone, 'Milestoneish' do
expect(milestone.elapsed_days).to eq(2)
end
end
+
+ describe '#total_issue_time_spent' do
+ it 'calculates total issue time spent' do
+ closed_issue_1.spend_time(duration: 300, user: author)
+ closed_issue_1.save!
+ closed_issue_2.spend_time(duration: 600, user: assignee)
+ closed_issue_2.save!
+
+ expect(milestone.total_issue_time_spent).to eq(900)
+ end
+ end
+
+ describe '#human_total_issue_time_spent' do
+ it 'returns nil if no time has been spent' do
+ expect(milestone.human_total_issue_time_spent).to be_nil
+ end
+ end
end
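A minimal sketch of methods satisfying the two new Milestoneish examples (the timelogs join and ChronicDuration formatting are assumptions, not necessarily GitLab's exact code):

module MilestoneishTimeSpentSketch
  def total_issue_time_spent
    issues.joins(:timelogs).sum(:time_spent)
  end

  def human_total_issue_time_spent
    # Returns nil when nothing has been logged, as the spec expects.
    return if total_issue_time_spent.zero?

    ChronicDuration.output(total_issue_time_spent, format: :short)
  end
end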
diff --git a/spec/models/diff_note_spec.rb b/spec/models/diff_note_spec.rb
index da972d2d86a..4d0b3245a13 100644
--- a/spec/models/diff_note_spec.rb
+++ b/spec/models/diff_note_spec.rb
@@ -9,13 +9,14 @@ describe DiffNote do
let(:path) { "files/ruby/popen.rb" }
+ let(:diff_refs) { merge_request.diff_refs }
let!(:position) do
Gitlab::Diff::Position.new(
old_path: path,
new_path: path,
old_line: nil,
new_line: 14,
- diff_refs: merge_request.diff_refs
+ diff_refs: diff_refs
)
end
@@ -25,7 +26,7 @@ describe DiffNote do
new_path: path,
old_line: 16,
new_line: 22,
- diff_refs: merge_request.diff_refs
+ diff_refs: diff_refs
)
end
@@ -158,25 +159,21 @@ describe DiffNote do
describe "creation" do
describe "updating of position" do
context "when noteable is a commit" do
- let(:diff_note) { create(:diff_note_on_commit, project: project, position: position) }
+ let(:diff_refs) { commit.diff_refs }
- it "doesn't update the position" do
- diff_note
+ subject { create(:diff_note_on_commit, project: project, position: position, commit_id: commit.id) }
- expect(diff_note.original_position).to eq(position)
- expect(diff_note.position).to eq(position)
+ it "doesn't update the position" do
+ is_expected.to have_attributes(original_position: position,
+ position: position)
end
end
context "when noteable is a merge request" do
- let(:diff_note) { create(:diff_note_on_merge_request, project: project, position: position, noteable: merge_request) }
-
context "when the note is active" do
it "doesn't update the position" do
- diff_note
-
- expect(diff_note.original_position).to eq(position)
- expect(diff_note.position).to eq(position)
+ expect(subject.original_position).to eq(position)
+ expect(subject.position).to eq(position)
end
end
@@ -186,10 +183,8 @@ describe DiffNote do
end
it "updates the position" do
- diff_note
-
- expect(diff_note.original_position).to eq(position)
- expect(diff_note.position).not_to eq(position)
+ expect(subject.original_position).to eq(position)
+ expect(subject.position).not_to eq(position)
end
end
end
@@ -283,6 +278,12 @@ describe DiffNote do
expect(diff_line).to be nil
expect(subject).to be_valid
end
+
+ it "does not update the position" do
+ expect(subject).not_to receive(:update_position)
+
+ subject.save
+ end
end
it "returns true for on_image?" do
diff --git a/spec/models/diff_viewer/base_spec.rb b/spec/models/diff_viewer/base_spec.rb
index b26de3f3b97..c90b32c5d77 100644
--- a/spec/models/diff_viewer/base_spec.rb
+++ b/spec/models/diff_viewer/base_spec.rb
@@ -32,10 +32,8 @@ describe DiffViewer::Base do
end
context 'when the binaryness does not match' do
- before do
- allow(diff_file.old_blob).to receive(:binary?).and_return(false)
- allow(diff_file.new_blob).to receive(:binary?).and_return(false)
- end
+ let(:commit) { project.commit_by(oid: 'ae73cb07c9eeaf35924a10f713b364d32b2dd34f') }
+ let(:diff_file) { commit.diffs.diff_file_with_new_path('Gemfile.zip') }
it 'returns false' do
expect(viewer_class.can_render?(diff_file)).to be_falsey
@@ -60,8 +58,7 @@ describe DiffViewer::Base do
context 'when the binaryness does not match' do
before do
- allow(diff_file.old_blob).to receive(:binary?).and_return(true)
- allow(diff_file.new_blob).to receive(:binary?).and_return(true)
+ allow_any_instance_of(Blob).to receive(:binary?).and_return(true)
end
it 'returns false' do
@@ -77,12 +74,12 @@ describe DiffViewer::Base do
end
context 'when the file was renamed and only the old blob is supported' do
- let(:commit) { project.commit('2f63565e7aac07bcdadb654e253078b727143ec4') }
+ let(:commit) { project.commit_by(oid: '2f63565e7aac07bcdadb654e253078b727143ec4') }
let(:diff_file) { commit.diffs.diff_file_with_new_path('files/images/6049019_460s.jpg') }
before do
allow(diff_file).to receive(:renamed_file?).and_return(true)
- allow(diff_file.new_blob).to receive(:extension).and_return('jpeg')
+ viewer_class.extensions = %w(notjpg)
end
it 'returns false' do
@@ -94,8 +91,7 @@ describe DiffViewer::Base do
describe '#collapsed?' do
context 'when the combined blob size is larger than the collapse limit' do
before do
- allow(diff_file.old_blob).to receive(:raw_size).and_return(512.kilobytes)
- allow(diff_file.new_blob).to receive(:raw_size).and_return(513.kilobytes)
+ allow(diff_file).to receive(:raw_size).and_return(1025.kilobytes)
end
it 'returns true' do
@@ -113,8 +109,7 @@ describe DiffViewer::Base do
describe '#too_large?' do
context 'when the combined blob size is larger than the size limit' do
before do
- allow(diff_file.old_blob).to receive(:raw_size).and_return(2.megabytes)
- allow(diff_file.new_blob).to receive(:raw_size).and_return(4.megabytes)
+ allow(diff_file).to receive(:raw_size).and_return(6.megabytes)
end
it 'returns true' do
@@ -132,8 +127,7 @@ describe DiffViewer::Base do
describe '#render_error' do
context 'when the combined blob size is larger than the size limit' do
before do
- allow(diff_file.old_blob).to receive(:raw_size).and_return(2.megabytes)
- allow(diff_file.new_blob).to receive(:raw_size).and_return(4.megabytes)
+ allow(diff_file).to receive(:raw_size).and_return(6.megabytes)
end
it 'returns :too_large' do
diff --git a/spec/models/diff_viewer/server_side_spec.rb b/spec/models/diff_viewer/server_side_spec.rb
index 92e613f92de..98a8f6d4cc9 100644
--- a/spec/models/diff_viewer/server_side_spec.rb
+++ b/spec/models/diff_viewer/server_side_spec.rb
@@ -1,9 +1,9 @@
require 'spec_helper'
describe DiffViewer::ServerSide do
- let(:project) { create(:project, :repository) }
- let(:commit) { project.commit('570e7b2abdd848b95f2f578043fc23bd6f6fd24d') }
- let(:diff_file) { commit.diffs.diff_file_with_new_path('files/ruby/popen.rb') }
+ set(:project) { create(:project, :repository) }
+ let(:commit) { project.commit_by(oid: '570e7b2abdd848b95f2f578043fc23bd6f6fd24d') }
+ let!(:diff_file) { commit.diffs.diff_file_with_new_path('files/ruby/popen.rb') }
let(:viewer_class) do
Class.new(DiffViewer::Base) do
@@ -15,8 +15,7 @@ describe DiffViewer::ServerSide do
describe '#prepare!' do
it 'loads all diff file data' do
- expect(diff_file.old_blob).to receive(:load_all_data!)
- expect(diff_file.new_blob).to receive(:load_all_data!)
+ expect(Blob).to receive(:lazy).at_least(:twice)
subject.prepare!
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 1ce1d595c60..6f24a039998 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -327,15 +327,28 @@ describe Environment do
context 'when the environment is available' do
context 'with a deployment service' do
- let(:project) { create(:kubernetes_project) }
+ shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
+ context 'and a deployment' do
+ let!(:deployment) { create(:deployment, environment: environment) }
+ it { is_expected.to be_truthy }
+ end
- context 'and a deployment' do
- let!(:deployment) { create(:deployment, environment: environment) }
- it { is_expected.to be_truthy }
+ context 'but no deployments' do
+ it { is_expected.to be_falsy }
+ end
end
- context 'but no deployments' do
- it { is_expected.to be_falsy }
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ let(:project) { create(:kubernetes_project) }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
+ end
+
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
end
@@ -356,7 +369,6 @@ describe Environment do
end
describe '#terminals' do
- let(:project) { create(:kubernetes_project) }
subject { environment.terminals }
context 'when the environment has terminals' do
@@ -364,12 +376,27 @@ describe Environment do
allow(environment).to receive(:has_terminals?).and_return(true)
end
- it 'returns the terminals from the deployment service' do
- expect(project.deployment_service)
- .to receive(:terminals).with(environment)
- .and_return(:fake_terminals)
+ shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
+ it 'returns the terminals from the deployment service' do
+ expect(project.deployment_platform)
+ .to receive(:terminals).with(environment)
+ .and_return(:fake_terminals)
+
+ is_expected.to eq(:fake_terminals)
+ end
+ end
+
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ let(:project) { create(:kubernetes_project) }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
+ end
+
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
- is_expected.to eq(:fake_terminals)
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
end
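The shared_examples/it_behaves_like pattern above is how one assertion set covers both the legacy Integration > Kubernetes service and a CI/CD cluster without duplication. Stripped to its skeleton (names are illustrative):

shared_examples 'a deployment platform' do
  it 'exposes terminals' do
    expect(project.deployment_platform).to respond_to(:terminals)
  end
end

context 'with the legacy service integration' do
  let(:project) { create(:kubernetes_project) }

  it_behaves_like 'a deployment platform'
end

context 'with a GCP-provided cluster' do
  let(:project) { create(:cluster, :project, :provided_by_gcp).project }

  it_behaves_like 'a deployment platform'
end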
diff --git a/spec/models/fork_network_member_spec.rb b/spec/models/fork_network_member_spec.rb
index 532ca1fca8c..25bf596fddc 100644
--- a/spec/models/fork_network_member_spec.rb
+++ b/spec/models/fork_network_member_spec.rb
@@ -5,4 +5,22 @@ describe ForkNetworkMember do
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_presence_of(:fork_network) }
end
+
+ describe 'destroying a ForkNetworkMember' do
+ let(:fork_network_member) { create(:fork_network_member) }
+ let(:fork_network) { fork_network_member.fork_network }
+
+ it 'removes the fork network if it was the last member' do
+ fork_network.fork_network_members.destroy_all
+
+ expect(ForkNetwork.count).to eq(0)
+ end
+
+ it 'does not destroy the fork network if there are members left' do
+ fork_network_member.destroy!
+
+ # The root of the fork network is left
+ expect(ForkNetwork.count).to eq(1)
+ end
+ end
end
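One plausible implementation of the behavior pinned down here (the callback name and wiring are assumptions): tear the ForkNetwork down only when its last membership is destroyed.

class ForkNetworkMemberSketch < ActiveRecord::Base
  self.table_name = 'fork_network_members'

  belongs_to :fork_network

  after_destroy :cleanup_fork_network

  private

  # Any remaining member (e.g. the root project) keeps the network alive.
  def cleanup_fork_network
    fork_network.destroy if fork_network.fork_network_members.empty?
  end
end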
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index d4052a64570..5e82a2988ce 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -247,8 +247,6 @@ describe Group do
describe '#avatar_url' do
let!(:group) { create(:group, :access_requestable, :with_avatar) }
let(:user) { create(:user) }
- let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" }
- let(:avatar_path) { "/uploads/-/system/group/avatar/#{group.id}/dk.png" }
context 'when avatar file is uploaded' do
before do
@@ -256,12 +254,8 @@ describe Group do
end
it 'shows correct avatar url' do
- expect(group.avatar_url).to eq(avatar_path)
- expect(group.avatar_url(only_path: false)).to eq([gitlab_host, avatar_path].join)
-
- allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host)
-
- expect(group.avatar_url).to eq([gitlab_host, avatar_path].join)
+ expect(group.avatar_url).to eq(group.avatar.url)
+ expect(group.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, group.avatar.url].join)
end
end
end
diff --git a/spec/models/identity_spec.rb b/spec/models/identity_spec.rb
index 3ed048744de..a45a6088831 100644
--- a/spec/models/identity_spec.rb
+++ b/spec/models/identity_spec.rb
@@ -33,5 +33,15 @@ describe Identity do
expect(identity).to eq(ldap_identity)
end
end
+
+ context 'any other provider' do
+ let!(:test_entity) { create(:identity, provider: 'test_provider', extern_uid: 'test_uid') }
+
+ it 'looks up the extern_uid case-insensitively' do
+ identity = described_class.with_extern_uid('test_provider', 'TEST_UID').first
+
+ expect(identity).to eq(test_entity)
+ end
+ end
end
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index bb5033c1628..0ea287d007a 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -766,21 +766,7 @@ describe Issue do
end
end
- describe '#update_project_counter_caches?' do
- it 'returns true when the state changes' do
- subject.state = 'closed'
-
- expect(subject.update_project_counter_caches?).to eq(true)
- end
-
- it 'returns true when the confidential flag changes' do
- subject.confidential = true
-
- expect(subject.update_project_counter_caches?).to eq(true)
- end
-
- it 'returns false when the state or confidential flag did not change' do
- expect(subject.update_project_counter_caches?).to eq(false)
- end
+ it_behaves_like 'throttled touch' do
+ subject { create(:issue, updated_at: 1.hour.ago) }
end
end
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index 81c2057e175..4cd9e3f4f1d 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -166,4 +166,27 @@ describe Key, :mailer do
expect(key.public_key.key_text).to eq(valid_key)
end
end
+
+ describe '#refresh_user_cache', :use_clean_rails_memory_store_caching do
+ context 'when the key belongs to a user' do
+ it 'refreshes the keys count cache for the user' do
+ expect_any_instance_of(Users::KeysCountService)
+ .to receive(:refresh_cache)
+ .and_call_original
+
+ key = create(:personal_key)
+
+ expect(Users::KeysCountService.new(key.user).count).to eq(1)
+ end
+ end
+
+ context 'when the key does not belong to a user' do
+ it 'does nothing' do
+ expect_any_instance_of(Users::KeysCountService)
+ .not_to receive(:refresh_cache)
+
+ create(:key)
+ end
+ end
+ end
end
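The refresh_cache expectations above fit a small Rails-cache-backed counter. A self-contained sketch modeled on the Users::KeysCountService the spec names (cache key and class shape are assumptions):

class KeysCountServiceSketch
  def initialize(user)
    @user = user
  end

  # Serve the cached count, computing it on a miss.
  def count
    Rails.cache.fetch(cache_key) { @user.keys.count }
  end

  # Called from a Key callback so the next #count is fresh.
  def refresh_cache
    Rails.cache.delete(cache_key)
  end

  private

  def cache_key
    "users/#{@user.id}/keys_count"
  end
end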
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 0cfaa17676e..d556004eccf 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -1,8 +1,10 @@
require 'spec_helper'
describe MergeRequestDiff do
+ let(:diff_with_commits) { create(:merge_request).merge_request_diff }
+
describe 'create new record' do
- subject { create(:merge_request).merge_request_diff }
+ subject { diff_with_commits }
it { expect(subject).to be_valid }
it { expect(subject).to be_persisted }
@@ -18,62 +20,46 @@ describe MergeRequestDiff do
let!(:first_diff) { mr.merge_request_diff }
let!(:last_diff) { mr.create_merge_request_diff }
- it { expect(last_diff.latest?).to be_truthy }
- it { expect(first_diff.latest?).to be_falsey }
+ it { expect(last_diff.reload).to be_latest }
+ it { expect(first_diff.reload).not_to be_latest }
end
describe '#diffs' do
- let(:mr) { create(:merge_request, :with_diffs) }
- let(:mr_diff) { mr.merge_request_diff }
-
context 'when the :ignore_whitespace_change option is set' do
it 'creates a new compare object instead of loading from the DB' do
- expect(mr_diff).not_to receive(:load_diffs)
- expect(Gitlab::Git::Compare).to receive(:new).and_call_original
+ expect(diff_with_commits).not_to receive(:load_diffs)
+ expect(diff_with_commits.compare).to receive(:diffs).and_call_original
- mr_diff.raw_diffs(ignore_whitespace_change: true)
+ diff_with_commits.raw_diffs(ignore_whitespace_change: true)
end
end
context 'when the raw diffs are empty' do
before do
- MergeRequestDiffFile.delete_all(merge_request_diff_id: mr_diff.id)
- end
-
- it 'returns an empty DiffCollection' do
- expect(mr_diff.raw_diffs).to be_a(Gitlab::Git::DiffCollection)
- expect(mr_diff.raw_diffs).to be_empty
- end
- end
-
- context 'when the raw diffs have invalid content' do
- before do
- MergeRequestDiffFile.delete_all(merge_request_diff_id: mr_diff.id)
- mr_diff.update_attributes(st_diffs: ["--broken-diff"])
+ MergeRequestDiffFile.delete_all(merge_request_diff_id: diff_with_commits.id)
end
it 'returns an empty DiffCollection' do
- expect(mr_diff.raw_diffs.to_a).to be_empty
- expect(mr_diff.raw_diffs).to be_a(Gitlab::Git::DiffCollection)
- expect(mr_diff.raw_diffs).to be_empty
+ expect(diff_with_commits.raw_diffs).to be_a(Gitlab::Git::DiffCollection)
+ expect(diff_with_commits.raw_diffs).to be_empty
end
end
context 'when the raw diffs exist' do
it 'returns the diffs' do
- expect(mr_diff.raw_diffs).to be_a(Gitlab::Git::DiffCollection)
- expect(mr_diff.raw_diffs).not_to be_empty
+ expect(diff_with_commits.raw_diffs).to be_a(Gitlab::Git::DiffCollection)
+ expect(diff_with_commits.raw_diffs).not_to be_empty
end
context 'when the :paths option is set' do
- let(:diffs) { mr_diff.raw_diffs(paths: ['files/ruby/popen.rb', 'files/ruby/popen.rb']) }
+ let(:diffs) { diff_with_commits.raw_diffs(paths: ['files/ruby/popen.rb', 'files/ruby/popen.rb']) }
it 'only returns diffs that match the (old path, new path) given' do
expect(diffs.map(&:new_path)).to contain_exactly('files/ruby/popen.rb')
end
it 'uses the diffs from the DB' do
- expect(mr_diff).to receive(:load_diffs)
+ expect(diff_with_commits).to receive(:load_diffs)
diffs
end
@@ -117,51 +103,29 @@ describe MergeRequestDiff do
end
describe '#commit_shas' do
- it 'returns all commits SHA using serialized commits' do
- subject.st_commits = [
- { id: 'sha1' },
- { id: 'sha2' }
- ]
-
- expect(subject.commit_shas).to eq(%w(sha1 sha2))
+ it 'returns all commit SHAs using commits from the DB' do
+ expect(diff_with_commits.commit_shas).not_to be_empty
+ expect(diff_with_commits.commit_shas).to all(match(/\h{40}/))
end
end
describe '#compare_with' do
- subject { create(:merge_request, source_branch: 'fix').merge_request_diff }
-
it 'delegates compare to the service' do
expect(CompareService).to receive(:new).and_call_original
- subject.compare_with(nil)
+ diff_with_commits.compare_with(nil)
end
it 'uses git diff A..B approach by default' do
- diffs = subject.compare_with('0b4bc9a49b562e85de7cc9e834518ea6828729b9').diffs
+ diffs = diff_with_commits.compare_with('0b4bc9a49b562e85de7cc9e834518ea6828729b9').diffs
- expect(diffs.size).to eq(3)
+ expect(diffs.size).to eq(21)
end
end
describe '#commits_count' do
it 'returns number of commits using serialized commits' do
- subject.st_commits = [
- { id: 'sha1' },
- { id: 'sha2' }
- ]
-
- expect(subject.commits_count).to eq 2
- end
- end
-
- describe '#utf8_st_diffs' do
- it 'does not raise error when a hash value is in binary' do
- subject.st_diffs = [
- { diff: "\0" },
- { diff: "\x05\x00\x68\x65\x6c\x6c\x6f" }
- ]
-
- expect { subject.utf8_st_diffs }.not_to raise_error
+ expect(diff_with_commits.commits_count).to eq(29)
end
end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index d022dae3476..30a5a3bbff7 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -79,6 +79,43 @@ describe MergeRequest do
end
end
+ describe '.set_latest_merge_request_diff_ids!' do
+ def create_merge_request_with_diffs(source_branch, diffs: 2)
+ params = {
+ target_project: project,
+ target_branch: 'master',
+ source_project: project,
+ source_branch: source_branch
+ }
+
+ create(:merge_request, params).tap do |mr|
+ diffs.times { mr.merge_request_diffs.create }
+ end
+ end
+
+ let(:project) { create(:project) }
+
+ it 'sets IDs for merge requests, whether they are already set or not' do
+ merge_requests = [
+ create_merge_request_with_diffs('feature'),
+ create_merge_request_with_diffs('feature-conflict'),
+ create_merge_request_with_diffs('wip', diffs: 0),
+ create_merge_request_with_diffs('csv')
+ ]
+
+ merge_requests.take(2).each do |merge_request|
+ merge_request.update_column(:latest_merge_request_diff_id, nil)
+ end
+
+ expected = merge_requests.map do |merge_request|
+ merge_request.merge_request_diffs.maximum(:id)
+ end
+
+ expect { project.merge_requests.set_latest_merge_request_diff_ids! }
+ .to change { merge_requests.map { |mr| mr.reload.latest_merge_request_diff_id } }.to(expected)
+ end
+ end
+
describe '#target_branch_sha' do
let(:project) { create(:project, :repository) }
@@ -222,7 +259,7 @@ describe MergeRequest do
end
describe '#source_branch_sha' do
- let(:last_branch_commit) { subject.source_project.repository.commit(subject.source_branch) }
+ let(:last_branch_commit) { subject.source_project.repository.commit(Gitlab::Git::BRANCH_REF_PREFIX + subject.source_branch) }
context 'with diffs' do
subject { create(:merge_request, :with_diffs) }
@@ -236,6 +273,21 @@ describe MergeRequest do
it 'returns the sha of the source branch last commit' do
expect(subject.source_branch_sha).to eq(last_branch_commit.sha)
end
+
+ context 'when there is a tag name matching the branch name' do
+ let(:tag_name) { subject.source_branch }
+
+ it 'returns the sha of the source branch last commit' do
+ subject.source_project.repository.add_tag(subject.author,
+ tag_name,
+ subject.target_branch_sha,
+ 'Add a tag')
+
+ expect(subject.source_branch_sha).to eq(last_branch_commit.sha)
+
+ subject.source_project.repository.rm_tag(subject.author, tag_name)
+ end
+ end
end
context 'when the merge request is being created' do
@@ -775,20 +827,47 @@ describe MergeRequest do
end
end
- describe '#head_pipeline' do
- describe 'when the source project exists' do
- it 'returns the latest pipeline' do
- pipeline = create(:ci_empty_pipeline, project: subject.source_project, ref: 'master', status: 'running', sha: "123abc", head_pipeline_of: subject)
+ context 'head pipeline' do
+ before do
+ allow(subject).to receive(:diff_head_sha).and_return('lastsha')
+ end
+
+ describe '#head_pipeline' do
+ it 'returns nil for MR without head_pipeline_id' do
+ subject.update_attribute(:head_pipeline_id, nil)
+
+ expect(subject.head_pipeline).to be_nil
+ end
+
+ context 'when the source project does not exist' do
+ it 'returns nil' do
+ allow(subject).to receive(:source_project).and_return(nil)
- expect(subject.head_pipeline).to eq(pipeline)
+ expect(subject.head_pipeline).to be_nil
+ end
end
end
- describe 'when the source project does not exist' do
- it 'returns nil' do
+ describe '#actual_head_pipeline' do
+ it 'returns nil for MR with old pipeline' do
+ pipeline = create(:ci_empty_pipeline, sha: 'notlatestsha')
+ subject.update_attribute(:head_pipeline_id, pipeline.id)
+
+ expect(subject.actual_head_pipeline).to be_nil
+ end
+
+ it 'returns the pipeline for MR with recent pipeline' do
+ pipeline = create(:ci_empty_pipeline, sha: 'lastsha')
+ subject.update_attribute(:head_pipeline_id, pipeline.id)
+
+ expect(subject.actual_head_pipeline).to eq(subject.head_pipeline)
+ expect(subject.actual_head_pipeline).to eq(pipeline)
+ end
+
+ it 'returns nil when source project does not exist' do
allow(subject).to receive(:source_project).and_return(nil)
- expect(subject.head_pipeline).to be_nil
+ expect(subject.actual_head_pipeline).to be_nil
end
end
end
@@ -888,7 +967,7 @@ describe MergeRequest do
end
shared_examples 'returning all SHA' do
- it 'returns all SHA from all merge_request_diffs' do
+ it 'returns all SHAs from all merge_request_diffs' do
expect(subject.merge_request_diffs.size).to eq(2)
expect(subject.all_commit_shas).to match_array(all_commit_shas)
end
@@ -896,7 +975,7 @@ describe MergeRequest do
context 'with a completely different branch' do
before do
- subject.update(target_branch: 'v1.0.0')
+ subject.update(target_branch: 'csv')
end
it_behaves_like 'returning all SHA'
@@ -904,7 +983,7 @@ describe MergeRequest do
context 'with a branch having no difference' do
before do
- subject.update(target_branch: 'v1.1.0')
+ subject.update(target_branch: 'branch-merged')
subject.reload # make sure commits were not cached
end
@@ -1127,7 +1206,7 @@ describe MergeRequest do
context 'when it is only allowed to merge when build is green' do
context 'and a failed pipeline is associated' do
before do
- pipeline.update(status: 'failed')
+ pipeline.update(status: 'failed', sha: subject.diff_head_sha)
allow(subject).to receive(:head_pipeline) { pipeline }
end
@@ -1136,7 +1215,7 @@ describe MergeRequest do
context 'and a successful pipeline is associated' do
before do
- pipeline.update(status: 'success')
+ pipeline.update(status: 'success', sha: subject.diff_head_sha)
allow(subject).to receive(:head_pipeline) { pipeline }
end
@@ -1145,7 +1224,7 @@ describe MergeRequest do
context 'and a skipped pipeline is associated' do
before do
- pipeline.update(status: 'skipped')
+ pipeline.update(status: 'skipped', sha: subject.diff_head_sha)
allow(subject).to receive(:head_pipeline) { pipeline }
end
@@ -1773,15 +1852,7 @@ describe MergeRequest do
end
end
- describe '#update_project_counter_caches?' do
- it 'returns true when the state changes' do
- subject.state = 'closed'
-
- expect(subject.update_project_counter_caches?).to eq(true)
- end
-
- it 'returns false when the state did not change' do
- expect(subject.update_project_counter_caches?).to eq(false)
- end
+ it_behaves_like 'throttled touch' do
+ subject { create(:merge_request, updated_at: 1.hour.ago) }
end
end
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index 13e37fffa4e..47f4a792e5c 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -11,7 +11,7 @@ describe Milestone do
milestone = build(:milestone, start_date: Date.tomorrow, due_date: Date.yesterday)
expect(milestone).not_to be_valid
- expect(milestone.errors[:start_date]).to include("Can't be greater than due date")
+ expect(milestone.errors[:due_date]).to include("must be greater than start date")
end
end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 90b768f595e..3817f20bfe7 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -531,7 +531,7 @@ describe Namespace do
end
end
- describe '#has_forks_of?' do
+ describe '#find_fork_of' do
let(:project) { create(:project, :public) }
let!(:forked_project) { fork_project(project, namespace.owner, namespace: namespace) }
@@ -550,5 +550,13 @@ describe Namespace do
expect(other_namespace.find_fork_of(project)).to eq(other_fork)
end
+
+ context 'with request store enabled', :request_store do
+ it 'only queries once' do
+ expect(project.fork_network).to receive(:find_forks_in).once.and_call_original
+
+ 2.times { namespace.find_fork_of(project) }
+ end
+ end
end
end
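A hedged sketch of the request-store memoization the 'only queries once' example measures (the key format is an assumption; find_forks_in is the method the spec itself stubs):

def find_fork_of(project)
  return nil unless project.fork_network

  if RequestStore.active?
    # At most one fork lookup per (namespace, project) pair per request.
    RequestStore.fetch("namespaces:#{id}:fork_of:#{project.id}") do
      project.fork_network.find_forks_in(projects).first
    end
  else
    project.fork_network.find_forks_in(projects).first
  end
end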
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 1ecb50586c7..e1a0c55b6a6 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -5,7 +5,7 @@ describe Note do
describe 'associations' do
it { is_expected.to belong_to(:project) }
- it { is_expected.to belong_to(:noteable).touch(true) }
+ it { is_expected.to belong_to(:noteable).touch(false) }
it { is_expected.to belong_to(:author).class_name('User') }
it { is_expected.to have_many(:todos).dependent(:destroy) }
@@ -231,6 +231,37 @@ describe Note do
end
end
+ describe '#cross_reference?' do
+ it 'falsey for user-generated notes' do
+ note = create(:note, system: false)
+
+ expect(note.cross_reference?).to be_falsy
+ end
+
+ context 'when the note might contain cross references' do
+ SystemNoteMetadata::TYPES_WITH_CROSS_REFERENCES.each do |type|
+ let(:note) { create(:note, :system) }
+ let!(:metadata) { create(:system_note_metadata, note: note, action: type) }
+
+ it 'delegates to the cross-reference regex' do
+ expect(note).to receive(:matches_cross_reference_regex?).and_return(false)
+
+ note.cross_reference?
+ end
+ end
+ end
+
+ context 'when the note cannot contain cross references' do
+ let(:commit_note) { build(:note, note: 'mentioned in 1312312313 something else.', system: true) }
+ let(:label_note) { build(:note, note: 'added ~2323232323', system: true) }
+
+ it 'scans for a `mentioned in` prefix' do
+ expect(commit_note.cross_reference?).to be_truthy
+ expect(label_note.cross_reference?).to be_falsy
+ end
+ end
+ end
+
describe 'clear_blank_line_code!' do
it 'clears a blank line code before validation' do
note = build(:note, line_code: ' ')
diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb
index 01440b15674..2bb1c49b740 100644
--- a/spec/models/personal_access_token_spec.rb
+++ b/spec/models/personal_access_token_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe PersonalAccessToken do
+ subject { described_class }
+
describe '.build' do
let(:personal_access_token) { build(:personal_access_token) }
let(:invalid_personal_access_token) { build(:personal_access_token, :invalid) }
@@ -45,6 +47,29 @@ describe PersonalAccessToken do
end
end
+ describe 'Redis storage' do
+ let(:user_id) { 123 }
+ let(:token) { 'abc000foo' }
+
+ before do
+ subject.redis_store!(user_id, token)
+ end
+
+ it 'returns stored data' do
+ expect(subject.redis_getdel(user_id)).to eq(token)
+ end
+
+ context 'after deletion' do
+ before do
+ expect(subject.redis_getdel(user_id)).to eq(token)
+ end
+
+ it 'token is removed' do
+ expect(subject.redis_getdel(user_id)).to be_nil
+ end
+ end
+ end
+
context "validations" do
let(:personal_access_token) { build(:personal_access_token) }
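The Redis storage examples above describe a store-once, claim-once token hand-off. A minimal sketch using redis-rb (key format and TTL are assumptions; 4.x-style MULTI block):

require 'redis'

class TokenStashSketch
  REDIS = Redis.new

  def self.redis_store!(user_id, token)
    REDIS.set("personal_access_tokens:#{user_id}", token, ex: 300)
  end

  # GET and DEL run atomically, so the token can be claimed exactly once;
  # a second call returns nil, matching the 'after deletion' context.
  def self.redis_getdel(user_id)
    key = "personal_access_tokens:#{user_id}"
    value, _deleted = REDIS.multi do
      REDIS.get(key)
      REDIS.del(key)
    end
    value
  end
end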
diff --git a/spec/models/project_services/flowdock_service_spec.rb b/spec/models/project_services/flowdock_service_spec.rb
index 5e8e880985e..fabcb142858 100644
--- a/spec/models/project_services/flowdock_service_spec.rb
+++ b/spec/models/project_services/flowdock_service_spec.rb
@@ -46,6 +46,7 @@ describe FlowdockService do
@sample_data[:commits].each do |commit|
# One request to Flowdock per new commit
next if commit[:id] == @sample_data[:before]
+
expect(WebMock).to have_requested(:post, @api_url).with(
body: /#{commit[:id]}.*#{project.path}/
).once
diff --git a/spec/models/project_services/kubernetes_service_spec.rb b/spec/models/project_services/kubernetes_service_spec.rb
index 1c629155e1e..f037ee77a94 100644
--- a/spec/models/project_services/kubernetes_service_spec.rb
+++ b/spec/models/project_services/kubernetes_service_spec.rb
@@ -4,8 +4,8 @@ describe KubernetesService, :use_clean_rails_memory_store_caching do
include KubernetesHelpers
include ReactiveCachingHelpers
- let(:project) { build_stubbed(:kubernetes_project) }
- let(:service) { project.kubernetes_service }
+ let(:project) { create(:kubernetes_project) }
+ let(:service) { project.deployment_platform }
describe 'Associations' do
it { is_expected.to belong_to :project }
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 6185f55c1dc..f4699fd243d 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -78,7 +78,7 @@ describe Project do
it { is_expected.to have_many(:uploads).dependent(:destroy) }
it { is_expected.to have_many(:pipeline_schedules) }
it { is_expected.to have_many(:members_and_requesters) }
- it { is_expected.to have_one(:cluster) }
+ it { is_expected.to have_many(:clusters) }
it { is_expected.to have_many(:custom_attributes).class_name('ProjectCustomAttribute') }
context 'after initialized' do
@@ -138,6 +138,7 @@ describe Project do
it { is_expected.to validate_length_of(:ci_config_path).is_at_most(255) }
it { is_expected.to allow_value('').for(:ci_config_path) }
it { is_expected.not_to allow_value('test/../foo').for(:ci_config_path) }
+ it { is_expected.not_to allow_value('/test/foo').for(:ci_config_path) }
it { is_expected.to validate_presence_of(:creator) }
@@ -312,9 +313,7 @@ describe Project do
it { is_expected.to delegate_method(method).to(:team) }
end
- it { is_expected.to delegate_method(:empty_repo?).to(:repository) }
it { is_expected.to delegate_method(:members).to(:team).with_prefix(true) }
- it { is_expected.to delegate_method(:count).to(:forks).with_prefix(true) }
it { is_expected.to delegate_method(:name).to(:owner).with_prefix(true).with_arguments(allow_nil: true) }
end
@@ -451,7 +450,7 @@ describe Project do
end
end
- describe "#new_issue_address" do
+ describe "#new_issuable_address" do
let(:project) { create(:project, path: "somewhere") }
let(:user) { create(:user) }
@@ -463,7 +462,13 @@ describe Project do
it 'returns the address to create a new issue' do
address = "p+#{project.full_path}+#{user.incoming_email_token}@gl.ab"
- expect(project.new_issue_address(user)).to eq(address)
+ expect(project.new_issuable_address(user, 'issue')).to eq(address)
+ end
+
+ it 'returns the address to create a new merge request' do
+ address = "p+#{project.full_path}+merge-request+#{user.incoming_email_token}@gl.ab"
+
+ expect(project.new_issuable_address(user, 'merge_request')).to eq(address)
end
end
@@ -473,7 +478,11 @@ describe Project do
end
it 'returns nil' do
- expect(project.new_issue_address(user)).to be_nil
+ expect(project.new_issuable_address(user, 'issue')).to be_nil
+ end
+
+ it 'returns nil for a merge request address' do
+ expect(project.new_issuable_address(user, 'merge_request')).to be_nil
end
end
end
@@ -646,6 +655,24 @@ describe Project do
end
end
+ describe '#empty_repo?' do
+ context 'when the repo does not exist' do
+ let(:project) { build_stubbed(:project) }
+
+ it 'returns true' do
+ expect(project.empty_repo?).to be(true)
+ end
+ end
+
+ context 'when the repo exists' do
+ let(:project) { create(:project, :repository) }
+ let(:empty_project) { create(:project, :empty_repo) }
+
+ it { expect(empty_project.empty_repo?).to be(true) }
+ it { expect(project.empty_repo?).to be(false) }
+ end
+ end
+
describe '#external_issue_tracker' do
let(:project) { create(:project) }
let(:ext_project) { create(:redmine_project) }
@@ -883,20 +910,14 @@ describe Project do
context 'when avatar file is uploaded' do
let(:project) { create(:project, :public, :with_avatar) }
- let(:avatar_path) { "/uploads/-/system/project/avatar/#{project.id}/dk.png" }
- let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" }
it 'shows correct url' do
- expect(project.avatar_url).to eq(avatar_path)
- expect(project.avatar_url(only_path: false)).to eq([gitlab_host, avatar_path].join)
-
- allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host)
-
- expect(project.avatar_url).to eq([gitlab_host, avatar_path].join)
+ expect(project.avatar_url).to eq(project.avatar.url)
+ expect(project.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, project.avatar.url].join)
end
end
- context 'When avatar file in git' do
+ context 'when avatar file in git' do
before do
allow(project).to receive(:avatar_in_git) { true }
end
@@ -1260,24 +1281,6 @@ describe Project do
expect(described_class.search(project.path.upcase)).to eq([project])
end
- it 'returns projects with a matching namespace name' do
- expect(described_class.search(project.namespace.name)).to eq([project])
- end
-
- it 'returns projects with a partially matching namespace name' do
- expect(described_class.search(project.namespace.name[0..2])).to eq([project])
- end
-
- it 'returns projects with a matching namespace name regardless of the casing' do
- expect(described_class.search(project.namespace.name.upcase)).to eq([project])
- end
-
- it 'returns projects when eager loading namespaces' do
- relation = described_class.all.includes(:namespace)
-
- expect(relation.search(project.namespace.name)).to eq([project])
- end
-
describe 'with pending_delete project' do
let(:pending_delete_project) { create(:project, pending_delete: true) }
@@ -1572,8 +1575,8 @@ describe Project do
expect(project.ci_config_path).to eq('foo/.gitlab_ci.yml')
end
- it 'sets a string but removes all leading slashes and null characters' do
- project.update!(ci_config_path: "///f\0oo/\0/.gitlab_ci.yml")
+ it 'sets a string but removes all null characters' do
+ project.update!(ci_config_path: "f\0oo/\0/.gitlab_ci.yml")
expect(project.ci_config_path).to eq('foo//.gitlab_ci.yml')
end
@@ -1740,8 +1743,7 @@ describe Project do
expect(RepositoryForkWorker).to receive(:perform_async).with(
project.id,
forked_from_project.repository_storage_path,
- forked_from_project.disk_path,
- project.namespace.full_path).and_return(import_jid)
+ forked_from_project.disk_path).and_return(import_jid)
expect(project.add_import_job).to eq(import_jid)
end
@@ -1944,6 +1946,24 @@ describe Project do
expect(second_fork.fork_source).to eq(project)
end
end
+
+ describe '#lfs_storage_project' do
+ it 'returns self for non-forks' do
+ expect(project.lfs_storage_project).to eq project
+ end
+
+ it 'returns the fork network root for forks' do
+ second_fork = fork_project(forked_project)
+
+ expect(second_fork.lfs_storage_project).to eq project
+ end
+
+ it 'returns self when fork_source is nil' do
+ expect(forked_project).to receive(:fork_source).and_return(nil)
+
+ expect(forked_project.lfs_storage_project).to eq forked_project
+ end
+ end
end
describe '#pushes_since_gc' do
@@ -2008,12 +2028,25 @@ describe Project do
end
context 'when project has a deployment service' do
- let(:project) { create(:kubernetes_project) }
+ shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
+ it 'returns variables from this service' do
+ expect(project.deployment_variables).to include(
+ { key: 'KUBE_TOKEN', value: project.deployment_platform.token, public: false }
+ )
+ end
+ end
+
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ let(:project) { create(:kubernetes_project) }
- it 'returns variables from this service' do
- expect(project.deployment_variables).to include(
- { key: 'KUBE_TOKEN', value: project.kubernetes_service.token, public: false }
- )
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
+ end
+
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
end
end
@@ -2465,7 +2498,7 @@ describe Project do
it 'returns the number of forks' do
project = build(:project)
- allow(project.forks).to receive(:count).and_return(1)
+ expect_any_instance_of(Projects::ForksCountService).to receive(:count).and_return(1)
expect(project.forks_count).to eq(1)
end
@@ -3016,4 +3049,96 @@ describe Project do
end
end
end
+
+ describe '#after_import' do
+ let(:project) { build(:project) }
+
+ it 'runs the correct hooks' do
+ expect(project.repository).to receive(:after_import)
+ expect(project).to receive(:import_finish)
+ expect(project).to receive(:update_project_counter_caches)
+ expect(project).to receive(:remove_import_jid)
+
+ project.after_import
+ end
+ end
+
+ describe '#update_project_counter_caches' do
+ let(:project) { create(:project) }
+
+ it 'updates all project counter caches' do
+ expect_any_instance_of(Projects::OpenIssuesCountService)
+ .to receive(:refresh_cache)
+ .and_call_original
+
+ expect_any_instance_of(Projects::OpenMergeRequestsCountService)
+ .to receive(:refresh_cache)
+ .and_call_original
+
+ project.update_project_counter_caches
+ end
+ end
+
+ describe '#remove_import_jid', :clean_gitlab_redis_cache do
+
+ context 'without an import JID' do
+ it 'does nothing' do
+ project = create(:project)
+
+ expect(Gitlab::SidekiqStatus)
+ .not_to receive(:unset)
+
+ project.remove_import_jid
+ end
+ end
+
+ context 'with an import JID' do
+ it 'unsets the import JID' do
+ project = create(:project, import_jid: '123')
+
+ expect(Gitlab::SidekiqStatus)
+ .to receive(:unset)
+ .with('123')
+ .and_call_original
+
+ project.remove_import_jid
+
+ expect(project.import_jid).to be_nil
+ end
+ end
+ end
+
+ describe '#wiki_repository_exists?' do
+ it 'returns true when the wiki repository exists' do
+ project = create(:project, :wiki_repo)
+
+ expect(project.wiki_repository_exists?).to eq(true)
+ end
+
+ it 'returns false when the wiki repository does not exist' do
+ project = create(:project)
+
+ expect(project.wiki_repository_exists?).to eq(false)
+ end
+ end
+
+ describe '#deployment_platform' do
+ subject { project.deployment_platform }
+
+ let(:project) { create(:project) }
+
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ let!(:kubernetes_service) { create(:kubernetes_service, project: project) }
+
+ it { is_expected.to eq(kubernetes_service) }
+ end
+
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
+ let(:platform_kubernetes) { cluster.platform_kubernetes }
+
+ it { is_expected.to eq(platform_kubernetes) }
+ end
+ end
end
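A hedged sketch of the #deployment_platform lookup those contexts imply (the preference order and scope names are assumptions; the spec only pins down each branch in isolation):

# Prefer a CI/CD cluster's Platforms::Kubernetes, falling back to the
# legacy Integration > Kubernetes service.
def deployment_platform
  @deployment_platform ||=
    clusters.find_by(enabled: true)&.platform_kubernetes ||
    kubernetes_service
end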
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 59e20e84c2f..e78ed1df821 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -133,15 +133,29 @@ describe ProjectStatistics do
describe '#update_build_artifacts_size' do
let!(:pipeline) { create(:ci_pipeline, project: project) }
- let!(:build1) { create(:ci_build, pipeline: pipeline, artifacts_size: 45.megabytes) }
- let!(:build2) { create(:ci_build, pipeline: pipeline, artifacts_size: 56.megabytes) }
- before do
- statistics.update_build_artifacts_size
+ context 'when new job artifacts are calculated' do
+ let(:ci_build) { create(:ci_build, pipeline: pipeline) }
+
+ before do
+ create(:ci_job_artifact, :archive, project: pipeline.project, job: ci_build)
+ end
+
+ it "stores the size of related build artifacts" do
+ statistics.update_build_artifacts_size
+
+ expect(statistics.build_artifacts_size).to be(106365)
+ end
end
- it "stores the size of related build artifacts" do
- expect(statistics.build_artifacts_size).to eq 101.megabytes
+ context 'when legacy artifacts are used' do
+ let!(:ci_build) { create(:ci_build, pipeline: pipeline, artifacts_size: 10.megabytes) }
+
+ it "stores the size of related build artifacts" do
+ statistics.update_build_artifacts_size
+
+ expect(statistics.build_artifacts_size).to eq(10.megabytes)
+ end
end
end
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb
index 3d46434fc27..929086305ba 100644
--- a/spec/models/project_wiki_spec.rb
+++ b/spec/models/project_wiki_spec.rb
@@ -10,6 +10,10 @@ describe ProjectWiki do
subject { project_wiki }
+ it { is_expected.to delegate_method(:empty?).to :pages }
+ it { is_expected.to delegate_method(:repository_storage_path).to :project }
+ it { is_expected.to delegate_method(:hashed_storage?).to :project }
+
describe "#path_with_namespace" do
it "returns the project path with namespace with the .wiki extension" do
expect(subject.path_with_namespace).to eq(project.full_path + '.wiki')
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 8a6aa767ce6..358bc3dfb94 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -29,7 +29,9 @@ describe Repository do
def expect_to_raise_storage_error
expect { yield }.to raise_error do |exception|
storage_exceptions = [Gitlab::Git::Storage::Inaccessible, Gitlab::Git::CommandError, GRPC::Unavailable]
- expect(exception.class).to be_in(storage_exceptions)
+ known_exception = storage_exceptions.find { |e| exception.is_a?(e) }
+
+ expect(known_exception).not_to be_nil
end
end
@@ -299,24 +301,6 @@ describe Repository do
it { is_expected.to be_falsey }
end
-
- context 'when pre-loaded merged branches are provided' do
- using RSpec::Parameterized::TableSyntax
-
- where(:branch, :pre_loaded, :expected) do
- 'not-merged-branch' | ['branch-merged'] | false
- 'branch-merged' | ['not-merged-branch'] | false
- 'branch-merged' | ['branch-merged'] | true
- 'not-merged-branch' | ['not-merged-branch'] | false
- 'master' | ['master'] | false
- end
-
- with_them do
- subject { repository.merged_to_root_ref?(branch, pre_loaded) }
-
- it { is_expected.to eq(expected) }
- end
- end
end
describe '#can_be_merged?' do
@@ -601,7 +585,7 @@ describe Repository do
end
it 'properly handles query when repo is empty' do
- repository = create(:project).repository
+ repository = create(:project, :empty_repo).repository
results = repository.search_files_by_content('test', 'master')
expect(results).to match_array([])
@@ -637,7 +621,7 @@ describe Repository do
end
it 'properly handles query when repo is empty' do
- repository = create(:project).repository
+ repository = create(:project, :empty_repo).repository
results = repository.search_files_by_name('test', 'master')
@@ -652,9 +636,7 @@ describe Repository do
end
describe '#fetch_ref' do
- # Setting the var here, sidesteps the stub that makes gitaly raise an error
- # before the actual test call
- set(:broken_repository) { create(:project, :broken_storage).repository }
+ let(:broken_repository) { create(:project, :broken_storage).repository }
describe 'when storage is broken', :broken_storage do
it 'should raise a storage error' do
@@ -1166,6 +1148,31 @@ describe Repository do
end
end
+ describe '#branch_exists?' do
+ it 'uses branch_names' do
+ allow(repository).to receive(:branch_names).and_return(['foobar'])
+
+ expect(repository.branch_exists?('foobar')).to eq(true)
+ expect(repository.branch_exists?('master')).to eq(false)
+ end
+ end
+
+ describe '#branch_names', :use_clean_rails_memory_store_caching do
+ let(:fake_branch_names) { ['foobar'] }
+
+ it 'gets cached across Repository instances' do
+ allow(repository.raw_repository).to receive(:branch_names).once.and_return(fake_branch_names)
+
+ expect(repository.branch_names).to eq(fake_branch_names)
+
+ fresh_repository = Project.find(project.id).repository
+ expect(fresh_repository.object_id).not_to eq(repository.object_id)
+
+ expect(fresh_repository.raw_repository).not_to receive(:branch_names)
+ expect(fresh_repository.branch_names).to eq(fake_branch_names)
+ end
+ end
+
describe '#update_autocrlf_option' do
describe 'when autocrlf is not already set to :input' do
before do
@@ -1197,17 +1204,15 @@ describe Repository do
let(:empty_repository) { create(:project_empty_repo).repository }
it 'returns true for an empty repository' do
- expect(empty_repository.empty?).to eq(true)
+ expect(empty_repository).to be_empty
end
it 'returns false for a non-empty repository' do
- expect(repository.empty?).to eq(false)
+ expect(repository).not_to be_empty
end
it 'caches the output' do
- expect(repository.raw_repository).to receive(:empty?)
- .once
- .and_return(false)
+ expect(repository.raw_repository).to receive(:has_visible_content?).once
repository.empty?
repository.empty?
@@ -1365,78 +1370,98 @@ describe Repository do
end
describe '#revert' do
- let(:new_image_commit) { repository.commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9') }
- let(:update_image_commit) { repository.commit('2f63565e7aac07bcdadb654e253078b727143ec4') }
- let(:message) { 'revert message' }
-
- context 'when there is a conflict' do
- it 'raises an error' do
- expect { repository.revert(user, new_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
+ shared_examples 'reverting a commit' do
+ let(:new_image_commit) { repository.commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9') }
+ let(:update_image_commit) { repository.commit('2f63565e7aac07bcdadb654e253078b727143ec4') }
+ let(:message) { 'revert message' }
+
+ context 'when there is a conflict' do
+ it 'raises an error' do
+ expect { repository.revert(user, new_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
+ end
end
- end
- context 'when commit was already reverted' do
- it 'raises an error' do
- repository.revert(user, update_image_commit, 'master', message)
+ context 'when commit was already reverted' do
+ it 'raises an error' do
+ repository.revert(user, update_image_commit, 'master', message)
- expect { repository.revert(user, update_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
+ expect { repository.revert(user, update_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
+ end
end
- end
- context 'when commit can be reverted' do
- it 'reverts the changes' do
- expect(repository.revert(user, update_image_commit, 'master', message)).to be_truthy
+ context 'when commit can be reverted' do
+ it 'reverts the changes' do
+ expect(repository.revert(user, update_image_commit, 'master', message)).to be_truthy
+ end
end
- end
- context 'reverting a merge commit' do
- it 'reverts the changes' do
- merge_commit
- expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).to be_present
+ context 'reverting a merge commit' do
+ it 'reverts the changes' do
+ merge_commit
+ expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).to be_present
- repository.revert(user, merge_commit, 'master', message)
- expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).not_to be_present
+ repository.revert(user, merge_commit, 'master', message)
+ expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).not_to be_present
+ end
end
end
+
+ context 'when Gitaly revert feature is enabled' do
+ it_behaves_like 'reverting a commit'
+ end
+
+ context 'when Gitaly revert feature is disabled', :disable_gitaly do
+ it_behaves_like 'reverting a commit'
+ end
end
describe '#cherry_pick' do
- let(:conflict_commit) { repository.commit('c642fe9b8b9f28f9225d7ea953fe14e74748d53b') }
- let(:pickable_commit) { repository.commit('7d3b0f7cff5f37573aea97cebfd5692ea1689924') }
- let(:pickable_merge) { repository.commit('e56497bb5f03a90a51293fc6d516788730953899') }
- let(:message) { 'cherry-pick message' }
-
- context 'when there is a conflict' do
- it 'raises an error' do
- expect { repository.cherry_pick(user, conflict_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
+ shared_examples 'cherry-picking a commit' do
+ let(:conflict_commit) { repository.commit('c642fe9b8b9f28f9225d7ea953fe14e74748d53b') }
+ let(:pickable_commit) { repository.commit('7d3b0f7cff5f37573aea97cebfd5692ea1689924') }
+ let(:pickable_merge) { repository.commit('e56497bb5f03a90a51293fc6d516788730953899') }
+ let(:message) { 'cherry-pick message' }
+
+ context 'when there is a conflict' do
+ it 'raises an error' do
+ expect { repository.cherry_pick(user, conflict_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
+ end
end
- end
- context 'when commit was already cherry-picked' do
- it 'raises an error' do
- repository.cherry_pick(user, pickable_commit, 'master', message)
+ context 'when commit was already cherry-picked' do
+ it 'raises an error' do
+ repository.cherry_pick(user, pickable_commit, 'master', message)
- expect { repository.cherry_pick(user, pickable_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
+ expect { repository.cherry_pick(user, pickable_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
+ end
end
- end
- context 'when commit can be cherry-picked' do
- it 'cherry-picks the changes' do
- expect(repository.cherry_pick(user, pickable_commit, 'master', message)).to be_truthy
+ context 'when commit can be cherry-picked' do
+ it 'cherry-picks the changes' do
+ expect(repository.cherry_pick(user, pickable_commit, 'master', message)).to be_truthy
+ end
end
- end
- context 'cherry-picking a merge commit' do
- it 'cherry-picks the changes' do
- expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).to be_nil
+ context 'cherry-picking a merge commit' do
+ it 'cherry-picks the changes' do
+ expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).to be_nil
- cherry_pick_commit_sha = repository.cherry_pick(user, pickable_merge, 'improve/awesome', message)
- cherry_pick_commit_message = project.commit(cherry_pick_commit_sha).message
+ cherry_pick_commit_sha = repository.cherry_pick(user, pickable_merge, 'improve/awesome', message)
+ cherry_pick_commit_message = project.commit(cherry_pick_commit_sha).message
- expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).not_to be_nil
- expect(cherry_pick_commit_message).to eq(message)
+ expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).not_to be_nil
+ expect(cherry_pick_commit_message).to eq(message)
+ end
end
end
+
+ context 'when Gitaly cherry_pick feature is enabled' do
+ it_behaves_like 'cherry-picking a commit'
+ end
+
+ context 'when Gitaly cherry_pick feature is disabled', :disable_gitaly do
+ it_behaves_like 'cherry-picking a commit'
+ end
end
describe '#before_delete' do
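
The shared_examples / it_behaves_like pairing above runs the same revert and cherry-pick assertions twice, once per Gitaly mode. A minimal sketch of how a :disable_gitaly tag could be wired up in a support file (the hook body and the stubbed method are assumptions, not GitLab's exact code):

    # spec/support/disable_gitaly.rb (hypothetical path)
    RSpec.configure do |config|
      config.before(:each, :disable_gitaly) do
        # Force every feature check onto the non-Gitaly code path for this example.
        allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(false)
      end
    end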
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index de3ca300ae3..e09d89d235d 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -88,7 +88,7 @@ describe Snippet do
end
describe '.search' do
- let(:snippet) { create(:snippet) }
+ let(:snippet) { create(:snippet, title: 'test snippet') }
it 'returns snippets with a matching title' do
expect(described_class.search(snippet.title)).to eq([snippet])
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index d2f97009ad9..03c96a8f5aa 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -642,16 +642,40 @@ describe User do
end
describe 'groups' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+
before do
- @user = create :user
- @group = create :group
- @group.add_owner(@user)
+ group.add_owner(user)
end
- it { expect(@user.several_namespaces?).to be_truthy }
- it { expect(@user.authorized_groups).to eq([@group]) }
- it { expect(@user.owned_groups).to eq([@group]) }
- it { expect(@user.namespaces).to match_array([@user.namespace, @group]) }
+ it { expect(user.several_namespaces?).to be_truthy }
+ it { expect(user.authorized_groups).to eq([group]) }
+ it { expect(user.owned_groups).to eq([group]) }
+ it { expect(user.namespaces).to contain_exactly(user.namespace, group) }
+ it { expect(user.manageable_namespaces).to contain_exactly(user.namespace, group) }
+
+ context 'with child groups', :nested_groups do
+ let!(:subgroup) { create(:group, parent: group) }
+
+ describe '#manageable_namespaces' do
+ it 'includes all the namespaces the user can manage' do
+ expect(user.manageable_namespaces).to contain_exactly(user.namespace, group, subgroup)
+ end
+ end
+
+ describe '#manageable_groups' do
+ it 'includes all the namespaces the user can manage' do
+ expect(user.manageable_groups).to contain_exactly(group, subgroup)
+ end
+
+ it 'does not include duplicates if a membership was added for the subgroup' do
+ subgroup.add_owner(user)
+
+ expect(user.manageable_groups).to contain_exactly(group, subgroup)
+ end
+ end
+ end
end
describe 'group multiple owners' do
@@ -804,7 +828,7 @@ describe User do
end
end
- describe '#require_ssh_key?' do
+ describe '#require_ssh_key?', :use_clean_rails_memory_store_caching do
protocol_and_expectation = {
'http' => false,
'ssh' => true,
@@ -819,6 +843,12 @@ describe User do
expect(user.require_ssh_key?).to eq(expected)
end
end
+
+ it 'returns false when the user has 1 or more SSH keys' do
+ key = create(:personal_key)
+
+ expect(key.user.require_ssh_key?).to eq(false)
+ end
end
end
@@ -841,6 +871,19 @@ describe User do
end
end
+ describe '.by_any_email' do
+ it 'returns an ActiveRecord::Relation' do
+ expect(described_class.by_any_email('foo@example.com'))
+ .to be_a_kind_of(ActiveRecord::Relation)
+ end
+
+ it 'returns a relation of users' do
+ user = create(:user)
+
+ expect(described_class.by_any_email(user.email)).to eq([user])
+ end
+ end
+
describe '.search' do
let!(:user) { create(:user, name: 'user', username: 'usern', email: 'email@gmail.com') }
let!(:user2) { create(:user, name: 'user name', username: 'username', email: 'someemail@gmail.com') }
@@ -1136,16 +1179,9 @@ describe User do
let(:user) { create(:user, :with_avatar) }
context 'when avatar file is uploaded' do
- let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" }
- let(:avatar_path) { "/uploads/-/system/user/avatar/#{user.id}/dk.png" }
-
it 'shows correct avatar url' do
- expect(user.avatar_url).to eq(avatar_path)
- expect(user.avatar_url(only_path: false)).to eq([gitlab_host, avatar_path].join)
-
- allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host)
-
- expect(user.avatar_url).to eq([gitlab_host, avatar_path].join)
+ expect(user.avatar_url).to eq(user.avatar.url)
+ expect(user.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, user.avatar.url].join)
end
end
end
@@ -2107,25 +2143,47 @@ describe User do
end
end
- describe '#allow_password_authentication?' do
+ describe '#allow_password_authentication_for_web?' do
+ context 'regular user' do
+ let(:user) { build(:user) }
+
+ it 'returns true when password authentication is enabled for the web interface' do
+ expect(user.allow_password_authentication_for_web?).to be_truthy
+ end
+
+ it 'returns false when password authentication is disabled for the web interface' do
+ stub_application_setting(password_authentication_enabled_for_web: false)
+
+ expect(user.allow_password_authentication_for_web?).to be_falsey
+ end
+ end
+
+ it 'returns false for ldap user' do
+ user = create(:omniauth_user, provider: 'ldapmain')
+
+ expect(user.allow_password_authentication_for_web?).to be_falsey
+ end
+ end
+
+ describe '#allow_password_authentication_for_git?' do
context 'regular user' do
let(:user) { build(:user) }
- it 'returns true when sign-in is enabled' do
- expect(user.allow_password_authentication?).to be_truthy
+ it 'returns true when password authentication is enabled for Git' do
+ expect(user.allow_password_authentication_for_git?).to be_truthy
end
- it 'returns false when sign-in is disabled' do
- stub_application_setting(password_authentication_enabled: false)
+ it 'returns false when password authentication is disabled for Git' do
+ stub_application_setting(password_authentication_enabled_for_git: false)
- expect(user.allow_password_authentication?).to be_falsey
+ expect(user.allow_password_authentication_for_git?).to be_falsey
end
end
it 'returns false for ldap user' do
user = create(:omniauth_user, provider: 'ldapmain')
- expect(user.allow_password_authentication?).to be_falsey
+ expect(user.allow_password_authentication_for_git?).to be_falsey
end
end
@@ -2345,7 +2403,8 @@ describe User do
let(:expected) { !(password_automatically_set || ldap_user || password_authentication_disabled) }
before do
- stub_application_setting(password_authentication_enabled: !password_authentication_disabled)
+ stub_application_setting(password_authentication_enabled_for_web: !password_authentication_disabled)
+ stub_application_setting(password_authentication_enabled_for_git: !password_authentication_disabled)
end
it 'returns false unless all inputs are true' do
@@ -2374,4 +2433,163 @@ describe User do
expect(user).not_to be_blocked
end
end
+
+ describe '#max_member_access_for_project_ids' do
+ shared_examples 'max member access for projects' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:owner_project) { create(:project, group: group) }
+ let(:master_project) { create(:project) }
+ let(:reporter_project) { create(:project) }
+ let(:developer_project) { create(:project) }
+ let(:guest_project) { create(:project) }
+ let(:no_access_project) { create(:project) }
+
+ let(:projects) do
+ [owner_project, master_project, reporter_project, developer_project, guest_project, no_access_project].map(&:id)
+ end
+
+ let(:expected) do
+ {
+ owner_project.id => Gitlab::Access::OWNER,
+ master_project.id => Gitlab::Access::MASTER,
+ reporter_project.id => Gitlab::Access::REPORTER,
+ developer_project.id => Gitlab::Access::DEVELOPER,
+ guest_project.id => Gitlab::Access::GUEST,
+ no_access_project.id => Gitlab::Access::NO_ACCESS
+ }
+ end
+
+ before do
+ create(:group_member, user: user, group: group)
+ master_project.add_master(user)
+ reporter_project.add_reporter(user)
+ developer_project.add_developer(user)
+ guest_project.add_guest(user)
+ end
+
+ it 'returns correct roles for different projects' do
+ expect(user.max_member_access_for_project_ids(projects)).to eq(expected)
+ end
+ end
+
+ context 'with RequestStore enabled', :request_store do
+ include_examples 'max member access for projects'
+
+ def access_levels(projects)
+ user.max_member_access_for_project_ids(projects)
+ end
+
+ it 'does not perform extra queries when asked for projects that have already been found' do
+ access_levels(projects)
+
+ expect { access_levels(projects) }.not_to exceed_query_limit(0)
+
+ expect(access_levels(projects)).to eq(expected)
+ end
+
+ it 'only requests the extra projects when uncached projects are passed' do
+ second_master_project = create(:project)
+ second_developer_project = create(:project)
+ second_master_project.add_master(user)
+ second_developer_project.add_developer(user)
+
+ all_projects = projects + [second_master_project.id, second_developer_project.id]
+
+ expected_all = expected.merge(second_master_project.id => Gitlab::Access::MASTER,
+ second_developer_project.id => Gitlab::Access::DEVELOPER)
+
+ access_levels(projects)
+
+ queries = ActiveRecord::QueryRecorder.new { access_levels(all_projects) }
+
+ expect(queries.count).to eq(1)
+ expect(queries.log_message).to match(/\W(#{second_master_project.id}, #{second_developer_project.id})\W/)
+ expect(access_levels(all_projects)).to eq(expected_all)
+ end
+ end
+
+ context 'with RequestStore disabled' do
+ include_examples 'max member access for projects'
+ end
+ end
+
+ describe '#max_member_access_for_group_ids' do
+ shared_examples 'max member access for groups' do
+ let(:user) { create(:user) }
+ let(:owner_group) { create(:group) }
+ let(:master_group) { create(:group) }
+ let(:reporter_group) { create(:group) }
+ let(:developer_group) { create(:group) }
+ let(:guest_group) { create(:group) }
+ let(:no_access_group) { create(:group) }
+
+ let(:groups) do
+ [owner_group, master_group, reporter_group, developer_group, guest_group, no_access_group].map(&:id)
+ end
+
+ let(:expected) do
+ {
+ owner_group.id => Gitlab::Access::OWNER,
+ master_group.id => Gitlab::Access::MASTER,
+ reporter_group.id => Gitlab::Access::REPORTER,
+ developer_group.id => Gitlab::Access::DEVELOPER,
+ guest_group.id => Gitlab::Access::GUEST,
+ no_access_group.id => Gitlab::Access::NO_ACCESS
+ }
+ end
+
+ before do
+ owner_group.add_owner(user)
+ master_group.add_master(user)
+ reporter_group.add_reporter(user)
+ developer_group.add_developer(user)
+ guest_group.add_guest(user)
+ end
+
+ it 'returns correct roles for different groups' do
+ expect(user.max_member_access_for_group_ids(groups)).to eq(expected)
+ end
+ end
+
+ context 'with RequestStore enabled', :request_store do
+ include_examples 'max member access for groups'
+
+ def access_levels(groups)
+ user.max_member_access_for_group_ids(groups)
+ end
+
+ it 'does not perform extra queries when asked for groups that have already been found' do
+ access_levels(groups)
+
+ expect { access_levels(groups) }.not_to exceed_query_limit(0)
+
+ expect(access_levels(groups)).to eq(expected)
+ end
+
+ it 'only requests the extra groups when uncached groups are passed' do
+ second_master_group = create(:group)
+ second_developer_group = create(:group)
+ second_master_group.add_master(user)
+ second_developer_group.add_developer(user)
+
+ all_groups = groups + [second_master_group.id, second_developer_group.id]
+
+ expected_all = expected.merge(second_master_group.id => Gitlab::Access::MASTER,
+ second_developer_group.id => Gitlab::Access::DEVELOPER)
+
+ access_levels(groups)
+
+ queries = ActiveRecord::QueryRecorder.new { access_levels(all_groups) }
+
+ expect(queries.count).to eq(1)
+ expect(queries.log_message).to match(/\W(#{second_master_group.id}, #{second_developer_group.id})\W/)
+ expect(access_levels(all_groups)).to eq(expected_all)
+ end
+ end
+
+ context 'with RequestStore disabled' do
+ include_examples 'max member access for groups'
+ end
+ end
end
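
Both the project and group variants above pin down the same caching contract: ids already seen cost zero queries, and only uncached ids trigger one batched lookup. A self-contained toy version of that contract (the cache, data source and access levels are stand-ins, not GitLab internals):

    NO_ACCESS = 0

    # Returns {id => access_level}, filling the cache only for unseen ids.
    def max_access_for_ids(ids, memberships, cache)
      uncached = ids.reject { |id| cache.key?(id) }
      # One batched lookup for everything not yet cached (the "1 query").
      uncached.each { |id| cache[id] = memberships.fetch(id, NO_ACCESS) }
      ids.each_with_object({}) { |id, out| out[id] = cache[id] }
    end

    cache = {}
    memberships = { 1 => 40, 2 => 30 }
    max_access_for_ids([1, 2, 3], memberships, cache) # => {1=>40, 2=>30, 3=>0}
    max_access_for_ids([1, 2, 3], memberships, cache) # cache hit: no further lookups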
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index a7227b38850..ea75434e399 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -373,7 +373,7 @@ describe WikiPage do
end
it 'returns commit sha' do
- expect(@page.last_commit_sha).to eq @page.commit.sha
+ expect(@page.last_commit_sha).to eq @page.last_version.sha
end
it 'is changed after page updated' do
diff --git a/spec/policies/ci/build_policy_spec.rb b/spec/policies/ci/build_policy_spec.rb
index 8e1bc3d1543..298a9d16425 100644
--- a/spec/policies/ci/build_policy_spec.rb
+++ b/spec/policies/ci/build_policy_spec.rb
@@ -150,5 +150,82 @@ describe Ci::BuildPolicy do
end
end
end
+
+ describe 'rules for erase build' do
+ let(:project) { create(:project, :repository) }
+ let(:build) { create(:ci_build, pipeline: pipeline, ref: 'some-ref', user: owner) }
+
+ context 'when a developer erases a build' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when developers can push to the branch' do
+ before do
+ create(:protected_branch, :developers_can_push,
+ name: build.ref, project: project)
+ end
+
+ context 'when the build was created by the developer' do
+ let(:owner) { user }
+
+ it { expect(policy).to be_allowed :erase_build }
+ end
+
+ context 'when the build was created by the other' do
+ let(:owner) { create(:user) }
+
+ it { expect(policy).to be_disallowed :erase_build }
+ end
+ end
+
+ context 'when no one can push or merge to the branch' do
+ let(:owner) { user }
+
+ before do
+ create(:protected_branch, :no_one_can_push, :no_one_can_merge,
+ name: build.ref, project: project)
+ end
+
+ it { expect(policy).to be_disallowed :erase_build }
+ end
+ end
+
+ context 'when a master erases a build' do
+ before do
+ project.add_master(user)
+ end
+
+ context 'when masters can push to the branch' do
+ before do
+ create(:protected_branch, :masters_can_push,
+ name: build.ref, project: project)
+ end
+
+ context 'when the build was created by the master' do
+ let(:owner) { user }
+
+ it { expect(policy).to be_allowed :erase_build }
+ end
+
+ context 'when the build was created by the other' do
+ let(:owner) { create(:user) }
+
+ it { expect(policy).to be_allowed :erase_build }
+ end
+ end
+
+ context 'when no one can push or merge to the branch' do
+ let(:owner) { user }
+
+ before do
+ create(:protected_branch, :no_one_can_push, :no_one_can_merge,
+ name: build.ref, project: project)
+ end
+
+ it { expect(policy).to be_disallowed :erase_build }
+ end
+ end
+ end
end
end
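
Collapsed into a single predicate, the erase rules exercised above combine role, build authorship and branch protection; an illustrative condensation in plain Ruby (not GitLab's policy DSL):

    def can_erase_build?(role, owns_build, can_push_to_ref)
      return false unless can_push_to_ref        # :no_one_can_push blocks everyone
      role == :master || (role == :developer && owns_build)
    end

    can_erase_build?(:developer, true,  true)  # => true
    can_erase_build?(:developer, false, true)  # => false
    can_erase_build?(:master,    false, true)  # => true
    can_erase_build?(:master,    true,  false) # => false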
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 17dc3bb4f48..b4d25e06d9a 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -9,6 +9,8 @@ describe GroupPolicy do
let(:admin) { create(:admin) }
let(:group) { create(:group) }
+ let(:guest_permissions) { [:read_group, :upload_file, :read_namespace] }
+
let(:reporter_permissions) { [:admin_label] }
let(:developer_permissions) { [:admin_milestones] }
@@ -52,10 +54,12 @@ describe GroupPolicy do
it do
expect_allowed(:read_group)
+ expect_disallowed(:upload_file)
expect_disallowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
expect_disallowed(*owner_permissions)
+ expect_disallowed(:read_namespace)
end
end
@@ -63,7 +67,7 @@ describe GroupPolicy do
let(:current_user) { guest }
it do
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_disallowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -75,7 +79,7 @@ describe GroupPolicy do
let(:current_user) { reporter }
it do
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -87,7 +91,7 @@ describe GroupPolicy do
let(:current_user) { developer }
it do
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -99,7 +103,7 @@ describe GroupPolicy do
let(:current_user) { master }
it do
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
@@ -113,7 +117,7 @@ describe GroupPolicy do
it do
allow(Group).to receive(:supports_nested_groups?).and_return(true)
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
@@ -127,7 +131,7 @@ describe GroupPolicy do
it do
allow(Group).to receive(:supports_nested_groups?).and_return(true)
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
@@ -186,7 +190,7 @@ describe GroupPolicy do
let(:current_user) { nil }
it do
- expect_disallowed(:read_group)
+ expect_disallowed(*guest_permissions)
expect_disallowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -198,7 +202,7 @@ describe GroupPolicy do
let(:current_user) { guest }
it do
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_disallowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -210,7 +214,7 @@ describe GroupPolicy do
let(:current_user) { reporter }
it do
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -222,7 +226,7 @@ describe GroupPolicy do
let(:current_user) { developer }
it do
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -234,7 +238,7 @@ describe GroupPolicy do
let(:current_user) { master }
it do
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
@@ -248,7 +252,7 @@ describe GroupPolicy do
it do
allow(Group).to receive(:supports_nested_groups?).and_return(true)
- expect_allowed(:read_group)
+ expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
diff --git a/spec/policies/namespace_policy_spec.rb b/spec/policies/namespace_policy_spec.rb
index e52ff02e5f0..1fdf95ad716 100644
--- a/spec/policies/namespace_policy_spec.rb
+++ b/spec/policies/namespace_policy_spec.rb
@@ -1,20 +1,42 @@
require 'spec_helper'
describe NamespacePolicy do
- let(:current_user) { create(:user) }
- let(:namespace) { current_user.namespace }
+ let(:user) { create(:user) }
+ let(:owner) { create(:user) }
+ let(:admin) { create(:admin) }
+ let(:namespace) { create(:namespace, owner: owner) }
+
+ let(:owner_permissions) { [:create_projects, :admin_namespace, :read_namespace] }
subject { described_class.new(current_user, namespace) }
- context "create projects" do
- context "user namespace" do
- it { is_expected.to be_allowed(:create_projects) }
- end
+ context 'with no user' do
+ let(:current_user) { nil }
+
+ it { is_expected.to be_banned }
+ end
+
+ context 'regular user' do
+ let(:current_user) { user }
+
+ it { is_expected.to be_disallowed(*owner_permissions) }
+ end
+
+ context 'owner' do
+ let(:current_user) { owner }
+
+ it { is_expected.to be_allowed(*owner_permissions) }
- context "user who has exceeded project limit" do
- let(:current_user) { create(:user, projects_limit: 0) }
+ context 'user who has exceeded project limit' do
+ let(:owner) { create(:user, projects_limit: 0) }
it { is_expected.not_to be_allowed(:create_projects) }
end
end
+
+ context 'admin' do
+ let(:current_user) { admin }
+
+ it { is_expected.to be_allowed(*owner_permissions) }
+ end
end
diff --git a/spec/presenters/clusters/cluster_presenter_spec.rb b/spec/presenters/clusters/cluster_presenter_spec.rb
index 48d4f3671c5..e96dbfb73c0 100644
--- a/spec/presenters/clusters/cluster_presenter_spec.rb
+++ b/spec/presenters/clusters/cluster_presenter_spec.rb
@@ -31,4 +31,44 @@ describe Clusters::ClusterPresenter do
it { is_expected.to include(cluster.provider.zone) }
it { is_expected.to include(cluster.name) }
end
+
+ describe '#can_toggle_cluster' do
+ let(:user) { create(:user) }
+
+ before do
+ allow(cluster).to receive(:current_user).and_return(user)
+ end
+
+ subject { described_class.new(cluster).can_toggle_cluster? }
+
+ context 'when user can update' do
+ before do
+ allow_any_instance_of(described_class).to receive(:can?).with(user, :update_cluster, cluster).and_return(true)
+ end
+
+ context 'when cluster is created' do
+ before do
+ allow(cluster).to receive(:created?).and_return(true)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when cluster is not created' do
+ before do
+ allow(cluster).to receive(:created?).and_return(false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ context 'when user can not update' do
+ before do
+ allow_any_instance_of(described_class).to receive(:can?).with(user, :update_cluster, cluster).and_return(false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
end
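
The presenter examples above reduce to a conjunction of permission and cluster state; an illustrative stand-in for that logic (the implementation is an assumption, only the outcomes mirror the spec):

    CanToggle = Struct.new(:update_permitted, :created) do
      def can_toggle_cluster?
        update_permitted && created
      end
    end

    CanToggle.new(true,  true).can_toggle_cluster?  # => true
    CanToggle.new(true,  false).can_toggle_cluster? # => false
    CanToggle.new(false, true).can_toggle_cluster?  # => false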
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index 5e114434a67..f325d1776e4 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -31,7 +31,7 @@ describe MergeRequestPresenter do
let(:pipeline) { build_stubbed(:ci_pipeline) }
before do
- allow(resource).to receive(:head_pipeline).and_return(pipeline)
+ allow(resource).to receive(:actual_head_pipeline).and_return(pipeline)
end
context 'success with warnings' do
diff --git a/spec/requests/api/circuit_breakers_spec.rb b/spec/requests/api/circuit_breakers_spec.rb
index 3b858c40fd6..fe76f057115 100644
--- a/spec/requests/api/circuit_breakers_spec.rb
+++ b/spec/requests/api/circuit_breakers_spec.rb
@@ -47,7 +47,7 @@ describe API::CircuitBreakers do
describe 'DELETE circuit_breakers/repository_storage' do
it 'clears all circuit_breakers' do
- expect(Gitlab::Git::Storage::CircuitBreaker).to receive(:reset_all!)
+ expect(Gitlab::Git::Storage::FailureInfo).to receive(:reset_all!)
delete api('/circuit_breakers/repository_storage', admin)
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 780dbce6488..554723d6b1e 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -401,6 +401,20 @@ describe API::Groups do
expect(response).to have_gitlab_http_status(404)
end
+
+ it 'avoids N+1 queries' do
+ get api("/groups/#{group1.id}/projects", admin)
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ get api("/groups/#{group1.id}/projects", admin)
+ end.count
+
+ create(:project, namespace: group1)
+
+ expect do
+ get api("/groups/#{group1.id}/projects", admin)
+ end.not_to exceed_query_limit(control_count)
+ end
end
context 'when using group path in URL' do
@@ -427,6 +441,142 @@ describe API::Groups do
end
end
+ describe 'GET /groups/:id/subgroups', :nested_groups do
+ let!(:subgroup1) { create(:group, parent: group1) }
+ let!(:subgroup2) { create(:group, :private, parent: group1) }
+ let!(:subgroup3) { create(:group, :private, parent: group2) }
+
+ context 'when unauthenticated' do
+ it 'returns only public subgroups' do
+ get api("/groups/#{group1.id}/subgroups")
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['id']).to eq(subgroup1.id)
+ expect(json_response.first['parent_id']).to eq(group1.id)
+ end
+
+ it 'returns 404 for a private group' do
+ get api("/groups/#{group2.id}/subgroups")
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'when authenticated as user' do
+ context 'when user is not member of a public group' do
+ it 'returns no subgroups for the public group' do
+ get api("/groups/#{group1.id}/subgroups", user2)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(0)
+ end
+
+ context 'when using all_available in request' do
+ it 'returns public subgroups' do
+ get api("/groups/#{group1.id}/subgroups", user2), all_available: true
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response[0]['id']).to eq(subgroup1.id)
+ expect(json_response[0]['parent_id']).to eq(group1.id)
+ end
+ end
+ end
+
+ context 'when user is not member of a private group' do
+ it 'returns 404 for the private group' do
+ get api("/groups/#{group2.id}/subgroups", user1)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'when user is member of public group' do
+ before do
+ group1.add_guest(user2)
+ end
+
+ it 'returns private subgroups' do
+ get api("/groups/#{group1.id}/subgroups", user2)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(2)
+ private_subgroups = json_response.select { |group| group['visibility'] == 'private' }
+ expect(private_subgroups.length).to eq(1)
+ expect(private_subgroups.first['id']).to eq(subgroup2.id)
+ expect(private_subgroups.first['parent_id']).to eq(group1.id)
+ end
+
+ context 'when using statistics in request' do
+ it 'does not include statistics' do
+ get api("/groups/#{group1.id}/subgroups", user2), statistics: true
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.first).not_to include 'statistics'
+ end
+ end
+ end
+
+ context 'when user is member of private group' do
+ before do
+ group2.add_guest(user1)
+ end
+
+ it 'returns subgroups' do
+ get api("/groups/#{group2.id}/subgroups", user1)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['id']).to eq(subgroup3.id)
+ expect(json_response.first['parent_id']).to eq(group2.id)
+ end
+ end
+ end
+
+ context 'when authenticated as admin' do
+ it 'returns private subgroups of a public group' do
+ get api("/groups/#{group1.id}/subgroups", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(2)
+ end
+
+ it 'returns subgroups of a private group' do
+ get api("/groups/#{group2.id}/subgroups", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ end
+
+ it 'does not include statistics by default' do
+ get api("/groups/#{group1.id}/subgroups", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.first).not_to include('statistics')
+ end
+
+ it 'includes statistics if requested' do
+ get api("/groups/#{group1.id}/subgroups", admin), statistics: true
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.first).to include('statistics')
+ end
+ end
+ end
+
describe "POST /groups" do
context "when authenticated as user without group permissions" do
it "does not create group" do
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index 6c0996c543d..0462f494e15 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -11,7 +11,6 @@ describe API::Helpers do
let(:admin) { create(:admin) }
let(:key) { create(:key, user: user) }
- let(:params) { {} }
let(:csrf_token) { SecureRandom.base64(ActionController::RequestForgeryProtection::AUTHENTICITY_TOKEN_LENGTH) }
let(:env) do
{
@@ -19,10 +18,13 @@ describe API::Helpers do
'rack.session' => {
_csrf_token: csrf_token
},
- 'REQUEST_METHOD' => 'GET'
+ 'REQUEST_METHOD' => 'GET',
+ 'CONTENT_TYPE' => 'text/plain;charset=utf-8'
}
end
let(:header) { }
+ let(:request) { Grape::Request.new(env) }
+ let(:params) { request.params }
before do
allow_any_instance_of(self.class).to receive(:options).and_return({})
@@ -37,6 +39,10 @@ describe API::Helpers do
raise Exception.new("#{status} - #{message}")
end
+ def set_param(key, value)
+ request.update_param(key, value)
+ end
+
describe ".current_user" do
subject { current_user }
@@ -132,13 +138,13 @@ describe API::Helpers do
let(:personal_access_token) { create(:personal_access_token, user: user) }
it "returns a 401 response for an invalid token" do
- env[API::APIGuard::PRIVATE_TOKEN_HEADER] = 'invalid token'
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = 'invalid token'
expect { current_user }.to raise_error /401/
end
it "returns a 403 response for a user without access" do
- env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token
allow_any_instance_of(Gitlab::UserAccess).to receive(:allowed?).and_return(false)
expect { current_user }.to raise_error /403/
@@ -146,35 +152,35 @@ describe API::Helpers do
it 'returns a 403 response for a user who is blocked' do
user.block!
- env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token
expect { current_user }.to raise_error /403/
end
it "sets current_user" do
- env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token
expect(current_user).to eq(user)
end
it "does not allow tokens without the appropriate scope" do
personal_access_token = create(:personal_access_token, user: user, scopes: ['read_user'])
- env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token
- expect { current_user }.to raise_error API::APIGuard::InsufficientScopeError
+ expect { current_user }.to raise_error Gitlab::Auth::InsufficientScopeError
end
it 'does not allow revoked tokens' do
personal_access_token.revoke!
- env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token
- expect { current_user }.to raise_error API::APIGuard::RevokedError
+ expect { current_user }.to raise_error Gitlab::Auth::RevokedError
end
it 'does not allow expired tokens' do
personal_access_token.update_attributes!(expires_at: 1.day.ago)
- env[API::APIGuard::PRIVATE_TOKEN_HEADER] = personal_access_token.token
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token
- expect { current_user }.to raise_error API::APIGuard::ExpiredError
+ expect { current_user }.to raise_error Gitlab::Auth::ExpiredError
end
end
end
@@ -350,7 +356,7 @@ describe API::Helpers do
context 'when using param' do
context 'when providing username' do
before do
- params[API::Helpers::SUDO_PARAM] = user.username
+ set_param(API::Helpers::SUDO_PARAM, user.username)
end
it_behaves_like 'successful sudo'
@@ -358,7 +364,7 @@ describe API::Helpers do
context 'when providing user ID' do
before do
- params[API::Helpers::SUDO_PARAM] = user.id.to_s
+ set_param(API::Helpers::SUDO_PARAM, user.id.to_s)
end
it_behaves_like 'successful sudo'
@@ -368,7 +374,7 @@ describe API::Helpers do
context 'when user does not exist' do
before do
- params[API::Helpers::SUDO_PARAM] = 'nonexistent'
+ set_param(API::Helpers::SUDO_PARAM, 'nonexistent')
end
it 'raises an error' do
@@ -382,11 +388,11 @@ describe API::Helpers do
token.scopes = %w[api]
token.save!
- params[API::Helpers::SUDO_PARAM] = user.id.to_s
+ set_param(API::Helpers::SUDO_PARAM, user.id.to_s)
end
it 'raises an error' do
- expect { current_user }.to raise_error API::APIGuard::InsufficientScopeError
+ expect { current_user }.to raise_error Gitlab::Auth::InsufficientScopeError
end
end
end
@@ -396,7 +402,7 @@ describe API::Helpers do
token.user = user
token.save!
- params[API::Helpers::SUDO_PARAM] = user.id.to_s
+ set_param(API::Helpers::SUDO_PARAM, user.id.to_s)
end
it 'raises an error' do
@@ -420,7 +426,7 @@ describe API::Helpers do
context 'passed as param' do
before do
- params[API::APIGuard::PRIVATE_TOKEN_PARAM] = token.token
+ set_param(Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_PARAM, token.token)
end
it_behaves_like 'sudo'
@@ -428,7 +434,7 @@ describe API::Helpers do
context 'passed as header' do
before do
- env[API::APIGuard::PRIVATE_TOKEN_HEADER] = token.token
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = token.token
end
it_behaves_like 'sudo'
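
The switch from mutating params directly to a set_param helper matters because Grape materialises params from the Rack env; writing through Grape::Request#update_param keeps the env and the params hash in sync. A minimal sketch, assuming the grape and rack gems:

    require 'grape'

    env = Rack::MockRequest.env_for('/?sudo=root', method: 'GET')
    request = Grape::Request.new(env)

    request.update_param('sudo', '1')  # Rack::Request#update_param writes into the env
    request.params['sudo']             # => "1"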
diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb
index d919899282d..67e1539cbc3 100644
--- a/spec/requests/api/internal_spec.rb
+++ b/spec/requests/api/internal_spec.rb
@@ -203,18 +203,44 @@ describe API::Internal do
end
context 'with env passed as a JSON' do
- it 'sets env in RequestStore' do
- expect(Gitlab::Git::Env).to receive(:set).with({
- 'GIT_OBJECT_DIRECTORY' => 'foo',
- 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar'
- })
+ context 'when relative path envs are not set' do
+ it 'sets env in RequestStore' do
+ expect(Gitlab::Git::Env).to receive(:set).with({
+ 'GIT_OBJECT_DIRECTORY' => 'foo',
+ 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar'
+ })
+
+ push(key, project.wiki, env: {
+ GIT_OBJECT_DIRECTORY: 'foo',
+ GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar'
+ }.to_json)
- push(key, project.wiki, env: {
- GIT_OBJECT_DIRECTORY: 'foo',
- GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar'
- }.to_json)
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
- expect(response).to have_gitlab_http_status(200)
+ context 'when relative path envs are set' do
+ it 'sets env in RequestStore' do
+ obj_dir_relative = './objects'
+ alt_obj_dirs_relative = ['./alt-objects-1', './alt-objects-2']
+ repo_path = project.wiki.repository.path_to_repo
+
+ expect(Gitlab::Git::Env).to receive(:set).with({
+ 'GIT_OBJECT_DIRECTORY' => File.join(repo_path, obj_dir_relative),
+ 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => alt_obj_dirs_relative.map { |d| File.join(repo_path, d) },
+ 'GIT_OBJECT_DIRECTORY_RELATIVE' => obj_dir_relative,
+ 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => alt_obj_dirs_relative
+ })
+
+ push(key, project.wiki, env: {
+ GIT_OBJECT_DIRECTORY: 'foo',
+ GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar',
+ GIT_OBJECT_DIRECTORY_RELATIVE: obj_dir_relative,
+ GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: alt_obj_dirs_relative
+ }.to_json)
+
+ expect(response).to have_gitlab_http_status(200)
+ end
end
end
@@ -243,9 +269,8 @@ describe API::Internal do
end
context "git pull" do
- context "gitaly disabled" do
+ context "gitaly disabled", :disable_gitaly do
it "has the correct payload" do
- allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:ssh_upload_pack).and_return(false)
pull(key, project)
expect(response).to have_gitlab_http_status(200)
@@ -259,7 +284,6 @@ describe API::Internal do
context "gitaly enabled" do
it "has the correct payload" do
- allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:ssh_upload_pack).and_return(true)
pull(key, project)
expect(response).to have_gitlab_http_status(200)
@@ -278,9 +302,8 @@ describe API::Internal do
end
context "git push" do
- context "gitaly disabled" do
+ context "gitaly disabled", :disable_gitaly do
it "has the correct payload" do
- allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:ssh_receive_pack).and_return(false)
push(key, project)
expect(response).to have_gitlab_http_status(200)
@@ -294,7 +317,6 @@ describe API::Internal do
context "gitaly enabled" do
it "has the correct payload" do
- allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:ssh_receive_pack).and_return(true)
push(key, project)
expect(response).to have_gitlab_http_status(200)
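
The new expectation encodes a simple rule: relative object-directory paths are joined onto the repository path, while the *_RELATIVE keys are passed through untouched. A hypothetical helper showing just that transformation (names are illustrative):

    def absolute_git_env(repo_path, rel_obj_dir, rel_alt_dirs)
      {
        'GIT_OBJECT_DIRECTORY' => File.join(repo_path, rel_obj_dir),
        'GIT_ALTERNATE_OBJECT_DIRECTORIES' => rel_alt_dirs.map { |d| File.join(repo_path, d) },
        'GIT_OBJECT_DIRECTORY_RELATIVE' => rel_obj_dir,
        'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => rel_alt_dirs
      }
    end

    absolute_git_env('/repos/wiki.git', './objects', ['./alt-objects-1', './alt-objects-2'])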
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 8bb3d5ffb03..a435945fea2 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -527,7 +527,11 @@ describe API::Jobs do
end
describe 'POST /projects/:id/jobs/:job_id/erase' do
+ let(:role) { :master }
+
before do
+ project.team << [user, role]
+
post api("/projects/#{project.id}/jobs/#{job.id}/erase", user)
end
@@ -556,6 +560,23 @@ describe API::Jobs do
expect(response).to have_gitlab_http_status(403)
end
end
+
+ context 'when a developer erases a build' do
+ let(:role) { :developer }
+ let(:job) { create(:ci_build, :trace, :artifacts, :success, project: project, pipeline: pipeline, user: owner) }
+
+ context 'when the build was created by the developer' do
+ let(:owner) { user }
+
+ it { expect(response).to have_gitlab_http_status(201) }
+ end
+
+ context 'when the build was created by the other' do
+ let(:owner) { create(:user) }
+
+ it { expect(response).to have_gitlab_http_status(403) }
+ end
+ end
end
describe 'POST /projects/:id/jobs/:job_id/artifacts/keep' do
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index a928ba79a4d..91616da6d9a 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -172,15 +172,15 @@ describe API::MergeRequests do
context "when authenticated" do
it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
get api("/projects/#{project.id}/merge_requests", user)
- end.count
+ end
create(:merge_request, state: 'closed', milestone: milestone1, author: user, assignee: user, source_project: project, target_project: project, title: "Test", created_at: base_time)
expect do
get api("/projects/#{project.id}/merge_requests", user)
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_query_limit(control)
end
it "returns an array of all merge_requests" do
@@ -628,7 +628,7 @@ describe API::MergeRequests do
context 'forked projects' do
let!(:user2) { create(:user) }
- let!(:forked_project) { fork_project(project, user2) }
+ let!(:forked_project) { fork_project(project, user2, repository: true) }
let!(:unrelated_project) { create(:project, namespace: create(:user).namespace, creator_id: user2.id) }
before do
diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb
index e60716d46d7..98102fcd6a7 100644
--- a/spec/requests/api/namespaces_spec.rb
+++ b/spec/requests/api/namespaces_spec.rb
@@ -91,4 +91,127 @@ describe API::Namespaces do
end
end
end
+
+ describe 'GET /namespaces/:id' do
+ let(:owned_group) { group1 }
+ let(:user2) { create(:user) }
+
+ shared_examples 'can access namespace' do
+ it 'returns namespace details' do
+ get api("/namespaces/#{namespace_id}", request_actor)
+
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(json_response['id']).to eq(requested_namespace.id)
+ expect(json_response['path']).to eq(requested_namespace.path)
+ expect(json_response['name']).to eq(requested_namespace.name)
+ end
+ end
+
+ shared_examples 'namespace reader' do
+ let(:requested_namespace) { owned_group }
+
+ before do
+ owned_group.add_owner(request_actor)
+ end
+
+ context 'when namespace exists' do
+ context 'when requested by ID' do
+ context 'when requesting group' do
+ let(:namespace_id) { owned_group.id }
+
+ it_behaves_like 'can access namespace'
+ end
+
+ context 'when requesting personal namespace' do
+ let(:namespace_id) { request_actor.namespace.id }
+ let(:requested_namespace) { request_actor.namespace }
+
+ it_behaves_like 'can access namespace'
+ end
+ end
+
+ context 'when requested by path' do
+ context 'when requesting group' do
+ let(:namespace_id) { owned_group.path }
+
+ it_behaves_like 'can access namespace'
+ end
+
+ context 'when requesting personal namespace' do
+ let(:namespace_id) { request_actor.namespace.path }
+ let(:requested_namespace) { request_actor.namespace }
+
+ it_behaves_like 'can access namespace'
+ end
+ end
+ end
+
+ context "when namespace doesn't exist" do
+ it 'returns not-found' do
+ get api('/namespaces/9999', request_actor)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context 'when unauthenticated' do
+ it 'returns authentication error' do
+ get api("/namespaces/#{group1.id}")
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
+
+ context 'when authenticated as regular user' do
+ let(:request_actor) { user }
+
+ context 'when requested namespace is not owned by user' do
+ context 'when requesting group' do
+ it 'returns not-found' do
+ get api("/namespaces/#{group2.id}", request_actor)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'when requesting personal namespace' do
+ it 'returns not-found' do
+ get api("/namespaces/#{user2.namespace.id}", request_actor)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context 'when requested namespace is owned by user' do
+ it_behaves_like 'namespace reader'
+ end
+ end
+
+ context 'when authenticated as admin' do
+ let(:request_actor) { admin }
+
+ context 'when requested namespace is not owned by user' do
+ context 'when requesting group' do
+ let(:namespace_id) { group2.id }
+ let(:requested_namespace) { group2 }
+
+ it_behaves_like 'can access namespace'
+ end
+
+ context 'when requesting personal namespace' do
+ let(:namespace_id) { user2.namespace.id }
+ let(:requested_namespace) { user2.namespace }
+
+ it_behaves_like 'can access namespace'
+ end
+ end
+
+ context 'when requested namespace is owned by user' do
+ it_behaves_like 'namespace reader'
+ end
+ end
+ end
end
diff --git a/spec/requests/api/notes_spec.rb b/spec/requests/api/notes_spec.rb
index 784070db173..3bfb4c5506f 100644
--- a/spec/requests/api/notes_spec.rb
+++ b/spec/requests/api/notes_spec.rb
@@ -34,6 +34,48 @@ describe API::Notes do
describe "GET /projects/:id/noteable/:noteable_id/notes" do
context "when noteable is an Issue" do
+ context 'sorting' do
+ before do
+ create_list(:note, 3, noteable: issue, project: project, author: user)
+ end
+
+ it 'sorts by created_at in descending order by default' do
+ get api("/projects/#{project.id}/issues/#{issue.iid}/notes", user)
+
+ response_dates = json_response.map { |noteable| noteable['created_at'] }
+
+ expect(json_response.length).to eq(4)
+ expect(response_dates).to eq(response_dates.sort.reverse)
+ end
+
+ it 'sorts by created_at in ascending order when requested' do
+ get api("/projects/#{project.id}/issues/#{issue.iid}/notes?sort=asc", user)
+
+ response_dates = json_response.map { |noteable| noteable['created_at'] }
+
+ expect(json_response.length).to eq(4)
+ expect(response_dates).to eq(response_dates.sort)
+ end
+
+ it 'sorts by updated_at in descending order when requested' do
+ get api("/projects/#{project.id}/issues/#{issue.iid}/notes?order_by=updated_at", user)
+
+ response_dates = json_response.map { |noteable| noteable['updated_at'] }
+
+ expect(json_response.length).to eq(4)
+ expect(response_dates).to eq(response_dates.sort.reverse)
+ end
+
+ it 'sorts by updated_at in ascending order when requested' do
+ get api("/projects/#{project.id}/issues/#{issue.iid}/notes??order_by=updated_at&sort=asc", user)
+
+ response_dates = json_response.map { |noteable| noteable['updated_at'] }
+
+ expect(json_response.length).to eq(4)
+ expect(response_dates).to eq(response_dates.sort)
+ end
+ end
+
it "returns an array of issue notes" do
get api("/projects/#{project.id}/issues/#{issue.iid}/notes", user)
@@ -85,6 +127,47 @@ describe API::Notes do
end
context "when noteable is a Snippet" do
+ context 'sorting' do
+ before do
+ create_list(:note, 3, noteable: snippet, project: project, author: user)
+ end
+
+ it 'sorts by created_at in descending order by default' do
+ get api("/projects/#{project.id}/snippets/#{snippet.id}/notes", user)
+
+ response_dates = json_response.map { |noteable| noteable['created_at'] }
+
+ expect(json_response.length).to eq(4)
+ expect(response_dates).to eq(response_dates.sort.reverse)
+ end
+
+ it 'sorts by created_at in ascending order when requested' do
+ get api("/projects/#{project.id}/snippets/#{snippet.id}/notes?sort=asc", user)
+
+ response_dates = json_response.map { |noteable| noteable['created_at'] }
+
+ expect(json_response.length).to eq(4)
+ expect(response_dates).to eq(response_dates.sort)
+ end
+
+ it 'sorts by updated_at in descending order when requested' do
+ get api("/projects/#{project.id}/snippets/#{snippet.id}/notes?order_by=updated_at", user)
+
+ response_dates = json_response.map { |noteable| noteable['updated_at'] }
+
+ expect(json_response.length).to eq(4)
+ expect(response_dates).to eq(response_dates.sort.reverse)
+ end
+
+ it 'sorts by updated_at in ascending order when requested' do
+ get api("/projects/#{project.id}/snippets/#{snippet.id}/notes??order_by=updated_at&sort=asc", user)
+
+ response_dates = json_response.map { |noteable| noteable['updated_at'] }
+
+ expect(json_response.length).to eq(4)
+ expect(response_dates).to eq(response_dates.sort)
+ end
+ end
it "returns an array of snippet notes" do
get api("/projects/#{project.id}/snippets/#{snippet.id}/notes", user)
@@ -108,6 +191,47 @@ describe API::Notes do
end
context "when noteable is a Merge Request" do
+ context 'sorting' do
+ before do
+ create_list(:note, 3, noteable: merge_request, project: project, author: user)
+ end
+
+ it 'sorts by created_at in descending order by default' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/notes", user)
+
+ response_dates = json_response.map { |noteable| noteable['created_at'] }
+
+ expect(json_response.length).to eq(4)
+ expect(response_dates).to eq(response_dates.sort.reverse)
+ end
+
+ it 'sorts by created_at in ascending order when requested' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/notes?sort=asc", user)
+
+ response_dates = json_response.map { |noteable| noteable['created_at'] }
+
+ expect(json_response.length).to eq(4)
+ expect(response_dates).to eq(response_dates.sort)
+ end
+
+ it 'sorts by updated_at in descending order when requested' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/notes?order_by=updated_at", user)
+
+ response_dates = json_response.map { |noteable| noteable['updated_at'] }
+
+ expect(json_response.length).to eq(4)
+ expect(response_dates).to eq(response_dates.sort.reverse)
+ end
+
+ it 'sorts by updated_at in ascending order when requested' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/notes??order_by=updated_at&sort=asc", user)
+
+ response_dates = json_response.map { |noteable| noteable['updated_at'] }
+
+ expect(json_response.length).to eq(4)
+ expect(response_dates).to eq(response_dates.sort)
+ end
+ end
it "returns an array of merge_requests notes" do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/notes", user)
diff --git a/spec/requests/api/pages_domains_spec.rb b/spec/requests/api/pages_domains_spec.rb
index d13b3a958c9..d412b045e9f 100644
--- a/spec/requests/api/pages_domains_spec.rb
+++ b/spec/requests/api/pages_domains_spec.rb
@@ -3,6 +3,7 @@ require 'rails_helper'
describe API::PagesDomains do
set(:project) { create(:project) }
set(:user) { create(:user) }
+ set(:admin) { create(:admin) }
set(:pages_domain) { create(:pages_domain, domain: 'www.domain.test', project: project) }
set(:pages_domain_secure) { create(:pages_domain, :with_certificate, :with_key, domain: 'ssl.domain.test', project: project) }
@@ -23,12 +24,49 @@ describe API::PagesDomains do
allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
end
+ describe 'GET /pages/domains' do
+ context 'when pages is disabled' do
+ before do
+ allow(Gitlab.config.pages).to receive(:enabled).and_return(false)
+ end
+
+ it_behaves_like '404 response' do
+ let(:request) { get api('/pages/domains', admin) }
+ end
+ end
+
+ context 'when pages is enabled' do
+ context 'when authenticated as an admin' do
+ it 'returns all pages domains with pagination' do
+ get api('/pages/domains', admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/pages_domain_basics')
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(3)
+ expect(json_response.last).to have_key('domain')
+ expect(json_response.last).to have_key('certificate_expiration')
+ expect(json_response.last['certificate_expiration']['expired']).to be true
+ expect(json_response.first).not_to have_key('certificate_expiration')
+ end
+ end
+
+ context 'when authenticated as a non-member' do
+ it_behaves_like '403 response' do
+ let(:request) { get api('/pages/domains', user) }
+ end
+ end
+ end
+ end
+
describe 'GET /projects/:project_id/pages/domains' do
shared_examples_for 'get pages domains' do
it 'returns paginated pages domains' do
get api(route, user)
expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/pages_domains')
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(3)
@@ -99,6 +137,7 @@ describe API::PagesDomains do
get api(route_domain, user)
expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(json_response['domain']).to eq(pages_domain.domain)
expect(json_response['url']).to eq(pages_domain.url)
expect(json_response['certificate']).to be_nil
@@ -108,6 +147,7 @@ describe API::PagesDomains do
get api(route_secure_domain, user)
expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(json_response['domain']).to eq(pages_domain_secure.domain)
expect(json_response['url']).to eq(pages_domain_secure.url)
expect(json_response['certificate']['subject']).to eq(pages_domain_secure.subject)
@@ -118,6 +158,7 @@ describe API::PagesDomains do
get api(route_expired_domain, user)
expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(json_response['certificate']['expired']).to be true
end
end
@@ -187,6 +228,7 @@ describe API::PagesDomains do
pages_domain = PagesDomain.find_by(domain: json_response['domain'])
expect(response).to have_gitlab_http_status(201)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain.domain).to eq(params[:domain])
expect(pages_domain.certificate).to be_nil
expect(pages_domain.key).to be_nil
@@ -197,6 +239,7 @@ describe API::PagesDomains do
pages_domain = PagesDomain.find_by(domain: json_response['domain'])
expect(response).to have_gitlab_http_status(201)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain.domain).to eq(params_secure[:domain])
expect(pages_domain.certificate).to eq(params_secure[:certificate])
expect(pages_domain.key).to eq(params_secure[:key])
@@ -270,6 +313,7 @@ describe API::PagesDomains do
pages_domain_secure.reload
expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain_secure.certificate).to be_nil
expect(pages_domain_secure.key).to be_nil
end
@@ -279,6 +323,7 @@ describe API::PagesDomains do
pages_domain.reload
expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain.certificate).to eq(params_secure[:certificate])
expect(pages_domain.key).to eq(params_secure[:key])
end
@@ -288,6 +333,7 @@ describe API::PagesDomains do
pages_domain_expired.reload
expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain_expired.certificate).to eq(params_secure[:certificate])
expect(pages_domain_expired.key).to eq(params_secure[:key])
end
@@ -297,6 +343,7 @@ describe API::PagesDomains do
pages_domain_secure.reload
expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain_secure.certificate).to eq(params_secure_nokey[:certificate])
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index abe367d4e11..a41345da05b 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -50,6 +50,12 @@ describe API::Projects do
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(*projects.map(&:id))
end
+
+ it 'returns the proper security headers' do
+ get api('/projects', current_user), filter
+
+ expect(response).to include_security_headers
+ end
end
shared_examples_for 'projects response without N + 1 queries' do
@@ -431,6 +437,7 @@ describe API::Projects do
project.each_pair do |k, v|
next if %i[has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled].include?(k)
+
expect(json_response[k.to_s]).to eq(v)
end
@@ -637,6 +644,7 @@ describe API::Projects do
expect(response).to have_gitlab_http_status(201)
project.each_pair do |k, v|
next if %i[has_external_issue_tracker path].include?(k)
+
expect(json_response[k.to_s]).to eq(v)
end
end
diff --git a/spec/requests/api/protected_branches_spec.rb b/spec/requests/api/protected_branches_spec.rb
index 07d7f96bd70..10e6a3c07c8 100644
--- a/spec/requests/api/protected_branches_spec.rb
+++ b/spec/requests/api/protected_branches_spec.rb
@@ -95,6 +95,12 @@ describe API::ProtectedBranches do
describe 'POST /projects/:id/protected_branches' do
let(:branch_name) { 'new_branch' }
+ let(:post_endpoint) { api("/projects/#{project.id}/protected_branches", user) }
+
+ def expect_protection_to_be_successful
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['name']).to eq(branch_name)
+ end
context 'when authenticated as a master' do
before do
@@ -102,7 +108,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch' do
- post api("/projects/#{project.id}/protected_branches", user), name: branch_name
+ post post_endpoint, name: branch_name
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -111,8 +117,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch and developers can push' do
- post api("/projects/#{project.id}/protected_branches", user),
- name: branch_name, push_access_level: 30
+ post post_endpoint, name: branch_name, push_access_level: 30
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -121,8 +126,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch and developers can merge' do
- post api("/projects/#{project.id}/protected_branches", user),
- name: branch_name, merge_access_level: 30
+ post post_endpoint, name: branch_name, merge_access_level: 30
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -131,8 +135,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch and developers can push and merge' do
- post api("/projects/#{project.id}/protected_branches", user),
- name: branch_name, push_access_level: 30, merge_access_level: 30
+ post post_endpoint, name: branch_name, push_access_level: 30, merge_access_level: 30
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -141,8 +144,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch and no one can push' do
- post api("/projects/#{project.id}/protected_branches", user),
- name: branch_name, push_access_level: 0
+ post post_endpoint, name: branch_name, push_access_level: 0
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -151,8 +153,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch and no one can merge' do
- post api("/projects/#{project.id}/protected_branches", user),
- name: branch_name, merge_access_level: 0
+ post post_endpoint, name: branch_name, merge_access_level: 0
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -161,8 +162,7 @@ describe API::ProtectedBranches do
end
it 'protects a single branch and no one can push or merge' do
- post api("/projects/#{project.id}/protected_branches", user),
- name: branch_name, push_access_level: 0, merge_access_level: 0
+ post post_endpoint, name: branch_name, push_access_level: 0, merge_access_level: 0
expect(response).to have_gitlab_http_status(201)
expect(json_response['name']).to eq(branch_name)
@@ -171,7 +171,8 @@ describe API::ProtectedBranches do
end
it 'returns a 409 error if the same branch is protected twice' do
- post api("/projects/#{project.id}/protected_branches", user), name: protected_name
+ post post_endpoint, name: protected_name
+
expect(response).to have_gitlab_http_status(409)
end
@@ -179,10 +180,9 @@ describe API::ProtectedBranches do
let(:branch_name) { 'feature/*' }
it "protects multiple branches with a wildcard in the name" do
- post api("/projects/#{project.id}/protected_branches", user), name: branch_name
+ post post_endpoint, name: branch_name
- expect(response).to have_gitlab_http_status(201)
- expect(json_response['name']).to eq(branch_name)
+ expect_protection_to_be_successful
expect(json_response['push_access_levels'][0]['access_level']).to eq(Gitlab::Access::MASTER)
expect(json_response['merge_access_levels'][0]['access_level']).to eq(Gitlab::Access::MASTER)
end
@@ -195,7 +195,7 @@ describe API::ProtectedBranches do
end
it "returns a 403 error if guest" do
- post api("/projects/#{project.id}/protected_branches/", user), name: branch_name
+ post post_endpoint, name: branch_name
expect(response).to have_gitlab_http_status(403)
end
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 671b988ec91..3406b17401f 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -947,7 +947,7 @@ describe API::Runner do
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow a temp file to be passed from any path
- allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
+ allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
end
context 'when job has been erased' do
@@ -987,15 +987,6 @@ describe API::Runner do
it_behaves_like 'successful artifacts upload'
end
- context 'when updates artifact' do
- before do
- upload_artifacts(file_upload2, headers_with_token)
- upload_artifacts(file_upload, headers_with_token)
- end
-
- it_behaves_like 'successful artifacts upload'
- end
-
context 'when using runners token' do
it 'responds with forbidden' do
upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
@@ -1108,7 +1099,7 @@ describe API::Runner do
expect(response).to have_gitlab_http_status(201)
expect(stored_artifacts_file.original_filename).to eq(artifacts.original_filename)
expect(stored_metadata_file.original_filename).to eq(metadata.original_filename)
- expect(stored_artifacts_size).to eq(71759)
+ expect(stored_artifacts_size).to eq(72821)
end
end
@@ -1133,7 +1124,7 @@ describe API::Runner do
# by configuring this path we allow files to be passed from @tmpdir only,
# but all temporary files are stored in the system tmp directory
@tmpdir = Dir.mktmpdir
- allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
+ allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
end
after do
diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb
index fe38a7b3251..ec5cad4f4fd 100644
--- a/spec/requests/api/runners_spec.rb
+++ b/spec/requests/api/runners_spec.rb
@@ -354,6 +354,140 @@ describe API::Runners do
end
end
+ describe 'GET /runners/:id/jobs' do
+ set(:job_1) { create(:ci_build) }
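+ # job_1 has no runner assigned, so it should not appear in either runner's job list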
+ let!(:job_2) { create(:ci_build, :running, runner: shared_runner, project: project) }
+ let!(:job_3) { create(:ci_build, :failed, runner: shared_runner, project: project) }
+ let!(:job_4) { create(:ci_build, :running, runner: specific_runner, project: project) }
+ let!(:job_5) { create(:ci_build, :failed, runner: specific_runner, project: project) }
+
+ context 'admin user' do
+ context 'when runner exists' do
+ context 'when runner is shared' do
+ it 'returns jobs' do
+ get api("/runners/#{shared_runner.id}/jobs", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to be_an(Array)
+ expect(json_response.length).to eq(2)
+ end
+ end
+
+ context 'when runner is specific' do
+ it 'returns jobs' do
+ get api("/runners/#{specific_runner.id}/jobs", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to be_an(Array)
+ expect(json_response.length).to eq(2)
+ end
+ end
+
+ context 'when valid status is provided' do
+ it 'returns filtered jobs' do
+ get api("/runners/#{specific_runner.id}/jobs?status=failed", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to be_an(Array)
+ expect(json_response.length).to eq(1)
+ expect(json_response.first).to include('id' => job_5.id)
+ end
+ end
+
+ context 'when invalid status is provided' do
+ it 'returns 400' do
+ get api("/runners/#{specific_runner.id}/jobs?status=non-existing", admin)
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+ end
+
+ context "when runner doesn't exist" do
+ it 'returns 404' do
+ get api('/runners/9999/jobs', admin)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context "runner project's administrative user" do
+ context 'when runner exists' do
+ context 'when runner is shared' do
+ it 'returns 403' do
+ get api("/runners/#{shared_runner.id}/jobs", user)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'when runner is specific' do
+ it 'returns jobs' do
+ get api("/runners/#{specific_runner.id}/jobs", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to be_an(Array)
+ expect(json_response.length).to eq(2)
+ end
+ end
+
+ context 'when valid status is provided' do
+ it 'returns filtered jobs' do
+ get api("/runners/#{specific_runner.id}/jobs?status=failed", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to be_an(Array)
+ expect(json_response.length).to eq(1)
+ expect(json_response.first).to include('id' => job_5.id)
+ end
+ end
+
+ context 'when invalid status is provided' do
+ it 'returns 400' do
+ get api("/runners/#{specific_runner.id}/jobs?status=non-existing", user)
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+ end
+
+ context "when runner doesn't exist" do
+ it 'returns 404' do
+ get api('/runners/9999/jobs', user)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context 'other authorized user' do
+ it 'does not return jobs' do
+ get api("/runners/#{specific_runner.id}/jobs", user2)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'unauthorized user' do
+ it 'does not return jobs' do
+ get api("/runners/#{specific_runner.id}/jobs")
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
+ end
+
describe 'GET /projects/:id/runners' do
context 'authorized user with master privileges' do
it "returns project's runners" do
diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb
index dfe48e45d49..ba697e2b305 100644
--- a/spec/requests/api/services_spec.rb
+++ b/spec/requests/api/services_spec.rb
@@ -175,4 +175,25 @@ describe API::Services do
end
end
end
+
+ describe 'Mattermost service' do
+ let(:service_name) { 'mattermost' }
+ let(:params) do
+ { webhook: 'https://hook.example.com', username: 'username' }
+ end
+
+ before do
+ project.create_mattermost_service(
+ active: true,
+ properties: params
+ )
+ end
+
+ it 'accepts a username for update' do
+ put api("/projects/#{project.id}/services/mattermost", user), params.merge(username: 'new_username')
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['properties']['username']).to eq('new_username')
+ end
+ end
end
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 5d3e78dd7c8..015d4b9a491 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -10,7 +10,7 @@ describe API::Settings, 'Settings' do
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_a Hash
expect(json_response['default_projects_limit']).to eq(42)
- expect(json_response['password_authentication_enabled']).to be_truthy
+ expect(json_response['password_authentication_enabled_for_web']).to be_truthy
expect(json_response['repository_storages']).to eq(['default'])
expect(json_response['koding_enabled']).to be_falsey
expect(json_response['koding_url']).to be_nil
@@ -37,7 +37,7 @@ describe API::Settings, 'Settings' do
it "updates application settings" do
put api("/application/settings", admin),
default_projects_limit: 3,
- password_authentication_enabled: false,
+ password_authentication_enabled_for_web: false,
repository_storages: ['custom'],
koding_enabled: true,
koding_url: 'http://koding.example.com',
@@ -54,11 +54,11 @@ describe API::Settings, 'Settings' do
dsa_key_restriction: 2048,
ecdsa_key_restriction: 384,
ed25519_key_restriction: 256,
- circuitbreaker_failure_wait_time: 2
+ circuitbreaker_check_interval: 2
expect(response).to have_gitlab_http_status(200)
expect(json_response['default_projects_limit']).to eq(3)
- expect(json_response['password_authentication_enabled']).to be_falsey
+ expect(json_response['password_authentication_enabled_for_web']).to be_falsey
expect(json_response['repository_storages']).to eq(['custom'])
expect(json_response['koding_enabled']).to be_truthy
expect(json_response['koding_url']).to eq('http://koding.example.com')
@@ -75,7 +75,7 @@ describe API::Settings, 'Settings' do
expect(json_response['dsa_key_restriction']).to eq(2048)
expect(json_response['ecdsa_key_restriction']).to eq(384)
expect(json_response['ed25519_key_restriction']).to eq(256)
- expect(json_response['circuitbreaker_failure_wait_time']).to eq(2)
+ expect(json_response['circuitbreaker_check_interval']).to eq(2)
end
end
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 2aeae6f9ec7..2428e63e149 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -510,6 +510,14 @@ describe API::Users do
expect(user.reload.notification_email).to eq('new@email.com')
end
+ it 'skips reconfirmation when requested' do
+ put api("/users/#{user.id}", admin), { skip_reconfirmation: true }
+
+ user.reload
+
+ expect(user.confirmed_at).to be_present
+ end
+
it 'updates user with his own username' do
put api("/users/#{user.id}", admin), username: user.username
diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb
index 3b7d99b84b0..266ae654227 100644
--- a/spec/requests/api/v3/builds_spec.rb
+++ b/spec/requests/api/v3/builds_spec.rb
@@ -435,6 +435,8 @@ describe API::V3::Builds do
describe 'POST /projects/:id/builds/:build_id/erase' do
before do
+ project.add_master(user)
+
post v3_api("/projects/#{project.id}/builds/#{build.id}/erase", user)
end
diff --git a/spec/requests/api/v3/merge_requests_spec.rb b/spec/requests/api/v3/merge_requests_spec.rb
index 91897e5ee01..2e2b9449429 100644
--- a/spec/requests/api/v3/merge_requests_spec.rb
+++ b/spec/requests/api/v3/merge_requests_spec.rb
@@ -314,7 +314,7 @@ describe API::MergeRequests do
context 'forked projects' do
let!(:user2) { create(:user) }
- let!(:forked_project) { fork_project(project, user2) }
+ let!(:forked_project) { fork_project(project, user2, repository: true) }
let!(:unrelated_project) { create(:project, namespace: create(:user).namespace, creator_id: user2.id) }
before do
diff --git a/spec/requests/api/v3/projects_spec.rb b/spec/requests/api/v3/projects_spec.rb
index f62ad747c73..27288b98d1c 100644
--- a/spec/requests/api/v3/projects_spec.rb
+++ b/spec/requests/api/v3/projects_spec.rb
@@ -404,6 +404,7 @@ describe API::V3::Projects do
project.each_pair do |k, v|
next if %i[has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled].include?(k)
+
expect(json_response[k.to_s]).to eq(v)
end
@@ -547,6 +548,7 @@ describe API::V3::Projects do
expect(response).to have_gitlab_http_status(201)
project.each_pair do |k, v|
next if %i[has_external_issue_tracker path].include?(k)
+
expect(json_response[k.to_s]).to eq(v)
end
end
diff --git a/spec/requests/api/v3/settings_spec.rb b/spec/requests/api/v3/settings_spec.rb
index 25fa0a8aabd..985bfbfa09c 100644
--- a/spec/requests/api/v3/settings_spec.rb
+++ b/spec/requests/api/v3/settings_spec.rb
@@ -28,11 +28,11 @@ describe API::V3::Settings, 'Settings' do
it "updates application settings" do
put v3_api("/application/settings", admin),
- default_projects_limit: 3, password_authentication_enabled: false, repository_storage: 'custom', koding_enabled: true, koding_url: 'http://koding.example.com',
+ default_projects_limit: 3, password_authentication_enabled_for_web: false, repository_storage: 'custom', koding_enabled: true, koding_url: 'http://koding.example.com',
plantuml_enabled: true, plantuml_url: 'http://plantuml.example.com'
expect(response).to have_gitlab_http_status(200)
expect(json_response['default_projects_limit']).to eq(3)
- expect(json_response['password_authentication_enabled']).to be_falsey
+ expect(json_response['password_authentication_enabled_for_web']).to be_falsey
expect(json_response['repository_storage']).to eq('custom')
expect(json_response['repository_storages']).to eq(['custom'])
expect(json_response['koding_enabled']).to be_truthy
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index cd52194033a..a16f98bec36 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -463,7 +463,7 @@ describe 'Git HTTP requests' do
context 'when internal auth is disabled' do
before do
- allow_any_instance_of(ApplicationSetting).to receive(:password_authentication_enabled?) { false }
+ allow_any_instance_of(ApplicationSetting).to receive(:password_authentication_enabled_for_git?) { false }
end
it 'rejects pulls with personal access token error message' do
diff --git a/spec/requests/jwt_controller_spec.rb b/spec/requests/jwt_controller_spec.rb
index 94e04ce5608..6f40a02aaa9 100644
--- a/spec/requests/jwt_controller_spec.rb
+++ b/spec/requests/jwt_controller_spec.rb
@@ -105,7 +105,7 @@ describe JwtController do
context 'when internal auth is disabled' do
it 'rejects the authorization attempt with personal access token message' do
- allow_any_instance_of(ApplicationSetting).to receive(:password_authentication_enabled?) { false }
+ allow_any_instance_of(ApplicationSetting).to receive(:password_authentication_enabled_for_git?) { false }
get '/jwt/auth', parameters, headers
expect(response).to have_gitlab_http_status(401)
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index e3dfecd8898..b5948505701 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -671,6 +671,20 @@ describe 'Git LFS API and storage' do
}
end
+ shared_examples 'pushes new LFS objects' do
+ let(:sample_size) { 150.megabytes }
+ let(:sample_oid) { '91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897' }
+
+ it 'responds with upload hypermedia link' do
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['objects']).to be_kind_of(Array)
+ expect(json_response['objects'].first['oid']).to eq(sample_oid)
+ expect(json_response['objects'].first['size']).to eq(sample_size)
+ expect(json_response['objects'].first['actions']['upload']['href']).to eq("#{Gitlab.config.gitlab.url}/#{project.full_path}.git/gitlab-lfs/objects/#{sample_oid}/#{sample_size}")
+ expect(json_response['objects'].first['actions']['upload']['header']).to eq('Authorization' => authorization)
+ end
+ end
+
describe 'when request is authenticated' do
describe 'when user has project push access' do
let(:authorization) { authorize_user }
@@ -701,27 +715,7 @@ describe 'Git LFS API and storage' do
end
context 'when pushing a lfs object that does not exist' do
- let(:body) do
- {
- 'operation' => 'upload',
- 'objects' => [
- { 'oid' => '91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897',
- 'size' => 1575078 }
- ]
- }
- end
-
- it 'responds with status 200' do
- expect(response).to have_gitlab_http_status(200)
- end
-
- it 'responds with upload hypermedia link' do
- expect(json_response['objects']).to be_kind_of(Array)
- expect(json_response['objects'].first['oid']).to eq("91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897")
- expect(json_response['objects'].first['size']).to eq(1575078)
- expect(json_response['objects'].first['actions']['upload']['href']).to eq("#{Gitlab.config.gitlab.url}/#{project.full_path}.git/gitlab-lfs/objects/91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897/1575078")
- expect(json_response['objects'].first['actions']['upload']['header']).to eq('Authorization' => authorization)
- end
+ it_behaves_like 'pushes new LFS objects'
end
context 'when pushing one new and one existing lfs object' do
@@ -802,6 +796,17 @@ describe 'Git LFS API and storage' do
end
end
end
+
+ context 'when deploy key has project push access' do
+ let(:key) { create(:deploy_key, can_push: true) }
+ let(:authorization) { authorize_deploy_key }
+
+ let(:update_user_permissions) do
+ project.deploy_keys << key
+ end
+
+ it_behaves_like 'pushes new LFS objects'
+ end
end
context 'when user is not authenticated' do
diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb
index 0b1f8ce6f6d..1a5ad9b04e4 100644
--- a/spec/requests/openid_connect_spec.rb
+++ b/spec/requests/openid_connect_spec.rb
@@ -107,6 +107,15 @@ describe 'OpenID Connect requests' do
end
end
+ # These two calls shouldn't actually throw; they should be handled as
+ # unauthorized requests, so we should be able to check the response.
+ #
+ # This was not possible due to an issue with Warden:
+ # https://github.com/hassox/warden/pull/162
+ #
+ # When the patch gets merged and we update Warden, these specs will need to
+ # be updated to check the response instead of a raised exception.
+ # https://gitlab.com/gitlab-org/gitlab-ce/issues/40218
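+ #
+ # For context, Warden aborts unauthenticated requests with Ruby's
+ # throw/catch rather than an exception, roughly:
+ #
+ #   catch(:warden) do     # normally installed by Warden::Manager middleware
+ #     throw :warden       # thrown by Warden when authentication fails
+ #   end
+ #
+ # In these request specs the middleware's catch block is absent, so the
+ # throw surfaces as an UncaughtThrowError.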
context 'when user is blocked' do
it 'returns authentication error' do
access_grant
@@ -114,7 +123,7 @@ describe 'OpenID Connect requests' do
expect do
request_access_token
- end.to throw_symbol :warden
+ end.to raise_error UncaughtThrowError
end
end
@@ -125,7 +134,7 @@ describe 'OpenID Connect requests' do
expect do
request_access_token
- end.to throw_symbol :warden
+ end.to raise_error UncaughtThrowError
end
end
end
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
new file mode 100644
index 00000000000..0fec14d0cce
--- /dev/null
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -0,0 +1,362 @@
+require 'spec_helper'
+
+describe 'Rack Attack global throttles' do
+ let(:settings) { Gitlab::CurrentSettings.current_application_settings }
+
+ # Start with really high limits and override them with low limits to ensure
+ # the right settings are being exercised
+ let(:settings_to_set) do
+ {
+ throttle_unauthenticated_requests_per_period: 100,
+ throttle_unauthenticated_period_in_seconds: 1,
+ throttle_authenticated_api_requests_per_period: 100,
+ throttle_authenticated_api_period_in_seconds: 1,
+ throttle_authenticated_web_requests_per_period: 100,
+ throttle_authenticated_web_period_in_seconds: 1
+ }
+ end
+
+ let(:requests_per_period) { 1 }
+ let(:period_in_seconds) { 10000 }
+ let(:period) { period_in_seconds.seconds }
+
+ let(:url_that_does_not_require_authentication) { '/users/sign_in' }
+ let(:url_that_requires_authentication) { '/dashboard/snippets' }
+ let(:api_partial_url) { '/todos' }
+
+ around do |example|
+ # Use a real cache store instead of the test environment's :null_store so the throttles can increment
+ Rack::Attack.cache.store = ActiveSupport::Cache::MemoryStore.new
+
+ # Make time-dependent tests deterministic
+ Timecop.freeze { example.run }
+
+ Rack::Attack.cache.store = Rails.cache
+ end
+
+ # Requires let variables:
+ # * throttle_setting_prefix (e.g. "throttle_authenticated_api" or "throttle_authenticated_web")
+ # * get_args
+ # * other_user_get_args
+ shared_examples_for 'rate-limited token-authenticated requests' do
+ before do
+ # Set low limits
+ settings_to_set[:"#{throttle_setting_prefix}_requests_per_period"] = requests_per_period
+ settings_to_set[:"#{throttle_setting_prefix}_period_in_seconds"] = period_in_seconds
+ end
+
+ context 'when the throttle is enabled' do
+ before do
+ settings_to_set[:"#{throttle_setting_prefix}_enabled"] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'rejects requests over the rate limit' do
+ # At first, allow requests under the rate limit.
+ requests_per_period.times do
+ get(*get_args)
+ expect(response).to have_http_status 200
+ end
+
+ # the last straw
+ expect_rejection { get(*get_args) }
+ end
+
+ it 'allows requests after throttling and then waiting for the next period' do
+ requests_per_period.times do
+ get(*get_args)
+ expect(response).to have_http_status 200
+ end
+
+ expect_rejection { get(*get_args) }
+
+ Timecop.travel(period.from_now) do
+ requests_per_period.times do
+ get(*get_args)
+ expect(response).to have_http_status 200
+ end
+
+ expect_rejection { get(*get_args) }
+ end
+ end
+
+ it 'counts requests from different users separately, even from the same IP' do
+ requests_per_period.times do
+ get(*get_args)
+ expect(response).to have_http_status 200
+ end
+
+ # would be over the limit if this wasn't a different user
+ get(*other_user_get_args)
+ expect(response).to have_http_status 200
+ end
+
+ it 'counts all requests from the same user, even via different IPs' do
+ requests_per_period.times do
+ get(*get_args)
+ expect(response).to have_http_status 200
+ end
+
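+ # simulate the next request arriving from a different IP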
+ expect_any_instance_of(Rack::Attack::Request).to receive(:ip).and_return('1.2.3.4')
+
+ expect_rejection { get(*get_args) }
+ end
+ end
+
+ context 'when the throttle is disabled' do
+ before do
+ settings_to_set[:"#{throttle_setting_prefix}_enabled"] = false
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'allows requests over the rate limit' do
+ (1 + requests_per_period).times do
+ get(*get_args)
+ expect(response).to have_http_status 200
+ end
+ end
+ end
+ end
+
+ describe 'unauthenticated requests' do
+ before do
+ # Set low limits
+ settings_to_set[:throttle_unauthenticated_requests_per_period] = requests_per_period
+ settings_to_set[:throttle_unauthenticated_period_in_seconds] = period_in_seconds
+ end
+
+ context 'when the throttle is enabled' do
+ before do
+ settings_to_set[:throttle_unauthenticated_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'rejects requests over the rate limit' do
+ # At first, allow requests under the rate limit.
+ requests_per_period.times do
+ get url_that_does_not_require_authentication
+ expect(response).to have_http_status 200
+ end
+
+ # the last straw
+ expect_rejection { get url_that_does_not_require_authentication }
+ end
+
+ it 'allows requests after throttling and then waiting for the next period' do
+ requests_per_period.times do
+ get url_that_does_not_require_authentication
+ expect(response).to have_http_status 200
+ end
+
+ expect_rejection { get url_that_does_not_require_authentication }
+
+ Timecop.travel(period.from_now) do
+ requests_per_period.times do
+ get url_that_does_not_require_authentication
+ expect(response).to have_http_status 200
+ end
+
+ expect_rejection { get url_that_does_not_require_authentication }
+ end
+ end
+
+ it 'counts requests from different IPs separately' do
+ requests_per_period.times do
+ get url_that_does_not_require_authentication
+ expect(response).to have_http_status 200
+ end
+
+ expect_any_instance_of(Rack::Attack::Request).to receive(:ip).and_return('1.2.3.4')
+
+ # would be over the limit for the same IP
+ get url_that_does_not_require_authentication
+ expect(response).to have_http_status 200
+ end
+ end
+
+ context 'when the throttle is disabled' do
+ before do
+ settings_to_set[:throttle_unauthenticated_enabled] = false
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'allows requests over the rate limit' do
+ (1 + requests_per_period).times do
+ get url_that_does_not_require_authentication
+ expect(response).to have_http_status 200
+ end
+ end
+ end
+ end
+
+ describe 'API requests authenticated with personal access token', :api do
+ let(:user) { create(:user) }
+ let(:token) { create(:personal_access_token, user: user) }
+ let(:other_user) { create(:user) }
+ let(:other_user_token) { create(:personal_access_token, user: other_user) }
+ let(:throttle_setting_prefix) { 'throttle_authenticated_api' }
+
+ context 'with the token in the query string' do
+ let(:get_args) { [api(api_partial_url, personal_access_token: token)] }
+ let(:other_user_get_args) { [api(api_partial_url, personal_access_token: other_user_token)] }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+
+ context 'with the token in the headers' do
+ let(:get_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(token)) }
+ let(:other_user_get_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(other_user_token)) }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+ end
+
+ describe 'API requests authenticated with OAuth token', :api do
+ let(:user) { create(:user) }
+ let(:application) { Doorkeeper::Application.create!(name: "MyApp", redirect_uri: "https://app.com", owner: user) }
+ let(:token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: "api") }
+ let(:other_user) { create(:user) }
+ let(:other_user_application) { Doorkeeper::Application.create!(name: "MyApp", redirect_uri: "https://app.com", owner: other_user) }
+ let(:other_user_token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: other_user.id, scopes: "api") }
+ let(:throttle_setting_prefix) { 'throttle_authenticated_api' }
+
+ context 'with the token in the query string' do
+ let(:get_args) { [api(api_partial_url, oauth_access_token: token)] }
+ let(:other_user_get_args) { [api(api_partial_url, oauth_access_token: other_user_token)] }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+
+ context 'with the token in the headers' do
+ let(:get_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(token)) }
+ let(:other_user_get_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(other_user_token)) }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+ end
+
+ describe '"web" (non-API) requests authenticated with RSS token' do
+ let(:user) { create(:user) }
+ let(:other_user) { create(:user) }
+ let(:throttle_setting_prefix) { 'throttle_authenticated_web' }
+
+ context 'with the token in the query string' do
+ let(:get_args) { [rss_url(user), nil] }
+ let(:other_user_get_args) { [rss_url(other_user), nil] }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+ end
+
+ describe 'web requests authenticated with regular login' do
+ let(:user) { create(:user) }
+
+ before do
+ login_as(user)
+
+ # Set low limits
+ settings_to_set[:throttle_authenticated_web_requests_per_period] = requests_per_period
+ settings_to_set[:throttle_authenticated_web_period_in_seconds] = period_in_seconds
+ end
+
+ context 'when the throttle is enabled' do
+ before do
+ settings_to_set[:throttle_authenticated_web_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'rejects requests over the rate limit' do
+ # At first, allow requests under the rate limit.
+ requests_per_period.times do
+ get url_that_requires_authentication
+ expect(response).to have_http_status 200
+ end
+
+ # the last straw
+ expect_rejection { get url_that_requires_authentication }
+ end
+
+ it 'allows requests after throttling and then waiting for the next period' do
+ requests_per_period.times do
+ get url_that_requires_authentication
+ expect(response).to have_http_status 200
+ end
+
+ expect_rejection { get url_that_requires_authentication }
+
+ Timecop.travel(period.from_now) do
+ requests_per_period.times do
+ get url_that_requires_authentication
+ expect(response).to have_http_status 200
+ end
+
+ expect_rejection { get url_that_requires_authentication }
+ end
+ end
+
+ it 'counts requests from different users separately, even from the same IP' do
+ requests_per_period.times do
+ get url_that_requires_authentication
+ expect(response).to have_http_status 200
+ end
+
+ # would be over the limit if this wasn't a different user
+ login_as(create(:user))
+
+ get url_that_requires_authentication
+ expect(response).to have_http_status 200
+ end
+
+ it 'counts all requests from the same user, even via different IPs' do
+ requests_per_period.times do
+ get url_that_requires_authentication
+ expect(response).to have_http_status 200
+ end
+
+ expect_any_instance_of(Rack::Attack::Request).to receive(:ip).and_return('1.2.3.4')
+
+ expect_rejection { get url_that_requires_authentication }
+ end
+ end
+
+ context 'when the throttle is disabled' do
+ before do
+ settings_to_set[:throttle_authenticated_web_enabled] = false
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'allows requests over the rate limit' do
+ (1 + requests_per_period).times do
+ get url_that_requires_authentication
+ expect(response).to have_http_status 200
+ end
+ end
+ end
+ end
+
+ def api_get_args_with_token_headers(partial_url, token_headers)
+ ["/api/#{API::API.version}#{partial_url}", nil, token_headers]
+ end
+
+ def rss_url(user)
+ "/dashboard/projects.atom?rss_token=#{user.rss_token}"
+ end
+
+ def private_token_headers(user)
+ { 'HTTP_PRIVATE_TOKEN' => user.private_token }
+ end
+
+ def personal_access_token_headers(personal_access_token)
+ { 'HTTP_PRIVATE_TOKEN' => personal_access_token.token }
+ end
+
+ def oauth_token_headers(oauth_access_token)
+ { 'AUTHORIZATION' => "Bearer #{oauth_access_token.token}" }
+ end
+
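+ # Rack::Attack's default throttled response is HTTP 429 (Too Many Requests),
+ # which is what this helper asserts after running the block.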
+ def expect_rejection(&block)
+ yield
+
+ expect(response).to have_http_status(429)
+ end
+end
diff --git a/spec/routing/group_routing_spec.rb b/spec/routing/group_routing_spec.rb
index 7a4c8304e62..71788028cbf 100644
--- a/spec/routing/group_routing_spec.rb
+++ b/spec/routing/group_routing_spec.rb
@@ -39,13 +39,19 @@ describe "Groups", "routing" do
describe 'legacy redirection' do
describe 'labels' do
- it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/labels", "/groups/complex.group-namegit/-/labels/" do
+ it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/labels", "/groups/complex.group-namegit/-/labels" do
let(:resource) { create(:group, parent: group, path: 'labels') }
end
+
+ context 'when requesting JSON' do
+ it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/labels.json", "/groups/complex.group-namegit/-/labels.json" do
+ let(:resource) { create(:group, parent: group, path: 'labels') }
+ end
+ end
end
describe 'group_members' do
- it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/group_members", "/groups/complex.group-namegit/-/group_members/" do
+ it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/group_members", "/groups/complex.group-namegit/-/group_members" do
let(:resource) { create(:group, parent: group, path: 'group_members') }
end
end
@@ -60,7 +66,7 @@ describe "Groups", "routing" do
end
describe 'milestones' do
- it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/milestones", "/groups/complex.group-namegit/-/milestones/" do
+ it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/milestones", "/groups/complex.group-namegit/-/milestones" do
let(:resource) { create(:group, parent: group, path: 'milestones') }
end
@@ -76,18 +82,18 @@ describe "Groups", "routing" do
end
context 'with a query string' do
- it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/milestones?hello=world", "/groups/complex.group-namegit/-/milestones/?hello=world" do
+ it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/milestones?hello=world", "/groups/complex.group-namegit/-/milestones?hello=world" do
let(:resource) { create(:group, parent: group, path: 'milestones') }
end
- it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/milestones?milestones=/milestones", "/groups/complex.group-namegit/-/milestones/?milestones=/milestones" do
+ it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/milestones?milestones=/milestones", "/groups/complex.group-namegit/-/milestones?milestones=/milestones" do
let(:resource) { create(:group, parent: group, path: 'milestones') }
end
end
end
describe 'edit' do
- it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/edit", "/groups/complex.group-namegit/-/edit/" do
+ it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/edit", "/groups/complex.group-namegit/-/edit" do
let(:resource) do
pending('still rejected because of the wildcard reserved word')
create(:group, parent: group, path: 'edit')
@@ -96,29 +102,29 @@ describe "Groups", "routing" do
end
describe 'issues' do
- it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/issues", "/groups/complex.group-namegit/-/issues/" do
+ it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/issues", "/groups/complex.group-namegit/-/issues" do
let(:resource) { create(:group, parent: group, path: 'issues') }
end
end
describe 'merge_requests' do
- it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/merge_requests", "/groups/complex.group-namegit/-/merge_requests/" do
+ it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/merge_requests", "/groups/complex.group-namegit/-/merge_requests" do
let(:resource) { create(:group, parent: group, path: 'merge_requests') }
end
end
describe 'projects' do
- it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/projects", "/groups/complex.group-namegit/-/projects/" do
+ it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/projects", "/groups/complex.group-namegit/-/projects" do
let(:resource) { create(:group, parent: group, path: 'projects') }
end
end
describe 'activity' do
- it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/activity", "/groups/complex.group-namegit/-/activity/" do
+ it_behaves_like 'redirecting a legacy path', "/groups/complex.group-namegit/activity", "/groups/complex.group-namegit/-/activity" do
let(:resource) { create(:group, parent: group, path: 'activity') }
end
- it_behaves_like 'redirecting a legacy path', "/groups/activity/activity", "/groups/activity/-/activity/" do
+ it_behaves_like 'redirecting a legacy path', "/groups/activity/activity", "/groups/activity/-/activity" do
let!(:parent) { create(:group, path: 'activity') }
let(:resource) { create(:group, parent: parent, path: 'activity') }
end
diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb
index 32aa6e5ad52..91aefa84d0e 100644
--- a/spec/routing/routing_spec.rb
+++ b/spec/routing/routing_spec.rb
@@ -257,8 +257,10 @@ describe "Authentication", "routing" do
expect(post("/users/sign_in")).to route_to('sessions#create')
end
- it "DELETE /users/sign_out" do
- expect(delete("/users/sign_out")).to route_to('sessions#destroy')
+ # sign_out with GET instead of DELETE facilitates ad-hoc single-sign-out processes
+ # (https://gitlab.com/gitlab-org/gitlab-ce/issues/39708)
+ it "GET /users/sign_out" do
+ expect(get("/users/sign_out")).to route_to('sessions#destroy')
end
it "POST /users/password" do
diff --git a/spec/rubocop/cop/line_break_after_guard_clauses_spec.rb b/spec/rubocop/cop/line_break_after_guard_clauses_spec.rb
new file mode 100644
index 00000000000..8899dc85384
--- /dev/null
+++ b/spec/rubocop/cop/line_break_after_guard_clauses_spec.rb
@@ -0,0 +1,160 @@
+require 'spec_helper'
+require 'rubocop'
+require 'rubocop/rspec/support'
+require_relative '../../../rubocop/cop/line_break_after_guard_clauses'
+
+describe RuboCop::Cop::LineBreakAfterGuardClauses do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
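+ # As the examples below show, the cop requires a blank line after a guard
+ # clause (return/fail/raise/next/break/throw with a trailing if/unless),
+ # except when the clause is immediately followed by end, elsif, else, when,
+ # rescue, ensure, or another guard clause.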
+ shared_examples 'examples with guard clause' do |title|
+ %w[if unless].each do |conditional|
+ it "flags violation for #{title} #{conditional} without line breaks" do
+ source = <<~RUBY
+ #{title} #{conditional} condition
+ do_stuff
+ RUBY
+ inspect_source(cop, source)
+
+ expect(cop.offenses.size).to eq(1)
+ offense = cop.offenses.first
+
+ expect(offense.line).to eq(1)
+ expect(cop.highlights).to eq(["#{title} #{conditional} condition"])
+ expect(offense.message).to eq('Add a line break after guard clauses')
+ end
+
+ it "doesn't flag violation for #{title} #{conditional} with line break" do
+ source = <<~RUBY
+ #{title} #{conditional} condition
+
+ do_stuff
+ RUBY
+ inspect_source(cop, source)
+
+ expect(cop.offenses).to be_empty
+ end
+
+ it "doesn't flag violation for #{title} #{conditional} on multiple lines without line break" do
+ source = <<~RUBY
+ #{conditional} condition
+ #{title}
+ end
+ do_stuff
+ RUBY
+ inspect_source(cop, source)
+
+ expect(cop.offenses).to be_empty
+ end
+
+ it "doesn't flag violation for #{title} #{conditional} without line breaks when followed by end keyword" do
+ source = <<~RUBY
+ def test
+ #{title} #{conditional} condition
+ end
+ RUBY
+ inspect_source(cop, source)
+
+ expect(cop.offenses).to be_empty
+ end
+
+ it "doesn't flag violation for #{title} #{conditional} without line breaks when followed by elsif keyword" do
+ source = <<~RUBY
+ if model
+ #{title} #{conditional} condition
+ elsif
+ do_something
+ end
+ RUBY
+ inspect_source(cop, source)
+
+ expect(cop.offenses).to be_empty
+ end
+
+ it "doesn't flag violation for #{title} #{conditional} without line breaks when followed by else keyword" do
+ source = <<~RUBY
+ if model
+ #{title} #{conditional} condition
+ else
+ do_something
+ end
+ RUBY
+ inspect_source(cop, source)
+
+ expect(cop.offenses).to be_empty
+ end
+
+ it "doesn't flag violation for #{title} #{conditional} without line breaks when followed by when keyword" do
+ source = <<~RUBY
+ case model
+ when condition_a
+ #{title} #{conditional} condition
+ when condition_b
+ do_something
+ end
+ RUBY
+ inspect_source(cop, source)
+
+ expect(cop.offenses).to be_empty
+ end
+
+ it "doesn't flag violation for #{title} #{conditional} without line breaks when followed by rescue keyword" do
+ source = <<~RUBY
+ begin
+ #{title} #{conditional} condition
+ rescue StandardError
+ do_something
+ end
+ RUBY
+ inspect_source(cop, source)
+
+ expect(cop.offenses).to be_empty
+ end
+
+ it "doesn't flag violation for #{title} #{conditional} without line breaks when followed by ensure keyword" do
+ source = <<~RUBY
+ def foo
+ #{title} #{conditional} condition
+ ensure
+ do_something
+ end
+ RUBY
+ inspect_source(cop, source)
+
+ expect(cop.offenses).to be_empty
+ end
+
+ it "doesn't flag violation for #{title} #{conditional} without line breaks when followed by another guard clause" do
+ source = <<~RUBY
+ #{title} #{conditional} condition
+ #{title} #{conditional} condition
+
+ do_stuff
+ RUBY
+ inspect_source(cop, source)
+
+ expect(cop.offenses).to be_empty
+ end
+
+ it "autocorrects #{title} #{conditional} guard clauses without line break" do
+ source = <<~RUBY
+ #{title} #{conditional} condition
+ do_stuff
+ RUBY
+ autocorrected = autocorrect_source(cop, source)
+
+ expected_source = <<~RUBY
+ #{title} #{conditional} condition
+
+ do_stuff
+ RUBY
+ expect(autocorrected).to eql(expected_source)
+ end
+ end
+ end
+
+ %w[return fail raise next break throw].each do |example|
+ it_behaves_like 'examples with guard clause', example
+ end
+end
diff --git a/spec/rubocop/cop/migration/add_column_with_default_to_large_table_spec.rb b/spec/rubocop/cop/migration/add_column_with_default_to_large_table_spec.rb
deleted file mode 100644
index 07cb3fc4a2e..00000000000
--- a/spec/rubocop/cop/migration/add_column_with_default_to_large_table_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-require 'spec_helper'
-
-require 'rubocop'
-require 'rubocop/rspec/support'
-
-require_relative '../../../../rubocop/cop/migration/add_column_with_default_to_large_table'
-
-describe RuboCop::Cop::Migration::AddColumnWithDefaultToLargeTable do
- include CopHelper
-
- subject(:cop) { described_class.new }
-
- context 'in migration' do
- before do
- allow(cop).to receive(:in_migration?).and_return(true)
- end
-
- described_class::LARGE_TABLES.each do |table|
- it "registers an offense for the #{table} table" do
- inspect_source(cop, "add_column_with_default :#{table}, :column, default: true")
-
- aggregate_failures do
- expect(cop.offenses.size).to eq(1)
- expect(cop.offenses.map(&:line)).to eq([1])
- end
- end
- end
-
- it 'registers no offense for non-blacklisted tables' do
- inspect_source(cop, "add_column_with_default :table, :column, default: true")
-
- expect(cop.offenses).to be_empty
- end
- end
-
- context 'outside of migration' do
- it 'registers no offense' do
- table = described_class::LARGE_TABLES.sample
- inspect_source(cop, "add_column_with_default :#{table}, :column, default: true")
-
- expect(cop.offenses).to be_empty
- end
- end
-end
diff --git a/spec/rubocop/cop/migration/update_large_table_spec.rb b/spec/rubocop/cop/migration/update_large_table_spec.rb
new file mode 100644
index 00000000000..17b19e139e4
--- /dev/null
+++ b/spec/rubocop/cop/migration/update_large_table_spec.rb
@@ -0,0 +1,69 @@
+require 'spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/migration/update_large_table'
+
+describe RuboCop::Cop::Migration::UpdateLargeTable do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
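+ # Per the examples below, the cop flags add_column_with_default and
+ # update_column_in_batches against any LARGE_TABLES entry, but only when
+ # running inside a migration.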
+ context 'in migration' do
+ before do
+ allow(cop).to receive(:in_migration?).and_return(true)
+ end
+
+ shared_examples 'large tables' do |update_method|
+ described_class::LARGE_TABLES.each do |table|
+ it "registers an offense for the #{table} table" do
+ inspect_source(cop, "#{update_method} :#{table}, :column, default: true")
+
+ aggregate_failures do
+ expect(cop.offenses.size).to eq(1)
+ expect(cop.offenses.map(&:line)).to eq([1])
+ end
+ end
+ end
+ end
+
+ context 'for the add_column_with_default method' do
+ include_examples 'large tables', 'add_column_with_default'
+ end
+
+ context 'for the update_column_in_batches method' do
+ include_examples 'large tables', 'update_column_in_batches'
+ end
+
+ it 'registers no offense for non-blacklisted tables' do
+ inspect_source(cop, "add_column_with_default :table, :column, default: true")
+
+ expect(cop.offenses).to be_empty
+ end
+
+ it 'registers no offense for non-blacklisted methods' do
+ table = described_class::LARGE_TABLES.sample
+
+ inspect_source(cop, "some_other_method :#{table}, :column, default: true")
+
+ expect(cop.offenses).to be_empty
+ end
+ end
+
+ context 'outside of migration' do
+ let(:table) { described_class::LARGE_TABLES.sample }
+
+ it 'registers no offense for add_column_with_default' do
+ inspect_source(cop, "add_column_with_default :#{table}, :column, default: true")
+
+ expect(cop.offenses).to be_empty
+ end
+
+ it 'registers no offense for update_column_in_batches' do
+ inspect_source(cop, "update_column_in_batches :#{table}, :column, default: true")
+
+ expect(cop.offenses).to be_empty
+ end
+ end
+end
diff --git a/spec/serializers/merge_request_entity_spec.rb b/spec/serializers/merge_request_entity_spec.rb
index f9285049c0d..1ad672fd355 100644
--- a/spec/serializers/merge_request_entity_spec.rb
+++ b/spec/serializers/merge_request_entity_spec.rb
@@ -5,22 +5,34 @@ describe MergeRequestEntity do
let(:resource) { create(:merge_request, source_project: project, target_project: project) }
let(:user) { create(:user) }
- let(:request) { double('request', current_user: user) }
+ let(:request) { double('request', current_user: user, project: project) }
subject do
described_class.new(resource, request: request).as_json
end
- it 'includes pipeline' do
- req = double('request', current_user: user)
- pipeline = build_stubbed(:ci_pipeline)
- allow(resource).to receive(:head_pipeline).and_return(pipeline)
+ describe 'pipeline' do
+ let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.source_branch, sha: resource.source_branch_sha, head_pipeline_of: resource) }
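+ # head_pipeline_of ties the pipeline to the MR; as the contexts below show,
+ # the entity exposes it only while the pipeline sha matches the MR's source branch head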
- pipeline_payload = PipelineDetailsEntity
- .represent(pipeline, request: req)
- .as_json
+ context 'when the pipeline is up to date' do
+ let(:req) { double('request', current_user: user, project: project) }
- expect(subject[:pipeline]).to eq(pipeline_payload)
+ it 'returns pipeline' do
+ pipeline_payload = PipelineDetailsEntity
+ .represent(pipeline, request: req)
+ .as_json
+
+ expect(subject[:pipeline]).to eq(pipeline_payload)
+ end
+ end
+
+ context 'when the pipeline is not up to date' do
+ it 'returns nil' do
+ pipeline.update(sha: "not up to date")
+
+ expect(subject[:pipeline]).to be_nil
+ end
+ end
end
it 'includes issues_links' do
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index 8fc1ceedc34..88d347322a6 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe PipelineSerializer do
- let(:user) { create(:user) }
+ set(:user) { create(:user) }
let(:serializer) do
described_class.new(current_user: user)
@@ -117,7 +117,7 @@ describe PipelineSerializer do
shared_examples 'no N+1 queries' do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
- expect(recorded.count).to be_within(1).of(57)
+ expect(recorded.count).to be_within(1).of(36)
expect(recorded.cached_count).to eq(0)
end
end
diff --git a/spec/services/base_count_service_spec.rb b/spec/services/base_count_service_spec.rb
new file mode 100644
index 00000000000..090b2dcdd43
--- /dev/null
+++ b/spec/services/base_count_service_spec.rb
@@ -0,0 +1,86 @@
+require 'spec_helper'
+
+describe BaseCountService, :use_clean_rails_memory_store_caching do
+ let(:service) { described_class.new }
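+
+ # BaseCountService acts as an abstract base class: subclasses are expected to
+ # implement #relation_for_count and #cache_key, which is why both raise
+ # NotImplementedError below.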
+
+ describe '#relation_for_count' do
+ it 'raises NotImplementedError' do
+ expect { service.relation_for_count }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#count' do
+ it 'returns the number of values' do
+ expect(service)
+ .to receive(:cache_key)
+ .and_return('foo')
+
+ expect(service)
+ .to receive(:uncached_count)
+ .and_return(5)
+
+ expect(service.count).to eq(5)
+ end
+ end
+
+ describe '#uncached_count' do
+ it 'returns the uncached number of values' do
+ expect(service)
+ .to receive(:relation_for_count)
+ .and_return(double(:relation, count: 5))
+
+ expect(service.uncached_count).to eq(5)
+ end
+ end
+
+ describe '#refresh_cache' do
+ it 'refreshes the cache' do
+ allow(service)
+ .to receive(:cache_key)
+ .and_return('foo')
+
+ allow(service)
+ .to receive(:uncached_count)
+ .and_return(4)
+
+ service.refresh_cache
+
+ expect(Rails.cache.fetch(service.cache_key, raw: service.raw?)).to eq(4)
+ end
+ end
+
+ describe '#delete_cache' do
+ it 'deletes the cache' do
+ allow(service)
+ .to receive(:cache_key)
+ .and_return('foo')
+
+ allow(service)
+ .to receive(:uncached_count)
+ .and_return(4)
+
+ service.refresh_cache
+ service.delete_cache
+
+ expect(Rails.cache.fetch(service.cache_key, raw: service.raw?)).to be_nil
+ end
+ end
+
+ describe '#raw?' do
+ it 'returns false' do
+ expect(service.raw?).to eq(false)
+ end
+ end
+
+ describe '#cache_key' do
+ it 'raises NotImplementedError' do
+ expect { service.cache_key }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#cache_options' do
+ it 'returns the default options' do
+ expect(service.cache_options).to eq({ raw: false })
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 08847183bf4..b0de8d447a2 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -8,7 +8,7 @@ describe Ci::CreatePipelineService do
let(:ref_name) { 'refs/heads/master' }
before do
- stub_ci_pipeline_to_return_yaml_file
+ stub_repository_ci_yaml_file(sha: anything)
end
describe '#execute' do
@@ -44,6 +44,7 @@ describe Ci::CreatePipelineService do
expect(pipeline).to eq(project.pipelines.last)
expect(pipeline).to have_attributes(user: user)
expect(pipeline).to have_attributes(status: 'pending')
+ expect(pipeline.repository_source?).to be true
expect(pipeline.builds.first).to be_kind_of(Ci::Build)
end
@@ -56,19 +57,39 @@ describe Ci::CreatePipelineService do
end
context 'when merge requests already exist for this source branch' do
- it 'updates head pipeline of each merge request' do
- merge_request_1 = create(:merge_request, source_branch: 'master',
- target_branch: "branch_1",
- source_project: project)
+ let(:merge_request_1) do
+ create(:merge_request, source_branch: 'master', target_branch: "branch_1", source_project: project)
+ end
+ let(:merge_request_2) do
+ create(:merge_request, source_branch: 'master', target_branch: "branch_2", source_project: project)
+ end
- merge_request_2 = create(:merge_request, source_branch: 'master',
- target_branch: "branch_2",
- source_project: project)
+ context 'when the head pipeline sha equals merge request sha' do
+ it 'updates head pipeline of each merge request' do
+ merge_request_1
+ merge_request_2
- head_pipeline = execute_service
+ head_pipeline = execute_service
- expect(merge_request_1.reload.head_pipeline).to eq(head_pipeline)
- expect(merge_request_2.reload.head_pipeline).to eq(head_pipeline)
+ expect(merge_request_1.reload.head_pipeline).to eq(head_pipeline)
+ expect(merge_request_2.reload.head_pipeline).to eq(head_pipeline)
+ end
+ end
+
+ context 'when the head pipeline sha does not equal merge request sha' do
+ it 'raises ArgumentError from the worker and does not update the head pipeline of MRs' do
+ merge_request_1
+ merge_request_2
+
+ allow_any_instance_of(Ci::Pipeline).to receive(:latest?).and_return(true)
+
+ expect { execute_service(after: 'ae73cb07c9eeaf35924a10f713b364d32b2dd34f') }.to raise_error(ArgumentError)
+
+ last_pipeline = Ci::Pipeline.last
+
+ expect(merge_request_1.reload.head_pipeline).not_to eq(last_pipeline)
+ expect(merge_request_2.reload.head_pipeline).not_to eq(last_pipeline)
+ end
end
context 'when there is no pipeline for source branch' do
@@ -105,8 +126,7 @@ describe Ci::CreatePipelineService do
target_branch: "branch_1",
source_project: project)
- allow_any_instance_of(Ci::Pipeline)
- .to receive(:latest?).and_return(false)
+ allow_any_instance_of(Ci::Pipeline).to receive(:latest?).and_return(false)
execute_service
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index 214adc9960f..0ce41e7c7ee 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -292,6 +292,30 @@ describe Ci::ProcessPipelineService, '#execute' do
end
end
+ context 'when there is only one manual action' do
+ before do
+ create_build('deploy', stage_idx: 0, when: 'manual', allow_failure: true)
+
+ process_pipeline
+ end
+
+ it 'skips the pipeline' do
+ expect(pipeline.reload).to be_skipped
+ end
+
+ context 'when the action was played' do
+ before do
+ play_manual_action('deploy')
+ end
+
+ it 'queues the action and pipeline' do
+ expect(all_builds_statuses).to eq(%w[pending])
+
+ expect(pipeline.reload).to be_pending
+ end
+ end
+ end
+
context 'when blocking manual actions are defined' do
before do
create_build('code:test', stage_idx: 0)
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 5ac30111ec9..3ee59014b5b 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -15,16 +15,14 @@ module Ci
describe '#execute' do
context 'runner follow tag list' do
it "picks build with the same tag" do
- pending_job.tag_list = ["linux"]
- pending_job.save
- specific_runner.tag_list = ["linux"]
+ pending_job.update(tag_list: ["linux"])
+ specific_runner.update(tag_list: ["linux"])
expect(execute(specific_runner)).to eq(pending_job)
end
it "does not pick build with different tag" do
- pending_job.tag_list = ["linux"]
- pending_job.save
- specific_runner.tag_list = ["win32"]
+ pending_job.update(tag_list: ["linux"])
+ specific_runner.update(tag_list: ["win32"])
expect(execute(specific_runner)).to be_falsey
end
@@ -33,13 +31,12 @@ module Ci
end
it "does not pick build with tag" do
- pending_job.tag_list = ["linux"]
- pending_job.save
+ pending_job.update(tag_list: ["linux"])
expect(execute(specific_runner)).to be_falsey
end
it "pick build without tag" do
- specific_runner.tag_list = ["win32"]
+ specific_runner.update(tag_list: ["win32"])
expect(execute(specific_runner)).to eq(pending_job)
end
end
@@ -172,7 +169,7 @@ module Ci
context 'when first build is stalled' do
before do
- pending_job.lock_version = 10
+ pending_job.update(lock_version: 0)
end
subject { described_class.new(specific_runner).execute }
@@ -182,7 +179,7 @@ module Ci
before do
allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_specific_runner)
- .and_return([pending_job, other_build])
+ .and_return(Ci::Build.where(id: [pending_job, other_build]))
end
it "receives second build from the queue" do
@@ -194,7 +191,7 @@ module Ci
context 'when single build is in queue' do
before do
allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_specific_runner)
- .and_return([pending_job])
+ .and_return(Ci::Build.where(id: pending_job))
end
it "does not receive any valid result" do
@@ -205,7 +202,7 @@ module Ci
context 'when there is no build in queue' do
before do
allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_specific_runner)
- .and_return([])
+ .and_return(Ci::Build.none)
end
it "does not receive builds but result is valid" do
@@ -279,6 +276,89 @@ module Ci
end
end
+ context 'when "dependencies" keyword is specified' do
+ shared_examples 'not pick' do
+ it 'does not pick the build and drops it' do
+ expect(subject).to be_nil
+ expect(pending_job.reload).to be_failed
+ expect(pending_job).to be_missing_dependency_failure
+ end
+ end
+
+ shared_examples 'validation is active' do
+ context 'when the dependent job has not completed yet' do
+ let!(:pre_stage_job) { create(:ci_build, :running, pipeline: pipeline, name: 'test', stage_idx: 0) }
+
+ it_behaves_like 'not pick'
+ end
+
+ context 'when artifacts of the dependent job have expired' do
+ let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
+
+ it_behaves_like 'not pick'
+ end
+
+ context 'when artifacts of the dependent job have been erased' do
+ let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) }
+
+ before do
+ pre_stage_job.erase
+ end
+
+ it_behaves_like 'not pick'
+ end
+ end
+
+ shared_examples 'validation is not active' do
+ context 'when the dependent job has not completed yet' do
+ let!(:pre_stage_job) { create(:ci_build, :running, pipeline: pipeline, name: 'test', stage_idx: 0) }
+
+ it { expect(subject).to eq(pending_job) }
+ end
+
+ context 'when artifacts of the dependent job have expired' do
+ let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
+
+ it { expect(subject).to eq(pending_job) }
+ end
+
+ context 'when artifacts of the dependent job have been erased' do
+ let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) }
+
+ before do
+ pre_stage_job.erase
+ end
+
+ it { expect(subject).to eq(pending_job) }
+ end
+ end
+
+ before do
+ stub_feature_flags(ci_disable_validates_dependencies: false)
+ end
+
+ let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ let!(:pending_job) { create(:ci_build, :pending, pipeline: pipeline, stage_idx: 1, options: { dependencies: ['test'] } ) }
+
+ subject { execute(specific_runner) }
+
+ context 'when dependency validation is enabled' do
+ before do
+ stub_feature_flags(ci_disable_validates_dependencies: false)
+ end
+
+ it_behaves_like 'validation is active'
+ end
+
+ context 'when dependency validation is disabled' do
+ before do
+ stub_feature_flags(ci_disable_validates_dependencies: true)
+ end
+
+ it_behaves_like 'validation is not active'
+ end
+ end
+
def execute(runner)
described_class.new(runner).execute.build
end
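
Note that ci_disable_validates_dependencies is a "disable" flag, so stubbing it to false turns validation on. A condensed sketch of the toggle pattern above, assuming GitLab's stub_feature_flags spec helper is available:

    shared_examples 'drops the build' do
      it 'fails the build instead of picking it' do
        expect(subject).to be_nil
        expect(pending_job.reload).to be_failed
      end
    end

    context 'when dependency validation is enabled' do
      before do
        # The flag name is negated: false here means validation runs.
        stub_feature_flags(ci_disable_validates_dependencies: false)
      end

      it_behaves_like 'drops the build'
    end
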
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index c1ba021fcba..9e02644ae0a 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -17,7 +17,7 @@ describe Ci::RetryBuildService do
%i[id status user token coverage trace runner artifacts_expire_at
artifacts_file artifacts_metadata artifacts_size created_at
updated_at started_at finished_at queued_at erased_by
- erased_at auto_canceled_by].freeze
+ erased_at auto_canceled_by job_artifacts job_artifacts_archive job_artifacts_metadata].freeze
IGNORE_ACCESSORS =
%i[type lock_version target_url base_tags trace_sections
@@ -35,7 +35,7 @@ describe Ci::RetryBuildService do
end
let(:build) do
- create(:ci_build, :failed, :artifacts_expired, :erased,
+ create(:ci_build, :failed, :artifacts, :expired, :erased,
:queued, :coverage, :tags, :allowed_to_fail, :on_tag,
:triggered, :trace, :teardown_environment,
description: 'my-job', stage: 'test', pipeline: pipeline,
diff --git a/spec/services/clusters/applications/schedule_installation_service_spec.rb b/spec/services/clusters/applications/schedule_installation_service_spec.rb
index cf95361c935..047a6e44dab 100644
--- a/spec/services/clusters/applications/schedule_installation_service_spec.rb
+++ b/spec/services/clusters/applications/schedule_installation_service_spec.rb
@@ -22,6 +22,8 @@ describe Clusters::Applications::ScheduleInstallationService do
let(:service) { described_class.new(project, nil, cluster: cluster, application_class: application_class) }
it 'creates a new application' do
+ allow(ClusterInstallAppWorker).to receive(:perform_async)
+
expect { service.execute }.to change { application_class.count }.by(1)
end
diff --git a/spec/services/clusters/create_service_spec.rb b/spec/services/clusters/create_service_spec.rb
index 5b6edb73beb..e2e64659dfa 100644
--- a/spec/services/clusters/create_service_spec.rb
+++ b/spec/services/clusters/create_service_spec.rb
@@ -4,10 +4,11 @@ describe Clusters::CreateService do
let(:access_token) { 'xxx' }
let(:project) { create(:project) }
let(:user) { create(:user) }
- let(:result) { described_class.new(project, user, params).execute(access_token) }
+
+ subject { described_class.new(project, user, params).execute(access_token) }
context 'when provider is gcp' do
- context 'when correct params' do
+ shared_context 'valid params' do
let(:params) do
{
name: 'test-cluster',
@@ -20,27 +21,9 @@ describe Clusters::CreateService do
}
}
end
-
- it 'creates a cluster object and performs a worker' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
-
- expect { result }
- .to change { Clusters::Cluster.count }.by(1)
- .and change { Clusters::Providers::Gcp.count }.by(1)
-
- expect(result.name).to eq('test-cluster')
- expect(result.user).to eq(user)
- expect(result.project).to eq(project)
- expect(result.provider.gcp_project_id).to eq('gcp-project')
- expect(result.provider.zone).to eq('us-central1-a')
- expect(result.provider.num_nodes).to eq(1)
- expect(result.provider.machine_type).to eq('machine_type-a')
- expect(result.provider.access_token).to eq(access_token)
- expect(result.platform).to be_nil
- end
end
- context 'when invalid params' do
+ shared_context 'invalid params' do
let(:params) do
{
name: 'test-cluster',
@@ -53,11 +36,57 @@ describe Clusters::CreateService do
}
}
end
+ end
+
+ shared_examples 'create cluster' do
+ it 'creates a cluster object and performs a worker' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+
+ expect { subject }
+ .to change { Clusters::Cluster.count }.by(1)
+ .and change { Clusters::Providers::Gcp.count }.by(1)
+ expect(subject.name).to eq('test-cluster')
+ expect(subject.user).to eq(user)
+ expect(subject.project).to eq(project)
+ expect(subject.provider.gcp_project_id).to eq('gcp-project')
+ expect(subject.provider.zone).to eq('us-central1-a')
+ expect(subject.provider.num_nodes).to eq(1)
+ expect(subject.provider.machine_type).to eq('machine_type-a')
+ expect(subject.provider.access_token).to eq(access_token)
+ expect(subject.platform).to be_nil
+ end
+ end
+
+ shared_examples 'error' do
it 'returns an error' do
expect(ClusterProvisionWorker).not_to receive(:perform_async)
- expect { result }.to change { Clusters::Cluster.count }.by(0)
- expect(result.errors[:"provider_gcp.gcp_project_id"]).to be_present
+ expect { subject }.to change { Clusters::Cluster.count }.by(0)
+ expect(subject.errors[:"provider_gcp.gcp_project_id"]).to be_present
+ end
+ end
+
+ context 'when project has no clusters' do
+ context 'when correct params' do
+ include_context 'valid params'
+
+ include_examples 'create cluster'
+ end
+
+ context 'when invalid params' do
+ include_context 'invalid params'
+
+ include_examples 'error'
+ end
+ end
+
+ context 'when project has a cluster' do
+ include_context 'valid params'
+ let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
+
+ it 'does not create a cluster' do
+ expect(ClusterProvisionWorker).not_to receive(:perform_async)
+ expect { subject }.to raise_error(ArgumentError).and change { Clusters::Cluster.count }.by(0)
end
end
end
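
The refactor above splits fixtures from assertions: shared_context blocks carry the let definitions (the params), shared_examples carry the expectations, and each concrete context composes one of each via include_context and include_examples. A minimal sketch of that shape, with placeholder values:

    shared_context 'valid params' do
      let(:params) { { name: 'test-cluster' } }
    end

    shared_examples 'create cluster' do
      it 'persists a cluster' do
        expect { subject }.to change { Clusters::Cluster.count }.by(1)
      end
    end

    context 'when project has no clusters' do
      include_context 'valid params'
      include_examples 'create cluster'
    end
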
diff --git a/spec/services/issuable/common_system_notes_service_spec.rb b/spec/services/issuable/common_system_notes_service_spec.rb
index 9f92b662be1..b8fa3e3d124 100644
--- a/spec/services/issuable/common_system_notes_service_spec.rb
+++ b/spec/services/issuable/common_system_notes_service_spec.rb
@@ -18,7 +18,18 @@ describe Issuable::CommonSystemNotesService do
note = Note.last
expect(note.note).to match(note_text)
- expect(note.noteable_type).to eq('Issue')
+ expect(note.noteable_type).to eq(issuable.class.name)
+ end
+ end
+
+ shared_examples 'WIP notes creation' do |wip_action|
+ subject { described_class.new(project, user).execute(issuable, []) }
+
+ it 'creates WIP toggle and title change notes' do
+ expect { subject }.to change { Note.count }.from(0).to(2)
+
+ expect(Note.first.note).to match("#{wip_action} as a **Work In Progress**")
+ expect(Note.second.note).to match('changed title')
end
end
@@ -45,5 +56,35 @@ describe Issuable::CommonSystemNotesService do
it_behaves_like 'system note creation', {}, 'changed milestone'
end
+
+ context 'with merge requests WIP note' do
+ context 'adding WIP note' do
+ let(:issuable) { create(:merge_request, title: "merge request") }
+
+ it_behaves_like 'system note creation', { title: "WIP merge request" }, 'marked as a **Work In Progress**'
+
+ context 'and changing title' do
+ before do
+ issuable.update_attribute(:title, "WIP changed title")
+ end
+
+ it_behaves_like 'WIP notes creation', 'marked'
+ end
+ end
+
+ context 'removing WIP note' do
+ let(:issuable) { create(:merge_request, title: "WIP merge request") }
+
+ it_behaves_like 'system note creation', { title: "merge request" }, 'unmarked as a **Work In Progress**'
+
+ context 'and changing title' do
+ before do
+ issuable.update_attribute(:title, "changed title")
+ end
+
+ it_behaves_like 'WIP notes creation', 'unmarked'
+ end
+ end
+ end
end
end
diff --git a/spec/services/issuable/destroy_service_spec.rb b/spec/services/issuable/destroy_service_spec.rb
new file mode 100644
index 00000000000..d74d98c6079
--- /dev/null
+++ b/spec/services/issuable/destroy_service_spec.rb
@@ -0,0 +1,38 @@
+require 'spec_helper'
+
+describe Issuable::DestroyService do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+
+ subject(:service) { described_class.new(project, user) }
+
+ describe '#execute' do
+ context 'when issuable is an issue' do
+ let!(:issue) { create(:issue, project: project, author: user) }
+
+ it 'destroys the issue' do
+ expect { service.execute(issue) }.to change { project.issues.count }.by(-1)
+ end
+
+ it 'updates open issues count cache' do
+ expect_any_instance_of(Projects::OpenIssuesCountService).to receive(:refresh_cache)
+
+ service.execute(issue)
+ end
+ end
+
+ context 'when issuable is a merge request' do
+ let!(:merge_request) { create(:merge_request, target_project: project, source_project: project, author: user) }
+
+ it 'destroys the merge request' do
+ expect { service.execute(merge_request) }.to change { project.merge_requests.count }.by(-1)
+ end
+
+ it 'updates open merge requests count cache' do
+ expect_any_instance_of(Projects::OpenMergeRequestsCountService).to receive(:refresh_cache)
+
+ service.execute(merge_request)
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index b46c419de14..b5c92e681fb 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -29,13 +29,28 @@ describe MergeRequests::BuildService do
before do
project.team << [user, :guest]
+ end
+ def stub_compare
allow(CompareService).to receive_message_chain(:new, :execute).and_return(compare)
allow(project).to receive(:commit).and_return(commit_1)
allow(project).to receive(:commit).and_return(commit_2)
end
- describe 'execute' do
+ describe '#execute' do
+ it 'calls the compare service with the correct arguments' do
+ allow_any_instance_of(described_class).to receive(:branches_valid?).and_return(true)
+ expect(CompareService).to receive(:new)
+ .with(project, Gitlab::Git::BRANCH_REF_PREFIX + source_branch)
+ .and_call_original
+
+ expect_any_instance_of(CompareService).to receive(:execute)
+ .with(project, Gitlab::Git::BRANCH_REF_PREFIX + target_branch)
+ .and_call_original
+
+ merge_request
+ end
+
context 'missing source branch' do
let(:source_branch) { '' }
@@ -52,6 +67,10 @@ describe MergeRequests::BuildService do
let(:target_branch) { nil }
let(:commits) { Commit.decorate([commit_1], project) }
+ before do
+ stub_compare
+ end
+
it 'creates compare object with target branch as default branch' do
expect(merge_request.compare).to be_present
expect(merge_request.target_branch).to eq(project.default_branch)
@@ -77,6 +96,10 @@ describe MergeRequests::BuildService do
context 'no commits in the diff' do
let(:commits) { [] }
+ before do
+ stub_compare
+ end
+
it 'allows the merge request to be created' do
expect(merge_request.can_be_created).to eq(true)
end
@@ -89,6 +112,10 @@ describe MergeRequests::BuildService do
context 'one commit in the diff' do
let(:commits) { Commit.decorate([commit_1], project) }
+ before do
+ stub_compare
+ end
+
it 'allows the merge request to be created' do
expect(merge_request.can_be_created).to eq(true)
end
@@ -149,6 +176,10 @@ describe MergeRequests::BuildService do
context 'more than one commit in the diff' do
let(:commits) { Commit.decorate([commit_1, commit_2], project) }
+ before do
+ stub_compare
+ end
+
it 'allows the merge request to be created' do
expect(merge_request.can_be_created).to eq(true)
end
diff --git a/spec/services/merge_requests/create_from_issue_service_spec.rb b/spec/services/merge_requests/create_from_issue_service_spec.rb
index 313f87ae1f6..a7ab389b357 100644
--- a/spec/services/merge_requests/create_from_issue_service_spec.rb
+++ b/spec/services/merge_requests/create_from_issue_service_spec.rb
@@ -6,8 +6,10 @@ describe MergeRequests::CreateFromIssueService do
let(:label_ids) { create_pair(:label, project: project).map(&:id) }
let(:milestone_id) { create(:milestone, project: project).id }
let(:issue) { create(:issue, project: project, milestone_id: milestone_id) }
+ let(:custom_source_branch) { 'custom-source-branch' }
subject(:service) { described_class.new(project, user, issue_iid: issue.iid) }
+ subject(:service_with_custom_source_branch) { described_class.new(project, user, issue_iid: issue.iid, branch_name: custom_source_branch) }
before do
project.add_developer(user)
@@ -17,8 +19,8 @@ describe MergeRequests::CreateFromIssueService do
it 'returns an error with invalid issue iid' do
result = described_class.new(project, user, issue_iid: -1).execute
- expect(result[:status]).to eq :error
- expect(result[:message]).to eq 'Invalid issue iid'
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid issue iid')
end
it 'delegates issue search to IssuesFinder' do
@@ -53,6 +55,12 @@ describe MergeRequests::CreateFromIssueService do
expect(project.repository.branch_exists?(issue.to_branch_name)).to be_truthy
end
+ it 'creates a branch using passed name' do
+ service_with_custom_source_branch.execute
+
+ expect(project.repository.branch_exists?(custom_source_branch)).to be_truthy
+ end
+
it 'creates a system note' do
expect(SystemNoteService).to receive(:new_issue_branch).with(issue, project, user, issue.to_branch_name)
@@ -72,19 +80,25 @@ describe MergeRequests::CreateFromIssueService do
it 'sets the merge request author to current user' do
result = service.execute
- expect(result[:merge_request].author).to eq user
+ expect(result[:merge_request].author).to eq(user)
end
it 'sets the merge request source branch to the new issue branch' do
result = service.execute
- expect(result[:merge_request].source_branch).to eq issue.to_branch_name
+ expect(result[:merge_request].source_branch).to eq(issue.to_branch_name)
+ end
+
+ it 'sets the merge request source branch to the passed branch name' do
+ result = service_with_custom_source_branch.execute
+
+ expect(result[:merge_request].source_branch).to eq(custom_source_branch)
end
it 'sets the merge request target branch to the project default branch' do
result = service.execute
- expect(result[:merge_request].target_branch).to eq project.default_branch
+ expect(result[:merge_request].target_branch).to eq(project.default_branch)
end
end
end
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index ac196e92601..f86f1ac2443 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -248,6 +248,28 @@ describe MergeRequests::MergeService do
expect(merge_request.merge_error).to include(error_message)
expect(Rails.logger).to have_received(:error).with(a_string_matching(error_message))
end
+
+ context "when fast-forward merge is not allowed" do
+ before do
+ allow_any_instance_of(Repository).to receive(:ancestor?).and_return(nil)
+ end
+
+ %w(semi-linear ff).each do |merge_method|
+ it "logs and saves error if merge is #{merge_method} only" do
+ merge_method = 'rebase_merge' if merge_method == 'semi-linear'
+ merge_request.project.update(merge_method: merge_method)
+ error_message = 'Only fast-forward merge is allowed for your project. Please update your source branch'
+ allow(service).to receive(:execute_hooks)
+
+ service.execute(merge_request)
+
+ expect(merge_request).to be_open
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include(error_message)
+ expect(Rails.logger).to have_received(:error).with(a_string_matching(error_message))
+ end
+ end
+ end
end
end
end
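
The %w(semi-linear ff) loop above generates one example per merge method and rebinds merge_method inside the block to map the UI-facing name 'semi-linear' onto the persisted value 'rebase_merge'. A clearer sketch of that mapping (the persisted local is introduced here for readability and is not in the diff):

    %w(semi-linear ff).each do |merge_method|
      it "logs and saves error if merge is #{merge_method} only" do
        # 'semi-linear' is persisted as the rebase_merge merge method.
        persisted = merge_method == 'semi-linear' ? 'rebase_merge' : merge_method
        expect(%w(rebase_merge ff)).to include(persisted)
      end
    end
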
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index a2c05761f6b..61ec4709c59 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -74,6 +74,20 @@ describe MergeRequests::RefreshService do
end
end
+ context 'when pipeline exists for the source branch' do
+ let!(:pipeline) { create(:ci_empty_pipeline, ref: @merge_request.source_branch, project: @project, sha: @commits.first.sha)}
+
+ subject { service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/master') }
+
+ it 'updates the head_pipeline_id for @merge_request' do
+ expect { subject }.to change { @merge_request.reload.head_pipeline_id }.from(nil).to(pipeline.id)
+ end
+
+ it 'does not update the head_pipeline_id for @fork_merge_request' do
+ expect { subject }.not_to change { @fork_merge_request.reload.head_pipeline_id }
+ end
+ end
+
context 'push to origin repo source branch when an MR was reopened' do
let(:refresh_service) { service.new(@project, @user) }
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 98409be4236..7a66b809550 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -65,7 +65,7 @@ describe MergeRequests::UpdateService, :mailer do
end
end
- it 'mathces base expectations' do
+ it 'matches base expectations' do
expect(@merge_request).to be_valid
expect(@merge_request.title).to eq('New title')
expect(@merge_request.assignee).to eq(user2)
@@ -78,9 +78,17 @@ describe MergeRequests::UpdateService, :mailer do
end
it 'executes hooks with update action' do
- expect(service)
- .to have_received(:execute_hooks)
- .with(@merge_request, 'update', old_labels: [], old_assignees: [user3])
+ expect(service).to have_received(:execute_hooks)
+ .with(
+ @merge_request,
+ 'update',
+ old_associations: {
+ labels: [],
+ mentioned_users: [user2],
+ assignees: [user3],
+ total_time_spent: 0
+ }
+ )
end
it 'sends email to user2 about assign of new merge request and email to user3 about merge request unassignment' do
diff --git a/spec/services/milestones/destroy_service_spec.rb b/spec/services/milestones/destroy_service_spec.rb
index 5739386dd0d..af35e17bfa7 100644
--- a/spec/services/milestones/destroy_service_spec.rb
+++ b/spec/services/milestones/destroy_service_spec.rb
@@ -4,8 +4,8 @@ describe Milestones::DestroyService do
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:milestone) { create(:milestone, title: 'Milestone v1.0', project: project) }
- let(:issue) { create(:issue, project: project, milestone: milestone) }
- let(:merge_request) { create(:merge_request, source_project: project, milestone: milestone) }
+ let!(:issue) { create(:issue, project: project, milestone: milestone) }
+ let!(:merge_request) { create(:merge_request, source_project: project, milestone: milestone) }
before do
project.team << [user, :master]
diff --git a/spec/services/milestones/promote_service_spec.rb b/spec/services/milestones/promote_service_spec.rb
index 9f2df6d6d19..a0a2843b676 100644
--- a/spec/services/milestones/promote_service_spec.rb
+++ b/spec/services/milestones/promote_service_spec.rb
@@ -25,6 +25,18 @@ describe Milestones::PromoteService do
expect { service.execute(milestone) }.to raise_error(described_class::PromoteMilestoneError)
end
+
+ it 'does not promote milestone and update issuables if promoted milestone is not valid' do
+ issue = create(:issue, milestone: milestone, project: project)
+ merge_request = create(:merge_request, milestone: milestone, source_project: project)
+ allow_any_instance_of(Milestone).to receive(:valid?).and_return(false)
+
+ expect { service.execute(milestone) }.to raise_error(described_class::PromoteMilestoneError)
+
+ expect(milestone.reload).to be_persisted
+ expect(issue.reload.milestone).to eq(milestone)
+ expect(merge_request.reload.milestone).to eq(milestone)
+ end
end
context 'without duplicated milestone titles across projects' do
@@ -34,6 +46,16 @@ describe Milestones::PromoteService do
expect(promoted_milestone).to be_group_milestone
end
+ it 'does not update issuables without milestone with the new promoted milestone' do
+ issue_without_milestone = create(:issue, project: project, milestone: nil)
+ merge_request_without_milestone = create(:merge_request, milestone: nil, source_project: project)
+
+ service.execute(milestone)
+
+ expect(issue_without_milestone.reload.milestone).to be_nil
+ expect(merge_request_without_milestone.reload.milestone).to be_nil
+ end
+
it 'sets issuables with new promoted milestone' do
issue = create(:issue, milestone: milestone, project: project)
merge_request = create(:merge_request, milestone: milestone, source_project: project)
@@ -59,6 +81,20 @@ describe Milestones::PromoteService do
expect(Milestone.exists?(milestone_2.id)).to be_falsy
end
+ it 'does not update issuables without milestone with the new promoted milestone' do
+ issue_without_milestone_1 = create(:issue, project: project, milestone: nil)
+ issue_without_milestone_2 = create(:issue, project: project_2, milestone: nil)
+ merge_request_without_milestone_1 = create(:merge_request, milestone: nil, source_project: project)
+ merge_request_without_milestone_2 = create(:merge_request, milestone: nil, source_project: project_2)
+
+ service.execute(milestone)
+
+ expect(issue_without_milestone_1.reload.milestone).to be_nil
+ expect(issue_without_milestone_2.reload.milestone).to be_nil
+ expect(merge_request_without_milestone_1.reload.milestone).to be_nil
+ expect(merge_request_without_milestone_2.reload.milestone).to be_nil
+ end
+
it 'sets all issuables with new promoted milestone' do
issue = create(:issue, milestone: milestone, project: project)
issue_2 = create(:issue, milestone: milestone_2, project: project_2)
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index b13e12e7c94..43e2643f709 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -12,6 +12,8 @@ describe NotificationService, :mailer do
shared_examples 'notifications for new mentions' do
def send_notifications(*new_mentions)
+ mentionable.description = new_mentions.map(&:to_reference).join(' ')
+
notification.send(notification_method, mentionable, new_mentions, @u_disabled)
end
@@ -20,13 +22,13 @@ describe NotificationService, :mailer do
should_not_email_anyone
end
- it 'emails new mentions with a watch level higher than participant' do
- send_notifications(@u_watcher, @u_participant_mentioned, @u_custom_global)
- should_only_email(@u_watcher, @u_participant_mentioned, @u_custom_global)
+ it 'emails new mentions with a watch level higher than mention' do
+ send_notifications(@u_watcher, @u_participant_mentioned, @u_custom_global, @u_mentioned)
+ should_only_email(@u_watcher, @u_participant_mentioned, @u_custom_global, @u_mentioned)
end
- it 'does not email new mentions with a watch level equal to or less than participant' do
- send_notifications(@u_participating, @u_mentioned)
+ it 'does not email new mentions with a watch level equal to or less than mention' do
+ send_notifications(@u_disabled)
should_not_email_anyone
end
end
@@ -280,6 +282,7 @@ describe NotificationService, :mailer do
next if member.id == @u_disabled.id
# Author should not be notified
next if member.id == note.author.id
+
should_email(member)
end
@@ -327,6 +330,7 @@ describe NotificationService, :mailer do
next if member.id == @u_disabled.id
# Author should not be notified
next if member.id == note.author.id
+
should_email(member)
end
@@ -507,6 +511,14 @@ describe NotificationService, :mailer do
should_not_email(issue.assignees.first)
end
+ it "emails any mentioned users with the mention level" do
+ issue.description = @u_mentioned.to_reference
+
+ notification.new_issue(issue, @u_disabled)
+
+ should_email(@u_mentioned)
+ end
+
it "emails the author if they've opted into notifications about their activity" do
issue.author.notified_of_own_activity = true
@@ -898,6 +910,14 @@ describe NotificationService, :mailer do
should_not_email(@u_lazy_participant)
end
+ it "emails any mentioned users with the mention level" do
+ merge_request.description = @u_mentioned.to_reference
+
+ notification.new_merge_request(merge_request, @u_disabled)
+
+ should_email(@u_mentioned)
+ end
+
it "emails the author if they've opted into notifications about their activity" do
merge_request.author.notified_of_own_activity = true
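
The should_email, should_only_email, and should_not_email_anyone matchers come from GitLab's email spec helpers and are not shown in this diff. A rough approximation of what should_email checks, assuming ActionMailer's test delivery queue (an illustrative guess, not GitLab's actual implementation):

    def should_email(user)
      # All recipients across every message delivered in this example.
      recipients = ActionMailer::Base.deliveries.flat_map(&:to)
      expect(recipients).to include(user.email)
    end
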
diff --git a/spec/services/projects/count_service_spec.rb b/spec/services/projects/count_service_spec.rb
index cc496501bad..183f6128c7b 100644
--- a/spec/services/projects/count_service_spec.rb
+++ b/spec/services/projects/count_service_spec.rb
@@ -4,9 +4,17 @@ describe Projects::CountService do
let(:project) { build(:project, id: 1) }
let(:service) { described_class.new(project) }
- describe '#relation_for_count' do
+ describe '.query' do
it 'raises NotImplementedError' do
- expect { service.relation_for_count }.to raise_error(NotImplementedError)
+ expect { described_class.query(project.id) }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#relation_for_count' do
+ it 'calls the class method query with the project id' do
+ expect(described_class).to receive(:query).with(project.id)
+
+ service.relation_for_count
end
end
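
The hunk above reflects a refactor in Projects::CountService: counting moves to a class-level .query(project_id) that subclasses must implement, while the instance method only delegates. A sketch of the interface the spec exercises (the names mirror the diff; the body is inferred, not copied from the app code):

    class CountServiceSketch
      def initialize(project)
        @project = project
      end

      # Subclasses override this with the relation to count.
      def self.query(project_id)
        raise NotImplementedError
      end

      def relation_for_count
        self.class.query(@project.id)
      end
    end
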
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index 53862283a27..4057caca2ac 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -3,210 +3,253 @@ require 'spec_helper'
describe Projects::ForkService do
include ProjectForksHelper
let(:gitlab_shell) { Gitlab::Shell.new }
+ context 'when forking a new project' do
+ describe 'fork by user' do
+ before do
+ @from_user = create(:user)
+ @from_namespace = @from_user.namespace
+ avatar = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
+ @from_project = create(:project,
+ :repository,
+ creator_id: @from_user.id,
+ namespace: @from_namespace,
+ star_count: 107,
+ avatar: avatar,
+ description: 'wow such project')
+ @to_user = create(:user)
+ @to_namespace = @to_user.namespace
+ @from_project.add_user(@to_user, :developer)
+ end
- describe 'fork by user' do
- before do
- @from_user = create(:user)
- @from_namespace = @from_user.namespace
- avatar = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
- @from_project = create(:project,
- :repository,
- creator_id: @from_user.id,
- namespace: @from_namespace,
- star_count: 107,
- avatar: avatar,
- description: 'wow such project')
- @to_user = create(:user)
- @to_namespace = @to_user.namespace
- @from_project.add_user(@to_user, :developer)
- end
+ context 'fork project' do
+ context 'when forker is a guest' do
+ before do
+ @guest = create(:user)
+ @from_project.add_user(@guest, :guest)
+ end
+ subject { fork_project(@from_project, @guest) }
- context 'fork project' do
- context 'when forker is a guest' do
- before do
- @guest = create(:user)
- @from_project.add_user(@guest, :guest)
+ it { is_expected.not_to be_persisted }
+ it { expect(subject.errors[:forked_from_project_id]).to eq(['is forbidden']) }
end
- subject { fork_project(@from_project, @guest) }
- it { is_expected.not_to be_persisted }
- it { expect(subject.errors[:forked_from_project_id]).to eq(['is forbidden']) }
- end
+ describe "successfully creates project in the user namespace" do
+ let(:to_project) { fork_project(@from_project, @to_user, namespace: @to_user.namespace) }
- describe "successfully creates project in the user namespace" do
- let(:to_project) { fork_project(@from_project, @to_user, namespace: @to_user.namespace) }
-
- it { expect(to_project).to be_persisted }
- it { expect(to_project.errors).to be_empty }
- it { expect(to_project.owner).to eq(@to_user) }
- it { expect(to_project.namespace).to eq(@to_user.namespace) }
- it { expect(to_project.star_count).to be_zero }
- it { expect(to_project.description).to eq(@from_project.description) }
- it { expect(to_project.avatar.file).to be_exists }
-
- # This test is here because we had a bug where the from-project lost its
- # avatar after being forked.
- # https://gitlab.com/gitlab-org/gitlab-ce/issues/26158
- it "after forking the from-project still has its avatar" do
- # If we do not fork the project first we cannot detect the bug.
- expect(to_project).to be_persisted
-
- expect(@from_project.avatar.file).to be_exists
- end
+ it { expect(to_project).to be_persisted }
+ it { expect(to_project.errors).to be_empty }
+ it { expect(to_project.owner).to eq(@to_user) }
+ it { expect(to_project.namespace).to eq(@to_user.namespace) }
+ it { expect(to_project.star_count).to be_zero }
+ it { expect(to_project.description).to eq(@from_project.description) }
+ it { expect(to_project.avatar.file).to be_exists }
- it 'flushes the forks count cache of the source project' do
- expect(@from_project.forks_count).to be_zero
+ # This test is here because we had a bug where the from-project lost its
+ # avatar after being forked.
+ # https://gitlab.com/gitlab-org/gitlab-ce/issues/26158
+ it "after forking the from-project still has its avatar" do
+ # If we do not fork the project first we cannot detect the bug.
+ expect(to_project).to be_persisted
- fork_project(@from_project, @to_user)
+ expect(@from_project.avatar.file).to be_exists
+ end
- expect(@from_project.forks_count).to eq(1)
- end
+ it 'flushes the forks count cache of the source project' do
+ expect(@from_project.forks_count).to be_zero
- it 'creates a fork network with the new project and the root project set' do
- to_project
- fork_network = @from_project.reload.fork_network
+ fork_project(@from_project, @to_user)
- expect(fork_network).not_to be_nil
- expect(fork_network.root_project).to eq(@from_project)
- expect(fork_network.projects).to contain_exactly(@from_project, to_project)
- end
- end
+ expect(@from_project.forks_count).to eq(1)
+ end
- context 'creating a fork of a fork' do
- let(:from_forked_project) { fork_project(@from_project, @to_user) }
- let(:other_namespace) do
- group = create(:group)
- group.add_owner(@to_user)
- group
- end
- let(:to_project) { fork_project(from_forked_project, @to_user, namespace: other_namespace) }
+ it 'creates a fork network with the new project and the root project set' do
+ to_project
+ fork_network = @from_project.reload.fork_network
- it 'sets the root of the network to the root project' do
- expect(to_project.fork_network.root_project).to eq(@from_project)
+ expect(fork_network).not_to be_nil
+ expect(fork_network.root_project).to eq(@from_project)
+ expect(fork_network.projects).to contain_exactly(@from_project, to_project)
+ end
end
- it 'sets the forked_from_project on the membership' do
- expect(to_project.fork_network_member.forked_from_project).to eq(from_forked_project)
+ context 'creating a fork of a fork' do
+ let(:from_forked_project) { fork_project(@from_project, @to_user) }
+ let(:other_namespace) do
+ group = create(:group)
+ group.add_owner(@to_user)
+ group
+ end
+ let(:to_project) { fork_project(from_forked_project, @to_user, namespace: other_namespace) }
+
+ it 'sets the root of the network to the root project' do
+ expect(to_project.fork_network.root_project).to eq(@from_project)
+ end
+
+ it 'sets the forked_from_project on the membership' do
+ expect(to_project.fork_network_member.forked_from_project).to eq(from_forked_project)
+ end
end
end
- end
- context 'project already exists' do
- it "fails due to validation, not transaction failure" do
- @existing_project = create(:project, :repository, creator_id: @to_user.id, name: @from_project.name, namespace: @to_namespace)
- @to_project = fork_project(@from_project, @to_user, namespace: @to_namespace)
- expect(@existing_project).to be_persisted
+ context 'project already exists' do
+ it "fails due to validation, not transaction failure" do
+ @existing_project = create(:project, :repository, creator_id: @to_user.id, name: @from_project.name, namespace: @to_namespace)
+ @to_project = fork_project(@from_project, @to_user, namespace: @to_namespace)
+ expect(@existing_project).to be_persisted
- expect(@to_project).not_to be_persisted
- expect(@to_project.errors[:name]).to eq(['has already been taken'])
- expect(@to_project.errors[:path]).to eq(['has already been taken'])
+ expect(@to_project).not_to be_persisted
+ expect(@to_project.errors[:name]).to eq(['has already been taken'])
+ expect(@to_project.errors[:path]).to eq(['has already been taken'])
+ end
end
- end
- context 'repository already exists' do
- let(:repository_storage) { 'default' }
- let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] }
+ context 'repository already exists' do
+ let(:repository_storage) { 'default' }
+ let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] }
- before do
- gitlab_shell.add_repository(repository_storage, "#{@to_user.namespace.full_path}/#{@from_project.path}")
- end
+ before do
+ gitlab_shell.add_repository(repository_storage, "#{@to_user.namespace.full_path}/#{@from_project.path}")
+ end
- after do
- gitlab_shell.remove_repository(repository_storage_path, "#{@to_user.namespace.full_path}/#{@from_project.path}")
- end
+ after do
+ gitlab_shell.remove_repository(repository_storage_path, "#{@to_user.namespace.full_path}/#{@from_project.path}")
+ end
- it 'does not allow creation' do
- to_project = fork_project(@from_project, @to_user, namespace: @to_user.namespace)
+ it 'does not allow creation' do
+ to_project = fork_project(@from_project, @to_user, namespace: @to_user.namespace)
- expect(to_project).not_to be_persisted
- expect(to_project.errors.messages).to have_key(:base)
- expect(to_project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ expect(to_project).not_to be_persisted
+ expect(to_project.errors.messages).to have_key(:base)
+ expect(to_project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ end
end
- end
- context 'GitLab CI is enabled' do
- it "forks and enables CI for fork" do
- @from_project.enable_ci
- @to_project = fork_project(@from_project, @to_user)
- expect(@to_project.builds_enabled?).to be_truthy
+ context 'GitLab CI is enabled' do
+ it "forks and enables CI for fork" do
+ @from_project.enable_ci
+ @to_project = fork_project(@from_project, @to_user)
+ expect(@to_project.builds_enabled?).to be_truthy
+ end
end
- end
- context "when project has restricted visibility level" do
- context "and only one visibility level is restricted" do
- before do
- @from_project.update_attributes(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
- stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL])
+ context "when project has restricted visibility level" do
+ context "and only one visibility level is restricted" do
+ before do
+ @from_project.update_attributes(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL])
+ end
+
+ it "creates fork with highest allowed level" do
+ forked_project = fork_project(@from_project, @to_user)
+
+ expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ end
end
- it "creates fork with highest allowed level" do
- forked_project = fork_project(@from_project, @to_user)
+ context "and all visibility levels are restricted" do
+ before do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC, Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PRIVATE])
+ end
+
+ it "creates fork with private visibility levels" do
+ forked_project = fork_project(@from_project, @to_user)
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
end
end
+ end
- context "and all visibility levels are restricted" do
- before do
- stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC, Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PRIVATE])
+ describe 'fork to namespace' do
+ before do
+ @group_owner = create(:user)
+ @developer = create(:user)
+ @project = create(:project, :repository,
+ creator_id: @group_owner.id,
+ star_count: 777,
+ description: 'Wow, such a cool project!')
+ @group = create(:group)
+ @group.add_user(@group_owner, GroupMember::OWNER)
+ @group.add_user(@developer, GroupMember::DEVELOPER)
+ @project.add_user(@developer, :developer)
+ @project.add_user(@group_owner, :developer)
+ @opts = { namespace: @group }
+ end
+
+ context 'fork project for group' do
+ it 'group owner successfully forks project into the group' do
+ to_project = fork_project(@project, @group_owner, @opts)
+
+ expect(to_project).to be_persisted
+ expect(to_project.errors).to be_empty
+ expect(to_project.owner).to eq(@group)
+ expect(to_project.namespace).to eq(@group)
+ expect(to_project.name).to eq(@project.name)
+ expect(to_project.path).to eq(@project.path)
+ expect(to_project.description).to eq(@project.description)
+ expect(to_project.star_count).to be_zero
end
+ end
- it "creates fork with private visibility levels" do
- forked_project = fork_project(@from_project, @to_user)
+ context 'fork project for group when user not owner' do
+ it 'group developer fails to fork project into the group' do
+ to_project = fork_project(@project, @developer, @opts)
+ expect(to_project.errors[:namespace]).to eq(['is not valid'])
+ end
+ end
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ context 'project already exists in group' do
+ it 'fails due to validation, not transaction failure' do
+ existing_project = create(:project, :repository,
+ name: @project.name,
+ namespace: @group)
+ to_project = fork_project(@project, @group_owner, @opts)
+ expect(existing_project.persisted?).to be_truthy
+ expect(to_project.errors[:name]).to eq(['has already been taken'])
+ expect(to_project.errors[:path]).to eq(['has already been taken'])
end
end
end
end
- describe 'fork to namespace' do
- before do
- @group_owner = create(:user)
- @developer = create(:user)
- @project = create(:project, :repository,
- creator_id: @group_owner.id,
- star_count: 777,
- description: 'Wow, such a cool project!')
- @group = create(:group)
- @group.add_user(@group_owner, GroupMember::OWNER)
- @group.add_user(@developer, GroupMember::DEVELOPER)
- @project.add_user(@developer, :developer)
- @project.add_user(@group_owner, :developer)
- @opts = { namespace: @group }
+ context 'when linking fork to an existing project' do
+ let(:fork_from_project) { create(:project, :public) }
+ let(:fork_to_project) { create(:project, :public) }
+ let(:user) { create(:user) }
+
+ subject { described_class.new(fork_from_project, user) }
+
+ def forked_from_project(project)
+ project.fork_network_member&.forked_from_project
end
- context 'fork project for group' do
- it 'group owner successfully forks project into the group' do
- to_project = fork_project(@project, @group_owner, @opts)
-
- expect(to_project).to be_persisted
- expect(to_project.errors).to be_empty
- expect(to_project.owner).to eq(@group)
- expect(to_project.namespace).to eq(@group)
- expect(to_project.name).to eq(@project.name)
- expect(to_project.path).to eq(@project.path)
- expect(to_project.description).to eq(@project.description)
- expect(to_project.star_count).to be_zero
+ context 'if project is already forked' do
+ it 'does not create fork relation' do
+ allow(fork_to_project).to receive(:forked?).and_return(true)
+ expect(forked_from_project(fork_to_project)).to be_nil
+ expect(subject.execute(fork_to_project)).to be_nil
+ expect(forked_from_project(fork_to_project)).to be_nil
end
end
- context 'fork project for group when user not owner' do
- it 'group developer fails to fork project into the group' do
- to_project = fork_project(@project, @developer, @opts)
- expect(to_project.errors[:namespace]).to eq(['is not valid'])
+ context 'if project is not forked' do
+ it 'creates fork relation' do
+ expect(fork_to_project.forked?).to be false
+ expect(forked_from_project(fork_to_project)).to be_nil
+
+ subject.execute(fork_to_project)
+
+ expect(fork_to_project.forked?).to be true
+ expect(forked_from_project(fork_to_project)).to eq fork_from_project
+ expect(fork_to_project.forked_from_project).to eq fork_from_project
end
- end
- context 'project already exists in group' do
- it 'fails due to validation, not transaction failure' do
- existing_project = create(:project, :repository,
- name: @project.name,
- namespace: @group)
- to_project = fork_project(@project, @group_owner, @opts)
- expect(existing_project.persisted?).to be_truthy
- expect(to_project.errors[:name]).to eq(['has already been taken'])
- expect(to_project.errors[:path]).to eq(['has already been taken'])
+ it 'flushes the forks count cache of the source project' do
+ expect(fork_from_project.forks_count).to be_zero
+
+ subject.execute(fork_to_project)
+
+ expect(fork_from_project.forks_count).to eq(1)
end
end
end
diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
new file mode 100644
index 00000000000..50e59954f73
--- /dev/null
+++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -0,0 +1,63 @@
+require 'spec_helper'
+
+describe Projects::HashedStorage::MigrateAttachmentsService do
+ subject(:service) { described_class.new(project) }
+ let(:project) { create(:project) }
+ let(:legacy_storage) { Storage::LegacyProject.new(project) }
+ let(:hashed_storage) { Storage::HashedProject.new(project) }
+
+ let!(:upload) { Upload.find_by(path: file_uploader.relative_path) }
+ let(:file_uploader) { build(:file_uploader, project: project) }
+ let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
+ let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
+
+ context '#execute' do
+ context 'when succeeds' do
+ it 'moves attachments to hashed storage layout' do
+ expect(File.file?(old_path)).to be_truthy
+ expect(File.file?(new_path)).to be_falsey
+ expect(File.exist?(base_path(legacy_storage))).to be_truthy
+ expect(File.exist?(base_path(hashed_storage))).to be_falsey
+ expect(FileUtils).to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage)).and_call_original
+
+ service.execute
+
+ expect(File.exist?(base_path(hashed_storage))).to be_truthy
+ expect(File.exist?(base_path(legacy_storage))).to be_falsey
+ expect(File.file?(old_path)).to be_falsey
+ expect(File.file?(new_path)).to be_truthy
+ end
+ end
+
+ context 'when original folder does not exist anymore' do
+ before do
+ FileUtils.rm_rf(base_path(legacy_storage))
+ end
+
+ it 'skips moving the folder and moves on to the next' do
+ expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
+
+ service.execute
+
+ expect(File.exist?(base_path(hashed_storage))).to be_falsey
+ expect(File.file?(new_path)).to be_falsey
+ end
+ end
+
+ context 'when target folder already exists' do
+ before do
+ FileUtils.mkdir_p(base_path(hashed_storage))
+ end
+
+ it 'raises AttachmentMigrationError' do
+ expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
+
+ expect { service.execute }.to raise_error(Projects::HashedStorage::AttachmentMigrationError)
+ end
+ end
+ end
+
+ def base_path(storage)
+ FileUploader.dynamic_path_builder(storage.disk_path)
+ end
+end
diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
new file mode 100644
index 00000000000..3a3e47fd9c0
--- /dev/null
+++ b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
@@ -0,0 +1,76 @@
+require 'spec_helper'
+
+describe Projects::HashedStorage::MigrateRepositoryService do
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:project) { create(:project, :empty_repo, :wiki_repo) }
+ let(:service) { described_class.new(project) }
+ let(:legacy_storage) { Storage::LegacyProject.new(project) }
+ let(:hashed_storage) { Storage::HashedProject.new(project) }
+
+ describe '#execute' do
+ before do
+ allow(service).to receive(:gitlab_shell) { gitlab_shell }
+ end
+
+ context 'when succeeds' do
+ it 'renames project and wiki repositories' do
+ service.execute
+
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_truthy
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_truthy
+ end
+
+ it 'updates project to be hashed and not read-only' do
+ service.execute
+
+ expect(project.hashed_storage?(:repository)).to be_truthy
+ expect(project.repository_read_only).to be_falsey
+ end
+
+ it 'calls the move operation for both repositories' do
+ expect_move_repository(project.disk_path, hashed_storage.disk_path)
+ expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki")
+
+ service.execute
+ end
+ end
+
+ context 'when one move fails' do
+ it 'rolls back repositories to the original name' do
+ from_name = project.disk_path
+ to_name = hashed_storage.disk_path
+ allow(service).to receive(:move_repository).and_call_original
+ allow(service).to receive(:move_repository).with(from_name, to_name).once { false } # will disable first move only
+
+ expect(service).to receive(:rollback_folder_move).and_call_original
+
+ service.execute
+
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_falsey
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_falsey
+ expect(project.repository_read_only?).to be_falsey
+ end
+
+ context 'when rollback fails' do
+ let(:from_name) { legacy_storage.disk_path }
+ let(:to_name) { hashed_storage.disk_path }
+
+ before do
+ hashed_storage.ensure_storage_path_exists
+ gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
+ end
+
+ it 'does not try to move a nil repository over the hashed one' do
+ expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name)
+ expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki")
+
+ service.execute
+ end
+ end
+ end
+
+ def expect_move_repository(from_name, to_name)
+ expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name).and_call_original
+ end
+ end
+end
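
The rollback example above uses a two-step stub: and_call_original first restores real behavior for all calls, then a narrower .with(...) stub intercepts only the first matching call and returns false to simulate a failed move. A minimal sketch of the trick:

    allow(service).to receive(:move_repository).and_call_original
    # Only the (from_name, to_name) argument pair is intercepted; the
    # block's return value (false) stands in for a failed move, and all
    # other calls fall through to the real method.
    allow(service).to receive(:move_repository)
      .with(from_name, to_name)
      .once { false }
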
diff --git a/spec/services/projects/hashed_storage_migration_service_spec.rb b/spec/services/projects/hashed_storage_migration_service_spec.rb
index b71b47c59b6..466f0b5d7c2 100644
--- a/spec/services/projects/hashed_storage_migration_service_spec.rb
+++ b/spec/services/projects/hashed_storage_migration_service_spec.rb
@@ -1,74 +1,44 @@
require 'spec_helper'
describe Projects::HashedStorageMigrationService do
- let(:gitlab_shell) { Gitlab::Shell.new }
let(:project) { create(:project, :empty_repo, :wiki_repo) }
- let(:service) { described_class.new(project) }
- let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::HashedProject.new(project) }
+ subject(:service) { described_class.new(project) }
describe '#execute' do
- before do
- allow(service).to receive(:gitlab_shell) { gitlab_shell }
- end
-
- context 'when succeeds' do
- it 'renames project and wiki repositories' do
- service.execute
+ context 'repository migration' do
+ let(:repository_service) { Projects::HashedStorage::MigrateRepositoryService.new(project, subject.logger) }
- expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_truthy
- expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_truthy
- end
+ it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do
+ expect(Projects::HashedStorage::MigrateRepositoryService).to receive(:new).with(project, subject.logger).and_return(repository_service)
+ expect(repository_service).to receive(:execute)
- it 'updates project to be hashed and not read-only' do
service.execute
-
- expect(project.hashed_storage?(:repository)).to be_truthy
- expect(project.repository_read_only).to be_falsey
end
- it 'move operation is called for both repositories' do
- expect_move_repository(project.disk_path, hashed_storage.disk_path)
- expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki")
+ it 'does not delegate migration if repository is already migrated' do
+ project.storage_version = ::Project::LATEST_STORAGE_VERSION
+ expect(Projects::HashedStorage::MigrateRepositoryService).not_to receive(:new)
service.execute
end
end
- context 'when one move fails' do
- it 'rollsback repositories to original name' do
- from_name = project.disk_path
- to_name = hashed_storage.disk_path
- allow(service).to receive(:move_repository).and_call_original
- allow(service).to receive(:move_repository).with(from_name, to_name).once { false } # will disable first move only
+ context 'attachments migration' do
+ let(:attachments_service) { Projects::HashedStorage::MigrateAttachmentsService.new(project, subject.logger) }
- expect(service).to receive(:rollback_folder_move).and_call_original
+ it 'delegates migration to Projects::HashedStorage::MigrateAttachmentsService' do
+ expect(Projects::HashedStorage::MigrateAttachmentsService).to receive(:new).with(project, subject.logger).and_return(attachments_service)
+ expect(attachments_service).to receive(:execute)
service.execute
-
- expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_falsey
- expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_falsey
end
- context 'when rollback fails' do
- before do
- from_name = legacy_storage.disk_path
- to_name = hashed_storage.disk_path
+ it 'does not delegate migration if attachments are already migrated' do
+ project.storage_version = ::Project::LATEST_STORAGE_VERSION
+ expect(Projects::HashedStorage::MigrateAttachmentsService).not_to receive(:new)
- hashed_storage.ensure_storage_path_exists
- gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
- end
-
- it 'does not try to move nil repository over hashed' do
- expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki")
-
- service.execute
- end
+ service.execute
end
end
-
- def expect_move_repository(from_name, to_name)
- expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name).and_call_original
- end
end
end
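
After this refactor the migration service only orchestrates, so each delegation example stubs the collaborator class's .new to hand back a test double and asserts #execute is invoked on it. A generic sketch of the pattern (the logger argument follows the diff; the use of a verifying instance_double is an assumption):

    it 'delegates to the repository migration service' do
      collaborator = instance_double(Projects::HashedStorage::MigrateRepositoryService)

      expect(Projects::HashedStorage::MigrateRepositoryService)
        .to receive(:new).with(project, service.logger)
        .and_return(collaborator)
      expect(collaborator).to receive(:execute)

      service.execute
    end
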
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index 034065aab00..bf7facaec99 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -6,6 +6,41 @@ describe Projects::ImportService do
subject { described_class.new(project, user) }
+ describe '#async?' do
+ it 'returns true for an asynchronous importer' do
+ importer_class = double(:importer, async?: true)
+
+ allow(subject).to receive(:has_importer?).and_return(true)
+ allow(subject).to receive(:importer_class).and_return(importer_class)
+
+ expect(subject).to be_async
+ end
+
+ it 'returns false for a regular importer' do
+ importer_class = double(:importer, async?: false)
+
+ allow(subject).to receive(:has_importer?).and_return(true)
+ allow(subject).to receive(:importer_class).and_return(importer_class)
+
+ expect(subject).not_to be_async
+ end
+
+ it 'returns false when the importer does not define #async?' do
+ importer_class = double(:importer)
+
+ allow(subject).to receive(:has_importer?).and_return(true)
+ allow(subject).to receive(:importer_class).and_return(importer_class)
+
+ expect(subject).not_to be_async
+ end
+
+ it 'returns false when the importer does not exist' do
+ allow(subject).to receive(:has_importer?).and_return(false)
+
+ expect(subject).not_to be_async
+ end
+ end
+
describe '#execute' do
context 'with unknown url' do
before do
@@ -37,21 +72,24 @@ describe Projects::ImportService do
end
context 'with a Github repository' do
- it 'succeeds if repository import is successfully' do
- expect_any_instance_of(Github::Import).to receive(:execute).and_return(true)
+ it 'succeeds if repository import was scheduled' do
+ expect_any_instance_of(Gitlab::GithubImport::ParallelImporter)
+ .to receive(:execute)
+ .and_return(true)
result = subject.execute
expect(result[:status]).to eq :success
end
- it 'fails if repository import fails' do
- expect_any_instance_of(Repository).to receive(:fetch_remote).and_raise(Gitlab::Shell::Error.new('Failed to import the repository'))
+ it 'fails if repository import was not scheduled' do
+ expect_any_instance_of(Gitlab::GithubImport::ParallelImporter)
+ .to receive(:execute)
+ .and_return(false)
result = subject.execute
expect(result[:status]).to eq :error
- expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.path_with_namespace} - The remote data could not be imported."
end
end
@@ -92,47 +130,22 @@ describe Projects::ImportService do
end
it 'succeeds if importer succeeds' do
- allow_any_instance_of(Github::Import).to receive(:execute).and_return(true)
+ allow_any_instance_of(Gitlab::GithubImport::ParallelImporter)
+ .to receive(:execute).and_return(true)
result = subject.execute
expect(result[:status]).to eq :success
end
- it 'flushes various caches' do
- allow_any_instance_of(Github::Import).to receive(:execute)
- .and_return(true)
-
- expect_any_instance_of(Repository).to receive(:expire_content_cache)
-
- subject.execute
- end
-
it 'fails if importer fails' do
- allow_any_instance_of(Github::Import).to receive(:execute).and_return(false)
-
- result = subject.execute
-
- expect(result[:status]).to eq :error
- expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.full_path} - The remote data could not be imported."
- end
-
- it 'fails if importer raise an error' do
- allow_any_instance_of(Github::Import).to receive(:execute).and_raise(Projects::ImportService::Error.new('Github: failed to connect API'))
+ allow_any_instance_of(Gitlab::GithubImport::ParallelImporter)
+ .to receive(:execute)
+ .and_return(false)
result = subject.execute
expect(result[:status]).to eq :error
- expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.full_path} - Github: failed to connect API"
- end
-
- it 'expires content cache after error' do
- allow_any_instance_of(Project).to receive(:repository_exists?).and_return(false)
-
- expect_any_instance_of(Repository).to receive(:fetch_remote).and_raise(Gitlab::Shell::Error.new)
- expect_any_instance_of(Repository).to receive(:expire_content_cache)
-
- subject.execute
end
end
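Reviewer note: the `#async?` examples above stub `has_importer?` and `importer_class`, so the predicate they pin down is roughly the following sketch (an assumption for illustration, not the shipped Projects::ImportService code):

    def async?
      # `try` returns nil when the double never defines #async?, which
      # matches the "does not define #async?" example returning false.
      has_importer? && importer_class.try(:async?) == true
    end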
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 2459f371a91..2b1337bee7e 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -42,6 +42,18 @@ describe Projects::TransferService do
expect(service).to receive(:execute_system_hooks)
end
end
+
+ it 'moves the disk path' do
+ old_path = project.repository.disk_path
+ old_full_path = project.repository.full_path
+
+ transfer_project(project, user, group)
+
+ expect(project.repository.disk_path).not_to eq(old_path)
+ expect(project.repository.full_path).not_to eq(old_full_path)
+ expect(project.disk_path).not_to eq(old_path)
+ expect(project.disk_path).to start_with(group.path)
+ end
end
context 'when transfer fails' do
@@ -188,6 +200,26 @@ describe Projects::TransferService do
end
end
+ context 'when hashed storage in use' do
+ let(:hashed_project) { create(:project, :repository, :hashed, namespace: user.namespace) }
+
+ before do
+ group.add_owner(user)
+ end
+
+ it 'does not move the directory' do
+ old_path = hashed_project.repository.disk_path
+ old_full_path = hashed_project.repository.full_path
+
+ transfer_project(hashed_project, user, group)
+ hashed_project.reload
+
+ expect(hashed_project.repository.disk_path).to eq(old_path)
+ expect(hashed_project.repository.full_path).to eq(old_full_path)
+ expect(hashed_project.disk_path).to eq(old_path)
+ end
+ end
+
describe 'refreshing project authorizations' do
let(:group) { create(:group) }
let(:owner) { project.namespace.owner }
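Reviewer note: the contrast between the two transfer contexts comes down to how the disk path is derived. A rough illustration (the exact hashed layout here is an assumption, not the production scheme):

    legacy_disk_path = "#{project.namespace.full_path}/#{project.path}"        # namespace-based, moves on transfer
    hashed_disk_path = "@hashed/#{Digest::SHA256.hexdigest(project.id.to_s)}"  # ID-based, stable across transfers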
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index d4ac1f6ad81..bfb86284d86 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -1,10 +1,18 @@
require "spec_helper"
describe Projects::UpdatePagesService do
- let(:project) { create(:project, :repository) }
- let(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
- let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
+ set(:project) { create(:project, :repository) }
+ set(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
+ set(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png') }
+ let(:extension) { 'zip' }
+
+ let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{extension}") }
+ let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{extension}") }
+ let(:metadata) do
+ filename = Rails.root + "spec/fixtures/pages.#{extension}.meta"
+ fixture_file_upload(filename) if File.exist?(filename)
+ end
subject { described_class.new(project, build) }
@@ -12,18 +20,85 @@ describe Projects::UpdatePagesService do
project.remove_pages
end
- %w(tar.gz zip).each do |format|
- context "for valid #{format}" do
- let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{format}") }
- let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{format}") }
- let(:metadata) do
- filename = Rails.root + "spec/fixtures/pages.#{format}.meta"
- fixture_file_upload(filename) if File.exist?(filename)
+ context 'legacy artifacts' do
+ %w(tar.gz zip).each do |format|
+ context "for valid #{format}" do
+ # Define the extension inside the per-format context; a bare `let`
+ # at loop level is redefined on each iteration, so every branch
+ # would otherwise test only the last format.
+ let(:extension) { format }
+
+ before do
+ build.update_attributes(legacy_artifacts_file: file)
+ build.update_attributes(legacy_artifacts_metadata: metadata)
+ end
+
+ describe 'pages artifacts' do
+ context 'with expiry date' do
+ before do
+ build.artifacts_expire_in = "2 days"
+ end
+
+ it "doesn't delete artifacts" do
+ expect(execute).to eq(:success)
+
+ expect(build.reload.artifacts?).to eq(true)
+ end
+ end
+
+ context 'without expiry date' do
+ it "does delete artifacts" do
+ expect(execute).to eq(:success)
+
+ expect(build.reload.artifacts?).to eq(false)
+ end
+ end
+ end
+
+ it 'succeeds' do
+ expect(project.pages_deployed?).to be_falsey
+ expect(execute).to eq(:success)
+ expect(project.pages_deployed?).to be_truthy
+
+ # Check that all expected files are extracted
+ %w[index.html zero .hidden/file].each do |filename|
+ expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy
+ end
+ end
+
+ it 'limits pages size' do
+ stub_application_setting(max_pages_size: 1)
+ expect(execute).not_to eq(:success)
+ end
+
+ it 'removes pages after destroy' do
+ expect(PagesWorker).to receive(:perform_in)
+ expect(project.pages_deployed?).to be_falsey
+ expect(execute).to eq(:success)
+ expect(project.pages_deployed?).to be_truthy
+ project.destroy
+ expect(project.pages_deployed?).to be_falsey
+ end
+
+ it 'fails if sha on branch is not latest' do
+ build.update_attributes(ref: 'feature')
+
+ expect(execute).not_to eq(:success)
+ end
+
+ it 'fails for an empty file' do
+ build.update_attributes(legacy_artifacts_file: empty_file)
+
+ expect(execute).not_to eq(:success)
+ end
end
+ end
+ end
+ context 'for new artifacts' do
+ context "for a valid job" do
before do
- build.update_attributes(artifacts_file: file)
- build.update_attributes(artifacts_metadata: metadata)
+ create(:ci_job_artifact, file: file, job: build)
+ create(:ci_job_artifact, file_type: :metadata, file: metadata, job: build)
+
+ build.reload
end
describe 'pages artifacts' do
@@ -35,7 +110,7 @@ describe Projects::UpdatePagesService do
it "doesn't delete artifacts" do
expect(execute).to eq(:success)
- expect(build.reload.artifacts_file?).to eq(true)
+ expect(build.artifacts?).to eq(true)
end
end
@@ -43,7 +118,7 @@ describe Projects::UpdatePagesService do
it "does delete artifacts" do
expect(execute).to eq(:success)
- expect(build.reload.artifacts_file?).to eq(false)
+ expect(build.reload.artifacts?).to eq(false)
end
end
end
@@ -74,13 +149,14 @@ describe Projects::UpdatePagesService do
end
it 'fails if sha on branch is not latest' do
- pipeline.update_attributes(sha: 'old_sha')
- build.update_attributes(artifacts_file: file)
+ build.update_attributes(ref: 'feature')
+
expect(execute).not_to eq(:success)
end
it 'fails for an empty file' do
- build.update_attributes(artifacts_file: empty_file)
+ build.job_artifacts_archive.update_attributes(file: empty_file)
+
expect(execute).not_to eq(:success)
end
end
@@ -97,7 +173,7 @@ describe Projects::UpdatePagesService do
end
it 'fails for invalid archive' do
- build.update_attributes(artifacts_file: invalid_file)
+ build.update_attributes(legacy_artifacts_file: invalid_file)
expect(execute).not_to eq(:success)
end
@@ -108,8 +184,8 @@ describe Projects::UpdatePagesService do
file = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip')
metafile = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
- build.update_attributes(artifacts_file: file)
- build.update_attributes(artifacts_metadata: metafile)
+ build.update_attributes(legacy_artifacts_file: file)
+ build.update_attributes(legacy_artifacts_metadata: metafile)
allow(build).to receive(:artifacts_metadata_entry)
.and_return(metadata)
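Reviewer note: the bare `execute` these examples compare against `:success` is a small spec helper; given that services in this suite return `{ status: ... }` hashes, its assumed shape is:

    def execute
      subject.execute[:status]
    end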
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 3da222e2ed8..d887f70efae 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -1,198 +1,251 @@
require 'spec_helper'
-describe Projects::UpdateService, '#execute' do
+describe Projects::UpdateService do
include ProjectForksHelper
- let(:gitlab_shell) { Gitlab::Shell.new }
let(:user) { create(:user) }
- let(:admin) { create(:admin) }
-
let(:project) do
create(:project, creator: user, namespace: user.namespace)
end
- context 'when changing visibility level' do
- context 'when visibility_level is INTERNAL' do
- it 'updates the project to internal' do
- result = update_project(project, user, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
-
- expect(result).to eq({ status: :success })
- expect(project).to be_internal
- end
- end
-
- context 'when visibility_level is PUBLIC' do
- it 'updates the project to public' do
- result = update_project(project, user, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- expect(result).to eq({ status: :success })
- expect(project).to be_public
- end
- end
-
- context 'when visibility levels are restricted to PUBLIC only' do
- before do
- stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
- end
+ describe '#execute' do
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:admin) { create(:admin) }
+ context 'when changing visibility level' do
context 'when visibility_level is INTERNAL' do
it 'updates the project to internal' do
result = update_project(project, user, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+
expect(result).to eq({ status: :success })
expect(project).to be_internal
end
end
context 'when visibility_level is PUBLIC' do
- it 'does not update the project to public' do
+ it 'updates the project to public' do
result = update_project(project, user, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ expect(result).to eq({ status: :success })
+ expect(project).to be_public
+ end
+ end
- expect(result).to eq({ status: :error, message: 'New visibility level not allowed!' })
- expect(project).to be_private
+ context 'when visibility levels are restricted to PUBLIC only' do
+ before do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
end
- context 'when updated by an admin' do
- it 'updates the project to public' do
- result = update_project(project, admin, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ context 'when visibility_level is INTERNAL' do
+ it 'updates the project to internal' do
+ result = update_project(project, user, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
expect(result).to eq({ status: :success })
- expect(project).to be_public
+ expect(project).to be_internal
+ end
+ end
+
+ context 'when visibility_level is PUBLIC' do
+ it 'does not update the project to public' do
+ result = update_project(project, user, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
+ expect(result).to eq({ status: :error, message: 'New visibility level not allowed!' })
+ expect(project).to be_private
+ end
+
+ context 'when updated by an admin' do
+ it 'updates the project to public' do
+ result = update_project(project, admin, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ expect(result).to eq({ status: :success })
+ expect(project).to be_public
+ end
end
end
end
+
+ context 'When project visibility is higher than parent group' do
+ let(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::INTERNAL) }
+
+ before do
+ project.update(namespace: group, visibility_level: group.visibility_level)
+ end
+
+ it 'does not update project visibility level' do
+ result = update_project(project, admin, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
+ expect(result).to eq({ status: :error, message: 'Visibility level public is not allowed in a internal group.' })
+ expect(project.reload).to be_internal
+ end
+ end
end
- context 'When project visibility is higher than parent group' do
- let(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::INTERNAL) }
+ describe 'when updating project that has forks' do
+ let(:project) { create(:project, :internal) }
+ let(:forked_project) { fork_project(project) }
- before do
- project.update(namespace: group, visibility_level: group.visibility_level)
+ it 'updates forks visibility level when parent set to more restrictive' do
+ opts = { visibility_level: Gitlab::VisibilityLevel::PRIVATE }
+
+ expect(project).to be_internal
+ expect(forked_project).to be_internal
+
+ expect(update_project(project, admin, opts)).to eq({ status: :success })
+
+ expect(project).to be_private
+ expect(forked_project.reload).to be_private
end
- it 'does not update project visibility level' do
- result = update_project(project, admin, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ it 'does not update forks visibility level when parent set to less restrictive' do
+ opts = { visibility_level: Gitlab::VisibilityLevel::PUBLIC }
- expect(result).to eq({ status: :error, message: 'Visibility level public is not allowed in a internal group.' })
- expect(project.reload).to be_internal
+ expect(project).to be_internal
+ expect(forked_project).to be_internal
+
+ expect(update_project(project, admin, opts)).to eq({ status: :success })
+
+ expect(project).to be_public
+ expect(forked_project.reload).to be_internal
end
end
- end
- describe 'when updating project that has forks' do
- let(:project) { create(:project, :internal) }
- let(:forked_project) { fork_project(project) }
+ context 'when updating a default branch' do
+ let(:project) { create(:project, :repository) }
- it 'updates forks visibility level when parent set to more restrictive' do
- opts = { visibility_level: Gitlab::VisibilityLevel::PRIVATE }
+ it 'changes a default branch' do
+ update_project(project, admin, default_branch: 'feature')
- expect(project).to be_internal
- expect(forked_project).to be_internal
+ expect(Project.find(project.id).default_branch).to eq 'feature'
+ end
- expect(update_project(project, admin, opts)).to eq({ status: :success })
+ it 'does not change a default branch' do
+ # The branch 'unexisted-branch' does not exist.
+ update_project(project, admin, default_branch: 'unexisted-branch')
- expect(project).to be_private
- expect(forked_project.reload).to be_private
+ expect(Project.find(project.id).default_branch).to eq 'master'
+ end
end
- it 'does not update forks visibility level when parent set to less restrictive' do
- opts = { visibility_level: Gitlab::VisibilityLevel::PUBLIC }
+ context 'when updating a project that contains container images' do
+ before do
+ stub_container_registry_config(enabled: true)
+ stub_container_registry_tags(repository: /image/, tags: %w[rc1])
+ create(:container_repository, project: project, name: :image)
+ end
+
+ it 'does not allow renaming the project' do
+ result = update_project(project, admin, path: 'renamed')
- expect(project).to be_internal
- expect(forked_project).to be_internal
+ expect(result).to include(status: :error)
+ expect(result[:message]).to match(/contains container registry tags/)
+ end
- expect(update_project(project, admin, opts)).to eq({ status: :success })
+ it 'allows updating other settings' do
+ result = update_project(project, admin, public_builds: true)
- expect(project).to be_public
- expect(forked_project.reload).to be_internal
+ expect(result[:status]).to eq :success
+ expect(project.reload.public_builds).to be true
+ end
end
- end
- context 'when updating a default branch' do
- let(:project) { create(:project, :repository) }
+ context 'when renaming a project' do
+ let(:repository_storage) { 'default' }
+ let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] }
- it 'changes a default branch' do
- update_project(project, admin, default_branch: 'feature')
+ context 'with legacy storage' do
+ before do
+ gitlab_shell.add_repository(repository_storage, "#{user.namespace.full_path}/existing")
+ end
- expect(Project.find(project.id).default_branch).to eq 'feature'
- end
+ after do
+ gitlab_shell.remove_repository(repository_storage_path, "#{user.namespace.full_path}/existing")
+ end
- it 'does not change a default branch' do
- # The branch 'unexisted-branch' does not exist.
- update_project(project, admin, default_branch: 'unexisted-branch')
+ it 'does not allow renaming when new path matches existing repository on disk' do
+ result = update_project(project, admin, path: 'existing')
- expect(Project.find(project.id).default_branch).to eq 'master'
- end
- end
+ expect(result).to include(status: :error)
+ expect(result[:message]).to match('There is already a repository with that name on disk')
+ expect(project).not_to be_valid
+ expect(project.errors.messages).to have_key(:base)
+ expect(project.errors.messages[:base]).to include('There is already a repository with that name on disk')
+ end
+ end
- context 'when updating a project that contains container images' do
- before do
- stub_container_registry_config(enabled: true)
- stub_container_registry_tags(repository: /image/, tags: %w[rc1])
- create(:container_repository, project: project, name: :image)
- end
+ context 'with hashed storage' do
+ let(:project) { create(:project, :repository, creator: user, namespace: user.namespace) }
+
+ before do
+ stub_application_setting(hashed_storage_enabled: true)
+ end
+
+ it 'does not check if new path matches existing repository on disk' do
+ expect(project).not_to receive(:repository_with_same_path_already_exists?)
- it 'does not allow to rename the project' do
- result = update_project(project, admin, path: 'renamed')
+ result = update_project(project, admin, path: 'existing')
- expect(result).to include(status: :error)
- expect(result[:message]).to match(/contains container registry tags/)
+ expect(result).to include(status: :success)
+ end
+ end
end
- it 'allows to update other settings' do
- result = update_project(project, admin, public_builds: true)
+ context 'when passing invalid parameters' do
+ it 'returns an error result when record cannot be updated' do
+ result = update_project(project, admin, { name: 'foo&bar' })
- expect(result[:status]).to eq :success
- expect(project.reload.public_builds).to be true
+ expect(result).to eq({
+ status: :error,
+ message: "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'."
+ })
+ end
end
end
- context 'when renaming a project' do
- let(:repository_storage) { 'default' }
- let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] }
+ describe '#run_auto_devops_pipeline?' do
+ subject { described_class.new(project, user).run_auto_devops_pipeline? }
- context 'with legacy storage' do
+ context 'when master contains a .gitlab-ci.yml file' do
before do
- gitlab_shell.add_repository(repository_storage, "#{user.namespace.full_path}/existing")
+ allow(project.repository).to receive(:gitlab_ci_yml).and_return("script: ['test']")
end
- after do
- gitlab_shell.remove_repository(repository_storage_path, "#{user.namespace.full_path}/existing")
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when auto devops is explicitly enabled' do
+ before do
+ project.create_auto_devops!(enabled: true)
end
- it 'does not allow renaming when new path matches existing repository on disk' do
- result = update_project(project, admin, path: 'existing')
+ it { is_expected.to eq(true) }
+ end
- expect(result).to include(status: :error)
- expect(result[:message]).to match('There is already a repository with that name on disk')
- expect(project).not_to be_valid
- expect(project.errors.messages).to have_key(:base)
- expect(project.errors.messages[:base]).to include('There is already a repository with that name on disk')
+ context 'when auto devops is explicitly disabled' do
+ before do
+ project.create_auto_devops!(enabled: false)
end
- end
- context 'with hashed storage' do
- let(:project) { create(:project, :repository, creator: user, namespace: user.namespace) }
+ it { is_expected.to eq(false) }
+ end
+ context 'when auto devops is set to instance setting' do
before do
- stub_application_setting(hashed_storage_enabled: true)
+ project.create_auto_devops!(enabled: nil)
+ allow(project.auto_devops).to receive(:previous_changes).and_return('enabled' => true)
end
- it 'does not check if new path matches existing repository on disk' do
- expect(project).not_to receive(:repository_with_same_path_already_exists?)
-
- result = update_project(project, admin, path: 'existing')
+ context 'when auto devops is enabled system-wide' do
+ before do
+ stub_application_setting(auto_devops_enabled: true)
+ end
- expect(result).to include(status: :success)
+ it { is_expected.to eq(true) }
end
- end
- end
- context 'when passing invalid parameters' do
- it 'returns an error result when record cannot be updated' do
- result = update_project(project, admin, { name: 'foo&bar' })
+ context 'when auto devops is disabled system-wide' do
+ before do
+ stub_application_setting(auto_devops_enabled: false)
+ end
- expect(result).to eq({
- status: :error,
- message: "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'."
- })
+ it { is_expected.to eq(false) }
+ end
end
end
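Reviewer note: the restricted-visibility examples (success for admins, error for regular users) exercise a gate that behaves roughly like this hedged sketch; it reuses the `Gitlab::CurrentSettings` values stubbed via `stub_application_setting`, but is not the literal service code:

    def allowed_visibility_level?(current_user, level)
      # Admins bypass the restriction; everyone else may not pick a restricted level.
      current_user.admin? ||
        !Gitlab::CurrentSettings.restricted_visibility_levels.include?(level)
    end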
diff --git a/spec/services/search/global_service_spec.rb b/spec/services/search/global_service_spec.rb
index 1309240b430..d8dba26e194 100644
--- a/spec/services/search/global_service_spec.rb
+++ b/spec/services/search/global_service_spec.rb
@@ -35,8 +35,8 @@ describe Search::GlobalService do
expect(results.objects('projects')).to match_array [internal_project, public_project]
end
- it 'namespace name is searchable' do
- results = described_class.new(user, search: found_project.namespace.path).execute
+ it 'project name is searchable' do
+ results = described_class.new(user, search: found_project.name).execute
expect(results.objects('projects')).to match_array [found_project]
end
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 0a6ab455abe..47412110b4b 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -692,9 +692,9 @@ describe SystemNoteService do
describe '.new_commit_summary' do
it 'escapes HTML titles' do
commit = double(title: '<pre>This is a test</pre>', short_id: '12345678')
- escaped = '* 12345678 - &lt;pre&gt;This is a test&lt;&#x2F;pre&gt;'
+ escaped = '&lt;pre&gt;This is a test&lt;&#x2F;pre&gt;'
- expect(described_class.new_commit_summary([commit])).to eq([escaped])
+ expect(described_class.new_commit_summary([commit])).to all(match(%r[- #{escaped}]))
end
end
@@ -970,31 +970,33 @@ describe SystemNoteService do
end
end
- describe '.remove_merge_request_wip' do
- let(:noteable) { create(:issue, project: project, title: 'WIP: Lorem ipsum') }
+ describe '.handle_merge_request_wip' do
+ context 'adding wip note' do
+ let(:noteable) { create(:merge_request, source_project: project, title: 'WIP Lorem ipsum') }
- subject { described_class.remove_merge_request_wip(noteable, project, author) }
+ subject { described_class.handle_merge_request_wip(noteable, project, author) }
- it_behaves_like 'a system note' do
- let(:action) { 'title' }
- end
+ it_behaves_like 'a system note' do
+ let(:action) { 'title' }
+ end
- it 'sets the note text' do
- expect(subject.note).to eq 'unmarked as a **Work In Progress**'
+ it 'sets the note text' do
+ expect(subject.note).to eq 'marked as a **Work In Progress**'
+ end
end
- end
- describe '.add_merge_request_wip' do
- let(:noteable) { create(:issue, project: project, title: 'Lorem ipsum') }
+ context 'removing wip note' do
+ let(:noteable) { create(:merge_request, source_project: project, title: 'Lorem ipsum') }
- subject { described_class.add_merge_request_wip(noteable, project, author) }
+ subject { described_class.handle_merge_request_wip(noteable, project, author) }
- it_behaves_like 'a system note' do
- let(:action) { 'title' }
- end
+ it_behaves_like 'a system note' do
+ let(:action) { 'title' }
+ end
- it 'sets the note text' do
- expect(subject.note).to eq 'marked as a **Work In Progress**'
+ it 'sets the note text' do
+ expect(subject.note).to eq 'unmarked as a **Work In Progress**'
+ end
end
end
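Reviewer note: the two branches above suggest the consolidated helper dispatches on the current WIP state. A plausible sketch, where `create_note_for` is a hypothetical stand-in for the real note-building call:

    def handle_merge_request_wip(noteable, project, author)
      prefix = noteable.work_in_progress? ? '' : 'un'

      create_note_for(noteable, project, author,
                      body: "#{prefix}marked as a **Work In Progress**",
                      action: 'title')
    end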
diff --git a/spec/services/users/keys_count_service_spec.rb b/spec/services/users/keys_count_service_spec.rb
new file mode 100644
index 00000000000..a188cf86772
--- /dev/null
+++ b/spec/services/users/keys_count_service_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Users::KeysCountService, :use_clean_rails_memory_store_caching do
+ let(:user) { create(:user) }
+ let(:service) { described_class.new(user) }
+
+ describe '#count' do
+ before do
+ create(:personal_key, user: user)
+ end
+
+ it 'returns the number of SSH keys as an Integer' do
+ expect(service.count).to eq(1)
+ end
+
+ it 'caches the number of keys in Redis' do
+ service.delete_cache
+
+ recorder = ActiveRecord::QueryRecorder.new do
+ 2.times { service.count }
+ end
+
+ expect(recorder.count).to eq(1)
+ end
+ end
+
+ describe '#refresh_cache' do
+ it 'refreshes the Redis cache' do
+ Rails.cache.write(service.cache_key, 10)
+ service.refresh_cache
+
+ expect(Rails.cache.fetch(service.cache_key, raw: true)).to be_zero
+ end
+ end
+
+ describe '#delete_cache' do
+ it 'removes the cache' do
+ service.count
+ service.delete_cache
+
+ expect(Rails.cache.fetch(service.cache_key, raw: true)).to be_nil
+ end
+ end
+
+ describe '#uncached_count' do
+ it 'returns the number of SSH keys' do
+ expect(service.uncached_count).to be_zero
+ end
+
+ it 'does not cache the number of keys' do
+ recorder = ActiveRecord::QueryRecorder.new do
+ 2.times { service.uncached_count }
+ end
+
+ expect(recorder.count).to be > 0
+ end
+ end
+
+ describe '#cache_key' do
+ it 'returns the cache key' do
+ expect(service.cache_key).to eq("users/key-count-service/#{user.id}")
+ end
+ end
+end
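Reviewer note: a minimal cache-backed counter satisfies every example in this new spec; the shape below is inferred from the expectations, not copied from the implementation:

    class KeysCountService
      def initialize(user)
        @user = user
      end

      # Served from Redis after the first call, hence one query for two calls.
      def count
        Rails.cache.fetch(cache_key, raw: true) { uncached_count }.to_i
      end

      def refresh_cache
        delete_cache
        count
      end

      def delete_cache
        Rails.cache.delete(cache_key)
      end

      # Always hits the database; never writes to the cache.
      def uncached_count
        @user.keys.count
      end

      def cache_key
        "users/key-count-service/#{@user.id}"
      end
    end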
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index a669429ce3e..21910e69d2e 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -146,7 +146,7 @@ describe WebHookService do
let(:system_hook) { create(:system_hook) }
it 'enqueues WebHookWorker' do
- expect(Sidekiq::Client).to receive(:enqueue).with(WebHookWorker, project_hook.id, data, 'push_hooks')
+ expect(WebHookWorker).to receive(:perform_async).with(project_hook.id, data, 'push_hooks')
described_class.new(project_hook, data, 'push_hooks').async_execute
end
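Reviewer note: the change swaps the low-level `Sidekiq::Client.enqueue` call for the idiomatic worker-class API; both push the same job to the same queue:

    WebHookWorker.perform_async(hook.id, data, 'push_hooks')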
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 7c8331f6c60..f94fb8733d5 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -50,6 +50,7 @@ RSpec.configure do |config|
config.include SearchHelpers, type: :feature
config.include CookieHelper, :js
config.include InputHelper, :js
+ config.include SelectionHelper, :js
config.include InspectRequests, :js
config.include WaitForRequests, :js
config.include LiveDebugger, :js
@@ -120,18 +121,6 @@ RSpec.configure do |config|
reset_delivered_emails!
end
- # Stub the `ForkedStorageCheck.storage_available?` method unless
- # `:broken_storage` metadata is defined
- #
- # This check can be slow and is unnecessary in a test environment where we
- # know the storage is available, because we create it at runtime
- config.before(:example) do |example|
- unless example.metadata[:broken_storage]
- allow(Gitlab::Git::Storage::ForkedStorageCheck)
- .to receive(:storage_available?).and_return(true)
- end
- end
-
config.around(:each, :use_clean_rails_memory_store_caching) do |example|
caching_store = Rails.cache
Rails.cache = ActiveSupport::Cache::MemoryStore.new
@@ -206,3 +195,6 @@ Shoulda::Matchers.configure do |config|
with.library :rails
end
end
+
+# Prevent Rugged from picking up local developer gitconfig.
+Rugged::Settings['search_path_global'] = Rails.root.join('tmp/tests').to_s
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index 9f672bc92fc..935b170a0f6 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -7,21 +7,41 @@ require 'selenium-webdriver'
# Give CI some extra time
timeout = (ENV['CI'] || ENV['CI_SERVER']) ? 60 : 30
-Capybara.javascript_driver = :chrome
Capybara.register_driver :chrome do |app|
- extra_args = []
- extra_args << 'headless' unless ENV['CHROME_HEADLESS'] =~ /^(false|no|0)$/i
-
capabilities = Selenium::WebDriver::Remote::Capabilities.chrome(
- chromeOptions: {
- 'args' => %w[no-sandbox disable-gpu --window-size=1240,1400] + extra_args
+ # This enables access to logs with `page.driver.manage.get_log(:browser)`
+ loggingPrefs: {
+ browser: "ALL",
+ client: "ALL",
+ driver: "ALL",
+ server: "ALL"
}
)
- Capybara::Selenium::Driver
- .new(app, browser: :chrome, desired_capabilities: capabilities)
+ options = Selenium::WebDriver::Chrome::Options.new
+ options.add_argument("window-size=1240,1400")
+
+ # Chrome won't work properly in a Docker container in sandbox mode
+ options.add_argument("no-sandbox")
+
+ # Run headless by default unless CHROME_HEADLESS is specified
+ unless ENV['CHROME_HEADLESS'] =~ /^(false|no|0)$/i
+ options.add_argument("headless")
+
+ # Chrome documentation says this flag is needed for now
+ # https://developers.google.com/web/updates/2017/04/headless-chrome#cli
+ options.add_argument("disable-gpu")
+ end
+
+ Capybara::Selenium::Driver.new(
+ app,
+ browser: :chrome,
+ desired_capabilities: capabilities,
+ options: options
+ )
end
+Capybara.javascript_driver = :chrome
Capybara.default_max_wait_time = timeout
Capybara.ignore_hidden_elements = true
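Reviewer note: with `loggingPrefs` enabled, a flaky JS spec can dump the browser console straight from the driver, per the comment above:

    page.driver.manage.get_log(:browser).each { |entry| puts entry }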
diff --git a/spec/support/controllers/githubish_import_controller_shared_examples.rb b/spec/support/controllers/githubish_import_controller_shared_examples.rb
index b23d81a226a..a0839eefe6c 100644
--- a/spec/support/controllers/githubish_import_controller_shared_examples.rb
+++ b/spec/support/controllers/githubish_import_controller_shared_examples.rb
@@ -14,7 +14,7 @@ shared_examples 'a GitHub-ish import controller: POST personal_access_token' do
it "updates access token" do
token = 'asdfasdf9876'
- allow_any_instance_of(Gitlab::GithubImport::Client)
+ allow_any_instance_of(Gitlab::LegacyGithubImport::Client)
.to receive(:user).and_return(true)
post :personal_access_token, personal_access_token: token
@@ -79,7 +79,7 @@ shared_examples 'a GitHub-ish import controller: GET status' do
end
it "handles an invalid access token" do
- allow_any_instance_of(Gitlab::GithubImport::Client)
+ allow_any_instance_of(Gitlab::LegacyGithubImport::Client)
.to receive(:repos).and_raise(Octokit::Unauthorized)
get :status
@@ -110,7 +110,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
context "when the repository owner is the provider user" do
context "when the provider user and GitLab user's usernames match" do
it "takes the current user's namespace" do
- expect(Gitlab::GithubImport::ProjectCreator)
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
.and_return(double(execute: true))
@@ -122,7 +122,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
let(:provider_username) { "someone_else" }
it "takes the current user's namespace" do
- expect(Gitlab::GithubImport::ProjectCreator)
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
.and_return(double(execute: true))
@@ -149,7 +149,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
end
it "takes the existing namespace" do
- expect(Gitlab::GithubImport::ProjectCreator)
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, existing_namespace, user, access_params, type: provider)
.and_return(double(execute: true))
@@ -161,7 +161,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
it "creates a project using user's namespace" do
create(:user, username: other_username)
- expect(Gitlab::GithubImport::ProjectCreator)
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
.and_return(double(execute: true))
@@ -173,14 +173,14 @@ shared_examples 'a GitHub-ish import controller: POST create' do
context "when a namespace with the provider user's username doesn't exist" do
context "when current user can create namespaces" do
it "creates the namespace" do
- expect(Gitlab::GithubImport::ProjectCreator)
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).and_return(double(execute: true))
expect { post :create, target_namespace: provider_repo.name, format: :js }.to change(Namespace, :count).by(1)
end
it "takes the new namespace" do
- expect(Gitlab::GithubImport::ProjectCreator)
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, an_instance_of(Group), user, access_params, type: provider)
.and_return(double(execute: true))
@@ -194,14 +194,14 @@ shared_examples 'a GitHub-ish import controller: POST create' do
end
it "doesn't create the namespace" do
- expect(Gitlab::GithubImport::ProjectCreator)
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).and_return(double(execute: true))
expect { post :create, format: :js }.not_to change(Namespace, :count)
end
it "takes the current user's namespace" do
- expect(Gitlab::GithubImport::ProjectCreator)
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
.and_return(double(execute: true))
@@ -219,7 +219,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
end
it 'takes the selected namespace and name' do
- expect(Gitlab::GithubImport::ProjectCreator)
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, test_namespace, user, access_params, type: provider)
.and_return(double(execute: true))
@@ -227,7 +227,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
end
it 'takes the selected name and default namespace' do
- expect(Gitlab::GithubImport::ProjectCreator)
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, user.namespace, user, access_params, type: provider)
.and_return(double(execute: true))
@@ -245,7 +245,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
end
it 'takes the selected namespace and name' do
- expect(Gitlab::GithubImport::ProjectCreator)
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, nested_namespace, user, access_params, type: provider)
.and_return(double(execute: true))
@@ -257,7 +257,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
let(:test_name) { 'test_name' }
it 'takes the selected namespace and name' do
- expect(Gitlab::GithubImport::ProjectCreator)
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
.and_return(double(execute: true))
@@ -265,7 +265,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
end
it 'creates the namespaces' do
- allow(Gitlab::GithubImport::ProjectCreator)
+ allow(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
.and_return(double(execute: true))
@@ -274,7 +274,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
end
it 'new namespace has the right parent' do
- allow(Gitlab::GithubImport::ProjectCreator)
+ allow(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
.and_return(double(execute: true))
@@ -289,7 +289,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
let!(:parent_namespace) { create(:group, name: 'foo', owner: user) }
it 'takes the selected namespace and name' do
- expect(Gitlab::GithubImport::ProjectCreator)
+ expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
.and_return(double(execute: true))
@@ -297,7 +297,7 @@ shared_examples 'a GitHub-ish import controller: POST create' do
end
it 'creates the namespaces' do
- allow(Gitlab::GithubImport::ProjectCreator)
+ allow(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
.and_return(double(execute: true))
diff --git a/spec/support/fixture_helpers.rb b/spec/support/fixture_helpers.rb
index 5515c355cea..128aaaf25fe 100644
--- a/spec/support/fixture_helpers.rb
+++ b/spec/support/fixture_helpers.rb
@@ -1,6 +1,7 @@
module FixtureHelpers
def fixture_file(filename)
return '' if filename.blank?
+
File.read(expand_fixture_path(filename))
end
diff --git a/spec/support/generate-seed-repo-rb b/spec/support/generate-seed-repo-rb
index ef3c8e7087f..4ee33f9725b 100755
--- a/spec/support/generate-seed-repo-rb
+++ b/spec/support/generate-seed-repo-rb
@@ -33,6 +33,7 @@ end
def capture!(cmd, dir)
output = IO.popen(cmd, 'r', chdir: dir) { |io| io.read }
raise "command failed with #{$?}: #{cmd.join(' ')}" unless $?.success?
+
output.chomp
end
diff --git a/spec/support/gitaly.rb b/spec/support/gitaly.rb
index 89fb362cf14..c7e8a39a617 100644
--- a/spec/support/gitaly.rb
+++ b/spec/support/gitaly.rb
@@ -1,6 +1,11 @@
RSpec.configure do |config|
config.before(:each) do |example|
- next if example.metadata[:skip_gitaly_mock]
- allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(true)
+ if example.metadata[:disable_gitaly]
+ allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(false)
+ else
+ next if example.metadata[:skip_gitaly_mock]
+
+ allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(true)
+ end
end
end
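Reviewer note: specs choose a branch through example metadata; for instance, the new flag forces the non-Gitaly code paths:

    describe 'Repository access', :disable_gitaly do
      # Gitlab::GitalyClient.feature_enabled? returns false for every flag here
    end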
diff --git a/spec/support/google_api/cloud_platform_helpers.rb b/spec/support/google_api/cloud_platform_helpers.rb
index dabf0db7666..8a073e58db8 100644
--- a/spec/support/google_api/cloud_platform_helpers.rb
+++ b/spec/support/google_api/cloud_platform_helpers.rb
@@ -63,7 +63,7 @@ module GoogleApi
##
# gcloud container clusters create
- # https://cloud.google.com/container-engine/reference/rest/v1/projects.zones.clusters/create
+ # https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.zones.clusters/create
# rubocop:disable Metrics/CyclomaticComplexity
# rubocop:disable Metrics/PerceivedComplexity
def cloud_platform_cluster_body(**options)
diff --git a/spec/support/matchers/be_a_binary_string.rb b/spec/support/matchers/be_a_binary_string.rb
new file mode 100644
index 00000000000..f041ae76167
--- /dev/null
+++ b/spec/support/matchers/be_a_binary_string.rb
@@ -0,0 +1,9 @@
+RSpec::Matchers.define :be_a_binary_string do |_|
+ match do |actual|
+ actual.is_a?(String) && actual.encoding == Encoding.find('ASCII-8BIT')
+ end
+
+ description do
+ "be a String with binary encoding"
+ end
+end
diff --git a/spec/support/matchers/have_gitlab_http_status.rb b/spec/support/matchers/have_gitlab_http_status.rb
index 3198f1b9edd..e7e418cdde4 100644
--- a/spec/support/matchers/have_gitlab_http_status.rb
+++ b/spec/support/matchers/have_gitlab_http_status.rb
@@ -8,7 +8,11 @@ RSpec::Matchers.define :have_gitlab_http_status do |expected|
end
failure_message do |actual|
+ # actual can be either an ActionDispatch::TestResponse (which uses #response_code)
+ # or a Capybara::Session (which uses #status_code)
+ response_code = actual.try(:response_code) || actual.try(:status_code)
+
"expected the response to have status code #{expected.inspect}" \
- " but it was #{actual.response_code}. The response was: #{actual.body}"
+ " but it was #{response_code}. The response was: #{actual.body}"
end
end
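Reviewer note: after this change the matcher reads the status from whichever object it is handed, so both forms work:

    expect(response).to have_gitlab_http_status(404) # ActionDispatch::TestResponse
    expect(page).to have_gitlab_http_status(200)     # Capybara::Session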
diff --git a/spec/support/matchers/security_header_matcher.rb b/spec/support/matchers/security_header_matcher.rb
new file mode 100644
index 00000000000..f8518d13ebb
--- /dev/null
+++ b/spec/support/matchers/security_header_matcher.rb
@@ -0,0 +1,5 @@
+RSpec::Matchers.define :include_security_headers do |expected|
+ match do |actual|
+ expect(actual.headers).to include('X-Content-Type-Options')
+ end
+end
diff --git a/spec/support/prometheus/additional_metrics_shared_examples.rb b/spec/support/prometheus/additional_metrics_shared_examples.rb
index 620fa37d455..dbbd4ad4d40 100644
--- a/spec/support/prometheus/additional_metrics_shared_examples.rb
+++ b/spec/support/prometheus/additional_metrics_shared_examples.rb
@@ -41,16 +41,30 @@ RSpec.shared_examples 'additional metrics query' do
end
describe 'project has Kubernetes service' do
- let(:project) { create(:kubernetes_project) }
- let(:environment) { create(:environment, slug: 'environment-slug', project: project) }
- let(:kube_namespace) { project.kubernetes_service.actual_namespace }
+ shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
+ let(:environment) { create(:environment, slug: 'environment-slug', project: project) }
+ let(:kube_namespace) { project.deployment_platform.actual_namespace }
- it_behaves_like 'query context containing environment slug and filter'
+ it_behaves_like 'query context containing environment slug and filter'
- it 'query context contains kube_namespace' do
- expect(subject).to receive(:query_metrics).with(hash_including(kube_namespace: kube_namespace))
+ it 'query context contains kube_namespace' do
+ expect(subject).to receive(:query_metrics).with(hash_including(kube_namespace: kube_namespace))
- subject.query(*query_params)
+ subject.query(*query_params)
+ end
+ end
+
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ let(:project) { create(:kubernetes_project) }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
+ end
+
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
end
diff --git a/spec/support/protected_tags/access_control_ce_shared_examples.rb b/spec/support/protected_tags/access_control_ce_shared_examples.rb
index 2770cdcbefc..71eec9f3217 100644
--- a/spec/support/protected_tags/access_control_ce_shared_examples.rb
+++ b/spec/support/protected_tags/access_control_ce_shared_examples.rb
@@ -1,5 +1,5 @@
RSpec.shared_examples "protected tags > access control > CE" do
- ProtectedTag::CreateAccessLevel.human_access_levels.each do |(access_type_id, access_type_name)|
+ ProtectedRefAccess::HUMAN_ACCESS_LEVELS.each do |(access_type_id, access_type_name)|
it "allows creating protected tags that #{access_type_name} can create" do
visit project_protected_tags_path(project)
diff --git a/spec/support/query_recorder.rb b/spec/support/query_recorder.rb
index ba0b805caad..8cf8f45a8b2 100644
--- a/spec/support/query_recorder.rb
+++ b/spec/support/query_recorder.rb
@@ -8,7 +8,14 @@ module ActiveRecord
ActiveSupport::Notifications.subscribed(method(:callback), 'sql.active_record', &block)
end
+ def show_backtrace(values)
+ Rails.logger.debug("QueryRecorder SQL: #{values[:sql]}")
+ caller.each { |line| Rails.logger.debug(" --> #{line}") }
+ end
+
def callback(name, start, finish, message_id, values)
+ show_backtrace(values) if ENV['QUERY_RECORDER_DEBUG']
+
if values[:name]&.include?("CACHE")
@cached << values[:sql]
elsif !values[:name]&.include?("SCHEMA")
@@ -34,7 +41,8 @@ RSpec::Matchers.define :exceed_query_limit do |expected|
supports_block_expectations
match do |block|
- query_count(&block) > expected_count + threshold
+ @subject_block = block
+ actual_count > expected_count + threshold
end
failure_message_when_negated do |actual|
@@ -48,6 +56,11 @@ RSpec::Matchers.define :exceed_query_limit do |expected|
self
end
+ def for_query(query)
+ @query = query
+ self
+ end
+
def threshold
@threshold.to_i
end
@@ -61,18 +74,28 @@ RSpec::Matchers.define :exceed_query_limit do |expected|
end
def actual_count
- @recorder.count
+ @actual_count ||= if @query
+ recorder.log.select { |recorded| recorded =~ @query }.size
+ else
+ recorder.count
+ end
+ end
+
+ def recorder
+ @recorder ||= ActiveRecord::QueryRecorder.new(&@subject_block)
end
- def query_count(&block)
- @recorder = ActiveRecord::QueryRecorder.new(&block)
- @recorder.count
+ def count_queries(queries)
+ queries.each_with_object(Hash.new(0)) { |query, counts| counts[query] += 1 }
end
def log_message
if expected.is_a?(ActiveRecord::QueryRecorder)
- extra_queries = (expected.log - @recorder.log).join("\n\n")
- "Extra queries: \n\n #{extra_queries}"
+ counts = count_queries(expected.log)
+ extra_queries = @recorder.log.reject { |query| counts[query] -= 1 unless counts[query].zero? }
+ extra_queries_display = count_queries(extra_queries).map { |query, count| "[#{count}] #{query}" }
+
+ (['Extra queries:'] + extra_queries_display).join("\n\n")
else
@recorder.log_message
end
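Reviewer note: the new `for_query` chain narrows the limit to queries matching a regexp, and setting QUERY_RECORDER_DEBUG logs a backtrace per query. Example usage, with names taken from this diff:

    expect { service.execute }
      .not_to exceed_query_limit(1).for_query(/FROM "users"/)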
diff --git a/spec/support/selection_helper.rb b/spec/support/selection_helper.rb
new file mode 100644
index 00000000000..b4725b137b2
--- /dev/null
+++ b/spec/support/selection_helper.rb
@@ -0,0 +1,6 @@
+module SelectionHelper
+ def select_element(selector)
+ find(selector) # wait for the element to exist before manipulating the selection
+ execute_script("let range = document.createRange(); let sel = window.getSelection(); range.selectNodeContents(document.querySelector('#{selector}')); sel.addRange(range);")
+ end
+end
diff --git a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
new file mode 100644
index 00000000000..935c08221e0
--- /dev/null
+++ b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
@@ -0,0 +1,240 @@
+shared_examples 'handle uploads' do
+ let(:user) { create(:user) }
+ let(:jpg) { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') }
+ let(:txt) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') }
+
+ describe "POST #create" do
+ context 'when a user is not authorized to upload a file' do
+ it 'returns 404 status' do
+ post :create, params.merge(file: jpg, format: :json)
+
+ expect(response.status).to eq(404)
+ end
+ end
+
+ context 'when a user can upload a file' do
+ before do
+ sign_in(user)
+ model.add_developer(user)
+ end
+
+ context "without params['file']" do
+ it "returns an error" do
+ post :create, params.merge(format: :json)
+
+ expect(response).to have_gitlab_http_status(422)
+ end
+ end
+
+ context 'with valid image' do
+ before do
+ post :create, params.merge(file: jpg, format: :json)
+ end
+
+ it 'returns a response with the original filename, a new link, and the correct type' do
+ expect(response.body).to match '\"alt\":\"rails_sample\"'
+ expect(response.body).to match "\"url\":\"/uploads"
+ end
+
+ # NOTE: This is as close as we're getting to an Integration test for this
+ # behavior. We're avoiding a proper Feature test because those should be
+ # testing things entirely user-facing, which the Upload model is very much
+ # not.
+ it 'creates a corresponding Upload record' do
+ upload = Upload.last
+
+ aggregate_failures do
+ expect(upload).to exist
+ expect(upload.model).to eq(model)
+ end
+ end
+ end
+
+ context 'with valid non-image file' do
+ before do
+ post :create, params.merge(file: txt, format: :json)
+ end
+
+ it 'returns a response with the original filename, a new link, and the correct type' do
+ expect(response.body).to match '\"alt\":\"doc_sample.txt\"'
+ expect(response.body).to match "\"url\":\"/uploads"
+ end
+ end
+ end
+ end
+
+ describe "GET #show" do
+ let(:show_upload) do
+ get :show, params.merge(secret: "123456", filename: "image.jpg")
+ end
+
+ context "when the model is public" do
+ before do
+ model.update_attribute(:visibility_level, Gitlab::VisibilityLevel::PUBLIC)
+ end
+
+ context "when not signed in" do
+ context "when the file exists" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+ allow(jpg).to receive(:exists?).and_return(true)
+ end
+
+ it "responds with status 200" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
+ context "when the file doesn't exist" do
+ it "responds with status 404" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context "when signed in" do
+ before do
+ sign_in(user)
+ end
+
+ context "when the file exists" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+ allow(jpg).to receive(:exists?).and_return(true)
+ end
+
+ it "responds with status 200" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
+ context "when the file doesn't exist" do
+ it "responds with status 404" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+ end
+
+ context "when the model is private" do
+ before do
+ model.update_attribute(:visibility_level, Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ context "when not signed in" do
+ context "when the file exists" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+ allow(jpg).to receive(:exists?).and_return(true)
+ end
+
+ context "when the file is an image" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
+ end
+
+ it "responds with status 200" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
+ context "when the file is not an image" do
+ it "redirects to the sign in page" do
+ show_upload
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+
+ context "when the file doesn't exist" do
+ it "redirects to the sign in page" do
+ show_upload
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+
+ context "when signed in" do
+ before do
+ sign_in(user)
+ end
+
+ context "when the user has access to the project" do
+ before do
+ model.add_developer(user)
+ end
+
+ context "when the file exists" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+ allow(jpg).to receive(:exists?).and_return(true)
+ end
+
+ it "responds with status 200" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
+ context "when the file doesn't exist" do
+ it "responds with status 404" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context "when the user doesn't have access to the model" do
+ context "when the file exists" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
+ allow(jpg).to receive(:exists?).and_return(true)
+ end
+
+ context "when the file is an image" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
+ end
+
+ it "responds with status 200" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
+ context "when the file is not an image" do
+ it "responds with status 404" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context "when the file doesn't exist" do
+ it "responds with status 404" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+ end
+ end
+ end
+end
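Reviewer note: controller specs hook these shared examples up by supplying `model` and `params`; an assumed example for a project uploads controller:

    it_behaves_like 'handle uploads' do
      let(:model) { create(:project, :public) }
      let(:params) { { namespace_id: model.namespace.to_param, project_id: model.to_param } }
    end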
diff --git a/spec/support/shared_examples/features/protected_branches_access_control_ce.rb b/spec/support/shared_examples/features/protected_branches_access_control_ce.rb
index 5fde91512da..17f319f49e9 100644
--- a/spec/support/shared_examples/features/protected_branches_access_control_ce.rb
+++ b/spec/support/shared_examples/features/protected_branches_access_control_ce.rb
@@ -1,5 +1,5 @@
shared_examples "protected branches > access control > CE" do
- ProtectedBranch::PushAccessLevel.human_access_levels.each do |(access_type_id, access_type_name)|
+ ProtectedRefAccess::HUMAN_ACCESS_LEVELS.each do |(access_type_id, access_type_name)|
it "allows creating protected branches that #{access_type_name} can push to" do
visit project_protected_branches_path(project)
@@ -44,7 +44,7 @@ shared_examples "protected branches > access control > CE" do
end
end
- ProtectedBranch::MergeAccessLevel.human_access_levels.each do |(access_type_id, access_type_name)|
+ ProtectedRefAccess::HUMAN_ACCESS_LEVELS.each do |(access_type_id, access_type_name)|
it "allows creating protected branches that #{access_type_name} can merge to" do
visit project_protected_branches_path(project)
diff --git a/spec/support/shared_examples/throttled_touch.rb b/spec/support/shared_examples/throttled_touch.rb
new file mode 100644
index 00000000000..4a25bb9b750
--- /dev/null
+++ b/spec/support/shared_examples/throttled_touch.rb
@@ -0,0 +1,20 @@
+shared_examples_for 'throttled touch' do
+ describe '#touch' do
+ it 'updates the updated_at timestamp' do
+ Timecop.freeze do
+ subject.touch
+ expect(subject.updated_at).to eq(Time.zone.now)
+ end
+ end
+
+ it 'updates the object at most once per minute' do
+ first_updated_at = Time.zone.now - (ThrottledTouch::TOUCH_INTERVAL * 2)
+ second_updated_at = Time.zone.now - (ThrottledTouch::TOUCH_INTERVAL * 1.5)
+
+ Timecop.freeze(first_updated_at) { subject.touch }
+ Timecop.freeze(second_updated_at) { subject.touch }
+
+ expect(subject.updated_at).to eq(first_updated_at)
+ end
+ end
+end
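Reviewer note: a concern satisfying these shared examples would guard `touch` with an interval check; a minimal sketch under that assumption:

    module ThrottledTouch
      # Don't touch the object more often than once per minute.
      TOUCH_INTERVAL = 1.minute

      def touch(*args)
        super if (Time.zone.now - updated_at) > TOUCH_INTERVAL
      end
    end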
diff --git a/spec/support/stored_repositories.rb b/spec/support/stored_repositories.rb
index f3deae0f455..f9121cce985 100644
--- a/spec/support/stored_repositories.rb
+++ b/spec/support/stored_repositories.rb
@@ -12,6 +12,25 @@ RSpec.configure do |config|
raise GRPC::Unavailable.new('Gitaly broken in this spec')
end
- Gitlab::Git::Storage::CircuitBreaker.reset_all!
+ # Simulate a storage that has exceeded the failure threshold
+ first_failure = Time.parse("2017-11-14 17:52:30")
+ last_failure = Time.parse("2017-11-14 18:54:37")
+ failure_count = Gitlab::CurrentSettings
+ .current_application_settings
+ .circuitbreaker_failure_count_threshold + 1
+ cache_key = "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}broken:#{Gitlab::Environment.hostname}"
+
+ Gitlab::Git::Storage.redis.with do |redis|
+ redis.pipelined do
+ redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, cache_key)
+ redis.hset(cache_key, :first_failure, first_failure.to_i)
+ redis.hset(cache_key, :last_failure, last_failure.to_i)
+ redis.hset(cache_key, :failure_count, failure_count.to_i)
+ end
+ end
+ end
+
+ config.after(:each, :broken_storage) do
+ Gitlab::Git::Storage.redis.with(&:flushall)
end
end
diff --git a/spec/support/stub_configuration.rb b/spec/support/stub_configuration.rb
index 4ead78529c3..b36cf3c544c 100644
--- a/spec/support/stub_configuration.rb
+++ b/spec/support/stub_configuration.rb
@@ -43,6 +43,8 @@ module StubConfiguration
end
def stub_storage_settings(messages)
+ messages.deep_stringify_keys!
+
# Default storage is always required
messages['default'] ||= Gitlab.config.repositories.storages.default
messages.each do |storage_name, storage_settings|
diff --git a/spec/support/stub_gitlab_calls.rb b/spec/support/stub_gitlab_calls.rb
index 5f22d886910..c1618f5086c 100644
--- a/spec/support/stub_gitlab_calls.rb
+++ b/spec/support/stub_gitlab_calls.rb
@@ -21,6 +21,12 @@ module StubGitlabCalls
allow_any_instance_of(Ci::Pipeline).to receive(:ci_yaml_file) { ci_yaml }
end
+ def stub_repository_ci_yaml_file(sha:, path: '.gitlab-ci.yml')
+ allow_any_instance_of(Repository)
+ .to receive(:gitlab_ci_yml_for).with(sha, path)
+ .and_return(gitlab_ci_yaml)
+ end
+
def stub_ci_builds_disabled
allow_any_instance_of(Project).to receive(:builds_enabled?).and_return(false)
end
diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb
index fff120fcb88..b300b493f86 100644
--- a/spec/support/test_env.rb
+++ b/spec/support/test_env.rb
@@ -120,6 +120,7 @@ module TestEnv
FileUtils.mkdir_p(repos_path)
FileUtils.mkdir_p(backup_path)
FileUtils.mkdir_p(pages_path)
+ FileUtils.mkdir_p(artifacts_path)
end
def clean_gitlab_test_path
@@ -233,6 +234,10 @@ module TestEnv
Gitlab.config.pages.path
end
+ def artifacts_path
+ Gitlab.config.artifacts.path
+ end
+
# When no cached assets exist, manually hit the root path to create them
#
# Otherwise they'd be created by the first test, often timing out and
diff --git a/spec/support/track_untracked_uploads_helpers.rb b/spec/support/track_untracked_uploads_helpers.rb
new file mode 100644
index 00000000000..d05eda08201
--- /dev/null
+++ b/spec/support/track_untracked_uploads_helpers.rb
@@ -0,0 +1,20 @@
+module TrackUntrackedUploadsHelpers
+ def uploaded_file
+ fixture_path = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
+ fixture_file_upload(fixture_path)
+ end
+
+ def ensure_temporary_tracking_table_exists
+ Gitlab::BackgroundMigration::PrepareUntrackedUploads.new.send(:ensure_temporary_tracking_table_exists)
+ end
+
+ def drop_temp_table_if_exists
+ ActiveRecord::Base.connection.drop_table(:untracked_files_for_uploads) if ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads)
+ end
+
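+ # Appearance behaves like a singleton: reuse (or build) the record with
+ # placeholder attributes, then apply the attributes under test.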
+ def create_or_update_appearance(attrs)
+ a = Appearance.first_or_initialize(title: 'foo', description: 'bar')
+ a.update!(attrs)
+ a
+ end
+end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index bf2e11bc360..b41c3b3958a 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -212,7 +212,7 @@ describe 'gitlab:app namespace rake task' do
# Avoid asking gitaly about the root ref (which will fail because of the
# mocked storages)
- allow_any_instance_of(Repository).to receive(:empty_repo?).and_return(false)
+ allow_any_instance_of(Repository).to receive(:empty?).and_return(false)
end
after do
diff --git a/spec/tasks/gitlab/cleanup_rake_spec.rb b/spec/tasks/gitlab/cleanup_rake_spec.rb
new file mode 100644
index 00000000000..9e746ceddd6
--- /dev/null
+++ b/spec/tasks/gitlab/cleanup_rake_spec.rb
@@ -0,0 +1,67 @@
+require 'rake_helper'
+
+describe 'gitlab:cleanup rake tasks' do
+ before do
+ Rake.application.rake_require 'tasks/gitlab/cleanup'
+ end
+
+ describe 'cleanup' do
+ let(:gitaly_address) { Gitlab.config.repositories.storages.default.gitaly_address }
+ let(:storages) do
+ {
+ 'default' => { 'path' => Settings.absolute('tmp/tests/default_storage'), 'gitaly_address' => gitaly_address }
+ }
+ end
+
+ before do
+ FileUtils.mkdir(Settings.absolute('tmp/tests/default_storage'))
+ allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
+ end
+
+ after do
+ FileUtils.rm_rf(Settings.absolute('tmp/tests/default_storage'))
+ end
+
+ describe 'cleanup:repos' do
+ before do
+ FileUtils.mkdir_p(Settings.absolute('tmp/tests/default_storage/broken/project.git'))
+ FileUtils.mkdir_p(Settings.absolute('tmp/tests/default_storage/@hashed/12/34/5678.git'))
+ end
+
+ it 'moves it to an orphaned path' do
+ run_rake_task('gitlab:cleanup:repos')
+ repo_list = Dir['tmp/tests/default_storage/broken/*']
+
+ expect(repo_list.first).to include('+orphaned+')
+ end
+
+ it 'ignores @hashed repos' do
+ run_rake_task('gitlab:cleanup:repos')
+
+ expect(Dir.exist?(Settings.absolute('tmp/tests/default_storage/@hashed/12/34/5678.git'))).to be_truthy
+ end
+ end
+
+ describe 'cleanup:dirs' do
+ it 'removes missing namespaces' do
+ FileUtils.mkdir_p(Settings.absolute("tmp/tests/default_storage/namespace_1/project.git"))
+ FileUtils.mkdir_p(Settings.absolute("tmp/tests/default_storage/namespace_2/project.git"))
+ allow(Namespace).to receive(:pluck).and_return('namespace_1')
+
+ stub_env('REMOVE', 'true')
+ run_rake_task('gitlab:cleanup:dirs')
+
+ expect(Dir.exist?(Settings.absolute('tmp/tests/default_storage/namespace_1'))).to be_truthy
+ expect(Dir.exist?(Settings.absolute('tmp/tests/default_storage/namespace_2'))).to be_falsey
+ end
+
+ it 'ignores @hashed directory' do
+ FileUtils.mkdir_p(Settings.absolute('tmp/tests/default_storage/@hashed/12/34/5678.git'))
+
+ run_rake_task('gitlab:cleanup:dirs')
+
+ expect(Dir.exist?(Settings.absolute('tmp/tests/default_storage/@hashed/12/34/5678.git'))).to be_truthy
+ end
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb
index 5dd8fe8eaa5..6aba86fdc3c 100644
--- a/spec/tasks/gitlab/gitaly_rake_spec.rb
+++ b/spec/tasks/gitlab/gitaly_rake_spec.rb
@@ -47,7 +47,7 @@ describe 'gitlab:gitaly namespace rake task' do
stub_env('CI', false)
FileUtils.mkdir_p(clone_path)
expect(Dir).to receive(:chdir).with(clone_path).and_call_original
- allow(Bundler).to receive(:bundle_path).and_return('/fake/bundle_path')
+ allow(Rails.env).to receive(:test?).and_return(false)
end
context 'gmake is available' do
@@ -57,7 +57,7 @@ describe 'gitlab:gitaly namespace rake task' do
it 'calls gmake in the gitaly directory' do
expect(Gitlab::Popen).to receive(:popen).with(%w[which gmake]).and_return(['/usr/bin/gmake', 0])
- expect(main_object).to receive(:run_command!).with(command_preamble + %w[gmake BUNDLE_PATH=/fake/bundle_path]).and_return(true)
+ expect(main_object).to receive(:run_command!).with(command_preamble + %w[gmake]).and_return(true)
run_rake_task('gitlab:gitaly:install', clone_path)
end
@@ -70,18 +70,20 @@ describe 'gitlab:gitaly namespace rake task' do
end
it 'calls make in the gitaly directory' do
- expect(main_object).to receive(:run_command!).with(command_preamble + %w[make BUNDLE_PATH=/fake/bundle_path]).and_return(true)
+ expect(main_object).to receive(:run_command!).with(command_preamble + %w[make]).and_return(true)
run_rake_task('gitlab:gitaly:install', clone_path)
end
- context 'when Rails.env is not "test"' do
+ context 'when Rails.env is test' do
+ let(:command) { %w[make BUNDLE_FLAGS=--no-deployment] }
+
before do
- allow(Rails.env).to receive(:test?).and_return(false)
+ allow(Rails.env).to receive(:test?).and_return(true)
end
- it 'calls make in the gitaly directory without BUNDLE_PATH' do
- expect(main_object).to receive(:run_command!).with(command_preamble + ['make']).and_return(true)
+ it 'calls make in the gitaly directory with --no-deployment flag for bundle' do
+ expect(main_object).to receive(:run_command!).with(command_preamble + command).and_return(true)
run_rake_task('gitlab:gitaly:install', clone_path)
end
@@ -110,6 +112,7 @@ describe 'gitlab:gitaly namespace rake task' do
expected_output = <<~TOML
# Gitaly storage configuration generated from #{Gitlab.config.source} on #{Time.current.to_s(:long)}
# This is in TOML format suitable for use in Gitaly's config.toml file.
+ bin_dir = "tmp/tests/gitaly"
socket_path = "/path/to/my.socket"
[gitlab-shell]
dir = "#{Gitlab.config.gitlab_shell.path}"
diff --git a/spec/unicorn/unicorn_spec.rb b/spec/unicorn/unicorn_spec.rb
index 41de94d35c2..a4cf479a339 100644
--- a/spec/unicorn/unicorn_spec.rb
+++ b/spec/unicorn/unicorn_spec.rb
@@ -37,7 +37,22 @@ describe 'Unicorn' do
config_path = 'tmp/tests/unicorn.rb'
File.write(config_path, config_lines.join("\n") + "\n")
- cmd = %W[unicorn -E test -c #{config_path} #{Rails.root.join('config.ru')}]
+ rackup_path = 'tmp/tests/config.ru'
+ File.write(rackup_path, <<~EOS)
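+ # Minimal Rack app: GET returns the serving worker's PID; any other request
+ # sends the signal named in the query string to that worker.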
+ app =
+ proc do |env|
+ if env['REQUEST_METHOD'] == 'GET'
+ [200, {}, [Process.pid]]
+ else
+ Process.kill(env['QUERY_STRING'], Process.pid)
+ [200, {}, ['Bye!']]
+ end
+ end
+
+ run app
+ EOS
+
+ cmd = %W[unicorn -E test -c #{config_path} #{rackup_path}]
@unicorn_master_pid = spawn(*cmd)
wait_unicorn_boot!(@unicorn_master_pid, ready_file)
WebMock.allow_net_connect!
@@ -45,14 +60,14 @@ describe 'Unicorn' do
%w[SIGQUIT SIGTERM SIGKILL].each do |signal|
it "has a worker that self-terminates on signal #{signal}" do
- response = Excon.get('unix:///unicorn_test/pid', socket: @socket_path)
+ response = Excon.get('unix://', socket: @socket_path)
expect(response.status).to eq(200)
worker_pid = response.body.to_i
expect(worker_pid).to be > 0
begin
- Excon.post('unix:///unicorn_test/kill', socket: @socket_path, body: "signal=#{signal}")
+ Excon.post("unix://?#{signal}", socket: @socket_path)
rescue Excon::Error::Socket
# The connection may be closed abruptly
end
@@ -71,6 +86,7 @@ describe 'Unicorn' do
timeout = 5 * 60
timeout.times do
return if File.exist?(ready_file)
+
pid = Process.waitpid(master_pid, Process::WNOHANG)
raise "unicorn failed to boot: #{$?}" unless pid.nil?
diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb
index f52b2bab05b..fd195d6f9b8 100644
--- a/spec/uploaders/file_uploader_spec.rb
+++ b/spec/uploaders/file_uploader_spec.rb
@@ -28,25 +28,51 @@ describe FileUploader do
end
context 'hashed storage' do
- let(:project) { build_stubbed(:project, :hashed) }
+ context 'when rolled out attachments' do
+ let(:project) { build_stubbed(:project, :hashed) }
- describe '.absolute_path' do
- it 'returns the correct absolute path by building it dynamically' do
- upload = double(model: project, path: 'secret/foo.jpg')
+ describe '.absolute_path' do
+ it 'returns the correct absolute path by building it dynamically' do
+ upload = double(model: project, path: 'secret/foo.jpg')
- dynamic_segment = project.disk_path
+ dynamic_segment = project.disk_path
- expect(described_class.absolute_path(upload))
- .to end_with("#{dynamic_segment}/secret/foo.jpg")
+ expect(described_class.absolute_path(upload))
+ .to end_with("#{dynamic_segment}/secret/foo.jpg")
+ end
+ end
+
+ describe "#store_dir" do
+ it "stores in the namespace path" do
+ uploader = described_class.new(project)
+
+ expect(uploader.store_dir).to include(project.disk_path)
+ expect(uploader.store_dir).not_to include("system")
+ end
end
end
- describe "#store_dir" do
- it "stores in the namespace path" do
- uploader = described_class.new(project)
+ context 'when only repositories are rolled out' do
+ let(:project) { build_stubbed(:project, storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
- expect(uploader.store_dir).to include(project.disk_path)
- expect(uploader.store_dir).not_to include("system")
+ describe '.absolute_path' do
+ it 'returns the correct absolute path by building it dynamically' do
+ upload = double(model: project, path: 'secret/foo.jpg')
+
+ dynamic_segment = project.full_path
+
+ expect(described_class.absolute_path(upload))
+ .to end_with("#{dynamic_segment}/secret/foo.jpg")
+ end
+ end
+
+ describe "#store_dir" do
+ it "stores in the namespace path" do
+ uploader = described_class.new(project)
+
+ expect(uploader.store_dir).to include(project.full_path)
+ expect(uploader.store_dir).not_to include("system")
+ end
end
end
end
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
new file mode 100644
index 00000000000..14fd5f3600f
--- /dev/null
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+describe JobArtifactUploader do
+ let(:job_artifact) { create(:ci_job_artifact) }
+ let(:uploader) { described_class.new(job_artifact, :file) }
+ let(:local_path) { Gitlab.config.artifacts.path }
+
+ describe '#store_dir' do
+ subject { uploader.store_dir }
+
+ let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.project_id}/#{job_artifact.id}" }
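+ # The regex below checks the hex-digest prefix (\h{2}/\h{2}/\h{64}); `path`
+ # covers the trailing date/project_id/artifact_id segments.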
+
+ context 'when using local storage' do
+ it { is_expected.to start_with(local_path) }
+ it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
+ it { is_expected.to end_with(path) }
+ end
+ end
+
+ describe '#cache_dir' do
+ subject { uploader.cache_dir }
+
+ it { is_expected.to start_with(local_path) }
+ it { is_expected.to end_with('/tmp/cache') }
+ end
+
+ describe '#work_dir' do
+ subject { uploader.work_dir }
+
+ it { is_expected.to start_with(local_path) }
+ it { is_expected.to end_with('/tmp/work') }
+ end
+
+ context 'when the file is stored in a valid local_path' do
+ let(:file) do
+ fixture_file_upload(
+ Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
+ end
+
+ before do
+ uploader.store!(file)
+ end
+
+ subject { uploader.file.path }
+
+ it { is_expected.to start_with(local_path) }
+ it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
+ it { is_expected.to include("/#{job_artifact.project_id}/") }
+ it { is_expected.to end_with("ci_build_artifacts.zip") }
+ end
+end
diff --git a/spec/uploaders/artifact_uploader_spec.rb b/spec/uploaders/legacy_artifact_uploader_spec.rb
index 88f394b2938..efeffb78772 100644
--- a/spec/uploaders/artifact_uploader_spec.rb
+++ b/spec/uploaders/legacy_artifact_uploader_spec.rb
@@ -1,9 +1,8 @@
require 'rails_helper'
-describe ArtifactUploader do
- let(:store) { described_class::LOCAL_STORE }
- let(:job) { create(:ci_build, artifacts_file_store: store) }
- let(:uploader) { described_class.new(job, :artifacts_file) }
+describe LegacyArtifactUploader do
+ let(:job) { create(:ci_build) }
+ let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
let(:local_path) { Gitlab.config.artifacts.path }
describe '.local_store_path' do
@@ -18,7 +17,7 @@ describe ArtifactUploader do
describe '.artifacts_upload_path' do
subject { described_class.artifacts_upload_path }
-
+
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('tmp/uploads/') }
end
@@ -32,29 +31,19 @@ describe ArtifactUploader do
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with(path) }
end
-
- context 'when using remote storage' do
- let(:store) { described_class::REMOTE_STORE }
-
- before do
- stub_artifacts_object_storage
- end
-
- it { is_expected.to eq(path) }
- end
end
describe '#cache_dir' do
subject { uploader.cache_dir }
- it { is_expected.to start_with(path) }
+ it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/cache') }
end
describe '#work_dir' do
subject { uploader.work_dir }
- it { is_expected.to start_with(path) }
+ it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/work') }
end
@@ -66,11 +55,23 @@ describe ArtifactUploader do
subject { uploader.filename }
it { is_expected.to be_nil }
+ end
- context 'with artifacts' do
- let(:job) { create(:ci_build, :artifacts) }
+ context 'when the file is stored in a valid path' do
+ let(:file) do
+ fixture_file_upload(
+ Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
+ end
- it { is_expected.not_to be_nil }
+ before do
+ uploader.store!(file)
end
+
+ subject { uploader.file.path }
+
+ it { is_expected.to start_with(local_path) }
+ it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") }
+ it { is_expected.to include("/#{job.project_id}/") }
+ it { is_expected.to end_with("ci_build_artifacts.zip") }
end
end
diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb
new file mode 100644
index 00000000000..c6c4500c179
--- /dev/null
+++ b/spec/uploaders/namespace_file_uploader_spec.rb
@@ -0,0 +1,21 @@
+require 'spec_helper'
+
+describe NamespaceFileUploader do
+ let(:group) { build_stubbed(:group) }
+ let(:uploader) { described_class.new(group) }
+
+ describe "#store_dir" do
+ it "stores in the namespace id directory" do
+ expect(uploader.store_dir).to include(group.id.to_s)
+ end
+ end
+
+ describe ".absolute_path" do
+ it "stores in thecorrect directory" do
+ upload_record = create(:upload, :namespace_upload, model: group)
+
+ expect(described_class.absolute_path(upload_record))
+ .to include("-/system/namespace/#{group.id}")
+ end
+ end
+end
diff --git a/spec/views/dashboard/projects/_blank_state_admin_welcome.haml.rb b/spec/views/dashboard/projects/_blank_state_admin_welcome.haml.rb
new file mode 100644
index 00000000000..2f58eec86dc
--- /dev/null
+++ b/spec/views/dashboard/projects/_blank_state_admin_welcome.haml.rb
@@ -0,0 +1,15 @@
+require 'spec_helper'
+
+describe 'dashboard/projects/_blank_state_admin_welcome.html.haml' do
+ let(:user) { create(:admin) }
+
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ end
+
+ it 'links to new group path' do
+ render
+
+ expect(rendered).to have_link('Create a group', href: new_group_path)
+ end
+end
diff --git a/spec/views/projects/commit/show.html.haml_spec.rb b/spec/views/projects/commit/show.html.haml_spec.rb
index 32c95c6bb0d..a9c32122600 100644
--- a/spec/views/projects/commit/show.html.haml_spec.rb
+++ b/spec/views/projects/commit/show.html.haml_spec.rb
@@ -2,14 +2,15 @@ require 'spec_helper'
describe 'projects/commit/show.html.haml' do
let(:project) { create(:project, :repository) }
+ let(:commit) { project.commit }
before do
assign(:project, project)
assign(:repository, project.repository)
- assign(:commit, project.commit)
- assign(:noteable, project.commit)
+ assign(:commit, commit)
+ assign(:noteable, commit)
assign(:notes, [])
- assign(:diffs, project.commit.diffs)
+ assign(:diffs, commit.diffs)
allow(view).to receive(:current_user).and_return(nil)
allow(view).to receive(:can?).and_return(false)
@@ -43,4 +44,19 @@ describe 'projects/commit/show.html.haml' do
expect(rendered).not_to have_selector('.limit-container-width')
end
end
+
+ context 'in the context of a merge request' do
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+
+ before do
+ assign(:merge_request, merge_request)
+ render
+ end
+
+ it 'shows that it is in the context of a merge request' do
+ merge_request_url = diffs_project_merge_request_url(project, merge_request, commit_id: commit.id)
+ expect(rendered).to have_content("This commit is part of merge request")
+ expect(rendered).to have_link(merge_request.to_reference, href: merge_request_url)
+ end
+ end
end
diff --git a/spec/views/projects/jobs/show.html.haml_spec.rb b/spec/views/projects/jobs/show.html.haml_spec.rb
index d4279626e75..6139529013f 100644
--- a/spec/views/projects/jobs/show.html.haml_spec.rb
+++ b/spec/views/projects/jobs/show.html.haml_spec.rb
@@ -185,6 +185,31 @@ describe 'projects/jobs/show' do
end
end
+ context 'when incomplete trigger_request is used' do
+ before do
+ build.trigger_request = FactoryGirl.build(:ci_trigger_request, trigger: nil)
+ end
+
+ it 'does not render the token block' do
+ render
+
+ expect(rendered).not_to have_content('Token')
+ end
+ end
+
+ context 'when complete trigger_request is used' do
+ before do
+ build.trigger_request = FactoryGirl.build(:ci_trigger_request)
+ end
+
+ it 'renders the token' do
+ render
+
+ expect(rendered).to have_content('Token')
+ expect(rendered).to have_content(build.trigger_request.trigger.short_token)
+ end
+ end
+
describe 'commit title in sidebar' do
let(:commit_title) { project.commit.title }
diff --git a/spec/views/projects/merge_requests/_commits.html.haml_spec.rb b/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
index efed2e02a1b..3ca67114558 100644
--- a/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
+++ b/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
@@ -25,8 +25,8 @@ describe 'projects/merge_requests/_commits.html.haml' do
it 'shows commits from source project' do
render
- commit = source_project.commit(merge_request.source_branch)
- href = project_commit_path(source_project, commit)
+ commit = merge_request.commits.first # HEAD
+ href = diffs_project_merge_request_path(target_project, merge_request, commit_id: commit)
expect(rendered).to have_link(Commit.truncate_sha(commit.sha), href: href)
end
diff --git a/spec/views/projects/merge_requests/diffs/_diffs.html.haml_spec.rb b/spec/views/projects/merge_requests/diffs/_diffs.html.haml_spec.rb
new file mode 100644
index 00000000000..e7c40421f1f
--- /dev/null
+++ b/spec/views/projects/merge_requests/diffs/_diffs.html.haml_spec.rb
@@ -0,0 +1,36 @@
+require 'spec_helper'
+
+describe 'projects/merge_requests/diffs/_diffs.html.haml' do
+ include Devise::Test::ControllerHelpers
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+ let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project, author: user) }
+
+ before do
+ allow(view).to receive(:url_for).and_return(controller.request.fullpath)
+
+ assign(:merge_request, merge_request)
+ assign(:environment, merge_request.environments_for(user).last)
+ assign(:diffs, merge_request.diffs)
+ assign(:merge_request_diffs, merge_request.diffs)
+ assign(:diff_notes_disabled, true) # disable note creation
+ assign(:use_legacy_diff_notes, false)
+ assign(:grouped_diff_discussions, {})
+ assign(:notes, [])
+ end
+
+ context 'for a commit' do
+ let(:commit) { merge_request.commits.last }
+
+ before do
+ assign(:commit, commit)
+ end
+
+ it "shows the commit scope" do
+ render
+
+ expect(rendered).to have_content "Only comments from the following commit are shown below"
+ end
+ end
+end
diff --git a/spec/views/projects/pipelines_settings/_show.html.haml_spec.rb b/spec/views/projects/pipelines_settings/_show.html.haml_spec.rb
index c757ccf02d3..95f0be49412 100644
--- a/spec/views/projects/pipelines_settings/_show.html.haml_spec.rb
+++ b/spec/views/projects/pipelines_settings/_show.html.haml_spec.rb
@@ -35,7 +35,7 @@ describe 'projects/pipelines_settings/_show' do
context 'when kubernetes is active' do
before do
- project.build_kubernetes_service(active: true)
+ create(:kubernetes_service, project: project)
end
context 'when auto devops domain is not defined' do
diff --git a/spec/workers/authorized_projects_worker_spec.rb b/spec/workers/authorized_projects_worker_spec.rb
index 90ed1309d4a..0d6eb536c33 100644
--- a/spec/workers/authorized_projects_worker_spec.rb
+++ b/spec/workers/authorized_projects_worker_spec.rb
@@ -65,7 +65,6 @@ describe AuthorizedProjectsWorker do
args_list = build_args_list(project.owner.id)
push_bulk_args = {
'class' => described_class,
- 'queue' => described_class.sidekiq_options['queue'],
'args' => args_list
}
diff --git a/spec/workers/background_migration_worker_spec.rb b/spec/workers/background_migration_worker_spec.rb
index 4f6e3474634..1c54cf55fa0 100644
--- a/spec/workers/background_migration_worker_spec.rb
+++ b/spec/workers/background_migration_worker_spec.rb
@@ -10,35 +10,4 @@ describe BackgroundMigrationWorker, :sidekiq do
described_class.new.perform('Foo', [10, 20])
end
end
-
- describe '.perform_bulk' do
- it 'enqueues background migrations in bulk' do
- Sidekiq::Testing.fake! do
- described_class.perform_bulk([['Foo', [1]], ['Foo', [2]]])
-
- expect(described_class.jobs.count).to eq 2
- expect(described_class.jobs).to all(include('enqueued_at'))
- end
- end
- end
-
- describe '.perform_bulk_in' do
- context 'when delay is valid' do
- it 'correctly schedules background migrations' do
- Sidekiq::Testing.fake! do
- described_class.perform_bulk_in(1.minute, [['Foo', [1]], ['Foo', [2]]])
-
- expect(described_class.jobs.count).to eq 2
- expect(described_class.jobs).to all(include('at'))
- end
- end
- end
-
- context 'when delay is invalid' do
- it 'raises an ArgumentError exception' do
- expect { described_class.perform_bulk_in(-60, [['Foo']]) }
- .to raise_error(ArgumentError)
- end
- end
- end
end
diff --git a/spec/workers/concerns/application_worker_spec.rb b/spec/workers/concerns/application_worker_spec.rb
new file mode 100644
index 00000000000..0145563e0ed
--- /dev/null
+++ b/spec/workers/concerns/application_worker_spec.rb
@@ -0,0 +1,58 @@
+require 'spec_helper'
+
+describe ApplicationWorker do
+ let(:worker) do
+ Class.new do
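+ # Anonymous classes have no name, so define one for ApplicationWorker to
+ # derive the queue name from.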
+ def self.name
+ 'Gitlab::Foo::Bar::DummyWorker'
+ end
+
+ include ApplicationWorker
+ end
+ end
+
+ describe 'Sidekiq options' do
+ it 'sets the queue name based on the class name' do
+ expect(worker.sidekiq_options['queue']).to eq('foo_bar_dummy')
+ end
+ end
+
+ describe '.queue' do
+ it 'returns the queue name' do
+ worker.sidekiq_options queue: :some_queue
+
+ expect(worker.queue).to eq('some_queue')
+ end
+ end
+
+ describe '.bulk_perform_async' do
+ it 'enqueues jobs in bulk' do
+ Sidekiq::Testing.fake! do
+ worker.bulk_perform_async([['Foo', [1]], ['Foo', [2]]])
+
+ expect(worker.jobs.count).to eq 2
+ expect(worker.jobs).to all(include('enqueued_at'))
+ end
+ end
+ end
+
+ describe '.bulk_perform_in' do
+ context 'when delay is valid' do
+ it 'correctly schedules jobs' do
+ Sidekiq::Testing.fake! do
+ worker.bulk_perform_in(1.minute, [['Foo', [1]], ['Foo', [2]]])
+
+ expect(worker.jobs.count).to eq 2
+ expect(worker.jobs).to all(include('at'))
+ end
+ end
+ end
+
+ context 'when delay is invalid' do
+ it 'raises an ArgumentError exception' do
+ expect { worker.bulk_perform_in(-60, [['Foo']]) }
+ .to raise_error(ArgumentError)
+ end
+ end
+ end
+end
diff --git a/spec/workers/concerns/cluster_queue_spec.rb b/spec/workers/concerns/cluster_queue_spec.rb
index 1050651fa51..5049886b55c 100644
--- a/spec/workers/concerns/cluster_queue_spec.rb
+++ b/spec/workers/concerns/cluster_queue_spec.rb
@@ -3,7 +3,11 @@ require 'spec_helper'
describe ClusterQueue do
let(:worker) do
Class.new do
- include Sidekiq::Worker
+ def self.name
+ 'DummyWorker'
+ end
+
+ include ApplicationWorker
include ClusterQueue
end
end
diff --git a/spec/workers/concerns/cronjob_queue_spec.rb b/spec/workers/concerns/cronjob_queue_spec.rb
index 5d1336c21a6..3ae1c5f54d8 100644
--- a/spec/workers/concerns/cronjob_queue_spec.rb
+++ b/spec/workers/concerns/cronjob_queue_spec.rb
@@ -3,7 +3,11 @@ require 'spec_helper'
describe CronjobQueue do
let(:worker) do
Class.new do
- include Sidekiq::Worker
+ def self.name
+ 'DummyWorker'
+ end
+
+ include ApplicationWorker
include CronjobQueue
end
end
diff --git a/spec/workers/concerns/dedicated_sidekiq_queue_spec.rb b/spec/workers/concerns/dedicated_sidekiq_queue_spec.rb
deleted file mode 100644
index 512baec8b7e..00000000000
--- a/spec/workers/concerns/dedicated_sidekiq_queue_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-require 'spec_helper'
-
-describe DedicatedSidekiqQueue do
- let(:worker) do
- Class.new do
- def self.name
- 'Foo::Bar::DummyWorker'
- end
-
- include Sidekiq::Worker
- include DedicatedSidekiqQueue
- end
- end
-
- describe 'queue names' do
- it 'sets the queue name based on the class name' do
- expect(worker.sidekiq_options['queue']).to eq('foo_bar_dummy')
- end
- end
-end
diff --git a/spec/workers/concerns/gitlab/github_import/notify_upon_death_spec.rb b/spec/workers/concerns/gitlab/github_import/notify_upon_death_spec.rb
new file mode 100644
index 00000000000..4b9aa9a7ef8
--- /dev/null
+++ b/spec/workers/concerns/gitlab/github_import/notify_upon_death_spec.rb
@@ -0,0 +1,49 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::NotifyUponDeath do
+ let(:worker_class) do
+ Class.new do
+ include Sidekiq::Worker
+ include Gitlab::GithubImport::NotifyUponDeath
+ end
+ end
+
+ describe '.sidekiq_retries_exhausted' do
+ it 'notifies the JobWaiter when 3 arguments are given and the last is a String' do
+ job = { 'args' => [12, {}, '123abc'], 'jid' => '123' }
+
+ expect(Gitlab::JobWaiter)
+ .to receive(:notify)
+ .with('123abc', '123')
+
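+ # Invoke the configured hook directly, as Sidekiq would once retries are exhausted.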
+ worker_class.sidekiq_retries_exhausted_block.call(job)
+ end
+
+ it 'does not notify the JobWaiter when only 2 arguments are given' do
+ job = { 'args' => [12, {}], 'jid' => '123' }
+
+ expect(Gitlab::JobWaiter)
+ .not_to receive(:notify)
+
+ worker_class.sidekiq_retries_exhausted_block.call(job)
+ end
+
+ it 'does not notify the JobWaiter when only 1 argument is given' do
+ job = { 'args' => [12], 'jid' => '123' }
+
+ expect(Gitlab::JobWaiter)
+ .not_to receive(:notify)
+
+ worker_class.sidekiq_retries_exhausted_block.call(job)
+ end
+
+ it 'does not notify the JobWaiter when the last argument is not a String' do
+ job = { 'args' => [12, {}, 40], 'jid' => '123' }
+
+ expect(Gitlab::JobWaiter)
+ .not_to receive(:notify)
+
+ worker_class.sidekiq_retries_exhausted_block.call(job)
+ end
+ end
+end
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
new file mode 100644
index 00000000000..68cfe9d5545
--- /dev/null
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -0,0 +1,74 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::ObjectImporter do
+ let(:worker) do
+ Class.new do
+ def self.name
+ 'DummyWorker'
+ end
+
+ include(Gitlab::GithubImport::ObjectImporter)
+
+ def counter_name
+ :dummy_counter
+ end
+
+ def counter_description
+ 'This is a counter'
+ end
+ end.new
+ end
+
+ describe '#import' do
+ it 'imports the object' do
+ representation_class = double(:representation_class)
+ importer_class = double(:importer_class)
+ importer_instance = double(:importer_instance)
+ representation = double(:representation)
+ project = double(:project, path_with_namespace: 'foo/bar')
+ client = double(:client)
+
+ expect(worker)
+ .to receive(:representation_class)
+ .and_return(representation_class)
+
+ expect(worker)
+ .to receive(:importer_class)
+ .and_return(importer_class)
+
+ expect(representation_class)
+ .to receive(:from_json_hash)
+ .with(an_instance_of(Hash))
+ .and_return(representation)
+
+ expect(importer_class)
+ .to receive(:new)
+ .with(representation, project, client)
+ .and_return(importer_instance)
+
+ expect(importer_instance)
+ .to receive(:execute)
+
+ expect(worker.counter)
+ .to receive(:increment)
+ .with(project: 'foo/bar')
+ .and_call_original
+
+ worker.import(project, client, { 'number' => 10 })
+ end
+ end
+
+ describe '#counter' do
+ it 'returns a Prometheus counter' do
+ expect(worker)
+ .to receive(:counter_name)
+ .and_call_original
+
+ expect(worker)
+ .to receive(:counter_description)
+ .and_call_original
+
+ worker.counter
+ end
+ end
+end
diff --git a/spec/workers/concerns/gitlab/github_import/queue_spec.rb b/spec/workers/concerns/gitlab/github_import/queue_spec.rb
new file mode 100644
index 00000000000..9c69ee32da1
--- /dev/null
+++ b/spec/workers/concerns/gitlab/github_import/queue_spec.rb
@@ -0,0 +1,16 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Queue do
+ it 'sets the Sidekiq options for the worker' do
+ worker = Class.new do
+ def self.name
+ 'DummyWorker'
+ end
+
+ include ApplicationWorker
+ include Gitlab::GithubImport::Queue
+ end
+
+ expect(worker.sidekiq_options['queue']).to eq('github_importer')
+ end
+end
diff --git a/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
new file mode 100644
index 00000000000..8de4059c4ae
--- /dev/null
+++ b/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
@@ -0,0 +1,110 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::ReschedulingMethods do
+ let(:worker) do
+ Class.new { include(Gitlab::GithubImport::ReschedulingMethods) }.new
+ end
+
+ describe '#perform' do
+ context 'with a non-existing project' do
+ it 'does not perform any work' do
+ expect(worker)
+ .not_to receive(:try_import)
+
+ worker.perform(-1, {})
+ end
+
+ it 'notifies any waiters so they do not wait forever' do
+ expect(worker)
+ .to receive(:notify_waiter)
+ .with('123')
+
+ worker.perform(-1, {}, '123')
+ end
+ end
+
+ context 'with an existing project' do
+ let(:project) { create(:project) }
+
+ it 'notifies any waiters upon successfully importing the data' do
+ expect(worker)
+ .to receive(:try_import)
+ .with(
+ an_instance_of(Project),
+ an_instance_of(Gitlab::GithubImport::Client),
+ { 'number' => 2 }
+ )
+ .and_return(true)
+
+ expect(worker)
+ .to receive(:notify_waiter).with('123')
+
+ worker.perform(project.id, { 'number' => 2 }, '123')
+ end
+
+ it 'reschedules itself if the data could not be imported' do
+ expect(worker)
+ .to receive(:try_import)
+ .with(
+ an_instance_of(Project),
+ an_instance_of(Gitlab::GithubImport::Client),
+ { 'number' => 2 }
+ )
+ .and_return(false)
+
+ expect(worker)
+ .not_to receive(:notify_waiter)
+
+ expect_any_instance_of(Gitlab::GithubImport::Client)
+ .to receive(:rate_limit_resets_in)
+ .and_return(14)
+
+ expect(worker.class)
+ .to receive(:perform_in)
+ .with(14, project.id, { 'number' => 2 }, '123')
+
+ worker.perform(project.id, { 'number' => 2 }, '123')
+ end
+ end
+ end
+
+ describe '#try_import' do
+ it 'returns true when the import succeeds' do
+ expect(worker)
+ .to receive(:import)
+ .with(10, 20)
+
+ expect(worker.try_import(10, 20)).to eq(true)
+ end
+
+ it 'returns false when the import fails due to hitting the GitHub API rate limit' do
+ expect(worker)
+ .to receive(:import)
+ .with(10, 20)
+ .and_raise(Gitlab::GithubImport::RateLimitError)
+
+ expect(worker.try_import(10, 20)).to eq(false)
+ end
+ end
+
+ describe '#notify_waiter' do
+ it 'notifies the waiter if a waiter key is specified' do
+ expect(worker)
+ .to receive(:jid)
+ .and_return('abc123')
+
+ expect(Gitlab::JobWaiter)
+ .to receive(:notify)
+ .with('123', 'abc123')
+
+ worker.notify_waiter('123')
+ end
+
+ it 'does not notify any waiters if no waiter key is specified' do
+ expect(Gitlab::JobWaiter)
+ .not_to receive(:notify)
+
+ worker.notify_waiter(nil)
+ end
+ end
+end
diff --git a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
new file mode 100644
index 00000000000..241e8a2b6d3
--- /dev/null
+++ b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
@@ -0,0 +1,77 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::StageMethods do
+ let(:project) { create(:project) }
+ let(:worker) do
+ Class.new { include(Gitlab::GithubImport::StageMethods) }.new
+ end
+
+ describe '#perform' do
+ it 'returns if no project could be found' do
+ expect(worker).not_to receive(:try_import)
+
+ worker.perform(-1)
+ end
+
+ it 'imports the data when the project exists' do
+ allow(worker)
+ .to receive(:find_project)
+ .with(project.id)
+ .and_return(project)
+
+ expect(worker)
+ .to receive(:try_import)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Client),
+ an_instance_of(Project)
+ )
+
+ worker.perform(project.id)
+ end
+ end
+
+ describe '#try_import' do
+ it 'imports the project' do
+ client = double(:client)
+
+ expect(worker)
+ .to receive(:import)
+ .with(client, project)
+
+ worker.try_import(client, project)
+ end
+
+ it 'reschedules the worker if RateLimitError was raised' do
+ client = double(:client, rate_limit_resets_in: 10)
+
+ expect(worker)
+ .to receive(:import)
+ .with(client, project)
+ .and_raise(Gitlab::GithubImport::RateLimitError)
+
+ expect(worker.class)
+ .to receive(:perform_in)
+ .with(10, project.id)
+
+ worker.try_import(client, project)
+ end
+ end
+
+ describe '#find_project' do
+ it 'returns a Project for an existing ID' do
+ project.update_column(:import_status, 'started')
+
+ expect(worker.find_project(project.id)).to eq(project)
+ end
+
+ it 'returns nil for a project that failed importing' do
+ project.update_column(:import_status, 'failed')
+
+ expect(worker.find_project(project.id)).to be_nil
+ end
+
+ it 'returns nil for a non-existing project ID' do
+ expect(worker.find_project(-1)).to be_nil
+ end
+ end
+end
diff --git a/spec/workers/concerns/pipeline_queue_spec.rb b/spec/workers/concerns/pipeline_queue_spec.rb
index eac5a770e5f..dd911760948 100644
--- a/spec/workers/concerns/pipeline_queue_spec.rb
+++ b/spec/workers/concerns/pipeline_queue_spec.rb
@@ -3,7 +3,11 @@ require 'spec_helper'
describe PipelineQueue do
let(:worker) do
Class.new do
- include Sidekiq::Worker
+ def self.name
+ 'DummyWorker'
+ end
+
+ include ApplicationWorker
include PipelineQueue
end
end
diff --git a/spec/workers/concerns/repository_check_queue_spec.rb b/spec/workers/concerns/repository_check_queue_spec.rb
index 8868e969829..fdbbfcc90a5 100644
--- a/spec/workers/concerns/repository_check_queue_spec.rb
+++ b/spec/workers/concerns/repository_check_queue_spec.rb
@@ -3,7 +3,11 @@ require 'spec_helper'
describe RepositoryCheckQueue do
let(:worker) do
Class.new do
- include Sidekiq::Worker
+ def self.name
+ 'DummyWorker'
+ end
+
+ include ApplicationWorker
include RepositoryCheckQueue
end
end
diff --git a/spec/workers/create_pipeline_worker_spec.rb b/spec/workers/create_pipeline_worker_spec.rb
new file mode 100644
index 00000000000..02cb0f46cb4
--- /dev/null
+++ b/spec/workers/create_pipeline_worker_spec.rb
@@ -0,0 +1,36 @@
+require 'spec_helper'
+
+describe CreatePipelineWorker do
+ describe '#perform' do
+ let(:worker) { described_class.new }
+
+ context 'when a project is not found' do
+ it 'does not call the Service' do
+ expect(Ci::CreatePipelineService).not_to receive(:new)
+ expect { worker.perform(99, create(:user).id, 'master', :web) }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'when a user is not found' do
+ let(:project) { create(:project) }
+
+ it 'does not call the Service' do
+ expect(Ci::CreatePipelineService).not_to receive(:new)
+ expect { worker.perform(project.id, 99, project.default_branch, :web) }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'when everything is ok' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:create_pipeline_service) { instance_double(Ci::CreatePipelineService) }
+
+ it 'calls the Service' do
+ expect(Ci::CreatePipelineService).to receive(:new).with(project, user, ref: project.default_branch).and_return(create_pipeline_service)
+ expect(create_pipeline_service).to receive(:execute).with(:web, any_args)
+
+ worker.perform(project.id, user.id, project.default_branch, :web)
+ end
+ end
+ end
+end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 30908534eb3..7ee0a51a263 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -1,44 +1,21 @@
require 'spec_helper'
describe 'Every Sidekiq worker' do
- let(:workers) do
- root = Rails.root.join('app', 'workers')
- concerns = root.join('concerns').to_s
-
- workers = Dir[root.join('**', '*.rb')]
- .reject { |path| path.start_with?(concerns) }
-
- workers.map do |path|
- ns = Pathname.new(path).relative_path_from(root).to_s.gsub('.rb', '')
-
- ns.camelize.constantize
- end
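+ # Worker discovery now goes through Gitlab::SidekiqConfig instead of
+ # scanning app/workers by hand.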
+ it 'includes ApplicationWorker' do
+ expect(Gitlab::SidekiqConfig.workers).to all(include(ApplicationWorker))
end
it 'does not use the default queue' do
- workers.each do |worker|
- expect(worker.sidekiq_options['queue'].to_s).not_to eq('default')
- end
+ expect(Gitlab::SidekiqConfig.workers.map(&:queue)).not_to include('default')
end
it 'uses the cronjob queue when the worker runs as a cronjob' do
- cron_workers = Settings.cron_jobs
- .map { |job_name, options| options['job_class'].constantize }
- .to_set
-
- workers.each do |worker|
- next unless cron_workers.include?(worker)
-
- expect(worker.sidekiq_options['queue'].to_s).to eq('cronjob')
- end
+ expect(Gitlab::SidekiqConfig.cron_workers.map(&:queue)).to all(eq('cronjob'))
end
it 'defines the queue in the Sidekiq configuration file' do
- config = YAML.load_file(Rails.root.join('config', 'sidekiq_queues.yml').to_s)
- queue_names = config[:queues].map { |(queue, _)| queue }.to_set
+ config_queue_names = Gitlab::SidekiqConfig.config_queues.to_set
- workers.each do |worker|
- expect(queue_names).to include(worker.sidekiq_options['queue'].to_s)
- end
+ expect(Gitlab::SidekiqConfig.worker_queues).to all(be_in(config_queue_names))
end
end
diff --git a/spec/workers/expire_build_instance_artifacts_worker_spec.rb b/spec/workers/expire_build_instance_artifacts_worker_spec.rb
index bed5c5e2ecb..e1a56c72162 100644
--- a/spec/workers/expire_build_instance_artifacts_worker_spec.rb
+++ b/spec/workers/expire_build_instance_artifacts_worker_spec.rb
@@ -11,12 +11,8 @@ describe ExpireBuildInstanceArtifactsWorker do
end
context 'with expired artifacts' do
- let(:artifacts_expiry) { { artifacts_expire_at: Time.now - 7.days } }
-
context 'when associated project is valid' do
- let(:build) do
- create(:ci_build, :artifacts, artifacts_expiry)
- end
+ let(:build) { create(:ci_build, :artifacts, :expired) }
it 'does expire' do
expect(build.reload.artifacts_expired?).to be_truthy
@@ -26,14 +22,14 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_falsey
end
- it 'does nullify artifacts_file column' do
- expect(build.reload.artifacts_file_identifier).to be_nil
+ it 'does remove the job artifact record' do
+ expect(build.reload.job_artifacts_archive).to be_nil
end
end
end
context 'with not yet expired artifacts' do
- let(:build) do
+ set(:build) do
create(:ci_build, :artifacts, artifacts_expire_at: Time.now + 7.days)
end
@@ -45,8 +41,8 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_truthy
end
- it 'does not nullify artifacts_file column' do
- expect(build.reload.artifacts_file_identifier).not_to be_nil
+ it 'does not remove the job artifact record' do
+ expect(build.reload.job_artifacts_archive).not_to be_nil
end
end
@@ -61,13 +57,13 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_truthy
end
- it 'does not nullify artifacts_file column' do
- expect(build.reload.artifacts_file_identifier).not_to be_nil
+ it 'does not remove the job artifact record' do
+ expect(build.reload.job_artifacts_archive).not_to be_nil
end
end
context 'for expired artifacts' do
- let(:build) { create(:ci_build, artifacts_expire_at: Time.now - 7.days) }
+ let(:build) { create(:ci_build, :expired) }
it 'is still expired' do
expect(build.reload.artifacts_expired?).to be_truthy
diff --git a/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb b/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb
new file mode 100644
index 00000000000..3be49a0dee8
--- /dev/null
+++ b/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb
@@ -0,0 +1,115 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::AdvanceStageWorker, :clean_gitlab_redis_shared_state do
+ let(:project) { create(:project, import_jid: '123') }
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ context 'when the project no longer exists' do
+ it 'does not perform any work' do
+ expect(worker).not_to receive(:wait_for_jobs)
+
+ worker.perform(-1, { '123' => 2 }, :finish)
+ end
+ end
+
+ context 'when there are remaining jobs' do
+ before do
+ allow(worker)
+ .to receive(:find_project)
+ .and_return(project)
+ end
+
+ it 'reschedules itself' do
+ expect(worker)
+ .to receive(:wait_for_jobs)
+ .with({ '123' => 2 })
+ .and_return({ '123' => 1 })
+
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(described_class::INTERVAL, project.id, { '123' => 1 }, :finish)
+
+ worker.perform(project.id, { '123' => 2 }, :finish)
+ end
+ end
+
+ context 'when there are no remaining jobs' do
+ before do
+ allow(worker)
+ .to receive(:find_project)
+ .and_return(project)
+
+ allow(worker)
+ .to receive(:wait_for_jobs)
+ .with({ '123' => 2 })
+ .and_return({})
+ end
+
+ it 'schedules the next stage' do
+ expect(project)
+ .to receive(:refresh_import_jid_expiration)
+
+ expect(Gitlab::GithubImport::Stage::FinishImportWorker)
+ .to receive(:perform_async)
+ .with(project.id)
+
+ worker.perform(project.id, { '123' => 2 }, :finish)
+ end
+
+ it 'raises KeyError when the stage name is invalid' do
+ expect { worker.perform(project.id, { '123' => 2 }, :kittens) }
+ .to raise_error(KeyError)
+ end
+ end
+ end
+
+ describe '#wait_for_jobs' do
+ it 'waits for jobs to complete and returns a new pair of keys to wait for' do
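+ # Each entry maps a JobWaiter key to its remaining job count; waiters that
+ # reach zero ('456') drop out of the returned hash.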
+ waiter1 = double(:waiter1, jobs_remaining: 1, key: '123')
+ waiter2 = double(:waiter2, jobs_remaining: 0, key: '456')
+
+ expect(Gitlab::JobWaiter)
+ .to receive(:new)
+ .ordered
+ .with(2, '123')
+ .and_return(waiter1)
+
+ expect(Gitlab::JobWaiter)
+ .to receive(:new)
+ .ordered
+ .with(1, '456')
+ .and_return(waiter2)
+
+ expect(waiter1)
+ .to receive(:wait)
+ .with(described_class::BLOCKING_WAIT_TIME)
+
+ expect(waiter2)
+ .to receive(:wait)
+ .with(described_class::BLOCKING_WAIT_TIME)
+
+ new_waiters = worker.wait_for_jobs({ '123' => 2, '456' => 1 })
+
+ expect(new_waiters).to eq({ '123' => 1 })
+ end
+ end
+
+ describe '#find_project' do
+ it 'returns a Project' do
+ project.update_column(:import_status, 'started')
+
+ found = worker.find_project(project.id)
+
+ expect(found).to be_an_instance_of(Project)
+
+ # This assertion ensures we only select the columns we care about.
+ expect(found.attributes).to eq({ 'id' => nil, 'import_jid' => '123' })
+ end
+
+ it 'returns nil if the project import is not running' do
+ expect(worker.find_project(project.id)).to be_nil
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb b/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
new file mode 100644
index 00000000000..7c8c665a9b3
--- /dev/null
+++ b/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
@@ -0,0 +1,42 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::ImportDiffNoteWorker do
+ let(:worker) { described_class.new }
+
+ describe '#import' do
+ it 'imports a diff note' do
+ project = double(:project, path_with_namespace: 'foo/bar')
+ client = double(:client)
+ importer = double(:importer)
+ hash = {
+ 'noteable_id' => 42,
+ 'path' => 'README.md',
+ 'commit_id' => '123abc',
+ 'diff_hunk' => "@@ -1 +1 @@\n-Hello\n+Hello world",
+ 'user' => { 'id' => 4, 'login' => 'alice' },
+ 'note' => 'Hello world',
+ 'created_at' => Time.zone.now.to_s,
+ 'updated_at' => Time.zone.now.to_s
+ }
+
+ expect(Gitlab::GithubImport::Importer::DiffNoteImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::DiffNote),
+ project,
+ client
+ )
+ .and_return(importer)
+
+ expect(importer)
+ .to receive(:execute)
+
+ expect(worker.counter)
+ .to receive(:increment)
+ .with(project: 'foo/bar')
+ .and_call_original
+
+ worker.import(project, client, hash)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/import_issue_worker_spec.rb b/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
new file mode 100644
index 00000000000..4116380ff4d
--- /dev/null
+++ b/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
@@ -0,0 +1,45 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::ImportIssueWorker do
+ let(:worker) { described_class.new }
+
+ describe '#import' do
+ it 'imports an issue' do
+ project = double(:project, path_with_namespace: 'foo/bar')
+ client = double(:client)
+ importer = double(:importer)
+ hash = {
+ 'iid' => 42,
+ 'title' => 'My Issue',
+ 'description' => 'This is my issue',
+ 'milestone_number' => 4,
+ 'state' => 'opened',
+ 'assignees' => [{ 'id' => 4, 'login' => 'alice' }],
+ 'label_names' => %w[bug],
+ 'user' => { 'id' => 4, 'login' => 'alice' },
+ 'created_at' => Time.zone.now.to_s,
+ 'updated_at' => Time.zone.now.to_s,
+ 'pull_request' => false
+ }
+
+ expect(Gitlab::GithubImport::Importer::IssueAndLabelLinksImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::Issue),
+ project,
+ client
+ )
+ .and_return(importer)
+
+ expect(importer)
+ .to receive(:execute)
+
+ expect(worker.counter)
+ .to receive(:increment)
+ .with(project: 'foo/bar')
+ .and_call_original
+
+ worker.import(project, client, hash)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/import_note_worker_spec.rb b/spec/workers/gitlab/github_import/import_note_worker_spec.rb
new file mode 100644
index 00000000000..0ca825a722b
--- /dev/null
+++ b/spec/workers/gitlab/github_import/import_note_worker_spec.rb
@@ -0,0 +1,40 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::ImportNoteWorker do
+ let(:worker) { described_class.new }
+
+ describe '#import' do
+ it 'imports a note' do
+ project = double(:project, path_with_namespace: 'foo/bar')
+ client = double(:client)
+ importer = double(:importer)
+ hash = {
+ 'noteable_id' => 42,
+ 'noteable_type' => 'issues',
+ 'user' => { 'id' => 4, 'login' => 'alice' },
+ 'note' => 'Hello world',
+ 'created_at' => Time.zone.now.to_s,
+ 'updated_at' => Time.zone.now.to_s
+ }
+
+ expect(Gitlab::GithubImport::Importer::NoteImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::Note),
+ project,
+ client
+ )
+ .and_return(importer)
+
+ expect(importer)
+ .to receive(:execute)
+
+ expect(worker.counter)
+ .to receive(:increment)
+ .with(project: 'foo/bar')
+ .and_call_original
+
+ worker.import(project, client, hash)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb b/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
new file mode 100644
index 00000000000..d49f560af42
--- /dev/null
+++ b/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::ImportPullRequestWorker do
+ let(:worker) { described_class.new }
+
+ describe '#import' do
+ it 'imports a pull request' do
+ project = double(:project, path_with_namespace: 'foo/bar')
+ client = double(:client)
+ importer = double(:importer)
+ hash = {
+ 'iid' => 42,
+ 'title' => 'My Pull Request',
+ 'description' => 'This is my pull request',
+ 'source_branch' => 'my-feature',
+ 'source_branch_sha' => '123abc',
+ 'target_branch' => 'master',
+ 'target_branch_sha' => '456def',
+ 'source_repository_id' => 400,
+ 'target_repository_id' => 200,
+ 'source_repository_owner' => 'alice',
+ 'state' => 'closed',
+ 'milestone_number' => 4,
+ 'user' => { 'id' => 4, 'login' => 'alice' },
+ 'assignee' => { 'id' => 4, 'login' => 'alice' },
+ 'created_at' => Time.zone.now.to_s,
+ 'updated_at' => Time.zone.now.to_s,
+ 'merged_at' => Time.zone.now.to_s
+ }
+
+ expect(Gitlab::GithubImport::Importer::PullRequestImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::PullRequest),
+ project,
+ client
+ )
+ .and_return(importer)
+
+ expect(importer)
+ .to receive(:execute)
+
+ expect(worker.counter)
+ .to receive(:increment)
+ .with(project: 'foo/bar')
+ .and_call_original
+
+ worker.import(project, client, hash)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb b/spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb
new file mode 100644
index 00000000000..073c6d7a2f5
--- /dev/null
+++ b/spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb
@@ -0,0 +1,95 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::RefreshImportJidWorker do
+ let(:worker) { described_class.new }
+
+ describe '.perform_in_the_future' do
+ it 'schedules a job in the future' do
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(1.minute.to_i, 10, '123')
+
+ described_class.perform_in_the_future(10, '123')
+ end
+ end
+
+ describe '#perform' do
+ let(:project) { create(:project, import_jid: '123abc') }
+
+ context 'when the project does not exist' do
+ it 'does nothing' do
+ expect(Gitlab::SidekiqStatus)
+ .not_to receive(:running?)
+
+ worker.perform(-1, '123')
+ end
+ end
+
+ context 'when the job is running' do
+ it 'refreshes the import JID and reschedules itself' do
+ allow(worker)
+ .to receive(:find_project)
+ .with(project.id)
+ .and_return(project)
+
+ expect(Gitlab::SidekiqStatus)
+ .to receive(:running?)
+ .with('123')
+ .and_return(true)
+
+ expect(project)
+ .to receive(:refresh_import_jid_expiration)
+
+ expect(worker.class)
+ .to receive(:perform_in_the_future)
+ .with(project.id, '123')
+
+ worker.perform(project.id, '123')
+ end
+ end
+
+ context 'when the job is no longer running' do
+ it 'returns' do
+ allow(worker)
+ .to receive(:find_project)
+ .with(project.id)
+ .and_return(project)
+
+ expect(Gitlab::SidekiqStatus)
+ .to receive(:running?)
+ .with('123')
+ .and_return(false)
+
+ expect(project)
+ .not_to receive(:refresh_import_jid_expiration)
+
+ worker.perform(project.id, '123')
+ end
+ end
+ end
+
+ describe '#find_project' do
+ it 'returns a Project' do
+ project = create(:project, import_status: 'started')
+
+ expect(worker.find_project(project.id)).to be_an_instance_of(Project)
+ end
+
+ it 'only selects the import JID field' do
+ project = create(:project, import_status: 'started', import_jid: '123abc')
+
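+ # `id` comes back nil because the lookup selects only the import_jid column.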
+ expect(worker.find_project(project.id).attributes)
+ .to eq({ 'id' => nil, 'import_jid' => '123abc' })
+ end
+
+ it 'returns nil for a project for which the import process failed' do
+ project = create(:project, import_status: 'failed')
+
+ expect(worker.find_project(project.id)).to be_nil
+ end
+
+ it 'returns nil for a non-existing project' do
+ expect(worker.find_project(-1)).to be_nil
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
new file mode 100644
index 00000000000..91e0cddb5d8
--- /dev/null
+++ b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Stage::FinishImportWorker do
+ let(:project) { create(:project) }
+ let(:worker) { described_class.new }
+
+ describe '#import' do
+ it 'marks the import as finished' do
+ expect(project).to receive(:after_import)
+ expect(worker).to receive(:report_import_time).with(project)
+
+ worker.import(double(:client), project)
+ end
+ end
+
+ describe '#report_import_time' do
+ it 'reports the total import time' do
+ expect(worker.histogram)
+ .to receive(:observe)
+ .with({ project: project.path_with_namespace }, a_kind_of(Numeric))
+ .and_call_original
+
+ expect(worker.counter)
+ .to receive(:increment)
+ .and_call_original
+
+ expect(worker.logger).to receive(:info).with(an_instance_of(String))
+
+ worker.report_import_time(project)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
new file mode 100644
index 00000000000..8c80d660287
--- /dev/null
+++ b/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
@@ -0,0 +1,30 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Stage::ImportBaseDataWorker do
+ let(:project) { create(:project) }
+ let(:worker) { described_class.new }
+
+ describe '#import' do
+ it 'imports the base data of a project' do
+ importer = double(:importer)
+ client = double(:client)
+
+ described_class::IMPORTERS.each do |klass|
+ expect(klass)
+ .to receive(:new)
+ .with(project, client)
+ .and_return(importer)
+
+ expect(importer).to receive(:execute)
+ end
+
+ expect(project).to receive(:refresh_import_jid_expiration)
+
+ expect(Gitlab::GithubImport::Stage::ImportPullRequestsWorker)
+ .to receive(:perform_async)
+ .with(project.id)
+
+ worker.import(client, project)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
new file mode 100644
index 00000000000..ab347f5b75b
--- /dev/null
+++ b/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker do
+ let(:project) { create(:project) }
+ let(:worker) { described_class.new }
+
+ describe '#import' do
+ it 'imports the issues and diff notes' do
+ client = double(:client)
+
+ described_class::IMPORTERS.each do |klass|
+ importer = double(:importer)
+ waiter = Gitlab::JobWaiter.new(2, '123')
+
+ expect(klass)
+ .to receive(:new)
+ .with(project, client)
+ .and_return(importer)
+
+ expect(importer)
+ .to receive(:execute)
+ .and_return(waiter)
+ end
+
+ expect(Gitlab::GithubImport::AdvanceStageWorker)
+ .to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :notes)
+
+ worker.import(client, project)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb
new file mode 100644
index 00000000000..098d2d55386
--- /dev/null
+++ b/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb
@@ -0,0 +1,29 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Stage::ImportNotesWorker do
+ let(:project) { create(:project) }
+ let(:worker) { described_class.new }
+
+ describe '#import' do
+ it 'imports all the notes' do
+ importer = double(:importer)
+ client = double(:client)
+ waiter = Gitlab::JobWaiter.new(2, '123')
+
+ expect(Gitlab::GithubImport::Importer::NotesImporter)
+ .to receive(:new)
+ .with(project, client)
+ .and_return(importer)
+
+ expect(importer)
+ .to receive(:execute)
+ .and_return(waiter)
+
+ expect(Gitlab::GithubImport::AdvanceStageWorker)
+ .to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :finish)
+
+ worker.import(client, project)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
new file mode 100644
index 00000000000..2fc91a3e80a
--- /dev/null
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Stage::ImportPullRequestsWorker do
+ let(:project) { create(:project) }
+ let(:worker) { described_class.new }
+
+ describe '#import' do
+ it 'imports all the pull requests' do
+ importer = double(:importer)
+ client = double(:client)
+ waiter = Gitlab::JobWaiter.new(2, '123')
+
+ expect(Gitlab::GithubImport::Importer::PullRequestsImporter)
+ .to receive(:new)
+ .with(project, client)
+ .and_return(importer)
+
+ expect(importer)
+ .to receive(:execute)
+ .and_return(waiter)
+
+ expect(project)
+ .to receive(:refresh_import_jid_expiration)
+
+ expect(Gitlab::GithubImport::AdvanceStageWorker)
+ .to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :issues_and_diff_notes)
+
+ worker.import(client, project)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
new file mode 100644
index 00000000000..adab535ac05
--- /dev/null
+++ b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
@@ -0,0 +1,49 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
+ let(:project) { double(:project, id: 4) }
+ let(:worker) { described_class.new }
+
+ describe '#import' do
+ before do
+ expect(Gitlab::GithubImport::RefreshImportJidWorker)
+ .to receive(:perform_in_the_future)
+ .with(project.id, '123')
+
+ expect(worker)
+ .to receive(:jid)
+ .and_return('123')
+ end
+
+ context 'when the import succeeds' do
+ it 'schedules the importing of the base data' do
+ client = double(:client)
+
+ expect_any_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter)
+ .to receive(:execute)
+ .and_return(true)
+
+ expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
+ .to receive(:perform_async)
+ .with(project.id)
+
+ worker.import(client, project)
+ end
+ end
+
+ context 'when the import fails' do
+ it 'does not schedule the importing of the base data' do
+ client = double(:client)
+
+ expect_any_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter)
+ .to receive(:execute)
+ .and_return(false)
+
+ expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
+ .not_to receive(:perform_async)
+
+ worker.import(client, project)
+ end
+ end
+ end
+end
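
Both contexts share the before block: the worker first schedules RefreshImportJidWorker against its own Sidekiq jid, and then only a truthy return from RepositoryImporter#execute advances to ImportBaseDataWorker. A sketch of that branch, under the same assumptions as the sketches above:

    # Sketch only; `jid` is provided by Sidekiq::Worker.
    def import(client, project)
      Gitlab::GithubImport::RefreshImportJidWorker
        .perform_in_the_future(project.id, jid)

      imported = Gitlab::GithubImport::Importer::RepositoryImporter
        .new(project, client)
        .execute

      return unless imported

      Gitlab::GithubImport::Stage::ImportBaseDataWorker
        .perform_async(project.id)
    end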
diff --git a/spec/workers/pipeline_schedule_worker_spec.rb b/spec/workers/pipeline_schedule_worker_spec.rb
index 75197039f5a..e7a4ac0f3d6 100644
--- a/spec/workers/pipeline_schedule_worker_spec.rb
+++ b/spec/workers/pipeline_schedule_worker_spec.rb
@@ -22,25 +22,32 @@ describe PipelineScheduleWorker do
end
context 'when there is a scheduled pipeline within next_run_at' do
- it 'creates a new pipeline' do
- expect { subject }.to change { project.pipelines.count }.by(1)
- expect(Ci::Pipeline.last).to be_schedule
+ shared_examples 'successful scheduling' do
+ it 'creates a new pipeline' do
+ expect { subject }.to change { project.pipelines.count }.by(1)
+ expect(Ci::Pipeline.last).to be_schedule
+
+ pipeline_schedule.reload
+ expect(pipeline_schedule.next_run_at).to be > Time.now
+ expect(pipeline_schedule).to eq(project.pipelines.last.pipeline_schedule)
+ expect(pipeline_schedule).to be_active
+ end
end
- it 'updates the next_run_at field' do
- subject
+ it_behaves_like 'successful scheduling'
- expect(pipeline_schedule.reload.next_run_at).to be > Time.now
- end
-
- it 'sets the schedule on the pipeline' do
- subject
+ context 'when the latest commit contains [ci skip]' do
+ before do
+ allow_any_instance_of(Ci::Pipeline)
+ .to receive(:git_commit_message)
+ .and_return('some commit [ci skip]')
+ end
- expect(project.pipelines.last.pipeline_schedule).to eq(pipeline_schedule)
+ it_behaves_like 'successful scheduling'
end
end
- context 'inactive schedule' do
+ context 'when the schedule is deactivated' do
before do
pipeline_schedule.deactivate!
end
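
The shared_examples/it_behaves_like pair lets the same 'creates a new pipeline' assertions run under two setups, documenting that scheduled pipelines are created even when the head commit says [ci skip]. The RSpec pattern in isolation, as a self-contained runnable example (toy subject, not the worker above):

    require 'rspec/autorun'

    RSpec.describe 'shared examples reuse' do
      shared_examples 'doubles its input' do
        it { expect(doubled).to eq(2 * input) }
      end

      let(:doubled) { input * 2 }

      context 'with a positive input' do
        let(:input) { 3 }
        it_behaves_like 'doubles its input'
      end

      context 'with a negative input' do
        let(:input) { -4 }
        it_behaves_like 'doubles its input'
      end
    end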
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index 05eecf5f0bb..5d9b0679796 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -66,19 +66,21 @@ describe PostReceive do
end
context "gitlab-ci.yml" do
+ let(:changes) { "123456 789012 refs/heads/feature\n654321 210987 refs/tags/tag" }
+
subject { described_class.new.perform(gl_repository, key_id, base64_changes) }
context "creates a Ci::Pipeline for every change" do
before do
stub_ci_pipeline_to_return_yaml_file
- # TODO, don't stub private methods
- #
- allow_any_instance_of(Ci::CreatePipelineService)
- .to receive(:commit).and_return(OpenStruct.new(id: '123456'))
+ allow_any_instance_of(Project)
+ .to receive(:commit)
+ .and_return(project.commit)
allow_any_instance_of(Repository)
- .to receive(:branch_exists?).and_return(true)
+ .to receive(:branch_exists?)
+ .and_return(true)
end
it { expect { subject }.to change { Ci::Pipeline.count }.by(2) }
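
The new changes fixture encodes two refs, a branch and a tag, which is where the expected count of 2 pipelines comes from. base64_changes (defined earlier in this spec) is just the Base64-encoded form of that string, one `<oldrev> <newrev> <ref>` line per pushed ref; a runnable illustration:

    require 'base64'

    changes = "123456 789012 refs/heads/feature\n654321 210987 refs/tags/tag"
    base64_changes = Base64.encode64(changes)

    # PostReceive decodes the payload back into per-ref lines.
    Base64.decode64(base64_changes).lines.map(&:split)
    # => [["123456", "789012", "refs/heads/feature"],
    #     ["654321", "210987", "refs/tags/tag"]]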
diff --git a/spec/workers/project_migrate_hashed_storage_worker_spec.rb b/spec/workers/project_migrate_hashed_storage_worker_spec.rb
index f5226dee0ad..2e3951e7afc 100644
--- a/spec/workers/project_migrate_hashed_storage_worker_spec.rb
+++ b/spec/workers/project_migrate_hashed_storage_worker_spec.rb
@@ -1,29 +1,53 @@
require 'spec_helper'
-describe ProjectMigrateHashedStorageWorker do
+describe ProjectMigrateHashedStorageWorker, :clean_gitlab_redis_shared_state do
describe '#perform' do
let(:project) { create(:project, :empty_repo) }
let(:pending_delete_project) { create(:project, :empty_repo, pending_delete: true) }
- it 'skips when project no longer exists' do
- nonexistent_id = 999999999999
+ context 'when the exclusive lease can be obtained' do
+ before do
+ lease = subject.lease_for(project.id)
- expect(::Projects::HashedStorageMigrationService).not_to receive(:new)
- subject.perform(nonexistent_id)
- end
+ allow(Gitlab::ExclusiveLease).to receive(:new).and_return(lease)
+ allow(lease).to receive(:try_obtain).and_return(true)
+ end
+
+ it 'skips when project no longer exists' do
+ nonexistent_id = 999999999999
+
+ expect(::Projects::HashedStorageMigrationService).not_to receive(:new)
+ subject.perform(nonexistent_id)
+ end
+
+ it 'skips when project is pending delete' do
+ expect(::Projects::HashedStorageMigrationService).not_to receive(:new)
- it 'skips when project is pending delete' do
- expect(::Projects::HashedStorageMigrationService).not_to receive(:new)
+ subject.perform(pending_delete_project.id)
+ end
- subject.perform(pending_delete_project.id)
+ it 'delegates removal to service class' do
+ service = double('service')
+ expect(::Projects::HashedStorageMigrationService).to receive(:new).with(project, subject.logger).and_return(service)
+ expect(service).to receive(:execute)
+
+ subject.perform(project.id)
+ end
end
- it 'delegates removal to service class' do
- service = double('service')
- expect(::Projects::HashedStorageMigrationService).to receive(:new).with(project, subject.logger).and_return(service)
- expect(service).to receive(:execute)
+ context 'when the exclusive lease cannot be obtained' do
+ before do
+ lease = subject.lease_for(project.id)
+
+ allow(Gitlab::ExclusiveLease).to receive(:new).and_return(lease)
+ allow(lease).to receive(:try_obtain).and_return(false)
+ end
+
+ it 'skips the migration' do
+ expect(::Projects::HashedStorageMigrationService).not_to receive(:new)
- subject.perform(project.id)
+ subject.perform(project.id)
+ end
end
end
end
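
The two new contexts stub Gitlab::ExclusiveLease, GitLab's Redis-backed mutex: the worker runs the migration only when try_obtain wins the lease, so concurrent jobs for the same project become no-ops. A sketch of the guard being specced; the lease key and timeout are assumptions, only lease_for's existence is pinned down by the spec:

    LEASE_TIMEOUT = 30 # seconds; assumed, the real value may differ

    def lease_for(project_id)
      Gitlab::ExclusiveLease
        .new("project_migrate_hashed_storage_worker:#{project_id}",
             timeout: LEASE_TIMEOUT)
    end

    def perform(project_id)
      # Another worker holds the lease for this project: skip entirely.
      return unless lease_for(project_id).try_obtain

      project = Project.find_by(id: project_id)
      return if project.nil? || project.pending_delete?

      ::Projects::HashedStorageMigrationService.new(project, logger).execute
    end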
diff --git a/spec/workers/reactive_caching_worker_spec.rb b/spec/workers/reactive_caching_worker_spec.rb
index 5f4453c15d6..3da851de067 100644
--- a/spec/workers/reactive_caching_worker_spec.rb
+++ b/spec/workers/reactive_caching_worker_spec.rb
@@ -1,15 +1,28 @@
require 'spec_helper'
describe ReactiveCachingWorker do
- let(:project) { create(:kubernetes_project) }
- let(:service) { project.deployment_service }
- subject { described_class.new.perform("KubernetesService", service.id) }
+ let(:service) { project.deployment_platform }
describe '#perform' do
- it 'calls #exclusively_update_reactive_cache!' do
- expect_any_instance_of(KubernetesService).to receive(:exclusively_update_reactive_cache!)
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ let(:project) { create(:kubernetes_project) }
- subject
+ it 'calls #exclusively_update_reactive_cache!' do
+ expect_any_instance_of(KubernetesService).to receive(:exclusively_update_reactive_cache!)
+
+ described_class.new.perform("KubernetesService", service.id)
+ end
+ end
+
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
+
+ it 'calls #exclusively_update_reactive_cache!' do
+ expect_any_instance_of(Clusters::Platforms::Kubernetes).to receive(:exclusively_update_reactive_cache!)
+
+ described_class.new.perform("Clusters::Platforms::Kubernetes", service.id)
+ end
end
end
end
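
Both contexts exercise the same entry point: the worker receives a class name and an id, constantizes the class, loads the record, and triggers the refresh, which is how one worker now serves both KubernetesService and Clusters::Platforms::Kubernetes. A sketch of that dispatch (assumed; constantize comes from ActiveSupport, and the real worker may guard differently):

    def perform(class_name, id, *args)
      klass = begin
                class_name.constantize
              rescue NameError
                nil
              end
      return unless klass

      klass.find_by(id: id)&.exclusively_update_reactive_cache!(*args)
    end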
diff --git a/spec/workers/repository_fork_worker_spec.rb b/spec/workers/repository_fork_worker_spec.rb
index e881ec37ae5..74c85848b7e 100644
--- a/spec/workers/repository_fork_worker_spec.rb
+++ b/spec/workers/repository_fork_worker_spec.rb
@@ -1,8 +1,8 @@
require 'spec_helper'
describe RepositoryForkWorker do
- let(:project) { create(:project, :repository, :import_scheduled) }
- let(:fork_project) { create(:project, :repository, forked_from_project: project) }
+ let(:project) { create(:project, :repository) }
+ let(:fork_project) { create(:project, :repository, :import_scheduled, forked_from_project: project) }
let(:shell) { Gitlab::Shell.new }
subject { described_class.new }
@@ -12,50 +12,39 @@ describe RepositoryForkWorker do
end
describe "#perform" do
+ def perform!
+ subject.perform(fork_project.id, '/test/path', project.disk_path)
+ end
+
+ def expect_fork_repository
+ expect(shell).to receive(:fork_repository).with(
+ '/test/path',
+ project.disk_path,
+ fork_project.repository_storage_path,
+ fork_project.disk_path
+ )
+ end
+
describe 'when a worker was reset without cleanup' do
let(:jid) { '12345678' }
- let(:started_project) { create(:project, :repository, :import_started) }
it 'creates a new repository from a fork' do
allow(subject).to receive(:jid).and_return(jid)
- expect(shell).to receive(:fork_repository).with(
- '/test/path',
- project.full_path,
- project.repository_storage_path,
- fork_project.namespace.full_path
- ).and_return(true)
-
- subject.perform(
- project.id,
- '/test/path',
- project.full_path,
- fork_project.namespace.full_path)
+ expect_fork_repository.and_return(true)
+
+ perform!
end
end
it "creates a new repository from a fork" do
- expect(shell).to receive(:fork_repository).with(
- '/test/path',
- project.full_path,
- project.repository_storage_path,
- fork_project.namespace.full_path
- ).and_return(true)
+ expect_fork_repository.and_return(true)
- subject.perform(
- project.id,
- '/test/path',
- project.full_path,
- fork_project.namespace.full_path)
+ perform!
end
it 'flushes various caches' do
- expect(shell).to receive(:fork_repository).with(
- '/test/path',
- project.full_path,
- project.repository_storage_path,
- fork_project.namespace.full_path
- ).and_return(true)
+ expect_fork_repository.and_return(true)
expect_any_instance_of(Repository).to receive(:expire_emptiness_caches)
.and_call_original
@@ -63,32 +52,22 @@ describe RepositoryForkWorker do
expect_any_instance_of(Repository).to receive(:expire_exists_cache)
.and_call_original
- subject.perform(project.id, '/test/path', project.full_path,
- fork_project.namespace.full_path)
+ perform!
end
it "handles bad fork" do
- source_path = project.full_path
- target_path = fork_project.namespace.full_path
- error_message = "Unable to fork project #{project.id} for repository #{source_path} -> #{target_path}"
+ error_message = "Unable to fork project #{fork_project.id} for repository #{project.full_path} -> #{fork_project.full_path}"
- expect(shell).to receive(:fork_repository).and_return(false)
+ expect_fork_repository.and_return(false)
- expect do
- subject.perform(project.id, '/test/path', source_path, target_path)
- end.to raise_error(RepositoryForkWorker::ForkError, error_message)
+ expect { perform! }.to raise_error(RepositoryForkWorker::ForkError, error_message)
end
it 'handles unexpected error' do
- source_path = project.full_path
- target_path = fork_project.namespace.full_path
-
- allow_any_instance_of(Gitlab::Shell).to receive(:fork_repository).and_raise(RuntimeError)
+ expect_fork_repository.and_raise(RuntimeError)
- expect do
- subject.perform(project.id, '/test/path', source_path, target_path)
- end.to raise_error(RepositoryForkWorker::ForkError)
- expect(project.reload.import_status).to eq('failed')
+ expect { perform! }.to raise_error(RepositoryForkWorker::ForkError)
+ expect(fork_project.reload.import_status).to eq('failed')
end
end
end
diff --git a/spec/workers/repository_import_worker_spec.rb b/spec/workers/repository_import_worker_spec.rb
index 5cff5108477..0af537647ad 100644
--- a/spec/workers/repository_import_worker_spec.rb
+++ b/spec/workers/repository_import_worker_spec.rb
@@ -59,5 +59,28 @@ describe RepositoryImportWorker do
expect(project.reload.import_status).to eq('failed')
end
end
+
+ context 'when using an asynchronous importer' do
+ it 'does not mark the import process as finished' do
+ service = double(:service)
+
+ allow(Projects::ImportService)
+ .to receive(:new)
+ .and_return(service)
+
+ allow(service)
+ .to receive(:execute)
+ .and_return(true)
+
+ allow(service)
+ .to receive(:async?)
+ .and_return(true)
+
+ expect_any_instance_of(Project)
+ .not_to receive(:import_finish)
+
+ subject.perform(project.id)
+ end
+ end
end
end
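
The new context documents why the worker cannot unconditionally finish the import: importers that fan out to further Sidekiq jobs, like the GitHub stage workers above, report async? and mark the import finished from their own final stage. A sketch of the guard; the ImportService constructor arguments are an assumption:

    def perform(project_id)
      project = Project.find(project_id)
      service = Projects::ImportService.new(project, project.creator)

      service.execute # error handling elided

      # Asynchronous importers signal completion themselves later.
      project.import_finish unless service.async?
    end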
diff --git a/spec/workers/stuck_ci_jobs_worker_spec.rb b/spec/workers/stuck_ci_jobs_worker_spec.rb
index ac6f4fefb4e..bdc64c6785b 100644
--- a/spec/workers/stuck_ci_jobs_worker_spec.rb
+++ b/spec/workers/stuck_ci_jobs_worker_spec.rb
@@ -105,8 +105,8 @@ describe StuckCiJobsWorker do
job.project.update(pending_delete: true)
end
- it 'does not drop job' do
- expect_any_instance_of(Ci::Build).not_to receive(:drop)
+ it 'drops the job' do
+ expect_any_instance_of(Ci::Build).to receive(:drop).and_call_original
worker.perform
end
end
@@ -117,7 +117,7 @@ describe StuckCiJobsWorker do
let(:worker2) { described_class.new }
it 'is guarded by an exclusive lease when executed concurrently' do
- expect(worker).to receive(:drop).at_least(:once)
+ expect(worker).to receive(:drop).at_least(:once).and_call_original
expect(worker2).not_to receive(:drop)
worker.perform
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(false)
@@ -125,8 +125,8 @@ describe StuckCiJobsWorker do
end
it 'can be executed in sequence' do
- expect(worker).to receive(:drop).at_least(:once)
- expect(worker2).to receive(:drop).at_least(:once)
+ expect(worker).to receive(:drop).at_least(:once).and_call_original
+ expect(worker2).to receive(:drop).at_least(:once).and_call_original
worker.perform
worker2.perform
end
diff --git a/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb b/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb
new file mode 100644
index 00000000000..522e1566271
--- /dev/null
+++ b/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb
@@ -0,0 +1,38 @@
+require 'spec_helper'
+
+describe UpdateHeadPipelineForMergeRequestWorker do
+ describe '#perform' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project) }
+ let(:latest_sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
+
+ context 'when pipeline exists for the source project and branch' do
+ before do
+ create(:ci_empty_pipeline, project: project, ref: merge_request.source_branch, sha: latest_sha)
+ end
+
+ it 'updates the head_pipeline_id of the merge_request' do
+ expect { subject.perform(merge_request.id) }.to change { merge_request.reload.head_pipeline_id }
+ end
+
+ context 'when merge request sha does not equal pipeline sha' do
+ before do
+ merge_request.merge_request_diff.update(head_commit_sha: 'different_sha')
+ end
+
+ it 'does not update head_pipeline_id' do
+ expect { subject.perform(merge_request.id) }.to raise_error(ArgumentError)
+
+ expect(merge_request.reload.head_pipeline_id).to eq(nil)
+ end
+ end
+ end
+
+ context 'when pipeline does not exist for the source project and branch' do
+ it 'does not update the head_pipeline_id of the merge_request' do
+ expect { subject.perform(merge_request.id) }.not_to change { merge_request.reload.head_pipeline_id }
+ end
+ end
+ end
+end
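
Read together, the three examples pin down the worker's behavior: find the newest pipeline for the merge request's source project and branch, raise ArgumentError when its SHA no longer matches the merge request's diff head (a racing push), and otherwise record it as the head pipeline. A sketch under those assumptions; the exact query and error message are not shown by the spec:

    def perform(merge_request_id)
      merge_request = MergeRequest.find(merge_request_id)

      pipeline = Ci::Pipeline
        .where(project: merge_request.source_project,
               ref: merge_request.source_branch)
        .last
      return unless pipeline

      # Guard against racing pushes: the pipeline must match the MR's
      # current diff head before it is recorded.
      unless pipeline.sha == merge_request.diff_head_sha
        raise ArgumentError, 'merge request sha does not equal pipeline sha'
      end

      merge_request.update_attribute(:head_pipeline_id, pipeline.id)
    end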
diff --git a/spec/workers/update_merge_requests_worker_spec.rb b/spec/workers/update_merge_requests_worker_spec.rb
index 558ff9109ec..0fa19ac84bb 100644
--- a/spec/workers/update_merge_requests_worker_spec.rb
+++ b/spec/workers/update_merge_requests_worker_spec.rb
@@ -23,5 +23,17 @@ describe UpdateMergeRequestsWorker do
perform
end
+
+ context 'when slow' do
+ before do
+ stub_const("UpdateMergeRequestsWorker::LOG_TIME_THRESHOLD", -1)
+ end
+
+ it 'logs debug info' do
+ expect(Rails.logger).to receive(:info).with(a_string_matching(/\AUpdateMergeRequestsWorker#perform.*project_id=#{project.id},user_id=#{user.id},oldrev=#{oldrev},newrev=#{newrev},ref=#{ref}/))
+
+ perform
+ end
+ end
end
end
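
Stubbing LOG_TIME_THRESHOLD to -1 forces the slow path: every run takes longer than -1 seconds, so the info line is always logged and the regex can assert its shape. A sketch of the timing guard the expectation implies; the default threshold and exact formatting are assumptions:

    LOG_TIME_THRESHOLD = 90 # seconds; assumed default

    def perform(project_id, user_id, oldrev, newrev, ref)
      start = Time.now
      # ... refresh merge requests for the push ...
      elapsed = Time.now - start

      return unless elapsed > LOG_TIME_THRESHOLD

      args = "project_id=#{project_id},user_id=#{user_id}," \
             "oldrev=#{oldrev},newrev=#{newrev},ref=#{ref}"
      Rails.logger.info("UpdateMergeRequestsWorker#perform #{args}")
    end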